pal-1.5.1/0000755000000000000000000000000011710516471010773 5ustar rootrootpal-1.5.1/src/0000755000000000000000000000000011710516471011562 5ustar rootrootpal-1.5.1/src/newmake.bat0000644000000000000000000000255010001765020013667 0ustar rootroot@rem @rem make in order (contributed by Thomas Keane) mkdir ..\classes cd ..\classes deltree pal deltree org cd ..\src\org\w3c\dom javac -classpath ../../../../classes -d ../../../../classes *.java cd ..\..\..\pal\io javac -classpath ../../../classes -d ../../../classes *.java cd .. javac -classpath ../../classes -d ../../classes math/*.java util/*.java misc/*.java cd datatype javac -classpath ../../../classes -d ../../../classes *.java cd ..\statistics javac -classpath ../../../classes -d ../../../classes *.java cd ..\alignment javac -classpath ../../../classes -d ../../../classes *.java cd ..\ javac -classpath ../../classes -d ../../classes tree/*.java distance/*.java mep/*.java substmodel/*.java cd coalescent javac -deprecation -classpath ../../../classes -d ../../../classes *.java cd ..\popgen javac -classpath ../../../classes -d ../../../classes *.java cd ..\eval javac -classpath ../../../classes -d ../../../classes *.java cd ..\gui javac -classpath ../../../classes -d ../../../classes *.java cd ..\xml javac -classpath ../../../classes;../../../classes/xml.jar -d ../../../classes *.java cd ..\algorithmics javac -classpath ../../../classes -d ../../../classes *.java cd ..\treesearch javac -classpath ../../../classes -d ../../../classes *.java cd ..\supgma javac -classpath ../../../classes -d ../../../classes *.java pausepal-1.5.1/src/makejar-signed.bat0000644000000000000000000000044007437371336015143 0ustar rootroot@echo off rem ************************************************************ rem This batch file makes and signs the jar file rem ************************************************************ @echo on cd ..\classes jar cvf ..\pal.jar pal org *.class jarsigner ..\pal.jar alexei pal-1.5.1/src/makefile0000644000000000000000000000176407421755130013273 0ustar rootroot### VARIABLES ### PACKAGES = \ pal.alignment \ pal.distance \ pal.eval \ pal.gui \ pal.io \ pal.math \ pal.mep \ pal.misc \ pal.popgen \ pal.statistics \ pal.substmodel \ pal.tree \ pal.datatype \ pal.util \ pal.coalescent \ pal.xml \ VERSION = pal-1.4 ### TARGETS ### .PHONY: classes native clean install installclean \ doc docclean zip zipclean fullclean zipsrc distrib classes: (cd pal; $(MAKE) classes) native: (cd pal; $(MAKE) native) clean: (cd pal; $(MAKE) clean) doc: javadoc -d ../doc/api $(PACKAGES) docclean: (rm -r -f ../doc/api/; mkdir ../doc/api) zip: classes zip -r pal pal -n .class -i '*.class' -i '*/' -x '*CVS/' zipclean: rm -f pal.zip fullclean: clean docclean installclean zipclean zipsrc: zip -r -9 pal-src pal -i '*.java' -i '*akefile*' -i '*.html' -i '*/' distrib: (cd ../..; zip -r -9 $(VERSION).zip $(VERSION) -x '*CVS*' -x '*.directory'; cd $(VERSION)/src); pal-1.5.1/src/makeclean.bat0000644000000000000000000000314707476747422014217 0ustar rootroot@rem @rem make in order cd ..\classes deltree pal deltree org cd ..\src\org\w3c\dom javac -source 1.3 -classpath ../../../../classes -d ../../../../classes *.java cd ..\..\..\pal\io javac -source 1.3 -classpath ../../../classes -d ../../../classes *.java cd ..\math javac -source 1.3 -classpath ../../../classes -d ../../../classes *.java cd ..\util javac -source 1.3 -classpath ../../../classes -d ../../../classes *.java cd ..\misc javac -source 1.3 -classpath ../../../classes -d ../../../classes *.java cd ..\datatype javac 
-source 1.3 -classpath ../../../classes -d ../../../classes *.java cd ..\statistics javac -source 1.3 -classpath ../../../classes -d ../../../classes *.java cd ..\substmodel javac -source 1.3 -classpath ../../../classes -d ../../../classes *.java cd ..\mep javac -source 1.3 -classpath ../../../classes -d ../../../classes *.java cd ..\alignment javac -source 1.3 -classpath ../../../classes -d ../../../classes *.java cd ..\distance javac -source 1.3 -classpath ../../../classes -d ../../../classes *.java cd ..\tree javac -source 1.3 -deprecation -classpath ../../../classes -d ../../../classes *.java cd ..\coalescent javac -source 1.3 -deprecation -classpath ../../../classes -d ../../../classes *.java cd ..\popgen javac -source 1.3 -classpath ../../../classes -d ../../../classes *.java cd ..\eval javac -source 1.3 -classpath ../../../classes -d ../../../classes *.java cd ..\gui javac -source 1.3 -classpath ../../../classes -d ../../../classes *.java cd ..\xml javac -source 1.3 -classpath ../../../classes;../../../classes/xml.jar -d ../../../classes *.java pal-1.5.1/src/pal/0000755000000000000000000000000010141733722012333 5ustar rootrootpal-1.5.1/src/pal/gui/0000755000000000000000000000000010141733720013115 5ustar rootrootpal-1.5.1/src/pal/gui/CircularGraphics.java0000644000000000000000000001605507323721302017215 0ustar rootroot// CircularGraphics.java // // (c) 1999-2001 PAL Development Core Team // // This package may be distributed under the // terms of the Lesser GNU General Public License (LGPL) package pal.gui; import java.awt.*; /** @author Matthew Goode */ public class CircularGraphics { double worldRadius_, worldAngle_; public int screenCentreX; public int screenCentreY; public int screenRadius; final static double TWO_PI = 2*Math.PI; Graphics g_; FontMetrics fm_; Font font_; boolean invertY_; public CircularGraphics(Graphics g, double worldAngle, double worldRadius, int screenX, int screenY, int screenWidth, int screenHeight) { this(g,worldAngle,worldRadius,screenX, screenY, screenWidth, screenHeight,false); } public CircularGraphics(Graphics g, double worldAngle, double worldRadius, int screenX, int screenY, int screenWidth, int screenHeight, boolean invertY) { this.g_ = g; this.invertY_ = invertY; this.worldAngle_ = worldAngle; this.worldRadius_ = worldRadius; screenRadius = Math.min(screenWidth/2, screenHeight/2); screenCentreX = screenX+screenWidth/2; screenCentreY = screenY+screenHeight/2; font_ = g_.getFont(); if(font_!=null) { fm_ = g_.getFontMetrics(font_); } } public void setFont(Font newFont) { g_.setFont(newFont); this.font_ = newFont; fm_ = g_.getFontMetrics(font_); } /** For drawing arbitary lines */ public void drawLine( double angle1, double radius1, double angle2, double radius2) { g_.drawLine(getScreenX(angle1,radius1), getScreenY(angle1, radius1), getScreenX(angle2,radius2),getScreenY(angle2,radius2) ); } /** For drawing arbitary lines */ public void drawLine( double angle, double radiusStart, double radiusEnd) { g_.drawLine(getScreenX(angle,radiusStart), getScreenY(angle, radiusStart), getScreenX(angle,radiusEnd),getScreenY(angle,radiusEnd) ); } /** For drawing arbitary lines */ public void drawLineDegreeAlign( double angle, double radiusStart, double radiusEnd) { angle = ((int)(angle*360/worldAngle_))*worldAngle_/360; g_.drawLine(getScreenX(angle,radiusStart), getScreenY(angle, radiusStart), getScreenX(angle,radiusEnd),getScreenY(angle,radiusEnd) ); } private final double convertRadius(double radius) { //return radius; return worldRadius_ - radius; } /** For 
drawing arbitary lines */ public void drawArc( double angleStart, double angleEnd, double radius) { int actualRadius = (int)(screenRadius*convertRadius(radius)/worldRadius_); if(actualRadius<1) { return; } int startAngle, endAngle; // if(invertY_) { startAngle = (int)(angleStart*360/worldAngle_); endAngle = (int)(angleEnd*360/worldAngle_); // } else { // startAngle = (int)(angleStart*360/worldAngle_); // endAngle = (int)(angleEnd*360/worldAngle_); // } // if(invertY_) { // g_.drawArc(screenCentreX-actualRadius, screenCentreY+actualRadius, actualRadius*2,-actualRadius*2, startAngle, endAngle-startAngle ); // } else { g_.drawArc(screenCentreX-actualRadius, screenCentreY-actualRadius, actualRadius*2,actualRadius*2, startAngle, endAngle-startAngle ); /*System.out.println(""+(screenCentreX-actualRadius)+ " "+ ( screenCentreY-actualRadius)+ " " + ( actualRadius*2) + " " + ( actualRadius*2 ) + " " + ( startAngle ) + " " + ( endAngle-startAngle )); */ // } } public void setColor(Color c) { g_.setColor(c); } public void drawString(String s, double angle, double radius) { int sX =getScreenX(angle,radius); int sY = getScreenY(angle,radius); FontMetrics fm = g_.getFontMetrics(g_.getFont()); int stringWidth = fm.stringWidth(s); int stringHeight = fm.getHeight(); g_.drawString(s,sX-stringWidth/2,sY+stringHeight/2); } public void drawString(String s, double angle, double radius,int outdent) { int sX =getScreenX(angle,radius,outdent); int sY = getScreenY(angle,radius,outdent); FontMetrics fm = g_.getFontMetrics(g_.getFont()); int stringWidth = fm.stringWidth(s); int stringHeight = fm.getHeight(); g_.drawString(s,sX-stringWidth/2,sY+stringHeight/2); } public void circleString(String s, double angle, double radius,int outdent) { int sX =getScreenX(angle,radius,outdent); int sY = getScreenY(angle,radius,outdent); FontMetrics fm = g_.getFontMetrics(g_.getFont()); int stringWidth = fm.stringWidth(s); int stringHeight = fm.getHeight(); g_.drawString(s,sX-stringWidth/2,sY+stringHeight/2); g_.drawOval(sX-stringWidth/2-5,sY-stringHeight/2-2 ,stringWidth+10, stringHeight+10); } public int getScreenX(double angle, double radius) { return screenCentreX + (int)(convertRadius(radius)*screenRadius*Math.cos(angle*TWO_PI/worldAngle_)/worldRadius_); } public int getScreenX(double angle, double radius, int outdent) { double trigBit = Math.cos(angle*TWO_PI/worldAngle_); return screenCentreX + (int)(outdent*trigBit + convertRadius(radius)*screenRadius*trigBit/worldRadius_); } public int getScreenDeltaX(double angle, double radius) { return (int)(convertRadius(radius)*screenRadius*Math.cos(angle*TWO_PI/worldAngle_ )/worldRadius_ ); } public int getScreenY(double angle, double radius) { int offset = (int)(convertRadius(radius)*screenRadius*Math.sin(angle*TWO_PI/worldAngle_)/worldRadius_); return (invertY_ ? (screenCentreY + offset) : (screenCentreY - offset)); } public int getScreenY(double angle, double radius, int outdent) { double trigBit = Math.sin(angle*TWO_PI/worldAngle_); int offset = (int)(outdent*trigBit + convertRadius(radius)*screenRadius*trigBit/worldRadius_); return (invertY_ ? (screenCentreY + offset) : (screenCentreY - offset)); } public int getScreenDeltaY(double angle, double radius) { return (invertY_ ? 
1 : -1 )*(int)(convertRadius(radius)*screenRadius*Math.sin(angle*TWO_PI/worldAngle_)/worldRadius_); } public void fillPoint(double angle, double radius, int size) { int x = getScreenX(angle,radius); int y = getScreenY(angle,radius); g_.fillOval(x-size,y-size,2*size,2*size); } public void drawPoint(double angle, double radius, int size) { int x = getScreenX(angle,radius); int y = getScreenY(angle,radius); g_.drawOval(x-size,y-size,2*size,2*size); } public void drawSymbol(double angle, double radius, int outdent, int width, int symbol) { int halfWidth = width / 2; int x = getScreenX(angle,radius,outdent); int y = getScreenY(angle,radius,outdent); switch (symbol% 6) { case 0: g_.fillRect(x, y, width, width); break; case 1: g_.drawRect(x, y, width, width); break; case 2: g_.fillOval(x, y, width, width); break; case 3: g_.drawOval(x, y, width, width); break; case 4: // draw triangle g_.drawLine(x, y + width, x + halfWidth, y); g_.drawLine(x + halfWidth, y, x + width, y + width); g_.drawLine(x, y + width, x + width, y + width); break; case 5: // draw X g_.drawLine(x, y, x + width, y + width); g_.drawLine(x, y + width, x + width, y); break; } } } pal-1.5.1/src/pal/gui/Painter.java0000644000000000000000000000113607374505304015374 0ustar rootroot// Painter.java // // (c) 1999-2001 PAL Development Core Team // // This package may be distributed under the // terms of the Lesser GNU General Public License (LGPL) package pal.gui; import java.awt.Graphics; import java.awt.Dimension; /** * An interface for objects which are used for painting themselves in a * general Graphics object * * @author Matthew Goode */ public interface Painter { void paint(Graphics g, int displayWidth, int displayHeight); void paint(Graphics g, int displayWidth, int displayHeight, LayoutTracker lt); Dimension getPreferredSize(); } pal-1.5.1/src/pal/gui/PositionedNode.java0000644000000000000000000000357007420611326016713 0ustar rootroot// PositionedNode.java // // (c) 1999-2001 PAL Development Core Team // // This package may be distributed under the // terms of the Lesser GNU General Public License (LGPL) package pal.gui; import pal.tree.*; /** * A tree node that has a scalar position for the purposes of drawing the tree. 
* * @author Alexei Drummond * @version $Id: PositionedNode.java,v 1.6 2002/01/14 04:18:13 matt Exp $ */ public class PositionedNode extends SimpleNode { protected double x; //Please excuse this - it will be returned to its non public state eventually - MG boolean highlight_; Node peer_; /** Builds a tree based on node, but highlights highlightNode */ public PositionedNode(Node node, Node highlightNode) { init(node); this.peer_ = node; if (!node.isLeaf()) { for (int i = 0; i < node.getChildCount(); i++) { addChild(new PositionedNode(node.getChild(i),highlightNode)); } } highlight_ = (node==highlightNode); } public PositionedNode(Node node) { init(node); this.peer_ = node; if (!node.isLeaf()) { for (int i = 0; i < node.getChildCount(); i++) { addChild(new PositionedNode(node.getChild(i))); } } } public void calculatePositions() { double[] currentXPos = {0.0}; calculateXPositions(currentXPos); } public Node getPeer() { return peer_; } private double calculateXPositions(double[] currentXPos) { if (!isLeaf()) { // find average x position x = ((PositionedNode)getChild(0)).calculateXPositions(currentXPos); for (int i = 1; i < getChildCount(); i++) { x += ((PositionedNode)getChild(i)).calculateXPositions(currentXPos); } x /= getChildCount(); } else { x = currentXPos[0]; currentXPos[0] += 1.0; } return x; } public boolean isHighlighted() { return highlight_; } public double getX() { return x; } } pal-1.5.1/src/pal/gui/TreePainterCircular.java0000644000000000000000000001075507706515310017705 0ustar rootroot// TreePainter.java // // (c) 1999-2001 PAL Development Core Team // // This package may be distributed under the // terms of the Lesser GNU General Public License (LGPL) package pal.gui; import pal.tree.*; import pal.io.*; import pal.misc.*; import java.awt.*; /** * A class that can paint a tree (in a circular fashion) into a Graphics object . 
* * @version $Id: TreePainterCircular.java,v 1.13 2003/07/20 02:36:08 matt Exp $ * * @author Alexei Drummond, Matthew Goode */ public class TreePainterCircular extends TreePainter { public static final int RIGHTBORDER = 75; public static final int LEFTBORDER = 10; public static final int TOPBORDER = 20; public static final int BOTTOMBORDER = 30; static final int SYMBOL_SIZE = 8; public static final int YSPACER = 20; public static final int XSPACER = 10; double maxAngle_, maxRadius_; public TreePainterCircular(Tree toDisplay, String title, boolean showTitle) { super(toDisplay, title, showTitle); setTreeImpl(toDisplay); } public void setTreeImpl(Tree t) { maxRadius_ = treeNode.getNodeHeight(); maxAngle_ = NodeUtils.getLeafCount(treeNode); } /** * Returns the preferred size for drawing * (that is the size that will show everything nicely) */ public Dimension getPreferredSize() { return new Dimension(100 + LEFTBORDER + RIGHTBORDER, 100+TOPBORDER+ BOTTOMBORDER); } protected void paint(PositionedNode node, CircularGraphics cg) { cg.setColor(FOREGROUND); double angle = node.x; double radius = node.getNodeHeight(); if (node.hasChildren()) { for (int i = 0; i < node.getChildCount(); i++) { paintLeafBranch(node, (PositionedNode)node.getChild(i), cg); } for (int i = 0; i < node.getChildCount(); i++) { paint((PositionedNode)node.getChild(i), cg); } int bootStrapValue = getBootstrapValue(node); if(bootStrapValue>=50) { cg.setColor(BOOTSTRAP_SUPPORT_COLOUR); cg.drawString( bootStrapValue+"", angle,radius, XSPACER); } } else { if ((maxLeafTime > 0.0) && isUsingColor()) { cg.setColor(Color.getHSBColor((float)(maxLeafTime - radius)/(float)maxLeafTime, 1.0f, 1.0f)); } else { cg.setColor(NORMAL_LABEL_COLOR); } if (isUsingColor()) { cg.fillPoint(angle,radius,2); } if (isUsingSymbols() && (getTimeOrderCharacterData() != null)) { cg.drawSymbol(angle,radius, XSPACER, SYMBOL_SIZE, getTimeOrderCharacterData().getTimeOrdinal(getTimeOrderCharacterData().whichIdNumber(node.getIdentifier().getName()))); } else { if(isUsingColor()) { cg.drawString( node.getIdentifier().getName(), angle,radius, XSPACER); if(node.isHighlighted()) { cg.setColor(Color.red); cg.circleString(node.getIdentifier().getName(), angle,radius, XSPACER); } } else { if(node.isHighlighted()) { cg.setColor(Color.red); } cg.drawString( node.getIdentifier().getName(), angle,radius, XSPACER); } } } } private void paintLeafBranch(PositionedNode parentNode, PositionedNode childNode, CircularGraphics g) { // paint join to parent g.drawArc(parentNode.x,childNode.x, parentNode.getNodeHeight() ); // paint branch g.drawLineDegreeAlign(childNode.x, childNode.getNodeHeight(), parentNode.getNodeHeight() ); if (isShowingNodeHeights()) { String label = FormattedOutput.getInstance().getDecimalString(childNode.getNodeHeight(), 4); g.drawString(label, childNode.x, childNode.getNodeHeight(), XSPACER); } } public void paint(Graphics g, int displayWidth, int displayHeight) { paint(g,displayWidth,displayHeight,false,null); } public void paint(Graphics g, int displayWidth, int displayHeight, LayoutTracker lt) { paint(g,displayWidth,displayHeight,false,lt); } public final void paint(Graphics g, int displayWidth, int displayHeight,boolean invert) { paint(g,displayWidth,displayHeight,invert,null); } public final void paint(Graphics g, int displayWidth, int displayHeight,boolean invert, LayoutTracker lt) { CircularGraphics cg = new CircularGraphics(g,maxAngle_,maxRadius_, LEFTBORDER,TOPBORDER, (displayWidth - LEFTBORDER - RIGHTBORDER), (displayHeight - TOPBORDER - BOTTOMBORDER), 
invert ); cg.setFont(getLabelFont()); g.setColor(BACKGROUND); g.fillRect(0, 0, displayWidth, displayHeight); paint(treeNode, cg); doTitle(g,LEFTBORDER, TOPBORDER - 8); //doScale(g,1,LEFTBORDER,displayHeight - BOTTOMBORDER + 12); } } pal-1.5.1/src/pal/gui/makefile0000644000000000000000000000123707275407432014634 0ustar rootroot### VARIABLES ### JIKESOPTS := +P GCJOPTS := # Always check dependencies JIKESOPTS += +M SRC := $(wildcard *.java) CLS := $(patsubst %.java, %.class, $(SRC)) OBJ := $(patsubst %.class, %.o, $(wildcard *.class)) ### RULES ### # Compile Java sources into class files %.class: %.java jikes $(JIKESOPTS) $< # Alternative to using jikes: gcj -C # Compile class files into native code %.o: %.class gcj -c $(GCJOPTS) $< ### TARGETS ### .PHONY: classes native clean classes: $(CLS) ifneq ($(OBJ),) native: $(OBJ) (ar -rv package.a *.o; ranlib package.a) else native: $(OBJ) endif clean: rm -f *.class *.o *.u *.a *~ pal-1.5.1/src/pal/gui/TreePainterNormal.java0000644000000000000000000001462507717656572017413 0ustar rootroot// TreePainterNormal.java // // (c) 1999-2001 PAL Development Core Team // // This package may be distributed under the // terms of the Lesser GNU General Public License (LGPL) package pal.gui; import pal.tree.*; import pal.io.*; import pal.misc.*; import java.awt.*; /** * A class that can paint a tree into a Graphics object. * * @version $Id: TreePainterNormal.java,v 1.17 2003/08/16 23:48:26 matt Exp $ * * @author Alexei Drummond * @note * */ public class TreePainterNormal extends TreePainter { public static final int RIGHTBORDER = 10; public static final int LEFTBORDER = 10; public static final int TOPBORDER = 20; public static final int BOTTOMBORDER = 30; public static final int FONT_SIZE = 15; public static final int YSPACER = 20; public static final int XSPACER = 4; private double xScale = 1.0; private double yScale = 1.0; private Font labelFont_ = new Font("Times", Font.PLAIN, FONT_SIZE); int maxLabelWidth_ = -1; public TreePainterNormal(Tree toDisplay, String title, boolean showTitle) { super(toDisplay,title,showTitle); } /** * Returns the preferred size for drawing * (that is the size that will show everything nicely) */ public Dimension getPreferredSize() { return new Dimension(100 + LEFTBORDER + RIGHTBORDER, (int)Math.round(width * FONT_SIZE) + TOPBORDER + BOTTOMBORDER); } protected void paint(PositionedNode node, Graphics g, int displayWidth, int displayHeight, LayoutTracker lt, boolean isRoot) { Point p = getPoint(node,displayWidth, displayHeight); g.setColor(FOREGROUND); if(isRoot) { g.fillRect(p.x-4, p.y-1, 4, 3); //Cheap hack! 
} if (node.hasChildren()) { for (int i = 0; i < node.getChildCount(); i++) { paintLeafBranch(p, getPoint((PositionedNode)node.getChild(i), displayWidth,displayHeight), node, g,lt); } for (int i = 0; i < node.getChildCount(); i++) { paint((PositionedNode)node.getChild(i), g,displayWidth, displayHeight,lt,false); } int bootStrapValue = getBootstrapValue(node); if(bootStrapValue>=50) { g.setColor(BOOTSTRAP_SUPPORT_COLOUR); g.drawString(bootStrapValue+"", p.x + XSPACER, p.y + (FONT_SIZE / 2)); } } else { if ((maxLeafTime > 0.0) && isUsingColor()) { g.setColor(Color.getHSBColor((float)(maxLeafTime - node.getNodeHeight())/(float)maxLeafTime, 1.0f, 1.0f)); } else { g.setColor(NORMAL_LABEL_COLOR); } if (isUsingColor()) { int halfWidth = getPenWidth() / 2; g.fillRect(p.x - halfWidth, p.y - halfWidth, getPenWidth(), getPenWidth()); } if (isUsingSymbols()&&getTimeOrderCharacterData()!=null) { drawSymbol(g, p.x + XSPACER, p.y - (FONT_SIZE / 2), FONT_SIZE, getTimeOrderCharacterData().getTimeOrdinal(getTimeOrderCharacterData().whichIdNumber(node.getIdentifier().getName()))); } else { String name = getNodeName(node); int width = g.getFontMetrics().stringWidth(name); if(isUsingColor()) { g.drawString(name, p.x + XSPACER, p.y + (FONT_SIZE / 2)); if(node.isHighlighted()) { g.setColor(Color.red); g.drawOval(p.x - 4+XSPACER, p.y-FONT_SIZE/2-5, width +10, FONT_SIZE+8 ); } } else { LabelDisplayer defaultDisplay =(node.isHighlighted() ? HILITED_LABEL_DISPLAY : NORMAL_LABEL_DISPLAY ); getNodeDisplay(node,defaultDisplay).display(g,name, p.x + XSPACER, p.y + (FONT_SIZE / 2)); } //Inform layout tracker of new String if(lt!=null) { lt.addMapping(name,new Rectangle(p.x+XSPACER, p.y - (FONT_SIZE / 2), width,FONT_SIZE)); } } } } public Point getPoint(PositionedNode node, int displayWidth, int displayHeight) { return new Point(displayWidth - (int)Math.round(node.getNodeHeight() * xScale) - RIGHTBORDER, (int)Math.round(node.x * yScale) + TOPBORDER); } private void paintLeafBranch(Point p, Point lp, PositionedNode node, Graphics g, LayoutTracker lt) { int halfWidth = getPenWidth() / 2; // paint join to parent g.fillRect(p.x - halfWidth, Math.min(p.y, lp.y) - halfWidth, getPenWidth(), Math.abs(lp.y - p.y) + getPenWidth()); // paint branch g.fillRect(Math.min(p.x, lp.x) - halfWidth, lp.y - halfWidth, Math.abs(lp.x - p.x) + getPenWidth(), getPenWidth()); if (isShowingNodeHeights()) { String label = FormattedOutput.getInstance().getDecimalString(node.getNodeHeight(), 4); int width = g.getFontMetrics().stringWidth(label); int x = Math.min(p.x, lp.x) - (width / 2); g.drawString(label, x, p.y - halfWidth - 1); } if (isShowingInternalLabels()) { String label = getNodeName(node); int width = g.getFontMetrics().stringWidth(label); int x = Math.min(p.x, lp.x) - (width / 2); g.drawString(label, x, p.y - halfWidth - 1); } Object att = null; if (attName != null) { if (attName.equals("node height")) { att = new Double(node.getNodeHeight()); } else if (attName.equals("branch length")) { att = new Double(node.getBranchLength()); } else { att = node.getAttribute(attName); } if (att != null) { String label = null; if (att instanceof Double) { label = FormattedOutput.getInstance().getDecimalString(((Double)att).doubleValue(), 3); } else label = att.toString(); int width = g.getFontMetrics().stringWidth(label); int height = g.getFontMetrics().getAscent(); int x = Math.min(p.x, lp.x) + halfWidth + 1; g.drawString(label, x, p.y + (height / 2)); } } } public void paint(Graphics g, int displayWidth, int displayHeight) { 
paint(g,displayWidth,displayHeight,null); } public void paint(Graphics g, int displayWidth, int displayHeight, LayoutTracker lt) { g.setFont(labelFont_); if(maxLabelWidth_<0) { maxLabelWidth_ = getLongestIdentifierPixelWidth(g.getFontMetrics()); } double h = height; if (maxHeight != -1.0) { h = maxHeight; } xScale = (double)(displayWidth - LEFTBORDER - RIGHTBORDER - maxLabelWidth_) / h; yScale = (double)(displayHeight - TOPBORDER - BOTTOMBORDER) / width; g.setColor(BACKGROUND); g.fillRect(0, 0, displayWidth, displayHeight); paint(treeNode, g, displayWidth-maxLabelWidth_, displayHeight,lt, true); doTitle(g,LEFTBORDER, TOPBORDER - 8); doScale(g,xScale,LEFTBORDER,displayHeight - BOTTOMBORDER + 12); } } pal-1.5.1/src/pal/gui/LayoutTracker.java0000644000000000000000000000215307400427350016555 0ustar rootroot// LayoutTracker.java // // (c) 1999-2001 PAL Development Core Team // // This package may be distributed under the // terms of the Lesser GNU General Public License (LGPL) package pal.gui; /** * Title: LayoutTracer * Description: A means for tracking the layout of labels * @author Matthew Goode * @version 1.0 */ import java.util.*; import java.awt.*; import pal.misc.*; public class LayoutTracker { Hashtable layoutMappings_ = new Hashtable(); public LayoutTracker() { } public void addMapping(String name, Rectangle bounds) { layoutMappings_.put(name,bounds); } public void addMapping(Identifier id, Rectangle bounds) { if(id!=null&&id.getName()!=null) { layoutMappings_.put(id.getName(),bounds); } } public Rectangle getBounds(String name) { if(name==null||!layoutMappings_.containsKey(name)) { return null; } return (Rectangle)layoutMappings_.get(name); } public Rectangle getBounds(Identifier id) { if(id==null) { return null; } return getBounds(id.getName()); } public void reset() { layoutMappings_.clear(); } }pal-1.5.1/src/pal/gui/package.html0000644000000000000000000000013207135703704015402 0ustar rootroot GUI components for some special objects (e.g., trees). 
pal-1.5.1/src/pal/gui/NameColouriser.java0000644000000000000000000000446707717656572016752 0ustar rootroot// NameColouriser.java // // (c) 1999-2001 PAL Development Core Team // // This package may be distributed under the // terms of the Lesser GNU General Public License (LGPL) package pal.gui; /** * Title: NameColouriser.java * Description: A means for mapping names to colours * @author Matthew Goode * @version 1.0 * @note * */ import java.util.*; import java.awt.*; import pal.misc.*; public final class NameColouriser implements java.io.Serializable { private final Hashtable displayMappings_ = new Hashtable(); public NameColouriser() { } public NameColouriser(String name, Color colour) { addMapping(name,colour); } public NameColouriser(String[] names, Color colour) { for(int i = 0 ; i < names.length ; i++) { addMapping(names[i],colour); } } public NameColouriser(Identifier name, Color colour) { addMapping(name,colour); } public void addMapping(String name, Color colour) { displayMappings_.put(name,LabelDisplayer.Utils.buildDisplay(colour)); } public void addMapping(String name, Color colour, int fontStyle) { displayMappings_.put(name,LabelDisplayer.Utils.buildDisplay(colour,fontStyle)); } public void addMapping(String name, LabelDisplayer display) { displayMappings_.put(name, display); } public void addMapping(Identifier id, Color colour) { if(id!=null&&id.getName()!=null) { displayMappings_.put(id.getName(),LabelDisplayer.Utils.buildDisplay(colour)); } } /** * @param names Names * @param colours associated colours * @note assumes parallel arrays */ public void addMappings(String[] names, Color[] colours) { for(int i = 0 ; i < names.length ; i++) { displayMappings_.put(names[i],LabelDisplayer.Utils.buildDisplay(colours[i])); } } public LabelDisplayer getDisplay(String name, LabelDisplayer defaultDisplay) { if(name==null||!displayMappings_.containsKey(name)) { return defaultDisplay; } return (LabelDisplayer)displayMappings_.get(name); } public LabelDisplayer getDisplay(Identifier id, LabelDisplayer defaultDisplay) { if(id==null) { return defaultDisplay; } return getDisplay(id.getName(),defaultDisplay); } } pal-1.5.1/src/pal/gui/LinkageDisequilibriumComponent.java0000644000000000000000000004356207351100336022137 0ustar rootroot// LinkageDisequilibriumComponent.java // // (c) 1999-2001 PAL Development Core Team // // This package may be distributed under the // terms of the Lesser GNU General Public License (LGPL) package pal.gui; import pal.alignment.*; import java.awt.*; import java.util.*; import java.awt.print.*; import java.text.*; import pal.popgen.LinkageDisequilibrium; /** * An AWT Component for displaying information on linkage disequilibrium. * * Nice schematics are produced if an annotation alignment is used to construct * LinkageDisequilibrium. It can portray things both on the gene and chromosomal * scale. 
* * * @author Ed Buckler * @version $Id: LinkageDisequilibriumComponent.java */ public class LinkageDisequilibriumComponent extends Component implements Printable { public final static int P_VALUE = 0; public final static int DPRIME = 1; public final static int RSQUARE = 2; float minimumChromosomeLength=10; LinkageDisequilibrium theLD; AnnotationAlignment theAA; boolean includeBlockSchematic, chromosomalScale; BorderLayout borderLayout1 = new BorderLayout(); int totalVariableSites, totalLoci, totalChromosomes, totalIntervals, totalBlocks; float[] startPos, endPos; //These are the relative positions of the polymorphisms float[] blockBeginPos, blockEndPos; String[] blockNames; int[] xPos, yPos, xEndPos; //these hold positions of the upper left corners for each site int[] blockBeginX, blockEndX;//These are the absolute positions of the genes & chromosomes int ih, iw; float totalUnits; float[] blockStart, blockEnd; //this will range from 0 to 1 String upperLabel, lowerLabel; double[][] diseq; Color theColor=new Color(0,0,0); int distanceBetweenGraphAndGene=40; int hoff=70, h2off=70, voff=20; //hoff is on the left side for site labels //h2off is on the right side for legends boolean probability=true, upperProb=false, lowerProb=true; // boolean genesOrChromo=true; //true if display genes , false if display chromosomes public LinkageDisequilibriumComponent(LinkageDisequilibrium theLD, boolean includeBlockSchematic, boolean chromosomalScale) { this.theLD=theLD; theAA=theLD.getAnnotatedAlignment(); this.includeBlockSchematic=includeBlockSchematic; this.chromosomalScale=chromosomalScale; this.diseq=new double[theLD.getSiteCount()][theLD.getSiteCount()]; setUpperCorner(RSQUARE); setLowerCorner(P_VALUE); totalVariableSites=theLD.getSiteCount(); if(theAA!=null) {countGenesAndChromosomes(); calculateStartAndEndPositions(); } else {includeBlockSchematic=false;} xPos=new int[theLD.getSiteCount()+1]; yPos=new int[theLD.getSiteCount()+1]; xEndPos=new int[theLD.getSiteCount()+1]; try { jbInit(); } catch(Exception ex) { ex.printStackTrace(); } } /** * This determines what is displayed in the lower left corner. 
* Options are: P_VALUE, DPRIME, and RSQUARE */ public void setLowerCorner(int ldMeasure) { for(int r=0; rtheAA.getChromosomePosition(r)) blockStart[c]=theAA.getChromosomePosition(r); if(blockEnd[c]theAA.getLocusPosition(r)) blockStart[c]=theAA.getLocusPosition(r); if(blockEnd[c]0)&&(includeBlockSchematic)&&(theAA.getChromosome(r)!=theAA.getChromosome(r-1))) //transition between chromosomes if on chromosomal scale {currStartBase+=proportionPerPolymorphism;} if((!chromosomalScale)&&(r>0)&&(includeBlockSchematic)&&(!theAA.getLocusName(r).equals(theAA.getLocusName(r-1)))) //transition between loci if not at chromosomal scale {currStartBase+=proportionPerPolymorphism;} startPos[r]=currStartBase; currStartBase+=proportionPerPolymorphism; } //end of going through sites if(includeBlockSchematic) {currStartBase=0; for(int b=0; b0.999) {return theColor.getHSBColor(1f,1f,1f);} if(diseq[r][c]<-998.0) {return theColor.lightGray;} return theColor.getHSBColor((float)diseq[r][c],(float)diseq[r][c],1f); } private Color getProbabilityColor(int r, int c) { double p1=0.01, p2=0.001, p3=0.0001; if(diseq[r][c]<-998.0) {return theColor.lightGray;} if(diseq[r][c]>p1) {return theColor.white;} if(diseq[r][c]>p2) {return theColor.blue;} if(diseq[r][c]>p3) {return theColor.green;} return theColor.red; } private void addPolymorphismLabels(Graphics g, int ih) { int gr=0; String s; g.setFont(new java.awt.Font("Dialog", 0, 9)); g.setColor(theColor.black); for(int r=0; r0.01",xStart+barWidth+5,currY+10); currY+=yInc; g.setColor(theColor.blue); g.fillRect(xStart, currY, barWidth, yInc); g.setColor(Color.black); g.drawRect(xStart, currY, barWidth, yInc); g.drawString("<0.01",xStart+barWidth+5,currY+10); currY+=yInc; g.setColor(theColor.green); g.fillRect(xStart, currY, barWidth, yInc); g.setColor(Color.black); g.drawRect(xStart, currY, barWidth, yInc); g.drawString("<0.001",xStart+barWidth+5,currY+10); currY+=yInc; g.setColor(theColor.red); g.fillRect(xStart, currY, barWidth, yInc); g.setColor(Color.black); g.drawRect(xStart, currY, barWidth, yInc); g.drawString("<0.0001",xStart+barWidth+5,currY+10); } else {yInc=(yEnd-yStart)/11; dF=new DecimalFormat("0.00"); for(float d=1.0001f; d>=0; d-=0.1) {g.setColor(theColor.getHSBColor(d,d,1f)); g.fillRect(xStart, currY, barWidth, yInc); g.setColor(Color.black); g.drawRect(xStart, currY, barWidth, yInc); g.drawString(dF.format(d),xStart+barWidth+5,currY+10); currY+=yInc; } } } private void addGenePicture(Graphics g, int ih, int iw) { //This will add the gene picture to the left of the polymorphisms int yOfLinkBlock, yOfGene, yOfGeneLabel;//,totalBases,spacer, cpos; int halfIW=iw/2; // MultiAlleleSiteCharacteristic theMSC, lastMSC; Dimension d=this.getSize(); yOfLinkBlock=yPos[totalVariableSites]; yOfGene=yOfLinkBlock+(distanceBetweenGraphAndGene/2); yOfGeneLabel=yOfLinkBlock+(int)(0.8f*(float)distanceBetweenGraphAndGene); for(int r=0; rscaleH)?scaleH:scaleW; // System.out.println(scaleW+"=W H="+scaleH+" maxScale="+maxScale); g2.scale(maxScale,maxScale); // g2.scale(scaleW,scaleH); this.paint(g2); return PAGE_EXISTS; } /* public void sendToPrinter() { PrinterJob printJob = PrinterJob.getPrinterJob(); printJob.setPrintable(this); if (printJob.printDialog()) { try { printJob.print(); } catch (Exception ex) { ex.printStackTrace(); } } } */ }pal-1.5.1/src/pal/gui/LabelDisplayer.java0000644000000000000000000000401407725632134016666 0ustar rootroot// LabelDisplay.java // // (c) 1999-2001 PAL Development Core Team // // This package may be distributed under the // terms of the Lesser GNU 
General Public License (LGPL) package pal.gui; import java.awt.*; /** * A Label display object displays a label at a particular location on a graphics object. * The class may change attributes such as font, colour, etc... but should leave the graphics state unchanged (ie, revert back to original colour) * @author Matthew Goode * @note *
 * 14 August 2003 - Created to allow greater flexibility in label display on trees
*/ public interface LabelDisplayer { public void display(Graphics g, String label, int x, int y ); //=--=-=-=-=-=-==--=-=-=-=-==--==-=-=--==--==--=-==--=-==--==-=-=-=-=--=-==--= public static final class Utils { public static final LabelDisplayer buildDisplay(Color c) { return new ColourDisplay(c); } public static final LabelDisplayer buildDisplay(Color c, int style) { return new ColourAndFontStyleDisplay(c,style); } private static final class ColourDisplay implements LabelDisplayer { private final Color c_; public ColourDisplay(Color c) { this.c_ = c; } public void display(Graphics g, String text, int x, int y ) { Color old = g.getColor(); g.setColor(c_); g.drawString(text,x,y); g.setColor(old); } } private static final class ColourAndFontStyleDisplay implements LabelDisplayer { private final Color c_; private final int fontStyle_; public ColourAndFontStyleDisplay(Color c, int fontStyle) { this.c_ = c; this.fontStyle_ = fontStyle; } public void display(Graphics g, String text, int x, int y ) { Color old = g.getColor(); Font oldFont = g.getFont(); g.setColor(c_); System.out.println("Making bold:"); Font newFont = oldFont.deriveFont(fontStyle_); g.setFont(newFont); g.drawString(text,x,y); g.setColor(old); g.setFont(oldFont); } } } //End of class Utils }pal-1.5.1/src/pal/gui/TreeComponent.java0000644000000000000000000000731007447633166016565 0ustar rootroot// TreeComponent.java // // (c) 1999-2001 PAL Development Core Team // // This package may be distributed under the // terms of the Lesser GNU General Public License (LGPL) package pal.gui; import pal.tree.*; import pal.misc.*; import java.awt.*; /** * An AWT Component for displaying a tree. * * @author Alexei Drummond * @version $Id: TreeComponent.java,v 1.17 2002/03/25 02:38:45 alexi Exp $ */ public class TreeComponent extends Component { boolean circular_ = false; public final static int NORMAL_BW = 0; public final static int CIRCULAR_BW = 1; public final static int NORMAL_COLOR = 2; public final static int CIRCULAR_COLOR = 3; int mode_; public final static String[] MODE_NAMES = new String[4]; // unfortunate but necessary to avoid Java 1.1 language features static { MODE_NAMES[NORMAL_BW] = "Normal (bw)"; MODE_NAMES[CIRCULAR_BW] = "Circular (bw)"; MODE_NAMES[NORMAL_COLOR] = "Normal (color)"; MODE_NAMES[CIRCULAR_COLOR] = "Circular (color)"; } TreePainterCircular circlePainter_; TreePainter painter_; boolean invertCiruclar_; // constructors public TreeComponent(Tree tree, boolean usingSymbols) { this(tree, (TimeOrderCharacterData)null, usingSymbols); } public TreeComponent(Tree tree, TimeOrderCharacterData tocd, boolean usingSymbols) { this(tree); if (tocd != null) { painter_.setTimeOrderCharacterData(tocd); } painter_.setUsingSymbols(usingSymbols); } public TreeComponent(Tree tree) { this(tree, "", false); } public TreeComponent(Tree tree, String title) { this(tree, title, true); } public TreeComponent(Tree tree, String title, boolean showTitle) { painter_ = new TreePainterNormal(tree,title,showTitle); circlePainter_ = new TreePainterCircular(tree,title,showTitle); setMode(NORMAL_COLOR); setSize(getPreferredSize()); } public final void setLabelMapping(LabelMapping lm) { painter_.setLabelMapping(lm); circlePainter_.setLabelMapping(lm); } public void setColouriser(NameColouriser nc) { painter_.setColouriser(nc); circlePainter_.setColouriser(nc); } public void setAttributeName(String name) { painter_.setAttributeName(name); repaint(); } public void setMaxHeight(double maxHeight) { painter_.setMaxHeight(maxHeight); } public void setTree(Tree 
tree) { painter_.setTree(tree); circlePainter_.setTree(tree); } public void setTitle(String title) { painter_.setTitle(title); circlePainter_.setTitle(title); } public Dimension getPreferredSize() { return painter_.getPreferredSize(); } public void setInvertCircular(boolean invert) { this.invertCiruclar_ = invert; } public Dimension getMinimumSize() { return getPreferredSize(); } public String getTitle() { return painter_.getTitle(); } /** * Sets the mode of display for this component. * @param the mode to switch to. Valid arguments are NORMAL (for normal tree * display), and CIRCULAR (for a circular view of the trees) */ public void setMode(int mode) { this.mode_= mode; switch(mode) { case NORMAL_BW : { circular_ = false; painter_.setUsingColor(false); break; } case NORMAL_COLOR : { circular_ = false; painter_.setUsingColor(true); break; } case CIRCULAR_BW : { circular_ = true; circlePainter_.setUsingColor(false); break; } case CIRCULAR_COLOR : { circular_ = true; circlePainter_.setUsingColor(true); break; } } } public void paint(Graphics g) { if(!circular_) { painter_.paint(g,getSize().width,getSize().height); } else { circlePainter_.paint(g,getSize().width,getSize().height,invertCiruclar_); } } } pal-1.5.1/src/pal/gui/TreePainter.java0000644000000000000000000002263007717656572016235 0ustar rootroot// TreePainter.java // // (c) 1999-2001 PAL Development Core Team // // This package may be distributed under the // terms of the Lesser GNU General Public License (LGPL) package pal.gui; import pal.tree.*; import pal.io.*; import pal.misc.*; import java.awt.*; /** * A class that can paint a tree into a Graphics object. * * @version $Id: TreePainter.java,v 1.24 2003/08/16 23:48:26 matt Exp $ * * @author Alexei Drummond * @note *
 * 14 August 2003 - Changed to reflect NameColouriser changes
*/ abstract public class TreePainter implements Painter { public static final Color BACKGROUND = Color.white; public static final Color FOREGROUND = Color.black; public static final Color NORMAL_LABEL_COLOR = Color.green.darker(); public static final LabelDisplayer NORMAL_LABEL_DISPLAY = LabelDisplayer.Utils.buildDisplay(Color.green.darker()); public static final LabelDisplayer HILITED_LABEL_DISPLAY = LabelDisplayer.Utils.buildDisplay(Color.red.darker(), Font.BOLD); public static final Color BOOTSTRAP_SUPPORT_COLOUR = Color.black; public static final String BOOTSTRAP_ATTRIBUTE_NAME = "bootstrap"; public static final int DEFAULT_FONT_SIZE = 15; public static final int DEFAULT_FONT_STYLE = Font.PLAIN; public static final String DEFAULT_FONT_NAME = "times"; public static final Font DEFAULT_FONT = new Font(DEFAULT_FONT_NAME, DEFAULT_FONT_STYLE, DEFAULT_FONT_SIZE); public PositionedNode treeNode; public String title_; protected String attName = null; boolean showTitle_; /** The tree being painted */ private Tree tree; /** The time order character data used for determining symbols. */ private TimeOrderCharacterData tocd = null; /** the number of leaves in the tree. */ double width; /** the height of the root */ double height; /** * the maximum height of the display area in the same units as tree. * if this is not -1.0 then it overrides the natural height. */ double maxHeight = -1.0; double maxLeafTime = 0.0; double sizeOfScale = 0.0; /** Width of pen used to paint lines */ private int penWidth = 2; /** determines whether colors are used to distinguish branch depth */ private boolean usingColor = true; /** determines whether node heights are displayed on the tree */ private boolean showingNodeHeights = false; /** determines whether internal nodes are labelled */ protected boolean showingInternalLabels = true; /** determines whether symbols are used instead of names */ private boolean usingSymbols = false; private NameColouriser colouriser_ = null; private LabelMapping labelMapping_ = null; private Font labelFont_ = DEFAULT_FONT; /** * @param toDisplay the tree being painted. * @param title the title of the tree. * @param showTitle true if a title is being displayed. * @param nc A name colouriser, may be null. */ public TreePainter(Tree toDisplay, String title, boolean showTitle) { this.title_ = title; this.showTitle_ = showTitle; this.tree = toDisplay; // if (toDisplay instanceof DatedTipsClockTree) { // tocd = ((DatedTipsClockTree)toDisplay).getTimeOrderCharacterData(); // } standardTreePrep(); } /** * Returns -1 if no * trap value available */ protected int getBootstrapValue(PositionedNode node){ Object o = tree.getAttribute(node.getPeer(),BOOTSTRAP_ATTRIBUTE_NAME); if(o==null) { return -1; } return ((Integer)o).intValue(); } /** * Rotates the tree by leaf count, creates a positioned node version of the * trees root and calculates postions and width and height information. */ protected void standardTreePrep() { TreeUtils.rotateByLeafCount(tree); treeNode = new PositionedNode(tree.getRoot()); treeNode.calculatePositions(); width = NodeUtils.getLeafCount(treeNode); height = treeNode.getNodeHeight(); maxLeafTime = 0.0; maxLeafTime = getMaxLeafTime(treeNode); maxLeafTime *= 1.5; sizeOfScale = getSizeOfScale( height / 5.0); } /** * sets the maximum height of plot. * if this height is smaller than root height then * only a proportion of tree is drawn. 
*/ public final void setMaxHeight(double maxHeight) { this.maxHeight = maxHeight; sizeOfScale = getSizeOfScale( maxHeight / 5.0); } public final void setAttributeName(String name) { attName = name; } public final void setPenWidth(int p) { penWidth = p; } public final int getPenWidth() { return penWidth; } public final void setTree(Tree tree) { this.tree = tree; standardTreePrep(); setTreeImpl(tree); } /** * may be implemented by sub classes */ public void setTreeImpl(Tree tree) { } public final void setUsingColor(boolean use) { usingColor = use; } public final boolean isUsingColor() { return usingColor; } public final void setShowingNodeHeights(boolean s) { showingNodeHeights = s; } public final boolean isShowingNodeHeights() { return showingNodeHeights; } public final boolean isShowingInternalLabels() { return showingInternalLabels; } public final TimeOrderCharacterData getTimeOrderCharacterData() { return this.tocd; } public final void setTimeOrderCharacterData(TimeOrderCharacterData tocd) { this.tocd = tocd; usingSymbols = true; } public final boolean isUsingSymbols() { return usingSymbols; } /** * Sets whether the tree is painted with symbols. This can * only be set to true of a TimeOrderCharacterData has been set. */ public final void setUsingSymbols(boolean use) { usingSymbols = use; if (tocd == null) usingSymbols = false; } protected final Tree getTree() { return tree; } protected final double getSizeOfScale(double target) { double sos = 0.1; boolean accept = false; boolean divideByTwo = true; while (!accept) { if ((sos / target) >= 5.0) { sos /= (divideByTwo ? 2.0 : 5.0); divideByTwo = !divideByTwo; } else if ((sos / target) < 0.2) { sos *= (divideByTwo ? 5.0 : 2.0); divideByTwo = !divideByTwo; } else accept = true; } return sos; } protected static final double getMaxLeafTime(Node node) { if (!node.isLeaf()) { double max = getMaxLeafTime(node.getChild(0)); double posmax = 0.0; for (int i = 1; i < node.getChildCount(); i++) { posmax = getMaxLeafTime(node.getChild(i)); if (posmax > max) max = posmax; } return max; } else { return node.getNodeHeight(); } } public final static void drawSymbol(Graphics g, int x, int y, int width, int index) { int halfWidth = width / 2; switch (index % 6) { case 0: g.fillRect(x, y, width, width); break; case 1: g.drawRect(x, y, width, width); break; case 2: g.fillOval(x, y, width, width); break; case 3: g.drawOval(x, y, width, width); break; case 4: // draw triangle g.drawLine(x, y + width, x + halfWidth, y); g.drawLine(x + halfWidth, y, x + width, y + width); g.drawLine(x, y + width, x + width, y + width); break; case 5: // draw X g.drawLine(x, y, x + width, y + width); g.drawLine(x, y + width, x + width, y); break; } } public final boolean isShowTitle() { return showTitle_; } public final void setColouriser(NameColouriser nc) { this.colouriser_ = nc; } public final void setLabelMapping(LabelMapping lp) { this.labelMapping_ = lp; } public final void setTitle(String title) { this.title_ = title; showTitle_ = true; } protected final String getNodeName(Node node) { if(labelMapping_!=null) { return labelMapping_.getLabel(node.getIdentifier()); } return node.getIdentifier().getName(); } public final String getTitle() { return title_; } public final void doTitle(Graphics g, int x, int y) { if(showTitle_) { g.drawString(title_, x,y); } } protected final void doScale(Graphics g, double xScale, int x, int y) { g.setColor(FOREGROUND); g.drawLine(x, y, x+ (int)Math.round(sizeOfScale * xScale), y); g.drawString(sizeOfScale + " "+Units.UNIT_NAMES[tree.getUnits()], 
x,y-12); } protected final LabelDisplayer getNodeDisplay(Node n, LabelDisplayer defaultDisplay) { if(colouriser_!=null) { return colouriser_.getDisplay(n.getIdentifier(),defaultDisplay); } return defaultDisplay; } private int getLongestIdentifierPixelWidth(FontMetrics fm, Node n) { Identifier id = n.getIdentifier(); int myWidth = 0; if(id!=null) { String name = id.getName(); if(name!=null) { if(labelMapping_!=null) { name = labelMapping_.getLabel(name,name); } myWidth = fm.stringWidth(name); } } int numberChild = n.getChildCount(); for(int i = 0 ; i < numberChild ; i++) { myWidth = Math.max(myWidth, getLongestIdentifierPixelWidth(fm, n.getChild(i))); } return myWidth; } protected final int getLongestIdentifierPixelWidth(FontMetrics fm) { return getLongestIdentifierPixelWidth(fm,treeNode); } // ============================================================================ // == Font Stuff /** * Set the font used to display labels */ public final void setLabelFont(Font f) { this.labelFont_ = f; } /** * Set the font used to display labels */ public final void setLabelFontSize(int size) { this.labelFont_ = new Font(labelFont_.getFontName(),size,labelFont_.getSize()); } protected final Font getLabelFont() { return labelFont_; } } pal-1.5.1/src/pal/algorithmics/0000755000000000000000000000000010141733720015016 5ustar rootrootpal-1.5.1/src/pal/algorithmics/Assessor.java0000644000000000000000000000036507742737412017506 0ustar rootrootpackage pal.algorithmics; /** *

 * Title:
 * Description:
 * Copyright: Copyright (c) 2003
 * Company:
* @author not attributable * @version 1.0 */ public interface Assessor { public double getCurrentValue(); }pal-1.5.1/src/pal/algorithmics/GeneralObjectState.java0000644000000000000000000000277107742745450021415 0ustar rootrootpackage pal.algorithmics; /** *

 * Title:
 * Description:
 * Copyright: Copyright (c) 2003
 * Company:
* @author not attributable * @version 1.0 */ public class GeneralObjectState implements ObjectState { private final UndoableAction action_; private final StateProvider subject_; private final boolean maximise_; public GeneralObjectState(UndoableAction action, StateProvider subject, boolean maximise) { this.action_ = action; this.subject_ = subject; this.maximise_ = maximise; } /** * Perform an action * @param currentScore the current score before doing the action * @param desparationValue An indication of how desparate we are, values closer to 1 mean more desparate while values towards 0 mean less desparate * @return the current score after doing the action */ public double doAction(double currentScore, double desparationValue) { boolean succeeded = false; double score = currentScore; while(!succeeded) { score = action_.doAction(currentScore,desparationValue); succeeded = action_.isActionSuccessful(); } return score; } /** * * @return true if undo was successful */ public boolean undoAction() { return action_.undoAction(); } public Object getStateReference() { return subject_.getStateReference(); } public void restoreState(Object stateReference) { subject_.restoreState(stateReference); } public boolean isMaximiseScore() { return maximise_; } }pal-1.5.1/src/pal/algorithmics/UndoableAction.java0000644000000000000000000002644410024070372020557 0ustar rootrootpackage pal.algorithmics; /** *

 * Title: UndoableAction
 * Description: A stateful, single thread object
 * Copyright: Copyright (c) 2003
 * Company:
* @author Matthew Goode * @version 1.0 */ import java.util.*; public interface UndoableAction { /** * Perform an action * @param currentscore The current score before doing the action * @param desparationValue An indication by the processing machines of willingness to do more extreme actions. A value of 0 means not desparate at all, a value of 1 means very desparate * @return the current score after doing the action (or the input score if not successful) */ public double doAction(double currentScore, double desparationValue); /** * Was the last action deterministic? That is, if it wasn't chosen and state is still as * before is it worth doing it again? * @return true if last action deterministic */ public boolean isActionDeterministic(); /** * Was the last call to doAction() succesful? * @return true if last action successful, false otherwise */ public boolean isActionSuccessful(); /** * Undo the last action (if it was successful) * Users of undoable actions should accept that sometimes it isn't possible. * If an undo was not possible the action should not change any state * @return true if undo was successful */ public boolean undoAction(); // -=-=-==-=--=-==--=-==--=-==---==-=--=-=-=-=-==-=-=-=-=-=-=--==-=-=--==-=--=- // -=-=-= Utils -=-=-=-=-=-=-==--==-=--=-=-=-=-==-=-=-=-=-=-=--==-=-=--==-=--=- // -=-=-==-=--=-==--=-==--=-==---==-=--=-=-=-=-==-=-=-=-=-=-=--==-=-=--==-=--=- public static final class Utils { /** * Create an action that selects uniformly from a number of sub actions * @param subActions * @return */ public static final UndoableAction getSimpleUniformSelection(UndoableAction[] subActions) { return new Multi(subActions); } /** * Create an action that selects uniformly from a number of sub actions * @param subActions * @param acitionProportions * @throws IllegalArgumentException if action array and proportion arrays are different lengths * @return */ public static final UndoableAction getDistributedSelection(UndoableAction[] subActions, double[] actionProportions) { if(subActions.length>actionProportions.length) { throw new IllegalArgumentException("Actions and proportion array different lengths"); } return new DistributedMulti(subActions, actionProportions); } /** * Create an action that combines multiple actions * @param subActions The actions that are do in turn. * @return An action that performs all the sub actions */ public static final UndoableAction getCombined(UndoableAction[] subActions) { return new Combined(subActions); } /** * A simple tool for change actions when things get desparate * @param primaryAction The main action to do when things are going well * @param desparateAction The action to do when things get desparate. 
The desperation value for the desparate action will be scaled according to how much over the limit we are * @param desparationLimit The desparate value at which we start doing the desparate action * @param desparationInterval The time between desparate actions when we cross the cutoff (a value of one will mean do all the time after desparation value has crossed cutoff) */ public static final UndoableAction getSimpleDesparation(UndoableAction primaryAction, UndoableAction desparateAction, double desparationLimit, int desparationInterval) { return new SimpleDesparation(primaryAction,desparateAction,desparationLimit,desparationInterval); } // -=-==-=--==--=-=-=-=-=-=-==--=-= private static class SimpleDesparation implements UndoableAction { private final UndoableAction primaryAction_; private final UndoableAction desparateAction_; private final double desparationLimit_; private final int desparationInterval_; private int currentDesparateCount_ = 0; private UndoableAction lastAction_ = null; /** * A simple tool for change actions when things get desparate * @param primaryAction The main action to do when things are going well * @param desparateAction The action to do when things get desparate. The desperation value for the desparate action will be scaled according to how much over the limit we are * @param desparationLimit The desparate value at which we start doing the desparate action * @param desparationInterval The time between desparate actions when we cross the cutoff (a value of one will mean do all the time after desparation value has crossed cutoff) */ public SimpleDesparation(UndoableAction primaryAction, UndoableAction desparateAction, double desparationLimit, int desparationInterval) { this.primaryAction_ = primaryAction; this.desparateAction_ = desparateAction; this.desparationLimit_ = desparationLimit; this.desparationInterval_ = desparationInterval; } /** * @return false */ public boolean isActionDeterministic() { return false; } public double doAction(double currentScore, double desparationValue) { if(desparationValue>=desparationLimit_) { currentDesparateCount_++; if(currentDesparateCount_==desparationInterval_) { currentDesparateCount_ = 0; lastAction_ = desparateAction_; desparationValue = (desparationLimit_-desparationValue)/(1-desparationLimit_); } else { lastAction_ = primaryAction_; } } else { lastAction_ = primaryAction_; currentDesparateCount_ = 0; } return lastAction_.doAction(currentScore,desparationValue); } public boolean isActionSuccessful() { if(lastAction_!=null) { return lastAction_.isActionSuccessful(); } throw new RuntimeException("Assertion error : isActionSuccessful() called when no action has been done recently"); } public boolean undoAction() { if(lastAction_!=null) { final boolean successful = lastAction_.undoAction(); lastAction_ = null; return successful; } else { throw new RuntimeException("Assertion error : undoAction() called when no action has been done recently (or has already been undone)"); } } } //End of class Multi // -=-==-=--==--=-=-=-=-=-=-==--=-= private static class Multi implements UndoableAction { private final UndoableAction[] subActions_; private UndoableAction lastAction_ = null; private final Random random_; public Multi(UndoableAction[] subActions) { this.subActions_ = subActions; this.random_ = new Random(); } public double doAction(double currentScore, double desparationValue) { lastAction_ = subActions_[random_.nextInt(subActions_.length)]; return lastAction_.doAction(currentScore,desparationValue); } public boolean 
isActionSuccessful() { if(lastAction_!=null) { return lastAction_.isActionSuccessful(); } throw new RuntimeException("Assertion error : isActionSuccessful() called when no action has been done recently"); } /** * @return false */ public boolean isActionDeterministic() { return false; } public boolean undoAction() { if(lastAction_!=null) { final boolean successful = lastAction_.undoAction(); lastAction_ = null; return successful; } else { throw new RuntimeException("Assertion error : undoAction() called when no action has been done recently (or has already been undone)"); } } } //End of class Multi // -=-==-=--==--=-=-=-=-=-=-==--=-= private static class DistributedMulti implements UndoableAction { private final UndoableAction[] subActions_; private final double[] probabilities_; private UndoableAction lastAction_ = null; private final Random random_; public DistributedMulti(UndoableAction[] subActions, double[] proportions) { this.subActions_ = subActions; this.probabilities_ = new double[subActions.length]; double total = 0; for(int i = 0 ; i < subActions.length ; i++) { total+=proportions[i]; } for(int i = 0 ; i < subActions.length ; i++) { probabilities_[i] = proportions[i]/total; } this.random_ = new Random(); } /** * @return false */ public boolean isActionDeterministic() { return false; } public double doAction(double currentScore, double desparationValue) { double v = random_.nextDouble(); double total = 0; int index = subActions_.length-1; for(int i = 0 ; i < subActions_.length ; i++) { total+=probabilities_[i]; if(total>v) { index = i; break; } } lastAction_ = subActions_[index]; return lastAction_.doAction(currentScore,desparationValue); } public boolean isActionSuccessful() { if(lastAction_!=null) { return lastAction_.isActionSuccessful(); } throw new RuntimeException("Assertion error : isActionSuccessful() called when no action has been done recently"); } public boolean undoAction() { if(lastAction_!=null) { boolean successful = lastAction_.undoAction(); lastAction_ = null; return successful; } else { throw new RuntimeException("Assertion error : undoAction() called when no action has been done recently (or has already been undone)"); } } } //End of class DistributedMulti // -=-==-=--==--=-=-=-=-=-=-==--=-= private static class Combined implements UndoableAction { private final UndoableAction[] subActions_; private boolean deterministic_ = true; private boolean successful_ = false; public Combined(UndoableAction[] subActions) { this.subActions_ = subActions; } /** * @return true if all successful sub actions were deterministic */ public boolean isActionDeterministic() { return deterministic_; } public double doAction(double currentScore, double desparationValue) { boolean d = true; boolean s = true; for(int i = 0 ; i < subActions_.length ; i++) { UndoableAction a = subActions_[i]; double score = a.doAction(currentScore, desparationValue); if(a.isActionSuccessful()) { s = true; currentScore = score; d = d & a.isActionDeterministic(); } } deterministic_ = d; successful_ = s; return currentScore; } public boolean isActionSuccessful() { return successful_; } public boolean undoAction() { boolean result = true; if(successful_) { for(int i = subActions_.length -1 ; i >= 0 ; i--) { //undo in reverse application order UndoableAction a = subActions_[i]; if(a.isActionSuccessful()) { result = result & a.undoAction(); } } successful_ = false; return result; } else { throw new RuntimeException("Assertion error : undoAction() called when not successful"); } } } //End of class Combined } //End of class Utils 
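// Illustrative driver sketch (hypothetical: moveA and moveB stand for any two UndoableAction
// implementations, and currentScore is maintained by the caller). The composite below picks
// moveA three times out of four; an unsuccessful or unwanted move is rolled back:
//
//   UndoableAction composite = UndoableAction.Utils.getDistributedSelection(
//       new UndoableAction[] { moveA, moveB }, new double[] { 0.75, 0.25 });
//   double newScore = composite.doAction(currentScore, 0.0);     // desperation of 0 = not desperate
//   if(composite.isActionSuccessful()) {
//     if(newScore >= currentScore) { currentScore = newScore; }  // accept (assuming maximising)
//     else { composite.undoAction(); }                           // reject and restore previous state
//   }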
}pal-1.5.1/src/pal/algorithmics/StateProvider.java0000644000000000000000000000046107742741016020467 0ustar rootrootpackage pal.algorithmics; /** *

Title: * Description: * Copyright: Copyright (c) 2003 * Company:
* @author not attributable * @version 1.0 */ public interface StateProvider { public Object getStateReference(); public void restoreState(Object stateReference); }pal-1.5.1/src/pal/algorithmics/StoppingCriteria.java0000644000000000000000000003753607762652372021207 0ustar rootroot// StoppingCriteria.java // // (c) 1999-2003 PAL Development Core Team // // This package may be distributed under the // terms of the Lesser GNU General Public License (LGPL) package pal.algorithmics; /** * Title: StoppingCriteria * Description: A means of deciding when to stop * @author Matthew Goode * @version 1.0 */ import pal.util.AlgorithmCallback; public interface StoppingCriteria extends java.io.Serializable { public boolean isTimeToStop(); /** * Get an indication of how close to stopping we currently are * @return a value between 0 and 1 where zero means not likely to stop soon, and a value of one means likely to stop very soon */ public double getRelativeStoppingRatio(); /** * @param externalStablized if true than other factors have stablized */ public void newIteration(double currentScore, double bestScore, boolean maximising, boolean externalStablized, AlgorithmCallback callback); public void reset(); //=========================================== //=========== Static Factory Class ============= //=========================================== public static interface Factory extends java.io.Serializable { public StoppingCriteria newInstance(); } //=========================================== //=========== Static Util Class ============= //=========================================== public static class Utils { /** * A stopping criteria that stops after a set number of iterations * @param maxIterationCount the maximum number of iterations. */ public static final StoppingCriteria.Factory getIterationCount(int maxIterationCount) { return new IterationCountSC.SCFactory(maxIterationCount); } /** * A stopping criteria that works by counting how many iterations occur at a given score (either the best score or the * current score) and stopping when score does not change after a set number of generations * @param maxIterationCountAtCurrentScore the number of iterations to wait at the current score before stopping * @param matchBestScore if true will examine the best score so far, else will examine the current score so far. */ public static final StoppingCriteria.Factory getUnchangedScore(int maxIterationCountAtCurrentScore, boolean matchBestScore) { return new UnchangedScoreSC.SCFactory(maxIterationCountAtCurrentScore, matchBestScore); } /** * A stopping criteria that works by counting how many iterations occur at a given score (either the best score or the * current score) and stopping when score does not change after a set number of generations * @param maxIterationCountAtCurrentScore the number of iterations to wait at the current score before stopping * @param matchBestScore if true will examine the best score so far, else will examine the current score so far. 
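* An illustrative sketch of composing criteria (parameter values are arbitrary; doOneStep(), currentScore, bestScore and the AlgorithmCallback are assumed to be supplied by the caller):
*   StoppingCriteria.Factory factory = StoppingCriteria.Utils.getCombined(new StoppingCriteria.Factory[] {
*       StoppingCriteria.Utils.getIterationCount(10000),
*       StoppingCriteria.Utils.getNonExactUnchangedScore(200, true, 1e-6) });
*   StoppingCriteria stopper = factory.newInstance();
*   while(!stopper.isTimeToStop()) { doOneStep(); stopper.newIteration(currentScore, bestScore, true, true, callback); }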
*/ public static final StoppingCriteria.Factory getNonExactUnchangedScore(int maxIterationCountAtCurrentScore, boolean matchBestScore, double tolerance) { return new NonExactUnchangedScoreSC.SCFactory(maxIterationCountAtCurrentScore, matchBestScore, tolerance); } /** * A stopping criteria that is a composite of a set of criteria, stops when at least one * sub criteria wants to stop * @param subCriteria an array of StoppingCriteria to combine */ public static final StoppingCriteria.Factory getCombined(Factory[] subCriteria) { return new CombinedSC.SCFactory(subCriteria); } //Has Serialization code private static class IterationCountSC implements StoppingCriteria { int count_ = 0; int maxIterationCount_; // // Serialization code // private static final long serialVersionUID= -883722345529L; private void writeObject(java.io.ObjectOutputStream out) throws java.io.IOException { out.writeByte(1); //Version number out.writeInt(count_); out.writeInt(maxIterationCount_); } private void readObject(java.io.ObjectInputStream in) throws java.io.IOException, ClassNotFoundException{ byte version = in.readByte(); switch(version) { default : { count_ = in.readInt(); maxIterationCount_ = in.readInt(); break; } } } public IterationCountSC(int maxIterationCount) { this.maxIterationCount_ = maxIterationCount; } public void reset() { count_ = 0; } /** * Goes up as the count nears maximum * @return */ public double getRelativeStoppingRatio() { return count_/(double)maxIterationCount_; } public boolean isTimeToStop() { return count_>=maxIterationCount_; } /** * @param externalStablized if true than other factors have stablized */ public void newIteration(double currentScore, double bestScore, boolean maximising, boolean externalStablized, AlgorithmCallback callback) { count_++; callback.updateProgress(count_/(double)maxIterationCount_); } // ===== Factory ========== private static class SCFactory implements Factory { private int maxIterationCount_; // // Serialization code // private static final long serialVersionUID = -552478345529L; private void writeObject( java.io.ObjectOutputStream out ) throws java.io.IOException { out.writeByte( 1 ); //Version number out.writeInt(maxIterationCount_); } private void readObject( java.io.ObjectInputStream in ) throws java.io.IOException, ClassNotFoundException { byte version = in.readByte(); switch( version ) { default: { maxIterationCount_ = in.readInt(); break; } } } public SCFactory(int maxIterationCount) { this.maxIterationCount_ = maxIterationCount; } public StoppingCriteria newInstance() { return new IterationCountSC(maxIterationCount_); } } } //Has Serialization code private static class CombinedSC implements StoppingCriteria { private StoppingCriteria[] subCriteria_; // // Serialization code // private static final long serialVersionUID= -847823472529L; private void writeObject(java.io.ObjectOutputStream out) throws java.io.IOException { out.writeByte(1); //Version number out.writeObject(subCriteria_); } private void readObject(java.io.ObjectInputStream in) throws java.io.IOException, ClassNotFoundException{ byte version = in.readByte(); switch(version) { default : { subCriteria_ = (StoppingCriteria[])in.readObject(); break; } } } public CombinedSC(StoppingCriteria[] subCriteria) { this.subCriteria_ = subCriteria; } public void reset() { for(int i = 0 ; i < subCriteria_.length ; i++) { subCriteria_[i].reset(); } } public double getRelativeStoppingRatio() { double max = 0; for(int i = 0 ; i < subCriteria_.length ; i++) { max = 
Math.max(max,subCriteria_[i].getRelativeStoppingRatio()); } return max; } public boolean isTimeToStop() { for(int i = 0 ; i < subCriteria_.length ; i++) { if(subCriteria_[i].isTimeToStop()) { return true; } } return false; } /** * @param externalStablized if true than other factors have stablized */ public void newIteration(double currentScore, double bestScore, boolean maximising, boolean externalStablized, AlgorithmCallback callback) { for(int i = 0 ; i < subCriteria_.length ; i++) { subCriteria_[i].newIteration(currentScore,bestScore,maximising,externalStablized, callback); } } // ===== Factory ========== static class SCFactory implements Factory { Factory[] subCriteria_; // // Serialization code // private static final long serialVersionUID = -525566345529L; private void writeObject( java.io.ObjectOutputStream out ) throws java.io.IOException { out.writeByte( 1 ); //Version number out.writeObject(subCriteria_); } private void readObject( java.io.ObjectInputStream in ) throws java.io.IOException, ClassNotFoundException { byte version = in.readByte(); switch( version ) { default: { subCriteria_ = (Factory[])in.readObject(); break; } } } public SCFactory( Factory[] subCriteria ) { this.subCriteria_ = subCriteria; } public StoppingCriteria newInstance() { StoppingCriteria[] subs = new StoppingCriteria[subCriteria_.length]; for(int i = 0 ; i < subs.length ; i++) { subs[i] = subCriteria_[i].newInstance(); } return new CombinedSC(subs); } } } // -=-=-=-= //Has Serialization code private static class UnchangedScoreSC implements StoppingCriteria { private int count_ = 0; private int maxIterationCountAtCurrentScore_; private double lastScore_; private boolean matchBestScore_; // // Serialization code // private static final long serialVersionUID= -3242345529L; private void writeObject(java.io.ObjectOutputStream out) throws java.io.IOException { out.writeByte(1); //Version number out.writeInt(count_); out.writeInt(maxIterationCountAtCurrentScore_); out.writeDouble(lastScore_); out.writeBoolean(matchBestScore_); } private void readObject(java.io.ObjectInputStream in) throws java.io.IOException, ClassNotFoundException{ byte version = in.readByte(); switch(version) { default : { count_ = in.readInt(); maxIterationCountAtCurrentScore_ = in.readInt(); lastScore_ = in.readDouble(); matchBestScore_ = in.readBoolean(); break; } } } public UnchangedScoreSC(int maxIterationCountAtCurrentScore, boolean matchBestScore) { this.maxIterationCountAtCurrentScore_ = maxIterationCountAtCurrentScore; this.matchBestScore_ = matchBestScore; } public void reset() { count_ = 0; } /** * Goes up as the count nears maximum * @return */ public double getRelativeStoppingRatio() { return count_/(double)maxIterationCountAtCurrentScore_; } public boolean isTimeToStop() { return count_>=maxIterationCountAtCurrentScore_; } /** * @param externalStablized if true than other factors have stablized */ public void newIteration(double currentScore, double bestScore, boolean maximising, boolean externalStablized, AlgorithmCallback callback){ if(!externalStablized) { return; } if(count_==0) { lastScore_ = (matchBestScore_ ? 
bestScore : currentScore); } else { if(matchBestScore_) { if((!maximising&&(bestScore<lastScore_))||(maximising&&(bestScore>lastScore_))) { lastScore_ = bestScore; count_ = 0; } } else { if(lastScore_!=currentScore) { lastScore_ = currentScore; count_ = 0; callback.updateStatus("Restarting count..."); } } } count_++; } // ===== Factory ========== static class SCFactory implements Factory { private int maxIterationCountAtCurrentScore_; private boolean matchBestScore_; // // Serialization code // private static final long serialVersionUID = -1234567785529L; private void writeObject( java.io.ObjectOutputStream out ) throws java.io.IOException { out.writeByte( 1 ); //Version number out.writeInt(maxIterationCountAtCurrentScore_); out.writeBoolean(matchBestScore_); } private void readObject( java.io.ObjectInputStream in ) throws java.io.IOException, ClassNotFoundException { byte version = in.readByte(); switch( version ) { default: { maxIterationCountAtCurrentScore_ = in.readInt(); matchBestScore_ = in.readBoolean(); break; } } } public SCFactory( int maxIterationCountAtCurrentScore, boolean matchBestScore ) { this.maxIterationCountAtCurrentScore_ = maxIterationCountAtCurrentScore; this.matchBestScore_ = matchBestScore; } public StoppingCriteria newInstance() { return new UnchangedScoreSC(maxIterationCountAtCurrentScore_,matchBestScore_); } } } // -==-=--= private static class NonExactUnchangedScoreSC implements StoppingCriteria { private int count_ = 0; private int maxIterationCountAtCurrentScore_; private double lastScore_; private boolean matchBestScore_; private double tolerance_; // // Serialization Code // private static final long serialVersionUID = -56982234429L; private void writeObject( java.io.ObjectOutputStream out ) throws java.io.IOException { out.writeByte( 1 ); //Version number out.writeInt( count_ ); out.writeInt( maxIterationCountAtCurrentScore_ ); out.writeDouble( lastScore_ ); out.writeBoolean( matchBestScore_ ); out.writeDouble( tolerance_ ); } private void readObject( java.io.ObjectInputStream in ) throws java.io.IOException, ClassNotFoundException { byte version = in.readByte(); switch( version ) { default: { count_ = in.readInt(); maxIterationCountAtCurrentScore_ = in.readInt(); lastScore_ = in.readDouble(); matchBestScore_ = in.readBoolean(); tolerance_ = in.readDouble(); break; } } } public NonExactUnchangedScoreSC(int maxIterationCountAtCurrentScore, boolean matchBestScore, double tolerance) { this.maxIterationCountAtCurrentScore_ = maxIterationCountAtCurrentScore; this.tolerance_ = tolerance; this.matchBestScore_ = matchBestScore; } public void reset() { count_ = 0; } public boolean isTimeToStop() { return count_>=maxIterationCountAtCurrentScore_; } /** * Goes up as the count nears maximum * @return */ public double getRelativeStoppingRatio() { return count_/(double)maxIterationCountAtCurrentScore_; } /** * @param externalStablized if true then other factors have stabilized */ public void newIteration(double currentScore, double bestScore, boolean maximising, boolean externalStablized, AlgorithmCallback callback){ if(!externalStablized) { return; } if(count_==0) { lastScore_ = (matchBestScore_ ? 
bestScore : currentScore); } else { if(matchBestScore_) { if((!maximising&&(bestScore<lastScore_-tolerance_))||(maximising&&(bestScore>lastScore_+tolerance_))) { lastScore_ = bestScore; count_ = 0; } } else { if(Math.abs(lastScore_-currentScore)>tolerance_) { lastScore_ = currentScore; count_ = 0; } } } count_++; } // ===== Factory ========== static class SCFactory implements Factory { private int maxIterationCountAtCurrentScore_; private boolean matchBestScore_; private double tolerance_; // // Serialization Code // private static final long serialVersionUID = -4523982234429L; private void writeObject( java.io.ObjectOutputStream out ) throws java.io.IOException { out.writeByte( 1 ); //Version number out.writeInt( maxIterationCountAtCurrentScore_ ); out.writeBoolean( matchBestScore_ ); out.writeDouble( tolerance_ ); } private void readObject( java.io.ObjectInputStream in ) throws java.io.IOException, ClassNotFoundException { byte version = in.readByte(); switch( version ) { default: { maxIterationCountAtCurrentScore_ = in.readInt(); matchBestScore_ = in.readBoolean(); tolerance_ = in.readDouble(); break; } } } public SCFactory(int maxIterationCountAtCurrentScore, boolean matchBestScore, double tolerance) { this.maxIterationCountAtCurrentScore_ = maxIterationCountAtCurrentScore; this.matchBestScore_ = matchBestScore; this.tolerance_ =tolerance; } public StoppingCriteria newInstance() { return new NonExactUnchangedScoreSC(maxIterationCountAtCurrentScore_,matchBestScore_, tolerance_); } } } } }pal-1.5.1/src/pal/algorithmics/ObjectState.java0000644000000000000000000000346307742735320020111 0ustar rootroot// ObjectState.java // // (c) 1999-2001 PAL Development Core Team // // This package may be distributed under the // terms of the Lesser GNU General Public License (LGPL) package pal.algorithmics; /** *

Title: Object State * Description: A stateful, single-threaded object that can act upon itself
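* A hedged usage outline (the concrete ObjectState implementation, its scoring, and the accept(...) test below are assumed, not part of this interface):
*   Object checkpoint = state.getStateReference();        // snapshot, in case a later undo is not possible
*   double newScore = state.doAction(currentScore, 0.0);  // desperation value of 0 = not desperate
*   if(!accept(newScore, currentScore, state.isMaximiseScore())) {
*     if(!state.undoAction()) { state.restoreState(checkpoint); }  // fall back to the snapshot if undo fails
*   }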

* @author Matthew Goode * @version 1.0 */ public interface ObjectState { /** * Perform an action * @param currentScore The current score before doing the action * @param desparationValue An indication by the processing machines of willingness to do more extreme actions. A value of 0 means not desparate at all, a value of 1 means very desparate * @return the current score after doing the action (or the input score if not successful) */ public double doAction(double currentScore, double desparationValue); /** * Undo the previous action if possible * @return true if undo was successful, false otherwise */ /** * Undo the last action (if it was successful) * Users of an ObjectState should accept that sometimes undoing an action isn't possible. * If an undo was not possible the object state should be in the same state as it was previous to the call to undoAction() * @return true if undo was successful */ public boolean undoAction(); /** * * @return An object that can be used to reconstruct the current state of this object */ public Object getStateReference(); /** * Used to restore the state of the this object to that of a previous time point * @param stateReference An object returned by getStateReference() */ public void restoreState(Object stateReference); /** * If true, than a bigger score is better, otherwise a smaller score is better * @return True if the aim is to maximise */ public boolean isMaximiseScore(); }pal-1.5.1/src/pal/algorithmics/Markable.java0000644000000000000000000000070207742735450017415 0ustar rootroot// Markable.java // // (c) 1999-2001 PAL Development Core Team // // This package may be distributed under the // terms of the Lesser GNU General Public License (LGPL) package pal.algorithmics; /** *

Title: Markable * Description: An interface for objects that can have their state marked
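* A minimal sketch (subject is any Markable implementation; the trial change and the rejection test are assumed): subject.mark(); applyTrialChange(subject); if(rejected) { subject.undoToMark(); }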

* @author Matthew Goode * @version 1.0 */ public interface Markable { public void mark(); public void undoToMark(); }pal-1.5.1/src/pal/algorithmics/ProbabilityIterator.java0000644000000000000000000000772507742735356021712 0ustar rootroot// ProbabilityIterator.java // // (c) 1999-2003 PAL Development Core Team // // This package may be distributed under the // terms of the Lesser GNU General Public License (LGPL) package pal.algorithmics; /** * A function for obtaining probabilities (that may change over time) * @author Matthew Goode */ public interface ProbabilityIterator { public double getNextProbability(double currentBest, double testValue, boolean maximising); public boolean isStablised(); public static interface Factory { public ProbabilityIterator newInstance(); } public static class Utils { public static final Factory getConstant(double value) { return new Constant.PIFactory(value); } public static final Factory getHillClimb() { return new HillClimb.PIFactory(); } public static final Factory getBoltzman(double initialTemperature, double temperatureDecay, int chainLength) { return new Boltzman.PIFactory(initialTemperature,temperatureDecay,chainLength); } //============================================================= private static class Constant implements ProbabilityIterator { double value_; public Constant(double value ) { this.value_ = value; } public double getNextProbability(double currentBest, double testValue, boolean maximising) { return value_; } public boolean isStablised() { return true; } static class PIFactory implements Factory { Constant intstance_; public PIFactory(double value ) { this.intstance_ = new Constant(value); } public ProbabilityIterator newInstance() { return intstance_; } } } private static class HillClimb implements ProbabilityIterator { public double getNextProbability(double currentBest, double testValue, boolean maximising) { if(maximising) { return(testValue>=currentBest ? 1 : 0); } return(testValue<=currentBest ? 1 : 0); } public boolean isStablised() { return true; } static class PIFactory implements Factory { private static final HillClimb INSTANCE = new HillClimb(); public ProbabilityIterator newInstance() { return INSTANCE; } } } // === Boltzman ==== private static class Boltzman implements ProbabilityIterator{ double initialTemperature_; double temperatureDecay_; int chainLength_; double k_ = 1; double currentTemperature_; int chainPosition_; public Boltzman(double initialTemperature, double temperatureDecay, int chainLength) { this.initialTemperature_ = initialTemperature; this.temperatureDecay_ = temperatureDecay; this.chainLength_ = chainLength; this.currentTemperature_ = initialTemperature_; this.chainPosition_ = 0; } public boolean isStablised() { return currentTemperature_<0.005; } public double getNextProbability(double currentValue, double newValue, boolean maximising) { double toReturn; if(maximising) { if(newValue>currentValue) { toReturn = 1; } else { toReturn = Math.exp(-(currentValue-newValue)/(k_*currentTemperature_)); } } else { if(newValue0){ return store_[0].getObject(); } return null; } /** * Obtain the best score which may be the highest score (if maximising), or the lowest score (if minimising) * @return the best score */ public final double getBestScore() { return numberInStore_==0? 
0 : store_[0].getScore(); } /** * Enquire to the merits of adding an object with a particular score * @param score The score in question * @return true if an object with such a score is going to make a difference to the current state of this ranker */ public final boolean isWorthAdding(final double score, boolean maximising) { return (numberInStore_!=store_.length) || (numberInStore_==0) || (maximising ? score > worstScore_: score < worstScore_ ); } /** * Obtain the objects in this ranker * @return the objects in the order of bestness (such that the first is the best) */ public final Object[] getObjects() { Object[] result = new Object[numberInStore_]; for(int i = 0 ; i < numberInStore_ ; i++) { result[i] = store_[i].getObject(); } return result; } /** * Add in (if it's good enough) a new object based on a score * If an object has equality with an object already in the store that object is replaced by the new version * @param object The object to add in * @param score The score of the object */ public void add(Object object, double score, boolean maximising) { int insertionPoint = numberInStore_; //Need to fix so that first sweep checks if object is already in store (by object equality), // and if so just replace and reshuffle according to score //Else do as done here and just insert... if(maximising) { for(int i = 0 ; i < numberInStore_ ; i++) { if(store_[i].getObject().equals(object)) { store_[i].update(object,score); return; } if(store_[i].hasLowerScore(score)) { insertionPoint = i; break; } } } else { for(int i = 0 ; i < numberInStore_ ; i++) { if(store_[i].getObject().equals(object)) { store_[i].update(object,score); return; } if(store_[i].hasHigherScore(score)) { insertionPoint = i; break; } } } insert(insertionPoint, new RankedObject(object,score)); } private void insert(int insertionPoint, RankedObject ro) { if(insertionPoint=store_.length-1))) { if(store_.length==numberInStore_) { System.arraycopy(store_,insertionPoint,store_,insertionPoint+1,numberInStore_-insertionPoint-1); } else { System.arraycopy(store_,insertionPoint,store_,insertionPoint+1,numberInStore_-insertionPoint); } } store_[insertionPoint] = ro; if(numberInStore_!=store_.length) { numberInStore_++; } worstScore_ = store_[numberInStore_-1].getScore(); } } public String toString() { StringBuffer sb = new StringBuffer(); sb.append('('); sb.append(numberInStore_); sb.append(") "); for(int i = 0 ; i < numberInStore_ ; i++) { sb.append(store_[i]); if(i!=numberInStore_-1) { sb.append(", "); } } return sb.toString(); } // -=-==--=-=-=-=-=-=-=-==--==--=-=-=-==-=-=-=- /** * A coupling of object and score */ private static final class RankedObject { private Object object_; private double score_; public RankedObject(Object object, double score) { update(object,score); } public final boolean hasLowerScore(double otherScore) { return score_otherScore; } public final void update(Object object, double score){ this.object_ = object; this.score_ = score; } public Object getObject() { return object_; } public double getScore() { return score_; } public String toString() { return "["+object_+", "+score_+"]"; } } }pal-1.5.1/src/pal/algorithmics/SearchEngine.java0000644000000000000000000000520307744511436020231 0ustar rootroot// SearchEngine.java // // (c) 1999-2003 PAL Development Core Team // // This package may be distributed under the // terms of the Lesser GNU General Public License (LGPL) package pal.algorithmics; import pal.util.*; import java.util.*; /** * A simplistic class (most of the work is done elsewhere) that handles basic 
search algorithms * * @version $Id: SearchEngine.java,v 1.2 2003/10/19 02:35:26 matt Exp $ * * @author Matthew Goode */ public class SearchEngine { private final ProbabilityIterator.Factory probabilityIteratorFactory_; public SearchEngine( ProbabilityIterator.Factory probabilityIteratorFactory) { this.probabilityIteratorFactory_ = probabilityIteratorFactory; } public void run(AlgorithmCallback callback, final double initialScore, ObjectState subject, StoppingCriteria.Factory stoppingCriteria, Ranker ranker) { Object bestState = subject.getStateReference(); double score = initialScore; StoppingCriteria stopper = stoppingCriteria.newInstance(); ProbabilityIterator acceptanceProbability = probabilityIteratorFactory_.newInstance(); double bestScore = Double.POSITIVE_INFINITY; int evaluationCount = 0; final boolean maximising = subject.isMaximiseScore(); while(!stopper.isTimeToStop()) { double newScore = subject.doAction(score,stopper.getRelativeStoppingRatio()); // double newScore = assessor.evaluate(subject); evaluationCount++; double probability = acceptanceProbability.getNextProbability(score, newScore, maximising); if(ranker.isWorthAdding(newScore,maximising)) { ranker.add(subject.getStateReference(), newScore,maximising); System.out.println("Ranker best score:"+ranker.getBestScore()); if(ranker.getBestScore()!=bestScore) { bestScore = ranker.getBestScore(); System.out.println("Best score:"+bestScore); } } if( (!maximising&&(newScore<=score)) || (maximising&&(newScore>=score))|| probability==1.0|| Math.random()Title: UnconstrainedLikelihoodModel

* Description: An UnconstrainedLikelihoodModel object must be treated as a stateful, single-threaded object that can be used for calculating components in an overall likelihood calculation.
* UnconstrainedLikelihoodModel is a generalisation of the LHCalculator code that no longer refers to SubstitutionModel directly (LHCalculator is still needed by other code, so it remains).
* History:
* 25/10/2003 Added leaf handling interface
* 30/3/2004 Changed certain methods to more intelligent ones (relating to posterior distribution of sites). Added abstract External class.
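* An illustrative outline for scoring a single two-leaf arrangement (the Instance, PatternInfo, per-leaf pattern/state matchups, pattern count and branch distance are all assumed to come from elsewhere):
*   UnconstrainedLikelihoodModel.Leaf left  = instance.createNewLeaf(leftStateMatchup, numberOfPatterns);
*   UnconstrainedLikelihoodModel.Leaf right = instance.createNewLeaf(rightStateMatchup, numberOfPatterns);
*   UnconstrainedLikelihoodModel.External external = instance.createNewExternal();
*   ConditionalProbabilityStore temp = instance.createAppropriateConditionalProbabilityStore(false);
*   double logL = external.calculateLogLikelihood(distance, centerPattern,
*       left.getFlatConditionalProbabilities(), right.getFlatConditionalProbabilities(), temp);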

* @author Matthew Goode * @version 1.0 * @note needs to have the use of the word likelihood altered in certain cases (to conditional probability) * */ import pal.misc.*; public interface UnconstrainedLikelihoodModel { /** * The External calculator does not maintain any state and is approapriate for * calculation where a store is provided */ public static interface External extends java.io.Serializable { /** * * @param centerPattern the pattern information * @param leftConditionalProbabilities Implementations must not overwrite or change * @param rightConditionalProbabilities Implementations must not overwrite or change * @param resultStore Where to stick the created categoryPatternState information * @note calls to getLastConditionalProbabilities() does not have to be valid after call this method */ public void calculateFlat( PatternInfo centerPattern, ConditionalProbabilityStore leftConditionalProbabilities, ConditionalProbabilityStore rightConditionalProbabilities, ConditionalProbabilityStore resultStore ); /** * * @param distance the evolutionary distance * @param centerPattern the pattern information * @param leftConditionalProbabilities Implementations must not overwrite or change * @param rightConditionalProbabilities Implementations must not overwrite or change * @param resultStore Where to stick the created categoryPatternState information * @note calls to getLastConditionalProbabilities() does not have to be valid after call this method */ public void calculateExtended( double distance, PatternInfo centerPattern, ConditionalProbabilityStore leftConditionalProbabilities, ConditionalProbabilityStore rightConditionalProbabilities, ConditionalProbabilityStore resultStore ); /** * Extend the conditionals back in time by some distance * @param distance The evolutionary distance to extend by * @param numberOfPatterns the number of patterns * @param conditionalProbabilities The probabilities to extend */ public void calculateSingleExtendedDirect( double distance, int numberOfPatterns, ConditionalProbabilityStore conditionalProbabilities ); /** * Extend the conditionals back in time by some distance * @param distance The evolutionary distance to extend by * @param numberOfPatterns the number of patterns * @param baseConditionalProbabilities The probabilities to extend * @param resultConditionalProbabilities The probabilities to extend */ public void calculateSingleExtendedIndirect( double distance, int numberOfPatterns, ConditionalProbabilityStore baseConditionalProbabilities, ConditionalProbabilityStore resultConditionalProbabilities ); /** * Calculate the likelihood given two sub trees (left, right) and their flat (unextend) likeihood probabilities * @param distance The evolutionary distance * @param centerPattern the pattern information * @param leftFlatConditionalProbabilities The left conditional probabilities (unextended) * @param rightFlatConditionalProbabilities The right conditional probabilities (unextended) * @param tempStore may be used internally to calculate likelihood * @return the log likelihood */ public double calculateLogLikelihood( double distance, PatternInfo centerPattern, ConditionalProbabilityStore leftFlatConditionalProbabilities, ConditionalProbabilityStore rightFlatConditionalProbabilities, ConditionalProbabilityStore tempStore ); /** * Calculate the likelihood given two sub trees (left, right) and their extended likeihood probabilities * @param centerPattern the pattern information * @param leftConditionalProbabilities The left conditional probabilities * @param 
rightConditionalProbabilities The right conditional probabilities * @return the Log likelihood */ public double calculateLogLikelihood( PatternInfo centerPattern, ConditionalProbabilityStore leftConditionalProbabilities, ConditionalProbabilityStore rightConditionalProbabilities ); /** * Calculate the likelihood given the conditional probabilites at the root * @param patternWeights the weights of each pattern * @param numberOfPatterns the number of patterns * @return the Log likelihood */ public double calculateLogLikelihoodSingle( int[] patternWeights, int numberOfPatterns, ConditionalProbabilityStore conditionalProbabilityStore); /** * Calculate the conditional probabilities of each pattern for each category * @param centerPattern the pattern information * @param leftConditionalProbabilitiesStore The left conditional probabilities * @param rightConditionalProbabilitiesStore The right conditional probabilities */ public SiteDetails calculateSiteDetailsRooted( PatternInfo centerPattern, ConditionalProbabilityStore leftConditionalProbabilitiesStore, ConditionalProbabilityStore rightConditionalProbabilitiesStore ); /** * Calculate the conditional probabilities of each pattern for each category * @param distance The distance between the two nodes * @param centerPattern the pattern information * @param leftConditionalProbabilitiesStore The left conditional probabilities * @param rightConditionalProbabilitiesStore The right conditional probabilities * @param tempStore after call will hold a matrix of values in the form [cat][pattern], where [cat][pattern] represents the site probability under a particular category/class, *not* multiplied by the category probability or pattern weights */ public SiteDetails calculateSiteDetailsUnrooted( double distance, PatternInfo centerPattern, ConditionalProbabilityStore leftConditionalProbabilitiesStore, ConditionalProbabilityStore rightConditionalProbabilitiesStore, ConditionalProbabilityStore tempStore ); } //End of class External // ================================================================================================= // ================= Internal ====================================================================== // ================================================================================================= /** * The Internal calculator may maintain state and is approapriate permanent attachment * to internal nodes of the tree structure */ public static interface Internal { /** * calculate flat probability information (not extended over a branch). * @param centerPattern the pattern information * @param leftConditionalProbabilities Implementations should be allowed to overwrite in certain cases * @param rightConditionalProbabilities Implementations should be allowed to overwrite in certain cases * @return true if results built from cached information * @note An assumption may be made that after a call to this method the leftConditionals and rightConditionals are not used again! 
*/ public ConditionalProbabilityStore calculateFlat( PatternInfo centerPattern, ConditionalProbabilityStore leftConditionalProbabilities, ConditionalProbabilityStore rightConditionalProbabilities ); /** * * @param distance The evolutionary distance * @param centerPattern the pattern information * @param leftConditionalProbabilities Implementations should be allowed to overwrite in certain cases * @param rightConditionalProbabilities Implementations should be allowed to overwrite in certain cases * @return resulting conditional probabilities * @note An assumption may be made that after a call to this method the leftConditionals and rightConditionals are not used again! */ public ConditionalProbabilityStore calculateExtended( double distance, PatternInfo centerPattern, final ConditionalProbabilityStore leftConditionalProbabilities, final ConditionalProbabilityStore rightConditionalProbabilities); } //End of Internal // ================================================================================================= // ================= Leaf ========================================================================== // ================================================================================================= /** * A LHCalculator.Leaf object is attached to each leaf node and can be used to calculated conditional probabilities across the related branch. * Allows for quick implementations as well as implementations that cope correctly with ambiguous characters * @note Should not be made serializable! */ public static interface Leaf { public ConditionalProbabilityStore getFlatConditionalProbabilities(); public ConditionalProbabilityStore getExtendedConditionalProbabilities( double distance); /** * Create a new Leaf calculator that has exactly the same properties as this one (but is different such that it may be used independently) * @return a copy of this leaf calculator */ public Leaf getCopy(); } public static interface Instance extends java.io.Serializable { /** * Create anew leaf calculator * @param patternStateMatchup The sequence as reduced to patterns. This should just be one state per pattern. * For example given a sequence [ 0, 1,0,1,3,0] a patternMatchup may be [0,1,3] (the first element is the first * pattern, which is state 0, the second element is the second pattern which is 1, and the third element is the * third pattern (novel pattern) which is state 3) * @param numberOfPatterns The number of patterns in the patternStateMatchup array * @return a leaf calculator object */ public Leaf createNewLeaf(int[] patternStateMatchup, int numberOfPatterns); public External createNewExternal(); public Internal createNewInternal(); /** * If true, then user can assume that areas of trees that haven't changed, and the model parameters haven't be altered, * can have their conditionals cached. * @return */ public boolean isAllowCaching(); public ConditionalProbabilityStore createAppropriateConditionalProbabilityStore( boolean isForLeaf ); public String getSubstitutionModelSummary(); public NeoParameterized getParameterAccess(); } }pal-1.5.1/src/pal/eval/SingleSplitMolecularClockLikelihoodModel.java0000644000000000000000000011276010141732236024175 0ustar rootroot// SimpleMolecularClockLikelihoodModel.java // // (c) 1999-2004 PAL Development Team // // This package may be distributed under the // terms of the Lesser GNU General Public License (LGPL) package pal.eval; /** *

Title: SimpleMolecularClockLikelihoodModel * Description:
* @author Matthew Goode * @version 1.0 */ import java.io.*; import pal.datatype.*; import pal.math.*; import pal.misc.*; import pal.substmodel.*; public class SingleSplitMolecularClockLikelihoodModel implements MolecularClockLikelihoodModel { private static final boolean isUseLowerModelOnly(double changeHeight, double beforeSplitHeight, double lowerHeight) { return (changeHeight>=beforeSplitHeight); } private static final boolean isUseUpperSampleOnly(double changeHeight, double beforeSplitHeight, double afterSplitHeight) { return (changeHeight<=afterSplitHeight); } // -=-=--==-=-=-=---=-==-=--==-=-=-=- private static final class ExternalImpl implements External { private final LHCalculator.External base_; private final CombineModel model_; private final HeightConverter heightConverter_; public ExternalImpl(LHCalculator.External base, CombineModel model, HeightConverter heightConverter) { this.base_ = base; this.model_ = model; this.heightConverter_ = heightConverter; } public void calculateSingleExtendedConditionals(double topBaseHeight, double bottomBaseHeight, int numberOfPatterns, ConditionalProbabilityStore baseConditionalProbabilities, ConditionalProbabilityStore resultConditionalProbabilities) { model_.setup(bottomBaseHeight,heightConverter_,false); base_.calculateSingleExtendedIndirect(model_.getAdjustedDistance(topBaseHeight), model_,numberOfPatterns,baseConditionalProbabilities,resultConditionalProbabilities); } public void calculateSingleDescendentExtendedConditionals( double topBaseHeight, double bottomBaseHeight, PatternInfo centerPattern, ConditionalProbabilityStore descendentConditionalProbabilities ) { model_.setup(bottomBaseHeight,heightConverter_,false); base_.calculateSingleExtendedDirect(model_.getAdjustedDistance(topBaseHeight), model_,centerPattern.getNumberOfPatterns(),descendentConditionalProbabilities); } /** */ public void calculateSingleAscendentExtendedConditionalsDirect( double topBaseHeight, double bottomBaseHeight, PatternInfo centerPattern, ConditionalProbabilityStore ascendentConditionalProbabilityProbabilties ) { // System.out.println("**** ASCENDENT 3************"); model_.setup(bottomBaseHeight,heightConverter_,true); base_.calculateSingleExtendedDirect(model_.getAdjustedDistance(topBaseHeight), model_,centerPattern.getNumberOfPatterns(),ascendentConditionalProbabilityProbabilties); } /** */ public void calculateSingleAscendentExtendedConditionalsIndirect( double topBaseHeight, double bottomBaseHeight, PatternInfo centerPattern, ConditionalProbabilityStore baseAscendentConditionalProbabilityProbabilties, ConditionalProbabilityStore resultConditionalProbabilityProbabilties ) { // System.out.println("**** ASCENDENT 2************"); model_.setup(bottomBaseHeight,heightConverter_,true); base_.calculateSingleExtendedIndirect(model_.getAdjustedDistance(topBaseHeight), model_,centerPattern.getNumberOfPatterns(),baseAscendentConditionalProbabilityProbabilties,resultConditionalProbabilityProbabilties); } public void calculateExtendedConditionals( double topBaseHeight, double bottomBaseHeight, PatternInfo centerPattern, ConditionalProbabilityStore leftConditionalProbabilities, ConditionalProbabilityStore rightConditionalProbabilities, ConditionalProbabilityStore resultStore ) { model_.setup(bottomBaseHeight,heightConverter_,false); base_.calculateExtended(model_.getAdjustedDistance(topBaseHeight), model_,centerPattern,leftConditionalProbabilities,rightConditionalProbabilities,resultStore); } /** * Calculate the likelihood given a non root node * @param nodeHeight 
the height of node doing the likelihood calculation * @param centerPatter assumed left is ascendent component, right is descendent * @param ascendentConditionalProbabilities Assumed to be extended (downwards) to the nodeHeight * @param descendentConditionalProbabilities Assumed to be extended (upwards) to the nodeHeight * @return the Log likelihood */ public double calculateLogLikelihoodNonRoot( double nodeHeight, PatternInfo centerPattern, ConditionalProbabilityStore ascendentConditionalProbabilitiesStore, ConditionalProbabilityStore descendentConditionalProbabilitiesStore ) { model_.setup(nodeHeight,heightConverter_,false); return base_.calculateLogLikelihood(model_,centerPattern,ascendentConditionalProbabilitiesStore,descendentConditionalProbabilitiesStore); } /** * Calculate the likelihood given two sub trees (left, right) and their extended likeihood probabilities * @param rootHeight the height of the likelihood calculation * @param leftConditionalProbabilities Assumed to be extended to the rootHeight * @param rightConditionalProbabilities Assumed to be extended to the rootHeight * @return the Log likelihood */ public double calculateLogLikelihood( double rootHeight, PatternInfo centerPattern, ConditionalProbabilityStore leftConditionalProbabilitiesStore, ConditionalProbabilityStore rightConditionalProbabilitiesStore ) { model_.setup(rootHeight,heightConverter_,false); return base_.calculateLogLikelihood(model_,centerPattern,leftConditionalProbabilitiesStore,rightConditionalProbabilitiesStore); } public double calculateLogLikelihoodSingle( double rootHeight, PatternInfo centerPattern, ConditionalProbabilityStore conditionalProbabilitiesStore ) { model_.setup(rootHeight,heightConverter_,false); return base_.calculateLogLikelihoodSingle(model_,centerPattern.getPatternWeights(),centerPattern.getNumberOfPatterns(),conditionalProbabilitiesStore); } public void calculateFlatConditionals( double rootHeight, PatternInfo centerPattern, ConditionalProbabilityStore leftConditionalProbabilitiesStore, ConditionalProbabilityStore rightConditionalProbabilitiesStore, ConditionalProbabilityStore resultConditionalProbabilitiesStore) { model_.setup(rootHeight,heightConverter_,false); base_.calculateFlat(centerPattern,leftConditionalProbabilitiesStore,rightConditionalProbabilitiesStore,resultConditionalProbabilitiesStore); } public SiteDetails calculateSiteDetails( double rootHeight, PatternInfo centerPattern, ConditionalProbabilityStore leftConditionalProbabilitiesStore, ConditionalProbabilityStore rightConditionalProbabilitiesStore ) { model_.setup(rootHeight,heightConverter_,false); return base_.calculateSiteDetailsRooted(model_,centerPattern, leftConditionalProbabilitiesStore,rightConditionalProbabilitiesStore); } public void calculateFlatConditionals( double rootHeight, PatternInfo centerPattern, ConditionalProbabilityStore leftConditionalProbabilitiesStore, ConditionalProbabilityStore rightConditionalProbabilitiesStore, ConditionalProbabilityStore resultConditionalProbabilitiesStore, double[] sampleHeights) { model_.setup(rootHeight,heightConverter_,false); base_.calculateFlat(centerPattern,leftConditionalProbabilitiesStore,rightConditionalProbabilitiesStore,resultConditionalProbabilitiesStore); } } // -=-=--==-=-=-=---=-==-=--==-=-=-=- private static final class LeafImpl implements Leaf { private final LHCalculator.Leaf base_; private final CombineModel model_; private final int numberOfPatterns_; private final HeightConverter heightConverter_; public LeafImpl(LHCalculator.Leaf base, CombineModel 
model, int numberOfPatterns, HeightConverter heightConverter) { this.base_ = base; this.model_ = model; this.numberOfPatterns_ = numberOfPatterns; this.heightConverter_ = heightConverter; } public ConditionalProbabilityStore calculateExtendedConditionals(double topBaseHeight, double bottomBaseHeight) { model_.setup(bottomBaseHeight,heightConverter_,false); return base_.getExtendedConditionalProbabilities(model_.getAdjustedDistance(topBaseHeight),model_,true); } public ConditionalProbabilityStore calculateFlatConditionals(double relatedHeight) { return base_.getFlatConditionalProbabilities(); } } private static final class InternalImpl implements Internal { private final LHCalculator.Internal base_; private final CombineModel model_; private final HeightConverter heightConverter_; public InternalImpl(LHCalculator.Generator generator, CombineModel model, HeightConverter heightConverter) { this.base_ = generator.createNewInternal(); this.model_ = model; this.heightConverter_ = heightConverter; } public ConditionalProbabilityStore calculatePostExtendedFlatConditionals( double topBaseHeight, double bottomBaseHeight, PatternInfo centerPattern, ConditionalProbabilityStore leftConditionalProbabilityProbabilties, ConditionalProbabilityStore rightConditionalProbabilityProbabilties ) { model_.setup(bottomBaseHeight,heightConverter_,false); return base_.calculatePostExtendedFlat(model_.getAdjustedDistance(topBaseHeight), model_,centerPattern,leftConditionalProbabilityProbabilties,rightConditionalProbabilityProbabilties,true); } public ConditionalProbabilityStore calculateExtendedConditionals( final double topBaseHeight, final double bottomBaseHeight, final PatternInfo centerPattern, final ConditionalProbabilityStore leftConditionalProbabilityProbabilties, final ConditionalProbabilityStore rightConditionalProbabilityProbabilties ) { model_.setup(bottomBaseHeight,heightConverter_,false); return base_.calculateExtended(model_.getAdjustedDistance(topBaseHeight), model_,centerPattern,leftConditionalProbabilityProbabilties,rightConditionalProbabilityProbabilties,true); } public ConditionalProbabilityStore calculateAscendentExtendedConditionals( double topBaseHeight, double bottomBaseHeight, PatternInfo centerPattern, ConditionalProbabilityStore ascenedentConditionalProbabilityProbabilties, ConditionalProbabilityStore otherConditionalProbabilityProbabilties ) { // System.out.println("**** ASCENDENT 1************"); model_.setup(bottomBaseHeight,heightConverter_,true); return base_.calculateExtended(model_.getAdjustedDistance(topBaseHeight), model_,centerPattern,ascenedentConditionalProbabilityProbabilties,otherConditionalProbabilityProbabilties,true); } public ConditionalProbabilityStore calculateAscendentFlatConditionals( PatternInfo centerPattern, ConditionalProbabilityStore ascenedentConditionalProbabilityProbabilties, ConditionalProbabilityStore otherConditionalProbabilityProbabilties ) { // System.out.println("**** ASCENDENT FLAT ************"); return base_.calculateFlat(centerPattern,ascenedentConditionalProbabilityProbabilties,otherConditionalProbabilityProbabilties); } public ConditionalProbabilityStore calculateFlatConditionals( final PatternInfo centerPattern, final ConditionalProbabilityStore leftConditionalProbabilityProbabilties, final ConditionalProbabilityStore rightConditionalProbabilityProbabilties ) { return base_.calculateFlat(centerPattern,leftConditionalProbabilityProbabilties,rightConditionalProbabilityProbabilties); } } // -=-=--==-=-=-=---=-==-=--==-=-=-=- public static final 
Instance createInstance(RateMatrixGroup beforeSplitMatrices, RateMatrixGroup afterSplitMatrics, NeoParameterized acrossSplitParameters, SingleSplitDistribution probabilityModel, LHCalculator.Factory baseFactory, double splitTime) { int numberOfBaseCategories = beforeSplitMatrices.getNumberOfTransitionCategories(); DataType dt = beforeSplitMatrices.getDataType(); return new SimpleInstance(beforeSplitMatrices, afterSplitMatrics, acrossSplitParameters, probabilityModel, splitTime, baseFactory.createSeries( numberOfBaseCategories*numberOfBaseCategories, dt )); } public static final Instance createInstance(RateMatrixGroup beforeSplitMatrices, RateMatrixGroup afterSplitMatrics, NeoParameterized acrossSplitParameters, SingleSplitDistribution probabilityModel, double splitTime) { return createInstance(beforeSplitMatrices,afterSplitMatrics,acrossSplitParameters, probabilityModel,SimpleLHCalculator.getFactory(),splitTime); } public static final Instance createInstance(RateMatrixGroup beforeSplitMatrices, RateMatrixGroup afterSplitMatrics, NeoParameterized acrossSplitParameters, double splitTime) { return createInstance(beforeSplitMatrices,afterSplitMatrics, acrossSplitParameters,new SaturatedSingleSplitDistribution(beforeSplitMatrices.getNumberOfTransitionCategories()),SimpleLHCalculator.getFactory(),splitTime); } public static final Instance createInstance(RateMatrixGroup beforeSplitMatrices, RateMatrixGroup afterSplitMatrics, NeoParameterized acrossSplitParameters, double[] classProbabilities, double splitTime) { SingleSplitDistribution pm; int numberOfClasses = beforeSplitMatrices.getNumberOfTransitionCategories(); if(classProbabilities.length == numberOfClasses) { double[] result = new double[numberOfClasses*numberOfClasses]; for(int i = 0 ; i < numberOfClasses ; i++) { result[i*numberOfClasses+i] = classProbabilities[i]; } pm = new SaturatedSingleSplitDistribution(beforeSplitMatrices.getNumberOfTransitionCategories(),result ); } else { pm = new SaturatedSingleSplitDistribution( beforeSplitMatrices.getNumberOfTransitionCategories(),classProbabilities ); } return createInstance(beforeSplitMatrices,afterSplitMatrics,acrossSplitParameters,pm,SimpleLHCalculator.getFactory(),splitTime); } private static final class SimpleInstance implements Instance, java.io.Serializable { private final LHCalculator.Generator baseGenerator_; private final TotalModel totalModel_; public SimpleInstance(RateMatrixGroup beforeSplitModel, RateMatrixGroup afterSplitModel, NeoParameterized acrossSplitParameters, SingleSplitDistribution probabilityModel, double splitTime, LHCalculator.Generator baseGenerator) { this.totalModel_ = new TotalModel(beforeSplitModel,afterSplitModel,acrossSplitParameters, probabilityModel, splitTime,baseGenerator.createNewExternal()); this.baseGenerator_ = baseGenerator; } public NeoParameterized getSubstitutionModelParameterAccess() {return totalModel_; } public boolean hasSubstitutionModelParameters(){ return true; } public Leaf createNewLeaf(HeightConverter heightConverter, PatternInfo pattern, int[] patternStateMatchup) { return new LeafImpl( baseGenerator_.createNewLeaf( patternStateMatchup,pattern.getNumberOfPatterns() ), totalModel_.getCombineModel(), pattern.getNumberOfPatterns(), heightConverter ); } public External createNewExternal(HeightConverter heightConverter) { return new ExternalImpl( baseGenerator_.createNewExternal(), totalModel_.getCombineModel(), heightConverter); } public Internal createNewInternal(HeightConverter heightConverter) { return new InternalImpl( baseGenerator_, 
totalModel_.getCombineModel(), heightConverter ); } public ConditionalProbabilityStore createAppropriateConditionalProbabilityStore( boolean isForLeaf ) { return baseGenerator_.createAppropriateConditionalProbabilityStore(isForLeaf); } public String getSubstitutionModelSummary() { return "Model:"+totalModel_.getSummary(); } public NeoParameterized getParameterAccess() { return totalModel_; } } // -=-=--==-=-=-=---=-==-=--==-=-=-=- // ================================================================================================= // ================================ TotalModel ===================================================== // ================================================================================================= private final static class TotalModel implements java.io.Serializable, NeoParameterized { //Serialized variables private final SingleSplitDistribution probabilityModel_; private final CombineModel combineModel_; private final int numberOfProbabilityParameters_; private final int numberOfParameters_; public TotalModel(RateMatrixGroup beforeSplitMatrices, RateMatrixGroup afterSplitMatrices, NeoParameterized acrossSplitParameters, SingleSplitDistribution probabilityModel, double splitHeight, LHCalculator.External externalCalculator) { this.probabilityModel_ = probabilityModel; this.numberOfProbabilityParameters_ = probabilityModel.getNumberOfParameters(); this.combineModel_ = new CombineModel(this, beforeSplitMatrices, afterSplitMatrices, acrossSplitParameters, probabilityModel.getNumberOfBaseTransitionCategories(), splitHeight, externalCalculator); this.numberOfParameters_ = probabilityModel.getNumberOfParameters()+combineModel_.getNumberOfParameters(); } public final int getNumberOfTransitionCategories() { return combineModel_.getNumberOfTransitionCategories(); } public final double[][] getBaseCategoryProbabilities() { return probabilityModel_.getDistributionInfo(); } public final CombineModel getCombineModel() { return combineModel_; } public String getSummary() { return "Distribution:"+probabilityModel_+"\nModel:"+combineModel_.getSummary(); } public int getNumberOfParameters() { return numberOfParameters_; } public void setParameters(double[] parameters, int startIndex) { probabilityModel_.setParameters(parameters,startIndex); combineModel_.setParameters(parameters,startIndex+numberOfProbabilityParameters_); } public void getParameters(double[] parameterStore, int startIndex) { probabilityModel_.getParameters(parameterStore,startIndex); combineModel_.getParameters(parameterStore,startIndex+numberOfProbabilityParameters_); } public double getLowerLimit(int n) { if(n=beforeSplitBaseHeight || splitHeight_<=afterSplitBaseHeight_) { return false;} else { return true; } } private final void getSplitTransitionProbabilitiesDescendentImpl( boolean isTranspose, double[][][] tableStore) { if(isTranspose) { for( int first = 0; first=beforeSplitBaseHeight ) { //Use afterSplit Model getTransitionProbabilities(currentHeightConverter_.getExpectedSubstitutionDistance(afterSplitBaseHeight_,beforeSplitBaseHeight),afterSplitTransitionStore_, afterSplitMatrices_, isTranspose); for(int beforeSplit = 0 ; beforeSplit < numberOfBaseTransitionCategories_ ; beforeSplit++) { for(int afterSplit = 0 ; afterSplit < numberOfBaseTransitionCategories_ ; afterSplit++) { copy(afterSplitTransitionStore_[afterSplit], tableStore[resultIndex(beforeSplit,afterSplit)]); } } } else if(splitHeight_<=afterSplitBaseHeight_) { //Use beforeSplit model 
getTransitionProbabilities(currentHeightConverter_.getExpectedSubstitutionDistance(afterSplitBaseHeight_,beforeSplitBaseHeight),beforeSplitTransitionStore_,beforeSplitMatrices_,isTranspose); for(int beforeSplit = 0 ; beforeSplit < numberOfBaseTransitionCategories_ ; beforeSplit++) { for(int afterSplit = 0 ; afterSplit < numberOfBaseTransitionCategories_ ; afterSplit++) { copy(beforeSplitTransitionStore_[beforeSplit], tableStore[resultIndex(beforeSplit,afterSplit)]); } } } else { //split // System.out.println("Split:"+isAscendent_); getTransitionProbabilities(currentHeightConverter_.getExpectedSubstitutionDistance(afterSplitBaseHeight_, splitHeight_),afterSplitTransitionStore_, afterSplitMatrices_, isTranspose); getTransitionProbabilities(currentHeightConverter_.getExpectedSubstitutionDistance(splitHeight_, beforeSplitBaseHeight),beforeSplitTransitionStore_,beforeSplitMatrices_,isTranspose); if(isAscendent_) { getSplitTransitionProbabilitiesAscendentImpl(isTranspose,tableStore); } else { getSplitTransitionProbabilitiesDescendentImpl(isTranspose,tableStore); } } } private final void getTransitionProbabilitiesImpl(double branchLength, int category, double[][] tableStore, boolean isTranspose) { double beforeSplitBaseHeight = afterSplitBaseHeight_+branchLength; int beforeSplit = category/numberOfBaseTransitionCategories_; int afterSplit = category%numberOfBaseTransitionCategories_; if(splitHeight_>=beforeSplitBaseHeight ) { //Use afterSplit Model getTransitionProbabilities(currentHeightConverter_.getExpectedSubstitutionDistance(afterSplitBaseHeight_,beforeSplitBaseHeight),afterSplit, tableStore, afterSplitMatrices_, isTranspose); } else if(splitHeight_<=afterSplitBaseHeight_) { //Use beforeSplit model getTransitionProbabilities(currentHeightConverter_.getExpectedSubstitutionDistance(afterSplitBaseHeight_,beforeSplitBaseHeight),beforeSplit, tableStore, beforeSplitMatrices_, isTranspose); } else { //split getTransitionProbabilities(currentHeightConverter_.getExpectedSubstitutionDistance(afterSplitBaseHeight_,splitHeight_),afterSplit, afterSplitTransitionStore_[0], afterSplitMatrices_, isTranspose); getTransitionProbabilities(currentHeightConverter_.getExpectedSubstitutionDistance(splitHeight_,beforeSplitBaseHeight),beforeSplit, beforeSplitTransitionStore_[0],beforeSplitMatrices_,isTranspose); if(isAscendent_) { if( isTranspose ) { combineTranspose( afterSplitTransitionStore_[0], beforeSplitTransitionStore_[0], tableStore ); } else { combine( afterSplitTransitionStore_[0], beforeSplitTransitionStore_[0], tableStore ); } } else { if( isTranspose ) { combineTranspose( beforeSplitTransitionStore_[0], afterSplitTransitionStore_[0], tableStore ); } else { combine( beforeSplitTransitionStore_[0], afterSplitTransitionStore_[0], tableStore ); } } } } // -=-=-=-=-= public void getTransitionProbabilities(double branchLength, double[][][] tableStore) { checkRebuild(); getTransitionProbabilitiesImpl(branchLength,tableStore, false); } public void getTransitionProbabilities(double branchLength, int category, double[][] tableStore) { checkRebuild(); getTransitionProbabilitiesImpl(branchLength,category,tableStore,false); } public void getTransitionProbabilitiesTranspose(double branchLength, double[][][] tableStore) { checkRebuild(); getTransitionProbabilitiesImpl(branchLength,tableStore, true); } public void getTransitionProbabilitiesTranspose(double branchLength, int category, double[][] tableStore) { checkRebuild(); getTransitionProbabilitiesImpl(branchLength,category,tableStore,true); } public double[] 
getEquilibriumFrequencies() { return equilibriumFrequencies_; } public void addPalObjectListener(PalObjectListener l) { throw new RuntimeException("Not implemented yet!"); } public void removePalObjectListener(PalObjectListener l) { throw new RuntimeException("Not implemented yet!"); } public OrthogonalHints getOrthogonalHints() { return null; } public void report(PrintWriter out) { } public String getSummary() { checkRebuild(); final double[] afterSplitProbs = new double[numberOfBaseTransitionCategories_]; final double[] beforeSplitProbs = new double[numberOfBaseTransitionCategories_]; for(int i = 0 ; i < numberOfTransitionCategories_ ; i++) { int beforeSplit = i/numberOfBaseTransitionCategories_; int afterSplit = i%numberOfBaseTransitionCategories_; afterSplitProbs[afterSplit]+=overallCategoryProbabilities_[i]; beforeSplitProbs[beforeSplit]+=overallCategoryProbabilities_[i]; } return "Split height:"+splitHeight_+"\n"+ "Category probs:"+pal.misc.Utils.toString(overallCategoryProbabilities_)+"\n"+ "Before Split probs:"+pal.misc.Utils.toString(beforeSplitCategoryProbabilities_)+"\n"+ "Before Split probs (check):"+pal.misc.Utils.toString(beforeSplitProbs)+"\n"+ "After Split probs:"+pal.misc.Utils.toString(afterSplitCategoryProbabilities_)+"\n"+ "After Split probs (check):"+pal.misc.Utils.toString(afterSplitProbs)+"\n"+ "Parameters:"+parameters_.toString(); } public final Object clone() { throw new RuntimeException("Not implemented yet!"); } } //End of class CombineModel private static final void transpose(final double[][] matrix, final int numberOfStates) { for(int from = 0 ; from < numberOfStates ; from++) { for( int to = from; to numChilds) { num = 0; } if (num == numChilds) { return center; } else { return center.getChild(num); } } private int getKey(Node node) { int key; if (node.isLeaf()) { key = node.getNumber(); } else { key = node.getNumber() + tree.getExternalNodeCount(); } return key; } /** returns number of branches centered around an internal node */ private int getBranchCount(Node center) { if (center.isRoot()) { return center.getChildCount(); } else { return center.getChildCount()+1; } } private void traverseTree() { if ((!currentBranch.isLeaf() && down) || currentBranch.isRoot()) { currentBranch = currentBranch.getChild(0); down = true; } else { Node center = currentBranch.getParent(); currentBranch = getNextBranchOrRoot(currentBranch, center); if (currentBranch == center) { down = false; } else { down = true; } } } /** init partial likelihoods */ private void initPartials() { currentBranch = tree.getRoot(); down = true; Node firstBranch = currentBranch; do { if (currentBranch.isRoot()) { //do nothing } else if (currentBranch.isLeaf()) { partialsExternal(currentBranch); } else if (!down) { productPartials(currentBranch, currentBranch); partialsInternal(currentBranch, currentBranch); } traverseTree(); } while (currentBranch != firstBranch); } /** calculate likelihood of any tree and infer MAP estimates of rates at a site */ private void treeLikelihood() { initPartials(); Node center = tree.getRoot(); Node firstBranch = center.getChild(0); Node lastBranch = center.getChild(center.getChildCount()-1); double[][][] partial1 = getPartial(firstBranch); double[][][] partial2 = getPartial(lastBranch); productPartials(lastBranch, center); logL = 0; for (int l = 0; l < numPatterns; l++) { int bestR = 0; double maxSum = 0; double rsum = 0.0; for (int r = 0; r < numRates; r++) { double[] p1 = partial1[l][r]; double[] p2 = partial2[l][r]; double sum = 0.0; for (int d = 0; d < numStates; 
d++) { sum += frequency[d]*p1[d]*p2[d]; } sum *= categoryProbabilities_[r]; // find rate category that contributes the most if (r == 0) { bestR = 0; maxSum = sum; } else { if (sum > maxSum) { bestR = r; maxSum = sum; } } rsum += sum; } siteLogL[l] = Math.log(rsum); rateAtSite[l] = bestR; logL += siteLogL[l]*sitePattern.weight[l]; } } /** optimise branch lengths and find SEs (UnconstrainedTree) */ private void optimiseUnconstrainedTree(boolean optimise) { int numBranches = tree.getInternalNodeCount() + tree.getExternalNodeCount()-1; initPartials(); Node firstBranch = currentBranch; double len, lenOld, lenDiff; int nconv = 0; int numRounds = 0; double lenSE; double INVARC = 1.0/(BranchLimits.MAXARC*BranchLimits.MAXARC); do { if (currentBranch.isRoot()) { // do nothing } else if (currentBranch.isLeaf()) { productPartials(currentBranch, currentBranch.getParent()); bl.setBranch(currentBranch); lenOld = currentBranch.getBranchLength(); //optimise if (optimise) { len = um.findMinimum(lenOld, bl, BranchLimits.FRACDIGITS); currentBranch.setBranchLength(len); } else { // find corresponding SE len = lenOld; lenSE = NumericalDerivative.secondDerivative(bl, lenOld); if (INVARC < lenSE) lenSE = Math.sqrt(1.0/lenSE); else lenSE = BranchLimits.MAXARC; currentBranch.setBranchLengthSE(lenSE); } // check progress lenDiff = Math.abs(len-lenOld); if (lenDiff < BranchLimits.ABSTOL) nconv++; else nconv = 0; if (nconv >= numBranches || numRounds == MAXROUNDS) { bl.evaluate(len); break; } // update partials partialsExternal(currentBranch); } else if (down) { productPartials(currentBranch, currentBranch.getParent()); partialsInternal(currentBranch, currentBranch.getParent()); } else // !down { productPartials(currentBranch, currentBranch); bl.setBranch(currentBranch); lenOld = currentBranch.getBranchLength(); //optimise if (optimise) { len = um.findMinimum(lenOld, bl, BranchLimits.FRACDIGITS); currentBranch.setBranchLength(len); } else { // find corresponding SE len = lenOld; lenSE = NumericalDerivative.secondDerivative(bl, lenOld); if (INVARC < lenSE) lenSE = Math.sqrt(1.0/lenSE); else lenSE = BranchLimits.MAXARC; currentBranch.setBranchLengthSE(lenSE); } // check progress lenDiff = Math.abs(len-lenOld); if (lenDiff < BranchLimits.ABSTOL) nconv++; else nconv = 0; if (nconv >= numBranches || numRounds == MAXROUNDS) { bl.evaluate(len); break; } // update branch length and partials partialsInternal(currentBranch, currentBranch); } traverseTree(); if (currentBranch == firstBranch) numRounds++; } while (true); } private Vector shortBranches = null; /** collapse internal branches that are close to zero */ private int collapseShortInternalBranches() { // minus 1 because root node has no own branch int numInternalBranches = tree.getInternalNodeCount()-1; int numShortBranches = 0; for (int i = 0; i < numInternalBranches; i++) { Node b = tree.getInternalNode(i); if (b.getBranchLength() <= 2*BranchLimits.MINARC) { numShortBranches++; NodeUtils.removeBranch(b); if (shortBranches == null) shortBranches = new Vector(); shortBranches.addElement(b); } } //numParams = numParams - numShortBranches; tree.createNodeList(); return numShortBranches; } /** restore internal branches */ private int restoreShortInternalBranches() { int size = 0; if (shortBranches != null) { size = shortBranches.size(); for (int i = size-1; i >= 0; i--) { Node node = (Node) shortBranches.elementAt(i); NodeUtils.restoreBranch(node); node.setBranchLength(BranchLimits.MINARC); node.setNodeHeight(node.getParent().getNodeHeight()-BranchLimits.MINARC); 
shortBranches.removeElementAt(i); } } //numParams = numParams+size; tree.createNodeList(); return size; } /** optimise branch lengths (ClockTree) */ private void optimiseClockTree(boolean datedTips) { throw new RuntimeException("not implemented anymore"); // int numNodes = tree.getInternalNodeCount(); // // double MAXHEIGHT = numNodes*BranchLimits.MAXARC; // // initPartials(); // // Node firstBranch = currentBranch; // double h, hOld, hDiff, hMin, hMax, hSE; // int nconv = 0; // // int numRounds = 0; // // double INVMAX = 1.0/(MAXHEIGHT*MAXHEIGHT); // do // { // if (currentBranch.isRoot()) // { // if (datedTips && numRounds > 0) // { // // in the first round we did not adjust the rate // // so we assume that the likelihood has not converged // if (numRounds == 1) nconv = 0; // //nconv = 0; // // double oldLogL = logL; // // // optimise rate // DatedTipsClockTree dtree = (DatedTipsClockTree) ptree; // double rOld = dtree.getRate(); // double maxR = dtree.getMaxRate(); // double r = um.findMinimum(rOld, rl); // rl.evaluate(r); // // // find corresponding SE // double rSE = um.f2minx; // if (1 < rSE) // rSE = Math.sqrt(1.0/rSE); // else // rSE = 1; // dtree.setRateSE(rSE); // // // check progress // /*double logLDiff = Math.abs(logL-oldLogL); // if (logLDiff > 0.001) // { // // reset // nconv = 0; // }*/ // } // // // min-max heights // hMin = NodeUtils.findLargestChild(currentBranch)+BranchLimits.MINARC; // hMax = MAXHEIGHT-BranchLimits.MINARC; // // //optimise // nl.setBranch(currentBranch, hMin, hMax); // hOld = currentBranch.getNodeHeight(); // h = um.findMinimum(hOld, nl, BranchLimits.FRACDIGITS); // nl.evaluate(h); // // // find corresponding SE // hSE = um.f2minx; // if (INVMAX < hSE) // hSE = Math.sqrt(1.0/hSE); // else // hSE = MAXHEIGHT; // //currentBranch.setNodeHeightSE(hSE); // if (currentBranch instanceof AttributeNode) { // ((AttributeNode)currentBranch).setAttribute(AttributeNode.NODE_HEIGHT_SE, new Double(hSE)); // } // // // check progress // hDiff = Math.abs(h-hOld); // if (hDiff < BranchLimits.ABSTOL) nconv++; // else nconv = 0; // // if (nconv >= numNodes || numRounds == MAXROUNDS) // { // break; // } // } // else if (currentBranch.isLeaf()) // { // productPartials(currentBranch, currentBranch.getParent()); // partialsExternal(currentBranch); // } // else if (down) // { // productPartials(currentBranch, currentBranch.getParent()); // // // min-max heights // hMin = NodeUtils.findLargestChild(currentBranch)+BranchLimits.MINARC; // hMax = currentBranch.getParent().getNodeHeight()-BranchLimits.MINARC; // // //optimise // nl.setBranch(currentBranch, hMin, hMax); // hOld = currentBranch.getNodeHeight(); // h = um.findMinimum(hOld, nl, BranchLimits.FRACDIGITS); // nl.evaluate(h); // // // find corresponding SE // hSE = um.f2minx; // if (INVMAX < hSE) // hSE = Math.sqrt(1.0/hSE); // else // hSE = MAXHEIGHT; // //currentBranch.setNodeHeightSE(hSE); // if (currentBranch instanceof AttributeNode) { // ((AttributeNode)currentBranch).setAttribute(AttributeNode.NODE_HEIGHT_SE, new Double(hSE)); // } // // // check progress // hDiff = Math.abs(h-hOld); // if (hDiff < BranchLimits.ABSTOL) nconv++; // else nconv = 0; // // if (nconv >= numNodes || numRounds == MAXROUNDS) // { // break; // } // // partialsInternal(currentBranch, currentBranch.getParent()); // } // else // !down // { // productPartials(currentBranch, currentBranch); // partialsInternal(currentBranch, currentBranch); // } // // traverseTree(); // // if (currentBranch == firstBranch) numRounds++; // } // while (true); } } 
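// Illustrative sketch (not part of the PAL sources): the branch-length standard errors computed in
// optimiseUnconstrainedTree() above follow the usual curvature argument, SE ~ 1/sqrt(d^2(-lnL)/dl^2)
// evaluated at the optimised length, capped at BranchLimits.MAXARC when the curvature is too flat.
// The class and method names below are hypothetical and exist only to spell that calculation out;
// PAL itself uses NumericalDerivative.secondDerivative. UnivariateFunction is the pal.math interface
// already used unqualified in this file (see RateLikelihood below).
class BranchLengthErrorSketch {
	/** Central-difference estimate of the second derivative of f at x, using step size h. */
	static double secondDerivative(UnivariateFunction f, double x, double h) {
		return (f.evaluate(x + h) - 2.0 * f.evaluate(x) + f.evaluate(x - h)) / (h * h);
	}
	/**
	 * Standard error from the observed information, i.e. the second derivative of the negative
	 * log-likelihood at the maximum-likelihood estimate. A non-positive curvature indicates a flat
	 * (or mis-converged) optimum, for which no finite error is returned.
	 */
	static double approximateSE(UnivariateFunction negativeLogLikelihood, double mle) {
		double information = secondDerivative(negativeLogLikelihood, mle, 1e-4);
		return (information > 0.0) ? Math.sqrt(1.0 / information) : Double.POSITIVE_INFINITY;
	}
}
// For example, approximateSE(bl, len) would mirror (up to the step size and the MAXARC cap) the
// lenSE value assigned above.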
class RateLikelihood implements UnivariateFunction { public RateLikelihood(LikelihoodValue lv) { this.lv = lv; update(); } public void update() { dtree = (MutationRateModelTree) lv.ptree; } public double evaluate(double param) { // set rate parameters dtree.setParameter(param,0); return -lv.compute(); } public double getLowerBound() { return 0; } public double getUpperBound() { throw new RuntimeException("BROKEN!"); // return dtree.getMaxRate(); } // private stuff private LikelihoodValue lv; private MutationRateModelTree dtree; } class TreeLikelihood implements MultivariateFunction { public TreeLikelihood(LikelihoodValue lv) { this.lv = lv; } public double evaluate(double[] params) { // set tree parameters for (int i = 0; i < lv.numParams; i++) { lv.ptree.setParameter(params[i], i); } return -lv.compute(); } public int getNumArguments() { return lv.numParams; } public double getLowerBound(int n) { return lv.ptree.getLowerLimit(n); } public double getUpperBound(int n) { return lv.ptree.getUpperLimit(n); } // private stuff private LikelihoodValue lv; /** * @note Not implemented * @return null */ public OrthogonalHints getOrthogonalHints() { return null; } } class ModelLikelihood implements MultivariateFunction { public ModelLikelihood(LikelihoodValue lv) { this.lv = lv; this.model_ = lv.getModel(); } public double evaluate(double[] params) { // set tree parameters for (int i = 0; i < lv.numParams; i++) { model_.setParameter(params[i], i); } return -lv.compute(); } public int getNumArguments() { return model_.getNumParameters(); } public double getLowerBound(int n) { return model_.getLowerLimit(n); } public double getUpperBound(int n) { return model_.getUpperLimit(n); } /** * @note Not implemented * @return null */ public OrthogonalHints getOrthogonalHints() { return null; } // private stuff private LikelihoodValue lv; private SubstitutionModel model_; //Cached results } /** Basically for cobmining model and tree likelihood optimising functions */ class CombinedLikelihood implements MultivariateFunction { public CombinedLikelihood(MultivariateFunction f1, MultivariateFunction f2, LikelihoodValue lv) { this.f1_ = f1; this.f2_ = f2; this.f1Params_ = new double[f1.getNumArguments()]; this.f2Params_ = new double[f2.getNumArguments()]; } public double evaluate(double[] params) { for(int i = 0 ; i < f1Params_.length ; i++) { f1Params_[i] = params[i]; } for(int i = 0 ; i < f2Params_.length ; i++) { f2Params_[i] = params[i-f1Params_.length]; } return -lv.compute(); } public int getNumArguments() { return f1Params_.length+f2Params_.length; } public double getLowerBound(int n) { if(nTitle: SimpleUnconstrainedLikelihoodModel

 *
 * Description: A wrapper around LHCalculator stuff
 *
* @author Matthew Goode * @version 1.0 */ import pal.misc.*; import pal.substmodel.*; public class SimpleUnconstrainedLikelihoodModel implements UnconstrainedLikelihoodModel { private static final class ExternalImpl implements External { private final LHCalculator.External base_; private final SubstitutionModel model_; public ExternalImpl(LHCalculator.External base, SubstitutionModel model) { this.base_ = base; this.model_ = model; } public void calculateFlat( PatternInfo centerPattern, ConditionalProbabilityStore leftConditionalProbabilities, ConditionalProbabilityStore rightConditionalProbabilities, ConditionalProbabilityStore resultStore ) { base_.calculateFlat(centerPattern, leftConditionalProbabilities, rightConditionalProbabilities, resultStore); } public void calculateExtended( double distance, PatternInfo centerPattern, ConditionalProbabilityStore leftConditionalProbabilities, ConditionalProbabilityStore rightConditionalProbabilities, ConditionalProbabilityStore resultStore ) { base_.calculateExtended(distance,model_,centerPattern,leftConditionalProbabilities,rightConditionalProbabilities,resultStore); } public void calculateSingleExtendedDirect( double distance, int numberOfPatterns, ConditionalProbabilityStore conditionalProbabilities ) { base_.calculateSingleExtendedDirect(distance,model_,numberOfPatterns,conditionalProbabilities); } public void calculateSingleExtendedIndirect( double distance, int numberOfPatterns, ConditionalProbabilityStore baseConditionalProbabilities, ConditionalProbabilityStore resultConditionalProbabilities ) { base_.calculateSingleExtendedIndirect(distance,model_,numberOfPatterns,baseConditionalProbabilities,resultConditionalProbabilities); } public double calculateLogLikelihood( double distance, PatternInfo centerPattern, ConditionalProbabilityStore leftFlatConditionalProbabilities, ConditionalProbabilityStore rightFlatConditionalProbabilities, ConditionalProbabilityStore tempStore ) { return base_.calculateLogLikelihood(distance,model_,centerPattern,leftFlatConditionalProbabilities,rightFlatConditionalProbabilities,tempStore); } public double calculateLogLikelihood( PatternInfo centerPattern, ConditionalProbabilityStore leftConditionalProbabilities, ConditionalProbabilityStore rightConditionalProbabilities ) { return base_.calculateLogLikelihood(model_,centerPattern,leftConditionalProbabilities,rightConditionalProbabilities); } public double calculateLogLikelihoodSingle( int[] patternWeights, int numberOfPatterns, ConditionalProbabilityStore conditionalProbabilityStore) { return base_.calculateLogLikelihoodSingle(model_,patternWeights,numberOfPatterns,conditionalProbabilityStore); } public SiteDetails calculateSiteDetailsRooted( PatternInfo centerPattern, ConditionalProbabilityStore leftConditionalProbabilitiesStore, ConditionalProbabilityStore rightConditionalProbabilitiesStore ) { return base_.calculateSiteDetailsRooted(model_,centerPattern,leftConditionalProbabilitiesStore,rightConditionalProbabilitiesStore); } public SiteDetails calculateSiteDetailsUnrooted( double distance, PatternInfo centerPattern, ConditionalProbabilityStore leftConditionalProbabilitiesStore, ConditionalProbabilityStore rightConditionalProbabilitiesStore, ConditionalProbabilityStore tempStore ) { return base_.calculateSiteDetailsUnrooted(distance,model_,centerPattern,leftConditionalProbabilitiesStore,rightConditionalProbabilitiesStore,tempStore); } } //End of class ExternalImpl // ================================================================================================= 
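// Each of the wrappers in this class follows the same pattern as ExternalImpl above: it binds the
// fixed SubstitutionModel to the model-agnostic LHCalculator component and forwards every call with
// that model supplied.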
// ================= Internal ====================================================================== // ================================================================================================= public static final class InternalImpl implements Internal { private final LHCalculator.Internal base_; private final SubstitutionModel model_; public InternalImpl(LHCalculator.Internal base, SubstitutionModel model) { this.base_ = base; this.model_ = model; } public ConditionalProbabilityStore calculateFlat( PatternInfo centerPattern, ConditionalProbabilityStore leftConditionalProbabilities, ConditionalProbabilityStore rightConditionalProbabilities ) { return base_.calculateFlat(centerPattern,leftConditionalProbabilities,rightConditionalProbabilities); } public ConditionalProbabilityStore calculateExtended( double distance, PatternInfo centerPattern, final ConditionalProbabilityStore leftConditionalProbabilities, final ConditionalProbabilityStore rightConditionalProbabilities) { return base_.calculateExtended(distance,model_,centerPattern,leftConditionalProbabilities,rightConditionalProbabilities,true); } } //End of class InternalImpl // ================================================================================================= // ================= Leaf ========================================================================== // ================================================================================================= public static final class LeafImpl implements Leaf { private final LHCalculator.Leaf base_; private final SubstitutionModel model_; public LeafImpl(LHCalculator.Leaf base, SubstitutionModel model) { this.base_ = base; this.model_ = model; } public ConditionalProbabilityStore getFlatConditionalProbabilities() { return base_.getFlatConditionalProbabilities(); } public ConditionalProbabilityStore getExtendedConditionalProbabilities( double distance) { return base_.getExtendedConditionalProbabilities(distance,model_,true); } public Leaf getCopy() { return new LeafImpl(base_.getCopy(), model_); } } private static final class InstanceImpl implements Instance { private final LHCalculator.Generator base_; private final SubstitutionModel model_; private final NeoParameterized parameterAccess_; public InstanceImpl(LHCalculator.Generator base, SubstitutionModel model) { this.base_ = base; this.model_ = model; this.parameterAccess_ = new ParameterizedNeoWrapper(model); } public Leaf createNewLeaf(int[] patternStateMatchup, int numberOfPatterns) { return new LeafImpl(base_.createNewLeaf(patternStateMatchup,numberOfPatterns),model_); } public External createNewExternal() { return new ExternalImpl(base_.createNewExternal(),model_); } public Internal createNewInternal() { return new InternalImpl(base_.createNewInternal(),model_); } public boolean isAllowCaching() { return base_.isAllowCaching(); } public ConditionalProbabilityStore createAppropriateConditionalProbabilityStore( boolean isForLeaf ) { return base_.createAppropriateConditionalProbabilityStore(isForLeaf); } public String getSubstitutionModelSummary() { return model_.toString(); } public NeoParameterized getParameterAccess() { return parameterAccess_; } } /** * Create a SimpleUnconstrainedLikelihoodModel instance * * @param base The base LHCalculator generator to utilise * @param model The substitution model * @return An appropriate UnconstrianedLikelihoodModel instance */ public static final Instance createInstance(LHCalculator.Generator base, SubstitutionModel model) { return new InstanceImpl(base,model); } /** * 
Create a SimpleUnconstrainedLikelihoodModel instance * * @param base The base LHCalculator generator to utilise * @param model The substitution model * @return An appropriate UnconstrianedLikelihoodModel instance */ public static final Instance createInstance(LHCalculator.Factory base, SubstitutionModel model) { return new InstanceImpl(base.createSeries(model.getNumberOfTransitionCategories(),model.getDataType()),model); } }pal-1.5.1/src/pal/eval/DemographicLikelihoodValue.java0000644000000000000000000000240110141732034021340 0ustar rootroot// DemographicLikelihoodValue.java // // (c) 1999-2001 PAL Development Core Team // // This package may be distributed under the // terms of the Lesser GNU General Public License (LGPL) // - partial likelihoods need a lot of memory storage // memory usage could be opimized by working in a single site package pal.eval; import pal.alignment.*; import pal.coalescent.*; /** * Estimates the likelihood for a tree using a specified * model of sequence evolution and a sequence alignment and * a specific demographic model as a prior on coalescent intervals. * * Must be used in conjunction with DemographicClockTree! * * @version $Id: DemographicLikelihoodValue.java,v 1.2 2001/07/13 14:39:13 korbinian Exp $ * * @author Alexei Drummond */ public class DemographicLikelihoodValue extends LikelihoodValue { // // Public stuff // /** * Parameter taking a site pattern. */ public DemographicLikelihoodValue(SitePattern sp) { super(sp); } /** * compute log-likelihood * for current branch lengths and model * * return negative log-likelihood */ public double compute() { super.compute(); logL += ((DemographicTree)tree).computeDemoLogLikelihood(); return -logL; } } pal-1.5.1/src/pal/eval/SimpleLikelihoodCalculator.java0000644000000000000000000002136310141732164021400 0ustar rootroot// SimpleLikelihoodCalculator.java // // (c) 1999-2001 PAL Development Core Team // // This package may be distributed under the // terms of the Lesser GNU General Public License (LGPL) package pal.eval; /** * Only to be used by one thread. Based on the LikelihoodValue class but does not allow the use of * a rate distribution. * @author Korbinian Strimmer * @author Matthew Goode * @deprecated see new likelihood framework */ import pal.alignment.*; import pal.datatype.*; import pal.substmodel.*; import pal.tree.*; public class SimpleLikelihoodCalculator implements LikelihoodCalculator { SitePattern sitePattern_; Tree tree_; RateMatrix model_; DataType patternDatatype_; boolean modelChanged_ = false; /* Work variables */ private double[][][] partials_; /** [numberOfNodes] [numberOfPatterns] [numberOfStates] */ /* Cached variables */ private int numberOfStates_; private int numberOfPatterns_; //private int numberOfNodes_; private double[] frequency_; /** log-likelihood for each site pattern */ private double[] siteLogL_; /** Need to use setTree(), and setModel() before using compute() if you use this constructor */ public SimpleLikelihoodCalculator(SitePattern pattern) { setPattern(pattern); } private void setPattern(SitePattern pattern) { this.sitePattern_ = pattern; this.patternDatatype_ = sitePattern_.getDataType(); numberOfPatterns_ = sitePattern_.numPatterns; siteLogL_ = new double[numberOfPatterns_]; } public SimpleLikelihoodCalculator(SitePattern pattern, Tree tree, RateMatrix model) { setPattern(pattern); setTree(tree); setRateMatrix(model); } /** * Doesn't do anything... 
*/ public void release() { } /** * compute log-likelihood for current tree (fixed branch lengths and model) * * return log-likelihood */ public double calculateLogLikelihood() { return treeLikelihood(); } public SitePattern getSitePattern() { return sitePattern_; } public Tree getTree() { return tree_; } /** * define model * (a site pattern must have been set before calling this method) * * @param m model of substitution (rate matrix + rate distribution) */ public void setRateMatrix(RateMatrix m) { if(m==null) { throw new RuntimeException("Assertion error : SetModel called with null model!"); } model_ = m; frequency_ = model_.getEquilibriumFrequencies(); numberOfStates_ = model_.getDataType().getNumStates(); int maxNodes = 2*sitePattern_.getSequenceCount()-2; allocatePartialMemory(maxNodes); } /** * define tree *,(must only be called only after a site pattern has been defined). * * @param t tree */ public void setTree(Tree t) { tree_ = t; if(t==null) { throw new RuntimeException("Assertion error : SetTree called with null tree!"); } // Assign sequences to leaves int[] alias = TreeUtils.mapExternalIdentifiers(sitePattern_, tree_); for (int i = 0; i < tree_.getExternalNodeCount(); i++) { tree_.getExternalNode(i).setSequence(sitePattern_.pattern[alias[i]]); } } public final void modelUpdated() { setRateMatrix(model_); } public final void treeUpdated() { setTree(tree_); } //=============================================================================== //======================= Non Public Stuff ====================================== //=============================================================================== private void allocatePartialMemory(int numberOfNodes) { // I love the profiler! // This 'if' statement sped my MCMC algorithm up by nearly 300% // Never underestimate the time it takes to allocate and de-allocate memory! 
// AD if ( (partials_ == null) || (numberOfNodes != partials_.length) || (numberOfPatterns_ != partials_[0].length) || (numberOfStates_ != partials_[0][0].length)) { partials_ = new double[numberOfNodes][numberOfPatterns_][numberOfStates_]; } } private int getKey(Node node) { if (node.isLeaf()) { return node.getNumber(); } return node.getNumber() + tree_.getExternalNodeCount(); } /** get partial likelihood of a branch */ protected double[][] getPartial(Node branch) { return partials_[getKey(branch)]; } /** get next branch around a center node (center may be root, and root may also be returned) */ private Node getNextBranchOrRoot(Node branch, Node center) { int numChilds = center.getChildCount(); int num; for (num = 0; num < numChilds; num++) { if (center.getChild(num) == branch) { break; } } // num is now child number (if num = numChilds then branch == center) // next node num++; if (num > numChilds) { num = 0; } if (num == numChilds) { return center; } else { return center.getChild(num); } } /** get next branch around a center node (center may be root, but root is never returned) */ protected Node getNextBranch(Node branch, Node center) { Node b = getNextBranchOrRoot(branch, center); if (b.isRoot()) { b = b.getChild(0); } return b; } /** multiply partials into the neighbour of branch */ protected void productPartials( Node center) { int numBranches = NodeUtils.getUnrootedBranchCount(center); Node nextBranch = center.getChild(0); double[][] partial = getPartial(nextBranch); for (int i = 1; i < center.getChildCount(); i++) { nextBranch = center.getChild(i); double[][] partial2 = getPartial(nextBranch); for (int patternIndex = 0; patternIndex < numberOfPatterns_; patternIndex++) { double[] p = partial[patternIndex]; double[] p2 = partial2[patternIndex]; for (int state = 0; state < numberOfStates_; state++) { p[state] *= p2[state]; } } } } /** compute partials for branch around center node (it is assumed that multiplied partials are available in the neighbor branch) */ protected void partialsInternal( Node center) { double[][] partial = getPartial(center); double[][] multPartial = getPartial(center.getChild(0)); model_.setDistance(center.getBranchLength()); for (int l = 0; l < numberOfPatterns_; l++) { double[] p = partial[l]; double[] mp = multPartial[l]; for (int d = 0; d < numberOfStates_; d++) { double sum = 0; for (int j = 0; j < numberOfStates_; j++) { sum += model_.getTransitionProbability(d, j)*mp[j]; } p[d] = sum; } } } /** compute partials for external branch */ protected void partialsExternal(Node branch) { double[][] partial = getPartial(branch); byte[] seq = branch.getSequence(); model_.setDistance(branch.getBranchLength()); for (int patternIndex = 0; patternIndex < numberOfPatterns_; patternIndex++) { double[] p = partial[patternIndex]; int endState = seq[patternIndex]; if(patternDatatype_.isUnknownState(endState)) { //A neater way of writing things but it may slow things down... //if (endState == numberOfStates_) { //Is this an gap? (A gap should be registered as unknown!) 
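// Unknown states (including gaps) are uninformative, so every possible ancestral state at this tip receives a partial likelihood of 1.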
for (int startState = 0; startState < numberOfStates_; startState++) { p[startState] = 1; } } else { for (int startState = 0; startState < numberOfStates_; startState++) { p[startState] = model_.getTransitionProbability( startState, endState); } } } } private void traverseTree(Node currentNode){ if(currentNode.isLeaf()){ partialsExternal(currentNode); } else { for(int i = 0 ; i < currentNode.getChildCount() ; i++) { traverseTree(currentNode.getChild(i)); } if(!currentNode.isRoot()) { productPartials(currentNode); partialsInternal(currentNode); } } } /** returns number of branches centered around an internal node */ private int getBranchCount(Node center) { if (center.isRoot()) { return center.getChildCount(); } else { return center.getChildCount()+1; } } /** calculate likelihood of any tree and infer MAP estimates of rates at a site */ private double treeLikelihood() { //initPartials(); Node center = tree_.getRoot(); traverseTree(center); Node firstBranch = center.getChild(0); Node lastBranch = center.getChild(center.getChildCount()-1); double[][] partial1 = getPartial(firstBranch); productPartials(center); double logL = 0; for (int patternIndex = 0; patternIndex < numberOfPatterns_; patternIndex++) { double sum = 0; double[] p1 = partial1[patternIndex]; for (int d = 0; d < numberOfStates_; d++) { sum += frequency_[d]*p1[d]; } siteLogL_[patternIndex] = Math.log(sum); logL += siteLogL_[patternIndex]*sitePattern_.weight[patternIndex]; } return logL; } } pal-1.5.1/src/pal/eval/FastLikelihoodCalculator.java0000644000000000000000000003404407536661264021064 0ustar rootroot// FastLikelihoodCalculator.java // // (c) 1999-2001 PAL Development Core Team // // This package may be distributed under the // terms of the Lesser GNU General Public License (LGPL) package pal.eval; import pal.tree.*; import pal.alignment.*; import pal.substmodel.*; import pal.datatype.*; import pal.misc.*; import java.util.*; /** * Title: Fast Likelihood Calculator
* Description: A fast likelihood calculator
* Original code by Matthew Goode. This calculates the likelihood of similar trees * (or a single changing tree) on the same site pattern fast by remembering * partial likelihoods of invariant subtrees. * * This class should be avoided if the underlying tree doesn't change (use GeneralLikelihoodCalculator). * A new framework will be added one day to accomodate optimising tree topologies. * * @author Matthew Goode * @author Alexei Drummond * @version $Id: FastLikelihoodCalculator.java,v 1.11 2002/09/08 03:46:12 matt Exp $ */ public class FastLikelihoodCalculator implements PalObjectListener, LikelihoodCalculator { /** the root node of the cached tree. */ RootNode root_; /** true if the rate matrix has changed. */ boolean modelChanged_ = false; /** the number of sites in the site pattern. */ int numberOfSites_; int numberOfStates_; /** the rate matrix currently being used. */ RateMatrix model_; /** the site pattern for which likelihood is calculated. */ SitePattern sitePattern_; /** * the difference threshold within which * different branch lengths are deemed the same. */ private static double THRESHOLD = 1e-12; /** * Constructor taking only site pattern.
* NOTE: setTree and setRateMatrix must both be called (in that order) before * computeLikelihood. */ public FastLikelihoodCalculator(SitePattern pattern) { sitePattern_ = pattern; numberOfSites_ = pattern.getNumberOfPatterns(); numberOfStates_ = pattern.getDataType().getNumStates(); } /** * Constructor taking site pattern, tree and model. */ public FastLikelihoodCalculator(SitePattern pattern, Tree tree, RateMatrix model) { this(pattern); setTree(tree); setRateMatrix(model); } public void parametersChanged(PalObjectEvent pe) { modelChanged_ = true; } public void structureChanged(PalObjectEvent pe) { modelChanged_ = true; } public final void setRateMatrix(RateMatrix rateMatrix) { if ((model_ == null) || (model_ != rateMatrix)) { this.model_ = rateMatrix; model_.addPalObjectListener(this); root_.setModel(model_); modelChanged_ = true; } // same rate matrix, do nothing } public void release() { try{ model_.removePalObjectListener(this); model_ = null; } catch(NullPointerException e) {} } public final void setTree(Tree t) { if (root_ == null) { root_ = new RootNode(t.getRoot()); } else { NNode newNode = root_.switchNodes(t.getRoot()); if(newNode!=root_) { throw new RuntimeException("Assertion error : new tree generates new Root NNode (tree probably contains only one branch)"); } // must call this to generate transition prob arrays for new nodes! root_.setModel(model_); } // could be more efficient root_.setupSequences(sitePattern_); } public final void updateSitePattern(SitePattern pattern) { sitePattern_ = pattern; root_.setupSequences(pattern); if(pattern.numPatterns!=numberOfSites_) { numberOfSites_ = pattern.numPatterns; root_.setModel(model_); modelChanged_ = true; } } /** * @return the likelihood of this tree under the given model and data. */ public double calculateLogLikelihood() { double lkl = root_.computeLikelihood(); return lkl; } final NNode create(Node peer) { if(peer.getChildCount()==0) { return new LeafNode(peer); } return new InternalNode(peer); } //===================================================================== // // Abstract NNODE // //===================================================================== abstract class NNode { private double[][] transitionProbs_; double lastLength_ = Double.NEGATIVE_INFINITY; Node peer_; private byte[] sequence_; private double[][] siteStateProbabilities_;/** Site/State */ public NNode(Node peer) { this.peer_ = peer; } public void setModel(RateMatrix rm) { // only create these arrays if they are null // or the wrong size if ((transitionProbs_ == null) || (numberOfStates_ != transitionProbs_.length)) { transitionProbs_ = new double[numberOfStates_][numberOfStates_]; siteStateProbabilities_ = new double[numberOfSites_][numberOfStates_]; } } protected void setPeer(Node newPeer) { this.peer_ = newPeer; } public final boolean isBranchLengthChanged() { return Math.abs(peer_.getBranchLength()-lastLength_) > THRESHOLD; //return peer_.getBranchLength()!=lastLength_; } protected final double[][] getSiteStateProbabilities() { return siteStateProbabilities_; } public final void setSequence(byte[] sequence) { this.sequence_ = pal.misc.Utils.getCopy(sequence); for(int i = 0 ; i < sequence_.length ; i++) { if(sequence[i]>=numberOfStates_) { sequence_[i] = -1; } } } public final boolean hasSequence() { return this.sequence_!=null; } public final byte[] getSequence() { return this.sequence_; } protected double[][] getTransitionProbabilities() { if(modelChanged_||isBranchLengthChanged()) { double distance = peer_.getBranchLength(); 
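// Recompute the cached transition matrix only when the model has changed or this branch's length has moved by more than THRESHOLD.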
model_.setDistance(distance); model_.getTransitionProbabilities(transitionProbs_); lastLength_ = distance; } return transitionProbs_; } protected double[][] getTransitionProbabilitiesReverse() { if(modelChanged_||isBranchLengthChanged()) { double distance = peer_.getBranchLength(); model_.setDistance(distance); model_.getTransitionProbabilities(transitionProbs_); lastLength_ = distance; } return transitionProbs_; } private String toString(byte[] bs) { char[] cs = new char[bs.length]; for(int i = 0 ; i < cs.length ; i++) { cs[i] = (char)('A'+bs[i]); } return new String(cs); } public void setupSequences(SitePattern sp) { Identifier id = peer_.getIdentifier(); if(id!=null) { int number = sp.whichIdNumber(id.getName()); if(number>=0) { if (sequence_ == null) { sequence_ = new byte[sp.pattern[number].length]; } //System.arraycopy(sp.pattern[number],0,sequence_,0,sequence_.length); byte[] pattern = sp.pattern[number]; for(int i = 0 ; i < sequence_.length ; i++) { if(pattern[i]>=numberOfStates_) { sequence_[i] = -1; } else { sequence_[i] = pattern[i]; } } } } } /** Can return null if no change from previous call */ abstract public double[][] calculateSiteStateProbabilities(); abstract public LeafNode[] getLeafNodes(); /** For dynamically switching tree */ abstract public NNode switchNodes(Node n); } //========================================================================================== // // LEAF NODE // //============================== class LeafNode extends NNode { public LeafNode(Node peer) { super(peer); } public double computeLikelihood() { return 0; } public boolean isLeaf() { return true; } protected final void setPeer(Node newPeer) { // if this is a different tip then force recalculate! if (!peer_.getIdentifier().getName().equals(newPeer.getIdentifier().getName())) { lastLength_ = Double.NEGATIVE_INFINITY; } this.peer_ = newPeer; } public NNode switchNodes(Node n) { if(n.getChildCount()==0) { setPeer(n); return this; } return create(n); } /** * Return all the leaf nodes in the tree defined by this node as the root (including this node) */ public LeafNode[] getLeafNodes() { return new LeafNode[] {this}; } public double[][] calculateSiteStateProbabilities() { if(!modelChanged_&&!isBranchLengthChanged()) { return null; } byte[] sequence = getSequence(); double[][] probs = getTransitionProbabilitiesReverse(); double[][] siteStateProbs = getSiteStateProbabilities(); for(int site = 0 ; site < sequence.length ; site++) { int endState = sequence[site]; if(endState<0) { for(int startState = 0 ; startState < numberOfStates_ ; startState++) { siteStateProbs[site][startState] = 1; } } else { //System.arraycopy(probs[eState],0,siteStateProbs[site],0,numberOfStates_); for(int startState = 0 ; startState < numberOfStates_ ; startState++) { siteStateProbs[site][startState] = probs[startState][endState]; } } } return siteStateProbs; } } //===================================================================== // // Internal NODE // //===================================================================== class InternalNode extends NNode{ private NNode[] children_; private double[][][] childSiteStateProbs_; double[] endStateProbs_; public InternalNode(Node peer) { super(peer); this.children_ = new NNode[peer.getChildCount()]; for(int i = 0 ; i < children_.length ; i++) { children_[i] = create(peer.getChild(i)); } childSiteStateProbs_ = new double[children_.length][][]; } public void setModel(RateMatrix rm) { super.setModel(rm); if ((endStateProbs_ == null) || (numberOfStates_ != endStateProbs_.length)) { 
endStateProbs_ = new double[numberOfStates_]; } for(int i = 0 ; i < children_.length ; i++) { children_[i].setModel(rm); } } public void setupSequences(SitePattern sp) { super.setupSequences(sp); for(int i= 0 ; i < children_.length ; i++) { children_[i].setupSequences(sp); } } public boolean isLeaf() { return false; } private final boolean populateChildSiteStateProbs() { double[][] ss; boolean allNull = true; for(int i = 0; i < children_.length ; i++) { ss = children_[i].calculateSiteStateProbabilities(); if(ss!=null) { childSiteStateProbs_[i] = ss; allNull = false; } else { if(childSiteStateProbs_[i]==null) { throw new RuntimeException("Assertion error : Not as should be!"); } } } return allNull; } protected final int getNumberOfChildren() { return children_.length; } public NNode switchNodes(Node n) { int nc = n.getChildCount(); if (nc == 0) { //We become a leaf! return create(n); } if(nc != children_.length) { NNode[] newChildren = new NNode[nc]; for(int i = 0 ; i < nc ; i++) { if(iTitle: Likelihood Summary
* Description: A container for advanced information derived from a likelihood analysis.
* (To fill the gaps between the abilities of GeneralLikelihoodCalculator and LikelihoodValue) * @author Matthew Goode * @version $Id: LikelihoodSummary.java,v 1.2 2003/10/13 04:15:59 matt Exp $ */ import pal.datatype.*; import pal.misc.Utils; public class LikelihoodSummary implements java.io.Serializable { private double overallLogLikelihood_; private double[] categoryProbabilities_; private double[][] individualLikelihoods_; private int[] sitePatternMatchup_; private DataType dataType_; // // Serialization code // private static final long serialVersionUID=-37625234234158192L; //serialver -classpath ./classes pal.eval.LikelihoodSummary private void writeObject(java.io.ObjectOutputStream out) throws java.io.IOException { out.writeByte(1); //Version number out.writeDouble(overallLogLikelihood_); out.writeObject(categoryProbabilities_); out.writeObject(individualLikelihoods_); out.writeObject(sitePatternMatchup_); out.writeObject(dataType_); } private void readObject(java.io.ObjectInputStream in) throws java.io.IOException, ClassNotFoundException{ byte version = in.readByte(); switch(version) { default : { overallLogLikelihood_ = in.readDouble(); categoryProbabilities_ = (double[])in.readObject(); individualLikelihoods_ = (double[][])in.readObject(); sitePatternMatchup_ = (int[])in.readObject(); dataType_ = (DataType)in.readObject(); } } } /** * @param dt The data type used (for reference) * @param overallLogLikelihood (the overall log likelihood found) * @param categoryProbabilities (the probabilities of each category ([1] if not separate categories) * @param individualLikelihoods The individual likelihoods of each pattern/category (organised [site][category]) * @param sitePatternMatchup for each site indicates which is the related pattern (it is assumed categoryProbabilities given with regard to patterns, if not sitePatternMatchup should contain {0,1,2, ... numberOfSites-1) */ public LikelihoodSummary(DataType dt, double overallLogLikelihood, double[] categoryProbabilities, double[][] individualLikelihoods, int[] sitePatternMatchup) { this.dataType_ = dt; this.overallLogLikelihood_ = overallLogLikelihood; this.categoryProbabilities_ = Utils.getCopy(categoryProbabilities); this.individualLikelihoods_ = Utils.getCopy(individualLikelihoods); this.sitePatternMatchup_ = Utils.getCopy(sitePatternMatchup); } public final double getOverallLogLikelihood() { return overallLogLikelihood_; } public final int[][] generateCategoryRankings() { int[][] rankings = new int[sitePatternMatchup_.length][]; for(int i = 0 ; i < rankings.length ; i++) { rankings[i] = generateCategoryRanking(i); } return rankings; } public final double[] generateSiteLikelihoods(int site) { return Utils.getCopy(individualLikelihoods_[sitePatternMatchup_[site]]); } public final double[] generateSitePosteriors(int site) { double[] rs = generateSiteLikelihoods(site); double total = 0; for(int i = 0 ; i < rs.length ;i++) { total+=rs[i]; } for(int i = 0 ; i < rs.length ;i++) { rs[i]/=total; } return rs; } public final int[] generateCategoryRanking(int site) { double[] likelihoods = individualLikelihoods_[sitePatternMatchup_[site]]; int[] ranking = new int[likelihoods.length]; boolean[] used = new boolean[likelihoods.length]; for(int i = 0 ; i < used.length ; i++) { used[i] = false; } //Yes, I know this is redundant, but it makes me feel better to do it. 
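// Selection-style ranking: on each pass pick the highest-likelihood category not yet used, so ranking[0] is the best-supported category for this site.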
for(int i = 0 ; i < ranking.length ; i++) { int max = -1; double maxValue = -1; for(int j = 0 ; j < used.length ; j++) { if(!used[j]) { if(max<0||likelihoods[j]>maxValue) { max = j; maxValue = likelihoods[j]; } } } used[max] = true; ranking[i] = max; } return ranking; } public String toString() { StringBuffer sb = new StringBuffer(); sb.append("Likelihood Summary\n\n"); sb.append("Data Type:"+dataType_+"\n"); sb.append("Overall Log Likelihood:"+overallLogLikelihood_+"\n"); sb.append("Number of sites:"+sitePatternMatchup_.length+"\n\n"); for(int i = 0 ; i < sitePatternMatchup_.length ; i++) { double[] sitePosteriors = generateSitePosteriors(i); int[] ranking = generateCategoryRanking(i); sb.append("Site:"+i); sb.append(' '); sb.append(Utils.toString(ranking)); sb.append("\n"); sb.append(" posteriors:"+Utils.toString(sitePosteriors)); sb.append("\n"); } return sb.toString(); } }pal-1.5.1/src/pal/eval/GeneralLikelihoodCalculator.java0000644000000000000000000007572010001772420021524 0ustar rootroot// GeneralLikelihoodCalculator.java // // (c) 1999-2001 PAL Development Core Team // // This package may be distributed under the // terms of Lesser GNU General Public License (LGPL) package pal.eval; import pal.tree.*; import pal.alignment.*; import pal.substmodel.*; import pal.datatype.*; import pal.misc.*; import java.util.*; /** * Title: General Likelihood Calculator
* Description: A General likelihood calculator
* This calculates the likelihood of an topologically invariant tree * (on an unchanging alignment) quickly by remembering partial likelihoods of invariant subtrees, and * by essentially generating site patterns for each sub tree. Works with Rate Distributions and other more complex SubstitutionModels. * Will optimise (as in computational time) itself when using Nucleotide based data. * * @author Matthew Goode * @version $Id: GeneralLikelihoodCalculator.java,v 1.10 2004/01/12 09:30:27 matt Exp $ */ public class GeneralLikelihoodCalculator implements PalObjectListener, LikelihoodCalculator { /** the root node of the cached tree. */ NNode root_; /** true if the rate matrix has changed. */ boolean modelChanged_ = false; /** the number of sites in the site pattern. */ int numberOfSites_; int numberOfStates_; int numberOfTransitionCategories_; int[] patternWeightWorkingStore_; /** the Substitution Model currently being used. */ SubstitutionModel model_; double[] equilibriumFrequencies_; /** the base alignment for which likelihood is calculated. */ Alignment baseAlignment_; /** * the difference threshold within which * different branch lengths are deemed the same. */ private static double THRESHOLD = 1e-12; double[] gapPriors_; /** * Constructor taking site pattern, tree and a rate matrix. * @note giving a SitePattern is not going to make anything faster */ public GeneralLikelihoodCalculator(Alignment baseAlignment, Tree tree, RateMatrix model) { this(baseAlignment,tree, SubstitutionModel.Utils.createSubstitutionModel(model)); } /** * Constructor taking site pattern, tree rate matrix, and a rate distribution * @note giving a SitePattern is not going to make anything faster */ public GeneralLikelihoodCalculator(Alignment baseAlignment, Tree tree, RateMatrix model, RateDistribution distribution) { this(baseAlignment,tree, SubstitutionModel.Utils.createSubstitutionModel(model,distribution)); } /** * Constructor taking site pattern, tree and a general substitution model. 
* @note giving a SitePattern is not going to make anything faster */ public GeneralLikelihoodCalculator(Alignment baseAlignment, Tree tree, SubstitutionModel model) { this.baseAlignment_ = baseAlignment; this.numberOfTransitionCategories_ = model.getNumberOfTransitionCategories(); numberOfSites_ = baseAlignment.getSiteCount(); this.patternWeightWorkingStore_ = new int[numberOfSites_]; numberOfStates_ = baseAlignment.getDataType().getNumStates(); buildGapPriors(); setup(tree, model); } public void parametersChanged(PalObjectEvent pe) { modelChanged_ = true; } public void structureChanged(PalObjectEvent pe) { modelChanged_ = true; } private void buildGapPriors() { if(gapPriors_==null||gapPriors_.length!=numberOfStates_) { gapPriors_ = new double[numberOfStates_]; for(int i = 0 ; i < numberOfStates_ ; i++) { gapPriors_[i] = 1; } } } public final void setup(Tree t, SubstitutionModel model) { if ((model_ == null) || (model_ != model)) { if(model_!=null) { model_.removePalObjectListener(this); } this.model_ = model; model_.addPalObjectListener(this); modelChanged_ = true; this.equilibriumFrequencies_ = pal.misc.Utils.getCopy(model_.getEquilibriumFrequencies()); } // else the same rate matrix, do nothing if (root_ == null) { root_ = create(t.getRoot()); } // could be more efficient root_.setupSequences(patternWeightWorkingStore_, AlignmentUtils.getAlignedStates(baseAlignment_),baseAlignment_); //root_.printPatternInfo(); } public void release() { try{ model_.removePalObjectListener(this); model_ = null; } catch(NullPointerException e) {} } /** * @return the likelihood of this tree under the given model and data. */ public double calculateLogLikelihood() { double lkl = root_.computeLikelihood(); return lkl; } /** * @return the LikelihoodSummary of this tree under the given model and data. 
*/ public LikelihoodSummary calculateLogLikelihoodSummary() { return root_.computeLikelihoodSummary(); } final NNode create(Node peer) { switch(peer.getChildCount()) { case 0 : { return new LeafNode(peer); } case 2 : { if(numberOfStates_==4) { return new BificatingFourStateInternalNode(peer); } return new BificatingInternalNode(peer); } default : { if(numberOfStates_==4) { return new FourStateInternalNode(peer); } return new InternalNode(peer); } } } /** * Static implementation of calculateFinalSummary (for use by InternalNode, and BificatingInternal node) */ private static final LikelihoodSummary calculateFinalSummaryImpl(DataType dt, double[] equilibriumProbabilities, int numberOfPatterns, double[] categoryProbabilities, int[] patternWeights, double[][][][] childPatternProbs, int[] patterns, int[] sitePatternMatchup) { final int numberOfTransitionCategories = categoryProbabilities.length; double[][] individualLikelihoods = new double[numberOfPatterns][numberOfTransitionCategories]; final int numberOfChildren = childPatternProbs.length; final int numberOfStates = dt.getNumStates(); double logSum = 0; int patternReadPoint = 0; for(int pattern = 0 ; pattern < numberOfPatterns ; pattern++) { double probabilitySum = 0; double[] patternCategoryLikelihoods = individualLikelihoods[pattern]; for(int transitionCategory = 0 ; transitionCategory < numberOfTransitionCategories; transitionCategory++) { double total = 0; for(int state = 0 ; state < numberOfStates ; state++) { double stateProb = childPatternProbs[0][transitionCategory][patterns[patternReadPoint]][state]; for(int i = 1 ; i THRESHOLD; //return peer_.getBranchLength()!=lastLength_; } protected final double getBranchLength() { return peer_.getBranchLength(); } protected boolean updateTransitionProbabilities() { if(modelChanged_||isBranchLengthChanged()) { double distance = peer_.getBranchLength(); model_.getTransitionProbabilities(distance,transitionProbs_); lastLength_ = distance; return true; } return false; } protected boolean updateTransitionProbabilitiesTranspose() { if(modelChanged_||isBranchLengthChanged()) { double distance = peer_.getBranchLength(); model_.getTransitionProbabilitiesTranspose(distance,transitionProbs_); lastLength_ = distance; return true; } return false; } public final Identifier getIdentifier() { return peer_.getIdentifier(); } private String toString(byte[] bs) { char[] cs = new char[bs.length]; for(int i = 0 ; i < cs.length ; i++) { cs[i] = (char)('A'+bs[i]); } return new String(cs); } abstract public void printPatternInfo(); abstract public boolean calculatePatternProbabilities(); abstract public void setupSequences(int[] patternWeightStore, int[][] alignment, Alignment base); abstract public double computeLikelihood(); abstract public LikelihoodSummary computeLikelihoodSummary(); } //========================================================================================== // // LEAF NODE // //============================== class LeafNode extends NNode { public LeafNode(Node peer) { super(peer); } public void setupSequences(int[] patternWeightStore, int[][] states, Alignment base) { Identifier id = getIdentifier(); if(id!=null) { int number = base.whichIdNumber(id.getName()); if(number>=0) { setSequence(states[number],base.getDataType()); } } else { throw new RuntimeException("Assertion error - leaf node has no matching sequence in base alignment"); } } public double computeLikelihood() { return 0; } public LikelihoodSummary computeLikelihoodSummary() { throw new RuntimeException("Cannot generate Likelihood 
Summary from leaf node"); } private final void setSequence(int[] sequence, final DataType dt) { final int numberOfStates = dt.getNumStates(); sequence = normalise(sequence,dt); int[] stateCount = new int[numberOfStates+1]; int uniqueCount = 0; sitePatternMatchup_ = new int[numberOfSites_]; for(int i = 0 ; i < sequence.length ; i++) { int state = sequence[i]; if(stateCount[state]==0) { uniqueCount++; } stateCount[state]++; } patternStateProbabilities_ = new double[numberOfTransitionCategories_][uniqueCount][]; patternWeights_ = new int[uniqueCount]; int index = 0; int[] statePatternMatchup = new int[numberOfStates+1]; for(int i = 0 ; i < numberOfStates ; i++) { if(stateCount[i]>0) { for(int transitionCategory = 0 ; transitionCategory < numberOfTransitionCategories_ ; transitionCategory++) { patternStateProbabilities_[transitionCategory][index] = transitionProbs_[transitionCategory][i]; } patternWeights_[index] = stateCount[i]; statePatternMatchup[i] = index; index++; } } int gapCount = stateCount[numberOfStates]; if(gapCount>0) { for(int transitionCategory = 0 ; transitionCategory < numberOfTransitionCategories_ ; transitionCategory++) { patternStateProbabilities_[transitionCategory][index] = gapPriors_; } patternWeights_[index] = gapCount; statePatternMatchup[numberOfStates] = index; } for(int i = 0 ; i < numberOfSites_ ; i++) { sitePatternMatchup_[i] = statePatternMatchup[sequence[i]]; } } private final int[] normalise(final int[] sequence, final DataType dt) { int[] normal = new int[sequence.length]; int numberOfStates = dt.getNumStates(); for(int i = 0 ; i < normal.length ; i++) { if(dt.isUnknownState(sequence[i])) { normal[i] =numberOfStates; } else { normal[i] = sequence[i]; } } return normal; } public void printPatternInfo() { System.out.print(patternWeights_.length); } public boolean calculatePatternProbabilities() { return updateTransitionProbabilitiesTranspose(); } } private static final boolean matches(final int[] patternStore, final int[] pattern, int patternIndex, final int patternSize) { for(int i = 0 ; i < patternSize ; i++) { if(patternStore[patternIndex++]!=pattern[i]) { return false; } } return true; } /** * Bificating Internal Node */ class BificatingInternalNode extends NNode{ protected NNode left_, right_; protected double[][][] leftChildPatternProbs_; protected double[][][] rightChildPatternProbs_; protected final double[] endStateProbs_; protected int[] patterns_; protected int numberOfPatterns_; public BificatingInternalNode(Node peer) { super(peer); endStateProbs_ = new double[numberOfStates_]; left_ = create(peer.getChild(0)); right_ = create(peer.getChild(1)); } public void setupSequences(int[] patternWeightStore, int[][] states, Alignment base) { left_.setupSequences(patternWeightStore, states,base); right_.setupSequences(patternWeightStore, states,base); leftChildPatternProbs_ = left_.patternStateProbabilities_; rightChildPatternProbs_ = right_.patternStateProbabilities_; int[] patternStore = new int[numberOfSites_*2]; int numberOfPatterns = 0; int insertionPoint = 0; int[] currentPattern = new int[2]; boolean patternFound; sitePatternMatchup_ = new int[numberOfSites_]; for(int site = 0 ; site < numberOfSites_; site++) { currentPattern[0] = left_.sitePatternMatchup_[site]; currentPattern[1] = right_.sitePatternMatchup_[site]; int patternIndex = 0; int patternInsertionIndex = -1; for(int pattern = 0 ; pattern < numberOfPatterns ; pattern++) { if(matches(patternStore,currentPattern,patternIndex,2)) { patternInsertionIndex = pattern; patternWeightStore[pattern]++; 
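// An existing joint (left,right) pattern matched: reuse its index and just increase its weight.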
break; } patternIndex+=2; } if(patternInsertionIndex<0) { patternStore[insertionPoint++] = currentPattern[0]; patternStore[insertionPoint++] = currentPattern[1]; patternInsertionIndex = numberOfPatterns; patternWeightStore[numberOfPatterns] = 1; numberOfPatterns++; } sitePatternMatchup_[site] = patternInsertionIndex; } this.numberOfPatterns_ = numberOfPatterns; patterns_ = new int[insertionPoint]; System.arraycopy(patternStore,0,patterns_,0,insertionPoint); patternWeights_ = new int[numberOfPatterns]; System.arraycopy(patternWeightStore,0,patternWeights_,0,numberOfPatterns); this.patternStateProbabilities_ = new double[numberOfTransitionCategories_][numberOfPatterns][numberOfStates_]; } protected final boolean populateChildPatternProbs() { if(left_.calculatePatternProbabilities()) { right_.calculatePatternProbabilities(); return true; } return right_.calculatePatternProbabilities(); } public LikelihoodSummary computeLikelihoodSummary() { LikelihoodSummary ls = calculateFinalSummary(equilibriumFrequencies_); modelChanged_ = false; return ls; } public double computeLikelihood() { double lh = calculateFinal(equilibriumFrequencies_); modelChanged_ = false; return lh; } protected final int getNumberOfChildren() { return 2; } public void printPatternInfo() { System.out.print(numberOfPatterns_+":("); left_.printPatternInfo(); System.out.print(", "); right_.printPatternInfo(); System.out.print(")"); } /** * Populates child pattern probs and updates transition probabilities (if necessary respectively), and * returns true if we need to so stuff else false (if nothing in this sub tree has changed) */ protected boolean setupProbabilityCalculate() { boolean a = populateChildPatternProbs(); boolean b = updateTransitionProbabilities(); return (a||b); } public boolean calculatePatternProbabilities() { if(!setupProbabilityCalculate()) { return false; } int patternReadPoint; for(int transitionCategory = 0 ; transitionCategory < numberOfTransitionCategories_ ; transitionCategory++) { final double[][] leftProbs = leftChildPatternProbs_[transitionCategory]; final double[][] rightProbs = rightChildPatternProbs_[transitionCategory]; final double[][] patternStateProb = patternStateProbabilities_[transitionCategory]; final double[][] transProbs = transitionProbs_[transitionCategory]; patternReadPoint = 0; for(int pattern = 0; pattern < numberOfPatterns_; pattern++) { for(int endState = 0; endState < numberOfStates_; endState++) { endStateProbs_[endState] = leftProbs[patterns_[patternReadPoint]][endState]* rightProbs[patterns_[patternReadPoint+1]][endState]; } patternReadPoint+=2; final double[] probabilityStore = patternStateProb[pattern]; for(int startState = 0 ; startState < numberOfStates_ ; startState++) { double probOfStartState = 0; for(int endState = 0; endState < numberOfStates_ ; endState++) { probOfStartState += transProbs[startState][endState]*endStateProbs_[endState]; } probabilityStore[startState] = probOfStartState; } } } return true; } public double calculateFinal(double[] equilibriumProbs) { populateChildPatternProbs(); double logSum = 0; int patternReadPoint = 0; double[] categoryProbabilities = model_.getTransitionCategoryProbabilities(); for(int pattern = 0 ; pattern < numberOfPatterns_ ; pattern++) { double probabilitySum = 0; for(int transitionCategory = 0 ; transitionCategory < numberOfTransitionCategories_ ; transitionCategory++) { double total = 0; final double[][] leftProbs = leftChildPatternProbs_[transitionCategory]; final double[][] rightProbs = 
rightChildPatternProbs_[transitionCategory]; for(int state = 0 ; state < numberOfStates_ ; state++) { total+= equilibriumProbs[state]* leftProbs[patterns_[patternReadPoint]][state]* rightProbs[patterns_[patternReadPoint+1]][state]; } probabilitySum+=total*categoryProbabilities[transitionCategory]; } patternReadPoint+=2; logSum+=Math.log(probabilitySum)*patternWeights_[pattern]; } return logSum; } /** * Calculates the final Log Likelihood, and fills in align */ public LikelihoodSummary calculateFinalSummary(double[] equilibriumProbs) { populateChildPatternProbs(); return calculateFinalSummaryImpl( model_.getDataType(), equilibriumProbs, numberOfPatterns_, model_.getTransitionCategoryProbabilities(), patternWeights_, new double[][][][] {leftChildPatternProbs_,rightChildPatternProbs_}, patterns_, sitePatternMatchup_ ); } } //End of class BifactingInternalNode class BificatingFourStateInternalNode extends BificatingInternalNode{ public BificatingFourStateInternalNode(Node peer) { super(peer); } public boolean calculatePatternProbabilities() { if(!setupProbabilityCalculate()) { return false; } int patternReadPoint; for(int transitionCategory = 0 ; transitionCategory < numberOfTransitionCategories_ ; transitionCategory++) { final double[][] leftProbs = leftChildPatternProbs_[transitionCategory]; final double[][] rightProbs = rightChildPatternProbs_[transitionCategory]; final double[][] patternStateProb = patternStateProbabilities_[transitionCategory]; final double[][] transProbs = transitionProbs_[transitionCategory]; patternReadPoint = 0; for(int pattern = 0; pattern < numberOfPatterns_; pattern++) { directProduct4(leftProbs[patterns_[patternReadPoint]],rightProbs[patterns_[patternReadPoint+1]],endStateProbs_); patternReadPoint+=2; final double[] probabilityStore = patternStateProb[pattern]; probabilityStore[0] = dotProduct4(transProbs[0],endStateProbs_); probabilityStore[1] = dotProduct4(transProbs[1],endStateProbs_); probabilityStore[2] = dotProduct4(transProbs[2],endStateProbs_); probabilityStore[3] = dotProduct4(transProbs[3],endStateProbs_); } } return true; } public double calculateFinal(double[] equilibriumProbs) { populateChildPatternProbs(); double logSum = 0; int patternReadPoint = 0; double[] categoryProbabilities = model_.getTransitionCategoryProbabilities(); for(int pattern = 0 ; pattern < numberOfPatterns_ ; pattern++) { double probabilitySum = 0; final int patternLeft = patterns_[patternReadPoint]; final int patternRight = patterns_[patternReadPoint+1]; for(int transitionCategory = 0 ; transitionCategory < numberOfTransitionCategories_ ; transitionCategory++) { probabilitySum+= dotProduct4( equilibriumProbs, leftChildPatternProbs_[transitionCategory][patternLeft], rightChildPatternProbs_[transitionCategory][patternRight] )*categoryProbabilities[transitionCategory]; } patternReadPoint+=2; logSum+=Math.log(probabilitySum)*patternWeights_[pattern]; } return logSum; } } //End of class BifactingFourStateInternalNode /** * The InternalNode class is a basic non optimised methods for a polyficating node * where any number of states in the underlying datatype is catered for. 
*/ class InternalNode extends NNode{ protected NNode[] children_; protected double[][][][] childPatternProbs_; protected final double[] endStateProbs_; protected int[] patterns_; protected int numberOfPatterns_; public InternalNode(Node peer) { super(peer); endStateProbs_ = new double[numberOfStates_]; this.children_ = new NNode[peer.getChildCount()]; for(int i = 0 ; i < children_.length ; i++) { children_[i] = create(peer.getChild(i)); } childPatternProbs_ = new double[children_.length][][][]; } public void setupSequences(int[] patternWeightStore, int[][] states, Alignment base) { for(int i= 0 ; i < children_.length ; i++) { children_[i].setupSequences(patternWeightStore, states,base); } for(int i = 0 ; i < children_.length ; i++) { childPatternProbs_[i] = children_[i].patternStateProbabilities_; } final int numberOfChildren = children_.length; int[] patternStore = new int[numberOfSites_*numberOfChildren]; int numberOfPatterns = 0; int insertionPoint = 0; int[] currentPattern = new int[numberOfChildren]; boolean patternFound; sitePatternMatchup_ = new int[numberOfSites_]; for(int site = 0 ; site < numberOfSites_; site++) { for(int child = 0 ; childTitle: SimpleLeafCalculator

*

Description: A simple implementation of a calculator for conditional probabilities at a leaf (tip), with no ambiguities in the data
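For orientation, the sketch below shows one way a tip's states could be reduced to the patternStateMatchup expected by the constructor further down; the helper name and surrounding wiring are assumptions, and only the SimpleLeafCalculator constructor signature is taken from this file.

// Hypothetical helper (not part of PAL): reduce a tip sequence to its distinct states.
// For example the sequence [0,1,0,1,3,0] reduces to the matchup [0,1,3].
static int[] toPatternStateMatchup(int[] sequence, int numberOfStates) {
    boolean[] seen = new boolean[numberOfStates + 1];   // +1 slot for the gap/unknown state
    int[] matchup = new int[numberOfStates + 1];
    int count = 0;
    for (int state : sequence) {
        if (!seen[state]) { seen[state] = true; matchup[count++] = state; }
    }
    int[] result = new int[count];
    System.arraycopy(matchup, 0, result, 0, count);
    return result;
}

// Assumed usage, given a generator obtained from an LHCalculator.Factory elsewhere:
//   int[] matchup = toPatternStateMatchup(tipStates, 4);
//   LHCalculator.Leaf leaf =
//       new SimpleLeafCalculator(matchup, matchup.length, 4, numberOfCategories, generator);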

*

Copyright: Copyright (c) 2003

*

Company:

* @author not attributable * @version 1.0 * @note not designed to be serialized */ import pal.substmodel.*; public class SimpleLeafCalculator implements LHCalculator.Leaf{ private final int numberOfStates_; private final int[] patternStateMatchup_; //Only needed for cloning... private final int numberOfPatterns_; //Only needed for cloning... private final int numberOfCategories_; //Only needed for cloning... private final LHCalculator.Generator parent_;//Only needed for cloning... private final double[][][] transitionProbabilitiyStore_; private final ConditionalProbabilityStore conditionalProbabilities_; private final ConditionalProbabilityStore flatConditionalProbabilities_; private double lastDistance_ = -1; private SimpleLeafCalculator(SimpleLeafCalculator toCopy) { this.numberOfStates_ = toCopy.numberOfStates_; this.patternStateMatchup_ = toCopy.patternStateMatchup_; this.numberOfPatterns_ = toCopy.numberOfPatterns_; this.numberOfCategories_ = toCopy.numberOfCategories_; this.parent_ = toCopy.parent_; this.transitionProbabilitiyStore_ = pal.misc.Utils.getCopy(toCopy.transitionProbabilitiyStore_); this.flatConditionalProbabilities_ = createFlat(patternStateMatchup_,numberOfPatterns_,numberOfCategories_,numberOfStates_,parent_); this.conditionalProbabilities_ = createExtended(transitionProbabilitiyStore_, patternStateMatchup_,numberOfPatterns_,numberOfCategories_,numberOfStates_,parent_); this.lastDistance_ = toCopy.lastDistance_; } public SimpleLeafCalculator( int[] patternStateMatchup, int numberOfPatterns, int numberOfStates, int numberOfCategories, LHCalculator.Generator parent ) { this.numberOfStates_ = numberOfStates; this.numberOfCategories_ = numberOfCategories; this.numberOfPatterns_ = numberOfPatterns; this.parent_ = parent; this.patternStateMatchup_ = pal.misc.Utils.getCopy(patternStateMatchup); // StatePatternMatchup matches a state to it's new pattern (is undefined if state does not occur) this.transitionProbabilitiyStore_ = new double[numberOfCategories][numberOfStates][numberOfStates]; this.conditionalProbabilities_ = createExtended(transitionProbabilitiyStore_,patternStateMatchup,numberOfPatterns,numberOfCategories,numberOfStates,parent); this.flatConditionalProbabilities_ = createFlat(patternStateMatchup,numberOfPatterns,numberOfCategories,numberOfStates,parent); } // ==================================== private static final ConditionalProbabilityStore createFlat(int[] patternStateMatchup, int numberOfPatterns, int numberOfCategories, int numberOfStates, LHCalculator.Generator parent ) { ConditionalProbabilityStore flatConditionalProbabilities = parent.createAppropriateConditionalProbabilityStore( true ); final double[] gapStore = new double[numberOfStates]; for(int i = 0 ; i < gapStore.length ; i++) { gapStore[i] = 1; } double[][] stateStuff = new double[numberOfStates][numberOfStates]; for( int i = 0; iTitle: LikelihoodTool

*

Description: A set of static methods for doing common likelihood tasks. Also serves as example code for doing likelihood analysis.
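As a concrete illustration, a minimal sketch of a typical call sequence is given here; the Tree, Alignment and SubstitutionModel instances are assumed to be built elsewhere, the example class is assumed to live in the same package as LikelihoodTool, and only the LikelihoodTool method signatures are taken from this file.

import pal.tree.Tree;
import pal.alignment.Alignment;
import pal.substmodel.SubstitutionModel;

// Hypothetical example class, not part of PAL.
public class LikelihoodToolExample {
  public static double score(Tree tree, Alignment alignment, SubstitutionModel model) {
    // Convert the alignment to a data type compatible with the model
    // (also needed when the alignment is IUPACNucleotides and the model is Nucleotides).
    Alignment matched = LikelihoodTool.getMatchingDataType(alignment, model);
    // Log likelihood of the data under the fixed tree and model.
    return LikelihoodTool.calculateLogLikelihood(tree, matched, model);
  }
  public static Tree improve(Tree tree, Alignment alignment, SubstitutionModel model) {
    // Optimise branch lengths and (because the flag is true) the model, with no clock constraint.
    return LikelihoodTool.optimiseUnrooted(tree, alignment, model, true);
  }
}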

* @author Matthew Goode * @version 1.0 */ import pal.tree.*; import pal.math.*; import pal.alignment.*; import pal.substmodel.SubstitutionModel; import pal.misc.TimeOrderCharacterData; import pal.mep.*; import pal.datatype.*; public final class LikelihoodTool { /** * Calculate the log likelihood of a particular set of phylogenetic data * @param tree The tree with set branch lengths * @param alignment The alignment (sequence names must match tree) * @param model The substitution model to use * @return The log likelihood * @note If the alignment uses IUPACNucleotides and the model uses Nucleotides see getMatchingDataType() */ public final static double calculateLogLikelihood(Tree tree, Alignment alignment, SubstitutionModel model) { GeneralLikelihoodCalculator lc = new GeneralLikelihoodCalculator(alignment,tree,model); return lc.calculateLogLikelihood(); } /** * Optimise the branches of a tree with regard to maximum likelihood, with no constraints on the branchlengths (as for an unrooted tree). The topology is unchanged. * @param tree The tree (remains unchanged) * @param alignment The alignment (sequence names must match tree) * @param model The substitution model to use (is changed if optimisation of the model is choosen) * @param optimiseModel if true the model is also optimised, otherwise just the tree * @return The optimised tree * @see pal.treesearch.optimiseUnrootedFixed() for an equivalient, but potentially faster method. * @note If the alignment uses IUPACNucleotides and the model uses Nucleotides see getMatchingDataType() */ public final static Tree optimiseUnrooted(Tree tree, Alignment alignment, SubstitutionModel model, boolean optimiseModel) { UnconstrainedTree ut = new UnconstrainedTree(TreeTool.getUnrooted(tree)); DataTranslator dt = new DataTranslator(alignment); alignment = dt.toAlignment(MolecularDataType.Utils.getMolecularDataType(model.getDataType()),0); if(optimiseModel) { LikelihoodOptimiser.optimiseCombined(ut, alignment, model, new OrthogonalSearch(), 6, 6); } else { LikelihoodOptimiser.optimiseTree(ut, alignment, model, new OrthogonalSearch(), 6, 6); } return new SimpleTree(ut); } /** * Optimise the branches of a tree with regard to maximum likelihood, with a molecular clock assumption, that is, constrained such that all tips are contemporaneous, the tree is treated as rooted. The topology is unchanged. * @param tree The tree with set branch lengths * @param alignment The alignment (sequence names must match tree) * @param model The substitution model to use * @param optimiseModel if true the model is optimised as well * @return The resulting optimised tree * @note If the alignment uses IUPACNucleotides and the model uses Nucleotides see getMatchingDataType() */ public final static Tree optimiseClockConstrained(Tree tree, Alignment alignment, SubstitutionModel model, boolean optimiseModel) { ClockTree ut = new ClockTree(tree); DataTranslator dt = new DataTranslator(alignment); alignment = dt.toAlignment(MolecularDataType.Utils.getMolecularDataType(model.getDataType()),0); if(optimiseModel) { LikelihoodOptimiser.optimiseCombined(ut, alignment, model, new OrthogonalSearch(), 6, 6); } else { LikelihoodOptimiser.optimiseTree(ut, alignment, model, new OrthogonalSearch(), 6, 6); } return new SimpleTree(ut); } /** * Optimise the branches of a tree with regard to maximum likelihood, with under an assumption of a molecular clock with serially sampled data and a single mutation rate parameter. This is equivalent to the TipDate model. The topology is unchanged. 
* @param tree The tree with set branch lengths * @param alignment The alignment (sequence names must match tree) * @param model The substitution model to use * @param tocd The sample information object relating sequences to time or order * @param optimiseModel if true the model is optimised as well * @param rateStore storage space for the mutation rate, the initial value is used as the starting rate in the optimisation * @return The resulting optimised tree * @note If the alignment uses IUPACNucleotides and the model uses Nucleotides see getMatchingDataType() */ public final static Tree optimiseSRDT(Tree tree, Alignment alignment, SubstitutionModel model, TimeOrderCharacterData tocd, boolean optimiseModel, double[] rateStore) { ConstantMutationRate cm = new ConstantMutationRate(rateStore[0], tocd.getUnits(),1); DataTranslator dt = new DataTranslator(alignment); alignment = dt.toAlignment(MolecularDataType.Utils.getMolecularDataType(model.getDataType()),0); MutationRateModelTree mt = new MutationRateModelTree( tree,tocd, cm); if(optimiseModel) { LikelihoodOptimiser.optimiseCombined(mt, alignment, model, new OrthogonalSearch(), 6, 6); } else { LikelihoodOptimiser.optimiseTree(mt, alignment, model, new OrthogonalSearch(), 6, 6); } rateStore[0] = cm.getMu(); return new SimpleTree(mt); } /** * Optimise the branches of a tree with regard to maximum likelihood, with under an assumption of a molecular clock with serially sampled data and multiple mutation rate parameters, mu - one for each sampling interval. The topology is unchanged. * @param tree The tree with set branch lengths * @param alignment The alignment (sequence names must match tree) * @param model The substitution model to use * @param tocd The sample information object relating sequences to time or order * @param optimiseModel if true the model is optimised as well * @param rateStore storage space for the mus, the initial values are used as the starting mus in the optimisation * @return The resulting optimised tree * @note If the alignment uses IUPACNucleotides and the model uses Nucleotides see getMatchingDataType() */ public final static Tree optimiseMRDT(Tree tree, Alignment alignment, SubstitutionModel model, TimeOrderCharacterData tocd, boolean optimiseModel, double[] rateStore) { SteppedMutationRate smr = new SteppedMutationRate(pal.misc.Utils.getCopy(rateStore), tocd); DataTranslator dt = new DataTranslator(alignment); alignment = dt.toAlignment(MolecularDataType.Utils.getMolecularDataType(model.getDataType()),0); MutationRateModelTree mt = new MutationRateModelTree( tree,tocd, smr); if(optimiseModel) { LikelihoodOptimiser.optimiseCombined(mt, alignment, model, new OrthogonalSearch(), 6, 6); } else { LikelihoodOptimiser.optimiseTree(mt, alignment, model, new OrthogonalSearch(), 6, 6); } smr.getMus(rateStore); return new SimpleTree(mt); } /** * Optimise the branches of a tree with regard to maximum likelihood, with under an assumption of a molecular clock with serially sampled data and multiple mutation rate parameters, mu, over general time intervals. The topology is unchanged. 
* @param tree The tree with set branch lengths * @param alignment The alignment (sequence names must match tree) * @param model The substitution model to use * @param tocd The sample information object relating sequences to time or order * @param optimiseModel if true the model is optimised as well * @param rateChangeTimes the times (as related to the sample information) of when a new mu is used (should be of length mus.length -1 ) * @param rateStore storage space for the mus, the initial values are used as the starting mus in the optimisation * @return The resulting optimised tree * @note If the alignment uses IUPACNucleotides and the model uses Nucleotides see getMatchingDataType() */ public final static Tree optimiseMRDT(Tree tree, Alignment alignment, SubstitutionModel model, TimeOrderCharacterData tocd, boolean optimiseModel, double[] rateChangeTimes, double[] rateStore) { DataTranslator dt = new DataTranslator(alignment); alignment = dt.toAlignment(MolecularDataType.Utils.getMolecularDataType(model.getDataType()),0); SteppedMutationRate smr = new SteppedMutationRate(pal.misc.Utils.getCopy(rateStore), pal.misc.Utils.getCopy(rateChangeTimes), tocd.getUnits(),false,tocd.getSuggestedMaximumMutationRate()*2); MutationRateModelTree mt = new MutationRateModelTree( tree,tocd, smr); if(optimiseModel) { LikelihoodOptimiser.optimiseCombined(mt, alignment, model, new OrthogonalSearch(), 6, 6); } else { LikelihoodOptimiser.optimiseTree(mt, alignment, model, new OrthogonalSearch(), 6, 6); } smr.getMus(rateStore); return new SimpleTree(mt); } /** * Creates a new alignment that has a compatible data type with a substution model (needed for likelihood stuff) * @param alignment The base alignment * @param model The substitution model that will be used with the alignment data * @return An appropriately converted alignment * @note this is also neccessary if the alignment uses IUPACNucleotides and the model is Nucleotides */ public static final Alignment getMatchingDataType(Alignment alignment, SubstitutionModel model) { DataTranslator dt = new DataTranslator(alignment); return dt.toAlignment(MolecularDataType.Utils.getMolecularDataType(model.getDataType()),0); } }pal-1.5.1/src/pal/eval/LHCalculator.java0000644000000000000000000004133010141731414016437 0ustar rootroot// LHCalculator.java // // (c) 1999-2003 PAL Development Core Team // // This package may be distributed under the // terms of the Lesser GNU General Public License (LGPL) package pal.eval; /** *

Title: LHCalculator

*

Description: An LHCalculator object must be treated as a stateful, single-threaded object that can be used for calculating components in an overall likelihood calculation.
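Roughly, the intended calling pattern looks like the sketch below; the factory, model, data type and pattern inputs are assumed to be supplied by the caller, and only methods declared on the interfaces in this file are used.

// Sketch of the Factory -> Generator -> Leaf/Internal/External wiring (fragment,
// assumed to sit inside a class in the same package).
void wireCalculators(LHCalculator.Factory factory, SubstitutionModel model,
                     DataType dataType, int[] tipPatternStates) {
  int categories = model.getNumberOfTransitionCategories();
  LHCalculator.Generator generator = factory.createSeries(categories, dataType);

  // One Leaf calculator per tip; the states are assumed to be pattern-reduced already.
  LHCalculator.Leaf leaf = generator.createNewLeaf(tipPatternStates, tipPatternStates.length);

  // Internal calculators may cache state and attach to internal nodes;
  // the External calculator is stateless and writes into caller-supplied stores.
  LHCalculator.Internal internal = generator.createNewInternal();
  LHCalculator.External external = generator.createNewExternal();
  ConditionalProbabilityStore temp =
      generator.createAppropriateConditionalProbabilityStore(false);
  // ... conditionals from the tree are then combined via external.calculateLogLikelihood(...).
}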

*

History
*

    *
  • 25/10/2003 Added leaf handling interface
  • 30/3/2004 Changed certain methods to more intelligent ones (relating to posterior distribution of sites). Added abstract External class.
*

* @author Matthew Goode * @version 1.0 * @note needs to have the use of the word likelihood altered in certain cases (to conditional probability) * */ import pal.datatype.*; import pal.substmodel.*; public interface LHCalculator { /** * The External calculator does not maintain any state and is approapriate for * calculation where a store is provided */ public static interface External extends java.io.Serializable { /** * * @param centerPattern the pattern information * @param leftConditionalProbabilities Implementations must not overwrite or change * @param rightConditionalProbabilities Implementations must not overwrite or change * @param resultStore Where to stick the created categoryPatternState information * @note calls to getLastConditionalProbabilities() does not have to be valid after call this method */ public void calculateFlat( PatternInfo centerPattern, ConditionalProbabilityStore leftConditionalProbabilities, ConditionalProbabilityStore rightConditionalProbabilities, ConditionalProbabilityStore resultStore ); /** * * @param distance * @param model * @param centerPattern the pattern information * @param leftConditionalProbabilities Implementations must not overwrite or change * @param rightConditionalProbabilities Implementations must not overwrite or change * @param resultStore Where to stick the created categoryPatternState information * @note calls to getLastConditionalProbabilities() does not have to be valid after call this method */ public void calculateExtended( double distance, SubstitutionModel model, PatternInfo centerPattern, ConditionalProbabilityStore leftConditionalProbabilities, ConditionalProbabilityStore rightConditionalProbabilities, ConditionalProbabilityStore resultStore ); /** * Extend the conditionals back in time by some distance, with some model * @param distance The distance to extend by * @param model The model to use * @param conditionalProbabilities The probabilities to extend */ public void calculateSingleExtendedDirect( double distance, SubstitutionModel model, int numberOfPatterns, ConditionalProbabilityStore conditionalProbabilities ); /** * Extend the conditionals back in time by some distance, with some model * @param distance The distance to extend by * @param model The model to use * @param baseConditionalProbabilities The probabilities to extend * @param resultConditionalProbabilities The probabilities to extend */ public void calculateSingleExtendedIndirect( double distance, SubstitutionModel model, int numberOfPatterns, ConditionalProbabilityStore baseConditionalProbabilities, ConditionalProbabilityStore resultConditionalProbabilities ); /** * Calculate the likelihood given two sub trees (left, right) and their flat (unextend) likeihood probabilities * @param distance * @param model * @param centerPattern the pattern information * @param leftFlatConditionalProbabilities * @param rightFlatConditionalProbabilities * @param tempStore may be used internally to calculate likelihood * @return the log likelihood */ public double calculateLogLikelihood( double distance, SubstitutionModel model, PatternInfo centerPattern, ConditionalProbabilityStore leftFlatConditionalProbabilities, ConditionalProbabilityStore rightFlatConditionalProbabilities, ConditionalProbabilityStore tempStore ); /** * Calculate the likelihood given two sub trees (left, right) and their extended likeihood probabilities * @param model * @param centerPattern the pattern information * @param leftConditionalProbabilities * @param rightConditionalProbabilities * @return the Log 
likelihood */ public double calculateLogLikelihood( SubstitutionModel model, PatternInfo centerPattern, ConditionalProbabilityStore leftConditionalProbabilities, ConditionalProbabilityStore rightConditionalProbabilities ); /** * Calculate the likelihood given the conditional probabilites at the root * @param model The substitution model used * @param patternWeights the weights of each pattern * @param numberOfPatterns the number of patterns * @param conditionalProbabilities The conditionals * @return the Log likelihood */ public double calculateLogLikelihoodSingle( SubstitutionModel model, int[] patternWeights, int numberOfPatterns, ConditionalProbabilityStore conditionalProbabilityStore); /** * Calculate the conditional probabilities of each pattern for each category * @param model * @param centerPattern the pattern information * @param leftConditionalProbabilities * @param rightConditionalProbabilities * @param categoryPatternLogLikelihoodStore after call will hold a matrix of values in the form [cat][pattern], where [cat][pattern] represents the site probability under a particular category/class, *not* multiplied by the category probability or pattern weights */ public SiteDetails calculateSiteDetailsRooted( SubstitutionModel model, PatternInfo centerPattern, ConditionalProbabilityStore leftConditionalProbabilitiesStore, ConditionalProbabilityStore rightConditionalProbabilitiesStore ); /** * Calculate the conditional probabilities of each pattern for each category * @param distance The distance between the two nodes * @param model * @param centerPattern the pattern information * @param leftConditionalProbabilities * @param rightConditionalProbabilities * @param categoryPatternLogLikelihoodStore after call will hold a matrix of values in the form [cat][pattern], where [cat][pattern] represents the site probability under a particular category/class, *not* multiplied by the category probability or pattern weights */ public SiteDetails calculateSiteDetailsUnrooted( double distance, SubstitutionModel model, PatternInfo centerPattern, ConditionalProbabilityStore leftConditionalProbabilitiesStore, ConditionalProbabilityStore rightConditionalProbabilitiesStore, ConditionalProbabilityStore tempStore ); } //End of class External // ================================================================================================= // ================= Internal ====================================================================== // ================================================================================================= /** * The Internal calculator may maintain state and is approapriate permanent attachment * to internal nodes of the tree structure */ public static interface Internal { /** * calculate flat probability information (not extended over a branch). * @param centerPattern the pattern information * @param leftConditionalProbabilities Implementations should be allowed to overwrite in certain cases * @param rightConditionalProbabilities Implementations should be allowed to overwrite in certain cases * @return true if results built from cached information * @note An assumption may be made that after a call to this method the leftConditionals and rightConditionals are not used again! 
*/ public ConditionalProbabilityStore calculateFlat( PatternInfo centerPattern, ConditionalProbabilityStore leftConditionalProbabilities, ConditionalProbabilityStore rightConditionalProbabilities ); /** * * @param distance * @param model * @param centerPattern the pattern information * @param leftConditionalProbabilities * @param rightConditionalProbabilities * @param modelChangedSinceLastCall this should be true if the substituion model has altered since the last call to this method on this particular object, false otherwise * @return resulting conditional probabilities * @note An assumption may be made that after a call to this method the leftConditionals and rightConditionals are not used again! */ public ConditionalProbabilityStore calculateExtended( double distance, SubstitutionModel model, PatternInfo centerPattern, final ConditionalProbabilityStore leftConditionalProbabilities, final ConditionalProbabilityStore rightConditionalProbabilities, boolean modelChangedSinceLastCall ); public ConditionalProbabilityStore calculatePostExtendedFlat( double distance, SubstitutionModel model, PatternInfo centerPattern, final ConditionalProbabilityStore leftConditionalProbabilities, final ConditionalProbabilityStore rightConditionalProbabilities, boolean modelChangedSinceLastCall ); } //End of Internal // ================================================================================================= // ================= Leaf ========================================================================== // ================================================================================================= /** * A LHCalculator.Leaf object is attached to each leaf node and can be used to calculated conditional probabilities across the related branch. * Allows for quick implementations as well as implementations that cope correctly with ambiguous characters * @note Should not be made serializable! */ public static interface Leaf { public ConditionalProbabilityStore getFlatConditionalProbabilities(); public ConditionalProbabilityStore getExtendedConditionalProbabilities( double distance, SubstitutionModel model, boolean modelChanged); /** * Create a new Leaf calculator that has exactly the same properties as this one (but is different such that it may be used independently) * @return a copy of this leaf calculator */ public Leaf getCopy(); } public static interface Factory extends java.io.Serializable { public Generator createSeries( int numberOfCategories, DataType dt ); } public static interface Generator extends java.io.Serializable { /** * Create anew leaf calculator * @param patternStateMatchup The sequence as reduced to patterns. This should just be one state per pattern. 
* For example given a sequence [ 0, 1,0,1,3,0] a patternMatchup may be [0,1,3] (the first element is the first * pattern, which is state 0, the second element is the second pattern which is 1, and the third element is the * third pattern (novel pattern) which is state 3) * @param numberOfPatterns The number of patterns in the patternStateMatchup array * @return a leaf calculator object */ public Leaf createNewLeaf(int[] patternStateMatchup, int numberOfPatterns); public Leaf createNewLeaf(int[] patternStateMatchup, int numberOfPatterns, Generator parentGenerator ); public External createNewExternal(); public Internal createNewInternal(); public boolean isAllowCaching(); /** * An obscure method, primarily used by the High Accuracy calculator * @param parentGenerator A reference to an encompasing generator (that may for example * wish to impose it's own choice on the creation of ConditionalProbabilityStores) * @throws IllegalArgumentException Generator does not allow being a subserviant generator * @return */ public External createNewExternal( Generator parentGenerator ) throws IllegalArgumentException; /** * An obscure method, primarily used by the High Accuracy calculator * @param parentGenerator A reference to an encompasing generator (that may for example * wish to impose it's own choice on the creation of ConditionalProbabilityStores) * @throws IllegalArgumentException Generator does not allow being a subserviant generator * @return */ public Internal createNewInternal( Generator patentGenerator ) throws IllegalArgumentException; public ConditionalProbabilityStore createAppropriateConditionalProbabilityStore( boolean isForLeaf ); } // ====================================================================================== public abstract class AbstractExternal { public final SiteDetails calculateSiteDetailsUnrooted(double distance, SubstitutionModel model, PatternInfo centerPattern, ConditionalProbabilityStore leftFlatConditionalProbabilities, ConditionalProbabilityStore rightFlatConditionalProbabilities, ConditionalProbabilityStore tempStore ) { double[][] store = new double[model.getNumberOfTransitionCategories()][centerPattern.getNumberOfPatterns()]; calculateCategoryPatternProbabilities(distance, model, centerPattern, leftFlatConditionalProbabilities,rightFlatConditionalProbabilities,tempStore, store); double[] siteLikelihoods = calculateSiteLikelihoods( store, model.getTransitionCategoryProbabilities(), model.getNumberOfTransitionCategories(), centerPattern.getSitePatternMatchup(),centerPattern.getNumberOfSites() ); return SiteDetails.Utils.create( store,false,model, centerPattern.getNumberOfPatterns(), centerPattern.getSitePatternMatchup(), centerPattern.getNumberOfSites(), siteLikelihoods ); } private final double[] calculateSiteLikelihoods( double[][] conditionals, final double[] catProbabilities, int numberOfCategories, int[] sitePatternMatchup, int numberOfSites) { final double[] siteLikeihoods = new double[numberOfSites]; for(int site= 0 ; site < numberOfSites ; site++) { double total = 0; int pattern = sitePatternMatchup[site]; for(int cat = 0 ; cat < numberOfCategories ; cat++) { total+=catProbabilities[cat]*conditionals[cat][pattern]; } siteLikeihoods[site] = total; } return siteLikeihoods; } public final SiteDetails calculateSiteDetailsRooted(SubstitutionModel model, PatternInfo centerPattern, ConditionalProbabilityStore leftConditionalProbabilitiesStore, ConditionalProbabilityStore rightConditionalProbabilitiesStore) { double[][] store = new 
double[model.getNumberOfTransitionCategories()][centerPattern.getNumberOfPatterns()]; calculateCategoryPatternProbabilities(model,centerPattern,leftConditionalProbabilitiesStore,rightConditionalProbabilitiesStore,store); final double[] siteLikelihoods = calculateSiteLikelihoods( store, model.getTransitionCategoryProbabilities(), model.getNumberOfTransitionCategories(), centerPattern.getSitePatternMatchup(),centerPattern.getNumberOfSites() ); return SiteDetails.Utils.create(store,false,model,centerPattern.getNumberOfPatterns(),centerPattern.getSitePatternMatchup(),centerPattern.getNumberOfSites(),siteLikelihoods); } protected abstract void calculateCategoryPatternProbabilities( double distance, SubstitutionModel model, PatternInfo centerPattern, ConditionalProbabilityStore leftFlatConditionalProbabilities, ConditionalProbabilityStore rightFlatConditionalProbabilities, ConditionalProbabilityStore tempStore, double[][] categoryPatternLogLikelihoodStore ); protected abstract void calculateCategoryPatternProbabilities( SubstitutionModel model, PatternInfo centerPattern, ConditionalProbabilityStore leftConditionalProbabilities, ConditionalProbabilityStore rightConditionalProbabilities, double[][] categoryPatternLikelihoodStore ); } }pal-1.5.1/src/pal/eval/makefile0000644000000000000000000000123707275407432014777 0ustar rootroot### VARIABLES ### JIKESOPTS := +P GCJOPTS := # Always check dependencies JIKESOPTS += +M SRC := $(wildcard *.java) CLS := $(patsubst %.java, %.class, $(SRC)) OBJ := $(patsubst %.class, %.o, $(wildcard *.class)) ### RULES ### # Compile Java sources into class files %.class: %.java jikes $(JIKESOPTS) $< # Alternative to using jikes: gcj -C # Compile class files into native code %.o: %.class gcj -c $(GCJOPTS) $< ### TARGETS ### .PHONY: classes native clean classes: $(CLS) ifneq ($(OBJ),) native: $(OBJ) (ar -rv package.a *.o; ranlib package.a) else native: $(OBJ) endif clean: rm -f *.class *.o *.u *.a *~ pal-1.5.1/src/pal/eval/LikelihoodOptimiser.java0000644000000000000000000003063110001456430020100 0ustar rootroot// LikelihoodOptimiser.java // // (c) 1999-2004 PAL Development Core Team // // This package may be distributed under the // terms of the Lesser GNU General Public License (LGPL) package pal.eval; /** *

Title: LikelihoodOptimiser

*

Description: A tool for optimising the likelihood over tree and substitution model parameters
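For example, the combined strategy can be driven roughly as in the sketch below; the tree, alignment and model are assumed to exist already, and the 6/6 precision arguments simply mirror the values used by LikelihoodTool elsewhere in this package.

// Sketch (fragment): maximise the log likelihood over tree branch lengths and model
// parameters simultaneously, using an orthogonal line search.
double maximiseLogLikelihood(pal.tree.ParameterizedTree tree,
                             pal.alignment.Alignment alignment,
                             pal.substmodel.SubstitutionModel model) {
  return LikelihoodOptimiser.optimiseCombined(
      tree, alignment, model, new pal.math.OrthogonalSearch(), 6, 6);
}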

* @author Matthew Goode * @version 1.0 */ import pal.misc.Parameterized; import pal.tree.Tree; import pal.tree.ParameterizedTree; import pal.alignment.Alignment; import pal.substmodel.SubstitutionModel; import pal.math.*; public class LikelihoodOptimiser { private final Function function_; private final Tree tree_; private final Alignment alignment_; private final SubstitutionModel model_; private double[] argumentStore_ = null; public LikelihoodOptimiser(Tree tree, Alignment alignment, SubstitutionModel model) { this.function_ = new Function(tree,alignment,model); this.tree_ = tree; this.alignment_ = alignment; this.model_ = model; } private final double[] setup(Parameterized parameters) { function_.setParameters(parameters); argumentStore_ = function_.getCurrentArgumentStore(argumentStore_); return argumentStore_; } public double optimiseLogLikelihood(Parameterized parameters, MultivariateMinimum minimiser, int fxFracDigits, int xFracDigits, MinimiserMonitor monitor) { double result = -minimiser.findMinimum(function_,setup(parameters),fxFracDigits, xFracDigits,monitor); return result; } public double optimiseLogLikelihood(Parameterized parameters, MultivariateMinimum minimiser, int fxFracDigits, int xFracDigits) { double result = -minimiser.findMinimum(function_,setup(parameters),fxFracDigits, xFracDigits); return result; } /** * Optimise parameters to acheive maximum likelihood using a combined stategy. That is, model and tree are optimised concurrently. * @param tree The tree to be optimised (will be altered by optimisation) * @param alignment The alignment related to tree * @param model The substitution model to be optimised (will be altered by optimisation) * @param fxFracDigits The number of decimal placess to stabilise to in the log likelihood * @param xFracDigits The number of decimal placess to stabilise to in the model/tree parameters * @param minimiser The MultivariateMinimum object that is used for minimising * @param monitor A minimiser monitor to monitor progress * @return The maximal log likelihood found */ public static final double optimiseCombined(ParameterizedTree tree, Alignment alignment, SubstitutionModel model, MultivariateMinimum minimiser, int fxFracDigits, int xFracDigits, MinimiserMonitor monitor) { final LikelihoodOptimiser lo = new LikelihoodOptimiser(tree,alignment,model); return lo.optimiseLogLikelihood(Parameterized.Utils.combine(tree,model),minimiser,fxFracDigits,xFracDigits,monitor); } /** * Optimise parameters to acheive maximum likelihood using a combined stategy. That is, model and tree are optimised concurrently. 
* @param tree The tree to be optimised (will be altered by optimisation) * @param alignment The alignment related to tree * @param model The substitution model to be optimised (will be altered by optimisation) * @param fxFracDigits The number of decimal placess to stabilise to in the log likelihood * @param xFracDigits The number of decimal placess to stabilise to in the model/tree parameters * @param minimiser The MultivariateMinimum object that is used for minimising * @return The maximal log likelihood found */ public static final double optimiseCombined(ParameterizedTree tree, Alignment alignment, SubstitutionModel model, MultivariateMinimum minimiser, int fxFracDigits, int xFracDigits) { final LikelihoodOptimiser lo = new LikelihoodOptimiser(tree,alignment,model); return lo.optimiseLogLikelihood(Parameterized.Utils.combine(tree,model),minimiser,fxFracDigits,xFracDigits); } /** * Optimise parameters to acheive maximum likelihood using an alternating stategy. That is first the model is optimised, than the tree branch lengths, then the model, then the tree, and so on until convergence. * @param tree The tree to be optimised (will be altered by optimisation) * @param alignment The alignment related to tree * @param model The substitution model to be optimised (will be altered by optimisation) * @param fxFracDigits The number of decimal placess to stabilise to in the log likelihood * @param xFracDigits The number of decimal placess to stabilise to in the model/tree parameters * @param minimiser The MultivariateMinimum object that is used for minimising * @return The maximal log likelihood found */ public static final double optimiseAlternate(ParameterizedTree tree, Alignment alignment, SubstitutionModel model, MultivariateMinimum minimiser, int fxFracDigits, int xFracDigits) { return optimiseAlternate(tree,alignment,model,minimiser,fxFracDigits,xFracDigits,null); } /** * Optimise parameters to acheive maximum likelihood using an alternating stategy. That is first the model is optimised, than the tree branch lengths, then the model, then the tree, and so on until convergence. * @param tree The tree to be optimised (will be altered by optimisation) * @param alignment The alignment related to tree * @param model The substitution model to be optimised (will be altered by optimisation) * @param fxFracDigits The number of decimal placess to stabilise to in the log likelihood * @param xFracDigits The number of decimal placess to stabilise to in the model/tree parameters * @param minimiser The MultivariateMinimum object that is used for minimising * @param monitor A minimiser monitor to monitor progress * @return The maximal log likelihood found */ public static final double optimiseAlternate(ParameterizedTree tree, Alignment alignment, SubstitutionModel model, MultivariateMinimum minimiser, int fxFracDigits, int xFracDigits, MinimiserMonitor monitor) { final LikelihoodOptimiser lo = new LikelihoodOptimiser(tree,alignment,model); double epsilon = generateEpsilon(fxFracDigits); double lastResult = 0; double result = 0; int round = 0; while(true) { //Optimise Model result = optimise(lo, model,minimiser,fxFracDigits,xFracDigits,monitor); //Optimiser tree result = optimise(lo, tree,minimiser,fxFracDigits,xFracDigits,monitor); if(round>0) { if( lastResult>result ) { break; } if( result-lastResultTitle: ConditionalProbabilityStore (was ConditionalLikelihoodStore)

*

Description: A container class for various bits of data relating to the conditional likelihood. Things stored include the conditional likelihood, any scale factors, and whether the current conditional likelihoods were created from cached data.
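A small sketch of the intended access pattern follows; the sizes are placeholders, the constructor and two-argument accessor are the ones used elsewhere in this package, and the store is indexed as [category][pattern][state].

// Sketch (fragment): allocate a store and fill in flat conditionals for some patterns.
ConditionalProbabilityStore store =
    new ConditionalProbabilityStore(numberOfCategories, numberOfStates);
double[][][] conditionals = store.getConditionalProbabilityAccess(numberOfPatterns, false);
for (int cat = 0; cat < numberOfCategories; cat++) {
  for (int pattern = 0; pattern < numberOfPatterns; pattern++) {
    for (int state = 0; state < numberOfStates; state++) {
      // e.g. all ones would represent a completely uninformative (gap) pattern
      conditionals[cat][pattern][state] = 1.0;
    }
  }
}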

* @author Matthew Goode * @version 1.0 */ public final class ConditionalProbabilityStore implements java.io.Serializable { private final int numberOfCategories_; private final int numberOfStates_; private final double[][][] store_; private int patternCapacity_; private final ExtraProcessor extraProcessor_; private boolean isBasedOnCachedData_ = false; private boolean fix_ = false; private ConditionalProbabilityStore(ConditionalProbabilityStore toCopy) { this.numberOfCategories_ = toCopy.numberOfCategories_; this.numberOfStates_ = toCopy.numberOfStates_; this.store_ = pal.misc.Utils.getCopy(toCopy.store_); this.patternCapacity_ = toCopy.patternCapacity_; this.extraProcessor_ = (toCopy.extraProcessor_ == null ? null : toCopy.extraProcessor_.getCopy()); this.isBasedOnCachedData_ = toCopy.isBasedOnCachedData_; this.fix_ = toCopy.fix_; } public ConditionalProbabilityStore(int numberOfCategories, int numberOfStates) { this(numberOfCategories,numberOfStates,null); } public ConditionalProbabilityStore(int numberOfCategories, int numberOfStates, ExtraProcessor extraProcessor) { this.numberOfCategories_ = numberOfCategories; this.numberOfStates_ = numberOfStates; this.store_ = new double[numberOfCategories][][]; this.patternCapacity_ = 0; this.extraProcessor_ = extraProcessor; if(extraProcessor_!=null) { this.extraProcessor_.setParent(this); this.extraProcessor_.setNewNumberOfPatterns( 0 ); } } /** * Cloning * @return a copy of this conditional probability store */ public final ConditionalProbabilityStore getCopy() { return new ConditionalProbabilityStore(this); } public final boolean isHasExtraProcessor() { return extraProcessor_!=null; } public final ExtraProcessor getExtraProcessor() { return extraProcessor_; } /** * Will check the current allocation to see if it can accomodate the requested number * of patterns. If not, new arrays are allocated and old data is lost. * @param numberOfPatterns */ private final void ensureSize(int numberOfPatterns, boolean createStateArray) { if(numberOfPatterns>patternCapacity_) { if(fix_) { throw new IllegalArgumentException("Cannot resize to accomodate "+numberOfPatterns+" patterns (store has been fixed)"); } if(createStateArray) { for( int i = 0; ipatternCapacity_) { throw new IllegalArgumentException("Cannot provided for requested number of patterns. Asked for "+numberOfPatterns+" can only give "+patternCapacity_); } this.isBasedOnCachedData_ = resultsBasedOnCachedData; return store_; } /** * Use this when access the internal conditional likelihood store for the purpose * of changing the contents. * The state arrays will not be created. * @param numberOfPatterns An indication of how much space will be required. The result will always be big enough to accomodate the requested number of patterns. * @param resultsBasedOnCachedData An indication of whether the new conditionals about to be stored are based on cached data * @return */ public double[][][] getIncompleteConditionalProbabilityAccess(int numberOfPatterns, boolean resultsBasedOnCachedData, boolean fix) { ensureSize(numberOfPatterns,false); this.isBasedOnCachedData_ = resultsBasedOnCachedData; this.fix_ = fix; return store_; } public double calculateLogLikelihood(double[] categoryProbabilities, double[] equilibriumFrequencies, int[] patternWeights, int numberOfPatterns) { double logLikelihood = 0; for( int pattern = 0; patternTitle: FastFourStateLHCalculator

*

Description: A simpler LHCalculator with unrolled loops for four-state data types (e.g. Nucleotides)
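The unrolling can be pictured with tiny helpers along the lines of the dotProduct4 calls used by the four-state node code earlier in this package; the bodies below are an illustrative sketch, not this class's actual private methods.

// Four-state dot product with the state loop unrolled by hand.
static double dotProduct4(double[] a, double[] b) {
  return a[0]*b[0] + a[1]*b[1] + a[2]*b[2] + a[3]*b[3];
}
// Three-vector form, as used when combining equilibrium frequencies with left and
// right conditionals at the root.
static double dotProduct4(double[] a, double[] b, double[] c) {
  return a[0]*b[0]*c[0] + a[1]*b[1]*c[1] + a[2]*b[2]*c[2] + a[3]*b[3]*c[3];
}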

* @author Matthew Goode * @version 1.0 */ import pal.datatype.*; import pal.substmodel.*; public final class FastFourStateLHCalculator implements LHCalculator { private final static int FOUR_STATES = 4; private final static void calculateSingleExtendedIndirectImpl( double distance, SubstitutionModel model, int numberOfPatterns, ConditionalProbabilityStore baseConditionalProbabilities, ConditionalProbabilityStore resultConditionalProbabilities, double[][][] transitionProbabilityStore, int numberOfCategories ) { model.getTransitionProbabilities( distance, transitionProbabilityStore ); double[][][] baseStoreValues = baseConditionalProbabilities.getCurrentConditionalProbabilities(); double[][][] resultStoreValues = baseConditionalProbabilities.getConditionalProbabilityAccess(numberOfPatterns,false); for( int category = 0; categoryTitle:

*

Description:

*

Copyright: Copyright (c) 2003

*

Company:

* @author not attributable * @version 1.0 */ import pal.datatype.*; import pal.substmodel.*; public class SimpleLHCalculator implements LHCalculator { private static final SimpleFactory FACTORY_INSTANCE = new SimpleFactory(); private final static void calculateSingleExtendedIndirectImpl( double distance, SubstitutionModel model, int numberOfPatterns, ConditionalProbabilityStore baseConditionalProbabilities, ConditionalProbabilityStore resultConditionalProbabilities, double[][][] transitionProbabilityStore, int numberOfCategories, int numberOfStates ) { model.getTransitionProbabilities( distance, transitionProbabilityStore ); double[][][] resultStoreValues = resultConditionalProbabilities.getConditionalProbabilityAccess( numberOfPatterns, false ); double[][][] baseStoreValues = baseConditionalProbabilities.getCurrentConditionalProbabilities(); for( int category = 0; categoryTitle: MolecularClockLikelihoodModel

*

Description: An interface to objects that can be used for calculating likelihood estimates when a molecular clock is assumed (and therefore knowledge of the relative temporal order of events is available)
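As a minimal sketch of the HeightConverter contract declared towards the end of this interface, a strict-clock converter might look like the following; the class name and rate field are illustrative and not part of PAL.

// Illustrative only: node heights are converted to expected-substitution units by a
// single constant rate, satisfying MolecularClockLikelihoodModel.HeightConverter.
public class ConstantRateHeightConverter implements MolecularClockLikelihoodModel.HeightConverter {
  private final double rate; // expected substitutions per unit of tree height (assumed)
  public ConstantRateHeightConverter(double rate) { this.rate = rate; }
  public double getExpectedSubstitutionHeight(double baseHeight) {
    return rate * baseHeight;
  }
  public double getExpectedSubstitutionDistance(double lower, double upper) {
    return rate * (upper - lower);
  }
}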

* @author Matthew Goode * @version 1.0 *

History
*

    *
  • 27/5/2004 Created
*

*/ import pal.misc.*; public interface MolecularClockLikelihoodModel { /** * The External calculator does not maintain any state and is approapriate for * calculation where a store is provided */ public static interface External { public void calculateSingleDescendentExtendedConditionals( double topBaseHeight, double bottomBaseHeight, PatternInfo centerPattern, ConditionalProbabilityStore descendentConditionalProbabilities ); /** */ public void calculateSingleAscendentExtendedConditionalsDirect( double topBaseHeight, double bottomBaseHeight, PatternInfo centerPattern, ConditionalProbabilityStore ascendentConditionalProbabilityProbabilties ); /** */ public void calculateSingleAscendentExtendedConditionalsIndirect( double topBaseHeight, double bottomBaseHeight, PatternInfo centerPattern, ConditionalProbabilityStore baseAscendentConditionalProbabilityProbabilties, ConditionalProbabilityStore resultConditionalProbabilityProbabilties ); /** * * @param distance * @param model * @param patternLookup * @param numberOfPatterns * @param leftConditionalProbabilityProbabilties Implementations must not overwrite or change * @param rightConditionalProbabilityProbabilties Implementations must not overwrite or change * @param resultStore Where to stick the created categoryPatternState information * @return true if built on cached information * @note calls to getLastConditionalProbabilities() does not have to be valid after call this method */ public void calculateExtendedConditionals( double topBaseHeight, double bottomBaseHeight, PatternInfo centerPattern, ConditionalProbabilityStore leftConditionalProbabilities, ConditionalProbabilityStore rightConditionalProbabilities, ConditionalProbabilityStore resultStore ); /** * Calculate the likelihood given two sub trees (left, right) and their extended likeihood probabilities * @param rootHeight the height of the likelihood calculation * @param leftConditionalProbabilities Assumed to be extended to the rootHeight * @param rightConditionalProbabilities Assumed to be extended to the rootHeight * @return the Log likelihood */ public double calculateLogLikelihood(double rootHeight, PatternInfo centerPattern, ConditionalProbabilityStore leftConditionalProbabilitiesStore, ConditionalProbabilityStore rightConditionalProbabilitiesStore ); /** * Calculate the likelihood given a non root node * @param nodeHeight the height of node doing the likelihood calculation * @param centerPatter assumed left is ascendent component, right is descendent * @param ascendentConditionalProbabilities Assumed to be extended (downwards) to the nodeHeight * @param descendentConditionalProbabilities Assumed to be extended (upwards) to the nodeHeight * @return the Log likelihood */ public double calculateLogLikelihoodNonRoot(double nodeHeight, PatternInfo centerPattern, ConditionalProbabilityStore ascendentConditionalProbabilitiesStore, ConditionalProbabilityStore descendentConditionalProbabilitiesStore ); public double calculateLogLikelihoodSingle( double rootHeight, PatternInfo centerPattern, ConditionalProbabilityStore conditionalProbabilitiesStore ); public SiteDetails calculateSiteDetails( double rootHeight, PatternInfo centerPattern, ConditionalProbabilityStore leftConditionalProbabilitiesStore, ConditionalProbabilityStore rightConditionalProbabilitiesStore ); public void calculateFlatConditionals( double rootHeight, PatternInfo centerPattern, ConditionalProbabilityStore leftConditionalProbabilitiesStore, ConditionalProbabilityStore rightConditionalProbabilitiesStore, 
ConditionalProbabilityStore resultConditionalProbabilitiesStore ); } //End of class External /** * The Internal calculator may maintain state and is approapriate permanent attachment * to internal nodes of the tree structure */ public static interface Internal { /** * * @param patternLookup * @param numberOfPatterns * @param leftConditionalProbabilityProbabilties Implementations should be allowed to overwrite in certain cases * @param rightConditionalProbabilityProbabilties Implementations should be allowed to overwrite in certain cases * @return true if result build on cached information * @note An assumption may be made that after a call to this method the leftConditionals and rightConditionals are not used again! */ public ConditionalProbabilityStore calculateExtendedConditionals( double topBaseHeight, double bottomBaseHeight, PatternInfo centerPattern, ConditionalProbabilityStore leftConditionalProbabilityProbabilties, ConditionalProbabilityStore rightConditionalProbabilityProbabilties ); /** * Extends left and right conditionals by type and then calculates flat conditionals * @param patternLookup * @param numberOfPatterns * @param leftConditionalProbabilityProbabilties Implementations should be allowed to overwrite in certain cases * @param rightConditionalProbabilityProbabilties Implementations should be allowed to overwrite in certain cases * @return true if result build on cached information * @note An assumption may be made that after a call to this method the leftConditionals and rightConditionals are not used again! */ public ConditionalProbabilityStore calculatePostExtendedFlatConditionals( double topBaseHeight, double bottomBaseHeight, PatternInfo centerPattern, ConditionalProbabilityStore leftConditionalProbabilityProbabilties, ConditionalProbabilityStore rightConditionalProbabilityProbabilties ); /** */ public ConditionalProbabilityStore calculateAscendentExtendedConditionals( double topBaseHeight, double bottomBaseHeight, PatternInfo centerPattern, ConditionalProbabilityStore ascendentConditionalProbabilityProbabilties, ConditionalProbabilityStore otherConditionalProbabilityProbabilties ); /** */ public ConditionalProbabilityStore calculateAscendentFlatConditionals( PatternInfo centerPattern, ConditionalProbabilityStore ascenedentConditionalProbabilityProbabilties, ConditionalProbabilityStore otherConditionalProbabilityProbabilties ); /** * * @param centerPattern the center pattern info * @param leftConditionalProbabilityProbabilties Implementations should be allowed to overwrite in certain cases * @param rightConditionalProbabilityProbabilties Implementations should be allowed to overwrite in certain cases * @return true if result build on cached information * @note An assumption may be made that after a call to this method the leftConditionals and rightConditionals are not used again! */ public ConditionalProbabilityStore calculateFlatConditionals( PatternInfo centerPattern, ConditionalProbabilityStore leftConditionalProbabilityProbabilties, ConditionalProbabilityStore rightConditionalProbabilityProbabilties ); } //End of Internal /** * A ConstrainedLHCalculator.Leaf object is attached to each leaf node and can be used to calculated conditional probabilities across the related branch. 
* Allows for quick implementations as well as implementations that cope correctly with ambiguous characters */ public static interface Leaf { public ConditionalProbabilityStore calculateExtendedConditionals(double topHeight, double bottomHeight); public ConditionalProbabilityStore calculateFlatConditionals(double height); } public static interface Simulator { public int[] getSimulated(int[] baseSequence, double topBaseHeight, double bottomBaseHeight); public void simulated(int[] baseSequence, double topBaseHeight, double bottomBaseHeight, int[] newSequence); public int[] generateRoot(double sampleHeight); public void resetDistributions(); } public static interface HeightConverter { public double getExpectedSubstitutionHeight(double baseHeight); public double getExpectedSubstitutionDistance(double lower, double upper); } public static interface Instance extends java.io.Serializable { public String getSubstitutionModelSummary(); public Leaf createNewLeaf(HeightConverter converter, PatternInfo pattern, int[] patternStateMatchup); public External createNewExternal(HeightConverter converter); // public Simulator createNewSimulator(int sequenceLength, boolean stochasticDistribution); public Internal createNewInternal(HeightConverter converter); public ConditionalProbabilityStore createAppropriateConditionalProbabilityStore( boolean isForLeaf ); public NeoParameterized getParameterAccess(); } }pal-1.5.1/src/pal/eval/SiteDetails.java0000644000000000000000000001530710141731202016334 0ustar rootroot// SiteDetails.java // // (c) 1999-2003 PAL Development Core Team // // This package may be distributed under the // terms of the Lesser GNU General Public License (LGPL) package pal.eval; /** *

Title: SiteDetails (was Posteriors)

*

Description: Access to site-based information that is calculated after ML optimisation
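For instance, a caller might consume a SiteDetails object roughly as sketched below; the details argument is assumed to come from one of the calculateSiteDetails methods elsewhere in this package, and only the SiteDetails accessors declared in this file are used.

// Sketch (fragment): summarise per-site results after an ML fit.
void report(SiteDetails details) {
  double[] siteLogLikelihoods = details.getSiteLogLikelihoods();
  double total = 0;
  for (int site = 0; site < siteLogLikelihoods.length; site++) {
    total += siteLogLikelihoods[site];
    // Per-category posteriors for this site (e.g. for rate-category assignment).
    double[] posteriors = details.getSitePosteriors(site);
  }
  System.out.println("Total log likelihood: " + total);
}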

* @author Matthew Goode * @version 1.0 */ import pal.substmodel.*; public interface SiteDetails { public SubstitutionModel getRelatedModel(); public double[] getSitePosteriors(int site); /** * Get the likelihoods for each site (not the log likelihoods) * @return a double array of likelihoods matching each site */ public double[] getSiteLikelihoods(); /** * Get the likelihoods for each site (logged) * @return a double array of log likelihoods matching each site */ public double[] getSiteLogLikelihoods(); // -=-=-==--==- public static final class Utils { /** * Create a Postriors object * @param categoryPatternConditionalProbabilities An array arranged [category][pattern] that holds the conditional probabilities for each category at each site * @param isLoggedConditionals should be true if the conditional probabilities are stored as logged values, false if not * @param model the related substitution models * @param numberOfPatterns the number of patterns * @param sitePatternMatchup an array that identifies what pattern is to used at which site * @param numberOfSites the number of sites * @param siteLikelihoods the site likelihood (unlogged) at each site * @return an appropriate Posteriors object */ public static final SiteDetails create(double[][] categoryPatternConditionalProbabilities, boolean isLoggedConditionals, SubstitutionModel model, int numberOfPatterns, int[] sitePatternMatchup, int numberOfSites, double[] siteLikelihoods) { return new SimpleSiteDetails(categoryPatternConditionalProbabilities,isLoggedConditionals, model, numberOfPatterns, sitePatternMatchup,numberOfSites,siteLikelihoods); } /** * Create a Postriors object with no related substitution model * @param categoryPatternConditionalProbabilities An array arranged [category][pattern] that holds the conditional probabilities for each category at each site * @param isLoggedConditionals should be true if the conditional probabilities are stored as logged values, false if not * @param numberOfPatterns the number of patterns * @param sitePatternMatchup an array that identifies what pattern is to used at which site * @param numberOfSites the number of sites * @param siteLikelihoods the site likelihood (unlogged) at each site * @return an appropriate Posteriors object */ public static final SiteDetails create(double[][] categoryPatternConditionalProbabilities, boolean isLoggedConditionals,int numberOfPatterns, int[] sitePatternMatchup, int numberOfSites, double[] siteLikelihoods) { return new SimpleSiteDetails(categoryPatternConditionalProbabilities,isLoggedConditionals, numberOfPatterns, sitePatternMatchup,numberOfSites,siteLikelihoods); } //-=-=-=-=-= private final static class SimpleSiteDetails implements SiteDetails { private final double[][] categoryPatternConditionalProbabilities_; private final double[][] patternPosteriors_; private final double[] siteLikelihoods_; private final double[] siteLogLikelihoods_; private final int[] sitePatternMatchup_; private final SubstitutionModel model_; private final int numberOfSites_; public SimpleSiteDetails( double[][] categoryPatternConditionalProbabilities, boolean isLoggedConditionals, int numberOfPatterns, int[] sitePatternMatchup, int numberOfSites, double[] siteLikelihoods ) { this(categoryPatternConditionalProbabilities,isLoggedConditionals, null,numberOfPatterns,sitePatternMatchup, numberOfSites, siteLikelihoods); } public SimpleSiteDetails( double[][] categoryPatternConditionalProbabilities, boolean isLoggedConditionals, SubstitutionModel model, int numberOfPatterns, int[] 
sitePatternMatchup, int numberOfSites, double[] siteLikelihoods ) { final int numberOfCategories = model.getNumberOfTransitionCategories(); this.siteLikelihoods_ = pal.misc.Utils.getCopy(siteLikelihoods); this.siteLogLikelihoods_ = new double[numberOfSites]; double llh = 0; for(int i = 0 ; i < numberOfSites ; i++) { this.siteLogLikelihoods_[i] = Math.log(this.siteLikelihoods_[i]); llh+=this.siteLogLikelihoods_[i]; } System.out.println("Total:"+llh); if( isLoggedConditionals ) { this.categoryPatternConditionalProbabilities_ = convertLogged( categoryPatternConditionalProbabilities, numberOfCategories, numberOfPatterns ); } else { this.categoryPatternConditionalProbabilities_ = pal.misc.Utils.getCopy( categoryPatternConditionalProbabilities ); } this.numberOfSites_ = numberOfSites; this.sitePatternMatchup_ = pal.misc.Utils.getCopy( sitePatternMatchup ); this.patternPosteriors_ = new double[numberOfPatterns][numberOfCategories]; for( int p = 0; p Classes for evaluating evolutionary hypothesis (chi-square and likelihood criteria) and estimating model parameters. pal-1.5.1/src/pal/eval/SimpleModelFastFourStateLHCalculator.java0000644000000000000000000006634510141732650023265 0ustar rootroot// SimpleModleFastFourStateLHCalculator.java // // (c) 1999-2003 PAL Development Core Team // // This package may be distributed under the // terms of the Lesser GNU General Public License (LGPL) package pal.eval; /** *

Title:

*

Description:

*

Copyright: Copyright (c) 2003

*

Company:

* @author not attributable * @version 1.0 */ import pal.datatype.*; import pal.substmodel.*; public class SimpleModelFastFourStateLHCalculator implements LHCalculator { private final static int FOUR_STATES = 4; private final static int ONE_CATEGORY = 1; private static final void calculateSingleExtendedIndirectImpl( double distance, SubstitutionModel model, int numberOfPatterns, ConditionalProbabilityStore baseConditionalProbabilities, ConditionalProbabilityStore resultConditionalProbabilities, double[][] transitionProbabilityStore ) { model.getTransitionProbabilities( distance, 0, transitionProbabilityStore ); double[][][] baseStoreValues = baseConditionalProbabilities.getCurrentConditionalProbabilities(); double[][][] resultStoreValues = baseConditionalProbabilities.getConditionalProbabilityAccess(numberOfPatterns,false); final double[][] basePatternStateProbabilities = baseStoreValues[0]; final double[][] resultPatternStateProbabilities = resultStoreValues[0]; for( int pattern = 0; patternTitle: SimpleMolecularClockLikelihoodModel

*

Description:

* @author Matthew Goode * @version 1.0 */ import pal.datatype.*; import pal.misc.*; import pal.substmodel.*; public class SimpleMolecularClockLikelihoodModel implements MolecularClockLikelihoodModel { private static final class InternalImpl implements Internal { private final LHCalculator.Internal base_; private final SubstitutionModel model_; private final HeightConverter heightConverter_; public InternalImpl(LHCalculator.Internal base, SubstitutionModel model, HeightConverter heightConverter) { this.base_ = base; this.model_ = model; this.heightConverter_ = heightConverter; } /** * Extends left and right conditionals by type and then calculates flat conditionals * @param patternLookup * @param numberOfPatterns * @param leftConditionalProbabilityProbabilties Implementations should be allowed to overwrite in certain cases * @param rightConditionalProbabilityProbabilties Implementations should be allowed to overwrite in certain cases * @return true if result build on cached information * @note An assumption may be made that after a call to this method the leftConditionals and rightConditionals are not used again! */ public ConditionalProbabilityStore calculatePostExtendedFlatConditionals( double topBaseHeight, double bottomBaseHeight, PatternInfo centerPattern, ConditionalProbabilityStore leftConditionalProbabilityProbabilties, ConditionalProbabilityStore rightConditionalProbabilityProbabilties ) { return base_.calculatePostExtendedFlat(heightConverter_.getExpectedSubstitutionDistance(bottomBaseHeight,topBaseHeight), model_,centerPattern,leftConditionalProbabilityProbabilties,rightConditionalProbabilityProbabilties,true); } public ConditionalProbabilityStore calculateExtendedConditionals( final double topBaseHeight, final double bottomBaseHeight, final PatternInfo centerPattern, final ConditionalProbabilityStore leftConditionalProbabilityProbabilties, final ConditionalProbabilityStore rightConditionalProbabilityProbabilties ) { return base_.calculateExtended(heightConverter_.getExpectedSubstitutionDistance(bottomBaseHeight,topBaseHeight), model_,centerPattern,leftConditionalProbabilityProbabilties,rightConditionalProbabilityProbabilties,true); } public ConditionalProbabilityStore calculateAscendentExtendedConditionals( double topBaseHeight, double bottomBaseHeight, PatternInfo centerPattern, ConditionalProbabilityStore ascenedentConditionalProbabilityProbabilties, ConditionalProbabilityStore otherConditionalProbabilityProbabilties ) { return base_.calculateExtended(heightConverter_.getExpectedSubstitutionDistance(bottomBaseHeight,topBaseHeight), model_,centerPattern,ascenedentConditionalProbabilityProbabilties,otherConditionalProbabilityProbabilties,true); } public ConditionalProbabilityStore calculateAscendentFlatConditionals( PatternInfo centerPattern, ConditionalProbabilityStore ascenedentConditionalProbabilityProbabilties, ConditionalProbabilityStore otherConditionalProbabilityProbabilties ) { return base_.calculateFlat(centerPattern,ascenedentConditionalProbabilityProbabilties,otherConditionalProbabilityProbabilties); } public ConditionalProbabilityStore calculateFlatConditionals( final PatternInfo centerPattern, final ConditionalProbabilityStore leftConditionalProbabilityProbabilties, final ConditionalProbabilityStore rightConditionalProbabilityProbabilties ) { return base_.calculateFlat(centerPattern,leftConditionalProbabilityProbabilties,rightConditionalProbabilityProbabilties); } } // -=-=--==-=-=-=---=-==-=--==-=-=-=- private static final class ExternalImpl implements External { 
private final LHCalculator.External base_; private final SubstitutionModel model_; private final HeightConverter heightConverter_; public ExternalImpl(LHCalculator.External base, SubstitutionModel model, HeightConverter heightConverter) { this.base_ = base; this.model_ = model; this.heightConverter_ = heightConverter; } public void calculateSingleExtendedConditionals(double topBaseHeight, double bottomBaseHeight, int numberOfPatterns, ConditionalProbabilityStore baseConditionalProbabilities, ConditionalProbabilityStore resultConditionalProbabilities) { base_.calculateSingleExtendedIndirect(heightConverter_.getExpectedSubstitutionDistance(bottomBaseHeight,topBaseHeight), model_,numberOfPatterns,baseConditionalProbabilities,resultConditionalProbabilities); } public void calculateSingleDescendentExtendedConditionals( double topBaseHeight, double bottomBaseHeight, PatternInfo centerPattern, ConditionalProbabilityStore descendentConditionalProbabilities ) { base_.calculateSingleExtendedDirect(heightConverter_.getExpectedSubstitutionDistance(bottomBaseHeight,topBaseHeight), model_,centerPattern.getNumberOfPatterns(),descendentConditionalProbabilities); } /** */ public void calculateSingleAscendentExtendedConditionalsDirect( double topBaseHeight, double bottomBaseHeight, PatternInfo centerPattern, ConditionalProbabilityStore ascendentConditionalProbabilityProbabilties ) { base_.calculateSingleExtendedDirect(heightConverter_.getExpectedSubstitutionDistance(bottomBaseHeight,topBaseHeight), model_,centerPattern.getNumberOfPatterns(),ascendentConditionalProbabilityProbabilties); } /** */ public void calculateSingleAscendentExtendedConditionalsIndirect( double topBaseHeight, double bottomBaseHeight, PatternInfo centerPattern, ConditionalProbabilityStore baseAscendentConditionalProbabilityProbabilties, ConditionalProbabilityStore resultConditionalProbabilityProbabilties ) { base_.calculateSingleExtendedIndirect(heightConverter_.getExpectedSubstitutionDistance(bottomBaseHeight,topBaseHeight), model_,centerPattern.getNumberOfPatterns(),baseAscendentConditionalProbabilityProbabilties,resultConditionalProbabilityProbabilties); } public void calculateExtendedConditionals( double topBaseHeight, double bottomBaseHeight, PatternInfo centerPattern, ConditionalProbabilityStore leftConditionalProbabilities, ConditionalProbabilityStore rightConditionalProbabilities, ConditionalProbabilityStore resultStore ) { base_.calculateExtended(heightConverter_.getExpectedSubstitutionDistance(bottomBaseHeight,topBaseHeight), model_,centerPattern,leftConditionalProbabilities,rightConditionalProbabilities,resultStore); } /** * Calculate the likelihood given a non root node * @param nodeHeight the height of node doing the likelihood calculation * @param centerPatter assumed left is ascendent component, right is descendent * @param ascendentConditionalProbabilities Assumed to be extended (downwards) to the nodeHeight * @param descendentConditionalProbabilities Assumed to be extended (upwards) to the nodeHeight * @return the Log likelihood */ public double calculateLogLikelihoodNonRoot( double nodeHeight, PatternInfo centerPattern, ConditionalProbabilityStore ascendentConditionalProbabilitiesStore, ConditionalProbabilityStore descendentConditionalProbabilitiesStore ) { return base_.calculateLogLikelihood(model_,centerPattern,ascendentConditionalProbabilitiesStore,descendentConditionalProbabilitiesStore); } /** * Calculate the likelihood given two sub trees (left, right) and their extended likeihood probabilities * @param 
rootHeight the height of the likelihood calculation * @param leftConditionalProbabilities Assumed to be extended to the rootHeight * @param rightConditionalProbabilities Assumed to be extended to the rootHeight * @return the Log likelihood */ public double calculateLogLikelihood( double rootHeight, PatternInfo centerPattern, ConditionalProbabilityStore leftConditionalProbabilitiesStore, ConditionalProbabilityStore rightConditionalProbabilitiesStore ) { return base_.calculateLogLikelihood(model_,centerPattern,leftConditionalProbabilitiesStore,rightConditionalProbabilitiesStore); } public double calculateLogLikelihoodSingle( double rootHeight, PatternInfo centerPattern, ConditionalProbabilityStore conditionalProbabilitiesStore ) { return base_.calculateLogLikelihoodSingle(model_,centerPattern.getPatternWeights(),centerPattern.getNumberOfPatterns(),conditionalProbabilitiesStore); } public void calculateFlatConditionals( double rootHeight, PatternInfo centerPattern, ConditionalProbabilityStore leftConditionalProbabilitiesStore, ConditionalProbabilityStore rightConditionalProbabilitiesStore, ConditionalProbabilityStore resultConditionalProbabilitiesStore) { base_.calculateFlat(centerPattern,leftConditionalProbabilitiesStore,rightConditionalProbabilitiesStore,resultConditionalProbabilitiesStore); } public SiteDetails calculateSiteDetails( double rootHeight, PatternInfo centerPattern, ConditionalProbabilityStore leftConditionalProbabilitiesStore, ConditionalProbabilityStore rightConditionalProbabilitiesStore ) { return base_.calculateSiteDetailsRooted(model_,centerPattern, leftConditionalProbabilitiesStore,rightConditionalProbabilitiesStore); } } // -=-=--==-=-=-=---=-==-=--==-=-=-=- private static final class LeafImpl implements Leaf { private final LHCalculator.Leaf base_; private final SubstitutionModel model_; private final HeightConverter heightConverter_; public LeafImpl(LHCalculator.Leaf base, SubstitutionModel model, HeightConverter heightConverter) { this.base_ = base; this.model_ = model; this.heightConverter_ = heightConverter; } public ConditionalProbabilityStore calculateExtendedConditionals(double topBaseHeight, double bottomBaseHeight) { return base_.getExtendedConditionalProbabilities(heightConverter_.getExpectedSubstitutionDistance(bottomBaseHeight,topBaseHeight),model_,true); } public ConditionalProbabilityStore calculateFlatConditionals(double relatedHeight) { return base_.getFlatConditionalProbabilities(); } } // -=-=--==-=-=-=---=-==-=--==-=-=-=- // private final static class SimulatorImpl implements Simulator{ // private final SequenceSimulator simulator_; // private final boolean stochasticDistribution_; // public SimulatorImpl(SubstitutionModel model, int sequenceLength, boolean stochasticDistribution) { // this.simulator_ = new SequenceSimulator(model,sequenceLength,stochasticDistribution); // this.stochasticDistribution_ = stochasticDistribution; // } // public int[] getSimulated(int[] baseSequence, double topBaseHeight, double bottomBaseHeight) { // return simulator_.getSimulated(baseSequence,topTime-bottomTime); // } // public void simulated(int[] baseSequence, double topBaseHeight, double bottomBaseHeight, int[] newSequence) { // simulator_.simulate(baseSequence,topTime-bottomTime,newSequence); // } // public int[] generateRoot(double rootHeight) { // return simulator_.generateRoot(); // } // public void resetDistributions() { // simulator_.resetSiteCategoryDistribution(stochasticDistribution_); // } // } //End of class SimulatorImpl // 
-=-=--==-=-=-=---=-==-=--==-=-=-=- public static final Instance createInstance(LHCalculator.Factory baseFactory, SubstitutionModel model) { int numberOfCategories = model.getNumberOfTransitionCategories(); DataType dt = model.getDataType(); return new SimpleInstance(model, baseFactory.createSeries( numberOfCategories, dt )); } public static final Instance createInstance(SubstitutionModel model) { return createInstance(SimpleLHCalculator.getFactory(), model); } // -=-=--==-=-=-=---=-==-=--==-=-=-=- private static final class SimpleInstance implements Instance { private int numberOfCategories_; private LHCalculator.Generator baseGenerator_; private SubstitutionModel substitutionModel_; private NeoParameterized parameterAccess_; // private static final // // Serialization Code // private static final long serialVersionUID = 2661663212643526344L; private void writeObject( java.io.ObjectOutputStream out ) throws java.io.IOException { out.writeByte( 1 ); //Version number out.writeInt( numberOfCategories_ ); out.writeObject( baseGenerator_ ); out.writeObject( substitutionModel_ ); } private void readObject( java.io.ObjectInputStream in ) throws java.io.IOException, ClassNotFoundException { byte version = in.readByte(); switch( version ) { default: { this.numberOfCategories_ = in.readInt(); this.baseGenerator_ = (LHCalculator.Generator)in.readObject(); this.substitutionModel_ = (SubstitutionModel)in.readObject(); break; } } this.parameterAccess_ = new ParameterizedNeoWrapper(substitutionModel_); } public SimpleInstance( SubstitutionModel sm, LHCalculator.Generator baseGenerator) { this.numberOfCategories_ = sm.getNumberOfTransitionCategories(); this.substitutionModel_ = sm; this.baseGenerator_ = baseGenerator; this.parameterAccess_ = new ParameterizedNeoWrapper(substitutionModel_); } public Parameterized getSubstitutionModelParameterAccess() { return substitutionModel_; } public boolean hasSubstitutionModelParameters(){ return substitutionModel_.getNumParameters()!=0; } public Leaf createNewLeaf(HeightConverter heightConverter, PatternInfo pattern, int[] patternStateMatchup) { return new LeafImpl( baseGenerator_.createNewLeaf(patternStateMatchup,pattern.getNumberOfPatterns()),substitutionModel_, heightConverter ); } public External createNewExternal(HeightConverter heightConverter) { return new ExternalImpl( baseGenerator_.createNewExternal(), substitutionModel_, heightConverter ); } public Internal createNewInternal(HeightConverter heightConverter) { return new InternalImpl( baseGenerator_.createNewInternal(), substitutionModel_, heightConverter ); } public ConditionalProbabilityStore createAppropriateConditionalProbabilityStore( boolean isForLeaf ) { return baseGenerator_.createAppropriateConditionalProbabilityStore(isForLeaf); } public String getSubstitutionModelSummary() { return "Model:"+substitutionModel_.toString(); } public NeoParameterized getParameterAccess() { return parameterAccess_; } } }pal-1.5.1/src/pal/eval/PatternInfo.java0000644000000000000000000001115610037743630016365 0ustar rootroot// PatternInfo.java // // (c) 1999-2003 PAL Development Core Team // // This package may be distributed under the // terms of the Lesser GNU General Public License (LGPL) package pal.eval; /** *

Title: PatternInfo

*

Description: Describes particular site pattern information based on up to two child patterns, and can adapt.
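 * A brief usage sketch (illustrative only; leftPattern and rightPattern are
 * assumed to be the PatternInfo objects of the two child nodes):
 * <pre>
 *   PatternInfo center = new PatternInfo(numberOfSites, true);
 *   int uniquePatterns = center.build(leftPattern, rightPattern, numberOfSites);
 *   int[] weights = center.getPatternWeights(); // one weight per unique pattern
 * </pre>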

* @author Matthew Goode * @version 1.0 */ public final class PatternInfo { private final int[] sitePatternMatchup_; private final int[] patternWeights_; private final int[] patternLookup_; private int numberOfPatterns_; /** * Cloning constructor * @param toCopy The PatternInfo to copy */ private PatternInfo(PatternInfo toCopy) { this.sitePatternMatchup_ = pal.misc.Utils.getCopy(toCopy.sitePatternMatchup_); this.patternWeights_ = pal.misc.Utils.getCopy(toCopy.patternWeights_); this.patternLookup_ = pal.misc.Utils.getCopy(toCopy.patternLookup_); this.numberOfPatterns_ = toCopy.numberOfPatterns_; } public PatternInfo(int numberOfSites, boolean binaryPattern) { this.sitePatternMatchup_ = new int[numberOfSites]; this.patternWeights_ = new int[numberOfSites]; this.numberOfPatterns_ = 0; patternLookup_ = new int[(binaryPattern ? numberOfSites*2 : numberOfSites)]; } public PatternInfo(int[] sitePatternMatchup, int[] patternWeights, int[] patternLookup, int initialNumberOfPatterns) { this.sitePatternMatchup_ = sitePatternMatchup; this.patternWeights_ = patternWeights; this.patternLookup_ = patternLookup; this.numberOfPatterns_ = initialNumberOfPatterns; } public PatternInfo(int[] sitePatternMatchup, int initialNumberOfPatterns) { this(sitePatternMatchup,null,initialNumberOfPatterns); } public PatternInfo(int[] sitePatternMatchup, int[] patternWeights, int initialNumberOfPatterns) { this(sitePatternMatchup,patternWeights,null, initialNumberOfPatterns); } /** * Obtain an exact copy of this pattern info * @return the required copy */ public PatternInfo getCopy() { return new PatternInfo(this); } public String toString() { return pal.misc.Utils.toString(patternLookup_,numberOfPatterns_*2); } public String sitePatternMatchupToString() { return pal.misc.Utils.toString(sitePatternMatchup_); } public final int[] getPatternLookup() { return patternLookup_; } public int[] getPatternWeights() { return patternWeights_; } public final int getNumberOfSites() { return sitePatternMatchup_.length; } public final int[] getSitePatternMatchup() { return sitePatternMatchup_; } public final int getNumberOfPatterns() { return numberOfPatterns_; } public void setNumberOfPatterns(int n) { this.numberOfPatterns_ = n; } public int build(PatternInfo leftPattern, PatternInfo rightPattern, final int numberOfSites ) { if(rightPattern.getNumberOfPatterns()==0) { System.out.println("Error: right has zero patterns"); Thread.dumpStack(); } if(leftPattern.getNumberOfPatterns()==0) { System.out.println("Error: left has zero patterns"); } final int numberOfLeftPatterns = leftPattern.getNumberOfPatterns(); final int numberOfRightPatterns = rightPattern.getNumberOfPatterns(); final int[] leftSitePatternMatchup = leftPattern.getSitePatternMatchup(); final int[] rightSitePatternMatchup = rightPattern.getSitePatternMatchup(); int uniqueCount = 0; // table.clear(); int uniqueCountTimesTwo = 0; for(int i = 0 ; i < numberOfSites ; i++) { final int leftPatternIndex = leftSitePatternMatchup[i]; final int rightPatternIndex = rightSitePatternMatchup[i]; final int patternIndex = getMatchingPattern(leftPatternIndex,rightPatternIndex,patternLookup_, uniqueCount); if(patternIndex<0) { sitePatternMatchup_[i] = uniqueCount; patternLookup_[uniqueCountTimesTwo++] = leftPatternIndex; patternLookup_[uniqueCountTimesTwo++] = rightPatternIndex; patternWeights_[uniqueCount++]=1; } else { patternWeights_[patternIndex]++; sitePatternMatchup_[i] = patternIndex; } } numberOfPatterns_ = uniqueCount; return uniqueCount; } //End of buildPatternInfo() /** * @return 
the index of mathcing pattern (if already found), or -1 otherwise. */ private static final int getMatchingPattern(final int leftPattern, final int rightPattern, final int[] patternLookup, final int numberOfPatternsFoundSoFar) { int index = 0; for(int i = 0 ; i < numberOfPatternsFoundSoFar ; i++) { boolean matchLeft = patternLookup[index++]==leftPattern; boolean matchRight = patternLookup[index++]==rightPattern; if(matchLeft&&matchRight) { return i; } } return -1; } } pal-1.5.1/src/pal/eval/ChiSquareValue.java0000644000000000000000000000753407573706200017025 0ustar rootroot// ChiSquareValue.java // // (c) 1999-2001 PAL Development Core Team // // This package may be distributed under the // terms of the Lesser GNU General Public License (LGPL) package pal.eval; import pal.distance.*; import pal.math.*; import pal.misc.*; import pal.tree.*; /** * computes chi-square value of a (parameterized) tree for * its set of parameters (e.g., branch lengths) * and a given distance matrix * * @version $Id: ChiSquareValue.java,v 1.14 2002/12/05 04:27:28 matt Exp $ * * @author Korbinian Strimmer */ public class ChiSquareValue implements MultivariateFunction { // // Public stuff // /** * initialization * * @param m distance matrix * @param w determines whether weighted or * unweighted chi-squares are computed */ public ChiSquareValue(DistanceMatrix m, boolean w) { givenMat = m; weighted = w; } /** * define (parameterized) tree * * @param t tree */ public void setTree(Tree t) { tree = t; //changed so that smaller trees could be easily handled //for tree-search purposes inducedMat = new TreeDistanceMatrix(tree); if (tree instanceof ParameterizedTree) { ptree = (ParameterizedTree) tree; numParams = ptree.getNumParameters(); } else { ptree = null; numParams = 0; } } /** * Returns the (parameterized) tree of this likelihood value. */ public Tree getTree() { return tree; } /** * compute (weighted) least-square value * for current tree (fixed branch lengths) * * return chi-square value */ public double compute() { inducedMat.recompute(tree); return DistanceMatrixUtils.squaredDistance(inducedMat, givenMat, weighted); } /** * optimise parameters of a tree by minimising its chi-square value * (tree must be a ParameterizedTree) * * @return minimimum chi-square value */ public double optimiseParameters() { return optimiseParameters(null); } /** * optimise parameters of a tree by minimising its chi-square value * (tree must be a ParameterizedTree) * * @param mm optimiser for ParameterizedTree * * @return minimum chi-square value */ public double optimiseParameters(MultivariateMinimum mm) { if (!(tree instanceof ParameterizedTree)) { // we need a ParameterizedTree here! 
new IllegalArgumentException("ParameterizedTree required"); } if (mm == null) { if (mvm == null) mvm = new DifferentialEvolution(numParams); } else { mvm = mm; } // first guess are the default parameters of the tree double[] estimate = new double[numParams]; for (int i = 0; i < numParams; i++) { estimate[i] = ptree.getDefaultValue(i); } mvm.findMinimum(this, estimate, BranchLimits.FRACDIGITS, BranchLimits.FRACDIGITS); return evaluate(estimate); } // interface MultivariateFunction /** * compute (weighted) least-squares value * * @param params parameters (branch lengths) of the tree */ public double evaluate(double[] params) { for (int i = 0; i < numParams; i++) { ptree.setParameter(params[i], i); } return compute(); } /** * get number of parameters in tree * * @return number of parameters */ public int getNumArguments() { return ptree.getNumParameters(); } public double getLowerBound(int n) { return ptree.getLowerLimit(n); } public double getUpperBound(int n) { return ptree.getUpperLimit(n); } /** * @return null */ public OrthogonalHints getOrthogonalHints() { return null; } // // Private stuff // private int numParams; private Tree tree; private ParameterizedTree ptree; private DistanceMatrix givenMat; private TreeDistanceMatrix inducedMat; private boolean weighted; private MultivariateMinimum mvm = null; } pal-1.5.1/src/pal/xml/0000755000000000000000000000000010141733722013133 5ustar rootrootpal-1.5.1/src/pal/xml/ElementParser.java0000644000000000000000000004343607712725522016567 0ustar rootroot// ElementParser.java // // (c) 1999-2001 PAL Development Core Team // // This package may be distributed under the // terms of the Lesser GNU General Public License (LGPL) package pal.xml; import org.w3c.dom.*; import pal.coalescent.ConstExpGrowth; import java.util.*; import java.io.Reader; import pal.util.*; import pal.misc.*; import pal.alignment.*; import pal.datatype.*; import pal.tree.AttributeNode; /** * This class provides static methods for parsing PAL objects * from DOM Elements. Methods are ordered in public, protected, private and * alphabetic within each group. * * @version $Id: ElementParser.java,v 1.14 2003/08/02 01:15:14 matt Exp $ * * @author Alexei Drummond */ public class ElementParser implements XMLConstants { /** * @return the first child element of the given name. */ public static Element getFirstByName(Element parent, String name) { NodeList nodes = parent.getElementsByTagName(name); if (nodes.getLength() > 0) { return (Element)nodes.item(0); } else return null; } /** * Parses an alignment element and returns an alignment object. 
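 * A hedged example of the expected call pattern (the Document is assumed to have
 * been produced elsewhere by a DOM parser):
 * <pre>
 *   Element alignmentElement = ElementParser.getFirstByName(doc.getDocumentElement(), ALIGNMENT);
 *   Alignment alignment = ElementParser.parseAlignmentElement(alignmentElement);
 * </pre>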
*/ public static Alignment parseAlignmentElement(Element e) throws XmlParseException { pal.alignment.Alignment alignment = null; pal.datatype.DataType dataType = Nucleotides.DEFAULT_INSTANCE; String gaps = "-"; validateTagName(e, ALIGNMENT); if (hasAttribute(e, MISSING)) {gaps = e.getAttribute(MISSING);} if (hasAttribute(e, DATA_TYPE_ID)) { String dataTypeId = e.getAttribute(DATA_TYPE_ID); dataType = DataType.Utils.getInstance(Integer.parseInt(dataTypeId)); } else if (hasAttribute(e, DATA_TYPE)) { String dataTypeStr = e.getAttribute(DATA_TYPE); if (dataTypeStr.equals(DataType.NUCLEOTIDE_DESCRIPTION)) { dataType = Nucleotides.DEFAULT_INSTANCE; } else if (dataTypeStr.equals(DataType.AMINO_ACID_DESCRIPTION)) { dataType = AminoAcids.DEFAULT_INSTANCE; } else if (dataTypeStr.equals(DataType.CODON_DESCRIPTION)) { dataType = new Codons(); } else if (dataTypeStr.equals(DataType.TWO_STATE_DESCRIPTION)) { dataType = new TwoStates(); } } NodeList nodes = e.getElementsByTagName(SEQUENCE); String[] sequences = new String[nodes.getLength()]; String[] names = new String[nodes.getLength()]; for (int i = 0; i < sequences.length; i++) { Element sequence = (Element)nodes.item(i); names[i] = getNameAttr(sequence); sequences[i] = ""; NodeList seqs = sequence.getChildNodes(); for (int j = 0; j < seqs.getLength(); j++) { if (seqs.item(j) instanceof Text) { sequences[i] += ((Text)seqs.item(j)).getNodeValue(); } } } alignment = new SimpleAlignment(new SimpleIdGroup(names), sequences, gaps,dataType); return alignment; } /** * parses an attribute element. */ public static Attribute parseAttributeElement(Element e) throws pal.xml.XmlParseException { String name = null; String value = null; String type = null; validateTagName(e, ATTRIBUTE); if (hasAttribute(e, NAME)) { name = e.getAttribute(NAME); } else throw new XmlParseException(ATTRIBUTE + " tags require a name attribute!"); if (hasAttribute(e, VALUE)) { value = e.getAttribute(VALUE); } else throw new XmlParseException(ATTRIBUTE + " tags require a value attribute!"); if (hasAttribute(e, TYPE)) { type = e.getAttribute(TYPE); } return new Attribute(name, value, type); } /** * Parses an element from an DOM document into a DemographicModel. Recognises * ConstantPopulation, ExponentialGrowth, ConstExpGrowth. 
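 * For illustration only (e is assumed to be a demographic-model element taken
 * from a PAL XML document; the returned object may be any of the types above):
 * <pre>
 *   pal.coalescent.DemographicModel model = ElementParser.parseDemographicModel(e);
 *   double currentPopulationSize = model.getDemographic(0.0);
 * </pre>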
*/ public static pal.coalescent.DemographicModel parseDemographicModel(Element e) throws XmlParseException { pal.coalescent.ConstantPopulation model = null; int units = pal.misc.Units.GENERATIONS; double growthParam = 0.0; double populationSize = 1.0; double ancestral = 0.0; double tx = 0.0; int parameterization = ConstExpGrowth.ALPHA_PARAMETERIZATION; validateTagName(e, DEMOGRAPHIC_MODEL); units = getUnitsAttr(e); NodeList nodes = e.getElementsByTagName(PARAMETER); for (int i = 0; i < nodes.getLength(); i++) { Element param = (Element)nodes.item(i); String name = getNameAttr(param); if (name.equals(POPULATION_SIZE)) { populationSize = getDoubleValue(param);} else if (name.equals(GROWTH_RATE)) { growthParam = getDoubleValue(param); } else if (name.equals(ALPHA)) { ancestral = getDoubleValue(param); } else if (name.equals(ANCESTRAL_POP_SIZE)) { ancestral = getDoubleValue(param); parameterization = parameterization | ConstExpGrowth.N1_PARAMETERIZATION; } else if (name.equals(CURRENT_POP_SIZE_DURATION)) { tx = getDoubleValue(param); } else if (name.equals(GROWTH_PHASE_DURATION)) { growthParam = getDoubleValue(param); System.out.println("Found LX=" + growthParam); parameterization = parameterization | ConstExpGrowth.LX_PARAMETERIZATION; } } String type = e.getAttribute(TYPE); if (type.equals(CONSTANT_POPULATION)) { model = new pal.coalescent.ConstantPopulation(populationSize, units); } else if (type.equals(EXPONENTIAL_GROWTH)) { model = new pal.coalescent.ExponentialGrowth(populationSize, growthParam, units); } else if (type.toLowerCase().equals(CONST_EXP_GROWTH)) { model = new pal.coalescent.ConstExpGrowth( populationSize, growthParam, ancestral, units, parameterization); } else if (type.toLowerCase().equals(CONST_EXP_CONST)) { model = new pal.coalescent.ConstExpConst( populationSize, growthParam, ancestral, tx, units, parameterization); } else if (type.toLowerCase().equals(EXPANDING_POPULATION)) { //ExpandingPopulation must have alpha parameterization!! if ((parameterization & ConstExpGrowth.N1_PARAMETERIZATION) > 0) { ancestral = ancestral / populationSize; } model = new pal.coalescent.ExpandingPopulation( populationSize, growthParam, ancestral, units); } return model; } /** * @return a tree node parsed from an XML element. */ public static pal.tree.Node parseEdgeNodeElement(Element e) throws XmlParseException { pal.tree.Node node = null; validateTagName(e, EDGE); node = pal.tree.NodeFactory.createNode(); if (hasAttribute(e, LENGTH)) { node.setBranchLength(Double.parseDouble(e.getAttribute(LENGTH))); } NodeList nodes = e.getChildNodes(); int nodeCount = 0; for (int i =0; i < nodes.getLength(); i++) { if (nodes.item(i) instanceof Element) { Element element = (Element)nodes.item(i); if (element.getTagName().equals(NODE)) { if (nodeCount > 0) { throw new RuntimeException("Each edge should contain only 1 node!!"); } parseNodeElement((Element)nodes.item(0), node); nodeCount += 1; } } } return node; } /** * reads XML format of frequencies.
* e.g <frequencies>0.19 0.31 0.16 0.34 </frequencies>. * @returns an array of double representing the equilibrium base frequencies. */ public static final double[] parseFrequencies(Element element) throws XmlParseException { Vector freqs = new Vector(); validateTagName(element, FREQUENCIES); NodeList nodes = element.getChildNodes(); for (int i = 0; i < nodes.getLength(); i++) { Node node = nodes.item(i); if (node.getNodeType() == Node.TEXT_NODE) { String text = node.getNodeValue(); StringTokenizer tokens = new StringTokenizer(text); while (tokens.hasMoreElements()) { String token = (String)tokens.nextElement(); freqs.addElement(new Double(token)); } } } double[] frequencies = new double[freqs.size()]; for (int i =0 ; i < frequencies.length; i++) { frequencies[i] = ((Double)freqs.elementAt(i)).doubleValue(); } return frequencies; } /** * Reads a mutation rate model from a DOM Document element. */ public static pal.mep.MutationRateModel parseMutationRateModel(Element e) throws XmlParseException { pal.mep.MutationRateModel model = null; int units = pal.misc.Units.GENERATIONS; double mutationRate = 1.0; double stepTime = Double.MAX_VALUE; double ancestralRate = 0.0; validateTagName(e, MUTATION_RATE_MODEL); units = getUnitsAttr(e); if (units == pal.misc.Units.EXPECTED_SUBSTITUTIONS) { throw new RuntimeException("mutations rate can't be in mutation units!"); } NodeList nodes = e.getElementsByTagName(PARAMETER); for (int i = 0; i < nodes.getLength(); i++) { Element param = (Element)nodes.item(i); String name = getNameAttr(param); if (name.equals(MUTATION_RATE)) { mutationRate = getDoubleValue(param);} else if (name.equals(MU_STEP_TIME)) { stepTime = getDoubleValue(param); } else if (name.equals(ANCESTRAL_MU_RATE)) { ancestralRate = getDoubleValue(param); } } String type = e.getAttribute(TYPE); if (type.equals(CONSTANT_MUTATION_RATE)) { model = new pal.mep.ConstantMutationRate(mutationRate, units,1000); } else if (type.equals(STEPPED_MUTATION_RATE)) { double[] rates = new double[] {mutationRate, ancestralRate}; double[] steps = new double[] {stepTime}; model = new pal.mep.SteppedMutationRate(rates, steps, units,1000); } return model; } /** * @return a tree node parsed from an XML element. */ public static pal.tree.Node parseNodeElement(Element e) throws XmlParseException { pal.tree.Node node = pal.tree.NodeFactory.createNode(); parseNodeElement(e, node); return node; } /** * Reads a rate matrix from a DOM Document element. 
Reads JC, F81, HKY, GTR */ public static pal.substmodel.RateMatrix parseRateMatrix(Element e) throws XmlParseException { return parseRateMatrix(e, null); } public static pal.substmodel.RateDistribution parseRateDistribution(Element e) throws XmlParseException { validateTagName(e, RATE_DISTRIBUTION); String type = e.getAttribute(TYPE); if (type.equals(UNIFORM_RATE_DISTRIBUTION)) { return new pal.substmodel.UniformRate(); } else if (type.equals(GAMMA_DISTRIBUTION)) { double alpha = 1.0; int ncat = 4; NodeList nodes = e.getElementsByTagName(PARAMETER); System.out.println("Found " + nodes.getLength() + " parameters in rate distribution"); for (int i = 0; i < nodes.getLength(); i++) { Element param = (Element)nodes.item(i); String name = getNameAttr(param); if (name.equals(GAMMA_ALPHA)) { alpha = getDoubleValue(param); System.out.println("Found alpha=" + alpha); } if (name.equals(NUMBER_CATEGORIES)) { ncat = getIntegerValue(param); System.out.println("Found ncats=" + ncat); } } return new pal.substmodel.GammaRates(ncat, alpha); } else throw new XmlParseException("Unrecognized rate distribution type! Should be one of\n'" + UNIFORM_RATE_DISTRIBUTION + "', '" + GAMMA_DISTRIBUTION +"'." ); } /** * Reads a rate matrix from a DOM Document element. Reads JC, F81, HKY, GTR */ protected static pal.substmodel.RateMatrix parseRateMatrix(Element e, Alignment a) throws XmlParseException { pal.substmodel.RateMatrix rateMatrix = null; double[] frequencies = null; validateTagName(e, RATE_MATRIX); String type = e.getAttribute(MODEL); Element freqElement = getFirstByName(e, FREQUENCIES); if (type.equals(JC)) { if (freqElement != null) { throw new XmlParseException("Frequency sub-element not allowed in JC model!"); } return new pal.substmodel.F81(new double[] {0.25, 0.25, 0.25, 0.25}); } if (freqElement != null) { frequencies = parseFrequencies(freqElement); } else if (a != null) { frequencies = AlignmentUtils.estimateFrequencies(a); } else throw new XmlParseException("Must have either frequency element or an associated alignment!"); if (type.equals(F81)) { rateMatrix = new pal.substmodel.F81(frequencies); } else if (type.equals(F84)) { rateMatrix = new pal.substmodel.F84(1.0, frequencies); } else if (type.equals(HKY)) { rateMatrix = new pal.substmodel.HKY(1.0, frequencies); } else if (type.equals(GTR)) { rateMatrix = new pal.substmodel.GTR(1.0, 1.0, 1.0, 1.0, 1.0, frequencies); } else { throw new XmlParseException("rate matrix model '" + type + "' unexpected!"); } NodeList nodes = e.getElementsByTagName(PARAMETER); for (int i = 0; i < nodes.getLength(); i++) { Element param = (Element)nodes.item(i); String name = getNameAttr(param); if (name.equals(KAPPA)) { rateMatrix.setParameter(getDoubleValue(param), 0); } else if (name.equals(TS_TV_RATIO)) { rateMatrix.setParameter(getDoubleValue(param), 0); } else if (name.equals(A_TO_C)) { rateMatrix.setParameter(getDoubleValue(param), 0); } else if (name.equals(A_TO_G)) { rateMatrix.setParameter(getDoubleValue(param), 1); } else if (name.equals(A_TO_T)) { rateMatrix.setParameter(getDoubleValue(param), 2); } else if (name.equals(C_TO_G)) { rateMatrix.setParameter(getDoubleValue(param), 3); } else if (name.equals(C_TO_T)) { rateMatrix.setParameter(getDoubleValue(param), 4); } else if (name.equals(G_TO_T)) { rateMatrix.setParameter(getDoubleValue(param), 5); } else { throw new XmlParseException("rate matrix parameter '" + name + "' unexpected!"); } } return rateMatrix; } /** * @return a time data object based on the given XML element. 
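 * For example (sketch only; timeDataElement is assumed to be a time-data element
 * from a PAL XML document):
 * <pre>
 *   TimeOrderCharacterData tocd = ElementParser.parseTimeDataElement(timeDataElement);
 *   double firstSampleTime = tocd.getTime(0);
 * </pre>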
*/ public static pal.misc.TimeOrderCharacterData parseTimeDataElement(Element e) throws XmlParseException { pal.misc.TimeOrderCharacterData tocd = null; int units = pal.misc.Units.GENERATIONS; validateTagName(e, TIME_DATA); units = getUnitsAttr(e); NodeList nodes = e.getElementsByTagName(TIME); Vector names = new Vector(); Vector times = new Vector(); for (int i = 0; i < nodes.getLength(); i++) { Element timeElement = (Element)nodes.item(i); Double time = new Double(timeElement.getAttribute(VALUE)); NodeList children = timeElement.getChildNodes(); if (children.item(0) instanceof Text) { StringTokenizer tokens = new StringTokenizer(children.item(0).getNodeValue()); while (tokens.hasMoreTokens()) { names.addElement(tokens.nextToken()); times.addElement(time); } } else throw new XmlParseException("Non-text node found in time element!"); } String[] nameArray = new String[names.size()]; double[] timeArray = new double[names.size()]; for (int i =0 ; i < nameArray.length; i++) { nameArray[i] = (String)names.elementAt(i); timeArray[i] = ((Double)times.elementAt(i)).doubleValue(); } tocd = new TimeOrderCharacterData(new SimpleIdGroup(nameArray), units); tocd.setTimes(timeArray, units); return tocd; } /** * @return a tree object based on the XML element it was passed. */ public static pal.tree.Tree parseTreeElement(Element e) throws XmlParseException { int units = pal.misc.Units.GENERATIONS; validateTagName(e, TREE); units = getUnitsAttr(e); NodeList nodes = e.getElementsByTagName(NODE); // TODO // instead of getting all subelements named node, // only the direct children of the tree element // should be interrogated! This will allow // for better error detection. pal.tree.Node root = parseNodeElement((Element)nodes.item(0)); if (root.getNodeHeight() == 0.0) { pal.tree.NodeUtils.lengths2Heights(root); } else { pal.tree.NodeUtils.heights2Lengths(root); } pal.tree.SimpleTree tree = new pal.tree.SimpleTree(root); tree.setUnits(units); return tree; } /** * Throws a runtime exception if the element does not have * the given name. */ public static void validateTagName(Element e, String name) throws XmlParseException { if (!e.getTagName().equals(name)) { throw new XmlParseException("Wrong tag name! Expected " + name + ", found " + e.getTagName() + "."); } } // PROTECTED METHODS protected static double getDoubleValue(Element e) { return Double.parseDouble(e.getAttribute(VALUE)); } protected static int getIntegerValue(Element e) { return Integer.parseInt(e.getAttribute(VALUE)); } protected static String getNameAttr(Element e) { return e.getAttribute(NAME); } protected static int getUnitsAttr(Element e) { int units = pal.misc.Units.GENERATIONS; if (hasAttribute(e, UNITS)) { String unitsAttr = e.getAttribute(UNITS); if (unitsAttr.equals(YEARS)) { units = pal.misc.Units.YEARS;} else if (unitsAttr.equals(MONTHS)) { units = pal.misc.Units.MONTHS;} else if (unitsAttr.equals(DAYS)) { units = pal.misc.Units.DAYS;} else if (unitsAttr.equals(MUTATIONS)) { units = pal.misc.Units.EXPECTED_SUBSTITUTIONS;} } return units; } /** * This method allows the removeal of e.hasAttribute which is DOM Level 2. * I am trying to keep compliant with DOM level 1 for now. 
*/ protected static final boolean hasAttribute(Element e, String name) { String attr = e.getAttribute(name); return ((attr != null) && !attr.equals("")); } // PRIVATE METHODS private static void parseNodeElement(Element e, pal.tree.Node node) throws XmlParseException { validateTagName(e, NODE); if (hasAttribute(e, HEIGHT)) { node.setNodeHeight(Double.parseDouble(e.getAttribute(HEIGHT))); } if (hasAttribute(e, NAME)) { node.setIdentifier(new Identifier(e.getAttribute(NAME))); } NodeList nodes = e.getChildNodes(); for (int i = 0; i < nodes.getLength(); i++) { if (nodes.item(i) instanceof Element) { Element child = (Element)nodes.item(i); if (child.getTagName().equals(NODE)) { node.addChild(parseNodeElement((Element)nodes.item(i))); } else if (child.getTagName().equals(EDGE)) { node.addChild(parseEdgeNodeElement((Element)nodes.item(i))); } else if (child.getTagName().equals(ATTRIBUTE)) { if (node instanceof AttributeNode) { Attribute a = parseAttributeElement(child); ((AttributeNode)node).setAttribute(a.getName(), a.getValue()); } } } } } } pal-1.5.1/src/pal/xml/makefile0000644000000000000000000000123707403012256014635 0ustar rootroot### VARIABLES ### JIKESOPTS := +P GCJOPTS := # Always check dependencies JIKESOPTS += +M SRC := $(wildcard *.java) CLS := $(patsubst %.java, %.class, $(SRC)) OBJ := $(patsubst %.class, %.o, $(wildcard *.class)) ### RULES ### # Compile Java sources into class files %.class: %.java jikes $(JIKESOPTS) $< # Alternative to using jikes: gcj -C # Compile class files into native code %.o: %.class gcj -c $(GCJOPTS) $< ### TARGETS ### .PHONY: classes native clean classes: $(CLS) ifneq ($(OBJ),) native: $(OBJ) (ar -rv package.a *.o; ranlib package.a) else native: $(OBJ) endif clean: rm -f *.class *.o *.u *.a *~ pal-1.5.1/src/pal/xml/make.bat0000644000000000000000000000023407433462036014546 0ustar rootroot@rem make just this directory set CLASSPATH= javac -deprecation -classpath ../../../classes;../../../classes/xml.jar -d ../../../classes *.java pause pal-1.5.1/src/pal/xml/package.html0000644000000000000000000000031207361064506015417 0ustar rootroot Utility classes for converting PAL objects to and from XML documents. This package is dependent on the DOM API which is freely available and integrated into Java 1.4. pal-1.5.1/src/pal/xml/XmlParseException.java0000644000000000000000000000067307371523234017424 0ustar rootroot// TreeParseException.java // // (c) 1999-2001 PAL Development Core Team // // This package may be distributed under the // terms of the Lesser GNU General Public License (LGPL) package pal.xml; /** * exception thrown by ElementParser. * * @author Alexei Drummond */ public class XmlParseException extends Exception { public XmlParseException() {} public XmlParseException(String msg) { super(msg); } } pal-1.5.1/src/pal/xml/ElementFactory.java0000644000000000000000000002177407706535312016742 0ustar rootroot// ElementFactory.java // // (c) 1999-2001 PAL Development Core Team // // This package may be distributed under the // terms of the Lesser GNU General Public License (LGPL) package pal.xml; import org.w3c.dom.*; import pal.coalescent.*; import java.util.*; import java.io.Reader; import pal.util.*; import pal.misc.Units; import pal.tree.Tree; import pal.tree.Node; import pal.alignment.Alignment; import pal.misc.Identifier; import pal.misc.TimeOrderCharacterData; import pal.mep.*; import pal.substmodel.RateMatrix; import pal.misc.Attribute; import java.util.Enumeration; /** * This class provides static methods for creating DOM Elements from PAL Objects. 
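 * A small usage sketch (illustrative; the Document instance is assumed to be
 * obtained from whatever DOM implementation is in use):
 * <pre>
 *   Element treeElement = ElementFactory.createTreeElement(tree, document, false);
 *   document.appendChild(treeElement);
 * </pre>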
* Arguably each object in PAL should be allowed to generate a DOM Element * representation itself -- however I have decided to restrict the PAL's * dependence on the DOM specification to this package only. * * @author Alexei Drummond * * @version $Id: ElementFactory.java,v 1.9 2003/07/20 04:52:42 matt Exp $ */ public class ElementFactory implements XMLConstants { public static Element createAlignmentElement(Alignment a, Document document) { Element alignmentNode = document.createElement(ALIGNMENT); alignmentNode.setAttribute(DATA_TYPE, a.getDataType().getDescription()); alignmentNode.setAttribute(DATA_TYPE_ID, a.getDataType().getTypeID()+""); for (int i = 0; i < a.getSequenceCount(); i++) { alignmentNode.appendChild(createSequenceElement(a.getIdentifier(i), a.getAlignedSequenceString(i), document)); } return alignmentNode; } /** * @return a DOM element describing an attribute element. */ public static Element createAttributeElement(Attribute a, Document document) { Element attNode = document.createElement(ATTRIBUTE); Object value = a.getValue(); String type = Attribute.STRING; if (value instanceof Double) { type = Attribute.DOUBLE; } if (value instanceof Float) { type = Attribute.FLOAT; } if (value instanceof Boolean) { type = Attribute.BOOLEAN; } if (value instanceof Integer) { type = Attribute.INTEGER; } attNode.setAttribute(NAME, a.getName()); attNode.setAttribute(VALUE, value.toString()); attNode.setAttribute(TYPE, type); return attNode; } /** * Creates an XML element representing a demographic model. */ public static Element createDemographicModelElement(DemographicModel demo, Document document) { Element demoNode = document.createElement(DEMOGRAPHIC_MODEL); if (demo instanceof ConstExpGrowth) { demoNode.setAttribute(TYPE, CONST_EXP_GROWTH); ConstExpGrowth ceg = (ConstExpGrowth)demo; if (ceg.getParameterization() == ConstExpGrowth.ALPHA_PARAMETERIZATION) { demoNode.appendChild(createParameterElement(ALPHA, ceg.getAncestral(), document)); } else { demoNode.appendChild(createParameterElement(ANCESTRAL_POP_SIZE, ceg.getAncestral(), document)); } } else if (demo instanceof ExponentialGrowth) { demoNode.setAttribute(TYPE, EXPONENTIAL_GROWTH); demoNode.appendChild(createParameterElement(GROWTH_RATE, ((ExponentialGrowth)demo).getGrowthRate(), document)); } else if (demo instanceof ConstantPopulation) { demoNode.setAttribute(TYPE, CONSTANT_POPULATION); demoNode.appendChild(createParameterElement(POPULATION_SIZE, ((ConstantPopulation)demo).getN0(), document)); } demoNode.setAttribute(UNITS, getUnitString(demo.getUnits())); return demoNode; } public static Element createEdgeNodeElement(pal.tree.Node node, Document document) { Element edgeNode = document.createElement(EDGE); edgeNode.setAttribute(LENGTH, node.getBranchLength()+""); for (int i =0; i < node.getChildCount(); i++) { edgeNode.appendChild(createNodeElement(node.getChild(i), document, true)); } return edgeNode; } /** * Creates a DOM element associated with the given document representing * the given equilibrium frequencies of a rate matrix. */ public static Element createFrequenciesElement(double[] frequencies, Document d) { Element freqNode = d.createElement(FREQUENCIES); String freqs = frequencies[0] + " "; for (int i =1; i < frequencies.length; i++) { freqs += " " + frequencies[i]; } freqNode.appendChild(d.createTextNode(freqs)); return freqNode; } /** * Creates an XML element representing a mutation rate model. 
*/ public static Element createMutationRateModelElement(MutationRateModel muModel, Document document) { Element muNode = document.createElement(MUTATION_RATE_MODEL); if (muModel instanceof SteppedMutationRate) { muNode.setAttribute(TYPE, STEPPED_MUTATION_RATE); SteppedMutationRate smr = (SteppedMutationRate)muModel; muNode.appendChild(createParameterElement(MUTATION_RATE, smr.getMus()[0], document)); muNode.appendChild(createParameterElement(ANCESTRAL_MU_RATE, smr.getMus()[1], document)); muNode.appendChild(createParameterElement(MU_STEP_TIME, smr.getMuChanges()[0], document)); } else if (muModel instanceof ConstantMutationRate) { muNode.setAttribute(TYPE, CONSTANT_MUTATION_RATE); muNode.appendChild(createParameterElement(MUTATION_RATE, muModel.getMutationRate(0.0), document)); } return muNode; } public static Element createNodeElement(pal.tree.Node node, Document document) { return createNodeElement(node, document, false); } public static Element createNodeElement(pal.tree.Node node, Document document, boolean includeEdges) { Element nodeNode = document.createElement(NODE); nodeNode.setAttribute(HEIGHT, node.getNodeHeight()+""); nodeNode.setAttribute(NAME, node.getIdentifier().getName()); if (node instanceof pal.tree.AttributeNode) { pal.tree.AttributeNode attNode = (pal.tree.AttributeNode)node; Enumeration e = attNode.getAttributeNames(); while ((e != null) && e.hasMoreElements()) { String name = (String)e.nextElement(); Object value = attNode.getAttribute(name); nodeNode.appendChild(createAttributeElement(new Attribute(name, value), document)); } } for (int i =0; i < node.getChildCount(); i++) { if (includeEdges) { nodeNode.appendChild(createEdgeNodeElement(node.getChild(i), document)); } else { nodeNode.appendChild(createNodeElement(node.getChild(i), document)); } } return nodeNode; } /** * Creates an XML element representing a parameter. 
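 * For instance (sketch only; "mutationRate" is just an illustrative name):
 * <pre>
 *   Element p = ElementFactory.createParameterElement("mutationRate", 1.0E-5, document);
 * </pre>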
*/ public static Element createParameterElement(String name, double value, Document document) { Element parameterNode = document.createElement(PARAMETER); parameterNode.setAttribute(NAME, name); parameterNode.setAttribute(VALUE, value+""); return parameterNode; } public static Element createRateMatrixElement(RateMatrix matrix, Document d) { Element matrixNode = d.createElement(RATE_MATRIX); matrixNode.setAttribute(MODEL, matrix.getUniqueName()); matrixNode.setAttribute(DATA_TYPE, matrix.getDataType().getDescription()); matrixNode.setAttribute(DATA_TYPE_ID, matrix.getDataType().getTypeID()+""); matrixNode.appendChild(createFrequenciesElement(matrix.getEquilibriumFrequencies(), d)); for (int i =0 ; i < matrix.getNumParameters(); i++) { matrixNode.appendChild( createParameterElement(matrix.getParameterName(i), matrix.getParameter(i), d)); } return matrixNode; } public static Element createSequenceElement(Identifier id, String sequence, Document document) { Element sequenceNode = document.createElement(SEQUENCE); sequenceNode.setAttribute(NAME, id.getName()); sequenceNode.appendChild(document.createTextNode(sequence)); return sequenceNode; } public static Element createTimeDataElement(TimeOrderCharacterData tocd, Document document) { Element timeDataNode = document.createElement(TIME_DATA); timeDataNode.setAttribute(UNITS, getUnitString(tocd.getUnits())); timeDataNode.setAttribute(ORIGIN, "0"); timeDataNode.setAttribute(DIRECTION, BACKWARDS); for (int i =0; i < tocd.getIdCount(); i++) { timeDataNode.appendChild( createTimeElement(tocd.getIdentifier(i), tocd.getTime(i), document)); } return timeDataNode; } public static Element createTreeElement(Tree tree, Document document, boolean includeEdges) { Element treeNode = document.createElement(TREE); treeNode.setAttribute(UNITS, getUnitString(tree.getUnits())); treeNode.appendChild(createNodeElement(tree.getRoot(), document, false)); return treeNode; } // PRIVATE METHODS private static Element createTimeElement(Identifier id, double time, Document document) { Element timeNode = document.createElement(TIME); timeNode.setAttribute(VALUE, time+""); timeNode.appendChild(document.createTextNode(id.getName())); return timeNode; } /** * Private method that converts a unit integer into a human-readable name. */ private static String getUnitString(int units) { switch (units) { case Units.GENERATIONS: return GENERATIONS; case Units.DAYS: return DAYS; case Units.MONTHS: return MONTHS; case Units.YEARS: return YEARS; case Units.EXPECTED_SUBSTITUTIONS: return MUTATIONS; default: return UNKNOWN; } } } pal-1.5.1/src/pal/coalescent/0000755000000000000000000000000010141733720014451 5ustar rootrootpal-1.5.1/src/pal/coalescent/DemographicClockTree.java0000644000000000000000000000507207323430030021332 0ustar rootroot// DemographicClockTree.java // // (c) 1999-2001 PAL Development Core Team // // This package may be distributed under the // terms of the Lesser GNU General Public License (LGPL) package pal.coalescent; import pal.misc.*; import pal.tree.*; /** * Provides parameter interface to a clock-like genealogy which is * assumed to have some demographic pattern of theta (diversity) as * well as branch parameters (the minimal node height differences * at each internal node). * * Must be used in conjunction with DemographicLikelihoodFunction! 
* * @author Alexei Drummond */ public class DemographicClockTree extends ClockTree implements DemographicTree { // // Public stuff // DemographicModel model; /** * take any tree and afford it with an interface * suitable for a clock-like genealogy, under a certain demographic * assumption. *

* This parameterisation of branches, ensuring that * all parameters are independent of each other is due to * Andrew Rambaut (personal communication). */ public DemographicClockTree(Tree t, DemographicModel model) { setBaseTree(t); this.model = model; if (t.getRoot().getChildCount() < 2) { throw new IllegalArgumentException( "The root node must have at least two childs!"); } NodeUtils.heights2Lengths(getRoot()); parameter = new double[getInternalNodeCount() + model.getNumParameters()]; heights2parameters(); } /** * Returns the likelihood of the current demographic model, given * the current branch lengths. */ public double computeDemoLogLikelihood() { CoalescentIntervals ci = IntervalsExtractor.extractFromTree(this); double value = ci.computeLogLikelihood(model); return value; } // interface Parameterized public int getNumParameters() { return getInternalNodeCount() + model.getNumParameters(); } // // Private stuff // protected void parameters2Heights() { super.parameters2Heights(); for (int i = 0; i < model.getNumParameters(); i++) { model.setParameter(parameter[getInternalNodeCount() + i], i); } } protected void heights2parameters() { super.heights2parameters(); if (model != null) { for (int i = 0; i < model.getNumParameters(); i++) { parameter[getInternalNodeCount() + i] = model.getParameter(i); } } } /** * Return the demographic model being used to optimize the * likelihood of this tree. */ public DemographicModel getDemographicModel() { return model; } } pal-1.5.1/src/pal/coalescent/DemographicModel.java0000644000000000000000000001206007427055404020527 0ustar rootroot// DemographicModel.java // // (c) 1999-2001 PAL Development Core Team // // This package may be distributed under the // terms of the Lesser GNU General Public License (LGPL) package pal.coalescent; import pal.math.*; import pal.misc.*; import pal.io.*; import java.io.*; /** * This abstract class contains methods that are of general use for * modelling coalescent intervals given a demographic model. * * Parts of this class were derived from C++ code provided by Oliver Pybus. * * @version $Id: DemographicModel.java,v 1.12 2002/02/02 08:32:52 alexi Exp $ * * @author Alexei Drummond * @author Korbinian Strimmer */ public abstract class DemographicModel implements Units, Parameterized, Report, Cloneable, Serializable, Summarizable { // // Public stuff // public DemographicModel() { rng = new MersenneTwisterFast(); binom = new Binomial(); units = GENERATIONS; fo = FormattedOutput.getInstance(); } public abstract Object clone(); // // functions that define a demographic model (left for subclass) // /** * Gets the value of the demographic function N(t) at time t. */ public abstract double getDemographic(double t); /** * Returns value of demographic intensity function at time t * (= integral 1/N(x) dx from 0 to t). */ public abstract double getIntensity(double t); /** * Returns value of inverse demographic intensity function * (returns time, needed for simulation of coalescent intervals). */ public abstract double getInverseIntensity(double x); // Parameterized and Report interface is also left for subclass // general functions /** * Returns an random interval size selected from the Kingman prior of the demographic model. */ public double getSimulatedInterval (int numLin, double timeOfLastCoal) { double U = rng.nextDouble(); // create unit uniform random variate // has to be done somewhere! 
binom.setMax(numLin); double tmp = -Math.log(U)/binom.getNChoose2(numLin) + getIntensity(timeOfLastCoal); double interval = getInverseIntensity(tmp) - timeOfLastCoal; return interval; } /** * Calculates the integral 1/N(x) dx between start and finish. */ public double getIntegral(double start, double finish) { return getIntensity(finish) - getIntensity(start); } /** * Returns the likelihood of a given *coalescent* interval */ public double computeLogLikelihood(double width, double timeOfPrevCoal, int numLineages) { return computeLogLikelihood(width, timeOfPrevCoal, numLineages, CoalescentIntervals.COALESCENT); } /** * Returns the likelihood of a given interval,coalescent or otherwise. */ public double computeLogLikelihood(double width, double timeOfPrevCoal, int numLineages, int type) { binom.setMax(numLineages); double timeOfThisCoal = width + timeOfPrevCoal; double intervalArea = getIntegral(timeOfPrevCoal, timeOfThisCoal); double like = 0; switch (type) { case CoalescentIntervals.COALESCENT: like = -Math.log(getDemographic(timeOfThisCoal)) - (binom.getNChoose2(numLineages)*intervalArea); break; case CoalescentIntervals.NEW_SAMPLE: like = -(binom.getNChoose2(numLineages)*intervalArea); break; } return like; } /** * Units in which population size is measured. */ private int units; /** * sets units of measurement. * * @param u units */ public void setUnits(int u) { units = u; } /** * returns units of measurement. */ public int getUnits() { return units; } private double logL = 0.0; /** * sets log likelihood * * @param l log-likelihood */ public void setLogL(double l) { logL = l; } /** * returns log-likelihood. */ public double getLogL() { return logL; } /** * This function tests the consistency of the * getIntensity and getInverseIntensity methods * of this demographic model. If the model is * inconsistent then a RuntimeException will be thrown. * @param model the demographic model to test. * @param steps the number of steps between 0.0 and maxTime to test. * @param maxTime the maximum time to test. */ public void testConsistency(int steps, double maxTime) { double delta = maxTime / (double)steps; System.out.println("time\tN(time)\tIntensity(time)\tinverse"); for (int i =0; i <= steps; i++) { double time = (double)i * delta; double intensity = getIntensity(time); double newTime = getInverseIntensity(intensity); System.out.println(time + "\t" + getDemographic(time) + "\t" + intensity + "\t" + newTime); if (Math.abs(time-newTime) > 1e-12) { throw new RuntimeException( "Demographic model not consistent! error size = " + Math.abs(time-newTime)); } } System.out.println("Demographic model is consistent!"); } // // Private and protected stuff // private MersenneTwisterFast rng; private Binomial binom; protected FormattedOutput fo; } pal-1.5.1/src/pal/coalescent/CoalescentTree.java0000644000000000000000000000070007731100430020206 0ustar rootroot// CoalescentTree.java // // (c) 1999-2001 PAL Development Core Team // // This package may be distributed under the // terms of the Lesser GNU General Public License (LGPL) package pal.coalescent; import pal.tree.*; /** * interface defining a parameterized tree that * includes demographic information. 
* * @author Alexei Drummond */ public interface CoalescentTree { CoalescentIntervals getCoalescentIntervals(); } pal-1.5.1/src/pal/coalescent/CoalescentSimulator.java0000644000000000000000000000235307323430030021273 0ustar rootroot// CoalescentSimulator.java // // (c) 1999-2001 PAL Development Core Team // // This package may be distributed under the // terms of the Lesser GNU General Public License (LGPL) package pal.coalescent; import pal.tree.*; /** * Simulates a set of coalescent intervals given a demographic model. * * @version $Id: CoalescentSimulator.java,v 1.5 2001/07/12 12:17:43 korbinian Exp $ * * @author Alexei Drummond * @author Korbinian Strimmer */ public class CoalescentSimulator { /** * Simulates a set of CoalescentIntervals from a genealogy assuming * contemporaneous tips. * @param numLines the number of tips in the sample genealogy * @param model the demographic model to use */ public CoalescentIntervals simulateIntervals(int numLines, DemographicModel model) { CoalescentIntervals ci = new CoalescentIntervals(numLines-1); double currentTime = 0.0; for (int i = 0; i < (numLines - 1); i++) { //try { ci.setInterval(i, model.getSimulatedInterval(numLines, currentTime)); //} catch (CoalescentException ce) { // ce.printStackTrace(); //} ci.setNumLineages(i, numLines); currentTime += ci.getInterval(i); numLines -= 1; } return ci; } } pal-1.5.1/src/pal/coalescent/DemographicTree.java0000644000000000000000000000071307543142200020356 0ustar rootroot// DemographicTree.java // // (c) 1999-2001 PAL Development Core Team // // This package may be distributed under the // terms of the Lesser GNU General Public License (LGPL) package pal.coalescent; /** * interface defining a parameterized tree that * includes demographic information. * * @author Alexei Drummond */ public interface DemographicTree { double computeDemoLogLikelihood(); DemographicModel getDemographicModel(); } pal-1.5.1/src/pal/coalescent/SerialCoalescentSimulator.java0000644000000000000000000001274107547074460022460 0ustar rootroot// SerialCoalescentSimulator.java // // (c) 1999-2001 PAL Development Core Team // // This package may be distributed under the // terms of the Lesser GNU General Public License (LGPL) package pal.coalescent; import pal.util.*; import pal.misc.*; import pal.tree.*; import java.util.*; import pal.math.*; import java.io.*; /** * Simulates a set of coalescent intervals given a demographic model. * * @author Alexei Drummond * @version $Id: SerialCoalescentSimulator.java,v 1.3 2002/10/03 06:06:55 matt Exp $ */ public class SerialCoalescentSimulator implements Serializable{ private SimpleTree tree = null; private static MersenneTwisterFast rand = new MersenneTwisterFast(); /** * Simulates a set of CoalescentIntervals from a genealogy. * @param tocd the time order character data determining the * order and time in which samples are added. 
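* The sampling times must be given in the same units as the demographic model;
* if the units differ an error is reported and null is returned.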
* @param model the demographic model to use */ public CoalescentIntervals simulateIntervals( TimeOrderCharacterData tocd, DemographicModel model, boolean createTree) { // nodes used to build tree if necessary Vector currentTreeNodes = null; Node[] nodes = null; double[] times = tocd.getCopyOfTimes(); int[] indices = new int[times.length]; HeapSort.sort(times, indices); if (!createTree) { tree = null; } else { nodes = new Node[times.length]; IdGroup ids = tocd; for (int i = 0; i < ids.getIdCount(); i++) { nodes[i] = new SimpleNode(); nodes[i].setIdentifier(ids.getIdentifier(i)); } currentTreeNodes = new Vector(); } if (tocd.getUnits() != model.getUnits()) { System.err.println("Units do not match"); System.err.println("tocd units = " + tocd.getUnits()); System.err.println("model units = " + model.getUnits()); return null; } int uniqueIntervals = 0; double currentTime = 0.0; for (int i = 0; i < times.length; i++) { double time = times[indices[i]]; if (Math.abs(time - currentTime) > 1e-12) { uniqueIntervals += 1; currentTime = time; } } //System.out.println("Unique intervals = " + uniqueIntervals); CoalescentIntervals ci = new CoalescentIntervals(uniqueIntervals + times.length - 1); currentTime = 0.0; int count = 0; int numLines = 0; //add in all tips for (int i = 0; i < times.length; i++) { // find next tip time double nextTipTime = times[indices[i]]; // if next tip time is appreciably different from current time then one // or more intervals will be added between them. if (Math.abs(nextTipTime - currentTime) > 1e-12) { double newTime = currentTime + model.getSimulatedInterval(numLines, currentTime); while ((newTime < nextTipTime) && (numLines > 1)) { ci.setInterval(count, newTime - currentTime); ci.setNumLineages(count, numLines); // add an internal node to the tree if (createTree) { addInternalNode(currentTreeNodes, numLines, newTime); } numLines -= 1; count += 1; currentTime = newTime; if (numLines > 1) { newTime = currentTime + model.getSimulatedInterval(numLines, currentTime); } } // add new sample interval //ci.setInterval(count, nextTipTime - currentTime, // numLines, CoalescentIntervals.NEW_SAMPLE); ci.setInterval(count, newTime - currentTime); ci.setNumLineages(count, numLines); numLines += 1; // add new tip to the tree if (createTree) { Node newNode = nodes[indices[i]]; newNode.setNodeHeight(nextTipTime); currentTreeNodes.addElement(newNode); } count += 1; currentTime = nextTipTime; } else { //otherwise just add tip numLines += 1; if (createTree) { Node newNode = nodes[indices[i]]; newNode.setNodeHeight(currentTime); currentTreeNodes.addElement(newNode); } } } while (numLines > 1) { double newTime = currentTime + model.getSimulatedInterval(numLines, currentTime); //ci.setInterval(count, newTime - currentTime, // numLines, CoalescentIntervals.COALESCENT); ci.setInterval(count, newTime - currentTime); ci.setNumLineages(count, numLines); // add an internal node to the tree if (createTree) { addInternalNode(currentTreeNodes, numLines, newTime); } numLines -= 1; count += 1; currentTime = newTime; } if (createTree) { int size = currentTreeNodes.size(); if (size > 1) { System.err.println("ERROR: currentTreeNodes.size() = " + size); } Node root = (Node)currentTreeNodes.elementAt(0); NodeUtils.heights2Lengths(root); tree = new SimpleTree(root); tree.setUnits(model.getUnits()); } return ci; } private void addInternalNode(Vector currentTreeNodes, int numLines, double newTime) { if (numLines != currentTreeNodes.size()) { System.err.println("ERROR: Wrong number of nodes available!"); } int node1 
= rand.nextInt(currentTreeNodes.size()); int node2 = node1; while (node2 == node1) { node2 = rand.nextInt(currentTreeNodes.size()); } Node left = (Node)currentTreeNodes.elementAt(node1); Node right = (Node)currentTreeNodes.elementAt(node2); Node newNode = new SimpleNode(); newNode.setNodeHeight(newTime); newNode.addChild(left); newNode.addChild(right); currentTreeNodes.removeElement(left); currentTreeNodes.removeElement(right); currentTreeNodes.addElement(newNode); } public Tree getTree() { return tree; } } pal-1.5.1/src/pal/coalescent/ConstExpConst.java0000644000000000000000000002402307433461760020103 0ustar rootroot// ConstExpGrowth.java // // (c) 1999-2002 PAL Development Core Team // // This package may be distributed under the // terms of the Lesser GNU General Public License (LGPL) package pal.coalescent; import pal.math.*; import pal.misc.*; import pal.io.*; import java.io.*; /** * This class models a population that grows exponentially from an * initial population size alpha N0 at time y to a size N0 * at time x until the present-day. * (Parameters: N0=present-day population size; r=growth rate; alpha: ratio of * population sizes). * or * (Parameters: N0=present-day population size; r=growth rate; N1: pre-growth * ancestral population size). * This model is nested with the exponential-growth model (alpha -> 0 and tx -> 0). * * @version $Id: ConstExpConst.java,v 1.2 2002/02/16 00:51:43 alexi Exp $ * * @author Alexei Drummond * @author Andrew Rambaut */ public class ConstExpConst extends ConstExpGrowth implements Report, Parameterized, Serializable { // // Public stuff // /** time of end of exponential growth */ public double tx; /** standard error of time of growth */ public double txSE; /** * Construct demographic model with default settings. */ public ConstExpConst(int units, int parameterization) { super(units, parameterization); this.parameterization = parameterization; tx = getDefaultValue(3); } /** * Construct demographic model of constexpconst population. */ public ConstExpConst( double size, double growth, double ancestral, double timeX, int units, int parameterization) { super(size, growth, ancestral, units, parameterization); tx = timeX; } /** * Makes a copy of this demographic model. */ public Object clone() { return new ConstExpConst(getN0(), getGrowthParam(), getAncestral(), getTimeX(), getUnits(), getParameterization()); } /** * Gets the time of transition from initial constant phase to exponential phase. */ public double getTransitionTime() { if (isLxParameterized()) return lx + tx; return tx - (Math.log(getAncestralN0()) - Math.log(N0)) / r; } /** * @return the duration of the growth phase */ public double getGrowthPhaseDuration() { if (isLxParameterized()) return lx; return getTransitionTime() - getTimeX(); } //NOTE: setGrowthPhaseDuration is inherited. /** * @return the time at which the modern constant pop size * gives way to exponential phase. */ public double getTimeX() { return tx; } public void setTimeX(double timeX) { tx = timeX; } // Implementation of abstract methods /** * @return the population size at time t. 
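* The function is piecewise: N0 for t < tx (modern constant phase),
* N0 exp(-r (t - tx)) for tx <= t < tc, where tc = tx - ln(alpha)/r (growth phase),
* and N0 alpha (= N1) for t >= tc (ancestral constant phase).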
*/ public double getDemographic(double t) { if (isN1Parameterized()) alpha = N1 / N0; if (isLxParameterized()) calculateRFromLx(); if (alpha == 1.0 || r == 0.0) { // Constant size return N0; } else if (alpha == 0.0 && tx == 0.0) { // Exponential return N0 * Math.exp(-t * r); } else { double tc = tx - Math.log(alpha)/r; if (t < tx) { return N0; } else if (t < tc) { return N0 * Math.exp(- (t - tx) * r); } else { return N0 * alpha; } } } /** * @return the integral of 1 / N(t) from 0 to t. */ public double getIntensity(double t) { if (isN1Parameterized()) alpha = N1 / N0; if (isLxParameterized()) calculateRFromLx(); if (alpha == 1.0 || r == 0.0) { return t/N0; } else if (alpha == 0.0 && tx == 0.0) { return (Math.exp(t*r)-1.0)/N0/r; } else { double tc = -Math.log(alpha)/r + tx; if (t < tx) { return t / N0; } else if (t < tc) { return // constant phase up to tx (tx / N0) + // exponential phase from tx to t (Math.exp(r*(t-tx))-1.0)/N0/r; } else { return // constant phase up to tx; (tx / N0) + // exponential phase from tx to tc ((Math.exp(r*(tc-tx))-1.0)/N0/r) + // constant phase from tc to t ((t-tc)/(alpha*N0)); } } } /** * @return the time for the given intensity. */ public double getInverseIntensity(double x) { if (isN1Parameterized()) alpha = N1 / N0; if (isLxParameterized()) calculateRFromLx(); if (r == 0) { return N0*x; } else if (alpha == 0) { return Math.log(1.0+N0*x*r)/r; } else { double xx = tx/N0; double xc = (1.0-alpha)/(alpha*N0*r) + xx; if (x < xx) { return N0*x; } else if (x < xc) { return // time of modern constant phase tx + // time of exponential phase Math.log(1.0+N0*r*(x-xx))/r; } else { return // time of modern constant phase tx + // time of exponential phase Math.log(1.0+N0*r*(xc-xx))/r + // time of ancient constant phase (N0*alpha)*(x-xc); } } // To be done... } // Parameterized interface public int getNumParameters() { return 4; } public double getParameter(int k) { switch (k) { case 0: return N0; case 1: return r; case 2: if (isN1Parameterized()) return N1; else return alpha; case 3: return tx; default: return 0; } } public double getUpperLimit(int k) { double max = 0; switch (k) { case 0: max = 1e50; break; case 1: max = 1000; break; // we have to to compute lots of exp(rt) !! 
case 2: if (isN1Parameterized()) max = 1e50; else max = 1.0; break; case 3: max = 1e50; break; default: break; } return max; } public double getLowerLimit(int k) { double min = 0; switch (k) { case 0: min = 1e-12; break; case 1: min = 0; break; case 2: min = 0; break; case 3: min = 0; break; default: break; } return min; } public double getDefaultValue(int k) { if (k == 0) { //arbitrary default values if (getUnits() == GENERATIONS) { return 1000.0; } else { return 0.2; } } else if (k == 1) { return 0; //constant population } else if (k == 2) { if (isN1Parameterized()) return getDefaultValue(0); else return 0.5; } else { return 0; } } public void setParameter(double value, int k) { switch (k) { case 0: N0 = value; break; case 1: r = value; break; case 2: if (isN1Parameterized()) N1 = value; else alpha = value; break; case 3: tx = value; break; default: break; } } public void setParameterSE(double value, int k) { switch (k) { case 0: N0SE = value; break; case 1: rSE = value; break; case 2: if (isN1Parameterized()) N1SE = value; else alphaSE = value; break; case 3: txSE = value; break; default: break; } } public String toString() { OutputTarget out = OutputTarget.openString(); report(out); out.close(); return out.getString(); } public void report(PrintWriter out) { out.println("Demographic model: const-exp-const"); if (isN1Parameterized()) { out.println("Demographic function: N(t) = N0 for t < x"); out.println(" N0 exp(-r*(t-x)) for x < t < x - ln(N1/N0)/r"); out.println(" N1 otherwise"); } else { out.println("Demographic function: N(t) = N0 for t < x"); out.println(" N0 exp(-r*(t-x)) for x < t < x - ln(alpha)/r"); out.println(" N0 alpha otherwise"); } out.print("Unit of time: "); if (getUnits() == GENERATIONS) { out.print("generations"); } else { out.print("expected substitutions"); } out.println(); out.println(); out.println("Parameters of demographic function:"); out.print(" present-day population size N0: "); fo.displayDecimal(out, N0, 6); if (N0SE != 0.0) { out.print(" (S.E. "); fo.displayDecimal(out, N0SE, 6); out.print(")"); } out.println(); out.print(" growth rate r: "); fo.displayDecimal(out, r, 6); if (rSE != 0.0) { out.print(" (S.E. "); fo.displayDecimal(out, rSE, 6); out.print(")"); } out.println(); if (isN1Parameterized()) { out.print(" pre-growth population size N1: "); fo.displayDecimal(out, N1, 6); if (N1SE != 0.0) { out.print(" (S.E. "); fo.displayDecimal(out, N1SE, 6); out.print(")"); } out.println(); out.print(" Ratio of poulation sizes alpha: "); fo.displayDecimal(out, N1/N0, 6); out.println(); } else { out.print(" ratio of population sizes alpha: "); fo.displayDecimal(out, alpha, 6); if (alphaSE != 0.0) { out.print(" (S.E. "); fo.displayDecimal(out, alphaSE, 6); out.print(")"); } out.println(); out.print(" initial population size alpha N0: "); fo.displayDecimal(out, alpha*N0, 6); out.println(); } out.println(); out.print(" time of end of expansion phase x: "); fo.displayDecimal(out, tx, 6); if (txSE != 0.0) { out.print(" (S.E. 
"); fo.displayDecimal(out, txSE, 6); out.print(")"); } out.println(); if (getLogL() != 0.0) { out.println(); out.print("log L: "); fo.displayDecimal(out, getLogL(), 6); out.println(); } } public static void main(String[] args) { double size = 100.0; double growth = 0.02; double ancestral = 0.2; double timeX = 25; int units = Units.GENERATIONS; int param = ALPHA_PARAMETERIZATION; ConstExpConst model = new ConstExpConst(size, growth, ancestral, timeX, units, param); ConstExpGrowth model2 = new ConstExpGrowth(size, growth, ancestral, units, param); ConstExpConst model3 = new ConstExpConst(size, growth, ancestral, 0.0, units, param); model.testConsistency(5000, 200.0); model2.testConsistency(5000, 200.0); model3.testConsistency(5000, 200.0); } } pal-1.5.1/src/pal/coalescent/ExponentialGrowth.java0000644000000000000000000001213210141732650020775 0ustar rootroot// ExponentialGrowth.java // // (c) 1999-2001 PAL Development Core Team // // This package may be distributed under the // terms of the Lesser GNU General Public License (LGPL) package pal.coalescent; import java.io.*; import pal.io.*; import pal.misc.*; /** * This class models an exponentially growing (or shrinking) population * (Parameters: N0=present-day population size; r=growth rate). * This model is nested with the constant-population size model (r=0). * * Parts of this class were inspired by C++ code * generously provided by Oliver Pybus. * * @version $Id: ExponentialGrowth.java,v 1.10 2001/07/12 12:17:43 korbinian Exp $ * * @author Alexei Drummond * @author Korbinian Strimmer */ public class ExponentialGrowth extends ConstantPopulation implements Report, Parameterized, Serializable, Summarizable { // // Public stuff // private static final String[] EG_SUMMARY_TYPES = {"N0","N0SE", "R", "RSE"}; //This is dependent on ConstantPopulation! /** growth rate r */ public double r; /** standard error of growth rate r */ public double rSE; /** * Construct demographic model with default settings */ public ExponentialGrowth(int units) { super(units); r = getDefaultValue(1); } /** * Construct demographic model of an exponentially growing population */ public ExponentialGrowth(double size, double growth, int units) { super(size, units); r = growth; } public String[] getSummaryTypes() { return EG_SUMMARY_TYPES; } public double getSummaryValue(int summaryType) { //This is dependent on ConstantPopulation! switch(summaryType) { case 2 : { return r; } case 3 : { return rSE; } default : { return super.getSummaryValue(summaryType); } } } public Object clone() { return new ExponentialGrowth(getN0(), getGrowthRate(), getUnits()); } /** * returns growth rate. */ public double getGrowthRate() { return r; } // Implementation of abstract methods public double getDemographic(double t) { if (r == 0) { return N0; } else { return N0 * Math.exp(-t * r); } } public double getIntensity(double t) { if (r == 0) { return t/N0; } else { return (Math.exp(t*r)-1.0)/N0/r; } } public double getInverseIntensity(double x) { if (r == 0) { return N0*x; } else { return Math.log(1.0+N0*x*r)/r; } } // Parameterized interface public int getNumParameters() { return 2; } public double getParameter(int k) { if (k == 0) return N0; return r; } public double getUpperLimit(int k) { double max = 0; switch (k) { case 0: max = 1e50; break; case 1: max = 1000; break; // we have to to compute lots of exp(rt) !! 
default: break; } return max; } public double getLowerLimit(int k) { double min = 0; switch (k) { case 0: min = 1e-12; break; case 1: min = -200; break; // we allow also shrinking populations default: break; } return min; } public double getDefaultValue(int k) { if (k == 0) { //arbitrary default values if (getUnits() == GENERATIONS) { return 1000.0; } else { return 0.2; } } else { return 0; //constant population } } public void setParameter(double value, int k) { switch (k) { case 0: N0 = value; break; case 1: r = value; break; default: break; } } public void setParameterSE(double value, int k) { switch (k) { case 0: N0SE = value; break; case 1: rSE = value; break; default: break; } } public String toString() { /* String s = "Exponentially growing population:\n"; if (getUnits() == GENERATIONS) { s += "Effective Population Size = " + N0 + "\n"; s += "Growth rate (r) = " + r + "\n"; } else { s += "Theta (haploid) = " + (N0 * 2) + "\n"; s += "Growth rate (rho) = " + r + "\n"; } return s; */ OutputTarget out = OutputTarget.openString(); report(out); out.close(); return out.getString(); } public void report(PrintWriter out) { out.println("Demographic model: exponential growth"); out.println("Demographic function: N(t) = N0 exp(-r t)"); out.print("Unit of time: "); if (getUnits() == GENERATIONS) { out.print("generations"); } else { out.print("expected substitutions"); } out.println(); out.println(); out.println("Parameters of demographic function:"); out.print(" present-day population size N0: "); fo.displayDecimal(out, N0, 6); if (N0SE != 0.0) { out.print(" (S.E. "); fo.displayDecimal(out, N0SE, 6); out.print(")"); } out.println(); out.print(" growth rate r: "); fo.displayDecimal(out, r, 6); if (rSE != 0.0) { out.print(" (S.E. "); fo.displayDecimal(out, rSE, 6); out.print(")"); } out.println(); if (getLogL() != 0.0) { out.println(); out.print("log L: "); fo.displayDecimal(out, getLogL(), 6); out.println(); } } } pal-1.5.1/src/pal/coalescent/SerialCoalescentGenerator.java0000644000000000000000000001335007744734372022430 0ustar rootroot// SerialCoalescentGenerator.java // // (c) 1999-2003 PAL Development Core Team // // This package may be distributed under the // terms of the Lesser GNU General Public License (LGPL) package pal.coalescent; /** * Title: SerialCoalescentGenerator * Description: A utility class for generating large numbers of Serail coalescent derived trees (and simulated alignments) * @author Cow * @version 1.0 * @note I'm not too sure where to put this class, or if it is of any use to anyone (outside of sUPGMA). It may jump packages. 
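* Typical use: construct with a TimeOrderCharacterData, a DemographicModel and the
* number of trees required, then call generateTrees(AlgorithmCallback) for trees only,
* or supply a SimulatedAlignment.Factory and call generateResults(AlgorithmCallback)
* to obtain matched trees and simulated alignments.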
*/ import pal.misc.*; import pal.coalescent.*; import pal.alignment.*; import pal.tree.*; import pal.util.*; public class SerialCoalescentGenerator implements java.io.Serializable { private TimeOrderCharacterData tocd_; private DemographicModel demographicModel_; private int numberOfTreesToGenerate_; private SimulatedAlignment.Factory alignmentFactory_; private final TreeOperation treeFinisher_; /** * Results will not contain alignments */ public SerialCoalescentGenerator(TimeOrderCharacterData tocd, DemographicModel demographicModel, int numberOfTreesToGenerate) { this(tocd,demographicModel,numberOfTreesToGenerate,TreeOperation.Utils.getNoOperation(), null); } /** * Results will not contain alignments */ public SerialCoalescentGenerator(TimeOrderCharacterData tocd, DemographicModel demographicModel, int numberOfTreesToGenerate, TreeOperation treeFinisher) { this(tocd,demographicModel,numberOfTreesToGenerate,treeFinisher, null); } public SerialCoalescentGenerator(TimeOrderCharacterData tocd, DemographicModel demographicModel, TreeOperation treeFinisher, SimulatedAlignment.Factory alignmentFactory) { this(tocd,demographicModel,1,treeFinisher, alignmentFactory); } /** * @param alignmentFactory Can be null if no alignments to be generated (otherwise results will contain alignments as well as trees) */ public SerialCoalescentGenerator(TimeOrderCharacterData tocd, DemographicModel demographicModel, int numberOfTreesToGenerate , TreeOperation treeFinisher, SimulatedAlignment.Factory alignmentFactory) { this.tocd_ = tocd; this.treeFinisher_ = treeFinisher; this.demographicModel_ = demographicModel; this.numberOfTreesToGenerate_ = numberOfTreesToGenerate; this.alignmentFactory_ = alignmentFactory; } private final Tree generateNewTree() { SerialCoalescentSimulator scs = new SerialCoalescentSimulator(); scs.simulateIntervals(tocd_, demographicModel_, true); return treeFinisher_.operateOn(scs.getTree()); } public final Tree generateTree() { return generateNewTree(); } /** * If callback request stop then returns trees creating thus far */ public final Tree[] generateTrees(AlgorithmCallback callback) { Tree[] trees = new Tree[numberOfTreesToGenerate_]; callback.updateStatus("Simulating trees"); for(int i = 0 ; i < numberOfTreesToGenerate_ ; i++) { if(callback.isPleaseStop()) { Tree[] toReturn = new Tree[i]; System.arraycopy(trees,0,toReturn,0,i); return toReturn; } trees[i] = generateNewTree(); callback.updateProgress(i/((double)numberOfTreesToGenerate_)); } callback.clearProgress(); return trees; } /** * If callback request stop then returns results creating thus far */ private final Results generateTreeAndAlignmentResults(AlgorithmCallback callback) { Tree[] trees = new Tree[numberOfTreesToGenerate_]; Alignment[] alignments = new Alignment[numberOfTreesToGenerate_]; callback.clearProgress(); double total = trees.length*2; for(int i = 0 ; i < trees.length ; i++) { if(callback.isPleaseStop()) { Tree[] ts = new Tree[i]; Alignment[] as = new Alignment[i]; System.arraycopy(trees,0,ts,0,i); System.arraycopy(alignments,0,as,0,i); return new Results(ts,as); } trees[i] = generateNewTree(); callback.updateProgress((2*i)/total); alignments[i] = alignmentFactory_.generateAlignment(trees[i]); callback.updateProgress((2*i+1)/total); } callback.clearProgress(); return new Results(trees,alignments); } /** * If callback request stop then returns results creating thus far */ private final Results generateTreeOnlyResults(AlgorithmCallback callback) { Tree[] trees = new Tree[numberOfTreesToGenerate_]; 
callback.clearProgress(); double total = trees.length; for(int i = 0 ; i < trees.length ; i++) { if(callback.isPleaseStop()) { Tree[] ts = new Tree[i]; System.arraycopy(trees,0,ts,0,i); return new Results(ts); } trees[i] = generateNewTree(); callback.updateProgress(i/total); } callback.clearProgress(); return new Results(trees); } public final Results generateResults(AlgorithmCallback callback) { if(alignmentFactory_!=null) { return generateTreeAndAlignmentResults(callback); } return generateTreeOnlyResults(callback); } // ============================================================================ // ==== Results class /** * A simple wrapper class for containing the results which may either be * a number of trees, or a number of trees and alignments (in parallel arrays) */ public final static class Results { private Tree[] trees_; private Alignment[] alignments_; public Results(Tree[] trees) { this(trees,null); } public Results(Tree[] trees, Alignment[] alignments) { this.trees_ = trees; this.alignments_ = alignments; } public final Tree[] getTrees() { return trees_; } public final Alignment[] getAlignments() { return alignments_; } public final boolean hasAlignments() { return alignments_!=null; } /** * @return the number of trees, or the number of tree/alignment pairs */ public final int getNumberOfPopulations() { return trees_.length; } } }pal-1.5.1/src/pal/coalescent/CoalescentIntervals.java0000644000000000000000000002212207333167134021274 0ustar rootroot// CoalescentIntervals.java // // (c) 1999-2001 PAL Development Core Team // // This package may be distributed under the // terms of the Lesser GNU General Public License (LGPL) package pal.coalescent; import pal.misc.*; import pal.tree.*; import pal.util.*; import pal.io.*; import java.util.*; import java.io.*; /** * A series of coalescent intervals representing the time * order information contained in a (serial) clock-constrained * tree. Can model both n-coalescents and s-coalescents. * * @version $Id: CoalescentIntervals.java,v 1.14 2001/08/04 19:57:31 alexi Exp $ * * @author Alexei Drummond * @author Korbinian Strimmer */ public class CoalescentIntervals implements Units, Report, Serializable { // PUBLIC STUFF /** Denotes and interval after which a coalescent event is observed * (i.e. the number of lineages is smaller in the next interval) */ public static final int COALESCENT = 0; /** * Denotes an interval at the end of which a new sample addition is * observed (i.e. the number of lineages is larger in the next interval). */ public static final int NEW_SAMPLE = 1; /** * Denotes an interval at the end of which nothing is * observed (i.e. the number of lineages is the same in the next interval). */ public static final int NOTHING = 2; /** The widths of the intervals. */ private double[] intervals; /** The number of uncoalesced lineages within a particular interval. */ private int[] numLineages; /** * Parameterless constructor. */ public CoalescentIntervals() { units = GENERATIONS; fo = FormattedOutput.getInstance(); } /** * Constructor taking a number of intervals. */ public CoalescentIntervals(int size) { this(); intervals = new double[size]; numLineages = new int[size]; } /** The units in which the intervals are expressed. */ private int units; /** * Sets the units these coalescent intervals are * measured in. */ public void setUnits(int u) { units = u; } /** * Returns the units these coalescent intervals are * measured in. 
*/ public int getUnits() { return units; } /** * Returns the number of uncoalesced lineages within this interval. * Required for s-coalescents, where new lineages are added as * earlier samples are come across. */ public int getNumLineages(int i) { return numLineages[i]; } /** * set the number lineages for this particular interval. */ public void setNumLineages(int i, int numLines) { numLineages[i] = numLines; } /** * Returns the number coalescent events in an interval */ public int getCoalescentEvents(int i) { if (i < intervals.length-1) { return numLineages[i]-numLineages[i+1]; } else { return numLineages[i]-1; } } /** * Returns the type of interval observed. */ public int getIntervalType(int i) { int numEvents = getCoalescentEvents(i); if (numEvents > 0) return COALESCENT; else if (numEvents < 0) return NEW_SAMPLE; else return NOTHING; } /** * Gets an interval. */ public double getInterval(int i) { return intervals[i]; } /** * Sets interval. */ public void setInterval(int i, double value) { intervals[i] = value; } /** * get the total height of the genealogy represented by these * intervals. */ public double getTotalHeight() { double height=0.0; for (int j=0; j < intervals.length; j++) { height += intervals[j]; } return height; } /** * get number of intervals */ public int getIntervalCount() { return intervals.length; } /** * Checks whether this set of coalescent intervals is fully resolved * (i.e. whether is has exactly one coalescent event in each * subsequent interval) */ public boolean isBinaryCoalescent() { for (int i = 0; i < intervals.length; i++) { if (getCoalescentEvents(i) != 1) return false; } return true; } /** * Checks whether this set of coalescent intervals coalescent only * (i.e. whether is has exactly one or more coalescent event in each * subsequent interval) */ public boolean isCoalescentOnly() { for (int i = 0; i < intervals.length; i++) { if (getCoalescentEvents(i) < 1) return false; } return true; } /** * Group intervals following a given (compatible) reference. * The reference must have the same number of lineages at * the start of the first interval, and the present * CoalsecentIntervals must be fully resolved. */ public void groupIntervals(CoalescentIntervals reference) { if (!isBinaryCoalescent()) { throw new IllegalArgumentException("CoalescentIntervals must purely consist of only single coalescents"); } if (getNumLineages(0) != reference.getNumLineages(0)) { throw new IllegalArgumentException("Incompatible reference CoalescentIntervals"); } int refSize = reference.getIntervalCount(); double[] newIntervals = new double[refSize]; int[] newNumLineages = new int[refSize]; int count = 0; for (int i = 0; i < refSize; i++) { newNumLineages[i] = reference.getNumLineages(i); int numEvents = reference.getCoalescentEvents(i); for (int j = 0; j < numEvents; j++) { newIntervals[i] += intervals[count]; count++; } } intervals = newIntervals; numLineages = newNumLineages; } /** * Returns a list stating which of the intervals are <= minSize * (and thus should be pooled). */ public void getSmallIntervals(double minSize, boolean[] smallInterval) { if (intervals.length != smallInterval.length) throw new IllegalArgumentException("Array length incompatible"); for (int i = 0; i < intervals.length; i++) { if (intervals[i] > minSize) { smallInterval[i] = false; } else { smallInterval[i] = true; } } } /** * Starting at time zero (i.e. 
with the interval with largest number of lineages), * the specified small intervals are pooled with the next non-small interval * (if this does not exist then with the previous non-small interval) */ public void poolIntervals(boolean[] smallInterval) { int uniqueIntervals = 0; for (int i = 0; i < intervals.length; i++) { if (smallInterval[i] == false) uniqueIntervals++; } if (uniqueIntervals == 0) uniqueIntervals = 1; double[] newIntervals = new double[uniqueIntervals]; int[] newNumLineages = new int[uniqueIntervals]; int count = 0; int coalescences = 0; int numLines = numLineages[0]; for (int i = 0; i < intervals.length; i++) { if (i < intervals.length-1) { coalescences += numLineages[i]-numLineages[i+1]; } else { coalescences += numLineages[i]-1; } newIntervals[count] = intervals[i] + newIntervals[count]; newNumLineages[count] = numLines; if (smallInterval[i] == false) { count++; if (count == uniqueIntervals) count--; numLines = numLines - coalescences; coalescences = 0; } } intervals = newIntervals; numLineages = newNumLineages; } /** * Starting at time zero (i.e. with the interval with largest number of lineages), * small intervals (<= minSize) are pooled with the next non-small interval * (if this does not exist then with the previous non-small interval) */ public void poolSmallIntervals(double minSize) { boolean[] smallInterval = new boolean[intervals.length]; getSmallIntervals(minSize, smallInterval); poolIntervals(smallInterval); } /** * Returns the log likelihood of this set of coalescent intervals, * given a demographic model. */ public double computeLogLikelihood(DemographicModel model) { double total=0.0; double currentTime = 0.0; double intervalVal = 0.0; try { for (int j = 0; j < intervals.length; j++) { total += model.computeLogLikelihood(intervals[j], currentTime, numLineages[j], getIntervalType(j)); // insert zero-length coalescent intervals int diff = getCoalescentEvents(j)-1; for (int k = 0; k < diff; k++) { total += model.computeLogLikelihood(0.0, currentTime, numLineages[j]-k-1, COALESCENT); } currentTime += intervals[j]; } } catch (ArrayIndexOutOfBoundsException e) { e.printStackTrace(); System.out.println(e); System.out.println(this); } return total; } public String toString() { OutputTarget out = OutputTarget.openString(); out.println("Lin.\tCoal.\tSize\tTotal"); double total = 0.0; for (int i = 0; i < intervals.length; i++) { total += intervals[i]; out.print(numLineages[i] + "\t"); out.print(getCoalescentEvents(i) + "\t"); fo.displayDecimal(out, intervals[i], 5); out.print("\t"); fo.displayDecimal(out, total, 5); out.println(); } out.close(); return out.getString(); } public void report(PrintWriter out) { out.println(this); } // // private stuff // private FormattedOutput fo; } pal-1.5.1/src/pal/coalescent/makefile0000644000000000000000000000123707275407432016170 0ustar rootroot### VARIABLES ### JIKESOPTS := +P GCJOPTS := # Always check dependencies JIKESOPTS += +M SRC := $(wildcard *.java) CLS := $(patsubst %.java, %.class, $(SRC)) OBJ := $(patsubst %.class, %.o, $(wildcard *.class)) ### RULES ### # Compile Java sources into class files %.class: %.java jikes $(JIKESOPTS) $< # Alternative to using jikes: gcj -C # Compile class files into native code %.o: %.class gcj -c $(GCJOPTS) $< ### TARGETS ### .PHONY: classes native clean classes: $(CLS) ifneq ($(OBJ),) native: $(OBJ) (ar -rv package.a *.o; ranlib package.a) else native: $(OBJ) endif clean: rm -f *.class *.o *.u *.a *~ 
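// Illustrative usage sketch (not part of the PAL distribution): it shows how the
// classes in this package fit together by simulating coalescent intervals under an
// exponential-growth model and scoring them with computeLogLikelihood. The class
// name CoalescentDemo and the parameter values are hypothetical, and Units is
// assumed to live in pal.misc; the PAL calls used (ExponentialGrowth,
// CoalescentSimulator.simulateIntervals, CoalescentIntervals) appear in the
// sources in this package.

import pal.coalescent.*;
import pal.misc.*;

public class CoalescentDemo {

	public static void main(String[] args) {

		// exponentially growing population: N0 = 1000, growth rate r = 0.05,
		// time measured in generations
		ExponentialGrowth model = new ExponentialGrowth(1000.0, 0.05, Units.GENERATIONS);

		// simulate intervals for a sample of 20 contemporaneous tips
		CoalescentSimulator simulator = new CoalescentSimulator();
		CoalescentIntervals intervals = simulator.simulateIntervals(20, model);

		// inspect the simulated genealogy and score it under the same model
		System.out.println("Number of intervals: " + intervals.getIntervalCount());
		System.out.println("Total height: " + intervals.getTotalHeight());
		System.out.println("log L under model: " + intervals.computeLogLikelihood(model));
	}
}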
pal-1.5.1/src/pal/coalescent/ConstExpGrowth.java0000644000000000000000000002715707433461760020302 0ustar rootroot// ConstExpGrowth.java // // (c) 1999-2001 PAL Development Core Team // // This package may be distributed under the // terms of the Lesser GNU General Public License (LGPL) package pal.coalescent; import pal.math.*; import pal.misc.*; import pal.io.*; import java.io.*; /** * This class models a population that grows * exponentially from an inital population size alpha N0 to a present-day size N0. * (Parameters: N0=present-day population size; r=growth rate; alpha: ratio of * population sizes). * or * (Parameters: N0=present-day population size; r=growth rate; N1: pre-growth * ancestral population size). * This model is nested with the exponential-growth model (alpha -> 0 or N1 = N0). * It is similar but not identical to the model used in ExpandingPopulation. * * * @version $Id: ConstExpGrowth.java,v 1.10 2002/02/16 00:51:43 alexi Exp $ * * @author Alexei Drummond * @author Andrew Rambaut * @author Korbinian Strimmer */ public class ConstExpGrowth extends ExponentialGrowth implements Report, Parameterized, Serializable { // // Public stuff // /** use alpha instead of N1 parameterization */ public static final int ALPHA_PARAMETERIZATION = 0; /** use N1 instead of alpha parameterization */ public static final int N1_PARAMETERIZATION = 1; /** use lx instead of growth parameterization */ public static final int LX_PARAMETERIZATION = 2; /** * parameterization bit string:
* first bit: 0 = ALPHA, 1 = N1
* second bit: 0 = GROWTH RATE, 1 = LX
*/ public int parameterization; /** ratio of pop. sizes */ public double alpha; /** standard error of ratio alpha */ public double alphaSE; /** ancestral pop. size */ public double N1; /** standard error of ancestral pop. size */ public double N1SE; /** the duration of the growth phase */ double lx; /** * Construct demographic model with default settings. * @param parameterization is a combination of bits representing the parameterization. Valid values are:
* ALPHA_PARAMETERIZATION
* N1_PARAMETERIZATION
* ALPHA_PARAMETERIZATION | LX_PARAMETERIZATION
* N1_PARAMETERIZATION | LX_PARAMETERIZATION
*/ public ConstExpGrowth(int units, int parameterization) { super(units); this.parameterization = parameterization; if (isN1Parameterized()) { N1 = getDefaultValue(2); } else { alpha = getDefaultValue(2); } if (isLxParameterized()) { lx = getDefaultValue(1); calculateRFromLx(); } else lx = getGrowthPhaseDuration(); } /** * Construct demographic model of an expanding population. * * */ public ConstExpGrowth(double size, double growthParam, double ancestral, int units, int parameterization) { super(size, growthParam, units); this.parameterization = parameterization; if (isN1Parameterized()) { N1 = ancestral; } else { alpha = ancestral; } if (isLxParameterized()) { lx = growthParam; calculateRFromLx(); } else lx = getGrowthPhaseDuration(); } public Object clone() { return new ConstExpGrowth(getN0(), getGrowthParam(), getAncestral(), getUnits(), getParameterization()); } /** * Gets the time of transition from ancestral constant phase to exponential phase. */ public double getTransitionTime() { if (isLxParameterized()) return lx; return -(Math.log(getAncestralN0()) - Math.log(N0)) / r; } /** * returns ancestral parameter. This may be either N1 or alpha * depending on the parameterization. */ public double getAncestral() { if (isN1Parameterized()) { return N1; } else { return alpha; } } /** * @return the growth parameter. This may be either growth rate * or growth phase duration depending on the parameterization. */ public double getGrowthParam() { if (isLxParameterized()) { return lx; } return r; } /** * This method overrides superclass to check parameterization. */ public double getGrowthRate() { if (isLxParameterized()) { calculateRFromLx(); } return super.getGrowthRate(); } /** * Sets the ancestral parameter. This may be either N1 or alpha * depending on the parameterization. */ public void setAncestral(double ancestral) { if (isN1Parameterized()) { N1 = ancestral; } else { alpha = ancestral; } } /** * Sets the growth parameter. This may be either growth rate (r) or * growth pahse duration (lx) depending on the parameterization. */ public void setGrowthParam(double g) { if (isLxParameterized()) { lx = g; } else { r = g; } } /** * returns ancestral population size */ public double getAncestralN0() { if (isN1Parameterized()) return N1; else return N0 * alpha; } /** * @return the duration of the growth phase */ public double getGrowthPhaseDuration() { return getTransitionTime(); } /** * Sets the length of the growth phase. This method is only valid * if ALPHA_LX_PARAMETERIZATION is used. */ public void setGrowthPhaseDuration(double lx) { if (isLxParameterized()) { this.lx = lx; } else throw new RuntimeException("You must use LX_PARAMETERIZATION to use this method!"); if (lx == 0.0) { throw new IllegalArgumentException("An lx value of zero is illegal!"); } } /** * @return parameterization */ public int getParameterization() { return parameterization; } /** * @return true if using lx instead of growth rate. */ public boolean isLxParameterized() { return (parameterization & LX_PARAMETERIZATION) > 0; } /** * @return true if using N1 instead of alpha. */ public boolean isN1Parameterized() { return (parameterization & N1_PARAMETERIZATION) > 0; } /** * Sets the parameterization. * @param parameterization is a combination of bits representing the parameterization. Valid values are:
* ALPHA_PARAMETERIZATION
* N1_PARAMETERIZATION
* ALPHA_PARAMETERIZATION | LX_PARAMETERIZATION
* N1_PARAMETERIZATION | LX_PARAMETERIZATION
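* For example, (N1_PARAMETERIZATION | LX_PARAMETERIZATION) makes the model work with
* the ancestral size N1 and the growth-phase duration lx instead of alpha and r.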
*/ public void setParameterization(int p) { parameterization = p; } protected void calculateRFromLx() { r = (Math.log(getAncestralN0()) - Math.log(N0)) / -lx; } // Implementation of abstract methods public double getDemographic(double t) { if (isN1Parameterized()) alpha = N1 / N0; if (isLxParameterized()) calculateRFromLx(); if (alpha == 1.0 || r == 0.0) { return N0; } else if (alpha == 0.0) { return N0 * Math.exp(-t * r); } else { double tc = -Math.log(alpha)/r; if (t < tc) { return N0 * Math.exp(-t * r); } else { return N0 * alpha; } } } public double getIntensity(double t) { if (isN1Parameterized()) alpha = N1 / N0; if (isLxParameterized()) calculateRFromLx(); if (alpha == 1.0 || r == 0.0) { return t/N0; } else if (alpha == 0.0) { return (Math.exp(t*r)-1.0)/N0/r; } else { double tc = -Math.log(alpha)/r; if (t < tc) { return (Math.exp(r*t)-1.0)/(N0*r); } else { return (1.0-alpha+r*t+Math.log(alpha))/(alpha*N0*r); } } } public double getInverseIntensity(double x) { if (isN1Parameterized()) alpha = N1 / N0; if (isLxParameterized()) calculateRFromLx(); if (r == 0) { return N0*x; } else if (alpha == 0) { return Math.log(1.0+N0*x*r)/r; } else { double xc = (1.0-alpha)/(alpha*N0*r); if (x < xc) { return Math.log(1.0+N0*r*x)/r; } else { return (alpha-1.0+alpha*N0*r*x-Math.log(alpha))/r; } } } // Parameterized interface public int getNumParameters() { return 3; } public double getParameter(int k) { switch (k) { case 0: return N0; case 1: return r; case 2: if (isN1Parameterized()) return N1; else return alpha; default: return 0; } } public double getUpperLimit(int k) { double max = 0; switch (k) { case 0: max = 1e50; break; case 1: max = 1000; break; // we have to to compute lots of exp(rt) !! case 2: if (isN1Parameterized()) max = 1e50; else max = 1.0; break; default: break; } return max; } public double getLowerLimit(int k) { double min = 0; switch (k) { case 0: min = 1e-12; break; case 1: min = 0; break; case 2: min = 0; break; default: break; } return min; } public double getDefaultValue(int k) { if (k == 0) { //arbitrary default values if (getUnits() == GENERATIONS) { return 1000.0; } else { return 0.2; } } else if (k == 1) { return 0; //constant population } else { if (isN1Parameterized()) return getDefaultValue(0); else return 0.5; } } public void setParameter(double value, int k) { switch (k) { case 0: N0 = value; break; case 1: r = value; break; case 2: if (isN1Parameterized()) N1 = value; else alpha = value; break; default: break; } } public void setParameterSE(double value, int k) { switch (k) { case 0: N0SE = value; break; case 1: rSE = value; break; case 2: if (isN1Parameterized()) N1SE = value; else alphaSE = value; break; default: break; } } public String toString() { OutputTarget out = OutputTarget.openString(); report(out); out.close(); return out.getString(); } public void report(PrintWriter out) { out.println("Demographic model: const-exp growth"); if (isN1Parameterized()) { out.println("Demographic function: N(t) = N0 exp(-r t) for t < -ln(N1/N0)/r"); out.println(" N1 otherwise"); } else { out.println("Demographic function: N(t) = N0 exp(-r t) for t < -ln(alpha)/r"); out.println(" N0 alpha otherwise"); } out.print("Unit of time: "); if (getUnits() == GENERATIONS) { out.print("generations"); } else { out.print("expected substitutions"); } out.println(); out.println(); out.println("Parameters of demographic function:"); out.print(" present-day population size N0: "); fo.displayDecimal(out, N0, 6); if (N0SE != 0.0) { out.print(" (S.E. 
"); fo.displayDecimal(out, N0SE, 6); out.print(")"); } out.println(); out.print(" growth rate r: "); fo.displayDecimal(out, r, 6); if (rSE != 0.0) { out.print(" (S.E. "); fo.displayDecimal(out, rSE, 6); out.print(")"); } out.println(); if (isN1Parameterized()) { out.print(" pre-growth population size N1: "); fo.displayDecimal(out, N1, 6); if (N1SE != 0.0) { out.print(" (S.E. "); fo.displayDecimal(out, N1SE, 6); out.print(")"); } out.println(); out.print(" Ratio of poulation sizes alpha: "); fo.displayDecimal(out, N1/N0, 6); out.println(); } else { out.print(" ratio of population sizes alpha: "); fo.displayDecimal(out, alpha, 6); if (alphaSE != 0.0) { out.print(" (S.E. "); fo.displayDecimal(out, alphaSE, 6); out.print(")"); } out.println(); out.print(" initial population size alpha N0: "); fo.displayDecimal(out, alpha*N0, 6); out.println(); } out.println(); if (getLogL() != 0.0) { out.println(); out.print("log L: "); fo.displayDecimal(out, getLogL(), 6); out.println(); } } } pal-1.5.1/src/pal/coalescent/SkylinePlot.java0000644000000000000000000002030207323172204017570 0ustar rootroot// SkylinePlot.java // // (c) 1999-2001 PAL Development Core Team // // This package may be distributed under the // terms of the Lesser GNU General Public License (LGPL) package pal.coalescent; import pal.tree.*; import pal.misc.*; import pal.math.*; import pal.io.*; import pal.statistics.*; import java.io.*; /** * Skyline plot derived from a strictly bifurcating tree * or a coalescent interval. * * This class provides the "classic" skyline plot method by * Pybus, Rambaut and Harvey .2000. Genetics 155:1429-1437, as well * as the "generalized" skyline plot method described in * Strimmer and Pybus. 2001. MBE submitted. * * @version $Id: SkylinePlot.java,v 1.16 2001/07/11 13:51:16 korbinian Exp $ * * @author Korbinian Strimmer */ public class SkylinePlot implements Report, Units, Serializable { // // Public stuff // /** * Construct skyline plot from tree * * @param epsilon smoothing parameter (if set < 0 then epsilon will be optimized) */ public SkylinePlot(Tree tree, double epsilon) { this(IntervalsExtractor.extractFromClockTree(tree), epsilon); } /** * Construct skyline plot from given coalescent intervals * * @param epsilon smoothing parameter (if set < 0 then epsilon will be optimized) */ public SkylinePlot(CoalescentIntervals ci, double epsilon) { if (!ci.isBinaryCoalescent()) { throw new IllegalArgumentException("All coalescent intervals must contain only a single coalescent"); } fo = FormattedOutput.getInstance(); size = ci.getIntervalCount(); this.ci = ci; // population size in each coalescent interval populationSize = new double[size]; // cumulative interval sizes cis = new double[size]; maxTime = 0.0; for (int i = 0; i < size; i++) { cis[i] = maxTime; maxTime += ci.getInterval(i); } if (epsilon == 0.0) { /* init with classic skyline plot */ computeClassic(); } else if (epsilon > 0.0) { /* init with generalized skyline plot */ computeGeneralized(epsilon); } else { // find optimal generalized skyline plot optimize(); } } public String toString() { OutputTarget out = OutputTarget.openString(); report(out); out.close(); return out.getString(); } public void report(PrintWriter out) { out.println("Skyline Plot"); out.println(); out.print("Smoothing parameter epsilon = " + eps + " "); if (eps == 0.0) out.println("(classic skyline plot)"); else out.println("(generalized skyline plot)"); out.print("Unit of time: "); if (ci.getUnits() == GENERATIONS) { out.print("generations"); } else { out.print("expected 
substitutions"); } out.println(); printIntervals(out); out.println(); out.println("For each composite interval the first and the last simple interval is given."); out.println(); out.println("log L = " + getLogLikelihood()); out.println("Number of intervals: " + size); out.println("Number of composite intervals:" + params); if (params > size-2) out.println("log L(AICC) not available"); else out.println("log L(AICC) = " + getAICC()); } private void printIntervals(PrintWriter out) { out.println("Int.\tTime\tEstimated N(t)"); double total = 0.0; for (int i = 0; i < size; i++) { double m = populationSize[i]; printLine(out, i, total, m); total += ci.getInterval(i); int j; for (j = i+1; j < size; j++) { if (populationSize[j] != m) break; } i=j-1; printLine(out, i, total, m); } } private void printLine(PrintWriter out, int i, double total, double m) { out.print((size-i) + "\t"); fo.displayDecimal(out, total, 4); out.print("\t"); fo.displayDecimal(out, m, 4); out.println(); } /** * Compute classic skyline plot */ public void computeClassic() { for (int i = 0; i < size; i++) { double w = ci.getInterval(i); double n = ci.getNumLineages(i); populationSize[i] = w * (n*(n-1))/2.0 ; } params = size; eps = 0.0; } /** * Compute generalized skyline plot */ public void computeGeneralized(double epsilon) { params = 0; double cw = 0; //cumulative w for (int i = 0; i < size; i++) { double n = ci.getNumLineages(i); double w = ci.getInterval(i); int start = i; int k = 1; while (w < epsilon && i < size-1) { i++; k++; w += ci.getInterval(i); //System.out.println(ci.getInterval(i)); } //System.out.println("w=" + w + " k=" + k + " i=" + i); // if remainder is smaller than epsilon // continue pooling until the end if (maxTime - cw - w < epsilon) { for (int j = i+1; j < size; j++) { i++; k++; w += ci.getInterval(i); } } double m = w * (n*(n-k))/(2.0*k); // assign the same pop.size to all sub intervals for (int j = start; j < start+k; j++) { populationSize[j] = m; } params++; cw += w; } eps = epsilon; } /** * Optimize generalized skyline plot */ public void optimize() { // this is the naive way of doing this ... double besteps = getMaxTime(); computeGeneralized(besteps); double bestaicc = getAICC(); int GRID = 1000; double delta = besteps/GRID; double MINEPS = 1e-6; // Why MINEPS? // Because most "clock-like" trees are not properly // clock-like for a variety of reasons, i.e. the heights // of the tips are not exactly zero. 
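// The loop below scans epsilon downwards from maxTime in steps of maxTime/GRID,
// recomputing the generalized skyline plot each time and keeping the epsilon that
// gives the highest AICc (while the number of composite intervals stays below size-1).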
eps = eps - delta; while(eps > MINEPS) { computeGeneralized(eps); double aicc = getAICC(); if (aicc > bestaicc && params < size-1) { besteps = eps; bestaicc = aicc; } eps = eps - delta; } computeGeneralized(besteps); } /** * Compute log-likelihood */ public double getLogLikelihood() { double logL = 0.0; for (int i = 0; i < size; i++) { double w = ci.getInterval(i); double m = populationSize[i]; double n = ci.getNumLineages(i); double nc2 = n*(n-1.0)/2.0; logL += Math.log(nc2/m) - w*nc2/m ; } return logL; } /** * Compute AICC-corrected log-likelihood */ public double getAICC() { double logL = getLogLikelihood(); return PenalizedLikelihood.AICC(logL, params, size); } /** * Find interval corresponding to a specific time */ public double findInterval(double time) { if (time < 0) throw new IllegalArgumentException("Negative values for time are not allowed"); for (int i = 0; i < size-1; i++) { if (time >= cis[i] && time < cis[i+1]) return i; } return size-1; } /** * Returns the largest value of time defined in this plot * (= maximum value for epsilon) */ public double getMaxTime() { return maxTime; } /** * Returns the largest estimate of population size. */ public double getMaxPopulationSize() { double max = 0.0; for (int i = 0; i < size; i++) { if (populationSize[i] > max) { max = populationSize[i]; } } return max; } /** * Returns the coalescent intervals in this skyline plot. */ public CoalescentIntervals getIntervals() { return ci; } /** * Returns the number of intervals in this skyline plot. */ public int getSize() { return size; } /** * Returns the number of composite intervals (=number of parameters). */ public int getParameterCount() { return params; } /** * Returns epsilon */ public double getEpsilon() { return eps; } /** * Returns the population size in interval i. */ public double getPopulationSize(int i) { return populationSize[i]; } /** * Returns unit of time. */ public int getUnits() { return ci.getUnits(); } // private private CoalescentIntervals ci; private FormattedOutput fo; private int size; private double maxTime; private double eps; private int params; /** cummulative interval sizes */ private double[] cis; /** estimated population size in a coalescent interval */ private double[] populationSize; } pal-1.5.1/src/pal/coalescent/package.html0000644000000000000000000000014707107207534016743 0ustar rootroot Classes to model population genetic processes using the coalescent. pal-1.5.1/src/pal/coalescent/IntervalsExtractor.java0000644000000000000000000001443610141732650021170 0ustar rootroot// IntervalsExtractor.java // // (c) 1999-2001 PAL Development Core Team // // This package may be distributed under the // terms of the Lesser GNU General Public License (LGPL) package pal.coalescent; import java.util.*; import pal.mep.*; import pal.misc.*; import pal.tree.*; import pal.util.*; /** * A series of coalescent intervals representing the time * order information contained in a (serial) clock-constrained * tree. * * @version $Id: IntervalsExtractor.java,v 1.12 2001/07/12 12:17:43 korbinian Exp $ * * @author Alexei Drummond * @author Korbinian Strimmer */ public class IntervalsExtractor implements Units { /** * extracts intervals from clock tree. */ public static CoalescentIntervals extractFromClockTree(Tree tree) { return extractFromClockTree(tree, -1); } /** * extracts intervals from clock tree. Leafs are assumed to have * height zero. 
Starting at time zero, small (<= minSize) intervals are pooled * with the next non-small interval (if this does not exist then * with the previous non-small interval) */ public static CoalescentIntervals extractFromClockTree(Tree tree, double minSize) { tree.createNodeList(); //make consistent NodeUtils.lengths2Heights(tree.getRoot()); //NodeUtils.lengths2HeightsKeepTips(tree.getRoot(),true); // Set heights of all external nodes to zero // we need a proper clock-tree //for (int i = 0; i < tree.getExternalNodeCount(); i++) //{ // tree.getExternalNode(i).setNodeHeight(0.0); //} Vector times = new Vector(); Vector childs = new Vector(); collectInternalNodeHeights(tree.getRoot(), times, childs); int[] indices = new int[times.size()]; HeapSort.sort(times, indices); int uniqueIntervals = 0; double currentTime = 0.0; for (int i = 0; i < times.size(); i++) { double time = ((ComparableDouble)times.elementAt(indices[i])).doubleValue(); if (Math.abs(time - currentTime) > minSize) { uniqueIntervals += 1; } currentTime = time; } if (uniqueIntervals == 0) uniqueIntervals = 1; CoalescentIntervals ci = new CoalescentIntervals(uniqueIntervals); ci.setUnits(tree.getUnits()); double start = 0.0; int numLines = tree.getExternalNodeCount(); int count = 0; int coalescences = 0; for (int i = 0; i < times.size(); i++) { double finish = ((ComparableDouble)times.elementAt(indices[i])).doubleValue(); int childCount = ((Integer)childs.elementAt(indices[i])).intValue(); double length = Math.abs(finish - start); coalescences += childCount-1; ci.setInterval(count, length + ci.getInterval(count) ); ci.setNumLineages(count, numLines); if (length > minSize) { count++; if (count == uniqueIntervals) count--; numLines = numLines - coalescences; coalescences = 0; } start = finish; } return ci; } /** * extracts intervals in generation times from serial clock tree (in mutation times) * after taking into account mutation rate model. */ public static CoalescentIntervals extractFromTree(Tree tree, MutationRateModel muModel) { Tree newTree = TreeUtils.mutationsToGenerations(tree, muModel); return extractFromTree(newTree); } /** * extracts intervals from serial clock tree. 
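* Unlike extractFromClockTree, tips need not be contemporaneous: both sampling
* events (tips at non-zero height) and coalescent events are recorded, and node
* heights are recomputed from branch lengths if the root height is zero.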
*/ public static CoalescentIntervals extractFromTree(Tree tree) { double MULTIFURCATION_LIMIT = BranchLimits.MINARC; // get heights if it looks necessary if (tree.getRoot().getNodeHeight() == 0.0) { NodeUtils.lengths2Heights(tree.getRoot()); } Vector times = new Vector(); Vector childs = new Vector(); collectAllTimes(tree.getRoot(), times, childs); int[] indices = new int[times.size()]; Vector lineages = new Vector(); Vector intervals = new Vector(); HeapSort.sort(times, indices); double start = 0.0; int numLines = 0; int i = 0; while (i < times.size()) { int lineagesRemoved = 0; int lineagesAdded = 0; double finish = ((ComparableDouble)times.elementAt(indices[i])).doubleValue(); double next = finish; while (Math.abs(next - finish) < MULTIFURCATION_LIMIT) { int children = ((Integer)childs.elementAt(indices[i])).intValue(); if (children == 0) { lineagesAdded += 1; } else { lineagesRemoved += (children - 1); } i += 1; if (i < times.size()) { next = ((ComparableDouble)times.elementAt(indices[i])).doubleValue(); } else break; } //System.out.println("time = " + finish + " removed = " + lineagesRemoved + " added = " + lineagesAdded); if (lineagesAdded > 0) { if ((intervals.size() > 0) || ((finish - start) > MULTIFURCATION_LIMIT)) { intervals.addElement(new Double(finish - start)); lineages.addElement(new Integer(numLines)); } start = finish; } // add sample event numLines += lineagesAdded; if (lineagesRemoved > 0) { intervals.addElement(new Double(finish - start)); lineages.addElement(new Integer(numLines)); start = finish; } // coalescent event numLines -= lineagesRemoved; } CoalescentIntervals ci = new CoalescentIntervals(intervals.size()); for (i = 0; i < intervals.size(); i++) { ci.setInterval(i, ((Double)intervals.elementAt(i)).doubleValue()); ci.setNumLineages(i, ((Integer)lineages.elementAt(i)).intValue()); } // Same Units as tree ci.setUnits(tree.getUnits()); return ci; } // PRIVATE STUFF /** * extract coalescent times and tip information into Vector times from tree. */ private static void collectAllTimes(Node node, Vector times, Vector childs) { times.addElement(new ComparableDouble(node.getNodeHeight())); childs.addElement(new Integer(node.getChildCount())); for (int i = 0; i < node.getChildCount(); i++) { collectAllTimes(node.getChild(i), times, childs); } } /** * extract internal node heights Vector times from tree. */ private static void collectInternalNodeHeights(Node node, Vector times, Vector childs) { if (!node.isLeaf()) { times.addElement(new ComparableDouble(node.getNodeHeight())); childs.addElement(new Integer(node.getChildCount())); for (int i = 0; i < node.getChildCount(); i++) { collectInternalNodeHeights(node.getChild(i), times, childs); } } } } pal-1.5.1/src/pal/coalescent/ExpandingPopulation.java0000644000000000000000000001236207323430030021304 0ustar rootroot// ExpandingPopulation.java // // (c) 1999-2001 PAL Development Core Team // // This package may be distributed under the // terms of the Lesser GNU General Public License (LGPL) package pal.coalescent; import pal.math.*; import pal.misc.*; import pal.io.*; import java.io.*; /** * This class models a population that grows * exponentially from an inital population size alpha N0 to a present-day size N0. * (Parameters: N0=present-day population size; r=growth rate; alpha: ratio of * population sizes). * This model is nested with the exponential-growth model (alpha -> 0). 
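* The demographic function is N(t) = N0 (alpha + (1 - alpha) exp(-r t)), so the
* population size tends to the constant ancestral value alpha N0 back in time.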
* * * @version $Id: ExpandingPopulation.java,v 1.6 2001/07/12 12:17:43 korbinian Exp $ * * @author Korbinian Strimmer */ public class ExpandingPopulation extends ExponentialGrowth implements Report, Parameterized, Serializable { // // Public stuff // /** ratio of pop. sizes */ public double alpha; /** standard error of time alpha */ public double alphaSE; /** * Construct demographic model with default settings */ public ExpandingPopulation(int units) { super(units); alpha = getDefaultValue(2); } /** * Construct demographic model of an expanding population * */ public ExpandingPopulation(double size, double growth, double ratio, int units) { super(size, growth, units); alpha = ratio; } public Object clone() { return new ExpandingPopulation(getN0(), getGrowthRate(), getRatio(), getUnits()); } /** * returns ratio of population sizes */ public double getRatio() { return alpha; } // Implementation of abstract methods public double getDemographic(double t) { if (r == 0) { return N0; } else if (alpha == 0) { return N0 * Math.exp(-t * r); } else { return N0 * (alpha + (1.0-alpha) * Math.exp(-t * r)); } } public double getIntensity(double t) { if (r == 0) { return t/N0; } else if (alpha == 0) { return (Math.exp(t*r)-1.0)/N0/r; } else { return Math.log(1.0+alpha*(Math.exp(t*r)-1.0))/alpha/N0/r; } } public double getInverseIntensity(double x) { if (r == 0) { return N0*x; } else if (alpha == 0) { return Math.log(1.0+N0*x*r)/r; } else { return Math.log( (alpha-1.0+Math.exp(alpha*N0*x*r) )/alpha )/r; } } // Parameterized interface public int getNumParameters() { return 3; } public double getParameter(int k) { switch (k) { case 0: return N0; case 1: return r; case 2: return alpha; default: return 0; } } public double getUpperLimit(int k) { double max = 0; switch (k) { case 0: max = 1e50; break; case 1: max = 1000; break; // we have to to compute lots of exp(rt) !! case 2: max = 1; break; default: break; } return max; } public double getLowerLimit(int k) { double min = 0; switch (k) { case 0: min = 1e-12; break; case 1: min = 0; break; case 2: min = 0; break; default: break; } return min; } public double getDefaultValue(int k) { if (k == 0) { //arbitrary default values if (getUnits() == GENERATIONS) { return 1000.0; } else { return 0.2; } } else if (k == 1) { return 0; //constant population } else { return 0.5; } } public void setParameter(double value, int k) { switch (k) { case 0: N0 = value; break; case 1: r = value; break; case 2: alpha = value; break; default: break; } } public void setParameterSE(double value, int k) { switch (k) { case 0: N0SE = value; break; case 1: rSE = value; break; case 2: alphaSE = value; break; default: break; } } public String toString() { OutputTarget out = OutputTarget.openString(); report(out); out.close(); return out.getString(); } public void report(PrintWriter out) { out.println("Demographic model: expanding population"); out.println("Demographic function: N(t) = N0 (alpha + (1-alpha) exp(-r t)"); out.print("Unit of time: "); if (getUnits() == GENERATIONS) { out.print("generations"); } else { out.print("expected substitutions"); } out.println(); out.println(); out.println("Parameters of demographic function:"); out.print(" present-day population size N0: "); fo.displayDecimal(out, N0, 6); if (N0SE != 0.0) { out.print(" (S.E. "); fo.displayDecimal(out, N0SE, 6); out.print(")"); } out.println(); out.print(" growth rate r: "); fo.displayDecimal(out, r, 6); if (rSE != 0.0) { out.print(" (S.E. 
"); fo.displayDecimal(out, rSE, 6); out.print(")"); } out.println(); out.print(" ratio of population sizes alpha: "); fo.displayDecimal(out, alpha, 6); if (alphaSE != 0.0) { out.print(" (S.E. "); fo.displayDecimal(out, alphaSE, 6); out.print(")"); } out.println(); out.println(); out.print(" initial poulation size alpha N0: "); fo.displayDecimal(out, alpha*N0, 6); out.println(); if (getLogL() != 0.0) { out.println(); out.print("log L: "); fo.displayDecimal(out, getLogL(), 6); out.println(); } } } pal-1.5.1/src/pal/coalescent/CoalescentException.java0000644000000000000000000000122407323430030021246 0ustar rootroot// CoalescentException.java // // (c) 1999-2001 PAL Development Core Team // // This package may be distributed under the // terms of the Lesser GNU General Public License (LGPL) package pal.coalescent; /** * Exceptions to do with coalescent models et cetera. * * @version $Id: CoalescentException.java,v 1.2 2001/07/12 12:17:43 korbinian Exp $ * * @author Alexei Drummond */ public class CoalescentException extends Exception { /** * Parameterless constructor. */ public CoalescentException() { super(); } /** * Constructor taking message. */ public CoalescentException(String s) { super(s); } } pal-1.5.1/src/pal/coalescent/ConstantPopulation.java0000644000000000000000000001034307731111730021163 0ustar rootroot// ConstantPopulation.java // // (c) 1999-2001 PAL Development Core Team // // This package may be distributed under the // terms of the Lesser GNU General Public License (LGPL) package pal.coalescent; import pal.math.*; import pal.misc.*; import pal.io.*; import java.io.*; /** * This class models coalescent intervals for a constant population * (parameter: N0=present-day population size).
* If time units are set to Units.EXPECTED_SUBSTITUTIONS then * the N0 parameter will be interpreted as N0 * mu.
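* For example (an illustrative sketch, values arbitrary):
* <pre>
*   // theta = N0 * mu = 0.05, time measured in expected substitutions
*   ConstantPopulation cp = new ConstantPopulation(0.05, Units.EXPECTED_SUBSTITUTIONS);
*   double theta = cp.getDemographic(0.0); // constant for all t, returns 0.05
* </pre>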
* Also note that if you are dealing with a diploid population * N0 will be out by a factor of 2. * * @version $Id: ConstantPopulation.java,v 1.14 2003/09/14 05:14:15 matt Exp $ * * @author Alexei Drummond + @author Korbinian Strimmer */ public class ConstantPopulation extends DemographicModel implements Report, Summarizable, Parameterized, Serializable { // // private stuff // /** The summary descriptor stuff for the public values of this class @see Summarizable, getSummaryDescriptors() */ private static final String[] CP_SUMMARY_TYPES = {"N0","N0SE"}; //This is still 1.0 compliant... // // Public stuff // /** population size */ public double N0; /** standard error of population size */ public double N0SE = 0.0; /** * Construct demographic model with default settings */ public ConstantPopulation(int units) { super(); setUnits(units); N0 = getDefaultValue(0); } /** * Construct demographic model of a constant population size. */ public ConstantPopulation(double size, int units) { super(); N0 = size; setUnits(units); } public Object clone() { return new ConstantPopulation(getN0(), getUnits()); } public String[] getSummaryTypes() { return CP_SUMMARY_TYPES; } public double getSummaryValue(int summaryType) { switch(summaryType) { case 0 : { return N0; } case 1 : { return N0SE; } } throw new RuntimeException("Assertion error: unknown summary type :"+summaryType); } /** * returns initial population size. */ public double getN0() { return N0; } // Implementation of abstract methods public double getDemographic(double t) { return N0; } public double getIntensity(double t) { return t/N0; } public double getInverseIntensity(double x) { return N0*x; } // Parameterized interface public int getNumParameters() { return 1; } public double getParameter(int k) { return N0; } public double getUpperLimit(int k) { return 1e50; } public double getLowerLimit(int k) { return 1e-12; } public double getDefaultValue(int k) { //arbitrary default values if (getUnits() == GENERATIONS) { return 1000.0; } else { return 0.2; } } public void setParameter(double value, int k) { N0 = value; } public void setParameterSE(double value, int k) { N0SE = value; } public String toString() { /* String s = "Constant Population:\n"; if (getUnits() == GENERATIONS) { s += "Effective Population Size = " + N0 + "\n"; } else { s += "Theta (haploid) = " + (N0 * 2) + "\n"; } return s; */ OutputTarget out = OutputTarget.openString(); report(out); out.close(); return out.getString(); } public void report(PrintWriter out) { out.println("Demographic model: constant population size "); out.println("Demographic function: N(t) = N0"); out.print("Unit of time: "); //Units should either be EXPECTED_SUBSTITUTIONS, or GENERATIONS out.print(Units.UNIT_NAMES[getUnits()]); out.println(); out.println(); out.println("Parameters of demographic function:"); if (getUnits() == GENERATIONS) { out.print(" present day population size N0: "); fo.displayDecimal(out, N0, 6); } else { out.print(" present day Theta (N0 * mu): "); fo.displayDecimal(out, N0, 6); } if (N0SE != 0.0) { out.print(" (S.E. 
"); fo.displayDecimal(out, N0SE, 6); out.print(")"); } out.println(); if (getLogL() != 0.0) { out.println(); out.print("log L: "); fo.displayDecimal(out, getLogL(), 6); out.println(); } } } pal-1.5.1/src/pal/math/0000755000000000000000000000000010141733720013262 5ustar rootrootpal-1.5.1/src/pal/math/OrthogonalLineFunction.java0000644000000000000000000000475707645320634020607 0ustar rootroot// OrthogonalLineFunction.java // // (c) 1999-2001 PAL Development Core Team // // This package may be distributed under the // terms of the Lesser GNU General Public License (LGPL) package pal.math; /** * converts a multivariate function into a univariate function * by keeping all but one argument constant * * @author Korbinian Strimmer */ public class OrthogonalLineFunction implements UnivariateFunction { /** * construct univariate function from multivariate function * * @param func multivariate function */ public OrthogonalLineFunction(MultivariateFunction func) { this(func, 0, null); } /** * construct univariate function from multivariate function * * * @param func multivariate function * @param the initial arguments to the base MultivariateFunction (may be null) * @param selectedDimension The selected dimension/argument that the line "runs" along */ public OrthogonalLineFunction(MultivariateFunction func, int selectedDimension, double[] initialArguments ) { f = func; numArgs = f.getNumArguments(); x = new double[numArgs]; this.n = selectedDimension; if(initialArguments!=null) { System.arraycopy(initialArguments,0,x,0,Math.min(x.length,initialArguments.length)); } } /** * set (change) values of all arguments (start values) * * @param start start values */ public void setAllArguments(double[] start) { for (int i = 0; i < numArgs; i++) { x[i] = start[i]; } } /** * set (change) value of a single argument * (the one currently active) * * @param val value of argument */ public void setArgument(double val) { x[n] = val; bak = x[n]; } /** * use only the specified argument in the * constructed univariate function * and keep all others constant * * @param num argument number */ public void selectArgument(int num) { n = num; bak = x[n]; if(f.getLowerBound(num) == f.getUpperBound(num)){ System.out.println("Warning! Range is zero on parameter:"+num); } } // implementation of UnivariateFunction public double evaluate(double arg) { x[n] = arg; double v = f.evaluate(x); x[n] = bak; return v; } public double getLowerBound() { return f.getLowerBound(n); } public double getUpperBound() { return f.getUpperBound(n); } // // Private stuff // private MultivariateFunction f; private int numArgs, n; private double bak; private double[] s, x; } pal-1.5.1/src/pal/math/MultivariateMinimum.java0000644000000000000000000001514207640111206020131 0ustar rootroot// MultivariateMinimum.java // // (c) 1999-2001 PAL Development Core Team // // This package may be distributed under the // terms of the Lesser GNU General Public License (LGPL) package pal.math; /** * abstract base class for minimisation of a multivariate function * * @author Korbinian Strimmer */ public abstract class MultivariateMinimum { // // Public stuff // /** total number of function evaluations necessary */ public int numFun; /** * maxFun is the maximum number of calls to fun allowed. * the default value of 0 indicates no limit on the number * of calls. 
*/ public int maxFun = 0; /** * numFuncStops is the number of consecutive positive * evaluations of the stop criterion based on function evaluation * necessary to cause the abortion of the optimization * (default is 4) */ public int numFuncStops = 4; /** * Find minimum close to vector x * * @param f multivariate function * @param xvec initial guesses for the minimum * (contains the location of the minimum on return) * * @return minimal function value */ public double findMinimum(MultivariateFunction f, double[] xvec) { optimize(f, xvec, MachineAccuracy.EPSILON, MachineAccuracy.EPSILON); return f.evaluate(xvec); } /** * Find minimum close to vector x * (desired fractional digits for each parameter is specified) * * @param f multivariate function * @param xvec initial guesses for the minimum * (contains the location of the minimum on return) * @param fxFracDigits desired fractional digits in the function value * @param xFracDigits desired fractional digits in parameters x * * @return minimal function value */ public double findMinimum(MultivariateFunction f, double[] xvec, int fxFracDigits, int xFracDigits) { return findMinimum(f,xvec,fxFracDigits,xFracDigits,null); } /** * Find minimum close to vector x * (desired fractional digits for each parameter is specified) * * @param f multivariate function * @param xvec initial guesses for the minimum * (contains the location of the minimum on return) * @param fxFracDigits desired fractional digits in the function value * @param xFracDigits desired fractional digits in parameters x * * @return minimal function value */ public double findMinimum(MultivariateFunction f, double[] xvec, int fxFracDigits, int xFracDigits, MinimiserMonitor monitor) { double tolfx = Math.pow(10, -1-fxFracDigits); double tolx = Math.pow(10, -1-xFracDigits); optimize(f, xvec, tolfx, tolx,monitor); // trim x double m = Math.pow(10, xFracDigits); for (int i = 0; i < xvec.length; i++) { xvec[i] = Math.round(xvec[i]*m)/m; } // trim fx return Math.round(f.evaluate(xvec)*m)/m; } /** * The actual optimization routine * (needs to be implemented in a subclass of MultivariateMinimum). * It finds a minimum close to vector x when the * absolute tolerance for each parameter is specified. * * @param f multivariate function * @param xvec initial guesses for the minimum * (contains the location of the minimum on return) * @param tolfx absolute tolerance of function value * @param tolx absolute tolerance of each parameter */ public abstract void optimize(MultivariateFunction f, double[] xvec, double tolfx, double tolx); /** * The actual optimization routine * * It finds a minimum close to vector x when the * absolute tolerance for each parameter is specified. * * @param f multivariate function * @param xvec initial guesses for the minimum * (contains the location of the minimum on return) * @param tolfx absolute tolerance of function value * @param tolx absolute tolerance of each parameter * @param monitor A monitor object that receives information about the minimising process (for display purposes) * @note The default implementation just calls the optimize function with out the Monitor! 
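* <p>
* A calling sketch (minimiser, f and monitor are assumed to exist: a concrete MultivariateMinimum subclass,
* a MultivariateFunction and a MinimiserMonitor respectively; the tolerances are arbitrary):
* <pre>
*   double[] x = new double[f.getNumArguments()]; // starting guess, overwritten with the located minimum
*   minimiser.optimize(f, x, 1e-6, 1e-6, monitor);
* </pre>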
*/ public void optimize(MultivariateFunction f, double[] xvec, double tolfx, double tolx, MinimiserMonitor monitor) { optimize(f,xvec,tolfx,tolx); } /** * Checks whether optimization should stop * * @param fx current function value * @param x current values of function parameters * @param tolfx absolute tolerance of function value * @param tolx absolute tolerance of each parameter * @param firstCall needs to be set to true when this routine is first called * otherwise it should be set to false * * @return true if either x and its previous value are sufficiently similar * or if fx and its previous values are sufficiently similar * (test on function value has to be succesful numFuncStops consecutive * times) */ public boolean stopCondition(double fx, double[] x, double tolfx, double tolx, boolean firstCall) { boolean stop = false; if (firstCall) { countFuncStops = 0; fxold = fx; xold = new double[x.length]; copy(xold, x); } else { if (xStop(x, xold, tolx)) { stop = true; } else { if (fxStop(fx, fxold, tolfx)) { countFuncStops++; } else { countFuncStops = 0; } if (countFuncStops >= numFuncStops) { stop = true; } } } if (!stop) { fxold = fx; copy(xold, x); } return stop; } /** * Copy source vector into target vector * * @param target parameter array * @param source parameter array */ public static final void copy(final double[] target, final double[] source) { System.arraycopy(source,0,target,0,source.length); } // // Private stuff // // number of fStops private int countFuncStops; // old function and parameter values private double fxold; private double[] xold; private boolean xStop(double[] x, double[] xold, double tolx) { boolean stop = true; for (int i = 0; i < x.length && stop == true; i++) { if (Math.abs(x[i]-xold[i]) > tolx) { stop = false; } } return stop; } private boolean fxStop(double fx, double fxold, double tolfx) { if (Math.abs(fx-fxold) > tolfx) { return false; } else { return true; } } // =========================================================================== // ==== Factory interface /** * A factory interface for MultivariateMinimums (because they aren't statefree) */ public static interface Factory { /** * Generate a new Multivariate Minimum */ MultivariateMinimum generateNewMinimiser(); } } pal-1.5.1/src/pal/math/DifferentialEvolution.java0000644000000000000000000002117207557032204020437 0ustar rootroot// DifferentialEvolution.java // // (c) 1999-2001 PAL Development Core Team // // This package may be distributed under the // terms of the Lesser GNU General Public License (LGPL) // Price, K., and R. Storn. 1997. Differential evolution: a simple // strategy for fast optimization. Dr. Dobb's Journal 264(April), pp. 18-24. // Strategy used here: DE/rand-to-best/1/bin // http://www.icsi.berkeley.edu/~storn/code.html package pal.math; /** * global minimization of a real-valued function of several * variables without using derivatives using a genetic algorithm * (Differential Evolution) * @author Korbinian Strimmer */ public class DifferentialEvolution extends MultivariateMinimum { // // Public stuff // // Variables that control aspects of the inner workings of the // minimization algorithm. Setting them is optional, they // are all set to some reasonable default values given below. 
/** weight factor (default 0.7) */ public double F = 0.7 /* 0.5*/; /** Crossing over factor (default 0.9) */ public double CR = 0.9 /*1.0*/; /** * variable controlling print out, default value = 0 * (0 -> no output, 1 -> print final value, * 2 -> detailed map of optimization process) */ public int prin = 0; /** * construct DE optimization module (population size is * selected automatically) * *

DE web page: * http://www.icsi.berkeley.edu/~storn/code.html * * @param dim dimension of optimization vector */ public DifferentialEvolution (int dim) { this(dim, 5*dim); } /** * construct optimization modul * * @param dim dimension of optimization vector * @param popSize population size */ public DifferentialEvolution (int dim, int popSize) { // random number generator rng = new MersenneTwisterFast(); // Dimension and Population size dimension = dim; populationSize = popSize; numFun = 0; // Allocate memory currentPopulation = new double[populationSize][dimension]; nextPopulation = new double[populationSize][dimension]; costs = new double[populationSize]; trialVector = new double[dimension]; // helper variable //numr = 5; // for strategy DE/best/2/bin numr = 3; // for stragey DE/rand-to-best/1/bin r = new int[numr]; } // implementation of abstract method public void optimize(MultivariateFunction func, double[] xvec, double tolfx, double tolx) { optimize(func,xvec,tolfx,tolx,null); } public void optimize(MultivariateFunction func, double[] xvec, double tolfx, double tolx, MinimiserMonitor monitor) { f = func; x = xvec; // Create first generation firstGeneration (); stopCondition(fx, x, tolfx, tolx, true); while (true) { if(monitor!=null) { monitor.newMinimum(fx,xvec,f); } boolean xHasChanged; do { xHasChanged = nextGeneration (); if (maxFun > 0 && numFun > maxFun) { break; } if (prin > 1 && currGen % 20 == 0) { printStatistics(); } } while (!xHasChanged); if (stopCondition(fx, x, tolfx, tolx, false) || (maxFun > 0 && numFun > maxFun)) { break; } } if (prin > 0) printStatistics(); } // // Private stuff // private MultivariateFunction f; private int currGen; private double fx; private double[] x; // Dimension private int dimension; // Population size private int populationSize; // Population data private double trialCost; private double[] costs; private double[] trialVector; private double[][] currentPopulation; private double[][] nextPopulation; // Random number generator private MersenneTwisterFast rng; // Helper variable private int numr; private int[] r; private void printStatistics() { // Compute mean double meanCost = 0.0; for (int i = 0; i < populationSize; i++) { meanCost += costs[i]; } meanCost = meanCost/populationSize; // Compute variance double varCost = 0.0; for (int i = 0; i < populationSize; i++) { double tmp = (costs[i]-meanCost); varCost += tmp*tmp; } varCost = varCost/(populationSize-1); System.out.println(); System.out.println(); System.out.println(); System.out.println("Smallest value: " + fx); System.out.println(); for (int k = 0; k < dimension; k++) { System.out.println("x[" + k + "] = " + x[k]); } System.out.println(); System.out.println("Current Generation: " + currGen); System.out.println("Function evaluations: " + numFun); System.out.println("Populations size (populationSize): " + populationSize); System.out.println("Average value: " + meanCost); System.out.println("Variance: " + varCost); System.out.println("Weight factor (F): " + F); System.out.println("Crossing-over (CR): " + CR); System.out.println(); } // Generate starting population private void firstGeneration() { currGen = 1; // Construct populationSize random start vectors for (int i = 0; i < populationSize; i++) { for (int j = 0; j < dimension; j++ ) { double min = f.getLowerBound(j); double max = f.getUpperBound(j); double diff = max - min; // Uniformly distributed sample points currentPopulation[i][j] = min + diff*rng.nextDouble(); } costs[i] = f.evaluate(currentPopulation[i]); } numFun += populationSize; 
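// pick out the best (lowest cost) member of the initial population as the current minimum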
findSmallestCost (); } // check whether a parameter is out of range private double checkBounds(double param, int numParam) { if (param < f.getLowerBound(numParam)) { return f.getLowerBound(numParam); } else if (param > f.getUpperBound(numParam)) { return f.getUpperBound(numParam); } else { return param; } } // Generate next generation private boolean nextGeneration() { boolean updateFlag = false; int best = 0; // to avoid compiler complaints double[][] swap; currGen++; // Loop through all population vectors for (int r0 = 0; r0 < populationSize; r0++) { // Choose ri so that r0 != r[1] != r[2] != r[3] != r[4] ... r[0] = r0; for (int k = 1; k < numr; k++) { r[k] = randomInteger (populationSize-k); for (int l = 0; l < k; l++) { if (r[k] >= r[l]) { r[k]++; } } } copy(trialVector, currentPopulation[r0]); int n = randomInteger (dimension); for (int i = 0; i < dimension; i++) // perform binomial trials { // change at least one parameter if (rng.nextDouble() < CR || i == dimension - 1) { // DE/rand-to-best/1/bin // (change to 'numr=3' in constructor when using this strategy) trialVector[n] = trialVector[n] + F*(x[n] - trialVector[n]) + F*(currentPopulation[r[1]][n] - currentPopulation[r[2]][n]); //DE/rand-to-best/2/bin //double K = rng.nextDouble(); //trialVector[n] = trialVector[n] + // K*(x[n] - trialVector[n]) + // F*(currentPopulation[r[1]][n] - currentPopulation[r[2]][n]); // DE/best/2/bin // (change to 'numr=5' in constructor when using this strategy) //trialVector[n] = x[n] + // (currentPopulation[r[1]][n]+currentPopulation[r[2]][n] // -currentPopulation[r[3]][n]-currentPopulation[r[4]][n])*F; } n = (n+1) % dimension; } // make sure that trial vector obeys boundaries for (int i = 0; i < dimension; i++) { trialVector[i] = checkBounds(trialVector[i], i); } // Test this choice trialCost = f.evaluate(trialVector); if (trialCost < costs[r0]) { // Better than old vector costs[r0] = trialCost; copy(nextPopulation[r0], trialVector); // Check for new best vector if (trialCost < fx) { fx = trialCost; best = r0; updateFlag = true; } } else { // Keep old vector copy(nextPopulation[r0], currentPopulation[r0]); } } numFun += populationSize; // Update best vector if (updateFlag) { copy(x, nextPopulation[best]); } // Switch pointers swap = currentPopulation; currentPopulation = nextPopulation; nextPopulation = swap; return updateFlag; } // Determine vector with smallest cost in current population private void findSmallestCost() { int best = 0; fx = costs[0]; for (int i = 1; i < populationSize; i++) { if (costs[i] < fx) { fx = costs[i]; best = i; } } copy(x, currentPopulation[best]); } // draw random integer in the range from 0 to n-1 private int randomInteger(int n) { return rng.nextInt(n); } } pal-1.5.1/src/pal/math/OrderEnumerator.java0000644000000000000000000003744507457051462017273 0ustar rootroot// OrderEnumerator.java // // (c) 1999-2002 PAL Development Core Team // // This package may be distributed under the // terms of the Lesser GNU General Public License (LGPL) package pal.math; /** * A means for describing odering information, and Utilities for creating such Orderings * * @version $Id: OrderEnumerator.java,v 1.1 2002/04/16 05:37:05 matt Exp $ * * @author Matthew Goode */ public interface OrderEnumerator { /** * If hasMore returns false reset should be called */ boolean hasMore(); /** * The next value in the enumeration */ int getNext(); /** * Reset back to starting state, may have a differnet number of values, and a different ordering after a reset! 
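* <p>
* A typical traversal sketch (numberOfParameters stands in for the real count; the factory methods live in the Utils class below):
* <pre>
*   OrderEnumerator oe = OrderEnumerator.Utils.getShuffled(numberOfParameters);
*   while (oe.hasMore()) {
*     int index = oe.getNext();
*     // ... visit parameter 'index' ...
*   }
*   oe.reset(); // prepare for another (possibly reordered) pass
* </pre>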
*/ void reset(); public static interface OEFactory { /** * For generating an ordering from 0..size-1. Enumerator doesn't have to actually produce */ public OrderEnumerator createOrderEnumerator(int size); } //===================================================================================================== //================================= Utilities, and hidden classes ===================================== //===================================================================================================== public static class Utils { private static final Constant ZERO = new Constant(0); /** * @param index The index to always return * @return an OrderEnumerator object that always returns 'index' */ public static final OrderEnumerator getConstant(int index) { return new Constant(index); } /** * @param size the number of different indexes returned (between 0 and size-1) * @return an OrderEnumerator object returns index in order between a certain range */ public static final OrderEnumerator getOrdered(int size) { return new Ordered(size); } /** * @param size the number of different indexes returned (between 0 and size-1) * @return an OrderEnumerator object returns index in random order between a certain range (order changes with each reset) */ public static final OrderEnumerator getShuffled(int size) { return new Shuffled(size); } /** * @param primary The primary OrderEnumerator, one index is taken from this enumertor than an entire sequence of the secondary is taken * @param secondar The primary OrderEnumerator, the entire sequence of a secondary enumerator is taken for every single index from the primary enumerator * * @return an OrderEnumerator object that combines two sub enumerators */ public static final OrderEnumerator getBiasAlternating(OrderEnumerator primary, OrderEnumerator secondary) { return new BiasAlternate(primary,secondary); } /** * @param primary The primary OrderEnumerator * @param secondar The primary OrderEnumerator * * @return an OrderEnumerator object that combines two sub enumerators, by alternating between outputs */ public static final OrderEnumerator getAlternating(OrderEnumerator primary, OrderEnumerator secondary) { return new Alternate(primary,secondary); } /** * @return OrderEnumerator that always returns 0 (zero) */ public static final OrderEnumerator getZero() { return ZERO; } /** * @param minimum minmim value released * @param range range of values released (that is values go between minimum (inclusive) and minimum+range(exclusive) * * @return an OrderEnumerator that is restricted in indexes it returns based on base Enumerator * */ public static final OrderEnumerator getRestricted(OrderEnumerator toRestrict, int minimum, int range) { return new Restricted(toRestrict,minimum, range); } /** * @return OrderEnumerator that always returns 0 (zero) */ public static final OrderEnumerator getAdjusted(OrderEnumerator toAdjust, int adjustmentFactor) { return new Adjust(toAdjust,adjustmentFactor); } //============================================================ //=================== Factory Stuff ========================== /** * @return OrderEnumerator that always returns 0 (zero) */ public static final OrderEnumerator.OEFactory getZeroFactory() { return ZERO; } /** * @param index The index to always return * @return an OrderEnumerator object that always returns 'index' */ public static final OrderEnumerator.OEFactory getConstantFactory(int index) { return new Constant(index); } /** * @return an OrderEnumerator object returns index in order between a certain range 
*/ public static final OrderEnumerator.OEFactory getOrderedFactory() { return Ordered.Factory.INSTANCE; } /** * @return an OrderEnumerator object returns index in random order between a certain range (order changes with each reset) */ public static final OrderEnumerator.OEFactory getShuffledFactory() { return Shuffled.Factory.INSTANCE; } /** * @param adjustmentFactor If to adjust returns x, adjusted will return x+adjustmentFactory (it's that simple) * @return an OrderEnumerator that returns indexes adjusted from a base enumerator * */ public static final OrderEnumerator.OEFactory getAdjustedFactory(OrderEnumerator.OEFactory toAdjust, int adjustmentFactor) { return new Adjust.Factory(toAdjust,adjustmentFactor); } /** * @param minimum minmim value released * @param range range of values released (that is values go between minimum (inclusive) and minimum+range(exclusive) * * @return an OrderEnumerator that is restricted in indexes it returns based on base Enumerator * */ public static final OrderEnumerator.OEFactory getRestrictedFactory(OrderEnumerator.OEFactory toRestrict, int minimum, int range) { return new Restricted.Factory(toRestrict,minimum, range); } /** * @return an OrderEnumerator object that alternates outputs between two base enumerator */ public static final OrderEnumerator.OEFactory getAlternatingFactory(OrderEnumerator.OEFactory primary, OrderEnumerator.OEFactory secondary) { return new Alternate.Factory(primary,secondary); } /** * @return an OrderEnumerator object that alternates outputs between two base enumerator * (takes one from primary, than all from secondary, one from primary, all from secondary) */ public static final OrderEnumerator.OEFactory getBiasAlternatingFactory(OrderEnumerator.OEFactory primary, OrderEnumerator.OEFactory secondary) { return new BiasAlternate.Factory(primary,secondary); } //======================================================= /** * Returns the same index ever call */ private static class Constant implements OrderEnumerator, OrderEnumerator.OEFactory { int index_; boolean hasMore_; public Constant(int index) { this.index_ = index; } public boolean hasMore() { return hasMore_; } public int getNext() { hasMore_ = false; return index_; } public void reset() { hasMore_ = true; } /** * For generating an ordering from 0..size-1. Enumerator doesn't have to actually produce */ public OrderEnumerator createOrderEnumerator(int size) { return this; } } //End of Constant //======================================================= /** * Incrementally returns indexes */ private static class Ordered implements OrderEnumerator { int index_; int size_; public Ordered(int size) { this.size_= size; reset(); } public boolean hasMore() { return index_=minimum_&&i no output, 1 -> print only starting and final values, * 2 -> detailed map of the minimization process, * 3 -> print also eigenvalues and vectors of the * search directions), the default value is 0 */ public int prin = 0; /** * step is a steplength parameter and should be set equal * to the expected distance from the solution. * exceptionally small or large values of step lead to * slower convergence on the first few iterations * the default value for step is 1.0 */ public double step = 1.0; /** * scbd is a scaling parameter. 1.0 is the default and * indicates no scaling. if the scales for the different * parameters are very different, scbd should be set to * a value of about 10.0. */ public double scbd = 1.0; /** * illc should be set to true * if the problem is known to * be ill-conditioned. 
the default is false. this * variable is automatically set, when the problem * is found to to be ill-conditioned during iterations. */ public boolean illc = false; // implementation of abstract method public void optimize(MultivariateFunction f, double[] xvector, double tolfx, double tolx) { optimize(f,xvector,tolfx,tolx,null); } public void optimize(MultivariateFunction f, double[] xvector, double tolfx, double tolx, MinimiserMonitor monitor) { t = tolx; fun = f; x = xvector; dim = fun.getNumArguments();; checkBounds(x); h = step; d = new double[dim]; y = new double[dim]; z = new double[dim]; q0 = new double[dim]; q1 = new double[dim]; v = new double[dim][dim]; tflin = new double[dim]; small = MachineAccuracy.EPSILON*MachineAccuracy.EPSILON; vsmall = small*small; large = 1.0/small; vlarge = 1.0/vsmall; ldfac = (illc ? 0.1 : 0.01); nl = kt = 0; numFun = 1; fx = fun.evaluate(x); stopCondition(fx, x, tolfx, tolx, true); qf1 = fx; t2 = small + Math.abs(t); t = t2; dmin = small; if (h < 100.0*t) h = 100.0*t; ldt = h; for (i = 0; i < dim; i++) { for (j = 0; j < dim; j++) { v[i][j] = (i == j ? 1.0 : 0.0); } } d[0] = 0.0; qd0 = 0.0; for (i = 0; i < dim; i++) q1[i] = x[i]; if (prin > 1) { System.out.println("\n------------- enter function praxis -----------\n"); System.out.println("... current parameter settings ..."); System.out.println("... scaling ... " + scbd); System.out.println("... tolx ... " + t); System.out.println("... tolfx ... " + tolfx); System.out.println("... maxstep ... " + h); System.out.println("... illc ... " + illc); System.out.println("... maxFun ... " + maxFun); } if (prin > 0) System.out.println(); while(true) { sf = d[0]; s = d[0] = 0.0; /* minimize along first direction */ min1 = d[0]; min2 = s; min(0, 2, fx, false); d[0] = min1; s = min2; if (s <= 0.0) for (i = 0; i < dim; i++) { v[i][0] = -v[i][0]; } if ((sf <= (0.9 * d[0])) || ((0.9 * sf) >= d[0])) for (i=1; i < dim; i++) d[i] = 0.0; boolean gotoFret = false; for (k=1; k < dim; k++) { for (i=0; i< dim; i++) { y[i] = x[i]; } sf = fx; illc = illc || (kt > 0); boolean gotoNext; do { kl = k; df = 0.0; if (illc) { /* random step to get off resolution valley */ for (i=0; i < dim; i++) { z[i] = (0.1 * ldt + t2 * Math.pow(10.0,(double)kt)) * (rng.nextDouble() - 0.5); s = z[i]; for (j=0; j < dim; j++) { x[j] += s * v[j][i]; } } checkBounds(x); fx = fun.evaluate(x); numFun++; } /* minimize along non-conjugate directions */ for (k2=k; k2 < dim; k2++) { sl = fx; s = 0.0; min1 = d[k2]; min2 = s; min(k2, 2, fx, false); d[k2] = min1; s = min2; if (illc) { double szk = s + z[k2]; s = d[k2] * szk*szk; } else s = sl - fx; if (df < s) { df = s; kl = k2; } } if (!illc && (df < Math.abs(100.0 * MachineAccuracy.EPSILON * fx))) { illc = true; gotoNext = true; } else gotoNext = false; } while (gotoNext); if ((k == 1) && (prin > 1)) vecprint("\n... 
New Direction ...", d); /* minimize along conjugate directions */ for (k2=0; k2<=k-1; k2++) { s = 0.0; min1 = d[k2]; min2 = s; min(k2, 2, fx, false); d[k2] = min1; s = min2; } f1 = fx; fx = sf; lds = 0.0; for (i=0; i small) { for (i=kl-1; i>=k; i--) { for (j=0; j < dim; j++) v[j][i+1] = v[j][i]; d[i+1] = d[i]; } d[k] = 0.0; for (i=0; i < dim; i++) v[i][k] = y[i] / lds; min1 = d[k]; min2 = lds; min(k, 4, f1, true); d[k] = min1; lds = min2; if (lds <= 0.0) { lds = -lds; for (i=0; i< dim; i++) v[i][k] = -v[i][k]; } } ldt = ldfac * ldt; if (ldt < lds) ldt = lds; if (prin > 1) print(); if(monitor!=null) { monitor.newMinimum(fx,x,f); } if(stopCondition(fx, x, tolfx, tolx, false)) { kt++; } else { kt = 0; } if (kt > 1) { gotoFret = true; break; } } if (gotoFret) break; /* try quadratic extrapolation in case */ /* we are stuck in a curved valley */ quadr(); dn = 0.0; for (i=0; i < dim; i++) { d[i] = 1.0 / Math.sqrt(d[i]); if (dn < d[i]) dn = d[i]; } if (prin > 2) matprint("\n... New Matrix of Directions ...",v); for (j=0; j < dim; j++) { s = d[j] / dn; for (i=0; i < dim; i++) v[i][j] *= s; } if (scbd > 1.0) { /* scale axis to reduce condition number */ s = vlarge; for (i=0; i < dim; i++) { sl = 0.0; for (j=0; j < dim; j++) sl += v[i][j]*v[i][j]; z[i] = Math.sqrt(sl); if (z[i] < MachineAccuracy.SQRT_SQRT_EPSILON) z[i] = MachineAccuracy.SQRT_SQRT_EPSILON; if (s > z[i]) s = z[i]; } for (i=0; i < dim; i++) { sl = s / z[i]; z[i] = 1.0 / sl; if (z[i] > scbd) { sl = 1.0 / scbd; z[i] = scbd; } } } for (i=1; i < dim; i++) for (j=0; j<=i-1; j++) { s = v[i][j]; v[i][j] = v[j][i]; v[j][i] = s; } minfit(dim, MachineAccuracy.EPSILON, vsmall, v, d); if (scbd > 1.0) { for (i=0; i < dim; i++) { s = z[i]; for (j=0; j < dim; j++) v[i][j] *= s; } for (i=0; i < dim; i++) { s = 0.0; for (j=0; j < dim; j++) s += v[j][i]*v[j][i]; s = Math.sqrt(s); d[i] *= s; s = 1.0 / s; for (j=0; j < dim; j++) v[j][i] *= s; } } for (i=0; i < dim; i++) { if ((dn * d[i]) > large) d[i] = vsmall; else if ((dn * d[i]) < small) d[i] = vlarge; else d[i] = Math.pow(dn * d[i],-2.0); } sort(); /* the new eigenvalues and eigenvectors */ dmin = d[dim-1]; if (dmin < small) dmin = small; illc = (MachineAccuracy.SQRT_EPSILON * d[0]) > dmin; if ((prin > 2) && (scbd > 1.0)) vecprint("\n... Scale Factors ...",z); if (prin > 2) vecprint("\n... Eigenvalues of A ...",d); if (prin > 2) matprint("\n... Eigenvectors of A ...",v); if ((maxFun > 0) && (nl > maxFun)) { if (prin > 0) System.out.println("\n... maximum number of function calls reached ..."); break; } } if (prin > 0) { vecprint("\n... Final solution is ...", x); System.out.println("\n... Function value reduced to " + fx + " ..."); System.out.println("... 
after " + numFun + " function calls."); } //return (fx); } // // Private stuff // // some global variables private int i, j, k, k2, nl, kl, kt; private double s, sl, dn, dmin, fx, f1, lds, ldt, sf, df, qf1, qd0, qd1, qa, qb, qc, small, vsmall, large, vlarge, ldfac, t2; // need to be initialised private double[] d; private double[] y; private double[] z; private double[] q0; private double[] q1; private double[][] v; private double[] tflin; private int dim; private double[] x; private MultivariateFunction fun; // these will be set by praxis to the global control parameters private double h, t; // Random number generator private MersenneTwisterFast rng; // sort d and v in descending order private void sort() { int k, i, j; double s; for (i=0; i < dim-1; i++) { k = i; s = d[i]; for (j=i+1; j < dim; j++) { if (d[j] > s) { k = j; s = d[j]; } } if (k > i) { d[k] = d[i]; d[i] = s; for (j=0; j < dim; j++) { s = v[j][i]; v[j][i] = v[j][k]; v[j][k] = s; } } } } private void vecprint(String s, double[] x) { System.out.println(s); for (int i=0; i < x.length; i++) System.out.print(x[i] + " "); System.out.println(); } private void print() /* print a line of traces */ { System.out.println(); System.out.println("... function value reduced to ... " + fx); System.out.println("... after " + numFun + " function calls ..."); System.out.println("... including " + nl + " linear searches ..."); vecprint("... current values of x ...", x); } private void matprint(String s, double[][] v) { System.out.println(s); for (int k=0; k upper) { p[i] = upper; } } } private double min1; private double min2; private void min(int j, int nits, double f1, boolean fk) { int k; double x2, xm, f0, f2, fm, d1, t2, s, sf1, sx1; sf1 = f1; sx1 = min2; k = 0; xm = 0.0; fm = f0 = fx; boolean dz = (min1 < MachineAccuracy.EPSILON); /* find step size */ s = 0; for (int i=0; i < dim; i++) { s += x[i]*x[i]; } s = Math.sqrt(s); if (dz) { t2 = MachineAccuracy.SQRT_SQRT_EPSILON* Math.sqrt(Math.abs(fx)/dmin + s*ldt) + MachineAccuracy.SQRT_EPSILON*ldt; } else { t2 = MachineAccuracy.SQRT_SQRT_EPSILON* Math.sqrt(Math.abs(fx)/(min1) + s*ldt) + MachineAccuracy.SQRT_EPSILON*ldt; } s = s*MachineAccuracy.SQRT_SQRT_EPSILON + t; if (dz && t2 > s) t2 = s; if (t2 < small) t2 = small; if (t2 > 0.01*h) t2 = 0.01 * h; if (fk && f1 <= fm) { xm = min2; fm = f1; } if (!fk || Math.abs(min2) < t2) { min2 = (min2 > 0 ? t2 : -t2); f1 = flin( min2, j,f1 ); } if (f1 <= fm) { xm = min2; fm = f1; } boolean gotoNext; do { if (dz) { x2 = (f0 < f1 ? -(min2) : 2*(min2)); f2 = flin(x2, j,f1); //I used f1 because I can't use f2... if (f2 <= fm) { xm = x2; fm = f2; } min1 = (x2*(f1-f0) - (min2)*(f2-f0))/((min2)*x2*((min2)-x2)); } d1 = (f1-f0)/(min2) - min2*min1; dz = true; if (min1 <= small) { x2 = (d1 < 0 ? h : -h); } else { x2 = - 0.5*d1/(min1); } if (Math.abs(x2) > h) x2 = (x2 > 0 ? h : -h); f2 = flin(x2, j,f1); //I used f1 because I can't use f2... 
gotoNext = false; while ((k < nits) && (f2 > f0)) { k++; if ((f0 < f1) && (min2*x2 > 0.0)) { gotoNext = true; break; } x2 *= 0.5; f2 = flin(x2, j,f2); } } while (gotoNext); nl++; if (f2 > fm) x2 = xm; else fm = f2; if (Math.abs(x2*(x2-min2)) > small) { min1 = (x2*(f1-f0) - min2*(fm-f0))/(min2*x2*(min2-x2)); } else { if (k > 0) min1 = 0; } if (min1 <= small) min1 = small; min2 = x2; fx = fm; if (sf1 < fx) { fx = sf1; min2 = sx1; } if (j != -1) { for (i=0; i < dim; i++) { x[i] += (min2)*v[i][j]; } checkBounds(x); } } // Look for a minimum along the curve q0, q1, q2 private void quadr() { int i; double l, s; s = fx; fx = qf1; qf1 = s; qd1 = 0.0; for (i=0; i < dim; i++) { s = x[i]; l = q1[i]; x[i] = l; q1[i] = s; qd1 = qd1 + (s-l)*(s-l); } s = 0.0; qd1 = Math.sqrt(qd1); l = qd1; if (qd0>0.0 && qd1>0.0 &&nl>=3*dim*dim) { min1 = s; min2 = l; min(-1, 2, qf1, true); s = min1; l = min2; qa = l*(l-qd1)/(qd0*(qd0+qd1)); qb = (l+qd0)*(qd1-l)/(qd0*qd1); qc = l*(l+qd0)/(qd1*(qd0+qd1)); } else { fx = qf1; qa = qb = 0.0; qc = 1.0; } qd0 = qd1; for (i=0; i x) x = y; } /* accumulation of right hand transformations */ for (i=n-1; i >= 0; i--) { if (g != 0.0) { h = ab[i][i+1]*g; for (j=l; j= 0; k--) { kt = 0; do { kt++; boolean skipNext = false; for (l2=k; l2>=0; l2--) { l = l2; if (Math.abs(e[l]) <= eps) { skipNext = true; break; } if (Math.abs(q[l-1]) <= eps) break; } if (skipNext == false) { c = 0.0; s = 1.0; for (i=l; i<=k; i++) { f = s * e[i]; e[i] *= c; if (Math.abs(f) <= eps) break; g = q[i]; if (Math.abs(f) < Math.abs(g)) { double fg = f/g; h = Math.abs(g)*Math.sqrt(1.0+fg*fg); } else { double gf = g/f; h = (f!=0.0 ? Math.abs(f)*Math.sqrt(1.0+gf*gf) : 0.0); } q[i] = h; if (h == 0.0) { h = 1.0; g = 1.0; } c = g/h; s = -f/h; } } z = q[k]; if (l == k) { converged = true; break; } /* shift from bottom 2x2 minor */ x = q[l]; y = q[k-l]; g = e[k-1]; h = e[k]; f = ((y-z)*(y+z) + (g-h)*(g+h)) / (2.0*h*y); g = Math.sqrt(f*f+1.0); if (f <= 0.0) f = ((x-z)*(x+z) + h*(y/(f-g)-h))/x; else f = ((x-z)*(x+z) + h*(y/(f+g)-h))/x; /* next qr transformation */ s = c = 1.0; for (i=l+1; i<=k; i++) { g = e[i]; y = q[i]; h = s*g; g *= c; if (Math.abs(f) < Math.abs(h)) { double fh = f/h; z = Math.abs(h) * Math.sqrt(1.0 + fh*fh); } else { double hf = h/f; z = (f!=0.0 ? Math.abs(f)*Math.sqrt(1.0+hf*hf) : 0.0); } e[i-1] = z; if (z == 0.0) f = z = 1.0; c = f/z; s = h/z; f = x*c + g*s; g = - x*s + g*c; h = y*s; y *= c; for (j=0; j0, accurate to 10 decimal places * * @param alpha argument * * @param function value */ public static double lnGamma(double alpha) { // Pike MC & Hill ID (1966) Algorithm 291: Logarithm of the gamma function. 
// Communications of the Association for Computing Machinery, 9:684 double x = alpha, f = 0.0, z; if (x < 7) { f = 1; z = x-1; while (++z < 7) { f *= z; } x = z; f = -Math.log(f); } z = 1/(x*x); return f + (x-0.5)*Math.log(x) - x + 0.918938533204673 + (((-0.000595238095238*z+0.000793650793651) * z-0.002777777777778)*z + 0.083333333333333)/x; } /** * Incomplete Gamma function Q(a,x) * (a cleanroom implementation of Numerical Recipes gammq(a,x); * in Mathematica this function is called GammaRegularized) * * @param a parameter * @param x argument * * @return function value */ public static double incompleteGammaQ(double a, double x) { return 1.0 - incompleteGamma(x, a, lnGamma(a)); } /** * Incomplete Gamma function P(a,x) = 1-Q(a,x) * (a cleanroom implementation of Numerical Recipes gammp(a,x); * in Mathematica this function is 1-GammaRegularized) * * @param a parameter * @param x argument * * @return function value */ public static double incompleteGammaP(double a, double x) { return incompleteGamma(x, a, lnGamma(a)); } /** * Incomplete Gamma function P(a,x) = 1-Q(a,x) * (a cleanroom implementation of Numerical Recipes gammp(a,x); * in Mathematica this function is 1-GammaRegularized) * * @param a parameter * @param x argument * @param double lnGammaA precomputed lnGamma(a) * * @return function value */ public static double incompleteGammaP(double a, double x, double lnGammaA) { return incompleteGamma(x, a, lnGammaA); } /** * Returns the incomplete gamma ratio I(x,alpha) where x is the upper * limit of the integration and alpha is the shape parameter. */ private static double incompleteGamma(double x, double alpha, double ln_gamma_alpha) { // (1) series expansion if (alpha>x || x<=1) // (2) continued fraction otherwise // RATNEST FORTRAN by // Bhattacharjee GP (1970) The incomplete gamma integral. Applied Statistics, // 19: 285-287 (AS32) int i; double p = alpha, g = ln_gamma_alpha; double accurate = 1e-8, overflow = 1e30; double factor, gin = 0, rn = 0, a = 0,b = 0,an = 0, dif = 0, term = 0; double pn0, pn1, pn2, pn3, pn4, pn5; if (x == 0.0) { return 0.0; } if ( x < 0.0 || p <= 0.0) { throw new IllegalArgumentException("Arguments out of bounds"); } factor = Math.exp(p*Math.log(x)-x-g); if (x > 1 && x >= p) { // continued fraction a = 1-p; b = a+x+1; term = 0; pn0 = 1; pn1 = x; pn2 = x+1; pn3 = x*b; gin = pn2/pn3; do { a++; b += 2; term++; an = a*term; pn4 = b*pn2-an*pn0; pn5 = b*pn3-an*pn1; if (pn5 != 0) { rn = pn4/pn5; dif = Math.abs(gin-rn); if (dif <= accurate) { if (dif <= accurate*rn) { break; } } gin=rn; } pn0 = pn2; pn1 = pn3; pn2 = pn4; pn3 = pn5; if (Math.abs(pn4) >= overflow) { pn0 /= overflow; pn1 /= overflow; pn2 /= overflow; pn3 /= overflow; } } while (true); gin = 1-factor*gin; } else { // series expansion gin = 1; term = 1; rn = p; do { rn++; term *= x/rn; gin += term; } while (term > accurate); gin *= factor/p; } return gin; } } pal-1.5.1/src/pal/math/GeneralizedDEOptimizer.java0000644000000000000000000000706207665733056020522 0ustar rootroot// GeneralizedDEOptimizer.java // // (c) 1999-2001 PAL Development Core Team // // This package may be distributed under the // terms of the Lesser GNU General Public License (LGPL) package pal.math; /** * Provides an general interface to the DifferentialEvolution class that is not * tied to a certain number of parameters (as DifferentialEvolution is). Works but * creating a new DiffentialEvolution engine when presented with a new number of * parameters. 
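* <p>
* A minimal calling sketch (f is assumed to be some pal.math.MultivariateFunction):
* <pre>
*   MultivariateMinimum optimiser = new GeneralizedDEOptimizer();
*   double[] x = MathUtils.getRandomArguments(f); // random, in-bounds starting point
*   double fmin = optimiser.findMinimum(f, x, 6, 6); // 6 fractional digits in f(x) and x
* </pre>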
All the actual optimisation work is handled by DifferentialEvolution., * @author Matthew Goode * @version $Id: GeneralizedDEOptimizer.java,v 1.8 2003/05/30 08:51:10 matt Exp $ */ public class GeneralizedDEOptimizer extends MultivariateMinimum { private DifferentialEvolution optimiser_; private int currentNumberOfParameters_ = 0; /** * A value of <1 means use default for given number of parameters */ private int populationSize_ = -1; public GeneralizedDEOptimizer() { this(-1); } public GeneralizedDEOptimizer(int populationSize) { this.populationSize_ = populationSize; } /** * The actual optimization routine * It finds a minimum close to vector x when the * absolute tolerance for each parameter is specified. * * @param f multivariate function * @param xvec initial guesses for the minimum * (contains the location of the minimum on return) * @param tolfx absolute tolerance of function value * @param tolx absolute tolerance of each parameter */ public void optimize(MultivariateFunction f, double[] xvec, double tolfx, double tolx) { optimize(f,xvec,tolfx,tolx,null); } /** * The actual optimization routine * It finds a minimum close to vector x when the * absolute tolerance for each parameter is specified. * * @param f multivariate function * @param xvec initial guesses for the minimum * (contains the location of the minimum on return) * @param tolfx absolute tolerance of function value * @param tolx absolute tolerance of each parameter */ public void optimize(MultivariateFunction f, double[] xvec, double tolfx, double tolx, MinimiserMonitor monitor) { if(optimiser_==null||xvec.length!=currentNumberOfParameters_) { if(populationSize_>0) { optimiser_ = new DifferentialEvolution(xvec.length,populationSize_); } else { optimiser_ = new DifferentialEvolution(xvec.length); } this.currentNumberOfParameters_= xvec.length; } optimiser_.optimize(f,xvec,tolfx, tolx,monitor); } //============ Static Methods ==================== /** * Generate a MultivariateMinimum.Factory for an GeneralizedDEOptimiser with a set population size * @param populationSize The set population size */ public static final Factory generateFactory(int populationSize) { return new SearchFactory(populationSize); } /** * Generate a MultivariateMinimum.Factory for an GeneralizedDEOptimiser with a population size proportional to the size of the problem */ public static final Factory generateFactory() { return new SearchFactory(); } // ============ The Factory Class for Orthogonal Searches =================== private static final class SearchFactory implements Factory { private final int populationSize_; private SearchFactory() { this(-1); } private SearchFactory(int populationSize) { this.populationSize_ = populationSize; } public MultivariateMinimum generateNewMinimiser() { if(populationSize_>0) { return new GeneralizedDEOptimizer(populationSize_); } return new GeneralizedDEOptimizer(); } } } pal-1.5.1/src/pal/math/Binomial.java0000644000000000000000000000273507323721302015667 0ustar rootroot// Binomial.java // // (c) 1999-2001 PAL Development Core Team // // This package may be distributed under the // terms of the Lesser GNU General Public License (LGPL) package pal.math; /** * Binomial coefficients * * @version $Id: Binomial.java,v 1.6 2001/07/13 14:39:13 korbinian Exp $ * * @author Korbinian Strimmer */ public class Binomial implements java.io.Serializable { // // Public stuff // /** * Binomial coefficient n choose k */ public double choose(double n, double k) { n = Math.floor(n + 0.5); k = Math.floor(k + 0.5); double lchoose = 
GammaFunction.lnGamma(n + 1.0) - GammaFunction.lnGamma(k + 1.0) - GammaFunction.lnGamma(n - k + 1.0); return Math.floor(Math.exp(lchoose) + 0.5); } /** * get (precomputed) n choose 2 */ public double getNChoose2(int n) { return nChoose2[n]; } /** * set capacity and precompute the n choose 2 values */ public void setMax(int max) { if (nChoose2 == null) { precalculate(max); } else if (max >= nChoose2.length) { precalculate(Math.max(nChoose2.length * 2, max)); } } // // private stuff // private double[] nChoose2 = null; /** * pre-calculates n choose 2 up to a given number of lineages, if * not already pre-calculated. */ private void precalculate(int n) { nChoose2 = new double[n+1]; for (int i=0; i < (n+1); i++) { nChoose2[i] = ((double) (i*(i-1))) * 0.5; } } } pal-1.5.1/src/pal/math/makefile0000644000000000000000000000123707275407432015001 0ustar rootroot### VARIABLES ### JIKESOPTS := +P GCJOPTS := # Always check dependencies JIKESOPTS += +M SRC := $(wildcard *.java) CLS := $(patsubst %.java, %.class, $(SRC)) OBJ := $(patsubst %.class, %.o, $(wildcard *.class)) ### RULES ### # Compile Java sources into class files %.class: %.java jikes $(JIKESOPTS) $< # Alternative to using jikes: gcj -C # Compile class files into native code %.o: %.class gcj -c $(GCJOPTS) $< ### TARGETS ### .PHONY: classes native clean classes: $(CLS) ifneq ($(OBJ),) native: $(OBJ) (ar -rv package.a *.o; ranlib package.a) else native: $(OBJ) endif clean: rm -f *.class *.o *.u *.a *~ pal-1.5.1/src/pal/math/MathUtils.java0000644000000000000000000001123010104436566016043 0ustar rootroot// MathUtils.java // // (c) 1999-2001 PAL Development Core Team // // This package may be distributed under the // terms of the Lesser GNU General Public License (LGPL) package pal.math; /** * Handy utility functions which have some Mathematical relavance. * * @author Matthew Goode * @author Alexei Drummond * * @version $Id: MathUtils.java,v 1.10 2004/08/05 03:00:22 matt Exp $ */ public class MathUtils { public MathUtils() {} /** * Calculate the logged difference in two values = x - y, where the log of x and y are given. * That is, given ln(x), and ln(y) calculate ln(x-y) * @param lnOfX the natural log of X * @param lnOfY the natural log of Y * @return X - Y (logged) */ public static final double getLoggedDifferenceFromLogged(double lnOfX, double lnOfY) { return lnOfY+Math.log(Math.exp(lnOfX-lnOfY)-1); } /** * Ensure a value of x is actaully bounded between two values. Useful when using numerical analysis tools that may, over the limits of the expressed accuracy, return values outside the specified range * @param x the value of interest * @param lower the lower bound * @param upper the upper bound * @return x if between the bounds, or lower bound if lower than lower bound, or upper bound if higher than upper */ public final static double ensureBounded(double x, double lower, double upper){ if(xupper) { return upper; } return x; } /** * A random number generator that is initialized with the clock when this * class is loaded into the JVM. Use this for all random numbers. * @note This method or getting random numbers in not thread-safe. Since * MersenneTwisterFast is currently (as of 9/01) not synchronized using * this function may cause concurrency issues. Use the static get methods of the * MersenneTwisterFast class for access to a single instance of the class, that * has synchronization. */ public static MersenneTwisterFast random = new MersenneTwisterFast(); /** * @return a new double array where all the values sum to 1. 
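* (for example, an input of {1.0, 3.0} would come back as {0.25, 0.75})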
* Relative ratios are preserved. */ public static final double[] getNormalized(double[] array) { double[] newArray = new double[array.length]; double total = getTotal(array); for(int i = 0 ; i < array.length ; i++) { newArray[i] = array[i]/total; } return newArray; } /** * @param end the index of the element after the last one to be included * @return the total of a the values in a range of an array */ public static final double getTotal(double[] array, int start, int end) { double total = 0.0; for(int i = start ; i < array.length ; i++) { total+=array[i]; } return total; } /** * @param array * @param start * @param end the index of the element after the last one to be included * @return the minimum of a the values in a range of an array */ public static final double getMinimum(double[] array, int start, int end) { double minimum = array[start]; for(int i = start+1 ; i < array.length ; i++) { double v = array[i]; if(vmaximum) { maximum = v; } } return maximum; } /** * @return the total of the values in an array */ public static final double getTotal(double[] array) { return getTotal(array,0, array.length); } /** * @return a set of valid, but randomly generated, arguments for a particular MultivariateFunction */ public static final double[] getRandomArguments(MultivariateFunction mf) { double[] values = new double[mf.getNumArguments()]; for(int i = 0; i < values.length ; i++) { double min = mf.getLowerBound(i); double max = mf.getUpperBound(i); values[i] = (max-min)*MersenneTwisterFast.getNextDouble()+min; } return values; } } pal-1.5.1/src/pal/math/MinimiserMonitor.java0000644000000000000000000001174307730622130017441 0ustar rootroot// MultivariateMonitor.java // // (c) 1999-2001 PAL Development Core Team // // This package may be distributed under the // terms of the Lesser GNU General Public License (LGPL) package pal.math; import java.io.*; /** * interface for a classes that wish to monitor the progress of a Minimiser * * @author Matthew Goode */ public interface MinimiserMonitor { /** * Inform monitor of current progress (as a number between 0 and 1), or -1 to reset */ public void updateProgress(double progress); /** * Inform monitor of a new minimum, along with the current arguments. Monitors should NOT * change the supplied array of parameterValues! * This should be called in the same thread as the minimisation so that beingOptimized may be accessed * within this call with out worry of conflicting with the optimisation process! 
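* <p>
* A monitoring sketch (minimiser, f and x are assumed to exist; the factory methods live in the Utils class below):
* <pre>
*   MinimiserMonitor monitor = MinimiserMonitor.Utils.createStringMonitor();
*   minimiser.findMinimum(f, x, 6, 6, monitor);
*   System.out.println(monitor); // prints the minima reported during the search
* </pre>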
*/ public void newMinimum(double value, double[] parameterValues, MultivariateFunction beingOptimized); //===================================================================== //===================================================================== public static class Utils { /** * Creates a MinimiserMonitor that outputs current minimum to a print stream */ public static final MinimiserMonitor createSimpleMonitor(PrintWriter output) { return new Simple(output); } /** * @create a monitor such that all information sent to monitor is based on two sub monitors */ public static final MinimiserMonitor createSplitMonitor(MinimiserMonitor a, MinimiserMonitor b) { return new Split(a,b); } /** * Creates a MinimiserMonitor that outputs current minimum to a System.out */ public static final MinimiserMonitor createSystemOuptutMonitor() { return SystemOutput.INSTANCE; } /** * Creates a MinimiserMonitor that outputs current minimum to a System.err */ public static final MinimiserMonitor createSystemErrorMonitor() { return SystemError.INSTANCE; } /** * Creates a MinimiserMonitor that Stores output (use toString() to access current results) */ public static final MinimiserMonitor createStringMonitor() { return new StringMonitor(); } /** * Creates a MinimiserMonitor that looses all output */ public static final MinimiserMonitor createNullMonitor() { return NullMonitor.INSTANCE; } //============================================================= private static final class StringMonitor implements MinimiserMonitor { private final StringWriter sw_; private final PrintWriter pw_; public StringMonitor() { this.sw_ = new StringWriter(); this.pw_ = new PrintWriter(sw_,true); } public void updateProgress(double progress) { pw_.println("Update Progress:"+progress); } public void newMinimum(double value, double[] parameterValues, MultivariateFunction beingOptimized) { pw_.println("New Minimum:"+value); } public String toString() { return sw_.toString(); } } //============================================================= private static final class NullMonitor implements MinimiserMonitor { public static final MinimiserMonitor INSTANCE = new NullMonitor(); public NullMonitor() { } public void updateProgress(double progress) {} public void newMinimum(double value, double[] parameterValues, MultivariateFunction beingOptimized) {} public String toString() { return "Null Monitor"; } } private static class Split implements MinimiserMonitor { private final MinimiserMonitor a_; private final MinimiserMonitor b_; public Split(MinimiserMonitor a, MinimiserMonitor b) { this.a_ = a; this.b_ = b; } public void updateProgress(double progress) { a_.updateProgress(progress); b_.updateProgress(progress); } public void newMinimum(double value, double[] parameterValues, MultivariateFunction mf) { a_.newMinimum(value,parameterValues,mf); b_.newMinimum(value,parameterValues,mf); } } private static class Simple implements MinimiserMonitor { PrintWriter output_; Simple(PrintWriter output) { this.output_ = output; } public void updateProgress(double progress) { } public void newMinimum(double value, double[] parameterValues, MultivariateFunction mf) { output_.println("New Minimum:"+value); } } private static class SystemOutput implements MinimiserMonitor { static final SystemOutput INSTANCE = new SystemOutput(); public void updateProgress(double progress) { } public void newMinimum(double value, double[] parameterValues, MultivariateFunction mf) { System.out.println("New Minimum:"+value); } } private static class SystemError implements 
MinimiserMonitor { static final SystemError INSTANCE = new SystemError(); public void updateProgress(double progress) { } public void newMinimum(double value, double[] parameterValues, MultivariateFunction mf) { System.err.println("New Minimum:"+value); } } } }pal-1.5.1/src/pal/math/OrthogonalHints.java0000644000000000000000000002005607460517334017265 0ustar rootroot// OrthogonalHints.java // // (c) 1999-2001 PAL Development Core Team // // This package may be distributed under the // terms of the Lesser GNU General Public License (LGPL) package pal.math; /** * Provides a means for giving an Orthogonal base optimiser (IE, OrthognalMinimum) * hints about the function that may alow it to optimise better. * * @version $Id: OrthogonalHints.java,v 1.2 2002/04/21 00:13:31 matt Exp $ * * @author Matthew Goode */ public interface OrthogonalHints { /** * If there is a "best" ordering to use it can be specified here, * if not should return null * @param defaultOrdering The ordering suggested by the optimiser, may be null! * @return null, or default ordering if no known best ordering */ public OrderEnumerator getSuggestedOrdering(OrderEnumerator defaultOrdering); /** * A boundary is a value of a parameter for which values lower than the boundary and values * higher than the boundary are better treated as two separate functions (IE, they * are only piecewise connected), and minimisation should be performed over both ranges * individually (and then the true minimum taken as the minimuma of the ranges) * @return the number of boundary locations stored in storage, or -1 if not enough * room, or 0 if there are no boundaries (other than the normal parameter range) */ public int getInternalParameterBoundaries(int parameter, double[] storage); //===================================================================================================== //================================= Utilities, and hidden classes ===================================== //===================================================================================================== public static class Utils { /** * @return a new OrthogonalHints object base on toAdjust that works with parameters from adjustmentFactor + what toAdjust worked with * That is if the value x is the parameter will be passed toAdjust as x-adjustmentFactor, and * the suggested OrderEnumerator adjusts input x by adding adjustment factor before returning to * the sub toAdjust Enumerator (if you know what I mean) */ public final static OrthogonalHints getAdjusted(OrthogonalHints toAdjust, int adjustmentFactor) { return new Adjusted(toAdjust,adjustmentFactor); } /** * @return a new OrthogonalHints object that combines two sub OrthogonalHints objects so that * all parameter information between 0 upto (but not including) numberOfFirstParameters is * passed to first, and everything else is passed to second * @note automatically adjusts second so assumes both first and second handle parameters in * range 0..whatever (do not do preadjusment on second!) 
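* For example, combining hints for a four-parameter component with hints for a two-parameter
* component gives a single OrthogonalHints over six parameters; a query for parameter 5 is
* forwarded to the second object as its parameter 1 (the adjustment is applied internally).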
*/ public final static OrthogonalHints getCombined(OrthogonalHints first, int numberOfFirstParameters, OrthogonalHints second, int numberOfSecondParameters) { return new Combined(first,numberOfFirstParameters,second,numberOfSecondParameters); } public final static double[] getInternalParameterBoundaries(OrthogonalHints base, int parameter) { double[] store = new double[100]; int numberReturned = base.getInternalParameterBoundaries(parameter,store); while(numberReturned<0) { store = new double[store.length+10]; numberReturned = base.getInternalParameterBoundaries(parameter,store); } double[] result = new double[numberReturned]; System.arraycopy(store,0,result,0,numberReturned); return result; } /** * @return an OrthogonalHints object that doesn't provide any hints */ public final static OrthogonalHints getNull() { return Null.INSTANCE; } // ======================================================================= /** * Implements a means for adjusting an orthogonal hints (that is introduce a simple * mapping between given parameter indexes and used parameter indexes) */ private final static class Adjusted implements OrthogonalHints { OrthogonalHints toAdjust_; int adjustmentFactor_; public Adjusted(OrthogonalHints toAdjust, int adjustmentFactor) { this.toAdjust_ = toAdjust; this.adjustmentFactor_ = adjustmentFactor; } public OrderEnumerator getSuggestedOrdering(OrderEnumerator defaultOrdering) { OrderEnumerator sub = toAdjust_.getSuggestedOrdering(defaultOrdering); if(sub==null||sub==defaultOrdering) { return defaultOrdering; } return OrderEnumerator.Utils.getAdjusted(sub,adjustmentFactor_); } public int getInternalParameterBoundaries(int parameter, double[] storage) { return toAdjust_.getInternalParameterBoundaries(parameter-adjustmentFactor_,storage); } } //End of Adjusted // ======================================================================= /** * An OrthogonalHints object that provides no hints! */ private final static class Null implements OrthogonalHints { public static final Null INSTANCE = new Null(); public Null() { } public OrderEnumerator getSuggestedOrdering(OrderEnumerator defaultOrdering) { return defaultOrdering; } public int getInternalParameterBoundaries(int parameter, double[] storage) { return 0; } } //End of Null // ======================================================================= /** * Implements a means for combining two OrthogonalHints objects */ private final static class Combined implements OrthogonalHints { OrthogonalHints hintsOne_,hintsTwo_; int hintOneParameterCount_, hintTwoParameterCount_ ; /** * @param hintOneParameterCount The number of parameters handled by hintsOne * @param hintTwoParameterCount The number of parameters handled by hintsTwo * */ public Combined(OrthogonalHints hintsOne, int hintOneParameterCount, OrthogonalHints hintsTwo, int hintTwoParameterCount) { this.hintsOne_ = hintsOne; this.hintOneParameterCount_ = hintOneParameterCount; this.hintTwoParameterCount_ = hintTwoParameterCount; this.hintsTwo_ = hintsTwo; } /** * if no suggested ordering from either sub hints returns null, if only * one hint has suggested ordering, creates an ordering where those parameters belonging to * the respecitive hint are given by the given ordering and the remaining ordering information * is shuffled. 
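* For example, with a 3 + 2 parameter split where only the second hint suggests an ordering and
* no default ordering is supplied, the result alternates a shuffled enumeration of parameters
* 0..2 with the second hint's ordering shifted up by 3.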
*/ public OrderEnumerator getSuggestedOrdering(OrderEnumerator defaultOrdering) { OrderEnumerator oe1 = hintsOne_.getSuggestedOrdering(null); OrderEnumerator oe2 = hintsTwo_.getSuggestedOrdering(null); if(oe1==null&&oe2==null) { return defaultOrdering; } if(oe1==null&&oe2!=null) { if(defaultOrdering!=null) { return OrderEnumerator.Utils.getAlternating( OrderEnumerator.Utils.getRestricted(defaultOrdering,0,hintOneParameterCount_), OrderEnumerator.Utils.getAdjusted(oe2,hintOneParameterCount_) ); } return OrderEnumerator.Utils.getAlternating( OrderEnumerator.Utils.getShuffled(hintOneParameterCount_), OrderEnumerator.Utils.getAdjusted(oe2,hintOneParameterCount_) ); } if(oe2==null) { if(defaultOrdering!=null) { return OrderEnumerator.Utils.getAlternating( oe1, OrderEnumerator.Utils.getRestricted( defaultOrdering, hintOneParameterCount_,hintOneParameterCount_+hintTwoParameterCount_ ) ); } return OrderEnumerator.Utils.getAlternating( oe1, OrderEnumerator.Utils.getAdjusted( OrderEnumerator.Utils.getShuffled(hintTwoParameterCount_), hintOneParameterCount_ ) ); } return OrderEnumerator.Utils.getAlternating(oe1,OrderEnumerator.Utils.getAdjusted(oe2,hintOneParameterCount_)); } /** */ public int getInternalParameterBoundaries(int parameter, double[] storage) { if(parameteralgorithm: Brent's golden section method * (Richard P. Brent. 1973. Algorithms for finding zeros and extrema * of functions without calculating derivatives. Prentice-Hall.) * * @version $Id: UnivariateMinimum.java,v 1.9 2003/05/14 05:53:36 matt Exp $ * * @author Korbinian Strimmer */ public class UnivariateMinimum { // // Public stuff // /** last minimum */ public double minx; /** function value at minimum */ public double fminx; /** curvature at minimum */ public double f2minx; /** total number of function evaluations neccessary */ public int numFun; /** * maximum number of function evaluations * (default 0 indicates no limit on calls) */ public int maxFun = 0; /** * Find minimum * (first estimate given) * * @param x first estimate * @param f function * * @return position of minimum */ public double findMinimum(double x, UnivariateFunction f) { double tol = MachineAccuracy.EPSILON; return optimize(x, f, tol); } /** * Find minimum * (first estimate given, desired number of fractional digits specified) * * @param x first estimate * @param f function * @param fracDigits desired fractional digits * * @return position of minimum */ public double findMinimum(double x, UnivariateFunction f, int fracDigits) { double tol = Math.pow(10, -1-fracDigits); double optx = optimize(x, f, tol); //return trim(optx, fracDigits); return optx; } /** * Find minimum * (no first estimate given) * * @param f function * * @return position of minimum */ public double findMinimum(UnivariateFunction f) { double tol = MachineAccuracy.EPSILON; return optimize(f, tol); } /** * Find minimum * (no first estimate given, desired number of fractional digits specified) * * @param f function * @param fracDigits desired fractional digits * * @return position of minimum */ public double findMinimum(UnivariateFunction f, int fracDigits) { double tol = Math.pow(10, -1-fracDigits); double optx = optimize(f, tol); //return trim(optx, fracDigits); return optx; } /** * The actual optimization routine (Brent's golden section method) * * @param f univariate function * @param tol absolute tolerance of each parameter * @param lowerBound the lower bound of input * @param upperBound the upper bound of input * * @return position of minimum */ public double optimize(UnivariateFunction f, 
double tol, double lowerBound, double upperBound) { numFun = 2; return minin(lowerBound, upperBound, f.evaluate(lowerBound), f.evaluate(upperBound), f, tol); } /** * The actual optimization routine (Brent's golden section method) * * @param f univariate function * @param tol absolute tolerance of each parameter * * @return position of minimum */ public double optimize(UnivariateFunction f, double tol) { return optimize(f,tol,f.getLowerBound(), f.getUpperBound()); } /** * The actual optimization routine (Brent's golden section method) * * @param x initial guess * @param f univariate function * @param tol absolute tolerance of each parameter * @param lowerBound the lower bound of input * @param upperBound the upper bound of input * * @return position of minimum */ public double optimize(double x, UnivariateFunction f, double tol, double lowerBound, double upperBound) { double[] range = bracketize(lowerBound, x, upperBound, f); return minin(range[0], range[1], range[2], range[3], f, tol); } /** * The actual optimization routine (Brent's golden section method) * * @param x initial guess * @param f univariate function * @param tol absolute tolerance of each parameter * @note bounded by the given bounds of the function f * * @return position of minimum */ public double optimize(double x, UnivariateFunction f, double tol) { return optimize(x,f,tol,f.getLowerBound(),f.getUpperBound()); } // // Private stuff // private static final double C = (3.0- Math.sqrt(5.0))/2.0; // = 0.38197 private static final double GOLD = (Math.sqrt(5.0) + 1.0)/2.0; // = 1.61803 private static final double delta = 0.01; // Determines second trial point // trim x to have a specified number of fractional digits private double trim(double x, int fracDigits) { double m = Math.pow(10, fracDigits); return Math.round(x*m)/m; } private double constrain(double x, boolean toMax, double min, double max) { if (toMax) { if (x > max) { return max; } else { return x; } } else { if (x < min) { return min; } else { return x; } } } private double[] bracketize(double min, double a, double max, UnivariateFunction f) { if (min > max) { throw new IllegalArgumentException("Argument min (" + min + ") larger than argument max (" + max + ")"); } if (a < min) { a = min; } else if (a > max) { a = max; } if (a < min || a > max) { throw new IllegalArgumentException("Starting point not in given range (" + min + ", " + a + ", " + max + ")"); } // Get second point double b; if (a - min < max - a) { b = a + delta*(max - a); } else { b = a - delta*(a - min); } numFun = 0; double fa = f.evaluate(a); numFun++; double fb = f.evaluate(b); numFun++; double tmp; if (fb > fa) { tmp = a; a = b; b = tmp; tmp = fa; fa = fb; fb = tmp; } // From here on we always have fa >= fb // Our aims is to determine a new point c with fc >= fb // Direction of search (towards min or towards max) boolean searchToMax; double ulim; if (b > a) { searchToMax = true; ulim = max; } else { searchToMax = false; ulim = min; } // First guess: default magnification double c = b + GOLD * (b - a); c = constrain(c, searchToMax, min, max); double fc = f.evaluate(c); numFun++; while (fb > fc) { // Compute u as minimum of a parabola through a, b, c double r = (b - a) * (fb - fc); double q = (b - c) * (fb - fa); if (q == r) { q += MachineAccuracy.EPSILON; } double u = b - ((b - c) * q - (b - a) * r) / 2.0 / (q - r); u = constrain(u, searchToMax, min, max); double fu = 0; // Dont evaluate now boolean magnify = false; // Check out all possibilities // u is between b and c if ((b - u) * (u - c) > 0) { 
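// The candidate u computed above is the minimum of the parabola through (a,fa), (b,fb) and (c,fc).
// In this branch u lies between b and c: evaluate it and check whether it already brackets the
// minimum; otherwise fall back to the default golden-ratio magnification further below.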
fu = f.evaluate(u); numFun++; // minimum between b and c if (fu < fc) { a = b; b = u; fa = fb; fb = fu; break; } // minimum between a and u else if (fu > fb) { c = u; fc = fu; break; } magnify = true; } // u is between c and limit else if ((c - u) * (u - ulim) > 0) { fu = f.evaluate(u); numFun++; // u is not a minimum if (fu < fc) { b = c; c = u; fb = fc; fc = fu; magnify = true; } } // u equals limit else if (u == ulim) { fu = f.evaluate(u); numFun++; } // All other cases else { magnify = true; } if (magnify) { // Next guess: default magnification u = c + GOLD * (c - b); u = constrain(u, searchToMax, min, max); fu = f.evaluate(u); numFun++; } a = b; b = c; c = u; fa = fb; fb = fc; fc = fu; } // Once we are here be have a minimum in [a, c] double[] result = new double[4]; result[0] = a; result[1] = c; result[2] = fa; result[3] = fc; return result; } private double minin(double a, double b, double fa , double fb, UnivariateFunction f, double tol) { double z, d = 0, e, m, p, q, r, t, u, v, w, fu, fv, fw, fz, tmp; if (tol <= 0) { throw new IllegalArgumentException("Nonpositive absolute tolerance tol"); } if (a == b) { minx = a; fminx = fa; f2minx = NumericalDerivative.secondDerivative(f, minx); return minx; //throw new IllegalArgumentException("Borders of range not distinct"); } if (b < a) { tmp = a; a = b; b = tmp; tmp = fa; fa = fb; fb = tmp; } w = a; fw = fa; z = b; fz = fb; if (fz > fw) // Exchange z and w { v = z; z = w; w = v; v = fz; fz = fw; fw = v; } v = w; fv = fw; e = 0.0; while (maxFun == 0 || numFun <= maxFun) { m = (a + b)*0.5; double tol_act = MachineAccuracy.SQRT_EPSILON + tol; // Absolute tolerance //double tol_act = MachineAccuracy.SQRT_EPSILON*Math.abs(z) + tol/3; // Actual tolerance double tol_act2 = 2.0*tol_act; if (Math.abs(z-m) <= tol_act2-(b - a)*0.5) { break; } p = q = r = 0.0; if (Math.abs(e) > tol_act) { r = (z-w)*(fz-fv); q = (z-v)*(fz-fw); p = (z-v)*q-(z-w)*r; q = (q-r)*2.0; if (q > 0.0) { p = -p; } else { q = -q; } r = e; e = d; } if (Math.abs(p) < Math.abs(q*r*0.5) && p > (a-z)*q && p < (b-z)*q) { d = p/q; u = z+d; if (u-(a) < tol_act2 || (b)-u < tol_act2) { d = ((z < m) ? tol_act : -tol_act); } } else { e = ((z < m) ? b : a) - z; d = C*e; } u = z + ((Math.abs(d) >= tol_act) ? d : ((d > 0.0) ? 
tol_act : -tol_act)); fu = f.evaluate(u); numFun++; if (fu <= fz) { if (u < z) { b = z; } else { a = z; } v = w; fv = fw; w = z; fw = fz; z = u; fz = fu; } else { if (u < z) { a = u; } else { b = u; } if (fu <= fw) { v = w; fv = fw; w = u; fw = fu; } else if (fu <= fv || v == w) { v = u; fv = fu; } } } minx = z; fminx = fz; f2minx = NumericalDerivative.secondDerivative(f, minx); return z; } } pal-1.5.1/src/pal/math/NumericalDerivative.java0000644000000000000000000000610107323721302020066 0ustar rootroot// NumericalDerivative.java // // (c) 1999-2001 PAL Development Core Team // // This package may be distributed under the // terms of the Lesser GNU General Public License (LGPL) // Known bugs and limitations: // - the sparse number of function evaluations used can potentially // lead to strong inaccuracies if the function is ill-behaved package pal.math; /** * approximates numerically the first and second derivatives of a * function of a single variable and approximates gradient and * diagonal of Hessian for multivariate functions * * @author Korbinian Strimmer */ public class NumericalDerivative { // // Public stuff // /** * determine first derivative * * @param f univariate function * @param x argument * * @return first derivate at x */ public static double firstDerivative(UnivariateFunction f, double x) { double h = MachineAccuracy.SQRT_EPSILON*(Math.abs(x) + 1.0); // Centered first derivative return (f.evaluate(x + h) - f.evaluate(x - h))/(2.0*h); } /** * determine second derivative * * @param f univariate function * @param x argument * * @return second derivate at x */ public static double secondDerivative(UnivariateFunction f, double x) { double h = MachineAccuracy.SQRT_SQRT_EPSILON*(Math.abs(x) + 1.0); // Centered second derivative return (f.evaluate(x + h) - 2.0*f.evaluate(x) + f.evaluate(x - h))/(h*h); } /** * determine gradient * * @param f multivariate function * @param x argument vector * * @return gradient at x */ public static double[] gradient(MultivariateFunction f, double[] x) { double[] result = new double[x.length]; gradient(f, x, result); return result; } /** * determine gradient * * @param f multivariate function * @param x argument vector * @param grad vector for gradient */ public static void gradient(MultivariateFunction f, double[] x, double[] grad) { for (int i = 0; i < f.getNumArguments(); i++) { double h = MachineAccuracy.SQRT_EPSILON*(Math.abs(x[i]) + 1.0); double oldx = x[i]; x[i] = oldx + h; double fxplus = f.evaluate(x); x[i] = oldx - h; double fxminus = f.evaluate(x); x[i] = oldx; // Centered first derivative grad[i] = (fxplus-fxminus)/(2.0*h); } } /** * determine diagonal of Hessian * * @param f multivariate function * @param x argument vector * * @return vector with diagonal entries of Hessian */ public static double[] diagonalHessian(MultivariateFunction f, double[] x) { int len = f.getNumArguments(); double[] result = new double[len]; for (int i = 0; i < len; i++) { double h = MachineAccuracy.SQRT_SQRT_EPSILON*(Math.abs(x[i]) + 1.0); double oldx = x[i]; x[i] = oldx + h; double fxplus = f.evaluate(x); x[i] = oldx - h; double fxminus = f.evaluate(x); x[i] = oldx; double fx = f.evaluate(x); // Centered second derivative result[i] = (fxplus - 2.0*fx + fxminus)/(h*h); } return result; } } pal-1.5.1/src/pal/math/MersenneTwisterFast.java0000644000000000000000000006036607753713216020131 0ustar rootroot// MersenneTwisterFast.java // // (c) 1999-2001 PAL Development Core Team // // This package may be distributed under the // terms of the Lesser GNU General Public 
License (LGPL) // Original file MersenneTwisterFast.java // (c) 1999-2000 by Michael Lecuyer and Sean Luke // see also http://www.cs.umd.edu/users/seanl/ package pal.math; import java.io.*; import java.util.*; /** * MersenneTwisterFast: * * A simulation quality fast random number generator (MT19937) * with the same public methods as java.util.Random. *

About the Mersenne Twister. * This is a Java version of the C-program for MT19937: Integer version. * next(32) generates one pseudorandom unsigned integer (32bit) * which is uniformly distributed among 0 to 2^32-1 for each * call. next(int bits) >>>'s by (32-bits) to get a value ranging * between 0 and 2^bits-1 long inclusive; hope that's correct. * setSeed(seed) set initial values to the working area * of 624 words. For setSeed(seed), seed is any 32-bit integer * except for 0. *

* Reference. * M. Matsumoto and T. Nishimura, * "Mersenne Twister: A 623-Dimensionally Equidistributed Uniform * Pseudo-Random Number Generator", * ACM Transactions on Modeling and Computer Simulation, * Vol. 8, No. 1, January 1998, pp 3--30. * *

Bug Fixes. This implementation implements the bug fixes made * in Java 1.2's version of Random, which means it can be used with * earlier versions of Java. See * * the JDK 1.2 java.util.Random documentation for further documentation * on the random-number generation contracts made. Additionally, there's * an undocumented bug in the JDK java.util.Random.nextBytes() method, * which this code fixes. * *

Important Note. Just like java.util.Random, this * generator accepts a long seed but doesn't use all of it. java.util.Random * uses 48 bits. The Mersenne Twister instead uses 32 bits (int size). * So it's best if your seed does not exceed the int range. * *
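* For example, a seed of (1L << 32) + 4357 produces exactly the same stream as a seed of 4357,
* because only the low-order 32 bits of the seed are used.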

Sean Luke's web page: http://www.cs.umd.edu/users/seanl/ * *

* - added shuffling method (Alexei Drummond) */ public class MersenneTwisterFast implements Serializable { // Period parameters private static final int N = 624; private static final int M = 397; private static final int MATRIX_A = 0x9908b0df; // private static final * constant vector a private static final int UPPER_MASK = 0x80000000; // most significant w-r bits private static final int LOWER_MASK = 0x7fffffff; // least significant r bits // Tempering parameters private static final int TEMPERING_MASK_B = 0x9d2c5680; private static final int TEMPERING_MASK_C = 0xefc60000; // #define TEMPERING_SHIFT_U(y) (y >>> 11) // #define TEMPERING_SHIFT_S(y) (y << 7) // #define TEMPERING_SHIFT_T(y) (y << 15) // #define TEMPERING_SHIFT_L(y) (y >>> 18) private int mt[]; // the array for the state vector private int mti; // mti==N+1 means mt[N] is not initialized private int mag01[]; // a good initial seed (of int size, though stored in a long) private static final long GOOD_SEED = 4357; private double nextNextGaussian; private boolean haveNextNextGaussian; // The following can be accessed externally by the static accessor methods which // inforce synchronization private static final MersenneTwisterFast DEFAULT_INSTANCE = new MersenneTwisterFast(); // Added to curernt time in default constructor, and then adjust to allow for programs that construct // multiple MersenneTwisterFast in a short amount of time. private static long seedAdditive_ = 0; /** * Constructor using the time of day as default seed. */ public MersenneTwisterFast() { this(System.currentTimeMillis()+seedAdditive_); seedAdditive_+=nextInt(); } /** * Constructor using a given seed. Though you pass this seed in * as a long, it's best to make sure it's actually an integer. * * @param seed generator starting number, often the time of day. */ public MersenneTwisterFast(long seed) { if (seed == 0) { setSeed(GOOD_SEED); } else { setSeed(seed); } } /** * Shuffles an array. * @param array The array of ints to shuffle */ public final void shuffle(int[] array) { int l = array.length; for (int i = 0; i < l; i++) { int index = nextInt(l-i) + i; int temp = array[index]; array[index] = array[i]; array[i] = temp; } } /** * Shuffles an array of objects. * @param array The array of objects to shuffle * @param startIndex the starting index of the portion of the array to shuffle * @param length the length of the portion of the array to shuffle */ public final void shuffleSubset(int startIndex, int length, Object[] array) { for (int i = 0; i < length; i++) { final int index = nextInt(length-i) + i; final int first = startIndex+index; final int second = startIndex+i; final Object temp = array[first]; array[first] = array[second]; array[second] = temp; } } /** * Shuffles an array of objects. * @param array The array of objects to shuffle */ public final void shuffle( Object[] array) { shuffleSubset(0,array.length, array); } /** * Shuffles an array by repeatedly choosing two random members and swapping them. * @param numberOfShuffles The number of times to do the random swap operation * @param array The array of ints to shuffle */ public final void shuffle(int[] array, int numberOfShuffles) { int i, j, temp, l = array.length; for (int shuffle = 0; shuffle < numberOfShuffles; shuffle++) { do { i = nextInt(l); j = nextInt(l); } while(i!=j); temp = array[j]; array[j] = array[i]; array[i] = temp; } } /** * Generates an array of ints that are shuffled * @param l length of the array required. * @return an array of shuffled indices of the specified length. 
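*
* A brief sketch of typical use (the seed and sizes are arbitrary):
*
*   MersenneTwisterFast rng = new MersenneTwisterFast(42);
*   int[] order = rng.shuffled(10);   // a random permutation of 0..9
*   double u = rng.nextDouble();      // uniform on [0, 1)
*   int k = rng.nextInt(6);           // uniform on 0..5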
*/ public int[] shuffled(int l) { int[] array = new int[l]; // initialize array for (int i = 0; i < l; i++) { array[i] = i; } shuffle(array); return array; } /** * Initalize the pseudo random number generator. * The Mersenne Twister only uses an integer for its seed; * It's best that you don't pass in a long that's bigger * than an int. * * @param seed from constructor * */ public final void setSeed(long seed) { haveNextNextGaussian = false; mt = new int[N]; // setting initial seeds to mt[N] using // the generator Line 25 of Table 1 in // [KNUTH 1981, The Art of Computer Programming // Vol. 2 (2nd Ed.), pp102] // the 0xffffffff is commented out because in Java // ints are always 32 bits; hence i & 0xffffffff == i mt[0]= ((int)seed); // & 0xffffffff; for (mti = 1; mti < N; mti++) mt[mti] = (69069 * mt[mti-1]); //& 0xffffffff; // mag01[x] = x * MATRIX_A for x=0,1 mag01 = new int[2]; mag01[0] = 0x0; mag01[1] = MATRIX_A; } public final int nextInt() { int y; if (mti >= N) // generate N words at one time { int kk; for (kk = 0; kk < N - M; kk++) { y = (mt[kk] & UPPER_MASK) | (mt[kk+1] & LOWER_MASK); mt[kk] = mt[kk+M] ^ (y >>> 1) ^ mag01[y & 0x1]; } for (; kk < N-1; kk++) { y = (mt[kk] & UPPER_MASK) | (mt[kk+1] & LOWER_MASK); mt[kk] = mt[kk+(M-N)] ^ (y >>> 1) ^ mag01[y & 0x1]; } y = (mt[N-1] & UPPER_MASK) | (mt[0] & LOWER_MASK); mt[N-1] = mt[M-1] ^ (y >>> 1) ^ mag01[y & 0x1]; mti = 0; } y = mt[mti++]; y ^= y >>> 11; // TEMPERING_SHIFT_U(y) y ^= (y << 7) & TEMPERING_MASK_B; // TEMPERING_SHIFT_S(y) y ^= (y << 15) & TEMPERING_MASK_C; // TEMPERING_SHIFT_T(y) y ^= (y >>> 18); // TEMPERING_SHIFT_L(y) return y; } public final short nextShort() { int y; if (mti >= N) // generate N words at one time { int kk; for (kk = 0; kk < N - M; kk++) { y = (mt[kk] & UPPER_MASK) | (mt[kk+1] & LOWER_MASK); mt[kk] = mt[kk+M] ^ (y >>> 1) ^ mag01[y & 0x1]; } for (; kk < N-1; kk++) { y = (mt[kk] & UPPER_MASK) | (mt[kk+1] & LOWER_MASK); mt[kk] = mt[kk+(M-N)] ^ (y >>> 1) ^ mag01[y & 0x1]; } y = (mt[N-1] & UPPER_MASK) | (mt[0] & LOWER_MASK); mt[N-1] = mt[M-1] ^ (y >>> 1) ^ mag01[y & 0x1]; mti = 0; } y = mt[mti++]; y ^= y >>> 11; // TEMPERING_SHIFT_U(y) y ^= (y << 7) & TEMPERING_MASK_B; // TEMPERING_SHIFT_S(y) y ^= (y << 15) & TEMPERING_MASK_C; // TEMPERING_SHIFT_T(y) y ^= (y >>> 18); // TEMPERING_SHIFT_L(y) return (short)(y >>> 16); } public final char nextChar() { int y; if (mti >= N) // generate N words at one time { int kk; for (kk = 0; kk < N - M; kk++) { y = (mt[kk] & UPPER_MASK) | (mt[kk+1] & LOWER_MASK); mt[kk] = mt[kk+M] ^ (y >>> 1) ^ mag01[y & 0x1]; } for (; kk < N-1; kk++) { y = (mt[kk] & UPPER_MASK) | (mt[kk+1] & LOWER_MASK); mt[kk] = mt[kk+(M-N)] ^ (y >>> 1) ^ mag01[y & 0x1]; } y = (mt[N-1] & UPPER_MASK) | (mt[0] & LOWER_MASK); mt[N-1] = mt[M-1] ^ (y >>> 1) ^ mag01[y & 0x1]; mti = 0; } y = mt[mti++]; y ^= y >>> 11; // TEMPERING_SHIFT_U(y) y ^= (y << 7) & TEMPERING_MASK_B; // TEMPERING_SHIFT_S(y) y ^= (y << 15) & TEMPERING_MASK_C; // TEMPERING_SHIFT_T(y) y ^= (y >>> 18); // TEMPERING_SHIFT_L(y) return (char)(y >>> 16); } public final boolean nextBoolean() { int y; if (mti >= N) // generate N words at one time { int kk; for (kk = 0; kk < N - M; kk++) { y = (mt[kk] & UPPER_MASK) | (mt[kk+1] & LOWER_MASK); mt[kk] = mt[kk+M] ^ (y >>> 1) ^ mag01[y & 0x1]; } for (; kk < N-1; kk++) { y = (mt[kk] & UPPER_MASK) | (mt[kk+1] & LOWER_MASK); mt[kk] = mt[kk+(M-N)] ^ (y >>> 1) ^ mag01[y & 0x1]; } y = (mt[N-1] & UPPER_MASK) | (mt[0] & LOWER_MASK); mt[N-1] = mt[M-1] ^ (y >>> 1) ^ mag01[y & 0x1]; mti = 0; } y = mt[mti++]; y ^= y 
>>> 11; // TEMPERING_SHIFT_U(y) y ^= (y << 7) & TEMPERING_MASK_B; // TEMPERING_SHIFT_S(y) y ^= (y << 15) & TEMPERING_MASK_C; // TEMPERING_SHIFT_T(y) y ^= (y >>> 18); // TEMPERING_SHIFT_L(y) return (boolean)((y >>> 31) != 0); } public final byte nextByte() { int y; if (mti >= N) // generate N words at one time { int kk; for (kk = 0; kk < N - M; kk++) { y = (mt[kk] & UPPER_MASK) | (mt[kk+1] & LOWER_MASK); mt[kk] = mt[kk+M] ^ (y >>> 1) ^ mag01[y & 0x1]; } for (; kk < N-1; kk++) { y = (mt[kk] & UPPER_MASK) | (mt[kk+1] & LOWER_MASK); mt[kk] = mt[kk+(M-N)] ^ (y >>> 1) ^ mag01[y & 0x1]; } y = (mt[N-1] & UPPER_MASK) | (mt[0] & LOWER_MASK); mt[N-1] = mt[M-1] ^ (y >>> 1) ^ mag01[y & 0x1]; mti = 0; } y = mt[mti++]; y ^= y >>> 11; // TEMPERING_SHIFT_U(y) y ^= (y << 7) & TEMPERING_MASK_B; // TEMPERING_SHIFT_S(y) y ^= (y << 15) & TEMPERING_MASK_C; // TEMPERING_SHIFT_T(y) y ^= (y >>> 18); // TEMPERING_SHIFT_L(y) return (byte)(y >>> 24); } public final void nextBytes(byte[] bytes) { int y; for (int x=0;x= N) // generate N words at one time { int kk; for (kk = 0; kk < N - M; kk++) { y = (mt[kk] & UPPER_MASK) | (mt[kk+1] & LOWER_MASK); mt[kk] = mt[kk+M] ^ (y >>> 1) ^ mag01[y & 0x1]; } for (; kk < N-1; kk++) { y = (mt[kk] & UPPER_MASK) | (mt[kk+1] & LOWER_MASK); mt[kk] = mt[kk+(M-N)] ^ (y >>> 1) ^ mag01[y & 0x1]; } y = (mt[N-1] & UPPER_MASK) | (mt[0] & LOWER_MASK); mt[N-1] = mt[M-1] ^ (y >>> 1) ^ mag01[y & 0x1]; mti = 0; } y = mt[mti++]; y ^= y >>> 11; // TEMPERING_SHIFT_U(y) y ^= (y << 7) & TEMPERING_MASK_B; // TEMPERING_SHIFT_S(y) y ^= (y << 15) & TEMPERING_MASK_C; // TEMPERING_SHIFT_T(y) y ^= (y >>> 18); // TEMPERING_SHIFT_L(y) bytes[x] = (byte)(y >>> 24); } } public final long nextLong() { int y; int z; if (mti >= N) // generate N words at one time { int kk; for (kk = 0; kk < N - M; kk++) { y = (mt[kk] & UPPER_MASK) | (mt[kk+1] & LOWER_MASK); mt[kk] = mt[kk+M] ^ (y >>> 1) ^ mag01[y & 0x1]; } for (; kk < N-1; kk++) { y = (mt[kk] & UPPER_MASK) | (mt[kk+1] & LOWER_MASK); mt[kk] = mt[kk+(M-N)] ^ (y >>> 1) ^ mag01[y & 0x1]; } y = (mt[N-1] & UPPER_MASK) | (mt[0] & LOWER_MASK); mt[N-1] = mt[M-1] ^ (y >>> 1) ^ mag01[y & 0x1]; mti = 0; } y = mt[mti++]; y ^= y >>> 11; // TEMPERING_SHIFT_U(y) y ^= (y << 7) & TEMPERING_MASK_B; // TEMPERING_SHIFT_S(y) y ^= (y << 15) & TEMPERING_MASK_C; // TEMPERING_SHIFT_T(y) y ^= (y >>> 18); // TEMPERING_SHIFT_L(y) if (mti >= N) // generate N words at one time { int kk; for (kk = 0; kk < N - M; kk++) { z = (mt[kk] & UPPER_MASK) | (mt[kk+1] & LOWER_MASK); mt[kk] = mt[kk+M] ^ (z >>> 1) ^ mag01[z & 0x1]; } for (; kk < N-1; kk++) { z = (mt[kk] & UPPER_MASK) | (mt[kk+1] & LOWER_MASK); mt[kk] = mt[kk+(M-N)] ^ (z >>> 1) ^ mag01[z & 0x1]; } z = (mt[N-1] & UPPER_MASK) | (mt[0] & LOWER_MASK); mt[N-1] = mt[M-1] ^ (z >>> 1) ^ mag01[z & 0x1]; mti = 0; } z = mt[mti++]; z ^= z >>> 11; // TEMPERING_SHIFT_U(z) z ^= (z << 7) & TEMPERING_MASK_B; // TEMPERING_SHIFT_S(z) z ^= (z << 15) & TEMPERING_MASK_C; // TEMPERING_SHIFT_T(z) z ^= (z >>> 18); // TEMPERING_SHIFT_L(z) return (((long)y) << 32) + (long)z; } public final double nextDouble() { int y; int z; if (mti >= N) // generate N words at one time { int kk; for (kk = 0; kk < N - M; kk++) { y = (mt[kk] & UPPER_MASK) | (mt[kk+1] & LOWER_MASK); mt[kk] = mt[kk+M] ^ (y >>> 1) ^ mag01[y & 0x1]; } for (; kk < N-1; kk++) { y = (mt[kk] & UPPER_MASK) | (mt[kk+1] & LOWER_MASK); mt[kk] = mt[kk+(M-N)] ^ (y >>> 1) ^ mag01[y & 0x1]; } y = (mt[N-1] & UPPER_MASK) | (mt[0] & LOWER_MASK); mt[N-1] = mt[M-1] ^ (y >>> 1) ^ mag01[y & 0x1]; mti = 0; } y = mt[mti++]; y ^= y 
>>> 11; // TEMPERING_SHIFT_U(y) y ^= (y << 7) & TEMPERING_MASK_B; // TEMPERING_SHIFT_S(y) y ^= (y << 15) & TEMPERING_MASK_C; // TEMPERING_SHIFT_T(y) y ^= (y >>> 18); // TEMPERING_SHIFT_L(y) if (mti >= N) // generate N words at one time { int kk; for (kk = 0; kk < N - M; kk++) { z = (mt[kk] & UPPER_MASK) | (mt[kk+1] & LOWER_MASK); mt[kk] = mt[kk+M] ^ (z >>> 1) ^ mag01[z & 0x1]; } for (; kk < N-1; kk++) { z = (mt[kk] & UPPER_MASK) | (mt[kk+1] & LOWER_MASK); mt[kk] = mt[kk+(M-N)] ^ (z >>> 1) ^ mag01[z & 0x1]; } z = (mt[N-1] & UPPER_MASK) | (mt[0] & LOWER_MASK); mt[N-1] = mt[M-1] ^ (z >>> 1) ^ mag01[z & 0x1]; mti = 0; } z = mt[mti++]; z ^= z >>> 11; // TEMPERING_SHIFT_U(z) z ^= (z << 7) & TEMPERING_MASK_B; // TEMPERING_SHIFT_S(z) z ^= (z << 15) & TEMPERING_MASK_C; // TEMPERING_SHIFT_T(z) z ^= (z >>> 18); // TEMPERING_SHIFT_L(z) /* derived from nextDouble documentation in jdk 1.2 docs, see top */ return ((((long)(y >>> 6)) << 27) + (z >>> 5)) / (double)(1L << 53); } public final double nextGaussian() { if (haveNextNextGaussian) { haveNextNextGaussian = false; return nextNextGaussian; } else { double v1, v2, s; do { int y; int z; int a; int b; if (mti >= N) // generate N words at one time { int kk; for (kk = 0; kk < N - M; kk++) { y = (mt[kk] & UPPER_MASK) | (mt[kk+1] & LOWER_MASK); mt[kk] = mt[kk+M] ^ (y >>> 1) ^ mag01[y & 0x1]; } for (; kk < N-1; kk++) { y = (mt[kk] & UPPER_MASK) | (mt[kk+1] & LOWER_MASK); mt[kk] = mt[kk+(M-N)] ^ (y >>> 1) ^ mag01[y & 0x1]; } y = (mt[N-1] & UPPER_MASK) | (mt[0] & LOWER_MASK); mt[N-1] = mt[M-1] ^ (y >>> 1) ^ mag01[y & 0x1]; mti = 0; } y = mt[mti++]; y ^= y >>> 11; // TEMPERING_SHIFT_U(y) y ^= (y << 7) & TEMPERING_MASK_B; // TEMPERING_SHIFT_S(y) y ^= (y << 15) & TEMPERING_MASK_C; // TEMPERING_SHIFT_T(y) y ^= (y >>> 18); // TEMPERING_SHIFT_L(y) if (mti >= N) // generate N words at one time { int kk; for (kk = 0; kk < N - M; kk++) { z = (mt[kk] & UPPER_MASK) | (mt[kk+1] & LOWER_MASK); mt[kk] = mt[kk+M] ^ (z >>> 1) ^ mag01[z & 0x1]; } for (; kk < N-1; kk++) { z = (mt[kk] & UPPER_MASK) | (mt[kk+1] & LOWER_MASK); mt[kk] = mt[kk+(M-N)] ^ (z >>> 1) ^ mag01[z & 0x1]; } z = (mt[N-1] & UPPER_MASK) | (mt[0] & LOWER_MASK); mt[N-1] = mt[M-1] ^ (z >>> 1) ^ mag01[z & 0x1]; mti = 0; } z = mt[mti++]; z ^= z >>> 11; // TEMPERING_SHIFT_U(z) z ^= (z << 7) & TEMPERING_MASK_B; // TEMPERING_SHIFT_S(z) z ^= (z << 15) & TEMPERING_MASK_C; // TEMPERING_SHIFT_T(z) z ^= (z >>> 18); // TEMPERING_SHIFT_L(z) if (mti >= N) // generate N words at one time { int kk; for (kk = 0; kk < N - M; kk++) { a = (mt[kk] & UPPER_MASK) | (mt[kk+1] & LOWER_MASK); mt[kk] = mt[kk+M] ^ (a >>> 1) ^ mag01[a & 0x1]; } for (; kk < N-1; kk++) { a = (mt[kk] & UPPER_MASK) | (mt[kk+1] & LOWER_MASK); mt[kk] = mt[kk+(M-N)] ^ (a >>> 1) ^ mag01[a & 0x1]; } a = (mt[N-1] & UPPER_MASK) | (mt[0] & LOWER_MASK); mt[N-1] = mt[M-1] ^ (a >>> 1) ^ mag01[a & 0x1]; mti = 0; } a = mt[mti++]; a ^= a >>> 11; // TEMPERING_SHIFT_U(a) a ^= (a << 7) & TEMPERING_MASK_B; // TEMPERING_SHIFT_S(a) a ^= (a << 15) & TEMPERING_MASK_C; // TEMPERING_SHIFT_T(a) a ^= (a >>> 18); // TEMPERING_SHIFT_L(a) if (mti >= N) // generate N words at one time { int kk; for (kk = 0; kk < N - M; kk++) { b = (mt[kk] & UPPER_MASK) | (mt[kk+1] & LOWER_MASK); mt[kk] = mt[kk+M] ^ (b >>> 1) ^ mag01[b & 0x1]; } for (; kk < N-1; kk++) { b = (mt[kk] & UPPER_MASK) | (mt[kk+1] & LOWER_MASK); mt[kk] = mt[kk+(M-N)] ^ (b >>> 1) ^ mag01[b & 0x1]; } b = (mt[N-1] & UPPER_MASK) | (mt[0] & LOWER_MASK); mt[N-1] = mt[M-1] ^ (b >>> 1) ^ mag01[b & 0x1]; mti = 0; } b = mt[mti++]; b ^= b >>> 
11; // TEMPERING_SHIFT_U(b) b ^= (b << 7) & TEMPERING_MASK_B; // TEMPERING_SHIFT_S(b) b ^= (b << 15) & TEMPERING_MASK_C; // TEMPERING_SHIFT_T(b) b ^= (b >>> 18); // TEMPERING_SHIFT_L(b) /* derived from nextDouble documentation in jdk 1.2 docs, see top */ v1 = 2 * (((((long)(y >>> 6)) << 27) + (z >>> 5)) / (double)(1L << 53)) - 1; v2 = 2 * (((((long)(a >>> 6)) << 27) + (b >>> 5)) / (double)(1L << 53)) - 1; s = v1 * v1 + v2 * v2; } while (s >= 1); double multiplier = Math.sqrt(-2 * Math.log(s)/s); nextNextGaussian = v2 * multiplier; haveNextNextGaussian = true; return v1 * multiplier; } } public final float nextFloat() { int y; if (mti >= N) // generate N words at one time { int kk; for (kk = 0; kk < N - M; kk++) { y = (mt[kk] & UPPER_MASK) | (mt[kk+1] & LOWER_MASK); mt[kk] = mt[kk+M] ^ (y >>> 1) ^ mag01[y & 0x1]; } for (; kk < N-1; kk++) { y = (mt[kk] & UPPER_MASK) | (mt[kk+1] & LOWER_MASK); mt[kk] = mt[kk+(M-N)] ^ (y >>> 1) ^ mag01[y & 0x1]; } y = (mt[N-1] & UPPER_MASK) | (mt[0] & LOWER_MASK); mt[N-1] = mt[M-1] ^ (y >>> 1) ^ mag01[y & 0x1]; mti = 0; } y = mt[mti++]; y ^= y >>> 11; // TEMPERING_SHIFT_U(y) y ^= (y << 7) & TEMPERING_MASK_B; // TEMPERING_SHIFT_S(y) y ^= (y << 15) & TEMPERING_MASK_C; // TEMPERING_SHIFT_T(y) y ^= (y >>> 18); // TEMPERING_SHIFT_L(y) return (y >>> 8) / ((float)(1 << 24)); } /** Returns an integer drawn uniformly from 0 to n-1. Suffice it to say, n must be > 0, or an IllegalArgumentException is raised. */ public int nextInt(int n) { if (n<=0) throw new IllegalArgumentException("n must be positive"); if ((n & -n) == n) // i.e., n is a power of 2 { int y; if (mti >= N) // generate N words at one time { int kk; for (kk = 0; kk < N - M; kk++) { y = (mt[kk] & UPPER_MASK) | (mt[kk+1] & LOWER_MASK); mt[kk] = mt[kk+M] ^ (y >>> 1) ^ mag01[y & 0x1]; } for (; kk < N-1; kk++) { y = (mt[kk] & UPPER_MASK) | (mt[kk+1] & LOWER_MASK); mt[kk] = mt[kk+(M-N)] ^ (y >>> 1) ^ mag01[y & 0x1]; } y = (mt[N-1] & UPPER_MASK) | (mt[0] & LOWER_MASK); mt[N-1] = mt[M-1] ^ (y >>> 1) ^ mag01[y & 0x1]; mti = 0; } y = mt[mti++]; y ^= y >>> 11; // TEMPERING_SHIFT_U(y) y ^= (y << 7) & TEMPERING_MASK_B; // TEMPERING_SHIFT_S(y) y ^= (y << 15) & TEMPERING_MASK_C; // TEMPERING_SHIFT_T(y) y ^= (y >>> 18); // TEMPERING_SHIFT_L(y) return (int)((n * (long) (y >>> 1) ) >> 31); } int bits, val; do { int y; if (mti >= N) // generate N words at one time { int kk; for (kk = 0; kk < N - M; kk++) { y = (mt[kk] & UPPER_MASK) | (mt[kk+1] & LOWER_MASK); mt[kk] = mt[kk+M] ^ (y >>> 1) ^ mag01[y & 0x1]; } for (; kk < N-1; kk++) { y = (mt[kk] & UPPER_MASK) | (mt[kk+1] & LOWER_MASK); mt[kk] = mt[kk+(M-N)] ^ (y >>> 1) ^ mag01[y & 0x1]; } y = (mt[N-1] & UPPER_MASK) | (mt[0] & LOWER_MASK); mt[N-1] = mt[M-1] ^ (y >>> 1) ^ mag01[y & 0x1]; mti = 0; } y = mt[mti++]; y ^= y >>> 11; // TEMPERING_SHIFT_U(y) y ^= (y << 7) & TEMPERING_MASK_B; // TEMPERING_SHIFT_S(y) y ^= (y << 15) & TEMPERING_MASK_C; // TEMPERING_SHIFT_T(y) y ^= (y >>> 18); // TEMPERING_SHIFT_L(y) bits = (y >>> 1); val = bits % n; } while(bits - val + (n-1) < 0); return val; } // ===================== Static access methods to the private DEFAULT_INSTANCE =========== /** Access a default instance of this class, access is synchronized */ public static final byte getNextByte() { synchronized(DEFAULT_INSTANCE) { return DEFAULT_INSTANCE.nextByte(); } } /** Access a default instance of this class, access is synchronized */ public static final boolean getNextBoolean() { synchronized(DEFAULT_INSTANCE) { return DEFAULT_INSTANCE.nextBoolean(); } } /** Access a default instance of 
this class, access is synchronized */ public static final void getNextBytes(byte[] bs) { synchronized(DEFAULT_INSTANCE) { DEFAULT_INSTANCE.nextBytes(bs); } } /** Access a default instance of this class, access is synchronized */ public static final char getNextChar() { synchronized(DEFAULT_INSTANCE) { return DEFAULT_INSTANCE.nextChar(); } } /** Access a default instance of this class, access is synchronized */ public static final double getNextDouble() { synchronized(DEFAULT_INSTANCE) { return DEFAULT_INSTANCE.nextDouble(); } } /** Access a default instance of this class, access is synchronized */ public static final float getNextFloat() { synchronized(DEFAULT_INSTANCE) { return DEFAULT_INSTANCE.nextFloat(); } } /** Access a default instance of this class, access is synchronized */ public static final long getNextLong() { synchronized(DEFAULT_INSTANCE) { return DEFAULT_INSTANCE.nextLong(); } } /** Access a default instance of this class, access is synchronized */ public static final short getNextShort() { synchronized(DEFAULT_INSTANCE) { return DEFAULT_INSTANCE.nextShort(); } } /** Access a default instance of this class, access is synchronized */ public static final int getNextInt() { synchronized(DEFAULT_INSTANCE) { return DEFAULT_INSTANCE.nextInt(); } } } pal-1.5.1/src/pal/math/OrthogonalSearch.java0000644000000000000000000002405007660472220017377 0ustar rootroot// OrthogonalSearch.java // // (c) 1999-2001 PAL Development Core Team // // This package may be distributed under the // terms of the Lesser GNU General Public License (LGPL) package pal.math; /** * minimization of a real-valued function of * several variables without using derivatives, using the simple * strategy of optimizing variables one by one. * * @author Korbinian Strimmer * @author Matthew Goode */ public class OrthogonalSearch extends MultivariateMinimum { // // Public stuff // // // Private stuff // private OrderEnumerator.OEFactory orthogonalOrderingFactory_; /** Use the current value of dimension in univariate minimisation, or ignore (original method) */ private boolean useCurrentInUnivariateMinimisation_ = false; /** Sometimes the minimum gained through the single variate minimisation is * worse than the minimum currently found (in that it has found another minimum * which is not the original, and is not as minimumal). * This can cause convergence problems, if this is true than the original minima * will be kept if it is more minimal than the new minimuma. This ensures convergence. * In the future a possible strategy might be SimulatedAnealing with regard to accepting, * or rejecting new minima. */ private boolean ignoreNonMinimalUnivariateMinimisations_ = true; /** * If true, print out debug info... */ private boolean debug_ = false; /** * If true calls MinimiserMonitor methods after each orthogonal update, otherwise after each round */ private boolean frequentMonitoring_ = true; /** * Initialization */ public OrthogonalSearch() { //this(OrderUtils.getBiasAlternatingFactory( OrderUtils.getOrderedFactory(), OrderUtils.getZeroFactory())); this(OrderEnumerator.Utils.getOrderedFactory()); } /** * Initialization * @param shuffle If true uses shuffling, else uses ascending order, when choosing next parameter to optimse * (true means equivalent to old StochasticOSearch) */ public OrthogonalSearch(boolean shuffle) { //this(OrderUtils.getBiasAlternatingFactory( OrderUtils.getOrderedFactory(), OrderUtils.getZeroFactory())); this(shuffle? 
OrderEnumerator.Utils.getShuffledFactory() : OrderEnumerator.Utils.getOrderedFactory()); } /** * Initialization */ public OrthogonalSearch(OrderEnumerator.OEFactory orderingFactory) { this.orthogonalOrderingFactory_ = orderingFactory; } /** * */ public void setUseCurrentInUnivariateMinimisation(boolean value) { this.useCurrentInUnivariateMinimisation_ = value; } /** * Should we ignore new minisations that are not as minimal as the current one? */ public void setIgnoreNonMinimalUnivariateMinimisations(boolean value) { this.ignoreNonMinimalUnivariateMinimisations_ = value; } // implementation of abstract method public void optimize(MultivariateFunction f, double[] xvec, double tolfx, double tolx) { optimize(f,xvec,tolfx,tolx,null); } public void optimize(MultivariateFunction f, double[] xvec, double tolfx, double tolx, MinimiserMonitor monitor) { int numArgs = f.getNumArguments(); numFun = 1; double fx = f.evaluate(xvec); stopCondition(fx, xvec, tolfx, tolx, true); RoundOptimiser od = generateOrthogonalRoundOptimiser(f); UnivariateMinimum um = generateUnivariateMinimum(); double lastFX; while (true) { lastFX = fx; fx = od.doRound(xvec,um,tolx,fx, (frequentMonitoring_ ? monitor : null)); if(monitor!=null) { monitor.newMinimum(fx,xvec,f); } debug("Round fx:"+fx); if (stopCondition(fx, xvec, tolfx, tolx, false) || (maxFun > 0 && numFun > maxFun) || numArgs == 1) { break; } } } //============ Static Methods ==================== /** * Generate a MultivariateMinimum.Factory for an OrthogonalSearch * @param shuffle if true shuffles order for each round (see OrthogonalSearch constructors) */ public static final Factory generateFactory(boolean shuffle) { return new SearchFactory(shuffle); } //============ For sub classes =================== protected UnivariateMinimum generateUnivariateMinimum() { return new UnivariateMinimum(); } protected boolean isFrequentMonitoring() { return frequentMonitoring_; } protected RoundOptimiser generateOrthogonalRoundOptimiser(MultivariateFunction mf) { OrthogonalHints hints = mf.getOrthogonalHints(); if(hints!=null) { return new OrthogonalHintsDirection(mf,hints,orthogonalOrderingFactory_); } return new OrthogonalDirection(mf,orthogonalOrderingFactory_); } protected interface RoundOptimiser { /** * @param monitor - may be null; */ public double doRound(double[] xvec, UnivariateMinimum um, double tolx,double fx, MinimiserMonitor monitor); } protected final boolean isUseCurrentInUnivariateMinimisation() { return this.useCurrentInUnivariateMinimisation_; } /** * Should we ignore new minisations that are not as minimal as the current one? 
*/ protected final boolean isIgnoreNonMinimalUnivariateMinimisations() { return this.ignoreNonMinimalUnivariateMinimisations_; } protected void debug(Object output) { if(debug_) { System.out.println(output); } } protected boolean isDebug() { return debug_; } // ============ The Factory Class for Orthogonal Searches =================== private static final class SearchFactory implements Factory { boolean shuffle_; private SearchFactory(boolean shuffle) { this.shuffle_ = shuffle; } public MultivariateMinimum generateNewMinimiser() { return new OrthogonalSearch(shuffle_); } } //============== A means for doing Orthogonal optimisation ================== private class OrthogonalDirection implements RoundOptimiser { OrderEnumerator order_; OrthogonalLineFunction olf_; MultivariateFunction base_; public OrthogonalDirection(MultivariateFunction mf, OrderEnumerator.OEFactory orderFactory) { base_ = mf; olf_ = new OrthogonalLineFunction(base_); this.order_ = orthogonalOrderingFactory_.createOrderEnumerator(base_.getNumArguments()); } public double doRound(double[] xvec, UnivariateMinimum um, double tolx,double fx, MinimiserMonitor monitor) { olf_.setAllArguments(xvec); order_.reset(); while(order_.hasMore()) { int argument = order_.getNext(); olf_.selectArgument(argument); double newArgValue = ( useCurrentInUnivariateMinimisation_ ? um.optimize(xvec[argument], olf_, tolx) : um.optimize(olf_, tolx) ); //If we actually found a better minimum... if(um.fminx<=fx) { xvec[argument] = newArgValue; olf_.setArgument(newArgValue); fx = um.fminx; } if(monitor!=null) { monitor.newMinimum(fx,xvec,base_); } debug(argument+":"+um.fminx+" "+fx); numFun += um.numFun; } return fx; } } //============== A means for doing Orthogonal optimisation ================== private class OrthogonalHintsDirection implements RoundOptimiser { OrderEnumerator order_; OrthogonalLineFunction olf_; OrthogonalHints hints_; double[] store_ = new double[100]; MultivariateFunction base_; public OrthogonalHintsDirection(MultivariateFunction mf, OrthogonalHints hints, OrderEnumerator.OEFactory orderFactory) { base_ = mf; olf_ = new OrthogonalLineFunction(base_); this.hints_ = hints; this.order_ = orthogonalOrderingFactory_.createOrderEnumerator(base_.getNumArguments()); } private final double getNormalMin(UnivariateMinimum um, double argumentValue, double tolx) { return ( useCurrentInUnivariateMinimisation_ ? 
um.optimize(argumentValue, olf_, tolx) : um.optimize(olf_, tolx) ); } private final double getBoundedMin(UnivariateMinimum um, double argumentValue, double tolx,double min, double max) { if(useCurrentInUnivariateMinimisation_ && (min<=argumentValue&&max>=argumentValue)) { return um.optimize(argumentValue, olf_, tolx,min,max); } return um.optimize(olf_, tolx,min,max); } public double doRound(double[] xvec, UnivariateMinimum um, double tolx,double fx, MinimiserMonitor monitor) { olf_.setAllArguments(xvec); order_.reset(); while(order_.hasMore()) { int argument = order_.getNext(); olf_.selectArgument(argument); int numberOfHints= hints_.getInternalParameterBoundaries(argument,store_); //Yes this expensive, but will not happen very often (and only at beging of optimisation) while(numberOfHints<0) { store_ = new double[store_.length+10]; numberOfHints= hints_.getInternalParameterBoundaries(argument,store_); } double newArgValue; double newFX; if(numberOfHints==0) { newArgValue= getNormalMin(um,xvec[argument],tolx); newFX = um.fminx; } else { debug("Number of hints:"+numberOfHints); //System.out.println("Store:"+pal.misc.Utils.toString(store_,numberOfHints)); double min = olf_.getLowerBound(); double x = xvec[argument]; newArgValue = xvec[argument]; newFX = Double.POSITIVE_INFINITY; for(int i = 0 ; i < numberOfHints ; i++) { x =getBoundedMin(um, xvec[argument], tolx, min,store_[i]); if(um.fminx0) { //Do it old school! getNormalMin(um,xvec[argument],tolx); } xvec[argument] = newArgValue; olf_.setArgument(newArgValue); if(monitor!=null) { monitor.newMinimum(newFX,xvec,base_); } fx = newFX; } if(debug_) { System.out.println(argument+":"+newFX+" "+fx+" "+um.fminx+" "+(um.fminx-newFX)+" "+((um.fminxTitle: Matrix

* Description: A very simple implementation of a general matrix. Not really that useful. Will probably become an interface at some point, with this class as a default implementation (so that users may switch to alternative libraries such as Jama)
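*
* A small usage sketch (the values are arbitrary):
*
*   Matrix m = new Matrix(new double[][] { {2, 1}, {4, 3} });
*   Matrix reduced = m.getRowReduced();                                 // row-reduced copy; m is unchanged
*   Matrix augmented = m.getAppendedHorizontally(new Matrix(2, true));  // [ m | I ]
*   System.out.println(augmented);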

* @author Matthew Goode * @version 1.0 */ public final class Matrix { private final int width_; private final int height_; private final double[][] data_; public Matrix(int size, boolean identity) { this(size,size); if(identity) { for(int i = 0 ; i < size ; i++) { setValue(i,i,1); } } } public Matrix(int width, int height) { this.data_ = new double[height][width]; this.width_ = width; this.height_ = height; } public double[] toArray() { double[] result = new double[width_*height_]; int index = 0; for(int row = 0 ; row < height_ ; row++) { for(int col = 0 ; col < width_ ; col++) { result[index++] = data_[row][col]; } } return result; } public Matrix(double[][] data) { this.data_ = pal.misc.Utils.getCopy(data); this.height_ = data.length; this.width_ = data[0].length; } private Matrix(Matrix toCopy) { this(toCopy.data_); } public final void setValue(final int row, final int col, final double value) { data_[row][col] = value; } public final double getValue(final int row, final int col) { return data_[row][col]; } public final boolean isSquare() { return width_ == height_; } public final int getWidth() { return width_; } public final int getHeight() { return height_; } /** * @return a new Matrix that is this matrix with other appended on the end (eg [this | other ]) */ public Matrix getAppendedHorizontally(Matrix other) { if(other.height_!=this.height_) { throw new IllegalArgumentException("Height not same!"); } double[][] otherData = other.data_; double[][] newData = new double[height_][width_+other.width_]; for(int y = 0 ; y < height_ ; y++) { for(int x = 0 ; x < width_ ; x++) { newData[y][x] = data_[y][x]; } for(int x = 0 ; x < other.width_ ; x++) { newData[y][x+width_] = otherData[y][x]; } } return new Matrix(newData); } /** * @return a new Matrix that is this matrix with other appended on the bottom (eg [this / other ] */ public Matrix getAppendedVertically(Matrix other) { if(other.width_!=this.width_) { throw new IllegalArgumentException("Width not same!"); } double[][] otherData = other.data_; double[][] newData = new double[height_+other.height_][width_]; for(int x = 0 ; x < width_ ; x++) { for(int y = 0 ; y < height_ ; y++) { newData[y][x] = data_[y][x]; } for(int y = 0 ; y < other.height_ ; y++) { newData[y+height_][x] = otherData[y][x]; } } return new Matrix(newData); } /** * Returns a new Matrix that is formed from a subset of the colums of this matrix * @param startColumn the first column to include in new Matrix * @param numberToKeep the number of columns to keep */ public Matrix getSubsetColumns(int startColumn, int numberToKeep) { double[][] newData = new double[height_][numberToKeep]; for(int row = 0 ; row < height_ ; row++) { for(int i = 0 ; i < numberToKeep ; i ++) { newData[row][i] = data_[row][i+startColumn]; } } return new Matrix(newData); } public final void transpose() { if(!isSquare()) { throw new RuntimeException("Cannot transpose no square matrix!"); } for(int row = 0 ; row < height_ ; row++) { for(int col = row+1 ; col < width_ ; col++) { double temp = data_[row][col]; data_[row][col] = data_[col][row]; data_[col][row] = temp; } } } public final Matrix getTranspose() { double[][] newData = new double[width_][height_]; for(int row = 0 ; row-0.0000001)); } private void swapRow(int rowOne, int rowTwo) { double[] temp = data_[rowOne]; data_[rowOne] = data_[rowTwo]; data_[rowTwo] = temp; } /** * Check forward in rows after start row until we find one with a \ * non zero value at targetColumn and then swap with startRow * @return true if successful, or false if no row 
with zero at required position */ private boolean swapZeroRow(int startRow, int targetColumn) { for(int check = startRow+1 ; check < height_ ; check++) { if(!equalsZero(getValue(check,targetColumn))) { swapRow(startRow,check); return true; } } return false; } public void rowReduce() { int extent = Math.min(width_,height_); for(int reduce = 0 ; reduce < extent ; reduce++) { boolean doColumn = true; double primaryFactor = getValue(reduce,reduce); if(equalsZero(primaryFactor)) { if(swapZeroRow(reduce,reduce)) { doColumn = false; } else { primaryFactor = getValue(reduce,reduce); } } if(doColumn) { divideRow(reduce,primaryFactor); for(int row = 0 ; row < reduce ; row++) { double subScale = getValue(row, reduce); if(!equalsZero(subScale)) { subtractRow(reduce,subScale, row); } } for(int row = reduce+1 ; row < height_ ; row++) { double subFactor = getValue(row, reduce); if(equalsZero(subFactor)) { if(!swapZeroRow(row,reduce)) { break; } subFactor = getValue(row, reduce); } divideRow(row,subFactor); subtractRow(reduce,row); } } } } /** * Peforms a simple row reduction tramsformation * @return A row reduced version of this matrix */ public Matrix getRowReduced() { Matrix m = getMatrixCopy(); m.rowReduce(); return m; } /** * Cloning * @return an exact copy of this matrix */ public Matrix getMatrixCopy() { return new Matrix(this); } public String toString() { StringBuffer sb = new StringBuffer(); sb.append('('); sb.append(width_); sb.append(", "); sb.append(height_); sb.append(")\n"); for(int row = 0 ; row < height_ ; row++) { sb.append('['); for(int col = 0 ; col < width_ ; col++) { sb.append(getValue(row,col)); sb.append(" "); } sb.append("]\n"); } return sb.toString(); } }pal-1.5.1/src/pal/math/package.html0000644000000000000000000000024407161524376015560 0ustar rootroot Classes for math stuff such as optimisation, numerical derivatives, matrix exponentials, random numbers, special function etc. 
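The following short sketch illustrates how the minimisation and numerical-derivative classes above fit together. The quadratic test function, its bounds and the class name are invented for the example, and UnivariateFunction is assumed to declare only evaluate and the two bound accessors, as the implementations in this package suggest.

import pal.math.NumericalDerivative;
import pal.math.UnivariateFunction;
import pal.math.UnivariateMinimum;

public class UnivariateMinimumExample {
	public static void main(String[] args) {
		// A simple convex function with its minimum at x = 2.
		UnivariateFunction f = new UnivariateFunction() {
			public double evaluate(double x) { return (x - 2.0) * (x - 2.0) + 1.0; }
			public double getLowerBound() { return -10.0; }
			public double getUpperBound() { return 10.0; }
		};
		UnivariateMinimum um = new UnivariateMinimum();
		double xmin = um.findMinimum(f);                               // position of the minimum
		double slope = NumericalDerivative.firstDerivative(f, xmin);   // approximately 0 at the minimum
		System.out.println(xmin + " " + um.fminx + " " + slope);
	}
}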
pal-1.5.1/src/pal/math/LineFunction.java0000644000000000000000000001351307457051462016541 0ustar rootroot// (c) 1999-2001 PAL Development Core Team // // This package may be distributed under the // terms of the Lesser GNU General Public License (LGPL) package pal.math; /** * converts a multivariate function into a univariate function * * @author Korbinian Strimmer */ public class LineFunction implements UnivariateFunction { /** * construct univariate function from multivariate function * * @param func multivariate function * @param start start point * @param dir direction vector */ public LineFunction(MultivariateFunction func) { f = func; dim = f.getNumArguments(); x = new double[dim]; } /** * update start point and direction * (bounds and search direction are NOT checked) * * @param start new start point * @param dir new direction vector */ public void update(double[] start, double[] dir) { s = start; d = dir; computeBounds(); } /** * get point associated with the one-dimensional parameter * (bounds of of multivariate function are NOT checked) * * @param lambda argument * @param p array for coordinates of corresponding point */ public void getPoint(double lambda, double[] p) { for (int i = 0; i < dim; i++) { p[i] = s[i] + lambda*d[i]; } } // implementation of UnivariateFunction /** * evaluate f(start+lambda*dir) */ public double evaluate(double lambda) { getPoint(lambda, x); return f.evaluate(x); } public double getLowerBound() { return lowerBound; } public double getUpperBound() { return upperBound; } /** * find parameter lambda within the given bounds * that minimizes the univariate function * (due to numerical inaccuaries it may happen * that getPoint for the returned lambda produces * a point that lies * slightly out of bounds) * * @return lambda that achieves minimum */ public double findMinimum() { if (um == null) { um = new UnivariateMinimum(); } return um.findMinimum(this); } /** * get parameter that limits the upper bound * * @return parameter number */ public int getUpperBoundParameter() { return upperBoundParam; } /** * get parameter that limits the lower bound * * @return parameter number */ public int getLowerBoundParameter() { return lowerBoundParam; } /** * check (and modify, if necessary) whether a point lies properly * within the predefined bounds * * @param p coordinates of point * * @return true if p was modified, false otherwise */ public boolean checkPoint(double[] p) { boolean modified = false; for (int i = 0; i < dim; i++) { if (p[i] < f.getLowerBound(i)) { p[i] = f.getLowerBound(i); modified = true; } if (p[i] > f.getUpperBound(i)) { p[i] = f.getUpperBound(i); modified = true; } } return modified; } /** * determine active variables at a point p and corresponding * gradient grad (if a component of p lies on a border and * the corresponding component of the gradient points * out of the border the variable is considered inactive) * * @param p coordinates of point * @param grad gradient at that point * @param list of active variables (on return) * * @return number of active variables */ public int checkVariables(double[] p, double[] grad, boolean[] active) { // this seems to be a reasonable small value double EPS = MachineAccuracy.SQRT_EPSILON; int numActive = 0; for (int i = 0; i < dim; i++) { active[i] = true; if (p[i] <= f.getLowerBound(i)+EPS) { // no search towards lower boundary if (grad[i] > 0) { active[i] = false; } } else if (p[i] >= f.getUpperBound(i)-EPS) { // no search towards upper boundary if (grad[i] < 0) { active[i] = false; } } else { numActive++; } 
} return numActive; } /** * check direction vector. If it points out of the defined * area at a point at the boundary the corresponding component * of the direction vector is set to zero. * * @param p coordinates of point * @param dir direction vector at that point * * @return number of changed components in direction vector */ public int checkDirection(double[] p, double[] dir) { // this seems to be a reasonable small value double EPS = MachineAccuracy.SQRT_EPSILON; int numChanged = 0; for (int i = 0; i < dim; i++) { if (p[i] <= f.getLowerBound(i)+EPS) { // no search towards lower boundary if (dir[i] < 0) { dir[i] = 0; numChanged++; } } else if (p[i] >= f.getUpperBound(i)-EPS) { // no search towards upper boundary if (dir[i] > 0) { dir[i] = 0; numChanged++; } } } return numChanged; } // // Private stuff // private MultivariateFunction f; private int lowerBoundParam, upperBoundParam; private int dim; private double lowerBound, upperBound; private double[] s, d, x, min, max; private UnivariateMinimum um = null; private void computeBounds() { boolean firstVisit = true; for (int i = 0; i < dim; i++) { if (d[i] != 0) { double upper = (f.getUpperBound(i) - s[i])/d[i]; double lower = (f.getLowerBound(i) - s[i])/d[i]; if (lower > upper) { double tmp = upper; upper = lower; lower = tmp; } if (firstVisit) { lowerBound = lower; lowerBoundParam = i; upperBound = upper; upperBoundParam = i; firstVisit = false; } else { if (lower > lowerBound) { lowerBound = lower; lowerBoundParam = i; } if (upper < upperBound) { upperBound = upper; upperBoundParam = i; } } } } } } pal-1.5.1/src/pal/math/EvaluationCounter.java0000644000000000000000000000215407725635144017615 0ustar rootroot// OrthogonalSearch.java // // (c) 1999-2003 PAL Development Core Team // // This package may be distributed under the // terms of the Lesser GNU General Public License (LGPL) package pal.math; /** * A utiltity class that can be used to track the number of evaluations of a * general function * * @author Matthew Goode */ public class EvaluationCounter implements MultivariateFunction { private final MultivariateFunction base_; private int evaluationCount_ = 0; public EvaluationCounter(MultivariateFunction base) { this.base_ = base; } public final double evaluate(double[] argument) { evaluationCount_++; return base_.evaluate(argument); } public final void reset() { evaluationCount_=0; } public final int getEvaluationCount() { return evaluationCount_; } public final int getNumArguments() { return base_.getNumArguments(); } public final double getLowerBound(int n) { return base_.getLowerBound(n); } public final double getUpperBound(int n) { return base_.getUpperBound(n); } public final OrthogonalHints getOrthogonalHints() { return base_.getOrthogonalHints(); } }pal-1.5.1/src/pal/math/BoundsCheckedFunction.java0000644000000000000000000000431307457051462020351 0ustar rootroot// BoundsCheckedFunction.java // // (c) 1999-2001 PAL Development Core Team // // This package may be distributed under the // terms of the Lesser GNU General Public License (LGPL) package pal.math; /** * returns a very large number instead of the function value * if arguments are out of bound (useful for minimization with * minimizers that don't check argument boundaries) * * @author Korbinian Strimmer */ public class BoundsCheckedFunction implements MultivariateFunction { /** * construct bound-checked multivariate function * (a large number will be returned on function evaluation if argument * is out of bounds; default is 1000000) * * @param func unconstrained 
multivariate function * @param minArg lower constraint * @param maxArg upper constraint */ public BoundsCheckedFunction(MultivariateFunction func) { this(func, 1000000); } /** * construct constrained multivariate function * * @param func unconstrained multivariate function * @param largeNumber value returned on function evaluation * if argument is out of bounds */ public BoundsCheckedFunction(MultivariateFunction func, double largeNumber) { f = func; veryLarge = largeNumber; } /** * computes function value, taking into account the constraints on the * argument * * @param x function argument * * @return function value (if argument is not in the predefined constrained area * a very large number is returned instead of the true function value) */ public double evaluate(double[] x) { int len = f.getNumArguments(); for (int i = 0; i < len; i++) { if (x[i] < f.getLowerBound(i) || x[i] > f.getUpperBound(i)) { return veryLarge; } } return f.evaluate(x); } public int getNumArguments() { return f.getNumArguments(); } public double getLowerBound(int n) { return f.getLowerBound(n); } public double getUpperBound(int n) { return f.getUpperBound(n); } /** * @return null */ public OrthogonalHints getOrthogonalHints() { return null; } // // Private stuff // private MultivariateFunction f; private double veryLarge; } pal-1.5.1/src/pal/math/ConjugateGradientSearch.java0000644000000000000000000002674607557032204020675 0ustar rootroot// ConjugateGradientSearch.java // // (c) 2000-2001 PAL Development Core Team // // This package may be distributed under the // terms of the GNU Lesser General Public License (LGPL) package pal.math; /** * minimization of a real-valued function of * several variables using a the nonlinear * conjugate gradient method where several variants of the direction * update are available (Fletcher-Reeves, Polak-Ribiere, * Beale-Sorenson, Hestenes-Stiefel) and bounds are respected. * Gradients are computed numerically if they are not supplied by the * user. The line search is entirely based on derivative * evaluation, similar to the strategy used in macopt (Mackay). * * @version $Id: ConjugateGradientSearch.java,v 1.7 2002/10/27 05:46:28 matt Exp $ * * @author Korbinian Strimmer */ public class ConjugateGradientSearch extends MultivariateMinimum { // // Public stuff // public final static int FLETCHER_REEVES_UPDATE = 0; public final static int POLAK_RIBIERE_UPDATE = 1; public final static int BEALE_SORENSON_HESTENES_STIEFEL_UPDATE = 2; // Variables that control aspects of the inner workings of the // minimization algorithm. Setting them is optional, they // are all set to some reasonable default values given below. /** * controls the printed output from the routine * (0 -> no output, 1 -> print only starting and final values, * 2 -> detailed map of the minimisation process), * the default value is 0 */ public int prin = 0; /** * defaultStep is a steplength parameter and should be set equal * to the expected distance from the solution (in a line search) * exceptionally small or large values of defaultStep lead to * slower convergence on the first few iterations (the step length * itself is adapted during search), the default value is 1.0 */ public double defaultStep = 1.0; /** * conjugateGradientStyle determines the method for the * conjugate gradient direction update * update (0 -> Fletcher-Reeves, 1 -> Polak-Ribiere, * 2 -> Beale-Sorenson, Hestenes-Stiefel), the default is 2. 
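 *
 * For example, the update variant can be chosen at construction time
 * (an illustrative sketch only; "f" is assumed to be an existing
 * MultivariateFunction and "x" a start vector within its bounds):
 *   ConjugateGradientSearch cg = new ConjugateGradientSearch(POLAK_RIBIERE_UPDATE);
 *   cg.optimize(f, x, 1e-8, 1e-8); // on return x holds the estimated minimum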
*/ public int conjugateGradientStyle = BEALE_SORENSON_HESTENES_STIEFEL_UPDATE; public ConjugateGradientSearch() { } public ConjugateGradientSearch(int conGradStyle) { this.conjugateGradientStyle = conGradStyle; } // implementation of abstract method public void optimize(MultivariateFunction f, double[] x, double tolfx, double tolx) { optimize(f,x,tolfx,tolx,null); } public void optimize(MultivariateFunction f, double[] x, double tolfx, double tolx, MinimiserMonitor monitor) { xvec = x; numArgs = f.getNumArguments(); boolean numericalGradient; if (f instanceof MFWithGradient) { numericalGradient = false; fgrad = (MFWithGradient) f; } else { numericalGradient = true; fgrad = null; } // line function LineFunction lf = new LineFunction(f); // xvec contains current guess for minimum lf.checkPoint(xvec); double[] xold = new double[numArgs]; copy(xold, xvec); // function value and gradient at current guess numFun = 0; double fx; numGrad = 0; gvec = new double[numArgs]; if (numericalGradient) { fx = f.evaluate(xvec); numFun++; NumericalDerivative.gradient(f, xvec, gvec); numFun += 2*numArgs; } else { fx = fgrad.evaluate(xvec, gvec); numFun++; numGrad++; } double[] gold = new double[numArgs]; copy(gold, gvec); // init stop condition stopCondition(fx, xvec, tolfx, tolx, true); // currently active variables boolean[] active = new boolean[numArgs]; double numActive = lf.checkVariables(xvec, gvec, active); // if no variables are active return if (numActive == 0) { return; } // initial search direction (steepest descent) sdir = new double[numArgs]; steepestDescentDirection(sdir, gvec, active); lf.update(xvec, sdir); // slope at start point in initial direction double slope = gradientProjection(sdir, gvec); if (prin > 0) { System.out.println("--- starting minimization ---"); System.out.println("... current parameter settings ..."); System.out.println("... tolx ... " + tolx); System.out.println("... tolfx ... " + tolfx); System.out.println("... maxFun ... " + maxFun); System.out.println(); printVec("... start vector ...", xvec); System.out.println(); printVec("... start direction ...", sdir); } int numLin = 0; lastStep = defaultStep; while(true) { // determine an appropriate step length double step = findStep(lf, fx, slope, numericalGradient); lastStep = step; numLin++; // update xvec lf.getPoint(step, xvec); lf.checkPoint(xvec); // function value at current guess if (numericalGradient) { fx = f.evaluate(xvec); numFun++; } else { // compute gradient as well fx = fgrad.evaluate(xvec, gvec); numFun++; numGrad++; } // test for for convergence if (stopCondition(fx, xvec, tolfx, tolx, false) || (maxFun > 0 && numFun > maxFun)) { break; } // Compute numerical gradient if (numericalGradient) { NumericalDerivative.gradient(f, xvec, gvec); numFun += 2*numArgs; } numActive = lf.checkVariables(xvec, gvec, active); // if all variables are inactive return if (numActive == 0) { break; } // determine new search direction conjugateGradientDirection(sdir, gvec, gold, active); lf.checkDirection(xvec, sdir); // compute slope in new direction slope = gradientProjection(sdir, gvec); if (slope >= 0) { //reset to steepest descent direction steepestDescentDirection(sdir, gvec, active); // compute slope in new direction slope = gradientProjection(sdir, gvec); // reset to default step length lastStep = defaultStep; } // other updates lf.update(xvec, sdir); copy(xold, xvec); copy(gold, gvec); if (prin > 1) { System.out.println(); System.out.println("Function value: " + f.evaluate(xvec)); System.out.println(); printVec("... 
new vector ...", xvec); System.out.println(); printVec("... new direction ...", sdir); System.out.println("... numFun ... " + numFun); if (!numericalGradient) { System.out.println("... numGrad ... " + numGrad); } System.out.println("... numLin ... " + numLin); System.out.println(); } if(monitor!=null) { monitor.newMinimum(f.evaluate(xvec),xvec,f); } } if (prin > 0) { System.out.println(); printVec("... final vector ...", xvec); System.out.println("... numFun ... " + numFun); System.out.println("... numLin ... " + numLin); System.out.println(); System.out.println("--- end of minimization ---"); } } // // Private stuff // private int numArgs, numGrad; private double lastStep; private double[] xvec, gvec, sdir; private MFWithGradient fgrad; private double findStep(LineFunction lf, double f0, double s0, boolean numericalGradient) { // f0 function value at step = 0 // s0 slope at step = 0 double step; double maxStep = lf.getUpperBound(); if (maxStep <= 0 || s0 == 0) { return 0.0; } //step = Math.abs(lf.findMinimum()); // growing/shrinking factors for bracketing double g1 = 2.0; double g2 = 1.25; double g3 = 0.5; // x1 and x2 try to bracket the minimum double x1 = 0; double s1 = s0; double x2 = lastStep*g2; if(x2 > maxStep) { x2 = maxStep*g3; } double s2 = computeDerivative(lf, x2, numericalGradient); // we need to go further to bracket minimum boolean boundReached = false; while (s2 <= 0 && !boundReached) { x1 = x2; s1 = s2; x2 = x2*g1; if (x2 > maxStep) { x2 = maxStep; boundReached = true; } s2 = computeDerivative(lf, x2, numericalGradient); } // determine step length by quadratic interpolation // for minimum in interval [x1,x2] if (s2 <= 0) { // true local minimum could NOT be bracketed // instead we have a local minimum on a boundary step = x2; } else { // minimum is bracketed step = (x1*s2-x2*s1)/(s2-s1); // note that nominator is always positive } // just to be safe - should not be necessary if (step >= maxStep) { step = maxStep; } if (step < 0) { step = 0; } return step; } private double computeDerivative(LineFunction lf, double lambda, boolean numericalGradient) { if (numericalGradient) { numFun += 2; return NumericalDerivative.firstDerivative(lf, lambda); } else { /* lf.getPoint(lambda, xvec); lf.checkPoint(xvec); fgrad.computeGradient(xvec, gvec); numGrad++; return gradientProjection(sdir, gvec); */ // the following code prevents overstepping // and is due to Jesse Stone double[] xtmp = new double[numArgs]; copy(xtmp, xvec); lf.getPoint(lambda, xtmp); lf.checkPoint(xtmp); fgrad.computeGradient(xtmp, gvec); numGrad++; return gradientProjection(sdir, gvec); } } private void testStep(double f0, double s0, double f1, double s1, double step) { // f0 function value at x=0 // s0 slope at x=0 // f1 function value at x=step // f2 function value at x=step double mue = 0.0001; double eta = 0.9; // sufficent decrease in function value if (f1 <= mue*s0*step + f0) { System.out.println("<<< Sufficient decrease in function value"); } else { System.out.println("<<< NO sufficient decrease in function value"); } // sufficient decrease in slope if (Math.abs(s1) <= eta*Math.abs(s0)) { System.out.println("<<< Sufficient decrease in slope"); } else { System.out.println("<<< NO sufficient decrease in slope"); } } private void conjugateGradientDirection(double[] sdir, double[] gvec, double[] gold, boolean[] active) { double gg = 0; double dgg = 0; for (int i = 0; i < numArgs; i++) { if (active[i]) { switch (conjugateGradientStyle) { case 0: // Fletcher-Reeves dgg += gvec[i]*gvec[i]; gg += gold[i]*gold[i]; 
break; case 1: // Polak-Ribiere dgg += gvec[i]*(gvec[i]-gold[i]); gg += gold[i]*gold[i]; break; case 2: // Beale-Sorenson // Hestenes-Stiefel dgg += gvec[i] * (gvec[i] - gold[i]); gg += sdir[i] * (gvec[i] - gold[i]); break; } } } double beta = dgg/gg; if (beta < 0 || gg == 0) { // better convergence (Gilbert and Nocedal) beta = 0; } for (int i = 0; i < numArgs; i++) { if (active[i]) { sdir[i] = -gvec[i] + beta*sdir[i]; } else { sdir[i] = 0; } } } private void steepestDescentDirection(double[] sdir, double[] gvec, boolean[] active) { for (int i = 0; i < numArgs; i++) { if (active[i]) { sdir[i] = -gvec[i]; } else { sdir[i] = 0; } } } private double gradientProjection(double[] sdir, double[] gvec) { double s = 0; double n = gvec.length; for (int i = 0; i < n; i++) { s += gvec[i]*sdir[i]; } return s; } private void printVec(String s, double[] x) { System.out.println(s); for (int i=0; i < x.length; i++) { System.out.print(x[i] + " "); } System.out.println(); } } pal-1.5.1/src/pal/math/LMSSolver.java0000644000000000000000000000276610141731032015760 0ustar rootroot// LMSSolver.java // // (c) 1999-2003 PAL Development Core Team // // This package may be distributed under the // terms of the Lesser GNU General Public License (LGPL) package pal.math; /** *
 * Title: LMSSolver
 *
 * Description: An interface for objects that can perform a Least Mean Squares type regression.
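 *
 * A minimal usage sketch (illustrative only; xMatrix and dVector are assumed to be
 * an existing design matrix and observation vector of matching dimensions):
 *   LMSSolver solver = LMSSolver.Utils.getSimpleSolver();
 *   double[] coefficients = solver.solve(xMatrix, dVector);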
* @author Matthew Goode * @version 1.0 */ public interface LMSSolver { public double[] solve(double[][] xMatrix, double[] dMatrix); // -=-=-==-=-=-=-=-=-=-=--=-=-=-==--==-=--=-==-=-=-=-=-=--==-=--= /** * *
 * Title: Utils
 *
* A store of LMSSolver related utiltiies */ public static final class Utils { /** * Obtain a simple LMSSolver that uses the "traditional" method for LMS stuff (IE, using row reduction to find the inverse, with poor numerical accuracy) * @return A LMSSolver object */ public static final LMSSolver getSimpleSolver() { return SimpleSolver.INSTANCE; } // -==-=-=--==- private static final class SimpleSolver implements LMSSolver { public static final LMSSolver INSTANCE = new SimpleSolver(); public double[] solve( double[][] xMatrix, double[] dMatrix ) { Matrix m = new Matrix( xMatrix ); Matrix mTranspose = m.getTranspose(); Matrix ls = ( mTranspose.getMultiplied( m ) ).getInverse().getMultiplied( mTranspose ); Matrix d = new Matrix( new double[][] {dMatrix} ).getTranspose(); Matrix result = ls.getMultiplied( d ); return result.toArray(); } } } }pal-1.5.1/src/pal/math/MachineAccuracy.java0000644000000000000000000000211107347102430017141 0ustar rootroot// MachineAccuracy.java // // (c) 1999-2001 PAL Development Core Team // // This package may be distributed under the // terms of the Lesser GNU General Public License (LGPL) package pal.math; /** * determines machine accuracy * * @version $Id: MachineAccuracy.java,v 1.4 2001/09/09 22:17:11 alexi Exp $ * * @author Korbinian Strimmer * @author Alexei Drummond */ public class MachineAccuracy { // // Public stuff // /** machine accuracy constant */ public static double EPSILON = 2.220446049250313E-16; public static double SQRT_EPSILON = 1.4901161193847656E-8; public static double SQRT_SQRT_EPSILON = 1.220703125E-4; /** compute EPSILON from scratch */ public static double computeEpsilon() { double eps = 1.0; while( eps + 1.0 != 1.0 ) { eps /= 2.0; } eps *= 2.0; return eps; } /** * @return true if the relative difference between the two parameters * is smaller than SQRT_EPSILON. 
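 *
 * For example (an illustrative note, not part of the original documentation):
 * same(1.0, 1.0 + 1e-12) returns true, whereas same(1.0, 1.0001) returns false,
 * because the relative difference of about 1e-4 exceeds SQRT_EPSILON (~1.49e-8).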
*/ public static boolean same(double a, double b) { return Math.abs((a/b)-1.0) <= SQRT_EPSILON; } } pal-1.5.1/src/pal/math/MFWithGradient.java0000644000000000000000000000164507323721302016750 0ustar rootroot// MFWithGradient.java // // (c) 2000-2001 PAL Development Core Team // // This package may be distributed under the // terms of the Lesser GNU General Public License (LGPL) package pal.math; /** * interface for a function of several variables with a gradient * * @version $Id: MFWithGradient.java,v 1.2 2001/07/13 14:39:13 korbinian Exp $ * * @author Korbinian Strimmer */ public interface MFWithGradient extends MultivariateFunction { /** * compute both function value and gradient at a point * * @param argument function argument (vector) * @param gradient gradient (on return) * * @return function value */ double evaluate(double[] argument, double[] gradient); /** * compute gradient at a point * * @param argument function argument (vector) * @param gradient gradient (on return) */ void computeGradient(double[] argument, double[] gradient); } pal-1.5.1/src/pal/math/ErrorFunction.java0000644000000000000000000000433207323721302016727 0ustar rootroot// ErrorFunction.java // // (c) 1999-2001 PAL Development Core Team // // This package may be distributed under the // terms of the Lesser GNU General Public License (LGPL) package pal.math; /** * error function and related stuff * * @version $Id: ErrorFunction.java,v 1.2 2001/07/13 14:39:13 korbinian Exp $ * * @author Korbinian Strimmer */ public class ErrorFunction { // // Public stuff // /** * error function * * @param x argument * * @return function value */ public static double erf(double x) { if (x > 0.0) { return GammaFunction.incompleteGammaP(0.5, x*x); } else if (x < 0.0) { return -GammaFunction.incompleteGammaP(0.5, x*x); } else { return 0.0; } } /** * complementary error function = 1-erf(x) * * @param x argument * * @return function value */ public static double erfc(double x) { return 1.0-erf(x); } /** * inverse error function * * @param z argument * * @return function value */ public static double inverseErf(double z) { return pointNormal(0.5*z+0.5)/Math.sqrt(2.0); } // Private // Returns z so that Prob{x * @see pal.mep.MutationRateModel * * @version $Id: MutationRateModelTree.java,v 1.19 2003/06/04 03:17:52 matt Exp $ * * @author Alexei Drummond */ public class MutationRateModelTree extends ParameterizedTree.ParameterizedTreeBase implements OrthogonalHints, ParameterizedTree { // // Public stuff // TimeOrderCharacterData tocd = null; MutationRateModel model = null; int numParameters; double maxRelativeHeight_ = BranchLimits.MAXARC; // // Private stuff // private double[] parameter; private double lnL = 0.0; private final static double MIN_MU = 1e-12; private final static double MIN_DELTA = 1e-12; /** * take any tree and afford it with an interface * suitable for a clock-like tree (parameters * are the minimal node height differences at each internal node). * Includes model parameters as parameters of tree *

* This parameterisation of a clock-tree, ensuring that * all parameters are independent of each other is due to * Andrew Rambaut (personal communication). */ public MutationRateModelTree(Tree t, TimeOrderCharacterData tocd, MutationRateModel model) { this(t,tocd, model,true); } /** * take any tree and afford it with an interface * suitable for a clock-like tree (parameters * are the minimal node height differences at each internal node). *

* This parameterisation of a clock-tree, ensuring that * all parameters are independent of each other is due to * Andrew Rambaut (personal communication). */ public MutationRateModelTree(Tree t, TimeOrderCharacterData tocd, MutationRateModel model, boolean includeModelParameters) { setBaseTree(t); this.tocd = tocd; this.model = model; if (t.getRoot().getChildCount() < 2) { throw new RuntimeException( "The root node must have at least two childs!"); } NodeUtils.heights2Lengths(getRoot()); numParameters = getInternalNodeCount(); if(includeModelParameters) {numParameters+= model.getNumParameters(); } if (!tocd.hasTimes()) { throw new RuntimeException("Must have times!"); } parameter = new double[getInternalNodeCount()]; heights2parameters(); } /** * Cloning constructor */ protected MutationRateModelTree(MutationRateModelTree toCopy ){ this.tocd = toCopy.tocd; this.model = (MutationRateModel)toCopy.model.clone(); this.parameter = pal.misc.Utils.getCopy(toCopy.parameter); this.lnL = toCopy.lnL; this.numParameters = toCopy.numParameters; parameters2Heights(); NodeUtils.heights2Lengths(getRoot()); } /** * Sets the maximum distance between ancestor and latest descendant. * @note by default it as MAX_ARC in BranchLimits (around 1...) */ public void setMaxRelativeHeight(double value) { this.maxRelativeHeight_ = value; } // interface Parameterized public int getNumParameters() { return numParameters; } public void setParameter(double param, int n) { if (n < getInternalNodeCount()) { parameter[n] = param; } else model.setParameter(param, n - getInternalNodeCount()); // call this parameter2Heights parameters2Heights(); NodeUtils.heights2Lengths(getRoot()); } public double getParameter(int n) { if (n < getInternalNodeCount()) { return parameter[n]; } else { return model.getParameter(n - getInternalNodeCount()); } } /** * Returns lower limit of parameter estimate. 
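	 * (For the first getInternalNodeCount() parameters this is BranchLimits.MINARC;
	 * for the remaining parameters the limit is delegated to the mutation rate model.)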
*/ public double getLowerLimit(int n) { if (n < getInternalNodeCount()) { return BranchLimits.MINARC; } else { return model.getLowerLimit(n - getInternalNodeCount()); } } public double getDefaultValue(int n) { if (n < getInternalNodeCount()) { return BranchLimits.DEFAULT_LENGTH; } else { return model.getDefaultValue(n - getInternalNodeCount()); } } public void setParameterSE(double paramSE, int n) { if (n < getInternalNodeCount()) { return ; //Todo } else { model.setParameterSE(paramSE, n - getInternalNodeCount()); } } public double getUpperLimit(int n) { if (n < getInternalNodeCount()) { return maxRelativeHeight_; } else { return model.getUpperLimit(n - getInternalNodeCount()); } } public String getParameterizationInfo() { return "Mutation Rate Model based tree ("+model.toSingleLine()+")"; } /** * returns mu */ public MutationRateModel getMutationRateModel() { return model; } protected void parameters2Heights() { // nodes have been stored by a post-order traversal int index; for (int i = 0; i < getExternalNodeCount(); i++) { index = tocd.whichIdNumber(getExternalNode(i).getIdentifier().getName()); //System.err.println(index + ":" + i); getExternalNode(i).setNodeHeight(model.getExpectedSubstitutions(tocd.getTime(index))); } // this could be more efficient for (int i = 0; i < getInternalNodeCount(); i++) { Node node = getInternalNode(i); node.setNodeHeight(parameter[i] + NodeUtils.findLargestChild(node)); } } protected void heights2parameters() { for (int i = 0; i < getInternalNodeCount(); i++) { Node node = getInternalNode(i); parameter[i] = node.getNodeHeight()-NodeUtils.findLargestChild(node); } // need to convert heights to model parameters somehow! } public void setLnL(double lnL) { this.lnL = lnL; } public double getLnL() { return lnL; } public OrthogonalHints getOrthogonalHints() { if(model.getNumParameters()==0) { return this; } OrthogonalHints modelHints = model.getOrthogonalHints(); if(modelHints!=null) { return OrthogonalHints.Utils.getCombined(this,parameter.length,modelHints,model.getNumParameters()); } return OrthogonalHints.Utils.getCombined( this,parameter.length, OrthogonalHints.Utils.getNull(), model.getNumParameters()); } // ===================== OrthogonalHints stuff ====================== public OrderEnumerator getSuggestedOrdering(OrderEnumerator defaultOrdering) { return defaultOrdering; } public int getInternalParameterBoundaries(int parameter, double[] storage) { Node n = getInternalNode(parameter); if(n.isRoot()) { return 0; } int count = 0; Node p = n.getParent(); Node current = n; double offset = 0; double baseLine = NodeUtils.findLargestChild(n); while(p!=null) { Node max = null; int numberOfChildren = p.getChildCount(); //Find Maximum double maxHeight = Double.NEGATIVE_INFINITY; double realMaxHeight = Double.NEGATIVE_INFINITY; for(int i = 0 ; i < numberOfChildren ; i++){ Node c = p.getChild(i); //We ignore the target node! 
double nh = c.getNodeHeight(); if(c!=n&&maxHeightmaxRelativeHeight_) { break; } if(value>0&&max!=current) { //System.out.println("MH:"+maxHeight+" OFS:"+offset+" BL:"+baseLine); if(count==storage.length) { return -1; } if(count==0||value>storage[count-1]) { storage[count++] = value; } } //The offset is to take into account how much we are "pushing" offset+=p.getNodeHeight()-realMaxHeight; current = p; p = p.getParent(); } return count; } // ===================== End of OrthogonalHints stuff ====================== public Tree getCopy() { return new MutationRateModelTree(this); } public Object clone() { return getCopy(); } // =========================================================================== // ===== Static stuff ======= /** * Obtain a ParameterizedTree.Factory for generating Unconstrained trees */ public static final ParameterizedTree.Factory getParameterizedTreeFactory(MutationRateModel.Factory rateModel, TimeOrderCharacterData tocd) { return new TreeFactory(rateModel,tocd); } private static class TreeFactory implements ParameterizedTree.Factory { MutationRateModel.Factory rateModel_; TimeOrderCharacterData tocd_; public TreeFactory(MutationRateModel.Factory rateModel, TimeOrderCharacterData tocd) { this.rateModel_ = rateModel; this.tocd_ = tocd; } public ParameterizedTree generateNewTree(Tree base) { return new MutationRateModelTree(base, tocd_, rateModel_.generateNewModel(),true); } } } pal-1.5.1/src/pal/tree/NeighborJoiningTree.java0000644000000000000000000001005407731622032020032 0ustar rootroot// NeighborJoiningTree.java // // (c) 1999-2001 PAL Development Core Team // // This package may be distributed under the // terms of the Lesser GNU General Public License (LGPL) // computational complexity O(numSeqs^3) package pal.tree; import pal.distance.*; /** * constructs a neighbor-joining tree from pairwise distances *

 * Saitou, N., and Nei, M. (1987) The neighbor-joining method: A new method for reconstructing phylogenetic trees. Mol. Biol. Evol. 4(4):406-425. *
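 *
 * A minimal usage sketch (illustrative only; "dm" is assumed to be an existing
 * pal.distance.DistanceMatrix containing at least three taxa):
 *   Tree njTree = new NeighborJoiningTree(dm);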
* @version $Id: NeighborJoiningTree.java,v 1.12 2003/09/16 03:54:18 matt Exp $ * * @author Korbinian Strimmer * @author Alexei Drummond */ public class NeighborJoiningTree extends SimpleTree { // // Public stuff // /** * construct NJ tree * * @param m distance matrix */ public NeighborJoiningTree(DistanceMatrix m) { if (m.getSize() < 3) { new IllegalArgumentException("LESS THAN 3 TAXA IN DISTANCE MATRIX"); } if (!m.isSymmetric()) { new IllegalArgumentException("UNSYMMETRIC DISTANCE MATRIX"); } init(m); //while (numClusters > 3) while (true) { findNextPair(); newBranchLengths(); if (numClusters == 3) { break; } newCluster(); } finish(); } // // Private stuff // private int numClusters; private Node newCluster; private int besti, abi; private int bestj, abj; private int[] alias; private double[][] distance; private double[] r; private double scale; private double getDist(int a, int b) { return distance[alias[a]][alias[b]]; } private void init(DistanceMatrix m) { numClusters = m.getSize(); distance = m.getClonedDistances(); for (int i = 0; i < numClusters; i++) { Node tmp = NodeFactory.createNode(); tmp.setIdentifier(m.getIdentifier(i)); getRoot().addChild(tmp); } alias = new int[numClusters]; for (int i = 0; i < numClusters; i++) { alias[i] = i; } r = new double[numClusters]; } private void finish() { if (besti != 0 && bestj != 0) { getRoot().getChild(0).setBranchLength(updatedDistance(besti, bestj, 0)); } else if (besti != 1 && bestj != 1) { getRoot().getChild(1).setBranchLength(updatedDistance(besti, bestj, 1)); } else { getRoot().getChild(2).setBranchLength(updatedDistance(besti, bestj, 2)); } distance = null; // make node heights available also NodeUtils.lengths2Heights(getRoot()); } private void findNextPair() { for (int i = 0; i < numClusters; i++) { r[i] = 0; for (int j = 0; j < numClusters; j++) { r[i] += getDist(i,j); } } besti = 0; bestj = 1; double smax = -1.0; scale = 1.0/(numClusters-2); for (int i = 0; i < numClusters-1; i++) { for (int j = i+1; j < numClusters; j++) { double sij = (r[i] + r[j] ) * scale - getDist(i, j); if (sij > smax) { smax = sij; besti = i; bestj = j; } } } abi = alias[besti]; abj = alias[bestj]; } private void newBranchLengths() { double dij = getDist(besti, bestj); double li = (dij + (r[besti]-r[bestj])*scale)*0.5; double lj = dij - li; // = (dij + (r[bestj]-r[besti])*scale)*0.5 getRoot().getChild(besti).setBranchLength(li); getRoot().getChild(bestj).setBranchLength(lj); } private void newCluster() { // Update distances for (int k = 0; k < numClusters; k++) { if (k != besti && k != bestj) { int ak = alias[k]; distance[ak][abi] = distance[abi][ak] = updatedDistance(besti, bestj, k); } } distance[abi][abi] = 0.0; // Replace besti with new cluster NodeUtils.joinChilds(getRoot(), besti, bestj); // Update alias for (int i = bestj; i < numClusters-1; i++) { alias[i] = alias[i+1]; } numClusters--; } /** * compute updated distance between the new cluster (i,j) * to any other cluster k */ private double updatedDistance(int i, int j, int k) { return (getDist(k, i) + getDist(k, j) - getDist(i, j))*0.5; } } pal-1.5.1/src/pal/tree/LogParameterizedTree.java0000644000000000000000000000427707667377620020251 0ustar rootroot// LogParameterizedTree.java // // (c) 1999-2001 PAL Development Core Team // // This package may be distributed under the // terms of the Lesser GNU General Public License (LGPL) package pal.tree; import pal.misc.*; /** * This class logarithmically transforms tree parameters. 
* Hopefully this makes the function look more like a quadratic * for the optimizer! * * @version $Id: LogParameterizedTree.java,v 1.9 2003/06/04 03:17:52 matt Exp $ * * @author Alexei Drummond */ public class LogParameterizedTree extends ParameterizedTree.ParameterizedTreeBase implements ParameterizedTree { // // Public stuff // ParameterizedTree params; private double[] logMins; private double[] logMaxs; private double[] logDefaults; /** * Takes a parameterized object and transforms * the parameters logarithmically. */ public LogParameterizedTree(ParameterizedTree params) { setBaseTree(params); this.params = params; logMins = new double[params.getNumParameters()]; logMaxs = new double[params.getNumParameters()]; logDefaults = new double[params.getNumParameters()]; for (int i = 0; i < logMins.length; i++) { logMins[i] = Math.log(params.getLowerLimit(i)); logMaxs[i] = Math.log(params.getUpperLimit(i)); logDefaults[i] = Math.log(params.getDefaultValue(i)); } } // interface Parameterized public int getNumParameters() { return params.getNumParameters(); } public void setParameter(double logParam, int n) { // - logMins scales the value to lower bound of 0 double realParam = Math.exp(logParam + logMins[n]); params.setParameter(realParam, n); } public double getParameter(int n) { return Math.log(params.getParameter(n)) - logMins[n]; } public void setParameterSE(double paramSE, int n) { return; // DEBUG - not yet done } public double getLowerLimit(int n) { return 0; } public double getUpperLimit(int n) { return logMaxs[n] - logMins[n]; } public double getDefaultValue(int n) { return logDefaults[n] - logMins[n]; } public String getParameterizationInfo() { return params.getParameterizationInfo()+" (using log scaling)"; } } pal-1.5.1/src/pal/tree/Node.java0000644000000000000000000000620607536660770015046 0ustar rootroot// Node.java // // (c) 1999-2001 PAL Development Core Team // // This package may be distributed under the // terms of the Lesser GNU General Public License (LGPL) package pal.tree; import pal.misc.*; import java.io.*; import pal.io.*; /** * interface for a node (includes branch) in a binary/non-binary * rooted/unrooted tree * * @version $Id: Node.java,v 1.23 2002/09/08 03:43:04 matt Exp $ * * @author Alexei Drummond * @author Korbinian Strimmer * */ public interface Node extends Serializable { /** Returns the parent node of this node. */ Node getParent(); /** Set the parent node of this node. */ void setParent(Node node); /** Returns the sequence at this node, in the form an array of bytes. */ byte[] getSequence(); /** Sets the sequence using an array of bytes. */ void setSequence(byte[] array); /** return the index of this node */ int getNumber(); /** set the index of this node */ void setNumber(int number); /** Get the length of the branch attaching this node to its parent. */ double getBranchLength(); /** * Set the length of the branch attaching this node to its parent. */ void setBranchLength(double value); /** Get the length SE of the branch attaching this node to its parent. */ double getBranchLengthSE(); /** Set the length SE of the branch attaching this node to its parent. */ void setBranchLengthSE(double value); /** Get the height of this node relative to the most recent node. */ double getNodeHeight(); /** * Set the height of this node relative to the most recent node. */ void setNodeHeight(double value); /** * Set the height of this node relative to the most recent node. 
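	 * (Presumably, when adjustChildBranchLengths is true the branch lengths of the
	 * child nodes are updated so they stay consistent with the new height; the
	 * contract is not spelled out here, so this reading should be treated as an assumption.)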
* @param adjustChildBranchLengths if true */ void setNodeHeight(double value,boolean adjustChildBranchLengths); ///** Set the height SE of this node relative to the most recent node. */ //void setNodeHeightSE(double value); ///** Get the height SE of this node relative to the most recent node. */ //double getNodeHeightSE(); /** Returns the identifier for this node. */ Identifier getIdentifier(); /** Set identifier for this node. */ void setIdentifier(Identifier id); /** * Returns the number of children this node has. */ int getChildCount(); /** * check whether this node is an external node * * @return result (true or false) */ boolean isLeaf(); /** * check whether this node is a root node * * @return result (true or false) */ boolean isRoot(); /** * get child node * * @param n number of child * * @return child node */ Node getChild(int n); /** * set child node * * @param n number * @node node new child node */ void setChild(int n, Node node); /** * add new child node * * @param c new child node */ void addChild(Node c); /** * add new child node (insertion at a specific position) * * @param c new child node + @param pos position */ void insertChild(Node c, int pos); /** * remove child * * @param n number of child to be removed */ Node removeChild(int n); } pal-1.5.1/src/pal/tree/TreeGenerator.java0000644000000000000000000000411010045717000016671 0ustar rootroot// TreeGenerator.java // // (c) 1999-2003 PAL Development Core Team // // This package may be distributed under the // terms of the Lesser GNU General Public License (LGPL) package pal.tree; /** * An interface for classes that produce trees. Useful for cases where large numbers of * trees are generator (like bootstrapping) but, to save memory, it is better to * generate the trees on the fly, instead of pregenerating them and storing in an * array. 
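 *
 * A minimal usage sketch (illustrative only; "matrixGenerator" is assumed to be an
 * existing DistanceMatrixGenerator, "outgroupName" a taxon present in its matrices,
 * and "callback" a pal.util.AlgorithmCallback):
 *   TreeGenerator generator = TreeGenerator.Utils.createNeighbourJoiningGenerator(
 *       matrixGenerator, new String[] { outgroupName });
 *   Tree next = generator.getNextTree(callback);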
* * @version $Id: TreeGenerator.java,v 1.4 2004/05/04 02:43:28 matt Exp $ * * @author Matthew Goode */ import pal.util.AlgorithmCallback; import pal.distance.*; public interface TreeGenerator { public Tree getNextTree( AlgorithmCallback callback); // ============================================================================== // ==================== Utilities =============================================== // ============================================================================== public static final class Utils { public static final TreeGenerator createNeighbourJoiningGenerator(DistanceMatrixGenerator dataGenerator, String[] outgroupNames) { return new NJGenerator(dataGenerator,outgroupNames); } // ================================================== // === NJ Generator // ================================================== private static final class NJGenerator implements TreeGenerator { private final DistanceMatrixGenerator dataGenerator_; private final String[] outgroupNames_; public NJGenerator( DistanceMatrixGenerator dataGenerator, String[] outgroupNames ) { this.dataGenerator_ = dataGenerator; this.outgroupNames_ = outgroupNames; } public Tree getNextTree( AlgorithmCallback callback) { DistanceMatrix dm = dataGenerator_.generateNextMatrix(callback); Tree t = new NeighborJoiningTree(dm); TreeManipulator tm = new TreeManipulator(t,TreeManipulator.REDUCE_CONSTRUCTION); return tm.getTreeRootedBy(outgroupNames_); } } //End of class NJGenerator } //End of class Utils } //End of interface TreeGenerator pal-1.5.1/src/pal/tree/Local.java0000644000000000000000000002272507416605702015206 0ustar rootrootpackage pal.tree; import pal.math.*; import pal.misc.*; import pal.io.*; /** * Implements LOCAL (Larget and Simon, 1999) and stochastic NNI moves for unrooted trees. * @author Alexei Drummond * @version $Id: Local.java,v 1.1 2002/01/08 02:09:53 alexi Exp $ */ public class Local { private static MersenneTwisterFast random = new MersenneTwisterFast(); private static double lambda = 1.0; public static Tree local(Tree tree) { return local(tree, 1.0); } /** * This method does a local interchange on * the given tree: Note the original tree is modified. * If this behaviour is inappropriate then pass new SimpleTree(tree) * to this method. *

	 * Actual		 Logical
	 *     2                 1            4
	 *    /|\                 \          /
	 *   / | \                 \        /
	 *  /  |  \                 \      /       
	 * 1   A   3                 2----3
	 *        / \               /      \
	 *       /   \             /        \
	 *      /     \           /          \
	 *     B       4         A            B
	 *
	 * 
* A random internal edge (2,3) is selected and extended in both directions * to create a back bone (1,2,3,4). One of the two internal nodes (2,3) is moved * to a new random position on backbone and the backbone is scale in size. * @param scale determines whether or not the backbone is scaled * @return a perturbation of given tree. */ public static Tree local(Tree tree, double scaleFactor) { if (tree.getRoot().getChildCount() != 3) { throw new RuntimeException("Root must have trifurcation!"); } // (node1, node2, node3, node4) is the backbone //------------------------------------------------------------- // select an internal edge (i.e. one not connected to a tip) // uniformly and randomly. //------------------------------------------------------------- // assumes root is last internal node and avoids it int pos = random.nextInt(tree.getInternalNodeCount()-1); Node node3 = tree.getInternalNode(pos); Node node2 = node3.getParent(); //------------------------------------------------------------- // reroot so that top of edge is root TreeUtils.reroot(tree, node2); int k = random.nextInt(node2.getChildCount()); //System.out.println("getting node1..."); while (node2.getChild(k) == node3) { k = random.nextInt(node2.getChildCount()); } Node node1 = node2.getChild(k); Node nodeA = null; for (int i =0; i < node2.getChildCount(); i++) { if ((node2.getChild(i) != node1) && (node2.getChild(i) != node3)) { nodeA = node2.getChild(i); } } //System.out.println("getting node4..."); Node node4, nodeB; node4 = node3.getChild(0); nodeB = node3.getChild(1); if (random.nextBoolean()) { nodeB = node3.getChild(0); node4 = node3.getChild(1); } double backBoneLength = node1.getBranchLength() + node3.getBranchLength() + node4.getBranchLength(); // modify backbone length double newLength = backBoneLength * scaleFactor; node1.setBranchLength(node1.getBranchLength() * scaleFactor); node3.setBranchLength(node3.getBranchLength() * scaleFactor); node4.setBranchLength(node4.getBranchLength() * scaleFactor); double newpos = random.nextDouble() * newLength; if (random.nextBoolean()) { // detach and reattach A double easyLength = node1.getBranchLength() + node3.getBranchLength(); if (newpos < easyLength) { //no topology change node1.setBranchLength(newpos); node3.setBranchLength(easyLength-newpos); } else { swapNodes(nodeA, nodeB); node1.setBranchLength(easyLength); node3.setBranchLength(newpos - easyLength); node4.setBranchLength(newLength - newpos); } } else { // detach and reattach B double easyLength = node3.getBranchLength() + node4.getBranchLength(); double hardLength = node1.getBranchLength(); if (newpos > hardLength) { // no topology change node3.setBranchLength(newpos - hardLength); node4.setBranchLength(newLength - newpos); } else { swapNodes(node1, node4); node1.setBranchLength(newpos); node3.setBranchLength(hardLength - newpos); node4.setBranchLength(easyLength); } } tree.createNodeList(); NodeUtils.lengths2Heights(tree.getRoot()); return tree; } public static Tree stochasticNNI(Tree tree) { if (tree.getRoot().getChildCount() != 3) { throw new RuntimeException("Root must have trifurcation!"); } // (node2, node3) is the backbone //------------------------------------------------------------- // select an internal edge (i.e. one not connected to a tip) // uniformly and randomly. 
//------------------------------------------------------------- // assumes root is last internal node and avoids it int pos = random.nextInt(tree.getInternalNodeCount()-1); Node node3 = tree.getInternalNode(pos); Node node2 = node3.getParent(); //------------------------------------------------------------- // reroot so that top of edge is root TreeUtils.reroot(tree, node2); int k = random.nextInt(node2.getChildCount()); while (node2.getChild(k) == node3) { k = random.nextInt(node2.getChildCount()); } Node node1 = node2.getChild(k); Node node4 = node3.getChild(0); if (random.nextBoolean()) { node4 = node3.getChild(1); } swapNodes(node1, node4); return tree; } private static void swapNodes(Node n1, Node n2) { Node parent1 = n1.getParent(); Node parent2 = n2.getParent(); for (int i = 0; i < parent1.getChildCount(); i++) { if (parent1.getChild(i) == n1) parent1.removeChild(i); } for (int i = 0; i < parent2.getChildCount(); i++) { if (parent2.getChild(i) == n2) parent2.removeChild(i); } parent1.addChild(n2); parent2.addChild(n1); } public static void print4TaxonTree(Tree tree, java.io.PrintWriter out) { FormattedOutput fo = FormattedOutput.getInstance(); Node root = tree.getRoot(); Node taxa1 = null, taxa2 = null, internal1 = null, taxa3 = null, taxa4 = null; for (int i =0; i < root.getChildCount(); i++) { if (root.getChild(i).isLeaf()) { if (taxa1 == null) taxa1 = root.getChild(i); else taxa2 = root.getChild(i); } else internal1 = root.getChild(i); } taxa3 = internal1.getChild(0); taxa4 = internal1.getChild(1); displayLabel(out, taxa1.getIdentifier().getName(), 8, true); out.print(" "); displayLabel(out, taxa3.getIdentifier().getName(), 8, true); out.println(); out.println(" \\ /"); out.print(" "); fo.displayDecimal(out, taxa1.getBranchLength(), 4); out.print(" "); fo.displayDecimal(out, taxa3.getBranchLength(), 4); out.println(); out.println(" \\ /"); displayLabel(out, root.getIdentifier().getName(), 8, false); out.print("--"); fo.displayDecimal(out, internal1.getBranchLength(), 4); out.println("--" + internal1.getIdentifier().getName()); out.println(" / \\"); out.print(" "); fo.displayDecimal(out, taxa2.getBranchLength(), 4); out.print(" "); fo.displayDecimal(out, taxa4.getBranchLength(), 4); out.println(); out.println(" / \\"); displayLabel(out, taxa2.getIdentifier().getName(), 8, true); out.print(" "); displayLabel(out, taxa4.getIdentifier().getName(), 8, true); out.println(); } /** * print label with a prespecified length * (label will be shortened or spaces will introduced, if necessary) * * @param out output stream * @param label label to be printed * @param width desired length */ public static void displayLabel(java.io.PrintWriter out, String label, int width, boolean center) { int len = label.length(); if (len == width) { // Print as is out.print(label); } else if (len < width) { int first = width-len; int second = 0; if (center) { first = first / 2; second = first - (width-len); } // fill rest with spaces for (int i = 0; i < first; i++) { out.print(' '); } out.print(label); for (int i = 0; i < second; i++) { out.print(' '); } } else { // Print first width characters for (int i = 0; i < width; i++) { out.print(label.charAt(i)); } } } public static final void main(String[] args) { // create test tree Node root = new SimpleNode("I", 0.0); Node node3 = new SimpleNode("I", 0.01); Node node1 = new SimpleNode("1", 0.01); Node nodeA = new SimpleNode("2", 0.01); Node node4 = new SimpleNode("3", 0.01); Node nodeB = new SimpleNode("4", 0.01); root.addChild(node1); root.addChild(nodeA); 
root.addChild(node3); node3.addChild(nodeB); node3.addChild(node4); Tree tree = new SimpleTree(root); Tree tree2 = new SimpleTree(tree); Tree tree3 = new SimpleTree(tree); java.io.PrintWriter pw = new java.io.PrintWriter(System.out); print4TaxonTree(tree, pw); pw.flush(); System.out.println(); System.out.println("scaled 0.5"); print4TaxonTree(local(tree, 0.5), pw); pw.flush(); System.out.println(); System.out.println("scaled 2.0"); print4TaxonTree(local(tree2, 2.0), pw); pw.flush(); System.out.println(); System.out.println("NNI"); print4TaxonTree(stochasticNNI(tree3), pw); pw.flush(); System.out.println(); } } pal-1.5.1/src/pal/tree/NodeFactory.java0000644000000000000000000000431707665733060016373 0ustar rootroot// NodeFactory.java // // (c) 1999-2001 PAL Development Core Team // // This package may be distributed under the // terms of the Lesser GNU General Public License (LGPL) package pal.tree; /** * Creates nodes * * The purpose of this class is to decouple the creation of * a class of type "Node" from its actual implementation. This * class should be used instead of calling the constructor * of an implementation of "Node" * (at the moment "SimpleNode") as it may change in the future.

* * Other plans: add features here to recyle old nodes rather than * leaving them to the Java garbage collector * * @author Korbinian Strimmer */ import pal.misc.Identifier; public class NodeFactory { /** create a node */ public static final Node createNode() { return new SimpleNode(); } /** create a node, with a specified identifier */ public static final Node createNode(Identifier id) { return new SimpleNode(id.getName(),0); } /** create a node, with a specified identifier */ public static final Node createNode(Identifier id, double height) { SimpleNode sn = new SimpleNode(id.getName(),0); sn.setNodeHeight(height); return sn; } /** create a node, with a specified identifier */ public static final Node createNodeBranchLength(double branchLength, Identifier id) { SimpleNode sn = new SimpleNode(id.getName(),0); sn.setBranchLength(branchLength); return sn; } /** constructor used to clone a node and all children */ public static final Node createNode(Node node) { return new SimpleNode(node); } public static final Node createNode(Node[] children) { return new SimpleNode(children); } /** * Create a node with the specified children, and the specified branch height */ public static final Node createNode(Node[] children, double height) { SimpleNode sn = new SimpleNode(children); sn.setNodeHeight(height); return sn; } /** * Create a node with the specified children, and the specified branch length */ public static final Node createNodeBranchLength(double branchLength, Node[] children) { SimpleNode sn = new SimpleNode(children); sn.setBranchLength(branchLength); return sn; } } pal-1.5.1/src/pal/tree/SimulatedAlignment.java0000644000000000000000000002073307637323316017742 0ustar rootroot// SimulatedAlignment.java // // (c) 1999-2001 PAL Development Core Team // // This package may be distributed under the // terms of the Lesser GNU General Public License (LGPL) package pal.tree; import pal.datatype.*; import pal.substmodel.*; import pal.alignment.*; import pal.math.*; import pal.misc.*; import pal.util.AlgorithmCallback; /** * generates an artificial data set * * @version $Id: SimulatedAlignment.java,v 1.19 2003/03/23 00:21:33 matt Exp $ * * @author Korbinian Strimmer * @author Alexei Drummond */ public class SimulatedAlignment extends AbstractAlignment { // // Public stuff // // // Private stuff // private Tree tree; private SubstitutionModel model; private double[] cumFreqs; private int[] rateAtSite; private double[] cumRateProbs; private int numStates; private byte[][] stateData; private MersenneTwisterFast rng; // // Serialization // //private static final long serialVersionUID = -5197800047652332969L; //serialver -classpath ./classes pal.tree.SimulatedAlignment private void writeObject(java.io.ObjectOutputStream out) throws java.io.IOException { out.writeByte(1); //Version number out.writeObject(tree); out.writeObject(model); out.writeObject(cumFreqs); out.writeObject(rateAtSite); out.writeObject(cumRateProbs); out.writeObject(stateData); } private void readObject(java.io.ObjectInputStream in) throws java.io.IOException, ClassNotFoundException{ byte version = in.readByte(); switch(version) { default : { tree = (Tree)in.readObject(); model = (SubstitutionModel)in.readObject(); cumFreqs = (double[])in.readObject(); rateAtSite = (int[])in.readObject(); cumRateProbs = (double[])in.readObject(); stateData = (byte[][])in.readObject(); numStates = getDataType().getNumStates(); rng = new MersenneTwisterFast(); break; } } } /** * Inititalisation * * @param sites number of sites * @param t tree relating 
the sequences * @param m model of evolution */ public SimulatedAlignment(int sites, Tree t, SubstitutionModel m) { rng = new MersenneTwisterFast(); setDataType(m.getDataType()); numStates = getDataType().getNumStates(); model = m; tree = t; tree.createNodeList(); numSeqs = tree.getExternalNodeCount(); numSites = sites; idGroup = new SimpleIdGroup(numSeqs); for (int i = 0; i < numSeqs; i++) { idGroup.setIdentifier(i, tree.getExternalNode(i).getIdentifier()); } stateData = new byte[numSeqs][numSites]; for (int i = 0; i < tree.getExternalNodeCount(); i++) { tree.getExternalNode(i).setSequence(stateData[i]); } for (int i = 0; i < tree.getInternalNodeCount()-1; i++) { tree.getInternalNode(i).setSequence(new byte[numSites]); } rateAtSite = new int[numSites]; cumFreqs = new double[numStates]; cumRateProbs = new double[m.getNumberOfTransitionCategories()]; } // Implementation of abstract Alignment method /** sequence alignment at (sequence, site) */ public char getData(int seq, int site) { return getChar(stateData[seq][site]); } /** generate new artificial data set (random root sequence) */ public void simulate() { simulate(makeRandomRootSequence()); } /** generate new artificial data set (random root sequence) */ public void simulate(String givenRootSequence) { simulate(DataType.Utils.getByteStates(givenRootSequence, model.getDataType())); } /** generate new artificial data set (specified root sequence) */ public void simulate(byte[] rootSeq) { double[][][] transitionStore = SubstitutionModel.Utils.generateTransitionProbabilityTables(model); // Check root sequence for (int i = 0; i < numSites; i++) { if (rootSeq[i] >= numStates || rootSeq[i] < 0) { throw new IllegalArgumentException("Root sequence contains illegal state (?,-, etc.)"); } } tree.getInternalNode(tree.getInternalNodeCount()-1).setSequence(rootSeq); // Assign new rate categories assignRates(); // Visit all nodes except root Node node = NodeUtils.preorderSuccessor(tree.getRoot()); do { determineMutatedSequence(node,transitionStore); node = NodeUtils.preorderSuccessor(node); } while (node != tree.getRoot()); } private void determineMutatedSequence(Node node, double[][][] transitionStore) { if (node.isRoot()) throw new IllegalArgumentException("Root node not allowed"); model.getTransitionProbabilities(node.getBranchLength(),transitionStore); byte[] oldS = node.getParent().getSequence(); byte[] newS = node.getSequence(); for (int i = 0; i < numSites; i++) { double[] freqs = transitionStore[rateAtSite[i]][oldS[i]]; cumFreqs[0] = freqs[0]; for (int j = 1; j < numStates; j++) { cumFreqs[j] = cumFreqs[j-1] + freqs[j]; } newS[i] = (byte) randomChoice(cumFreqs); } } private byte[] makeRandomRootSequence() { double[] frequencies = model.getEquilibriumFrequencies(); cumFreqs[0] = frequencies[0]; for (int i = 1; i < numStates; i++) { cumFreqs[i] = cumFreqs[i-1] + frequencies[i]; } byte[] rootSequence = new byte[numSites]; for (int i = 0; i < numSites; i++) { rootSequence[i] = (byte) randomChoice(cumFreqs); } return rootSequence; } private void assignRates() { double[] categoryProbabilities = model.getTransitionCategoryProbabilities(); cumRateProbs[0] = categoryProbabilities[0]; for (int i = 1; i < categoryProbabilities.length ; i++) { cumRateProbs[i] = cumRateProbs[i-1] + categoryProbabilities[i]; } for (int i = 0; i < numSites; i++) { rateAtSite[i] = randomChoice(cumRateProbs); } } // Chooses one category if a cumulative probability distribution is given private int randomChoice(double[] cf) { double rnd = rng.nextDouble(); int s; if (rnd <= 
cf[0]) { s = 0; } else { for (s = 1; s < cf.length; s++) { if (rnd <= cf[s] && rnd > cf[s-1]) { break; } } } return s; } // ============================================================================ // SimulatedAlignment.Factory /** * A utility class that can be used to generate Simulated alignments based on * a tree with predefined sequence length and substitution model */ public static final class Factory { private int sequenceLength_; private SubstitutionModel model_; public Factory(int sequenceLength, SubstitutionModel model) { if(sequenceLength<1) { throw new IllegalArgumentException("Invalid sequence length:"+sequenceLength); } this.sequenceLength_ = sequenceLength; this.model_ = model; } /** * Generate a simulated alignment based on input tree * @param tree The tree, with branchlengths set appropriately. * @note Units should be expected substitutions * @throws IllegalArgumentException if trees units are not EXPECTED SUBSTITUTIONS, or UNKNOWN */ public final SimulatedAlignment generateAlignment(final Tree tree) { if( (tree.getUnits()!=Units.EXPECTED_SUBSTITUTIONS)&& (tree.getUnits()!=Units.UNKNOWN) ) { throw new IllegalArgumentException("Tree units must be Expected Substitutions (or reluctantly Unknown)"); } //System.out.println("Simulating:"+model_); SimulatedAlignment sa = new SimulatedAlignment(sequenceLength_,tree,model_); sa.simulate(); return sa; } /** * Generate an array of simulated alignments based on an array of input trees * @param trees The tree, with branchlengths set appropriately. * @param callback An AlgorithmCallback for monitoring progress and premature stopping * @note Units should be expected substitutions * @note if AlgorithmCallback indicates premature stopping will return an array of * alignments created so far. * @throws IllegalArgumentException if trees units are not EXPECTED SUBSTITUTIONS, or UNKNOWN */ public final SimulatedAlignment[] generateAlignments(final Tree[] trees, final AlgorithmCallback callback) { SimulatedAlignment[] as = new SimulatedAlignment[trees.length]; for(int i = 0 ; i < trees.length ; i++) { if(callback.isPleaseStop()) { SimulatedAlignment[] partial = new SimulatedAlignment[i]; System.arraycopy(as,0,partial,0,i); return partial; } as[i] = generateAlignment(trees[i]); as[i].simulate(); callback.updateProgress(i/(double)trees.length); } callback.clearProgress(); return as; } } } pal-1.5.1/src/pal/tree/SimpleTree.java0000644000000000000000000001772107573706200016224 0ustar rootroot// SimpleTree.java // // (c) 1999-2001 PAL Development Core Team // // This package may be distributed under the // terms of the Lesser GNU General Public License (LGPL) package pal.tree; import pal.misc.*; import pal.io.*; import java.io.*; import java.util.*; /** * data structure for a binary/non-binary rooted/unrooted trees * * @version $Id: SimpleTree.java,v 1.23 2002/12/05 04:27:28 matt Exp $ * * @author Alexei Drummond * @author Korbinian Strimmer * */ public class SimpleTree implements Tree, Report, Units, Serializable { // // This class has explicit serialization code so if you alter any fields please alter // the serialization code too (make sure you use a new version number - see readObject/writeObject // Thanks, Matthew // // Public stuff // // // Private stuff /** root node */ private Node root; /** list of internal nodes (including root) */ private Node[] internalNode = null; /** number of internal nodes (including root) */ private int numInternalNodes; /** list of external nodes */ private Node[] externalNode = null; /** number of external nodes 
*/ private int numExternalNodes; /** attributes attached to this tree. */ private Hashtable[] attributes = null; /** holds the units of the trees branches. */ private int units = EXPECTED_SUBSTITUTIONS; // // Serialization Stuff // static final long serialVersionUID=-7330318631600898531L; //serialver -classpath ./classes pal.tree.SimpleTree /** I like doing things my self! */ private void writeObject(java.io.ObjectOutputStream out) throws java.io.IOException { out.writeByte(1); //Version number out.writeObject(root); out.writeObject(attributes); out.writeInt(units); } private void readObject(java.io.ObjectInputStream in) throws IOException, ClassNotFoundException{ byte version = in.readByte(); switch(version) { default : { root = (Node)in.readObject(); createNodeList(); attributes = (Hashtable[])in.readObject(); units = in.readInt(); } } } /** constructor tree consisting solely of root node */ public SimpleTree() { // Default configuration root = new SimpleNode(); //root.setIdentifier(new Identifier("ROOT")); root.setBranchLength(0.0); root.setBranchLengthSE(0.0); } /** constructor taking a root node */ public SimpleTree(Node r) { root = r; createNodeList(); } /** clone constructor */ public SimpleTree(Tree tree) { root = new SimpleNode(tree.getRoot()); setUnits(tree.getUnits()); createNodeList(); } /** clone constructor */ public SimpleTree(Tree tree, boolean keepIdentifiers) { root = new SimpleNode(tree.getRoot(), keepIdentifiers); setUnits(tree.getUnits()); createNodeList(); } /** * clone constructor * @param lm - a label mapping use for translating the original label names into something else */ public SimpleTree(Tree tree, LabelMapping lm) { root = new SimpleNode(tree.getRoot(), lm); setUnits(tree.getUnits()); createNodeList(); } /** * Return the units that this tree is expressed in. */ public final int getUnits() { return units; } /** * Sets the units that this tree is expressed in. */ public final void setUnits(int units) { this.units = units; } /** * Returns the number of external nodes. */ public final int getExternalNodeCount() { if(externalNode==null) { createNodeList(); } return numExternalNodes; } /** * Returns the ith external node. */ public final Node getExternalNode(int i) { if(externalNode==null) { createNodeList(); } return externalNode[i]; } /** * Returns the number of internal nodes. */ public final int getInternalNodeCount() { if(internalNode==null) { createNodeList(); } return numInternalNodes; } /** * Returns the ith internal node. */ public final Node getInternalNode(int i) { if(internalNode==null) { createNodeList(); } return internalNode[i]; } /** * Returns the root node of this tree. */ public final Node getRoot() { return root; } /** * Set a new node as root node. 
*/ public final void setRoot(Node r) { root = r; createNodeList(); } /** count and list external and internal nodes and compute heights of each node */ public void createNodeList() { numInternalNodes = 0; numExternalNodes = 0; Node node = root; do { node = NodeUtils.postorderSuccessor(node); if (node.isLeaf()) { node.setNumber(numExternalNodes); numExternalNodes++; } else { node.setNumber(numInternalNodes); numInternalNodes++; } } while(node != root); internalNode = new Node[numInternalNodes]; externalNode = new Node[numExternalNodes]; node = root; do { node = NodeUtils.postorderSuccessor(node); if (node.isLeaf()) { externalNode[node.getNumber()] = node; } else { internalNode[node.getNumber()] = node; } } while(node != root); // compute heights if it seems necessary if (root.getNodeHeight() == 0.0) { NodeUtils.lengths2Heights(root); } } public String toString() { StringWriter sw = new StringWriter(); NodeUtils.printNH(new PrintWriter(sw), getRoot(), true, false, 0, false); sw.write(";"); return sw.toString(); } /** * return node with number num (as displayed in ASCII tree) * * @param num number of node * * @return node */ public Node findNode(int num) { createNodeList(); if (num <= numExternalNodes) { return externalNode[num-1]; } else { return internalNode[num-1-numExternalNodes]; } } private int getIndex(Node node) { if (node.isLeaf()) return node.getNumber(); return getExternalNodeCount() + node.getNumber(); } /** * Sets an named attribute for a given node. * @param node the node whose attribute is being set. * @param name the name of the attribute. * @param value the new value of the attribute. */ public void setAttribute(Node node, String name, Object value) { if (node instanceof AttributeNode) { ((AttributeNode)node).setAttribute(name, value); } else { int index = getIndex(node); if (attributes == null) { attributes = new Hashtable[getExternalNodeCount() + getInternalNodeCount()]; } if (attributes[index] == null) { attributes[index] = new Hashtable(); } attributes[index].put(name, value); } } // ========= IdGroup stuff =============================== public int getIdCount() { return getExternalNodeCount(); } public Identifier getIdentifier(int i) { return getExternalNode(i).getIdentifier(); } public void setIdentifier(int i, Identifier id) { getExternalNode(i).setIdentifier(id); } public int whichIdNumber(String s) { return IdGroup.Utils.whichIdNumber(this,s); } //======================================================== /** * @return an object representing the named attributed for the numbered node. * @param node the node being interrogated. * @param name the name of the attribute of interest. */ public Object getAttribute(Node node, String name) { if (node instanceof AttributeNode) { return ((AttributeNode)node).getAttribute(name); } else { int index = getIndex(node); if (attributes == null || attributes[index] == null) { return null; } return attributes[index].get(name); } } /** * make node with number num to root node * * @param num number of node */ public void reroot(int num) { TreeUtils.reroot(this, findNode(num)); } /** * make provided node the root node * * @param node the node to make the root. 
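 *
 * Example sketch (illustrative, not from the original source; assumes an existing SimpleTree named tree):
 *   tree.reroot(tree.getExternalNode(0)); // make the first external (leaf) node the new rooting point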
*/ public void reroot(Node node) { TreeUtils.reroot(this, node); } // interface Report public void report(PrintWriter out) { TreeUtils.report(this, out); } public Tree getCopy() { return new SimpleTree(this); } } pal-1.5.1/src/pal/tree/TreeOperation.java0000644000000000000000000000535207744511436016735 0ustar rootroot// TreeOperation.java // // (c) 1999-2003 PAL Development Core Team // // This package may be distributed under the // terms of the Lesser GNU General Public License (LGPL) package pal.tree; /** *

 * Title: TreeOperation
 *
 * Description: A class that creates an altered tree based on an input tree. This isn't used by much apart from SerialCoalescentGenerator.
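 *
 * A minimal usage sketch (illustrative only, not part of the original source; assumes a pal.tree.Tree instance named myTree):
 *   TreeOperation halve = TreeOperation.Utils.createScale(0.5, pal.misc.Units.EXPECTED_SUBSTITUTIONS);
 *   TreeOperation pipeline = TreeOperation.Utils.createPipeline(halve, TreeOperation.Utils.getNoOperation());
 *   Tree scaled = pipeline.operateOn(myTree); // the input tree is not altered, per the operateOn contract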

* @version $Id: TreeOperation.java,v 1.1 2003/10/19 02:35:26 matt Exp $ * @author Matthew Goode * @version 1.0 */ public interface TreeOperation { /** * Operates on input tree. Users can assume that the input tree is not altered, * but should allow that the result tree may just be the input tree! * @param tree The input tree. * @return the A new tree, or the input tree */ public Tree operateOn(Tree tree); // -==--=-=-=======-==--=-=-=-=-=-=-==-=--==--=-=-==-=--==-=-=--=-==--=-= /** * Utility class */ public static final class Utils { /** * Create a tree operation that scales the input tree and changes the units * @param scaleFactor The scaling to be done * @param resultingUnits The new units * @return A Tree Operation */ public static final TreeOperation createScale(double scaleFactor, int resultingUnits) { return new Scale(scaleFactor, resultingUnits); } /** * Creates a tree operation that first applies one tree operation and then applies a second operation to get it's result * @param first the first operation to apply * @param second the operation to apply on the result of the first * @return A tree operation */ public static final TreeOperation createPipeline(TreeOperation first, TreeOperation second) { return new Pipeline(first, second); } public static final TreeOperation getNoOperation() { return NOP.INSTANCE; } // ======== private static final class NOP implements TreeOperation { public static final TreeOperation INSTANCE = new NOP(); public Tree operateOn(Tree tree) { return tree; } } private static final class Scale implements TreeOperation { private final double scaleFactor_; private final int resultingUnits_; public Scale(double scaleFactor, int resultingUnits) { this.scaleFactor_ = scaleFactor; this.resultingUnits_ = resultingUnits; } public Tree operateOn(Tree tree) { return TreeUtils.getScaled(tree,scaleFactor_,resultingUnits_); } } // ======== private static final class Pipeline implements TreeOperation { private final TreeOperation first_, second_; public Pipeline(TreeOperation first, TreeOperation second) { this.first_ = first; this.second_ = second; } public Tree operateOn(Tree tree) { return second_.operateOn(first_.operateOn(tree)); } } } }pal-1.5.1/src/pal/tree/TreeIterator.java0000644000000000000000000000140510043156012016536 0ustar rootroot// TreeIterator.java // // (c) 1999-2003 PAL Development Core Team // // This package may be distributed under the // terms of the Lesser GNU General Public License (LGPL) package pal.tree; /** * An interface for classes that iterate through trees. Useful for cases where large numbers of * trees are generator (like bootstrapping) but, to save memory, it is better to * generate the trees on the fly, instead of pregenerating them and storing in an * array. * * @version $Id: TreeIterator.java,v 1.2 2004/04/25 22:53:14 matt Exp $ * * @author Matthew Goode */ import pal.util.AlgorithmCallback; public interface TreeIterator extends TreeGenerator { /** * @return true if more trees to come */ public boolean isMoreTrees(); } pal-1.5.1/src/pal/tree/TreeManipulator.java0000644000000000000000000020342710103465054017256 0ustar rootroot// TreeManimulator.java // // (c) 1999-2004 PAL Development Core Team // // This package may be distributed under the // terms of the Lesser GNU General Public License (LGPL) package pal.tree; /** * Was TreeRooter. * A class to provide all your tree rooting and unrooting needs plus more. Allows * Unrooting, Midpoint Rooting (reasonably efficiently), General Rooting, and * obtaining every root. 
Also allows for the collapsing and uncollapsing of short branches, and the attachment of sub trees. * * This class replaces methods in TreeUtil (and is more swanky) * * In general just use the static access methods. (eg TreeManipulator.getUnrooted(myTree); ) * * @version $Id: TreeManipulator.java,v 1.3 2004/08/02 05:22:04 matt Exp $ * * @author Matthew Goode * @note REDUCE_CONSTRUCTION functioning (relatively untested) as of 18 September 2003 * *

 * History:
 *  - 18/9/2003 MG: Corrected rooting for complex case, added in getAllRoot methods, REDUCED_CONSTRUCTION stuff working, added in ingroup branch length stuff to rooting (to help make pretty pictures), added getAsInput() methods
 *  - 25/10/2003 MG: Fixed bug with EXPAND_CONSTRUCTION on an unrooted tree
 *  - 16/4/2003 MG: Changed name (TreeRooter -> TreeManipulator), added branch access stuff
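 *
 * Usage sketch (illustrative only, not from the original source; assumes an existing Tree named myTree and a leaf labelled "outgroupTaxon"):
 *   Tree unrooted = TreeManipulator.getUnrooted(myTree);
 *   Tree midpointRooted = TreeManipulator.getMidpointRooted(myTree);
 *   Tree outgroupRooted = TreeManipulator.getRootedBy(myTree, new String[] { "outgroupTaxon" });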
*/ import pal.util.AlgorithmCallback; import pal.misc.Units; import pal.misc.BranchLimits; import pal.misc.Identifier; import java.util.ArrayList; public class TreeManipulator implements UnrootedTreeInterface.Instructee, RootedTreeInterface.Instructee { /** * Construct tree with same multification as original */ public static final int MIMIC_CONSTRUCTION = 100; /** * Construct tree, but convert general multifications to a series of bifications */ public static final int EXPAND_CONSTRUCTION = 200 ; /** * Construct tree, but convert bificating nodes that appear as multifications (due to very short branchlengths) to multifications */ public static final int REDUCE_CONSTRUCTION = 300; private Connection unrootedTree_; private final int units_; /** * Only used by getBinaryTree() */ private final double firstChildNodeLength_; private final boolean inputTreeUnrooted_; /** * Construct a TreeManipulator based around a normal tree * @param base The base tree, which can be rooted or unrooted (will be treated as unrooted either way) * @param constructionMode the way in which the internal tree representation is constructed * @note The base tree is never altered */ public TreeManipulator(Tree base, int constructionMode) { this(base.getRoot(), base.getUnits(),constructionMode); } /** * Construct a TreeManipulator based around a normal tree * @param base The base tree, which can be rooted or unrooted (will be treated as unrooted either way) * @note The base tree is never altered */ public TreeManipulator(Tree base) { this(base.getRoot(), base.getUnits()); } /** * Units will be Units.UNKNOWN */ public TreeManipulator(Node base) { this(base,Units.UNKNOWN); } /** * Construct a TreeManipulator based around a normal tree * @param base The base tree, which can be rooted or unrooted (will be treated as unrooted either way) * @param units, the units of generated trees. Not really of concern if only Node trees built * @note The base tree is never altered */ public TreeManipulator(Node base, int units) { this(base,units,MIMIC_CONSTRUCTION); } /** * Construct a TreeManipulator based around a normal tree * @param base The base tree, which can be rooted or unrooted (will be treated as unrooted either way) * @param units, the units of generated trees. 
Not really of concern if only Node trees built * @note The base tree is never altered */ public TreeManipulator(Node base, int units, int constructionMode) { SimpleNode simpleBase = new PALNodeWrapper(base); this.unrootedTree_ = construct(simpleBase, constructionMode); this.inputTreeUnrooted_ = base.getChildCount()>2; this.firstChildNodeLength_ = base.getChild(0).getBranchLength(); this.units_ = units; this.unrootedTree_.clearPathInfo(); } public TreeManipulator(UnrootedTreeInterface.Instructee base, int units, int constructionMode) { UnrootedInterfaceImpl ui = new UnrootedInterfaceImpl(); base.instruct(ui); SimpleBranch root = ui.getSimpleRootBranch(); this.unrootedTree_ = new Connection(root,constructionMode); this.firstChildNodeLength_ = root.getBranchLength()/2; this.units_ = units; this.unrootedTree_.clearPathInfo(); this.inputTreeUnrooted_ = true; } public TreeManipulator(RootedTreeInterface.Instructee base, int units, int constructionMode) { RootedInterfaceImpl ri = new RootedInterfaceImpl(); base.instruct(ri); SimpleNode root = ri.getSimpleRoot(); this.unrootedTree_ = construct(root, constructionMode); this.inputTreeUnrooted_ = false; this.firstChildNodeLength_ = root.getChild(0).getParentBranchLength(); this.units_ = units; this.unrootedTree_.clearPathInfo(); } /** * Attachment constructor * @param base The basis TreeManipulator * @param baseSubTreeConnector The connection in the base that the sub tree will be attached * @param subTree the sub tree to attach * @param constructionMode the construction mode for the new sub tree (construction will match current for other parts of the tree) */ private TreeManipulator(TreeManipulator base, Connection baseSubTreeConnector, Node subTree, int constructionMode) { SimpleNode simpleSubTree = new PALNodeWrapper(subTree); this.unrootedTree_ = base.unrootedTree_.getAttached(baseSubTreeConnector,simpleSubTree, constructionMode); this.inputTreeUnrooted_ = (base.unrootedTree_==baseSubTreeConnector ? true : base.inputTreeUnrooted_); this.firstChildNodeLength_ = base.firstChildNodeLength_; this.units_ = base.units_; this.unrootedTree_.clearPathInfo(); } private static final Connection construct(SimpleNode n, int constructionMode) { if(n.isLeaf()) { throw new IllegalArgumentException("Tree must contain more than a single OTU!"); } if(n.getNumberOfChildren()==2) { return new Connection(n.getChild(0), n.getChild(1),constructionMode); } UndirectedNode un = new UndirectedNode(n,constructionMode); return un.getPeerParentConnection(); } /** * @return the MidPoint rooted tree (as root node); */ public Node getMidPointRooted() { Node n = unrootedTree_.getMidPointRooted(); NodeUtils.lengths2Heights(n); return n; } /** * @return a tree rooted around the node it was originally rooted around (if originally rooted), * @note With */ public Node getDefaultRoot() { Node n = unrootedTree_.getRootedAround(firstChildNodeLength_); NodeUtils.lengths2Heights(n); return n; } /** * Tests if the given clade memebers form an exact clade that does not include any other members other * than the ones listed. If there are members that are not actually in the tree, they will be ignored. 
* @param possibleCladeMembers the names of the members in the clade of interest * @return true if the conditions are met * @note not currently correctly implemented */ private boolean isFormsFormsExactClade(String[] possibleCladeMembers) { return unrootedTree_.isFormsExactClade(possibleCladeMembers); } /** * A method for recovering the input (construction) tree (with the EXPANSION/MIMIC/REDUCED differences) * @return An unrooted tree if the input tree was unrooted, otherwise the default rooting */ public Node getAsInputRooting() { if(inputTreeUnrooted_) { return getUnrooted(); } return getDefaultRoot(); } /** * A method for recovering the input (construction) tree (with the EXPANSION/MIMIC/REDUCED differences) * @return An unrooted tree if the input tree was unrooted, otherwise the default rooting */ public Tree getAsInputRootingTree() { return constructTree(getAsInputRooting(),units_); } /** * @return a tree rooted around the node it was originally rooted around (if originally rooted), * @note With */ public Tree getDefaultRootTree() { return constructTree(getDefaultRoot(),units_); } /** * @return the MidPoint rooted tree */ public Tree getMidPointRootedTree() { return constructTree(getMidPointRooted(),units_); } /** * return unrooted node */ public Node getUnrooted() { Node n = unrootedTree_.getUnrooted(); NodeUtils.lengths2Heights(n); return n; } /** * return unrooted node */ public Tree getUnrootedTree() { return constructTree(getUnrooted(),units_); } /** * @return all connections in tree */ private Connection[] getAllConnections() { return unrootedTree_.getAllConnections(); } /** * @param outgroupNames the names of the members of the outgroup * @return the tree rooted by an outgroup defined by the mrca of a set of nodes * @throws IllegalArgument exception if outgroup names does not contain any valid node names * @note If the outgroup is not well defined, this may not be the only rooting */ public Node getRootedBy(String[] outgroupNames) { Node n = unrootedTree_.getRootedAroundMRCA(outgroupNames); NodeUtils.lengths2Heights(n); return n; } /** * @param outgroupNames the names of the members of the outgroup * @return the tree rooted by an outgroup defined by the mrca of a set of nodes * @throws IllegalArgument exception if outgroup names does not contain any valid node names * @note If the outgroup is not well defined, this may not be the only rooting */ public void instructRootedBy(RootedTreeInterface rootedInterface, String[] outgroupNames) { unrootedTree_.instructRootedAroundMRCA(rootedInterface, outgroupNames); } /** * @param outgroupNames the names of the members of the outgroup * @param ingroupBranchLength the maximum length of the branch leading to the ingroup clade * @return the tree rooted by an outgroup defined by the mrca of a set of nodes * @throws IllegalArgument exception if outgroup names does not contain any valid node names * @note If the outgroup is not well defined, this may not be the only rooting */ public Node getRootedBy(String[] outgroupNames,double ingroupBranchLength) { return unrootedTree_.getRootedAroundMRCA(outgroupNames,ingroupBranchLength); } /** * @param outgroupNames the names of the members of the outgroup * @return all the trees rooted by an outgroup defined by the mrca of a set of nodes * @throws IllegalArgument exception if outgroup names does not contain any valid node names */ public Node[] getAllRootedBy(String[] outgroupNames) { return unrootedTree_.getAllRootedAroundMRCA(outgroupNames); } /** * @param outgroupNames the names of the members of the 
outgroup * @return the tree rooted by an outgroup defined by the mrca of a set of nodes * @note If the outgroup is not well defined, this may not be the only rooting */ public Tree getTreeRootedBy(String[] outgroupNames) { return constructTree(getRootedBy(outgroupNames),units_); } /** * @param outgroupNames the names of the members of the outgroup * @param ingroupBranchLength the maximum length of the branch leading to the ingroup clade * @return the tree rooted by an outgroup defined by the mrca of a set of nodes * @note If the outgroup is not well defined, this may not be the only rooting */ public Tree getTreeRootedBy(String[] outgroupNames, double ingroupBranchLength) { return constructTree(getRootedBy(outgroupNames,ingroupBranchLength),units_); } /** * @param outgroupNames the names of the members of the outgroup * @return all the possible rootings defined by the outgroup */ public Tree[] getAllTreesRootedBy(String[] outgroupNames) { Node[] nodes = getAllRootedBy(outgroupNames); Tree[] trees = new Tree[nodes.length]; for(int i = 0 ; i < nodes.length ;i++) { trees[i] = constructTree(nodes[i],units_); } return trees; } /** * @return a tree iterator that returns each and every possible root of the base tree (as a new tree object each time) * @note All Rooted trees are not constructed at once, but only on request. Use this method instead * of getEveryRoot() if memory is an issue */ public TreeIterator getEveryRootIterator() { return new RootIterator(getAllConnections(),units_); } public void instruct(UnrootedTreeInterface treeInterface) { UnrootedTreeInterface.BaseBranch base = treeInterface.createBase(); unrootedTree_.instruct(base); } public void instruct(RootedTreeInterface treeInterface) { RootedTreeInterface.RNode base = treeInterface.createRoot(); unrootedTree_.instruct(base,firstChildNodeLength_); } /** * Obtain access to individual branches * @return an array of branch access objects */ public BranchAccess[] getBranchAccess() { final Connection[] connections = getAllConnections(); final BranchAccess[] results = new BranchAccess[connections.length]; for(int i = 0 ; i < connections.length ; i++) { results[i] = new BranchAccessImpl(this,connections[i], units_); } return results; } /** * @return each and every possible root of the base tree */ public Tree[] getEveryRoot() { final Connection[] connections = getAllConnections(); final Tree[] results = new Tree[connections.length]; for(int i = 0 ; i < connections.length ; i++) { results[i] = constructTree(connections[i].getRootedAround(), units_); } return results; } /** * @param Node n, a node from the original base tree that this TreeManipulator was * constructed on * @throws Illegal argument exception if input node was not in original base tree */ public Node getRootedAbove(Node base) { UndirectedNode match = unrootedTree_.getRelatedNode(base); if(match==null) { throw new IllegalArgumentException("Parameter node not found in original tree"); } Node n = match.getPeerParentConnection().getRootedAround(); NodeUtils.lengths2Heights(n); return n; } /** * @param Node n, a node from the original base tree that this TreeManipulator was * constructed on * @throws Illegal argument exception if input node was not in original base tree */ public Tree getTreeRootedAbove(Node n) { return constructTree(getRootedAbove(n),units_); } // -=-==--=-==-=--=-=-=-=-=-=-=-=-=-=-====--=-=-=--=====-=-=-=-=-=---====-=-=-= // Static access methods /** * Unroots a tree * @param base The input tree that may or may not be unrooted * @return an unrooted tree (has a 
trification at base) */ public static final Tree getUnrooted(Tree base) { return new TreeManipulator(base).getUnrootedTree(); } /** * Returns the mid point rooting of a tree. This is the rooting that divides * the data between the two most distinct taxa * @see http://www.mun.ca/biology/scarr/Panda_midpoint_rooting.htm * @param base The input tree that may or may not be unrooted * @return an unrooted tree (has a trification at base) */ public static final Tree getMidpointRooted(Tree base) { return new TreeManipulator(base).getMidPointRootedTree(); } /** * Obtains every rooting of a base tree * @param base The input tree that may or may not be unrooted */ public static final Tree[] getEveryRoot(Tree base) { return new TreeManipulator(base).getEveryRoot(); } /** * Obtains every rooting of a base tree * @param base The input tree that may or may not be unrooted */ public static final TreeIterator getEveryRootIterator(Tree base) { return new TreeManipulator(base).getEveryRootIterator(); } /** * Roots a tree by an outgroup * @param base The input tree that may or may not be unrooted * @param outgroupNames The names of the members of the outgroup. Names not matching taxa in the tree are ignored. The node that is the MCRA of * members of the outgroup will influence the rooting. * @throws IllegalArgumentException if no members of the tree appear in the outgroup * @note if the outgroup is not well defined the returned tree may not be the only rooting */ public static final Tree getRootedBy(Tree base, String[] outgroupNames) { return new TreeManipulator(base).getTreeRootedBy(outgroupNames); } /** * Roots a tree by an outgroup * @param base The input tree that may or may not be unrooted * @param outgroupNames The names of the members of the outgroup. Names not matching taxa in the tree are ignored. The node that is the MCRA of * members of the outgroup will influence the rooting. * @throws IllegalArgumentException if no members of the tree appear in the outgroup * @note if the outgroup is not well defined the returned tree may not be the only rooting */ public static final Tree getRootedBy(Tree base, String[] outgroupNames, double ingroupBranchLength) { return new TreeManipulator(base).getTreeRootedBy(outgroupNames,ingroupBranchLength); } /** * Roots a tree by an outgroup * @param base The input tree that may or may not be unrooted * @param ingroupBranchLength the maximum length of the branch leading to the ingroup clade * @param outgroupNames The names of the members of the outgroup. Names not matching taxa in the tree are ignored. The node that is the MCRA of * members of the outgroup will influence the rooting. * @return every possible interpretation of rooting a tree by the given outgroup. If the outgroup is well defined there will be only one tree. 
* @throws IllegalArgumentException if no members of the tree appear in the outgroup */ public static final Tree[] getAllRootingsBy(Tree base, String[] outgroupNames) { return new TreeManipulator(base).getAllTreesRootedBy(outgroupNames); } // -=-==--=-==-=--=-=-=-=-=-=-=-=-=-=-====--=-=-=--=====-=-=-=-=-=---====-=-=-= /** * A connection between two nodes */ private static final class Connection { private UndirectedNode firstNode_; private double maximumPathLengthToLeafViaFirstNode_; private boolean isFirstPathInfoFound_ = false; private UndirectedNode secondNode_; private double maximumPathLengthToLeafViaSecondNode_; private boolean isSecondPathInfoFound_ = false; private double distance_; private Object annotation_; public Connection(UndirectedNode firstNode, UndirectedNode secondNode, SimpleBranch connectingBranch) { this.firstNode_ = firstNode; this.secondNode_ = secondNode; this.distance_ = connectingBranch.getBranchLength(); this.annotation_ = connectingBranch.getAnnotation(); } public Connection(UndirectedNode baseNode, SimpleNode parent, int startingIndex, double branchLength, Object annotation) { this.firstNode_ = baseNode; this.secondNode_ = new UndirectedNode(this,startingIndex, parent); this.distance_ = branchLength; this.annotation_ = annotation; } public Connection(UndirectedNode parentNode, SimpleNode child, int constructionMode) { this.firstNode_ = parentNode; SimpleBranch connectingBranch = child.getParentBranch(); this.distance_ = connectingBranch.getBranchLength(); this.annotation_ = connectingBranch.getAnnotation(); this.secondNode_ = new UndirectedNode(constructionMode, this,child); } public Connection(SimpleNode first, SimpleNode second,int constructionMode) { this.distance_ = first.getParentBranchLength()+second.getParentBranchLength(); this.firstNode_ = new UndirectedNode(constructionMode, this,first); this.secondNode_ = new UndirectedNode(constructionMode, this,second); } /** * The root branch constructor * @param branch The simple root branch * @param constructionMode the construction mode */ public Connection(SimpleBranch branch,int constructionMode) { SimpleNode first = branch.getParentNode(); SimpleNode second = branch.getChildNode(); this.distance_ = branch.getBranchLength(); this.annotation_ = branch.getAnnotation(); this.firstNode_ = new UndirectedNode(constructionMode, this,first); this.secondNode_ = new UndirectedNode(constructionMode, this,second); } private Connection(Connection original, Connection attachmentPoint, SimpleNode subTree, int constructionMode) { if(original==attachmentPoint) { throw new RuntimeException("Not implemented yet!"); } else { this.distance_ = original.distance_; this.annotation_ = original.annotation_; this.firstNode_ = original.firstNode_.getAttached( attachmentPoint, subTree, constructionMode, this ); this.secondNode_ = original.secondNode_.getAttached( attachmentPoint, subTree, constructionMode, this ); } } public final Connection getAttached(Connection attachmentPoint, SimpleNode subTree, int constructionMode) { return new Connection(this,attachmentPoint,subTree,constructionMode); } public final String[][] getLabelSplit() { throw new RuntimeException("Not implemented yet!"); } public final void setDistance(double distance) { this.distance_ = distance; } public final UndirectedNode getFirst() { return firstNode_; } public final UndirectedNode getSecond() { return secondNode_; } public final int getExactCladeCount(String[] possibleCladeMembers, UndirectedNode caller) { if(caller==firstNode_) { return 
secondNode_.getExactCladeCount(possibleCladeMembers,this); } else if(caller==secondNode_) { return firstNode_.getExactCladeCount(possibleCladeMembers,this); } else { throw new RuntimeException("Assertion erro : unknown caller"); } } public final boolean isFormsExactClade(String[] possibleCladeMembers) { int leftCount = firstNode_.getExactCladeCount(possibleCladeMembers,this); int rightCount = secondNode_.getExactCladeCount(possibleCladeMembers,this); if(leftCount<0||rightCount<0) { return false; } return (leftCount>0&&rightCount==0)||(rightCount>0&&leftCount==0); } public final int getNumberOfMatchingLeaves(String[] leafSet) { return firstNode_.getNumberOfMatchingLeaves(leafSet,this) + secondNode_.getNumberOfMatchingLeaves(leafSet,this); } public final int getNumberOfMatchingLeaves(String[] leafSet, UndirectedNode caller) { if(caller==firstNode_) { return secondNode_.getNumberOfMatchingLeaves( leafSet, this ); } else if(caller==secondNode_) { return firstNode_.getNumberOfMatchingLeaves( leafSet, this ); } throw new RuntimeException("Assertion error : unknown caller"); } public final UndirectedNode getRelatedNode(Node n) { UndirectedNode fromFirst = firstNode_.getRelatedNode(n,this); if(fromFirst!=null) { return fromFirst; } return secondNode_.getRelatedNode(n,this); } /** * @return a new node rooted on the first node of this tree */ public Node getUnrooted() { if(firstNode_.isLeaf()) { return secondNode_.buildUnrootedTree(); } return firstNode_.buildUnrootedTree(); } public final double getMaximumPathLengthToLeafViaFirst() { if(!isFirstPathInfoFound_) { maximumPathLengthToLeafViaFirstNode_ = firstNode_.getMaximumPathLengthToLeaf(this); isFirstPathInfoFound_ = true; } return maximumPathLengthToLeafViaFirstNode_; } public final double getMaximumPathLengthToLeafViaSecond() { if(!isSecondPathInfoFound_) { maximumPathLengthToLeafViaSecondNode_ = secondNode_.getMaximumPathLengthToLeaf(this); isSecondPathInfoFound_ = true; } return maximumPathLengthToLeafViaSecondNode_; } public final void addLabels(ArrayList store, UndirectedNode callingNode) { if(callingNode==firstNode_) { secondNode_.addLabels(store,this); } else if(callingNode==secondNode_) { firstNode_.addLabels(store,this); } else { throw new RuntimeException("Assertion error : unknown calling node!"); } } public void setAnnotation(Object annotation) { this.annotation_ = annotation; } public void instruct(UnrootedTreeInterface.BaseBranch base) { base.setLength(this.distance_); if(annotation_!=null) { base.setAnnotation( annotation_ ); } firstNode_.instruct(base.getLeftNode(),this); secondNode_.instruct(base.getRightNode(),this); } public void instruct(RootedTreeInterface.RNode base, double firstChildLength) { base.resetChildren(); RootedTreeInterface.RNode left = base.createRChild(); RootedTreeInterface.RNode right = base.createRChild(); RootedTreeInterface.RBranch leftBranch = left.getParentRBranch(); RootedTreeInterface.RBranch rightBranch = right.getParentRBranch(); leftBranch.setLength(firstChildLength); rightBranch.setLength(distance_-firstChildLength); if(annotation_!=null) { leftBranch.setAnnotation( annotation_ ); rightBranch.setAnnotation( annotation_ ); } firstNode_.instruct(left,this); secondNode_.instruct(right,this); } public void instruct(UnrootedTreeInterface.UBranch base, UndirectedNode callingNode) { base.setLength(this.distance_); if(annotation_!=null) { base.setAnnotation( annotation_ ); } if(callingNode==firstNode_) { secondNode_.instruct(base.getFartherNode(),this); } else if(callingNode==secondNode_) { 
firstNode_.instruct(base.getFartherNode(),this); } else { throw new IllegalArgumentException("Calling node is unknown!"); } } public void instruct(RootedTreeInterface.RBranch base, UndirectedNode callingNode) { base.setLength(this.distance_); if(annotation_!=null) { base.setAnnotation( annotation_ ); } if(callingNode==firstNode_) { //We are fanning out towards more recent tips secondNode_.instruct(base.getMoreRecentNode(),this); } else if(callingNode==secondNode_) { firstNode_.instruct(base.getMoreRecentNode(),this); } else { throw new IllegalArgumentException("Calling node is unknown!"); } } /** * @return the difference between the maximum path length to leaf via first node * and the maximum path lenght to leaf via second node */ public final double getMaximumPathDifference() { return Math.abs(getMaximumPathLengthToLeafViaFirst()-getMaximumPathLengthToLeafViaSecond()); } public Connection getMRCAConnection(String[] nodeNames) { return getMRCAConnection(null, nodeNames); } public Node getRootedAroundMRCA(String[] nodeNames) { Connection mrca = getMRCAConnectionBaseTraverse(nodeNames); if(mrca!=null) { return mrca.getRootedAround(); } throw new IllegalArgumentException("Non existent outgroup:"+pal.misc.Utils.toString(nodeNames)); } public void instructRootedAroundMRCA(RootedTreeInterface rootedInterface, String[] nodeNames) { Connection mrca = getMRCAConnectionBaseTraverse(nodeNames); if(mrca!=null) { mrca.instructRootedAround(rootedInterface); } else{ throw new IllegalArgumentException( "Non existent outgroup:"+pal.misc.Utils.toString( nodeNames ) ); } } public Node[] getAllRootedAroundMRCA(String[] nodeNames) { Connection[] mrca = getAllMRCAConnectionBaseTraverse(nodeNames); if(mrca.length==0) { throw new IllegalArgumentException( "Non existent outgroup:"+ pal.misc.Utils.toString( nodeNames ) ); } Node[] nodes = new Node[mrca.length]; for(int i = 0 ; i < nodes.length ; i++) { nodes[i] = mrca[i].getRootedAround(); } return nodes; } public Node getRootedAroundMRCA(String[] nodeNames, double ingroupBranchLength) { Connection mrca = getMRCAConnectionBaseTraverse(nodeNames); if(mrca!=null) { return mrca.getRootedAround(ingroupBranchLength,nodeNames); } if(getNumberOfMatchingLeaves(nodeNames)>0) { //Basically the node names includes all of the taxa! return getRootedAround(ingroupBranchLength,nodeNames); } throw new IllegalArgumentException("Non existent outgroup:"+pal.misc.Utils.toString(nodeNames)); } /** * @param blockingNode * @param nodeNames * @return */ public Connection getMRCAConnection(UndirectedNode blockingNode, String[] nodeNames) { Connection first = (firstNode_!=blockingNode) ? firstNode_.getMRCAConnection(this,nodeNames) : null; Connection second = (secondNode_!=blockingNode) ? 
secondNode_.getMRCAConnection(this,nodeNames) : null; if(first!=null) { if(second!=null) { return this; } return first; } //Second may be null return second; } public Connection getMRCAConnectionBaseTraverse(String[] nodeNames) { return getMRCAConnectionBaseTraverse(null,nodeNames); } public Connection[] getAllMRCAConnectionBaseTraverse(String[] nodeNames) { Connection[] store = new Connection[getNumberOfConnections()]; int total = getAllMRCAConnectionBaseTraverse(nodeNames, store,0); Connection[] result = new Connection[total]; System.arraycopy(store,0,result,0,total); return result; } public int getAllMRCAConnectionBaseTraverse(String[] nodeNames, Connection[] store, int numberInStore) { return getAllMRCAConnectionBaseTraverse(null,nodeNames, store, numberInStore); } public Connection getMRCAConnectionBaseTraverse(UndirectedNode callingNode, String[] nodeNames) { Connection first = firstNode_.getMRCAConnection(this,nodeNames) ; Connection second = secondNode_.getMRCAConnection(this,nodeNames) ; System.out.println("Traverse:"+first+" "+second); if(first!=null) { if(second==null) { return first; } //If the MRCA of either sides is not us, then the true MRCA has not been found //(because the outgroup is distributed on both sides of this base). //We try a different base (by traversing tree, so we will eventually get a suitable base) if(firstNode_!=callingNode) { Connection attempt = firstNode_.getMRCAConnectionBaseTraverse( this, nodeNames); if( attempt!=null ) { return attempt; } } if(secondNode_!=callingNode) { Connection attempt = secondNode_.getMRCAConnectionBaseTraverse( this, nodeNames ); if( attempt!=null ) { return attempt; } } return null; } else { //Second may be null return second; } } private final int addToStore(Connection c, Connection[] store, int numberInStore) { for(int i = 0 ; i < numberInStore ; i++) { if(store[i]==c) { return numberInStore; } } store[numberInStore++] = c; return numberInStore; } public int getAllMRCAConnectionBaseTraverse(UndirectedNode callingNode, String[] nodeNames, Connection[] store, int numberInStore) { Connection first = firstNode_.getMRCAConnection(this,nodeNames) ; Connection second = secondNode_.getMRCAConnection(this,nodeNames) ; if(first!=null) { if(second==null) { return addToStore(first,store,numberInStore); } //Both left and right attempts return a connection, if(first==second&&second==this) { //If the MRCA of either side is us then we are the MRCA return addToStore(this,store,numberInStore); } //If the MRCA of either sides is not us, then the true MRCA has not been found //(because the outgroup is distributed on both sides of this base). //We try a different base (by traversing tree, so we will eventually get a suitable base) if(firstNode_!=callingNode) { numberInStore = firstNode_.getAllMRCAConnectionBaseTraverse( this, nodeNames,store, numberInStore ); } if(secondNode_!=callingNode) { numberInStore = secondNode_.getAllMRCAConnectionBaseTraverse( this, nodeNames,store, numberInStore ); } } return numberInStore; } /** * @return the total number of connections in the tree that this connection is part of */ public final int getNumberOfConnections() { return getNumberOfConnections(null); } protected final int getNumberOfConnections(UndirectedNode blockingNode) { int count = 0; if(firstNode_!=blockingNode) { count+=firstNode_.getNumberOfConnections(this); } if(secondNode_!=blockingNode) { count+=secondNode_.getNumberOfConnections(this); } return count+1; //Plus one for me! 
} /** * @return all connections in the tree that includes this connection */ public final Connection[] getAllConnections() { int size = getNumberOfConnections(); Connection[] array = new Connection[size]; getConnections(array,0); return array; } protected final int getConnections(Connection[] array, int index) { return getConnections(null,array,index); } protected final int getConnections(UndirectedNode blockingNode, Connection[] array, int index) { array[index++] = this; if(firstNode_!=blockingNode) { index=firstNode_.getConnections(this,array,index); } if(secondNode_!=blockingNode) { index=secondNode_.getConnections(this,array,index); } return index; //Plus one for me! } public final Connection getMidPointConnection(final UndirectedNode blockingNode, Connection best) { if(blockingNode==secondNode_) { best = firstNode_.getMidPointConnection(this,best); } else if(blockingNode==firstNode_) { best = secondNode_.getMidPointConnection(this,best); } else { throw new RuntimeException("Assertion error : getMidPointConnection called with invalid blockingNode"); } final double myPathDiff = getMaximumPathDifference(); final double bestDiff = best.getMaximumPathDifference(); return (myPathDiffnot include the length of this connection */ public double getMaxLengthToLeaf(UndirectedNode blockingNode) { if(secondNode_==blockingNode) { return getMaximumPathLengthToLeafViaFirst(); } if(firstNode_==blockingNode) { return getMaximumPathLengthToLeafViaSecond(); } throw new RuntimeException("Connection.GetMaxLengthToLeaf() called from unknown asking node"); } /** * Force a recalculation */ public void recalculateMaximumPathLengths() { clearPathInfo(); updatePathInfo(); assertPathInfo(); } public void assertPathInfo() { assertPathInfo(null); } /** * @throws RuntimeException if not all nodes have path info setup */ public void assertPathInfo(UndirectedNode blockingNode) { if(isFirstPathInfoFound_&&isSecondPathInfoFound_) { if(blockingNode!=firstNode_) { firstNode_.callMethodOnConnections(this,ASSERT_PATH_INFO_CALLER); } if(blockingNode!=secondNode_) { secondNode_.callMethodOnConnections(this,ASSERT_PATH_INFO_CALLER); } } else { throw new RuntimeException("Assertion error : assertPathInfo failed!"); } } public void updatePathInfo() { updatePathInfo(null); } public void updatePathInfo(UndirectedNode blockingNode) { if(!isFirstPathInfoFound_) { this.maximumPathLengthToLeafViaFirstNode_ = firstNode_.getMaximumPathLengthToLeaf(this); isFirstPathInfoFound_ = true; } if(blockingNode!=firstNode_) { firstNode_.callMethodOnConnections(this,UPDATE_PATH_INFO_CALLER); } if(!isSecondPathInfoFound_) { this.maximumPathLengthToLeafViaSecondNode_ = secondNode_.getMaximumPathLengthToLeaf(this); isSecondPathInfoFound_ = true; } if(blockingNode!=secondNode_) { secondNode_.callMethodOnConnections(this,UPDATE_PATH_INFO_CALLER); } } public void clearPathInfo() { clearPathInfo(null); } public void clearPathInfo(UndirectedNode blockingNode) { this.isFirstPathInfoFound_ = false; this.isSecondPathInfoFound_ = false; if(blockingNode!=firstNode_) { this.firstNode_.callMethodOnConnections(this,CLEAR_PATH_INFO_CALLER); } if(blockingNode!=secondNode_) { this.secondNode_.callMethodOnConnections(this,CLEAR_PATH_INFO_CALLER); } } public final double getDistance() { return distance_; } public final boolean isConnectedTo(final UndirectedNode node) { return(node==firstNode_)||(node==secondNode_); } public final UndirectedNode getOtherEnd(final UndirectedNode oneEnd) { if(oneEnd==firstNode_) { return secondNode_; } if(oneEnd==secondNode_) { return 
firstNode_; } throw new RuntimeException("Assertion error : getOtherEnd called with non connecting node"); } public final void instructRootedAround(RootedTreeInterface rootedInterface) { RootedTreeInterface.RNode root = rootedInterface.createRoot(); instructRootedAround(root); } public final void instructRootedAround(RootedTreeInterface.RNode peer) { double leftDist = getMaximumPathLengthToLeafViaFirst(); double rightDist = getMaximumPathLengthToLeafViaSecond(); double diff = leftDist-rightDist; if(diff>distance_) { diff = 0;//distance_; } else if(diff<-distance_) { diff = 0;//-distance_; } peer.resetChildren(); RootedTreeInterface.RNode left = peer.createRChild(); RootedTreeInterface.RNode right = peer.createRChild(); RootedTreeInterface.RBranch leftBranch = left.getParentRBranch(); RootedTreeInterface.RBranch rightBranch = right.getParentRBranch(); leftBranch.setLength((distance_-diff)/2); rightBranch.setLength((distance_+diff)/2); if(annotation_!=null) { leftBranch.setAnnotation( annotation_ ); rightBranch.setAnnotation( annotation_ ); } firstNode_.instruct(left, this); secondNode_.instruct(right, this); } public final Node getRootedAround() { double leftDist = getMaximumPathLengthToLeafViaFirst(); double rightDist = getMaximumPathLengthToLeafViaSecond(); double diff = leftDist-rightDist; if(diff>distance_) { diff = 0;//distance_; } else if(diff<-distance_) { diff = 0;//-distance_; } Node left = firstNode_.buildTree(this, (distance_-diff)/2); Node right = secondNode_.buildTree(this, (distance_+diff)/2); Node n = NodeFactory.createNode(new Node[] { left, right}); return n; } public final Node getRootedAround(double distanceForFirstChild) { double distanceForSecondChild = distance_-distanceForFirstChild; if(distanceForSecondChild<0) { distanceForFirstChild = distance_; distanceForSecondChild = 0; } Node left = firstNode_.buildTree(this, distanceForFirstChild); Node right = secondNode_.buildTree(this, distanceForSecondChild); Node n = NodeFactory.createNode(new Node[] { left, right}); return n; } /** * Not the most efficient way of doing this. 
Roots tree around outgroup, and restricts distance of ingroup to base (to make it look pretty) * @param ingroupDistance * @param outgroupMembers * @return */ public final Node getRootedAround(double ingroupDistance, String[] outgroupMembers) { final UndirectedNode ingroup, outgroup; if(firstNode_.getMRCA(this,outgroupMembers)!=null) { outgroup = firstNode_; ingroup = secondNode_; } else { ingroup = firstNode_; outgroup = secondNode_; } double distanceForOutgroup = distance_-ingroupDistance; if(distanceForOutgroup<0) { ingroupDistance = distance_; distanceForOutgroup = 0; } Node left = ingroup.buildTree(this, ingroupDistance); Node right = outgroup.buildTree(this, distanceForOutgroup); return NodeFactory.createNode(new Node[] { left, right}); } } // =-=-=-=-=-=-=----==-=-=--==--=-==-=--=-==--==--==--=-=--=----==-=-=-=-=-==-= // ==== Static methods // ------------------- /** * @return a new tree constructions with node n as root */ private final static Tree constructTree(Node n, int units) { SimpleTree st = new SimpleTree(n); st.setUnits(units); return st; } /** * @return tre if name is in names */ private static final boolean contains(String[] names, String name) { for(int i = 0 ; i < names.length ; i++) { if(name.equals(names[i])) { return true; } } return false; } // =-=-=-=-=-=-=----==-=-=--==--=-==-=--=-==--==--==--=-=--=----==-=-=-=-=-==-= private static interface ConnectionMethodCaller { public void callOn(Connection c, UndirectedNode callingNode); } private static final ConnectionMethodCaller ASSERT_PATH_INFO_CALLER = new ConnectionMethodCaller() { public void callOn(Connection c, UndirectedNode callingNode) { c.assertPathInfo(callingNode); } }; private static final ConnectionMethodCaller CLEAR_PATH_INFO_CALLER = new ConnectionMethodCaller() { public void callOn(Connection c, UndirectedNode callingNode) { c.clearPathInfo(callingNode); } }; private static final ConnectionMethodCaller UPDATE_PATH_INFO_CALLER = new ConnectionMethodCaller() { public void callOn(Connection c, UndirectedNode callingNode) { c.updatePathInfo(callingNode); } }; private static final ConnectionMethodCaller GET_NUMBER_OF_CONNECTIONS_CALLER = new ConnectionMethodCaller() { public void callOn(Connection c, UndirectedNode callingNode) { c.getNumberOfConnections(callingNode); } }; // =-=-=-=-=-=-=----==-=-=--==--=-==-=--=-==--==--==--=-=--=----==-=-=-=-=-==-= /** * A node with no set idea of parent and children (just sibling connections) */ private static final class UndirectedNode { private Connection[] connectedNodes_; private final Node palPeer_; private final String label_; private final Object annotation_; /** * Auto expands */ private UndirectedNode(Connection connection, int childStartingIndex, SimpleNode parent) { this.palPeer_ = null; this.label_ = null; this.annotation_ = null; this.connectedNodes_ = new Connection[3]; int numberOfChildren = parent.getNumberOfChildren(); this.connectedNodes_[0] = connection; if((numberOfChildren-childStartingIndex)==2) { this.connectedNodes_[1] = new Connection(this,parent.getChild(childStartingIndex), EXPAND_CONSTRUCTION); this.connectedNodes_[2] = new Connection(this,parent.getChild(childStartingIndex+1), EXPAND_CONSTRUCTION); } else { this.connectedNodes_[1] = new Connection(this,parent.getChild(childStartingIndex), EXPAND_CONSTRUCTION); this.connectedNodes_[2] = new Connection(this,parent,childStartingIndex+1,0,null); } } /** * The already unrooted tree constructor. 
* @param peer The root of the tree (expects three or more children) * @param constructionMode The construction mode * @throws IllegalArgumentException if peer has less than three children */ public UndirectedNode(SimpleNode peer, int constructionMode) { final int numberOfChildren = peer.getNumberOfChildren(); if(numberOfChildren<=2) { throw new IllegalArgumentException("Peer must have at least three children!"); } this.palPeer_ = peer.getPALPeer(); this.label_ = peer.getLabel(); this.annotation_ = peer.getLabel(); if(constructionMode==REDUCE_CONSTRUCTION) { int numberOfReducedChildren = countReducedChildren(peer); this.connectedNodes_ = new Connection[numberOfReducedChildren]; for(int i = 0 ;i < numberOfReducedChildren ; i++) { Connection c = new Connection(this,getReducedChild(peer, i), REDUCE_CONSTRUCTION); this.connectedNodes_[i] = c; } } else if((constructionMode==MIMIC_CONSTRUCTION)||(numberOfChildren<=3)) { //Plus one for parent connection this.connectedNodes_ = new Connection[numberOfChildren]; for(int i = 0 ; i< numberOfChildren ; i++) { this.connectedNodes_[i] = new Connection(this,peer.getChild(i),constructionMode); } } else { //Expand construction this.connectedNodes_ = new Connection[3]; this.connectedNodes_[0] = new Connection(this,peer.getChild(0), constructionMode); this.connectedNodes_[1] = new Connection(this,peer.getChild(1), constructionMode); this.connectedNodes_[2] = new Connection(this,peer,2, 0,null); } } private UndirectedNode( int constructionMode, Connection parentConnection, SimpleNode peer) { this.palPeer_ = peer.getPALPeer(); this.label_ = peer.getLabel(); this.annotation_ = peer.getAnnotation(); final int numberOfChildren = peer.getNumberOfChildren(); if(constructionMode==REDUCE_CONSTRUCTION) { int numberOfReducedChildren = countReducedChildren(peer); this.connectedNodes_ = new Connection[numberOfReducedChildren+1]; this.connectedNodes_[0] = parentConnection; for(int i = 0 ;i < numberOfReducedChildren ; i++) { Connection c = new Connection(this, getReducedChild(peer, i),REDUCE_CONSTRUCTION); this.connectedNodes_[i+1] = c; } } else if((constructionMode==MIMIC_CONSTRUCTION)||(numberOfChildren<=2)) { //Plus one for parent connection this.connectedNodes_ = new Connection[numberOfChildren+1]; this.connectedNodes_[0] = parentConnection; for(int i = 0 ; i< numberOfChildren ; i++) { this.connectedNodes_[i+1] = new Connection(this,peer.getChild(i),constructionMode); } } else { this.connectedNodes_ = new Connection[3]; this.connectedNodes_[0] = parentConnection; this.connectedNodes_[1] = new Connection(this,peer.getChild(0), constructionMode); this.connectedNodes_[2] = new Connection(this, peer, 1,0, null); } } private UndirectedNode(UndirectedNode orginal, Connection attachmentPoint, SimpleNode subTree, int constructionModel, Connection parent) { throw new RuntimeException("Not implemented yet!"); } public final UndirectedNode getAttached( Connection attachmentPoint, SimpleNode subTree, int constructionMode, Connection parent ) { return new UndirectedNode(this,attachmentPoint,subTree, constructionMode,parent); } private static final int countReducedChildren(SimpleNode base) { int count = 0; int childCount = base.getNumberOfChildren(); for(int i = 0 ; i < childCount ; i++) { SimpleNode c = base.getChild(i); if(!c.isLeaf()&&c.getParentBranchLength()<=BranchLimits.MINARC) { count+=countReducedChildren(c); } else { count++; } } return count; } private static final SimpleNode getReducedChild(SimpleNode base, int childIndex){ int childCount = base.getNumberOfChildren(); 
for(int i = 0 ; i < childCount ; i++) { SimpleNode c = base.getChild(i); if(!c.isLeaf()&&c.getParentBranchLength()<=BranchLimits.MINARC) { SimpleNode rc = getReducedChild(c,childIndex); if(rc!=null) { return rc; } childIndex-=countReducedChildren(c); } else { if(childIndex == 0) { return c; } childIndex--; } } return null; } public void instruct(UnrootedTreeInterface.UNode node, Connection callingConnection) { if(label_!=null) { node.setLabel(label_); } if(annotation_!=null) { node.setAnnotation(annotation_); } for(int i = 0 ; i < connectedNodes_.length ; i++) { Connection c = connectedNodes_[i]; if(c!=callingConnection) { c.instruct(node.createUChild().getParentUBranch(),this); } } } public final int getNumberOfMatchingLeaves(String[] leafSet, Connection caller) { if(isLeaf()) { return contains(leafSet,label_) ? 1 : 0; } else{ int count = 0; for(int i = 0 ; i < connectedNodes_.length ; i++) { Connection c = connectedNodes_[i]; if(c!=caller) { count+=c.getNumberOfMatchingLeaves(leafSet, this); } } return count; } } public int getExactCladeCount(String[] possibleCladeMembers,Connection caller) { if(isLeaf()) { return (pal.misc.Utils.isContains(possibleCladeMembers, label_) ? 1 : 0 ); } int count = 0; for(int i = 0 ; i < connectedNodes_.length ; i++) { Connection c = connectedNodes_[i]; if(c!=caller) { int subCount = c.getExactCladeCount(possibleCladeMembers,this); if(subCount<0) { return -1; } if(subCount==0) { if(count>0) { return -1; } } else if(i==0) { count=subCount; } else if(count==0) { return -1; } else { count+=subCount; } } } return count; } public void instruct(RootedTreeInterface.RNode base, Connection callingConnection) { if(label_!=null) { base.setLabel(label_); } if(annotation_!=null) { base.setAnnotation(annotation_); } for(int i = 0 ; i < connectedNodes_.length ; i++) { Connection c = connectedNodes_[i]; if(c!=callingConnection) { c.instruct(base.createRChild().getParentRBranch(),this); } } } public Connection getPeerParentConnection() { return connectedNodes_[0]; } private void assertCallingConnection(final Connection callingConnection) { boolean found = false; for(int i = 0 ; i < connectedNodes_.length ; i++ ){ if(connectedNodes_[i]==callingConnection) { found = true; break; } } if(!found) { throw new RuntimeException("Assertion error : calling connection not one of my connections"); } } public void callMethodOnConnections(Connection callingConnection, ConnectionMethodCaller caller) { assertCallingConnection(callingConnection); for(int i = 0 ; i < connectedNodes_.length ; i++ ){ if(connectedNodes_[i]!=callingConnection) { caller.callOn(connectedNodes_[i],this); } } } public int getNumberOfConnections() { return getNumberOfConnections(null); } public int getNumberOfConnections(Connection callingConnection) { int count = 0; for(int i = 0 ; i < connectedNodes_.length ; i++ ){ Connection c = connectedNodes_[i]; if(c!=callingConnection) { count+=c.getNumberOfConnections(this); } } return count; } public final void addLabels(ArrayList store, Connection callingConnection) { int count = 0; if(connectedNodes_.length==1) { if(callingConnection!=connectedNodes_[0]) { throw new RuntimeException("Assertion error : calling connection not recognised"); } store.add(label_); } else { for( int i = 0; i