java miner test cases

This commit is contained in:
fixminer
2020-04-11 20:54:48 +02:00
parent d9620acef6
commit e25310a882
78 changed files with 29923 additions and 644 deletions
+893 -570
View File
File diff suppressed because it is too large Load Diff
@@ -56,6 +56,7 @@ public abstract class AbstractJdtVisitor extends ASTVisitor {
}
protected void push(int type, String typeName, String label, int startPosition, int length) {
// label = label.replace("\n","");
ITree t = context.createTree(type, label, typeName);
t.setPos(startPosition);
t.setLength(length);
@@ -985,7 +985,13 @@ public class ExpJdtVisitor extends CdJdtVisitor {
private void visitList(List<?> list) {
for (Object obj : list) {
ASTNode node = (ASTNode) obj;
(node).accept(this);
if (node instanceof Block) {
List<?> statements = ((Block) node).statements();
visitList(statements);
} else {
node.accept(this);
}
// (node).accept(this);
}
}
///////////////////
@@ -995,12 +1001,12 @@ public class ExpJdtVisitor extends CdJdtVisitor {
@Override
public boolean visit(CatchClause node) {
pushNode(node, node.getException().toString());
pushNode(node, node.toString().replace("\n",""));
SingleVariableDeclaration exc = node.getException();
exc.accept(this);
Statement body = node.getBody();
if (body != null) {
// push(8, "Block", "CatchBody", body.getStartPosition(), body.getLength());
// push(8, "Block", "CatchBody:"+body.toString().replace("\n",""), body.getStartPosition(), body.getLength());
visitBody(body);
// popNode();
}
@@ -1046,7 +1052,8 @@ public class ExpJdtVisitor extends CdJdtVisitor {
@Override
public boolean visit(DoStatement node) {
Expression exp = node.getExpression();
pushNode(node, exp.getClass().getSimpleName() + COLON + exp.toString());
// pushNode(node, exp.getClass().getSimpleName() + COLON + exp.toString());
pushNode(node, node.toString().replace("\n",""));
Statement body = node.getBody();
if (body != null) {
// push(8, "Block", "DoBody", body.getStartPosition(), body.getLength());
@@ -1070,7 +1077,8 @@ public class ExpJdtVisitor extends CdJdtVisitor {
public boolean visit(EnhancedForStatement node) {
SingleVariableDeclaration parameter = node.getParameter();
Expression exp = node.getExpression();
pushNode(node, parameter.toString() + ", " + exp.getClass().getSimpleName() + COLON + exp.toString());
// pushNode(node, parameter.toString() + ", " + exp.getClass().getSimpleName() + COLON + exp.toString());
pushNode(node, node.toString().replace("\n",""));
parameter.accept(this);
exp.accept(this);
Statement body = node.getBody();
@@ -1103,7 +1111,8 @@ public class ExpJdtVisitor extends CdJdtVisitor {
}
value += update.toString();
pushNode(node, value);
// pushNode(node, value);
pushNode(node, node.toString().replace("\n",""));
visitList(init);
if (exp != null) {
exp.accept(this);
@@ -1123,27 +1132,29 @@ public class ExpJdtVisitor extends CdJdtVisitor {
@Override
public boolean visit(IfStatement node) {
Expression exp = node.getExpression();
pushNode(node, exp.getClass().getSimpleName() + COLON + exp.toString());
// pushNode(node, exp.getClass().getSimpleName() + COLON + exp.toString());
pushNode(node, node.toString().replace("\n",""));
exp.accept(this);
Statement stmt = node.getThenStatement();
if (stmt != null) {
// push(8, "Block", "ThenBody", stmt.getStartPosition(), stmt.getLength());
push(8, "Block", "ThenBody:"+stmt.toString().replace("\n",""), stmt.getStartPosition(), stmt.getLength());
visitBody(stmt);
// popNode();
popNode();
}
stmt = node.getElseStatement();
if (stmt != null) {
// push(8, "Block", "ElseBody", stmt.getStartPosition(), stmt.getLength());
push(8, "Block", "ElseBody:"+stmt.toString().replace("\n",""), stmt.getStartPosition(), stmt.getLength());
visitBody(stmt);
// popNode();
popNode();
}
return false;
}
@Override
public boolean visit(LabeledStatement node) {
pushNode(node, node.getLabel().getFullyQualifiedName());
// pushNode(node, node.getLabel().getFullyQualifiedName());
pushNode(node,node.toString().replace("\n",""));
Statement body = node.getBody();
if (body != null) {
// push(8, "Block", "LabelBody", body.getStartPosition(), body.getLength());
@@ -1179,7 +1190,8 @@ public class ExpJdtVisitor extends CdJdtVisitor {
public boolean visit(SwitchCase node) {
Expression exp = node.getExpression();
if (exp != null) {
pushNode(node, exp.getClass().getSimpleName() + COLON + exp.toString());
// pushNode(node, exp.getClass().getSimpleName() + COLON + exp.toString());
pushNode(node, node.toString().replace("\n",""));
exp.accept(this);
} else {
pushNode(node, "default");
@@ -1190,7 +1202,8 @@ public class ExpJdtVisitor extends CdJdtVisitor {
@Override
public boolean visit(SwitchStatement node) {
Expression exp = node.getExpression();
pushNode(node, exp.getClass().getSimpleName() + COLON + exp.toString());
// pushNode(node, exp.getClass().getSimpleName() + COLON + exp.toString());
pushNode(node, node.toString().replace("\n",""));
exp.accept(this);
// int startPosition = exp.getStartPosition();
// int length1 = exp.getLength();
@@ -1204,13 +1217,14 @@ public class ExpJdtVisitor extends CdJdtVisitor {
@Override
public boolean visit(SynchronizedStatement node) {
Expression exp = node.getExpression();
pushNode(node, exp.getClass().getSimpleName() + COLON + exp.toString());
// pushNode(node, exp.getClass().getSimpleName() + COLON + exp.toString());
pushNode(node, node.toString().replace("\n",""));
exp.accept(this);
Statement body = node.getBody();
if (body != null) {
// push(8, "Block", "SyncBody", body.getStartPosition(), body.getLength());
push(8, "Block", "SyncBody:"+body.toString().replace("\n",""), body.getStartPosition(), body.getLength());
visitBody(body);
// popNode();
popNode();
}
// visitBody(node.getBody());
return false;
@@ -1227,7 +1241,8 @@ public class ExpJdtVisitor extends CdJdtVisitor {
@Override
public boolean visit(TryStatement node) {
List<?> resources = node.resources();
pushNode(node, "try:" + resources.toString());
// pushNode(node, "try:" + resources.toString());
pushNode(node, node.toString().replace("\n",""));
visitList(resources);
Statement body = node.getBody();
@@ -1242,7 +1257,7 @@ public class ExpJdtVisitor extends CdJdtVisitor {
Statement stmt = node.getFinally();
if (stmt != null) {
push(8, "Block", "FinallyBody", stmt.getStartPosition(), stmt.getLength());
push(8, "Block", "FinallyBody:"+stmt.toString().replace("\n",""), stmt.getStartPosition(), stmt.getLength());
visitBody(stmt);
popNode();
}
@@ -1272,14 +1287,15 @@ public class ExpJdtVisitor extends CdJdtVisitor {
@Override
public boolean visit(WhileStatement node) {
Expression exp = node.getExpression();
pushNode(node, exp.getClass().getSimpleName() + COLON + exp.toString());
// pushNode(node, exp.getClass().getSimpleName() + COLON + exp.toString());
pushNode(node, node.toString().replace("\n",""));
exp.accept(this);
Statement body = node.getBody();
if (body != null) {
// push(8, "Block", "WhileBody", body.getStartPosition(), body.getLength());
push(8, "Block", "WhileBody:"+body.toString().replace("\n",""), body.getStartPosition(), body.getLength());
visitBody(body);
// popNode();
popNode();
}
return false;
}
@@ -31,9 +31,12 @@ public class GumTreeComparer {
} catch (Exception e) {
if (oldTree == null) {
log.info("Null GumTree of Previous File: " + prevFile.getPath());
throw new NullPointerException(prevFile.getPath());
} else if (newTree == null) {
log.info("Null GumTree of Revised File: " + revFile.getPath());
throw new NullPointerException(revFile.getPath());
}
}
if (oldTree != null && newTree != null) {
Matcher m = Matchers.getInstance().getMatcher(oldTree, newTree);
@@ -18,7 +18,7 @@ public class GumTreeGenerator {
RAW_TOKEN,
}
public ITree generateITreeForJavaFile(File javaFile, GumTreeType type) {
public ITree generateITreeForJavaFile(File javaFile, GumTreeType type) throws IOException {
ITree gumTree = null;
try {
TreeContext tc = null;
@@ -37,7 +37,8 @@ public class GumTreeGenerator {
gumTree = tc.getRoot();
}
} catch (IOException e) {
e.printStackTrace();
throw new IOException(e);
// e.printStackTrace();
}
return gumTree;
}
Binary file not shown.
Binary file not shown.
Binary file not shown.
+1 -1
View File
@@ -104,7 +104,7 @@ def getMapping(pathMapping,x):
def cluster(clusterPath,pairsPath, level,rootType):
def cluster(clusterPath,pairsPath, level):
try:
# logging.info('Parameters: \ninputPath %s \nclusterPath %s \nport %s \nmatchesName %s \nthreshold %s \n%indexFile',inputPath,clusterPath,str(port),matchesName,str(threshold),indexFile)
Binary file not shown.
+16 -16
View File
@@ -178,22 +178,22 @@ def getRun():
# def shellCallTemplate(cmd,enc='utf-8'):
# process = subprocess.Popen(cmd,
# stdout=subprocess.PIPE,stderr=PIPE, shell=True,encoding=enc,
# universal_newlines=True)
#
# while True:
# output = process.stdout.readline()
# print(output.strip())
# # Do something else
# return_code = process.poll()
# if return_code is not None:
# print('RETURN CODE', return_code)
# # Process has finished, read rest of the output
# for output in process.stdout.readlines():
# print(output.strip())
# break
def shellCallTemplate4jar(cmd, enc='utf-8'):
    """Run *cmd* in a shell and echo its stdout line by line until it exits.

    Intended for long-running jar invocations whose output should stream
    to the console as it is produced. Output is printed, not collected,
    so the function returns None.

    :param cmd: shell command line to execute
    :param enc: text encoding used to decode the child's output
    """
    proc = subprocess.Popen(cmd,
                            stdout=subprocess.PIPE, stderr=PIPE, shell=True,
                            encoding=enc, universal_newlines=True)
    while True:
        line = proc.stdout.readline()
        print(line.strip())
        rc = proc.poll()
        if rc is None:
            # Child still running: keep streaming.
            continue
        # Child finished: report its exit code and drain any remaining output.
        print('RETURN CODE', rc)
        for line in proc.stdout.readlines():
            print(line.strip())
        break
def shellCallTemplate(cmd,enc='utf-8'):
try:
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
+5 -5
View File
@@ -15,7 +15,7 @@ if __name__ == '__main__':
setEnv(args)
job = args.job
job = "cluster"
# job = "cluster"
ROOT_DIR = os.environ["ROOT_DIR"]
REPO_PATH = os.environ["REPO_PATH"]
CODE_PATH = os.environ["CODE_PATH"]
@@ -32,7 +32,7 @@ if __name__ == '__main__':
# subject = 'ALL'
rootType = 'if'
# rootType = 'if'
print(job)
@@ -46,7 +46,7 @@ if __name__ == '__main__':
elif job =='dataset4c':
from otherDatasets import core
core()
elif job =='richEditScript':
elif job =='richedit':
dbDir = join(DATA_PATH, 'redis')
stopDB(dbDir, REDIS_PORT)
cmd = "JAVA_HOME='" + jdk8 + "' java -jar " + join(Path(ROOT_DIR).parent, 'target','FixPatternMiner-1.0.0-jar-with-dependencies.jar') + " " + args.prop + " RICHEDITSCRIPT "
@@ -71,7 +71,7 @@ if __name__ == '__main__':
elif job =='compare':
# cmd = "mvn exec:java -f '/data/fixminer_source/' -Dexec.mainClass='edu.lu.uni.serval.richedit.akka.compare.CompareTrees' -Dexec.args='"+ " shape " + join(DATA_PATH,"redis") +" ALLdumps-gumInput.rdb " + "clusterl0-gumInputALL.rdb /data/richedit-core/python/data/richEditScript'"
cmd = "JAVA_HOME='" + jdk8 + "' java -jar " + join(Path(ROOT_DIR).parent, 'target','FixPatternMiner-1.0.0-jar-with-dependencies.jar') + " " + args.prop + " COMPARE "
output = shellCallTemplate(cmd)
output = shellCallTemplate4jar(cmd)
logging.info(output)
# elif job == 'clusterAdditional':
@@ -83,7 +83,7 @@ if __name__ == '__main__':
dbDir = join(DATA_PATH, 'redis')
startDB(dbDir, "6399", PROJECT_TYPE)
cluster(join(DATA_PATH,'shapes'),join(DATA_PATH, 'pairs'),'shapes',rootType)
cluster(join(DATA_PATH,'shapes'),join(DATA_PATH, 'pairs'),'shapes')
# elif job =='actionSI':
# from pairs import actionPairs
@@ -69,11 +69,11 @@ public class EDiffParser extends Parser {
if (isJava){
allActionSets = new HierarchicalRegrouper().regroupGumTreeResults(gumTreeResults);
}else{
HashSet<Integer> removeType = new HashSet<Integer>(Arrays.asList(171,172,99,100,101,102));
boolean b = gumTreeResults.stream().anyMatch(p -> removeType.contains(p.getNode().getType()));
if(b){
return actionSets;
}
// HashSet<Integer> removeType = new HashSet<Integer>(Arrays.asList(171,172,99,100,101,102));
// boolean b = gumTreeResults.stream().anyMatch(p -> removeType.contains(p.getNode().getType()));
// if(b){
// return actionSets;
// }
allActionSets = new HierarchicalRegrouperForC().regroupGumTreeResults(gumTreeResults);
}
@@ -8,6 +8,8 @@ import edu.lu.uni.serval.utils.ListSorter;
import java.util.ArrayList;
import java.util.List;
import java.util.function.Predicate;
import java.util.stream.Collectors;
/**
* Regroup GumTree results to a hierarchical construction.
@@ -21,10 +23,8 @@ public class HierarchicalRegrouper {
/*
* First, sort actions by their positions.
*/
// List<Action> actions = new ListSorter<Action>(actionsArgu).sortAscending();
// if (actions == null) {
// actions = actionsArgu;
// }
actions = new ListSorter<Action>(actions).sortAscending();
/*
* Second, group actions by their positions.
@@ -65,9 +65,111 @@ public class HierarchicalRegrouper {
}
}
return reActionSets;
List<HierarchicalActionSet> reActionSets1 = new ArrayList<>();
ITree movDelNode = null;
for(HierarchicalActionSet a:reActionSets){
a = removeBlocks(a);
HierarchicalActionSet hierarchicalActionSet1 = postOrder(a).stream().collect(Collectors.toList()).get(0);
Action action = hierarchicalActionSet1.getAction();
if (hierarchicalActionSet1.getSubActions().size() == 0 && action instanceof Update){
List<ITree> collect = hierarchicalActionSet1.getNode().getChildren().stream().filter(x -> x.getType() == 14).collect(Collectors.toList());
// if(hierarchicalActionSet1.getNode().getChildren().size() == 1){
if(collect.size() == 1){
// if(hierarchicalActionSet1.getNode().getChild(0).getType() == 14){
if(collect.get(0).getType() == 14){
continue;
}
}
}
else{
Predicate<HierarchicalActionSet> predicate = x->x.getAction() instanceof Move;
List<HierarchicalActionSet> collect = postOrder(a).stream().filter(predicate).collect(Collectors.toList());
if(collect.size() == 1){
HierarchicalActionSet hierarchicalActionSet = collect.get(0);
movDelNode = hierarchicalActionSet.getNode().getParent();
reActionSets1.add(a);
continue;
}
}
if( movDelNode != null){
if(a.getNode().equals(movDelNode)){
continue;
}
}
reActionSets1.add(a);
}
return reActionSets1;
// return reActionSets;
}
/**
 * Reports whether the given action set's AST node type denotes a
 * statement-level construct (type/field/enum/method declarations,
 * statements, constructor invocations, catch clauses, switch cases).
 *
 * @param actSet the action set whose AST node type string is inspected
 * @return true when the node type ends with one of the statement-like suffixes
 */
private boolean isStatement(HierarchicalActionSet actSet){
    final String nodeType = actSet.getAstNodeType();
    // Suffix match, so qualified or prefixed type names are recognized too.
    final String[] statementSuffixes = {
            "TypeDeclaration", "FieldDeclaration", "EnumDeclaration",
            "MethodDeclaration", "Statement", "ConstructorInvocation",
            "CatchClause", "SwitchCase"
    };
    for (String suffix : statementSuffixes) {
        if (nodeType.endsWith(suffix)) {
            return true;
        }
    }
    return false;
}
Predicate<HierarchicalActionSet> predicate = x-> isStatement(x);
/**
 * Collapses redundant single-child wrapper levels around an UPD action
 * (presumably Block wrappers, per the method name — TODO confirm): while
 * this action set has exactly one sub-action whose Action has the same
 * class and the parent's action name is "UPD", and the child's subtree
 * contains a statement-level action carrying the child's action name,
 * the wrapper is dropped and the child becomes the new root.
 *
 * @param actionSet root action set to simplify
 * @return the innermost equivalent action set (may be {@code actionSet} itself)
 */
private HierarchicalActionSet removeBlocks(HierarchicalActionSet actionSet){
List<HierarchicalActionSet> subActions = actionSet.getSubActions();
Action action = actionSet.getAction();
if (subActions.size() == 1){
HierarchicalActionSet subaction = subActions.get(0);
// Statement-level descendants of the sole child (filtered by the class-level 'predicate' field).
List<HierarchicalActionSet> collect = postOrder(subaction).stream().filter(predicate).collect(Collectors.toList());
if(collect.size() == 0){
// No statement-level action below: nothing to collapse.
return actionSet;
}
// Only collapse when some statement-level descendant carries the same action name as the child.
boolean b = collect.stream().anyMatch(p -> p.getAction().getName().equals(subActions.get(0).getAction().getName()));
if(!b){
return actionSet;
}
Action action1 = subaction.getAction();
// Unwrap only same-class nesting rooted at an UPD, then recurse for deeper wrappers.
if(action.getClass().equals(action1.getClass()) && action.getName().equals("UPD")) {
subaction.setParent(null); // detach so the child becomes a standalone root
return removeBlocks(subaction);
}
}
return actionSet;
}
/**
 * Flattens the action-set tree rooted at {@code a} into post-order:
 * descendants appear before their parent, the root comes last.
 *
 * @param a root of the hierarchical action set to traverse
 * @return a freshly allocated list of all nodes in post-order
 */
public List<HierarchicalActionSet> postOrder(HierarchicalActionSet a) {
    final List<HierarchicalActionSet> ordered = new ArrayList<>();
    getAllSubActions(a, ordered);
    return ordered;
}
/**
 * Recursively appends every action set in the subtree rooted at {@code a}
 * to {@code as} in post-order: all descendants first, then {@code a} itself.
 *
 * @param a  current subtree root
 * @param as accumulator receiving nodes in post-order
 */
private void getAllSubActions(HierarchicalActionSet a,List<HierarchicalActionSet> as) {
List<HierarchicalActionSet> subActions = a.getSubActions();
if (subActions.size() != 0){
for (HierarchicalActionSet s : subActions) {
// Visit children before adding the current node (post-order).
getAllSubActions(s, as);
}
}
as.add(a);
// List<HierarchicalActionSet> b = new ArrayList<HierarchicalActionSet>();
// for (HierarchicalActionSet child: this.getSubActions())
// b.add(child);
// return b;
}
private HierarchicalActionSet createActionSet(Action act, Action parentAct, HierarchicalActionSet parent) {
HierarchicalActionSet actionSet = new HierarchicalActionSet();
actionSet.setAction(act);
@@ -129,19 +231,19 @@ public class HierarchicalRegrouper {
}
private boolean isPossibileSubAction(Action parent, Action child) {
if ((parent instanceof Update && !(child instanceof Addition))
|| (parent instanceof Delete && child instanceof Delete)
|| (parent instanceof Insert && (child instanceof Insert))) {
int startPosition = child.getPosition();
int length = child.getLength();
int startPosition2 = parent.getPosition();
int length2 = parent.getLength();
if (!(startPosition2 <= startPosition && startPosition + length <= startPosition2 + length2)) {
// when act is not the sub-set of action.
return false;
}
}
// if ((parent instanceof Update && !(child instanceof Addition))
// || (parent instanceof Delete && child instanceof Delete)
// || (parent instanceof Insert && (child instanceof Insert))) {
// int startPosition = child.getPosition();
// int length = child.getLength();
// int startPosition2 = parent.getPosition();
// int length2 = parent.getLength();
//
// if (!(startPosition2 <= startPosition && startPosition + length <= startPosition2 + length2)) {
// // when act is not the sub-set of action.
// return false;
// }
// }
return true;
}
@@ -214,9 +316,9 @@ public class HierarchicalRegrouper {
}
private boolean areRelatedActions(Action parent, Action child) {
if (parent instanceof Move && !(child instanceof Move)) {// If action is MOV, its children must be MOV.
return false;
}
// if (parent instanceof Move && !(child instanceof Move)) {// If action is MOV, its children must be MOV.
// return false;
// }
if (parent instanceof Delete && !(child instanceof Delete)) {// If action is INS, its children must be MOV or INS.
return false;
}
+28
View File
@@ -0,0 +1,28 @@
java:
8home: /Library/Java/JavaVirtualMachines/jdk1.8.0_181.jdk/Contents/Home
spinfer:
home: /Users/anilkoyuncu/projects/fixminer/spinfer/spinfer.native
coccinelle:
home: /Users/anilkoyuncu/projects/fixminer/spinfer/statics
dataset:
# home: /Users/anilkoyuncu/projects/fixminer/fixminer-core/python/data/gumInputLinux
inputPath : /Users/anilkoyuncu/projects/test/fixminer-data/patches
repo: /Users/anilkoyuncu/projects/test/fixminer-data/datasets
fixminer:
projectType : java
datapath: /Users/anilkoyuncu/projects/test/fixminer-data/
pjName : patches
portDumps : 6399
numOfWorkers : 14
hostname : localhost
hunkLimit : 2
patchSize : 50
projectList : spring-shell,fuse,metadata,commons-codec,commons-collections,commons-compress,commons-configuration,commons-crypto,commons-csv
inputPath : /Users/anilkoyuncu/projects/test/fixminer-data/patches
redisPath : /Users/anilkoyuncu/projects/release/test/fixminer_source/python/data/redis
srcMLPath : /usr/local/bin/srcml
@@ -0,0 +1,87 @@
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.commons.codec;
import java.util.Comparator;
/**
* Compares Strings using a {@link StringEncoder}. This comparator is used to sort Strings by an encoding scheme such as
* Soundex, Metaphone, etc. This class can come in handy if one need to sort Strings by an encoded form of a name such
* as Soundex.
*
* <p>This class is immutable and thread-safe.</p>
*
* @version $Id$
*/
// Raw (non-generic) Comparator: this is the pre-generics form of the API.
public class StringEncoderComparator implements Comparator {
/**
* Internal encoder instance.
*/
private final StringEncoder stringEncoder;
/**
* Constructs a new instance.
*
* @deprecated Creating an instance without a {@link StringEncoder} leads to a {@link NullPointerException}. Will be
* removed in 2.0.
*/
@Deprecated
public StringEncoderComparator() {
this.stringEncoder = null; // Trying to use this will cause things to break
}
/**
* Constructs a new instance with the given algorithm.
*
* @param stringEncoder
* the StringEncoder used for comparisons.
*/
public StringEncoderComparator(final StringEncoder stringEncoder) {
this.stringEncoder = stringEncoder;
}
/**
* Compares two strings based not on the strings themselves, but on an encoding of the two strings using the
* StringEncoder this Comparator was created with.
*
* If an {@link EncoderException} is encountered, return <code>0</code>.
*
* @param o1
* the object to compare
* @param o2
* the object to compare to
* @return the Comparable.compareTo() return code or 0 if an encoding error was caught.
* @see Comparable
*/
@Override
public int compare(final Object o1, final Object o2) {
int compareCode = 0;
try {
// Compare the encoded forms (e.g. Soundex codes), not the raw inputs.
final Comparable s1 = (Comparable) this.stringEncoder.encode(o1);
final Comparable s2 = (Comparable) this.stringEncoder.encode(o2);
compareCode = s1.compareTo(s2);
} catch (final EncoderException ee) {
// Encoding failures are deliberately treated as "equal" (0), per the contract above.
compareCode = 0;
}
return compareCode;
}
}
@@ -0,0 +1,89 @@
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.commons.codec;
import java.util.Comparator;
/**
* Compares Strings using a {@link StringEncoder}. This comparator is used to sort Strings by an encoding scheme such as
* Soundex, Metaphone, etc. This class can come in handy if one need to sort Strings by an encoded form of a name such
* as Soundex.
*
* <p>This class is immutable and thread-safe.</p>
*
* @version $Id$
*/
// Raw Comparator retained for backward compatibility; the warning is suppressed here
// rather than generifying the public API.
@SuppressWarnings("rawtypes")
public class StringEncoderComparator implements Comparator {
/**
* Internal encoder instance.
*/
private final StringEncoder stringEncoder;
/**
* Constructs a new instance.
*
* @deprecated Creating an instance without a {@link StringEncoder} leads to a {@link NullPointerException}. Will be
* removed in 2.0.
*/
@Deprecated
public StringEncoderComparator() {
this.stringEncoder = null; // Trying to use this will cause things to break
}
/**
* Constructs a new instance with the given algorithm.
*
* @param stringEncoder
* the StringEncoder used for comparisons.
*/
public StringEncoderComparator(final StringEncoder stringEncoder) {
this.stringEncoder = stringEncoder;
}
/**
* Compares two strings based not on the strings themselves, but on an encoding of the two strings using the
* StringEncoder this Comparator was created with.
*
* If an {@link EncoderException} is encountered, return <code>0</code>.
*
* @param o1
* the object to compare
* @param o2
* the object to compare to
* @return the Comparable.compareTo() return code or 0 if an encoding error was caught.
* @see Comparable
*/
@SuppressWarnings("unchecked")
@Override
public int compare(final Object o1, final Object o2) {
int compareCode = 0;
try {
// Compare the encoded forms (e.g. Soundex codes), not the raw inputs.
final Comparable s1 = (Comparable) this.stringEncoder.encode(o1);
final Comparable s2 = (Comparable) this.stringEncoder.encode(o2);
compareCode = s1.compareTo(s2);
} catch (final EncoderException ee) {
// Encoding failures are deliberately treated as "equal" (0), per the contract above.
compareCode = 0;
}
return compareCode;
}
}
@@ -0,0 +1,841 @@
/*
* $Header: /home/jerenkrantz/tmp/commons/commons-convert/cvs/home/cvs/jakarta-commons//collections/src/java/org/apache/commons/collections/BeanMap.java,v 1.25 2003/12/05 20:23:57 scolebourne Exp $
* ====================================================================
*
* The Apache Software License, Version 1.1
*
* Copyright (c) 2001-2003 The Apache Software Foundation. All rights
* reserved.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions
* are met:
*
* 1. Redistributions of source code must retain the above copyright
* notice, this list of conditions and the following disclaimer.
*
* 2. Redistributions in binary form must reproduce the above copyright
* notice, this list of conditions and the following disclaimer in
* the documentation and/or other materials provided with the
* distribution.
*
* 3. The end-user documentation included with the redistribution, if
* any, must include the following acknowledgement:
* "This product includes software developed by the
* Apache Software Foundation (http://www.apache.org/)."
* Alternately, this acknowledgement may appear in the software itself,
* if and wherever such third-party acknowledgements normally appear.
*
* 4. The names "The Jakarta Project", "Commons", and "Apache Software
* Foundation" must not be used to endorse or promote products derived
* from this software without prior written permission. For written
* permission, please contact apache@apache.org.
*
* 5. Products derived from this software may not be called "Apache"
* nor may "Apache" appear in their names without prior written
* permission of the Apache Software Foundation.
*
* THIS SOFTWARE IS PROVIDED ``AS IS'' AND ANY EXPRESSED OR IMPLIED
* WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES
* OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
* DISCLAIMED. IN NO EVENT SHALL THE APACHE SOFTWARE FOUNDATION OR
* ITS CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
* SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
* LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF
* USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND
* ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
* OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT
* OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF
* SUCH DAMAGE.
* ====================================================================
*
* This software consists of voluntary contributions made by many
* individuals on behalf of the Apache Software Foundation. For more
* information on the Apache Software Foundation, please see
* <http://www.apache.org/>.
*
*/
package org.apache.commons.collections;
import java.beans.BeanInfo;
import java.beans.IntrospectionException;
import java.beans.Introspector;
import java.beans.PropertyDescriptor;
import java.lang.reflect.Constructor;
import java.lang.reflect.InvocationTargetException;
import java.lang.reflect.Method;
import java.util.AbstractMap;
import java.util.AbstractSet;
import java.util.ArrayList;
import java.util.Collection;
import java.util.HashMap;
import java.util.Iterator;
import java.util.Set;
import org.apache.commons.collections.list.UnmodifiableList;
import org.apache.commons.collections.keyvalue.AbstractMapEntry;
import org.apache.commons.collections.set.UnmodifiableSet;
/**
* An implementation of Map for JavaBeans which uses introspection to
* get and put properties in the bean.
* <p>
* If an exception occurs during attempts to get or set a property then the
* property is considered non existent in the Map
*
* @since Commons Collections 1.0
* @version $Revision: 1.25 $ $Date: 2003/12/05 20:23:57 $
*
* @author James Strachan
* @author Stephen Colebourne
*/
public class BeanMap extends AbstractMap implements Cloneable {
private transient Object bean;
private transient HashMap readMethods = new HashMap();
private transient HashMap writeMethods = new HashMap();
private transient HashMap types = new HashMap();
/**
* An empty array. Used to invoke accessors via reflection.
*/
public static final Object[] NULL_ARGUMENTS = {};
/**
* Maps primitive Class types to transformers. The transformer
* transform strings into the appropriate primitive wrapper.
*/
public static HashMap defaultTransformers = new HashMap();
static {
defaultTransformers.put(
Boolean.TYPE,
new Transformer() {
public Object transform( Object input ) {
return Boolean.valueOf( input.toString() );
}
}
);
defaultTransformers.put(
Character.TYPE,
new Transformer() {
public Object transform( Object input ) {
return new Character( input.toString().charAt( 0 ) );
}
}
);
defaultTransformers.put(
Byte.TYPE,
new Transformer() {
public Object transform( Object input ) {
return Byte.valueOf( input.toString() );
}
}
);
defaultTransformers.put(
Short.TYPE,
new Transformer() {
public Object transform( Object input ) {
return Short.valueOf( input.toString() );
}
}
);
defaultTransformers.put(
Integer.TYPE,
new Transformer() {
public Object transform( Object input ) {
return Integer.valueOf( input.toString() );
}
}
);
defaultTransformers.put(
Long.TYPE,
new Transformer() {
public Object transform( Object input ) {
return Long.valueOf( input.toString() );
}
}
);
defaultTransformers.put(
Float.TYPE,
new Transformer() {
public Object transform( Object input ) {
return Float.valueOf( input.toString() );
}
}
);
defaultTransformers.put(
Double.TYPE,
new Transformer() {
public Object transform( Object input ) {
return Double.valueOf( input.toString() );
}
}
);
}
// Constructors
//-------------------------------------------------------------------------
/**
* Constructs a new empty <code>BeanMap</code>.
*/
public BeanMap() {
}
/**
* Constructs a new <code>BeanMap</code> that operates on the
* specified bean. If the given bean is <code>null</code>, then
* this map will be empty.
*
* @param bean the bean for this map to operate on
*/
public BeanMap(Object bean) {
this.bean = bean;
initialise();
}
// Map interface
//-------------------------------------------------------------------------
public String toString() {
return "BeanMap<" + String.valueOf(bean) + ">";
}
/**
* Clone this bean map using the following process:
*
* <ul>
* <li>If there is no underlying bean, return a cloned BeanMap without a
* bean.
*
* <li>Since there is an underlying bean, try to instantiate a new bean of
* the same type using Class.newInstance().
*
* <li>If the instantiation fails, throw a CloneNotSupportedException
*
* <li>Clone the bean map and set the newly instantiated bean as the
* underlying bean for the bean map.
*
* <li>Copy each property that is both readable and writable from the
* existing object to a cloned bean map.
*
* <li>If anything fails along the way, throw a
* CloneNotSupportedException.
*
* <ul>
*/
public Object clone() throws CloneNotSupportedException {
    BeanMap newMap = (BeanMap)super.clone();
    if(bean == null) {
        // no bean, just an empty bean map at the moment. return a newly
        // cloned and empty bean map.
        return newMap;
    }
    // Create a fresh bean of the same class via its no-arg constructor.
    Object newBean = null;
    Class beanClass = null;
    try {
        beanClass = bean.getClass();
        newBean = beanClass.newInstance();
    } catch (Exception e) {
        // unable to instantiate (no accessible no-arg constructor, abstract class, ...)
        throw new CloneNotSupportedException
            ("Unable to instantiate the underlying bean \"" +
            beanClass.getName() + "\": " + e);
    }
    try {
        // setBean() also re-runs introspection for the new bean.
        newMap.setBean(newBean);
    } catch (Exception exception) {
        throw new CloneNotSupportedException
            ("Unable to set bean in the cloned bean map: " +
            exception);
    }
    try {
        // copy only properties that are readable and writable. If its
        // not readable, we can't get the value from the old map. If
        // its not writable, we can't write a value into the new map.
        Iterator readableKeys = readMethods.keySet().iterator();
        while(readableKeys.hasNext()) {
            Object key = readableKeys.next();
            if(getWriteMethod(key) != null) {
                newMap.put(key, get(key));
            }
        }
    } catch (Exception exception) {
        throw new CloneNotSupportedException
            ("Unable to copy bean values to cloned bean map: " +
            exception);
    }
    return newMap;
}
/**
 * Puts all of the writeable properties from the given BeanMap into this
 * BeanMap. Only properties that are readable on the source map and
 * writable on this map are copied; read-only and write-only properties
 * are skipped.
 *
 * @param map the BeanMap whose properties to put
 */
public void putAllWriteable(BeanMap map) {
    for (Iterator it = map.readMethods.keySet().iterator(); it.hasNext();) {
        Object key = it.next();
        if (getWriteMethod(key) != null) {
            this.put(key, map.get(key));
        }
    }
}
/**
 * This method reinitializes the bean map to have default values for the
 * bean's properties. This is accomplished by constructing a new instance
 * of the bean which the map uses as its underlying data source. This
 * behavior for <code>clear()</code> differs from the Map contract in that
 * the mappings are not actually removed from the map (the mappings for a
 * BeanMap are fixed).
 *
 * @throws UnsupportedOperationException if a new instance of the bean's
 *   class cannot be created; the underlying failure is attached as the cause
 */
public void clear() {
    if(bean == null) return;
    Class beanClass = null;
    try {
        beanClass = bean.getClass();
        bean = beanClass.newInstance();
    }
    catch (Exception e) {
        // Preserve the original failure as the cause instead of discarding it,
        // so callers can see why instantiation failed.
        UnsupportedOperationException uoe =
            new UnsupportedOperationException( "Could not create new instance of class: " + beanClass );
        uoe.initCause(e);
        throw uoe;
    }
}
/**
 * Returns true if the bean defines a readable property with the given name.
 * <p>
 * Returns false when the name is null, is not a <code>String</code>, or
 * names no readable property. Write-only properties are never matched
 * because the test operates against property read methods.
 *
 * @param name the name of the property to check
 * @return true if the bean defines a readable property with that name
 */
public boolean containsKey(Object name) {
    return getReadMethod(name) != null;
}
/**
 * Returns true if the bean defines a property whose current value is
 * the given object.
 * <p>
 * Delegates to the inherited implementation, which walks the entry set
 * and compares values.
 *
 * @param value the value to check
 * @return true if the bean has at least one readable property whose
 *  current value is that object, false otherwise
 */
public boolean containsValue(Object value) {
    // use default implementation
    return super.containsValue(value);
}
/**
* Returns the value of the bean's property with the given name.
* <p>
* The given name must be a {@link String} and must not be
* null; otherwise, this method returns <code>null</code>.
* If the bean defines a property with the given name, the value of
* that property is returned. Otherwise, <code>null</code> is
* returned.
* <p>
* Write-only properties will not be matched as the test operates against
* property read methods.
*
* @param name the name of the property whose value to return
* @return the value of the property with that name
*/
public Object get(Object name) {
    if ( bean != null ) {
        Method method = getReadMethod( name );
        if ( method != null ) {
            try {
                // Property getters take no parameters; NULL_ARGUMENTS is the
                // shared empty argument array.
                return method.invoke( bean, NULL_ARGUMENTS );
            }
            // Reflection failures are logged and treated as "no value"
            // rather than propagated, matching Map.get()'s null contract.
            catch ( IllegalAccessException e ) {
                logWarn( e );
            }
            catch ( IllegalArgumentException e ) {
                logWarn( e );
            }
            catch ( InvocationTargetException e ) {
                logWarn( e );
            }
            catch ( NullPointerException e ) {
                logWarn( e );
            }
        }
    }
    return null;
}
/**
* Sets the bean property with the given name to the given value.
*
* @param name the name of the property to set
* @param value the value to set that property to
* @return the previous value of that property
* @throws IllegalArgumentException if the given name is null;
* if the given name is not a {@link String}; if the bean doesn't
* define a property with that name; or if the bean property with
* that name is read-only
*/
public Object put(Object name, Object value) throws IllegalArgumentException, ClassCastException {
    if ( bean != null ) {
        // Capture the old value first so it can be returned per the Map.put() contract.
        Object oldValue = get( name );
        Method method = getWriteMethod( name );
        if ( method == null ) {
            throw new IllegalArgumentException( "The bean of type: "+ bean.getClass().getName() + " has no property called: " + name );
        }
        try {
            // The value may be converted (e.g. String -> Integer) to fit the
            // setter's declared parameter type.
            Object[] arguments = createWriteMethodArguments( method, value );
            method.invoke( bean, arguments );
            // Re-read the property: the bean may have normalised the value.
            Object newValue = get( name );
            firePropertyChange( name, oldValue, newValue );
        }
        catch ( InvocationTargetException e ) {
            logInfo( e );
            throw new IllegalArgumentException( e.getMessage() );
        }
        catch ( IllegalAccessException e ) {
            logInfo( e );
            throw new IllegalArgumentException( e.getMessage() );
        }
        return oldValue;
    }
    return null;
}
/**
 * Returns the number of properties defined by the bean.
 * <p>
 * Only readable properties count: the map is keyed by the
 * read-method cache.
 *
 * @return the number of properties defined by the bean
 */
public int size() {
    return readMethods.size();
}
/**
 * Get the keys for this BeanMap.
 * <p>
 * Write-only properties are <b>not</b> included in the returned set of
 * property names, although it is possible to set their value and to get
 * their type.
 *
 * @return BeanMap keys (the names of the readable properties). The Set
 *  returned by this method is not modifiable.
 */
public Set keySet() {
    return UnmodifiableSet.decorate(readMethods.keySet());
}
/**
 * Gets a Set of MapEntry objects that are the mappings for this BeanMap.
 * <p>
 * Each MapEntry can be set but not removed.
 *
 * @return the unmodifiable set of mappings
 */
public Set entrySet() {
    // Lazy view: entries are created during iteration from the bean's
    // current state, not snapshotted here.
    return UnmodifiableSet.decorate(new AbstractSet() {
        public Iterator iterator() {
            return entryIterator();
        }
        public int size() {
            return BeanMap.this.readMethods.size();
        }
    });
}
/**
 * Returns the values for the BeanMap.
 *
 * @return the current value of each readable property, wrapped in an
 *   unmodifiable list
 */
public Collection values() {
    ArrayList answer = new ArrayList( readMethods.size() );
    Iterator iter = valueIterator();
    while ( iter.hasNext() ) {
        answer.add( iter.next() );
    }
    return UnmodifiableList.decorate(answer);
}
// Helper methods
//-------------------------------------------------------------------------
/**
 * Returns the type of the property with the given name.
 *
 * @param name the name of the property
 * @return the type of the property, or <code>null</code> if no such
 *   property exists
 */
public Class getType(String name) {
    Object propertyType = types.get( name );
    return (Class) propertyType;
}
/**
 * Convenience method for getting an iterator over the keys.
 * <p>
 * Write-only properties will not be returned in the iterator.
 *
 * @return an iterator over the readable property names
 */
public Iterator keyIterator() {
    return readMethods.keySet().iterator();
}
/**
 * Convenience method for getting an iterator over the values.
 * <p>
 * Each value is read lazily from the bean as the iterator advances;
 * <code>remove()</code> is not supported.
 *
 * @return an iterator over the values
 */
public Iterator valueIterator() {
    final Iterator iter = keyIterator();
    return new Iterator() {
        public boolean hasNext() {
            return iter.hasNext();
        }
        public Object next() {
            Object key = iter.next();
            return get( (String) key );
        }
        public void remove() {
            throw new UnsupportedOperationException( "remove() not supported for BeanMap" );
        }
    };
}
/**
 * Convenience method for getting an iterator over the entries.
 * <p>
 * Each entry is built lazily from the bean's current property value;
 * <code>remove()</code> is not supported.
 *
 * @return an iterator over the entries
 */
public Iterator entryIterator() {
    final Iterator iter = keyIterator();
    return new Iterator() {
        public boolean hasNext() {
            return iter.hasNext();
        }
        public Object next() {
            Object key = iter.next();
            Object value = get(key);
            return new MyMapEntry( BeanMap.this, key, value );
        }
        public void remove() {
            throw new UnsupportedOperationException( "remove() not supported for BeanMap" );
        }
    };
}
// Properties
//-------------------------------------------------------------------------
/**
 * Returns the bean currently being operated on. The return value may
 * be null if this map is empty (no bean has been set).
 *
 * @return the bean being operated on by this map
 */
public Object getBean() {
    return bean;
}
/**
 * Sets the bean to be operated on by this map. The given value may
 * be null, in which case this map will be empty.
 *
 * @param newBean the new bean to operate on
 */
public void setBean( Object newBean ) {
    bean = newBean;
    // Rebuild the introspection caches for the new bean.
    reinitialise();
}
/**
 * Returns the accessor for the property with the given name.
 *
 * @param name the name of the property
 * @return the accessor method for the property, or null if none exists
 */
public Method getReadMethod(String name) {
    Object accessor = readMethods.get(name);
    return (Method) accessor;
}
/**
 * Returns the mutator for the property with the given name.
 *
 * @param name the name of the property
 * @return the mutator method for the property, or null if none exists
 */
public Method getWriteMethod(String name) {
    Object mutator = writeMethods.get(name);
    return (Method) mutator;
}
// Implementation methods
//-------------------------------------------------------------------------
/**
 * Returns the accessor for the property with the given name.
 *
 * @param name the name of the property
 * @return null if the name is null; null if the name is not a
 *  {@link String}; null if no such property exists; or the accessor
 *  method for that property
 */
protected Method getReadMethod( Object name ) {
    Object accessor = readMethods.get( name );
    return (Method) accessor;
}
/**
 * Returns the mutator for the property with the given name.
 *
 * @param name the name of the property
 * @return null if the name is null; null if the name is not a
 *  {@link String}; null if no such property exists; null if the
 *  property is read-only; or the mutator method for that property
 */
protected Method getWriteMethod( Object name ) {
    Object mutator = writeMethods.get( name );
    return (Method) mutator;
}
/**
 * Reinitializes this bean. Called during {@link #setBean(Object)}.
 * Drops all cached introspection data, then re-introspects the
 * current bean to repopulate it.
 */
protected void reinitialise() {
    // The three caches are independent, so clearing order does not matter.
    types.clear();
    writeMethods.clear();
    readMethods.clear();
    initialise();
}
/**
 * Introspects the current bean and populates the read-method,
 * write-method and property-type caches. Does nothing when no bean is
 * set; introspection problems are logged rather than thrown.
 */
private void initialise() {
    if(getBean() == null) return;
    Class beanClass = getBean().getClass();
    try {
        //BeanInfo beanInfo = Introspector.getBeanInfo( bean, null );
        BeanInfo beanInfo = Introspector.getBeanInfo( beanClass );
        PropertyDescriptor[] propertyDescriptors = beanInfo.getPropertyDescriptors();
        if ( propertyDescriptors != null ) {
            for ( int i = 0; i < propertyDescriptors.length; i++ ) {
                PropertyDescriptor propertyDescriptor = propertyDescriptors[i];
                if ( propertyDescriptor != null ) {
                    String name = propertyDescriptor.getName();
                    Method readMethod = propertyDescriptor.getReadMethod();
                    Method writeMethod = propertyDescriptor.getWriteMethod();
                    Class aType = propertyDescriptor.getPropertyType();
                    if ( readMethod != null ) {
                        readMethods.put( name, readMethod );
                    }
                    // BUGFIX: previously tested "writeMethods != null" (the cache
                    // map, which is never null) instead of the local writeMethod,
                    // so null mutators were cached for read-only properties.
                    if ( writeMethod != null ) {
                        writeMethods.put( name, writeMethod );
                    }
                    types.put( name, aType );
                }
            }
        }
    }
    catch ( IntrospectionException e ) {
        logWarn( e );
    }
}
/**
 * Called during a successful {@link #put(Object,Object)} operation.
 * Default implementation does nothing. Override to be notified of
 * property changes in the bean caused by this map.
 *
 * @param key the name of the property that changed
 * @param oldValue the old value for that property
 * @param newValue the new value for that property
 */
protected void firePropertyChange( Object key, Object oldValue, Object newValue ) {
    // Intentionally empty: subclass hook.
}
// Implementation classes
//-------------------------------------------------------------------------
/**
 * Map entry used by {@link BeanMap}.
 */
protected static class MyMapEntry extends AbstractMapEntry {
    /** The BeanMap this entry belongs to. */
    private BeanMap owner;
    /**
     * Constructs a new <code>MyMapEntry</code>.
     *
     * @param owner the BeanMap this entry belongs to
     * @param key the key for this entry
     * @param value the value for this entry
     */
    protected MyMapEntry( BeanMap owner, Object key, Object value ) {
        super( key, value );
        this.owner = owner;
    }
    /**
     * Sets the value, writing it through to the owning BeanMap's bean.
     *
     * @param value the new value for the entry
     * @return the old value for the entry
     */
    public Object setValue(Object value) {
        Object key = getKey();
        Object previous = owner.get( key );
        owner.put( key, value );
        // Store the value the bean actually holds now (it may have been
        // converted on the way in) rather than the raw argument.
        super.setValue( owner.get( key ) );
        return previous;
    }
}
/**
* Creates an array of parameters to pass to the given mutator method.
* If the given object is not the right type to pass to the method
* directly, it will be converted using {@link #convertType(Class,Object)}.
*
* @param method the mutator method
* @param value the value to pass to the mutator method
* @return an array containing one object that is either the given value
* or a transformed value
* @throws IllegalAccessException if {@link #convertType(Class,Object)}
* raises it
* @throws IllegalArgumentException if any other exception is raised
* by {@link #convertType(Class,Object)}
*/
protected Object[] createWriteMethodArguments( Method method, Object value ) throws IllegalAccessException, ClassCastException {
    try {
        if ( value != null ) {
            Class[] types = method.getParameterTypes();
            if ( types != null && types.length > 0 ) {
                Class paramType = types[0];
                if ( ! paramType.isAssignableFrom( value.getClass() ) ) {
                    // Convert (e.g. String -> Integer) so the setter accepts it.
                    value = convertType( paramType, value );
                }
            }
        }
        Object[] answer = { value };
        return answer;
    }
    // NOTE(review): only the cause's message survives the re-throw below;
    // the original exception object is dropped (kept for compatibility).
    catch ( InvocationTargetException e ) {
        logInfo( e );
        throw new IllegalArgumentException( e.getMessage() );
    }
    catch ( InstantiationException e ) {
        logInfo( e );
        throw new IllegalArgumentException( e.getMessage() );
    }
}
/**
 * Converts the given value to the given type. First, reflection is
 * used to find a public constructor declared by the given class
 * that takes one argument, which must be the precise type of the
 * given value. If such a constructor is found, a new object is
 * created by passing the given value to that constructor, and the
 * newly constructed object is returned.<P>
 *
 * If no such constructor exists, and the given type is a primitive
 * type, then the given value is converted to a string using its
 * {@link Object#toString() toString()} method, and that string is
 * parsed into the correct primitive type using, for instance,
 * {@link Integer#valueOf(String)} to convert the string into an
 * <code>int</code>.<P>
 *
 * If no special constructor exists and the given type is not a
 * primitive type, this method returns the original value.
 *
 * @param newType the type to convert the value to
 * @param value the value to convert
 * @return the converted value
 * @throws NumberFormatException if newType is a primitive type, and
 *  the string representation of the given value cannot be converted
 *  to that type
 * @throws InstantiationException if the constructor found with
 *  reflection raises it
 * @throws InvocationTargetException if the constructor found with
 *  reflection raises it
 * @throws IllegalAccessException never
 * @throws IllegalArgumentException never
 */
protected Object convertType( Class newType, Object value )
    throws InstantiationException, IllegalAccessException, IllegalArgumentException, InvocationTargetException {
    // try call constructor
    Class[] types = { value.getClass() };
    try {
        Constructor constructor = newType.getConstructor( types );
        Object[] arguments = { value };
        return constructor.newInstance( arguments );
    }
    catch ( NoSuchMethodException e ) {
        // try using the transformers
        Transformer transformer = getTypeTransformer( newType );
        if ( transformer != null ) {
            return transformer.transform( value );
        }
        return value;
    }
}
/**
 * Returns a transformer for the given primitive type.
 *
 * @param aType the primitive type whose transformer to return
 * @return a transformer that will convert strings into that type,
 *   or null if the given type is not a primitive type
 */
protected Transformer getTypeTransformer( Class aType ) {
    Object transformer = defaultTransformers.get( aType );
    return (Transformer) transformer;
}
/**
 * Logs the given exception to <code>System.out</code>. Used to display
 * warnings while accessing/mutating the bean. The exception is never
 * rethrown.
 *
 * @param ex the exception to log
 */
protected void logInfo(Exception ex) {
    // Deliberately do not use LOG4J or Commons Logging to avoid dependencies
    System.out.println( "INFO: Exception: " + ex );
}
/**
 * Logs the given exception to <code>System.out</code> and prints its
 * stack trace. Used to display errors while accessing/mutating the bean.
 * (The original javadoc said <code>System.err</code>, but the code has
 * always written to <code>System.out</code>.)
 *
 * @param ex the exception to log
 */
protected void logWarn(Exception ex) {
    // Deliberately do not use LOG4J or Commons Logging to avoid dependencies
    System.out.println( "WARN: Exception: " + ex );
    ex.printStackTrace();
}
}
@@ -0,0 +1,242 @@
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.commons.collections.map;
import java.io.IOException;
import java.io.ObjectInputStream;
import java.io.ObjectOutputStream;
import java.io.Serializable;
import java.util.Map;
import org.apache.commons.collections.IterableMap;
import org.apache.commons.collections.Transformer;
/**
* Decorates another <code>Map</code> to transform objects that are added.
* <p>
* The Map put methods and Map.Entry setValue method are affected by this class.
* Thus objects must be removed or searched for using their transformed form.
* For example, if the transformation converts Strings to Integers, you must
* use the Integer form to remove objects.
* <p>
* <strong>Note that TransformedMap is not synchronized and is not thread-safe.</strong>
* If you wish to use this map from multiple threads concurrently, you must use
* appropriate synchronization. The simplest approach is to wrap this map
* using {@link java.util.Collections#synchronizedMap(Map)}. This class may throw
* exceptions when accessed by concurrent threads without synchronization.
* <p>
* This class is Serializable from Commons Collections 3.1.
* <p>
* @see org.apache.commons.collections.splitmap.TransformedMap
*
* @since Commons Collections 3.0
* @version $Revision$ $Date$
*
* @author Stephen Colebourne
*/
public class TransformedMap<K, V>
        extends AbstractInputCheckedMapDecorator<K, V>
        implements Serializable {

    /** Serialization version */
    private static final long serialVersionUID = 7023152376788900464L;

    /** The transformer to use for the key */
    protected final Transformer<? super K, ? extends K> keyTransformer;
    /** The transformer to use for the value */
    protected final Transformer<? super V, ? extends V> valueTransformer;

    /**
     * Factory method to create a transforming map.
     * <p>
     * If there are any elements already in the map being decorated, they
     * are NOT transformed.
     * Contrast this with {@link #decorateTransform}.
     *
     * @param map  the map to decorate, must not be null
     * @param keyTransformer  the transformer to use for key conversion, null means no transformation
     * @param valueTransformer  the transformer to use for value conversion, null means no transformation
     * @return a new transformed map decorating the given map
     * @throws IllegalArgumentException if map is null
     */
    public static <K, V> IterableMap<K, V> decorate(Map<K, V> map,
            Transformer<? super K, ? extends K> keyTransformer,
            Transformer<? super V, ? extends V> valueTransformer) {
        return new TransformedMap<K, V>(map, keyTransformer, valueTransformer);
    }

    /**
     * Factory method to create a transforming map that will transform
     * existing contents of the specified map.
     * <p>
     * If there are any elements already in the map being decorated, they
     * will be transformed by this method.
     * Contrast this with {@link #decorate}.
     *
     * @param map  the map to decorate, must not be null
     * @param keyTransformer  the transformer to use for key conversion, null means no transformation
     * @param valueTransformer  the transformer to use for value conversion, null means no transformation
     * @return a new transformed map decorating the given map, with its
     *  existing contents transformed
     * @throws IllegalArgumentException if map is null
     * @since Commons Collections 3.2
     */
    public static <K, V> Map<K, V> decorateTransform(Map<K, V> map,
            Transformer<? super K, ? extends K> keyTransformer,
            Transformer<? super V, ? extends V> valueTransformer) {
        TransformedMap<K, V> decorated = new TransformedMap<K, V>(map, keyTransformer, valueTransformer);
        if (map.size() > 0) {
            Map<K, V> transformed = decorated.transformMap(map);
            decorated.clear();
            decorated.decorated().putAll(transformed); // avoids double transformation
        }
        return decorated;
    }

    //-----------------------------------------------------------------------
    /**
     * Constructor that wraps (not copies).
     * <p>
     * If there are any elements already in the collection being decorated, they
     * are NOT transformed.
     *
     * @param map  the map to decorate, must not be null
     * @param keyTransformer  the transformer to use for key conversion, null means no conversion
     * @param valueTransformer  the transformer to use for value conversion, null means no conversion
     * @throws IllegalArgumentException if map is null
     */
    protected TransformedMap(Map<K, V> map, Transformer<? super K, ? extends K> keyTransformer,
            Transformer<? super V, ? extends V> valueTransformer) {
        super(map);
        this.keyTransformer = keyTransformer;
        this.valueTransformer = valueTransformer;
    }

    //-----------------------------------------------------------------------
    /**
     * Write the map out using a custom routine.
     *
     * @param out  the output stream
     * @throws IOException if an error occurs while writing to the stream
     * @since Commons Collections 3.1
     */
    private void writeObject(ObjectOutputStream out) throws IOException {
        out.defaultWriteObject();
        out.writeObject(map);
    }

    /**
     * Read the map in using a custom routine.
     *
     * @param in  the input stream
     * @throws IOException if an error occurs while reading from the stream
     * @throws ClassNotFoundException if a serialized class cannot be loaded
     * @since Commons Collections 3.1
     */
    @SuppressWarnings("unchecked")
    private void readObject(ObjectInputStream in) throws IOException, ClassNotFoundException {
        in.defaultReadObject();
        map = (Map) in.readObject();
    }

    //-----------------------------------------------------------------------
    /**
     * Transforms a key.
     * <p>
     * The transformer itself may throw an exception if necessary.
     *
     * @param object  the object to transform
     * @return the transformed object
     */
    protected K transformKey(K object) {
        if (keyTransformer == null) {
            return object;
        }
        return keyTransformer.transform(object);
    }

    /**
     * Transforms a value.
     * <p>
     * The transformer itself may throw an exception if necessary.
     *
     * @param object  the object to transform
     * @return the transformed object
     */
    protected V transformValue(V object) {
        if (valueTransformer == null) {
            return object;
        }
        return valueTransformer.transform(object);
    }

    /**
     * Transforms a map.
     * <p>
     * The transformer itself may throw an exception if necessary.
     *
     * @param map  the map to transform
     * @return the transformed map (the input map itself when it is empty)
     */
    @SuppressWarnings("unchecked")
    protected Map<K, V> transformMap(Map<? extends K, ? extends V> map) {
        if (map.isEmpty()) {
            return (Map<K, V>) map;
        }
        Map<K, V> result = new LinkedMap<K, V>(map.size());
        for (Map.Entry<? extends K, ? extends V> entry : map.entrySet()) {
            result.put((K) transformKey(entry.getKey()), transformValue(entry.getValue()));
        }
        return result;
    }

    /**
     * Override to transform the value when using <code>setValue</code>.
     *
     * @param value  the value to transform
     * @return the transformed value
     * @since Commons Collections 3.1
     */
    @Override
    protected V checkSetValue(V value) {
        return valueTransformer.transform(value);
    }

    /**
     * Override to only return true when there is a value transformer.
     *
     * @return true if a value transformer is in use
     * @since Commons Collections 3.1
     */
    @Override
    protected boolean isSetValueChecking() {
        return (valueTransformer != null);
    }

    //-----------------------------------------------------------------------
    @Override
    public V put(K key, V value) {
        key = transformKey(key);
        value = transformValue(value);
        return decorated().put(key, value);
    }

    @Override
    public void putAll(Map<? extends K, ? extends V> mapToCopy) {
        mapToCopy = transformMap(mapToCopy);
        decorated().putAll(mapToCopy);
    }
}
@@ -0,0 +1,525 @@
/*
* $Header: /home/jerenkrantz/tmp/commons/commons-convert/cvs/home/cvs/jakarta-commons//collections/src/java/org/apache/commons/collections/FastHashMap.java,v 1.2 2001/04/21 12:19:57 craigmcc Exp $
* $Revision: 1.2 $
* $Date: 2001/04/21 12:19:57 $
*
* ====================================================================
*
* The Apache Software License, Version 1.1
*
* Copyright (c) 1999-2001 The Apache Software Foundation. All rights
* reserved.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions
* are met:
*
* 1. Redistributions of source code must retain the above copyright
* notice, this list of conditions and the following disclaimer.
*
* 2. Redistributions in binary form must reproduce the above copyright
* notice, this list of conditions and the following disclaimer in
* the documentation and/or other materials provided with the
* distribution.
*
* 3. The end-user documentation included with the redistribution, if
* any, must include the following acknowlegement:
* "This product includes software developed by the
* Apache Software Foundation (http://www.apache.org/)."
* Alternately, this acknowlegement may appear in the software itself,
* if and wherever such third-party acknowlegements normally appear.
*
* 4. The names "The Jakarta Project", "Commons", and "Apache Software
* Foundation" must not be used to endorse or promote products derived
* from this software without prior written permission. For written
* permission, please contact apache@apache.org.
*
* 5. Products derived from this software may not be called "Apache"
* nor may "Apache" appear in their names without prior written
* permission of the Apache Group.
*
* THIS SOFTWARE IS PROVIDED ``AS IS'' AND ANY EXPRESSED OR IMPLIED
* WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES
* OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
* DISCLAIMED. IN NO EVENT SHALL THE APACHE SOFTWARE FOUNDATION OR
* ITS CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
* SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
* LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF
* USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND
* ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
* OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT
* OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF
* SUCH DAMAGE.
* ====================================================================
*
* This software consists of voluntary contributions made by many
* individuals on behalf of the Apache Software Foundation. For more
* information on the Apache Software Foundation, please see
* <http://www.apache.org/>.
*
*/
package org.apache.commons.collections;
import java.io.Serializable;
import java.util.Collection;
import java.util.HashMap;
import java.util.Iterator;
import java.util.Map;
import java.util.Map.Entry;
import java.util.Set;
/**
* <p>A customized implementation of <code>java.util.HashMap</code> designed
* to operate in a multithreaded environment where the large majority of
* method calls are read-only, instead of structural changes. When operating
* in "fast" mode, read calls are non-synchronized and write calls perform the
* following steps:</p>
* <ul>
* <li>Clone the existing collection
* <li>Perform the modification on the clone
* <li>Replace the existing collection with the (modified) clone
* </ul>
* <p>When first created, objects of this class default to "slow" mode, where
* all accesses of any type are synchronized but no cloning takes place. This
* is appropriate for initially populating the collection, followed by a switch
* to "fast" mode (by calling <code>setFast(true)</code>) after initialization
* is complete.</p>
*
* <p><strong>NOTE</strong>: If you are creating and accessing a
* <code>HashMap</code> only within a single thread, you should use
* <code>java.util.HashMap</code> directly (with no synchronization), for
* maximum performance.</p>
*
* @author Craig R. McClanahan
* @version $Revision: 1.2 $ $Date: 2001/04/21 12:19:57 $
*/
public class FastHashMap extends HashMap {
// ----------------------------------------------------------- Constructors
/**
 * Construct an empty map.
 */
public FastHashMap() {
    super();
    // All entries live in the delegate 'map'; the inherited HashMap
    // storage is left empty.
    this.map = new HashMap();
}
/**
 * Construct an empty map with the specified capacity.
 *
 * @param capacity The initial capacity of the empty map
 */
public FastHashMap(int capacity) {
    super();
    // Capacity applies to the delegate map, not the inherited storage.
    this.map = new HashMap(capacity);
}
/**
 * Construct an empty map with the specified capacity and load factor.
 *
 * @param capacity The initial capacity of the empty map
 * @param factor The load factor of the new map
 */
public FastHashMap(int capacity, float factor) {
    super();
    // Both tuning parameters apply to the delegate map only.
    this.map = new HashMap(capacity, factor);
}
/**
 * Construct a new map with the same mappings as the specified map.
 *
 * @param map The map whose mappings are to be copied
 */
public FastHashMap(Map map) {
    super();
    // Defensive copy: later changes to the argument do not affect this map.
    this.map = new HashMap(map);
}
// ----------------------------------------------------- Instance Variables
/**
* The underlying map we are managing.
*/
protected HashMap map = null;
// ------------------------------------------------------------- Properties
/**
* Are we operating in "fast" mode?
*/
protected boolean fast = false;
/**
 * Returns whether this map is operating in "fast" (copy-on-write) mode.
 *
 * @return true if in fast mode, false if fully synchronized
 */
public boolean getFast() {
    return fast;
}
/**
 * Switches between "slow" (fully synchronized) and "fast"
 * (copy-on-write) operation.
 *
 * @param fast true to enable fast mode
 */
public void setFast(boolean fast) {
    this.fast = fast;
}
// --------------------------------------------------------- Public Methods
/**
 * Remove all mappings from this map.
 */
public void clear() {
    if (fast) {
        synchronized (this) {
            // Copy-on-write: mutate a clone, then publish it so concurrent
            // readers never observe a partially-modified map.
            HashMap temp = (HashMap) map.clone();
            temp.clear();
            map = temp;
        }
    } else {
        synchronized (map) {
            map.clear();
        }
    }
}
/**
 * Return a shallow copy of this <code>FastHashMap</code> instance.
 * The keys and values themselves are not copied. The copy carries the
 * same fast/slow mode as this map.
 */
public Object clone() {
    FastHashMap results = null;
    if (fast) {
        // Fast mode: writers replace the delegate rather than mutating it,
        // so it can be copied without locking.
        results = new FastHashMap(map);
    } else {
        synchronized (map) {
            results = new FastHashMap(map);
        }
    }
    results.setFast(getFast());
    return (results);
}
/**
 * Return <code>true</code> if this map contains a mapping for the
 * specified key.
 *
 * @param key Key to be searched for
 * @return true if a mapping for the key exists
 */
public boolean containsKey(Object key) {
    if (fast) {
        return map.containsKey(key);
    }
    synchronized (map) {
        return map.containsKey(key);
    }
}
/**
 * Return <code>true</code> if this map contains one or more keys mapping
 * to the specified value.
 *
 * @param value Value to be searched for
 * @return true if at least one key maps to the value
 */
public boolean containsValue(Object value) {
    if (fast) {
        return map.containsValue(value);
    }
    synchronized (map) {
        return map.containsValue(value);
    }
}
/**
 * Return a collection view of the mappings contained in this map. Each
 * element in the returned collection is a <code>Map.Entry</code>.
 *
 * @return the entry set of the delegate map
 */
public Set entrySet() {
    if (fast) {
        return map.entrySet();
    }
    synchronized (map) {
        return map.entrySet();
    }
}
/**
 * Compare the specified object with this list for equality. This
 * implementation uses exactly the algorithm specified in the
 * documentation for the <code>Map.equals</code> method.
 *
 * @param o Object to be compared to this list
 * @return true if the given object is a Map with equal mappings
 */
public boolean equals(Object o) {
    // Simple tests that require no synchronization
    if (o == this)
        return (true);
    else if (!(o instanceof Map))
        return (false);
    Map mo = (Map) o;
    // Compare the two maps for equality; in fast mode the delegate is an
    // immutable snapshot, so no lock is needed.
    if (fast) {
        return (mapEquals(mo));
    } else {
        synchronized (map) {
            return (mapEquals(mo));
        }
    }
}
/**
 * Entry-by-entry comparison shared by both synchronization modes
 * (previously duplicated verbatim in each branch). The caller is
 * responsible for any required locking.
 *
 * @param mo the map to compare against the delegate
 * @return true if sizes and all key/value mappings match
 */
private boolean mapEquals(Map mo) {
    if (mo.size() != map.size())
        return (false);
    Iterator i = map.entrySet().iterator();
    while (i.hasNext()) {
        Entry e = (Entry) i.next();
        Object key = e.getKey();
        Object value = e.getValue();
        if (value == null) {
            // Null values require the containsKey() check to distinguish
            // "mapped to null" from "not mapped at all".
            if (!(mo.get(key) == null && mo.containsKey(key)))
                return (false);
        } else {
            if (!value.equals(mo.get(key)))
                return (false);
        }
    }
    return (true);
}
/**
* Return the value to which this map maps the specified key. Returns
* <code>null</code> if the map contains no mapping for this key, or if
* there is a mapping with a value of <code>null</code>. Use the
* <code>containsKey()</code> method to disambiguate these cases.
*
* @param key Key whose value is to be returned
*/
public Object get(Object key) {
    // Unlocked lookup in fast mode; synchronized lookup otherwise.
    if (fast) {
        return map.get(key);
    }
    synchronized (map) {
        return map.get(key);
    }
}
/**
* Return the hash code value for this map. This implementation uses
* exactly the code that is used to define the list hash function in the
* documentation for the <code>Map.hashCode</code> method.
*/
public int hashCode() {
    // Sum of entry hash codes, per the Map.hashCode contract.
    if (fast) {
        int total = 0;
        for (Iterator it = map.entrySet().iterator(); it.hasNext();) {
            total += it.next().hashCode();
        }
        return total;
    }
    synchronized (map) {
        int total = 0;
        for (Iterator it = map.entrySet().iterator(); it.hasNext();) {
            total += it.next().hashCode();
        }
        return total;
    }
}
/**
* Return <code>true</code> if this map contains no mappings.
*/
public boolean isEmpty() {
    // Unlocked check in fast mode; synchronized check otherwise.
    if (fast) {
        return map.isEmpty();
    }
    synchronized (map) {
        return map.isEmpty();
    }
}
/**
* Return a set view of the keys contained in this map.
*/
public Set keySet() {
    // Unlocked view in fast mode; synchronized read otherwise.
    if (fast) {
        return map.keySet();
    }
    synchronized (map) {
        return map.keySet();
    }
}
/**
* Associate the specified value with the specified key in this map.
* If the map previously contained a mapping for this key, the old
* value is replaced and returned.
*
* @param key The key with which the value is to be associated
* @param value The value to be associated with this key
*/
public Object put(Object key, Object value) {
    if (fast) {
        // Copy-on-write: mutate a clone, then publish it as the new map.
        synchronized (this) {
            HashMap snapshot = (HashMap) map.clone();
            Object previous = snapshot.put(key, value);
            map = snapshot;
            return previous;
        }
    }
    synchronized (map) {
        return map.put(key, value);
    }
}
/**
* Copy all of the mappings from the specified map to this one, replacing
* any mappings with the same keys.
*
* @param in Map whose mappings are to be copied
*/
public void putAll(Map in) {
    if (fast) {
        // Copy-on-write: mutate a clone, then publish it as the new map.
        synchronized (this) {
            HashMap snapshot = (HashMap) map.clone();
            snapshot.putAll(in);
            map = snapshot;
        }
        return;
    }
    synchronized (map) {
        map.putAll(in);
    }
}
/**
* Remove any mapping for this key, and return any previously
* mapped value.
*
* @param key Key whose mapping is to be removed
*/
public Object remove(Object key) {
    if (fast) {
        // Copy-on-write: remove from a clone, then publish the clone.
        synchronized (this) {
            HashMap snapshot = (HashMap) map.clone();
            Object previous = snapshot.remove(key);
            map = snapshot;
            return previous;
        }
    }
    synchronized (map) {
        return map.remove(key);
    }
}
/**
* Return the number of key-value mappings in this map.
*/
public int size() {
    // Unlocked read in fast mode; synchronized read otherwise.
    if (fast) {
        return map.size();
    }
    synchronized (map) {
        return map.size();
    }
}
/**
* Return a collection view of the values contained in this map.
*/
public Collection values() {
    // Unlocked view in fast mode; synchronized read otherwise.
    if (fast) {
        return map.values();
    }
    synchronized (map) {
        return map.values();
    }
}
}
@@ -0,0 +1,288 @@
/*
* Copyright 1999-2004 The Apache Software Foundation
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.commons.collections.iterators;
import java.util.ArrayList;
import java.util.Collection;
import java.util.Iterator;
import java.util.List;
import org.apache.commons.collections.list.UnmodifiableList;
/**
* An IteratorChain is an Iterator that wraps a number of Iterators.
* <p>
* This class makes multiple iterators look like one to the caller
* When any method from the Iterator interface is called, the IteratorChain
* will delegate to a single underlying Iterator. The IteratorChain will
* invoke the Iterators in sequence until all Iterators are exhausted.
* <p>
* Under many circumstances, linking Iterators together in this manner is
* more efficient (and convenient) than reading out the contents of each
* Iterator into a List and creating a new Iterator.
* <p>
* Calling a method that adds new Iterator<i>after a method in the Iterator
* interface has been called</i> will result in an UnsupportedOperationException.
* Subclasses should <i>take care</i> to not alter the underlying List of Iterators.
* <p>
* NOTE: As from version 3.0, the IteratorChain may contain no
* iterators. In this case the class will function as an empty iterator.
*
* @since Commons Collections 2.1
* @version $Revision$ $Date$
*
* @author Morgan Delagrange
* @author Stephen Colebourne
*/
public class IteratorChain implements Iterator {
    /** The chain of iterators */
    protected final List iteratorChain = new ArrayList();
    /** The index of the current iterator */
    protected int currentIteratorIndex = 0;
    /** The current iterator */
    protected Iterator currentIterator = null;
    /**
     * The "last used" Iterator is the Iterator upon which next() or hasNext()
     * was most recently called; used for the remove() operation only.
     */
    protected Iterator lastUsedIterator = null;
    /**
     * The IteratorChain is "locked" after the first call to any method of the
     * Iterator interface. (The original comment referred to ComparatorChain,
     * a copy-paste error; this flag guards this chain.)
     */
    protected boolean isLocked = false;
    //-----------------------------------------------------------------------
    /**
     * Construct an IteratorChain with no Iterators.
     * <p>
     * You will normally use {@link #addIterator(Iterator)} to add
     * some iterators after using this constructor.
     */
    public IteratorChain() {
        super();
    }
    /**
     * Construct an IteratorChain with a single Iterator.
     *
     * @param iterator first Iterator in the IteratorChain
     * @throws NullPointerException if the iterator is null
     */
    public IteratorChain(Iterator iterator) {
        super();
        addIterator(iterator);
    }
    /**
     * Constructs a new <code>IteratorChain</code> over the two given iterators.
     *
     * @param a the first child iterator
     * @param b the second child iterator
     * @throws NullPointerException if either iterator is null
     */
    public IteratorChain(Iterator a, Iterator b) {
        super();
        addIterator(a);
        addIterator(b);
    }
    /**
     * Constructs a new <code>IteratorChain</code> over the array of iterators.
     *
     * @param iterators the array of iterators
     * @throws NullPointerException if iterators array is or contains null
     */
    public IteratorChain(Iterator[] iterators) {
        super();
        for (int i = 0; i < iterators.length; i++) {
            addIterator(iterators[i]);
        }
    }
    /**
     * Constructs a new <code>IteratorChain</code> over the collection of iterators.
     *
     * @param iterators the collection of iterators
     * @throws NullPointerException if iterators collection is or contains null
     * @throws ClassCastException if iterators collection doesn't contain an iterator
     */
    public IteratorChain(Collection iterators) {
        super();
        for (Iterator it = iterators.iterator(); it.hasNext();) {
            Iterator item = (Iterator) it.next();
            addIterator(item);
        }
    }
    //-----------------------------------------------------------------------
    /**
     * Add an Iterator to the end of the chain.
     *
     * @param iterator Iterator to add
     * @throws IllegalStateException if I've already started iterating
     * @throws NullPointerException if the iterator is null
     */
    public void addIterator(Iterator iterator) {
        checkLocked();
        if (iterator == null) {
            throw new NullPointerException("Iterator must not be null");
        }
        iteratorChain.add(iterator);
    }
    /**
     * Set the Iterator at the given index.
     *
     * @param index index of the Iterator to replace
     * @param iterator Iterator to place at the given index
     * @throws IndexOutOfBoundsException if index &lt; 0 or index &gt; size()
     * @throws IllegalStateException if I've already started iterating
     * @throws NullPointerException if the iterator is null
     */
    public void setIterator(int index, Iterator iterator) throws IndexOutOfBoundsException {
        checkLocked();
        if (iterator == null) {
            throw new NullPointerException("Iterator must not be null");
        }
        iteratorChain.set(index, iterator);
    }
    /**
     * Get the list of Iterators (unmodifiable).
     *
     * @return the unmodifiable list of iterators added
     */
    public List getIterators() {
        return UnmodifiableList.decorate(iteratorChain);
    }
    /**
     * Number of Iterators in the current IteratorChain.
     *
     * @return Iterator count
     */
    public int size() {
        return iteratorChain.size();
    }
    /**
     * Determine if modifications can still be made to the IteratorChain.
     * IteratorChains cannot be modified once they have executed a method
     * from the Iterator interface.
     *
     * @return true if IteratorChain cannot be modified, false if it can
     */
    public boolean isLocked() {
        return isLocked;
    }
    /**
     * Checks whether the iterator chain is now locked and in use.
     */
    private void checkLocked() {
        if (isLocked) {
            throw new UnsupportedOperationException("IteratorChain cannot be changed after the first use of a method from the Iterator interface");
        }
    }
    /**
     * Lock the chain so no more iterators can be added.
     * This must be called from all Iterator interface methods.
     */
    private void lockChain() {
        if (!isLocked) {
            isLocked = true;
        }
    }
    /**
     * Updates the current iterator field to ensure that the current Iterator
     * is not exhausted.
     */
    protected void updateCurrentIterator() {
        if (currentIterator == null) {
            if (iteratorChain.isEmpty()) {
                currentIterator = EmptyIterator.INSTANCE;
            } else {
                currentIterator = (Iterator) iteratorChain.get(0);
            }
            // set last used iterator here, in case the user calls remove
            // before calling hasNext() or next() (although they shouldn't)
            lastUsedIterator = currentIterator;
        }
        // advance past exhausted iterators, stopping at the last one
        while (!currentIterator.hasNext() && currentIteratorIndex < iteratorChain.size() - 1) {
            currentIteratorIndex++;
            currentIterator = (Iterator) iteratorChain.get(currentIteratorIndex);
        }
    }
    //-----------------------------------------------------------------------
    /**
     * Return true if any Iterator in the IteratorChain has a remaining element.
     *
     * @return true if elements remain
     */
    public boolean hasNext() {
        lockChain();
        updateCurrentIterator();
        lastUsedIterator = currentIterator;
        return currentIterator.hasNext();
    }
    /**
     * Returns the next Object of the current Iterator.
     *
     * @return Object from the current Iterator
     * @throws java.util.NoSuchElementException if all the Iterators are exhausted
     */
    public Object next() {
        lockChain();
        updateCurrentIterator();
        lastUsedIterator = currentIterator;
        return currentIterator.next();
    }
    /**
     * Removes from the underlying collection the last element returned by the
     * Iterator. As with next() and hasNext(), this method delegates remove()
     * to the underlying Iterator, so it may throw
     * UnsupportedOperationException if that Iterator does not support removal.
     *
     * @throws UnsupportedOperationException
     *   if the remove operator is not supported by the underlying Iterator
     * @throws IllegalStateException
     *   if the next method has not yet been called, or the remove method has
     *   already been called after the last call to the next method.
     */
    public void remove() {
        lockChain();
        updateCurrentIterator();
        // remove() acts on the iterator that produced the last element, which
        // may differ from currentIterator after updateCurrentIterator()
        // advanced past it; that is why lastUsedIterator exists.
        lastUsedIterator.remove();
    }
}
@@ -0,0 +1,888 @@
/*
* $Header: /home/jerenkrantz/tmp/commons/commons-convert/cvs/home/cvs/jakarta-commons//collections/src/java/org/apache/commons/collections/SequencedHashMap.java,v 1.3 2002/02/18 20:34:57 morgand Exp $
* $Revision: 1.3 $
* $Date: 2002/02/18 20:34:57 $
*
* ====================================================================
*
* The Apache Software License, Version 1.1
*
* Copyright (c) 1999-2002 The Apache Software Foundation. All rights
* reserved.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions
* are met:
*
* 1. Redistributions of source code must retain the above copyright
* notice, this list of conditions and the following disclaimer.
*
* 2. Redistributions in binary form must reproduce the above copyright
* notice, this list of conditions and the following disclaimer in
* the documentation and/or other materials provided with the
* distribution.
*
* 3. The end-user documentation included with the redistribution, if
* any, must include the following acknowlegement:
* "This product includes software developed by the
* Apache Software Foundation (http://www.apache.org/)."
* Alternately, this acknowlegement may appear in the software itself,
* if and wherever such third-party acknowlegements normally appear.
*
* 4. The names "The Jakarta Project", "Commons", and "Apache Software
* Foundation" must not be used to endorse or promote products derived
* from this software without prior written permission. For written
* permission, please contact apache@apache.org.
*
* 5. Products derived from this software may not be called "Apache"
* nor may "Apache" appear in their names without prior written
* permission of the Apache Group.
*
* THIS SOFTWARE IS PROVIDED ``AS IS'' AND ANY EXPRESSED OR IMPLIED
* WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES
* OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
* DISCLAIMED. IN NO EVENT SHALL THE APACHE SOFTWARE FOUNDATION OR
* ITS CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
* SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
* LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF
* USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND
* ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
* OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT
* OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF
* SUCH DAMAGE.
* ====================================================================
*
* This software consists of voluntary contributions made by many
* individuals on behalf of the Apache Software Foundation. For more
* information on the Apache Software Foundation, please see
* <http://www.apache.org/>.
*
*/
package org.apache.commons.collections;
import java.util.Collection;
import java.util.Collections;
import java.util.HashMap;
import java.util.Iterator;
import java.util.AbstractCollection;
import java.util.AbstractMap;
import java.util.AbstractSet;
import java.util.ArrayList;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.NoSuchElementException;
/**
* A map of objects whose mapping entries are sequenced based on the order in
* which they were added. This data structure has fast <I>O(1)</I> search
* time, deletion time, and insertion time.
*
* This class inherits from {@link java.util.HashMap} purely for backwards
* compatibility. It should really be inheriting from {@link
* java.util.AbstractMap}, or with a tiny extra bit of work, implement the
* full map interface on its own. APIs should not rely on this class being an
* actual {@link java.util.HashMap}, and instead should recognize it only as a
* generic {@link java.util.Map} (unless, of course, you need the sequencing
* functionality, but even in that case, this class should not be referred to
* as a java.util.HashMap).
*
* <P>Although this map is sequenced, it cannot implement {@link
* java.util.List} because of incompatible interface definitions. The remove
* methods in List and Map have different return values (see: {@link
* java.util.List#remove(Object)} and {@link java.util.Map#remove(Object)}).
*
* <P>This class is not thread safe. When a thread safe implementation is
* required, use {@link Collections#synchronizedMap(Map)} as it is documented,
* or use explicit synchronization controls.
*
* @author <a href="mailto:michael@iammichael.org">Michael A. Smith</A>
* @author <a href="mailto:dlr@collab.net">Daniel Rall</a>
* @author <a href="mailto:hps@intermeta.de">Henning P. Schmiedehausen</a>
*/
public class SequencedHashMap extends HashMap {
/**
* {@link java.util.Map.Entry} that doubles as a node in the linked list
* of sequenced mappings.
**/
private static class Entry implements Map.Entry {
    // Cloning is deliberately unsupported: a shallow clone would break the
    // next/prev invariants (entry.next.prev != entry), and a deep clone
    // would recurse forever on the circular sentinel list.
    private final Object key;
    private Object value;
    // Linked-list pointers; package private so the enclosing
    // SequencedHashMap can splice nodes in and out directly.
    Entry next = null;
    Entry prev = null;
    public Entry(Object key, Object value) {
        this.key = key;
        this.value = value;
    }
    // per Map.Entry.getKey()
    public Object getKey() {
        return this.key;
    }
    // per Map.Entry.getValue()
    public Object getValue() {
        return this.value;
    }
    // per Map.Entry.setValue()
    public Object setValue(Object value) {
        Object previous = this.value;
        this.value = value;
        return previous;
    }
    public int hashCode() {
        // implemented per the api docs for Map.Entry.hashCode()
        int keyHash = (getKey() == null) ? 0 : getKey().hashCode();
        int valueHash = (getValue() == null) ? 0 : getValue().hashCode();
        return keyHash ^ valueHash;
    }
    public boolean equals(Object obj) {
        if (obj == this) {
            return true;
        }
        if (!(obj instanceof Map.Entry)) {
            // also covers obj == null
            return false;
        }
        Map.Entry other = (Map.Entry) obj;
        // implemented per the api docs for Map.Entry.equals(Object)
        boolean keysMatch = (getKey() == null)
                ? other.getKey() == null
                : getKey().equals(other.getKey());
        if (!keysMatch) {
            return false;
        }
        return (getValue() == null)
                ? other.getValue() == null
                : getValue().equals(other.getValue());
    }
    public String toString() {
        return "[" + getKey() + "=" + getValue() + "]";
    }
}
/**
* Construct an empty sentinel used to hold the head (sentinel.next) and the
* tail (sentinel.prev) of the list. The sentinal has a <code>null</code>
* key and value.
**/
private static final Entry createSentinel() {
    // The sentinel is its own neighbour in both directions: an empty list.
    Entry sentinel = new Entry(null, null);
    sentinel.next = sentinel;
    sentinel.prev = sentinel;
    return sentinel;
}
/**
 * Sentinel used to hold the head (sentinel.next) and tail (sentinel.prev)
 * of the circular doubly-linked list of entries; it carries a
 * <code>null</code> key and value and is never exposed to callers.
 **/
private Entry sentinel;
/**
 * Map of keys to Entry nodes (not to the raw values) — lookups go
 * through here, iteration order comes from the linked list.
 **/
private HashMap entries;
/**
* Construct a new sequenced hash map with default initial size and load
* factor.
**/
public SequencedHashMap() {
    // default-sized backing map plus an empty (self-linked) sentinel list
    entries = new HashMap();
    sentinel = createSentinel();
}
/**
* Construct a new sequenced hash map with the specified initial size and
* default load factor.
*
* @param initialSize the initial size for the hash table
*
* @see HashMap#HashMap(int)
**/
public SequencedHashMap(int initialSize) {
    // backing map sized up front plus an empty (self-linked) sentinel list
    entries = new HashMap(initialSize);
    sentinel = createSentinel();
}
/**
* Construct a new sequenced hash map with the specified initial size and
* load factor.
*
* @param initialSize the initial size for the hash table
*
* @param loadFactor the load factor for the hash table.
*
* @see HashMap#HashMap(int,float)
**/
public SequencedHashMap(int initialSize, float loadFactor) {
    // backing map with explicit sizing plus an empty sentinel list
    entries = new HashMap(initialSize, loadFactor);
    sentinel = createSentinel();
}
/**
* Construct a new sequenced hash map and add all the elements in the
* specified map. The order in which the mappings in the specified map are
* added is defined by {@link #putAll(Map)}.
**/
public SequencedHashMap(Map m) {
    this();     // must initialize sentinel and backing map first
    putAll(m);  // insertion order follows m.entrySet() iteration order
}
/**
* Removes an internal entry from the linked list. This does not remove
* it from the underlying map.
**/
private void removeEntry(Entry entry) {
    // Unlink the node from both neighbours. The node's own next/prev
    // pointers are intentionally left untouched (the iterator relies on
    // them remaining valid after a removal).
    entry.prev.next = entry.next;
    entry.next.prev = entry.prev;
}
/**
* Inserts a new internal entry to the tail of the linked list. This does
* not add the entry to the underlying map.
**/
private void insertEntry(Entry newEntry) {
    // Splice the node in just before the sentinel, i.e. at the tail.
    Entry oldTail = sentinel.prev;
    newEntry.next = sentinel;
    newEntry.prev = oldTail;
    oldTail.next = newEntry;
    sentinel.prev = newEntry;
}
// per Map.size()
public int size() {
    // the key->entry map already tracks the count; no list walk needed
    return entries.size();
}
// per Map.isEmpty()
public boolean isEmpty() {
    // the linked list is empty exactly when the sentinel points at itself
    return sentinel.next == sentinel;
}
// per Map.containsKey(Object)
public boolean containsKey(Object key) {
    // delegate straight to the underlying key->entry map
    return entries.containsKey(key);
}
// per Map.containsValue(Object)
public boolean containsValue(Object value) {
    // The backing map stores Entry nodes, not values, so it cannot answer
    // this directly; walk the linked list instead. Null gets its own loop
    // so the null test is hoisted out of the per-element comparison.
    if (value == null) {
        for (Entry pos = sentinel.next; pos != sentinel; pos = pos.next) {
            if (pos.getValue() == null) {
                return true;
            }
        }
        return false;
    }
    for (Entry pos = sentinel.next; pos != sentinel; pos = pos.next) {
        if (value.equals(pos.getValue())) {
            return true;
        }
    }
    return false;
}
// per Map.get(Object)
public Object get(Object o) {
    // look up the Entry node and unwrap its value (null if key absent)
    Entry entry = (Entry) entries.get(o);
    return (entry == null) ? null : entry.getValue();
}
/**
* Return the entry for the "oldest" mapping. That is, return the Map.Entry
* for the key-value pair that was first put into the map when compared to
* all the other pairings in the map. This behavior is equivalent to using
* <code>entrySet().iterator().next()</code>, but this method provides an
* optimized implementation.
*
* @return The first entry in the sequence, or <code>null</code> if the
* map is empty.
**/
public Map.Entry getFirst() {
    // Head of the list is sentinel.next; guard against handing out the
    // sentinel itself when the map is empty.
    return isEmpty() ? null : sentinel.next;
}
/**
* Return the key for the "oldest" mapping. That is, return the key for the
* mapping that was first put into the map when compared to all the other
* objects in the map. This behavior is equivalent to using
* <code>getFirst().getKey()</code>, but this method provides a slightly
* optimized implementation.
*
* @return The first key in the sequence, or <code>null</code> if the
* map is empty.
**/
public Object getFirstKey() {
    // Head of the list is sentinel.next. No emptiness test is needed:
    // when the map is empty, sentinel.next is the sentinel itself, whose
    // key is null -- exactly the value we want to return in that case.
    return sentinel.next.getKey();
}
/**
* Return the value for the "oldest" mapping. That is, return the value for
* the mapping that was first put into the map when compared to all the
* other objects in the map. This behavior is equivalent to using
* <code>getFirst().getValue()</code>, but this method provides a slightly
* optimized implementation.
*
* @return The first value in the sequence, or <code>null</code> if the
* map is empty.
**/
public Object getFirstValue() {
    // Head of the list is sentinel.next. No emptiness test is needed:
    // when the map is empty, sentinel.next is the sentinel itself, whose
    // value is null -- exactly the value we want to return in that case.
    return sentinel.next.getValue();
}
/**
* Return the entry for the "newest" mapping. That is, return the Map.Entry
* for the key-value pair that was first put into the map when compared to
* all the other pairings in the map. The behavior is equivalent to:
*
* <pre>
* Object obj = null;
* Iterator iter = entrySet().iterator();
* while(iter.hasNext()) {
* obj = iter.next();
* }
* return (Map.Entry)obj;
* </pre>
*
* However, the implementation of this method ensures an O(1) lookup of the
* last key rather than O(n).
*
* @return The last entry in the sequence, or <code>null</code> if the map
* is empty.
**/
public Map.Entry getLast() {
    // Tail of the list is sentinel.prev; guard against handing out the
    // sentinel itself when the map is empty.
    return isEmpty() ? null : sentinel.prev;
}
/**
* Return the key for the "newest" mapping. That is, return the key for the
* mapping that was last put into the map when compared to all the other
* objects in the map. This behavior is equivalent to using
* <code>getLast().getKey()</code>, but this method provides a slightly
* optimized implementation.
*
* @return The last key in the sequence, or <code>null</code> if the map is
* empty.
**/
public Object getLastKey() {
    // Tail of the list is sentinel.prev. No emptiness test is needed:
    // when the map is empty, sentinel.prev is the sentinel itself, whose
    // key is null -- exactly the value we want to return in that case.
    return sentinel.prev.getKey();
}
/**
* Return the value for the "newest" mapping. That is, return the value for
* the mapping that was last put into the map when compared to all the other
* objects in the map. This behavior is equivalent to using
* <code>getLast().getValue()</code>, but this method provides a slightly
* optimized implementation.
*
* @return The last value in the sequence, or <code>null</code> if the map
* is empty.
**/
public Object getLastValue() {
    // Tail of the list is sentinel.prev. No emptiness test is needed:
    // when the map is empty, sentinel.prev is the sentinel itself, whose
    // value is null -- exactly the value we want to return in that case.
    return sentinel.prev.getValue();
}
// per Map.put(Object,Object)
public Object put(Object key, Object value) {
    Object oldValue = null;
    // look up any existing entry for this key
    Entry e = (Entry) entries.get(key);
    if (e == null) {
        // brand new mapping
        e = new Entry(key, value);
        entries.put(key, e);
    } else {
        // Existing mapping: unlink it so the re-insert below moves it to
        // the end of the sequence, and swap in the new value.
        // The stored key is deliberately not replaced -- it is equal (in
        // the equals(Object) sense) to the one supplied, which is all the
        // map ever relies on.
        removeEntry(e);
        oldValue = e.setValue(value);
    }
    // (re)append at the tail of the sequence
    insertEntry(e);
    return oldValue;
}
// per Map.remove(Object)
public Object remove(Object key) {
    Entry e = (Entry) entries.remove(key);
    if (e == null) {
        return null;
    }
    // drop the node from the sequence list as well
    removeEntry(e);
    return e.getValue();
}
/**
* Adds all the mappings in the specified map to this map, replacing any
* mappings that already exist (as per {@link Map#putAll(Map)}). The order
* in which the entries are added is determined by the iterator returned
* from {@link Map#entrySet()} for the specified map.
*
* @param t the mappings that should be added to this map.
*
* @exception NullPointerException if <code>t</code> is <code>null</code>
**/
public void putAll(Map t) {
    // insertion order follows t.entrySet() iteration order
    for (Iterator iter = t.entrySet().iterator(); iter.hasNext();) {
        Map.Entry entry = (Map.Entry) iter.next();
        put(entry.getKey(), entry.getValue());
    }
}
// per Map.clear()
public void clear() {
    // wipe the key->entry map
    entries.clear();
    // and reset the linked list to just the self-linked sentinel
    sentinel.prev = sentinel;
    sentinel.next = sentinel;
}
// per Map.keySet()
public Set keySet() {
    return new AbstractSet() {
        // required implementations
        public Iterator iterator() {
            return new OrderedIterator(KEY);
        }
        public boolean remove(Object o) {
            return SequencedHashMap.this.remove(o) != null;
        }
        // delegate straight to the map -- cheaper than AbstractSet's defaults
        public void clear() {
            SequencedHashMap.this.clear();
        }
        public int size() {
            return SequencedHashMap.this.size();
        }
        public boolean isEmpty() {
            return SequencedHashMap.this.isEmpty();
        }
        public boolean contains(Object o) {
            return SequencedHashMap.this.containsKey(o);
        }
    };
}
// per Map.values()
public Collection values() {
    return new AbstractCollection() {
        // required implementation
        public Iterator iterator() {
            return new OrderedIterator(VALUE);
        }
        public boolean remove(Object value) {
            // Scan the sequence for the first matching value. Null gets
            // its own loop so the null test is hoisted out of the
            // per-element comparison.
            if (value == null) {
                for (Entry pos = sentinel.next; pos != sentinel; pos = pos.next) {
                    if (pos.getValue() == null) {
                        SequencedHashMap.this.remove(pos.getKey());
                        return true;
                    }
                }
                return false;
            }
            for (Entry pos = sentinel.next; pos != sentinel; pos = pos.next) {
                if (value.equals(pos.getValue())) {
                    SequencedHashMap.this.remove(pos.getKey());
                    return true;
                }
            }
            return false;
        }
        // delegate straight to the map -- cheaper than the abstract defaults
        public void clear() {
            SequencedHashMap.this.clear();
        }
        public int size() {
            return SequencedHashMap.this.size();
        }
        public boolean isEmpty() {
            return SequencedHashMap.this.isEmpty();
        }
        public boolean contains(Object o) {
            return SequencedHashMap.this.containsValue(o);
        }
    };
}
// per Map.entrySet()
public Set entrySet() {
    return new AbstractSet() {
        // helper: map an arbitrary object to our internal Entry node,
        // or null when it does not correspond to a mapping in this map
        private Entry findEntry(Object o) {
            if(o == null) return null;
            if(!(o instanceof Map.Entry)) return null;
            Map.Entry e = (Map.Entry)o;
            Entry entry = (Entry)entries.get(e.getKey());
            // BUG FIX: entries.get() returns null when the key is absent;
            // the original dereferenced it unconditionally, so contains()
            // and remove() threw NullPointerException for an entry whose
            // key is not in the map instead of answering false.
            if(entry == null) return null;
            if(entry.equals(e)) return entry;
            else return null;
        }
        // required implementation
        public Iterator iterator() {
            return new OrderedIterator(ENTRY);
        }
        public boolean remove(Object o) {
            Entry e = findEntry(o);
            if(e == null) return false;
            return SequencedHashMap.this.remove(e.getKey()) != null;
        }
        // more efficient impls than the abstract collection defaults
        public void clear() {
            SequencedHashMap.this.clear();
        }
        public int size() {
            return SequencedHashMap.this.size();
        }
        public boolean isEmpty() {
            return SequencedHashMap.this.isEmpty();
        }
        public boolean contains(Object o) {
            return findEntry(o) != null;
        }
    };
}
// constants to define what the iterator should return on "next"
private static final int KEY = 0;
private static final int VALUE = 1;
private static final int ENTRY = 2;
// high (sign) bit doubles as the iterator's "current element removed" flag;
// it is OR-ed into returnType rather than stored separately to save memory
private static final int REMOVED_MASK = 0x80000000;
// Iterator over the sequence list, yielding keys, values or entries in
// insertion order depending on the returnType passed to the constructor.
// Code left untouched: the REMOVED_MASK bit protocol below is intricate
// and order-sensitive.
private class OrderedIterator implements Iterator {
    /**
     * Holds the type that should be returned from the iterator. The value
     * should be either {@link #KEY}, {@link #VALUE}, or {@link #ENTRY}. To
     * save a tiny bit of memory, this field is also used as a marker for when
     * remove has been called on the current object to prevent a second remove
     * on the same element. Essentially, if this value is negative (i.e. the
     * bit specified by {@link #REMOVED_MASK} is set), the current position
     * has been removed. If positive, remove can still be called.
     **/
    private int returnType;
    /**
     * Holds the "current" position in the iterator. When pos.next is the
     * sentinel, we've reached the end of the list.
     **/
    private Entry pos = sentinel;
    /**
     * Construct an iterator over the sequenced elements in the order in which
     * they were added. The {@link #next()} method returns the type specified
     * by <code>returnType</code> which must be either {@link #KEY}, {@link
     * #VALUE}, or {@link #ENTRY}.
     **/
    public OrderedIterator(int returnType) {
        //// Since this is a private inner class, nothing else should have
        //// access to the constructor. Since we know the rest of the outer
        //// class uses the iterator correctly, we can leave of the following
        //// check:
        //if(returnType >= 0 && returnType <= 2) {
        // throw new IllegalArgumentException("Invalid iterator type");
        //}
        // Set the "removed" bit so that the iterator starts in a state where
        // "next" must be called before "remove" will succeed.
        this.returnType = returnType | REMOVED_MASK;
    }
    /**
     * Returns whether there is any additional elements in the iterator to be
     * returned.
     *
     * @return <code>true</code> if there are more elements left to be
     * returned from the iterator; <code>false</code> otherwise.
     **/
    public boolean hasNext() {
        return pos.next != sentinel;
    }
    /**
     * Returns the next element from the iterator.
     *
     * @return the next element from the iterator.
     *
     * @exception NoSuchElementException if there are no more elements in the
     * iterator.
     **/
    public Object next() {
        if(pos.next == sentinel) {
            throw new NoSuchElementException();
        }
        // clear the "removed" flag -- a fresh element can now be removed
        returnType = returnType & ~REMOVED_MASK;
        pos = pos.next;
        switch(returnType) {
        case KEY:
            return pos.getKey();
        case VALUE:
            return pos.getValue();
        case ENTRY:
            return pos;
        default:
            // should never happen
            throw new Error("bad iterator type: " + returnType);
        }
    }
    /**
     * Removes the last element returned from the {@link #next()} method from
     * the sequenced map.
     *
     * @exception IllegalStateException if there isn't a "last element" to be
     * removed. That is, if {@link #next()} has never been called, or if
     * {@link #remove()} was already called on the element.
     **/
    public void remove() {
        if((returnType & REMOVED_MASK) != 0) {
            throw new IllegalStateException("remove() must follow next()");
        }
        // remove the entry via the outer map so both the hash table and the
        // sequence list stay consistent; pos's own next pointer stays valid
        SequencedHashMap.this.remove(pos.getKey());
        // set the removed flag to block a second remove() on this element
        returnType = returnType | REMOVED_MASK;
    }
}
// APIs maintained from previous version of SequencedHashMap for backwards
// compatibility
/**
* Creates a shallow copy of this object, preserving the internal structure
* by copying only references. The keys and values themselves are not
* <code>clone()</code>'d. The cloned object maintains the same sequence.
*
* @return A clone of this instance.
*/
public Object clone () {
    // Calling super.clone() keeps this a well-behaved Cloneable even though
    // every field it copies is immediately replaced below; for the
    // motivation see:
    // http://www.javaworld.com/javaworld/jw-01-1999/jw-01-object.html
    SequencedHashMap copy = (SequencedHashMap)super.clone();
    // fresh, empty circular sequence list...
    copy.sentinel = createSentinel();
    // ...and a fresh, empty lookup map.  Note: the original's initial
    // capacity and load factor are not preserved.
    copy.entries = new HashMap();
    // Re-insert every mapping so the copy builds its own internal Entry
    // objects.  We cannot simply clone the hash map and sentinel: that
    // would leave two maps sharing entry objects and unable to maintain
    // internal consistency (see the Entry implementation for why Entry
    // itself cannot be cloned, and thus why everything is recreated).
    copy.putAll(this);
    return copy;
}
/**
* Returns the Map.Entry at the specified index
*
* @exception ArrayIndexOutOfBoundsException if the specified index is
* <code>&lt; 0</code> or <code>&gt;</code> the size of the map.
**/
private Map.Entry getEntry(int index) {
    if (index < 0) {
        throw new ArrayIndexOutOfBoundsException(index + " < 0");
    }
    // walk to the entry just before the requested position
    Entry before = sentinel;
    int i = -1;
    while (i < (index - 1) && before.next != sentinel) {
        i++;
        before = before.next;
    }
    // before.next is the requested entry; hitting the sentinel means the
    // index was past the end of the list
    if (before.next == sentinel) {
        throw new ArrayIndexOutOfBoundsException(index + " >= " + (i + 1));
    }
    return before.next;
}
/**
* Returns the key at the specified index.
*
* @exception ArrayIndexOutOfBoundsException if the <code>index</code> is
* <code>&lt; 0</code> or <code>&gt;</code> the size of the map.
*/
public Object get (int index)
{
// getEntry performs the range checking and throws on a bad index
return getEntry(index).getKey();
}
/**
* Returns the value at the specified index.
*
* @exception ArrayIndexOutOfBoundsException if the <code>index</code> is
* <code>&lt; 0</code> or <code>&gt;</code> the size of the map.
*/
public Object getValue (int index)
{
// getEntry performs the range checking and throws on a bad index
return getEntry(index).getValue();
}
/**
* Returns the index of the specified key.
*/
public int indexOf (Object key)
{
    Entry e = (Entry)entries.get(key);
    if (e == null) {
        // key not present: report -1 (list-style "not found") instead of
        // failing with a NullPointerException on e.prev below
        return -1;
    }
    // count how many entries precede this one by walking the prev links
    // back to the sentinel
    int pos = 0;
    while(e.prev != sentinel) {
        pos++;
        e = e.prev;
    }
    return pos;
}
/**
* Returns a key iterator.
*/
public Iterator iterator ()
{
// iterate the keys via the map's key-set view
return keySet().iterator();
}
/**
* Returns the last index of the specified key.
*/
public int lastIndexOf (Object key)
{
// keys in a map are guaranteed to be unique, so the last index of a key
// is always equal to its first (and only) index
return indexOf(key);
}
/**
* Returns a List view of the keys rather than a set view. The returned
* list is unmodifiable. This is required because changes to the values of
* the list (using {@link java.util.ListIterator#set(Object)}) will
* effectively remove the value from the list and reinsert that value at
* the end of the list, which is an unexpected side effect of changing the
* value of a list. This occurs because changing the key, changes when the
* mapping is added to the map and thus where it appears in the list.
*
* <P>An alternative to this method is to use {@link #keySet()}
*
* @see #keySet()
* @return The ordered list of keys.
*/
public List sequence()
{
    // snapshot the keys in sequence order, then freeze the snapshot
    List keys = new ArrayList(size());
    for (Iterator it = keySet().iterator(); it.hasNext(); ) {
        keys.add(it.next());
    }
    return Collections.unmodifiableList(keys);
}
/**
* Removes the element at the specified index.
*
* @param index The index of the object to remove.
* @return The previous value coressponding the <code>key</code>, or
* <code>null</code> if none existed.
*
* @exception ArrayIndexOutOfBoundsException if the <code>index</code> is
* <code>&lt; 0</code> or <code>&gt;</code> the size of the map.
*/
public Object remove (int index)
{
// get(index) resolves the key at that position (range-checking as it
// goes); the remove(Object) overload then removes the mapping and
// returns the previous value
return remove(get(index));
}
}
@@ -0,0 +1,249 @@
/*
* $Header: /home/jerenkrantz/tmp/commons/commons-convert/cvs/home/cvs/jakarta-commons//collections/src/java/org/apache/commons/collections/iterators/ArrayListIterator.java,v 1.4 2003/08/31 17:25:49 scolebourne Exp $
* ====================================================================
*
* The Apache Software License, Version 1.1
*
* Copyright (c) 1999-2003 The Apache Software Foundation. All rights
* reserved.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions
* are met:
*
* 1. Redistributions of source code must retain the above copyright
* notice, this list of conditions and the following disclaimer.
*
* 2. Redistributions in binary form must reproduce the above copyright
* notice, this list of conditions and the following disclaimer in
* the documentation and/or other materials provided with the
* distribution.
*
* 3. The end-user documentation included with the redistribution, if
* any, must include the following acknowledgement:
* "This product includes software developed by the
* Apache Software Foundation (http://www.apache.org/)."
* Alternately, this acknowledgement may appear in the software itself,
* if and wherever such third-party acknowledgements normally appear.
*
* 4. The names "The Jakarta Project", "Commons", and "Apache Software
* Foundation" must not be used to endorse or promote products derived
* from this software without prior written permission. For written
* permission, please contact apache@apache.org.
*
* 5. Products derived from this software may not be called "Apache"
* nor may "Apache" appear in their names without prior written
* permission of the Apache Software Foundation.
*
* THIS SOFTWARE IS PROVIDED ``AS IS'' AND ANY EXPRESSED OR IMPLIED
* WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES
* OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
* DISCLAIMED. IN NO EVENT SHALL THE APACHE SOFTWARE FOUNDATION OR
* ITS CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
* SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
* LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF
* USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND
* ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
* OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT
* OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF
* SUCH DAMAGE.
* ====================================================================
*
* This software consists of voluntary contributions made by many
* individuals on behalf of the Apache Software Foundation. For more
* information on the Apache Software Foundation, please see
* <http://www.apache.org/>.
*
*/
package org.apache.commons.collections.iterators;
import java.lang.reflect.Array;
import java.util.NoSuchElementException;
/**
* Implements a {@link java.util.ListIterator ListIterator} over an array.
* <p>
* The array can be either an array of object or of primitives. If you know
* that you have an object array, the
* {@link org.apache.commons.collections.iterators.ObjectArrayListIterator ObjectArrayListIterator}
* class is a better choice, as it will perform better.
*
* <p>
* This iterator does not support {@link #add(Object)} or {@link #remove()}, as the array
* cannot be changed in size. The {@link #set(Object)} method is supported however.
*
* @see org.apache.commons.collections.iterators.ArrayIterator
* @see java.util.Iterator
* @see java.util.ListIterator
*
* @since Commons Collections 2.2
* @version $Revision: 1.4 $ $Date: 2003/08/31 17:25:49 $
*
* @author <a href="mailto:neilotoole@users.sourceforge.net">Neil O'Toole</a>
* @author Stephen Colebourne
*/
public class ArrayListIterator extends ArrayIterator implements ResetableListIterator {

    /**
     * Holds the index of the last item returned by a call to <code>next()</code>
     * or <code>previous()</code>. This is set to <code>-1</code> if neither method
     * has yet been invoked. <code>lastItemIndex</code> is used to
     * implement the {@link #set} method.
     */
    protected int lastItemIndex = -1;

    /**
     * Constructor for use with <code>setArray</code>.
     * <p>
     * Using this constructor, the iterator is equivalent to an empty iterator
     * until {@link #setArray(Object)} is called to establish the array to iterate over.
     */
    public ArrayListIterator() {
        super();
    }

    /**
     * Constructs an ArrayListIterator that will iterate over the values in the
     * specified array.
     *
     * @param array the array to iterate over
     * @throws IllegalArgumentException if <code>array</code> is not an array.
     * @throws NullPointerException if <code>array</code> is <code>null</code>
     */
    public ArrayListIterator(Object array) {
        super(array);
    }

    /**
     * Constructs an ArrayListIterator that will iterate over the values in the
     * specified array from a specific start index.
     *
     * @param array the array to iterate over
     * @param start the index to start iterating at
     * @throws IllegalArgumentException if <code>array</code> is not an array.
     * @throws NullPointerException if <code>array</code> is <code>null</code>
     * @throws IndexOutOfBoundsException if the start index is out of bounds
     */
    public ArrayListIterator(Object array, int start) {
        super(array, start);
        // NOTE(review): super(array, start) presumably records startIndex
        // already; this re-assignment is retained for safety — confirm
        // against ArrayIterator before removing it.
        this.startIndex = start;
    }

    /**
     * Construct an ArrayListIterator that will iterate over a range of values
     * in the specified array.
     *
     * @param array the array to iterate over
     * @param start the index to start iterating at
     * @param end the index (exclusive) to finish iterating at
     * @throws IllegalArgumentException if <code>array</code> is not an array.
     * @throws IndexOutOfBoundsException if the start or end index is out of bounds
     * @throws IllegalArgumentException if end index is before the start
     * @throws NullPointerException if <code>array</code> is <code>null</code>
     */
    public ArrayListIterator(Object array, int start, int end) {
        super(array, start, end);
        // see note in ArrayListIterator(Object, int)
        this.startIndex = start;
    }

    // ListIterator interface
    //-------------------------------------------------------------------------
    /**
     * Returns true if there are previous elements to return from the array.
     *
     * @return true if there is a previous element to return
     */
    public boolean hasPrevious() {
        return (this.index > this.startIndex);
    }

    /**
     * Gets the previous element from the array.
     *
     * @return the previous element
     * @throws NoSuchElementException if there is no previous element
     */
    public Object previous() {
        if (!hasPrevious()) {
            throw new NoSuchElementException();
        }
        // step back first, then remember the position for set()
        this.lastItemIndex = --this.index;
        return Array.get(this.array, this.index);
    }

    /**
     * Gets the next element from the array.
     *
     * @return the next element
     * @throws NoSuchElementException if there is no next element
     */
    public Object next() {
        if (!hasNext()) {
            throw new NoSuchElementException();
        }
        // remember the position for set(), then step forward
        this.lastItemIndex = this.index;
        return Array.get(this.array, this.index++);
    }

    /**
     * Gets the next index to be retrieved.
     *
     * @return the index of the item to be retrieved next
     */
    public int nextIndex() {
        return this.index;
    }

    /**
     * Gets the index of the item to be retrieved if {@link #previous()} is called.
     *
     * @return the index of the item to be retrieved next
     */
    public int previousIndex() {
        return this.index - 1;
    }

    /**
     * This iterator does not support modification of its backing collection, and so will
     * always throw an {@link UnsupportedOperationException} when this method is invoked.
     *
     * @throws UnsupportedOperationException always thrown.
     * @see java.util.ListIterator#set
     */
    public void add(Object o) {
        throw new UnsupportedOperationException("add() method is not supported");
    }

    /**
     * Sets the element under the cursor.
     * <p>
     * This method sets the element that was returned by the last call
     * to {@link #next()} or {@link #previous()}.
     * <p>
     * <b>Note:</b> {@link ListIterator} implementations that support
     * <code>add()</code> and <code>remove()</code> only allow <code>set()</code> to be called
     * once per call to <code>next()</code> or <code>previous</code> (see the {@link ListIterator}
     * javadoc for more details). Since this implementation does
     * not support <code>add()</code> or <code>remove()</code>, <code>set()</code> may be
     * called as often as desired.
     *
     * @see java.util.ListIterator#set
     */
    public void set(Object o) {
        if (this.lastItemIndex == -1) {
            throw new IllegalStateException("must call next() or previous() before a call to set()");
        }
        Array.set(this.array, this.lastItemIndex, o);
    }

    /**
     * Resets the iterator back to the start index.
     */
    public void reset() {
        super.reset();
        // forget the "last returned" position so set() fails until
        // next()/previous() is called again
        this.lastItemIndex = -1;
    }
}
@@ -0,0 +1,841 @@
/*
* $Header: /home/jerenkrantz/tmp/commons/commons-convert/cvs/home/cvs/jakarta-commons//collections/src/java/org/apache/commons/collections/BeanMap.java,v 1.26 2004/01/05 21:37:13 scolebourne Exp $
* ====================================================================
*
* The Apache Software License, Version 1.1
*
* Copyright (c) 2001-2004 The Apache Software Foundation. All rights
* reserved.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions
* are met:
*
* 1. Redistributions of source code must retain the above copyright
* notice, this list of conditions and the following disclaimer.
*
* 2. Redistributions in binary form must reproduce the above copyright
* notice, this list of conditions and the following disclaimer in
* the documentation and/or other materials provided with the
* distribution.
*
* 3. The end-user documentation included with the redistribution, if
* any, must include the following acknowledgement:
* "This product includes software developed by the
* Apache Software Foundation (http://www.apache.org/)."
* Alternately, this acknowledgement may appear in the software itself,
* if and wherever such third-party acknowledgements normally appear.
*
* 4. The names "The Jakarta Project", "Commons", and "Apache Software
* Foundation" must not be used to endorse or promote products derived
* from this software without prior written permission. For written
* permission, please contact apache@apache.org.
*
* 5. Products derived from this software may not be called "Apache"
* nor may "Apache" appear in their names without prior written
* permission of the Apache Software Foundation.
*
* THIS SOFTWARE IS PROVIDED ``AS IS'' AND ANY EXPRESSED OR IMPLIED
* WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES
* OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
* DISCLAIMED. IN NO EVENT SHALL THE APACHE SOFTWARE FOUNDATION OR
* ITS CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
* SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
* LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF
* USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND
* ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
* OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT
* OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF
* SUCH DAMAGE.
* ====================================================================
*
* This software consists of voluntary contributions made by many
* individuals on behalf of the Apache Software Foundation. For more
* information on the Apache Software Foundation, please see
* <http://www.apache.org/>.
*
*/
package org.apache.commons.collections;
import java.beans.BeanInfo;
import java.beans.IntrospectionException;
import java.beans.Introspector;
import java.beans.PropertyDescriptor;
import java.lang.reflect.Constructor;
import java.lang.reflect.InvocationTargetException;
import java.lang.reflect.Method;
import java.util.AbstractMap;
import java.util.AbstractSet;
import java.util.ArrayList;
import java.util.Collection;
import java.util.HashMap;
import java.util.Iterator;
import java.util.Set;
import org.apache.commons.collections.list.UnmodifiableList;
import org.apache.commons.collections.keyvalue.AbstractMapEntry;
import org.apache.commons.collections.set.UnmodifiableSet;
/**
* An implementation of Map for JavaBeans which uses introspection to
* get and put properties in the bean.
* <p>
* If an exception occurs during attempts to get or set a property then the
* property is considered non existent in the Map
*
* @since Commons Collections 1.0
* @version $Revision: 1.26 $ $Date: 2004/01/05 21:37:13 $
*
* @author James Strachan
* @author Stephen Colebourne
*/
public class BeanMap extends AbstractMap implements Cloneable {
private transient Object bean;
private transient HashMap readMethods = new HashMap();
private transient HashMap writeMethods = new HashMap();
private transient HashMap types = new HashMap();
/**
* An empty array. Used to invoke accessors via reflection.
*/
public static final Object[] NULL_ARGUMENTS = {};
/**
* Maps primitive Class types to transformers. The transformer
* transform strings into the appropriate primitive wrapper.
*/
public static HashMap defaultTransformers = new HashMap();
static {
// Register a transformer for each primitive type so that values (via
// their toString form) can be converted to the appropriate wrapper when
// written into a primitive-typed bean property.
defaultTransformers.put(
Boolean.TYPE,
new Transformer() {
public Object transform( Object input ) {
return Boolean.valueOf( input.toString() );
}
}
);
defaultTransformers.put(
Character.TYPE,
new Transformer() {
public Object transform( Object input ) {
// only the first character of the string form is used
return new Character( input.toString().charAt( 0 ) );
}
}
);
defaultTransformers.put(
Byte.TYPE,
new Transformer() {
public Object transform( Object input ) {
return Byte.valueOf( input.toString() );
}
}
);
defaultTransformers.put(
Short.TYPE,
new Transformer() {
public Object transform( Object input ) {
return Short.valueOf( input.toString() );
}
}
);
defaultTransformers.put(
Integer.TYPE,
new Transformer() {
public Object transform( Object input ) {
return Integer.valueOf( input.toString() );
}
}
);
defaultTransformers.put(
Long.TYPE,
new Transformer() {
public Object transform( Object input ) {
return Long.valueOf( input.toString() );
}
}
);
defaultTransformers.put(
Float.TYPE,
new Transformer() {
public Object transform( Object input ) {
return Float.valueOf( input.toString() );
}
}
);
defaultTransformers.put(
Double.TYPE,
new Transformer() {
public Object transform( Object input ) {
return Double.valueOf( input.toString() );
}
}
);
}
// Constructors
//-------------------------------------------------------------------------
/**
* Constructs a new empty <code>BeanMap</code>.
*/
public BeanMap() {
// no bean yet: the map stays empty until setBean(Object) is called
}
/**
* Constructs a new <code>BeanMap</code> that operates on the
* specified bean. If the given bean is <code>null</code>, then
* this map will be empty.
*
* @param bean the bean for this map to operate on
*/
public BeanMap(Object bean) {
this.bean = bean;
// introspect the bean's properties into the read/write/type maps
initialise();
}
// Map interface
//-------------------------------------------------------------------------
public String toString() {
// String.valueOf renders a null bean as "null" rather than throwing
return "BeanMap<" + String.valueOf(bean) + ">";
}
/**
* Clone this bean map using the following process:
*
* <ul>
* <li>If there is no underlying bean, return a cloned BeanMap without a
* bean.
*
* <li>Since there is an underlying bean, try to instantiate a new bean of
* the same type using Class.newInstance().
*
* <li>If the instantiation fails, throw a CloneNotSupportedException
*
* <li>Clone the bean map and set the newly instantiated bean as the
* underlying bean for the bean map.
*
* <li>Copy each property that is both readable and writable from the
* existing object to a cloned bean map.
*
* <li>If anything fails along the way, throw a
* CloneNotSupportedException.
*
* <ul>
*/
public Object clone() throws CloneNotSupportedException {
    BeanMap newMap = (BeanMap)super.clone();
    if(bean == null) {
        // no bean, just an empty bean map at the moment.  return a newly
        // cloned and empty bean map.
        return newMap;
    }
    Object newBean = null;
    Class beanClass = null;
    try {
        beanClass = bean.getClass();
        newBean = beanClass.newInstance();
    } catch (Exception e) {
        // unable to instantiate; chain the cause so callers can see the
        // real reason the clone failed
        CloneNotSupportedException cnse = new CloneNotSupportedException
            ("Unable to instantiate the underlying bean \"" +
             beanClass.getName() + "\": " + e);
        cnse.initCause(e);
        throw cnse;
    }
    try {
        newMap.setBean(newBean);
    } catch (Exception exception) {
        CloneNotSupportedException cnse = new CloneNotSupportedException
            ("Unable to set bean in the cloned bean map: " +
             exception);
        cnse.initCause(exception);
        throw cnse;
    }
    try {
        // copy only properties that are readable and writable.  If its
        // not readable, we can't get the value from the old map.  If
        // its not writable, we can't write a value into the new map.
        Iterator readableKeys = readMethods.keySet().iterator();
        while(readableKeys.hasNext()) {
            Object key = readableKeys.next();
            if(getWriteMethod(key) != null) {
                newMap.put(key, get(key));
            }
        }
    } catch (Exception exception) {
        CloneNotSupportedException cnse = new CloneNotSupportedException
            ("Unable to copy bean values to cloned bean map: " +
             exception);
        cnse.initCause(exception);
        throw cnse;
    }
    return newMap;
}
/**
* Puts all of the writeable properties from the given BeanMap into this
* BeanMap. Read-only and Write-only properties will be ignored.
*
* @param map the BeanMap whose properties to put
*/
public void putAllWriteable(BeanMap map) {
    // copy every property that the source map can read and this map can write
    for (Iterator keys = map.readMethods.keySet().iterator(); keys.hasNext(); ) {
        Object key = keys.next();
        if (getWriteMethod(key) != null) {
            put(key, map.get(key));
        }
    }
}
/**
* This method reinitializes the bean map to have default values for the
* bean's properties. This is accomplished by constructing a new instance
* of the bean which the map uses as its underlying data source. This
* behavior for <code>clear()</code> differs from the Map contract in that
* the mappings are not actually removed from the map (the mappings for a
* BeanMap are fixed).
*/
public void clear() {
    // "clearing" a BeanMap means resetting the underlying bean to a fresh
    // default instance; the mappings themselves are fixed by the bean type
    if(bean == null) return;
    Class beanClass = null;
    try {
        beanClass = bean.getClass();
        bean = beanClass.newInstance();
    }
    catch (Exception e) {
        // chain the cause so callers can diagnose why instantiation failed
        UnsupportedOperationException uoe =
            new UnsupportedOperationException( "Could not create new instance of class: " + beanClass );
        uoe.initCause(e);
        throw uoe;
    }
}
/**
* Returns true if the bean defines a property with the given name.
* <p>
* The given name must be a <code>String</code>; if not, this method
* returns false. This method will also return false if the bean
* does not define a property with that name.
* <p>
* Write-only properties will not be matched as the test operates against
* property read methods.
*
* @param name the name of the property to check
* @return false if the given name is null or is not a <code>String</code>;
* false if the bean does not define a property with that name; or
* true if the bean does define a property with that name
*/
public boolean containsKey(Object name) {
    // a property is "contained" exactly when it has a readable accessor;
    // write-only properties are deliberately not matched
    return getReadMethod(name) != null;
}
/**
* Returns true if the bean defines a property whose current value is
* the given object.
*
* @param value the value to check
* @return false true if the bean has at least one property whose
* current value is that object, false otherwise
*/
public boolean containsValue(Object value) {
// use default implementation (AbstractMap scans the entry set)
return super.containsValue(value);
}
/**
* Returns the value of the bean's property with the given name.
* <p>
* The given name must be a {@link String} and must not be
* null; otherwise, this method returns <code>null</code>.
* If the bean defines a property with the given name, the value of
* that property is returned. Otherwise, <code>null</code> is
* returned.
* <p>
* Write-only properties will not be matched as the test operates against
* property read methods.
*
* @param name the name of the property whose value to return
* @return the value of the property with that name
*/
public Object get(Object name) {
    // no bean or no readable property with that name: absent, return null
    if ( bean == null ) {
        return null;
    }
    Method method = getReadMethod( name );
    if ( method == null ) {
        return null;
    }
    try {
        return method.invoke( bean, NULL_ARGUMENTS );
    }
    catch ( IllegalAccessException e ) {
        logWarn( e );
    }
    catch ( IllegalArgumentException e ) {
        logWarn( e );
    }
    catch ( InvocationTargetException e ) {
        logWarn( e );
    }
    catch ( NullPointerException e ) {
        logWarn( e );
    }
    // an accessor failure is treated as "property does not exist"
    return null;
}
/**
* Sets the bean property with the given name to the given value.
*
* @param name the name of the property to set
* @param value the value to set that property to
* @return the previous value of that property
* @throws IllegalArgumentException if the given name is null;
* if the given name is not a {@link String}; if the bean doesn't
* define a property with that name; or if the bean property with
* that name is read-only
*/
public Object put(Object name, Object value) throws IllegalArgumentException, ClassCastException {
    // with no underlying bean there is nothing to set
    if ( bean == null ) {
        return null;
    }
    Object oldValue = get( name );
    Method method = getWriteMethod( name );
    if ( method == null ) {
        throw new IllegalArgumentException( "The bean of type: "+ bean.getClass().getName() + " has no property called: " + name );
    }
    try {
        Object[] arguments = createWriteMethodArguments( method, value );
        method.invoke( bean, arguments );
        // re-read so the change notification reflects what the bean
        // actually stored, which may differ from the supplied value
        firePropertyChange( name, oldValue, get( name ) );
    }
    catch ( InvocationTargetException e ) {
        logInfo( e );
        throw new IllegalArgumentException( e.getMessage() );
    }
    catch ( IllegalAccessException e ) {
        logInfo( e );
        throw new IllegalArgumentException( e.getMessage() );
    }
    return oldValue;
}
/**
* Returns the number of properties defined by the bean.
*
* @return the number of properties defined by the bean
*/
public int size() {
// one map entry per readable property
return readMethods.size();
}
/**
* Get the keys for this BeanMap.
* <p>
* Write-only properties are <b>not</b> included in the returned set of
* property names, although it is possible to set their value and to get
* their type.
*
* @return BeanMap keys. The Set returned by this method is not
* modifiable.
*/
public Set keySet() {
// names of readable properties only, wrapped so callers cannot mutate it
return UnmodifiableSet.decorate(readMethods.keySet());
}
/**
* Gets a Set of MapEntry objects that are the mappings for this BeanMap.
* <p>
* Each MapEntry can be set but not removed.
*
* @return the unmodifiable set of mappings
*/
public Set entrySet() {
// anonymous set view backed by entryIterator(); its size mirrors the
// readable-property count, and the wrapper forbids structural changes
return UnmodifiableSet.decorate(new AbstractSet() {
public Iterator iterator() {
return entryIterator();
}
public int size() {
return BeanMap.this.readMethods.size();
}
});
}
/**
* Returns the values for the BeanMap.
*
* @return values for the BeanMap. The returned collection is not
* modifiable.
*/
public Collection values() {
    // snapshot the current property values, then freeze the snapshot
    ArrayList values = new ArrayList( readMethods.size() );
    Iterator it = valueIterator();
    while ( it.hasNext() ) {
        values.add( it.next() );
    }
    return UnmodifiableList.decorate(values);
}
// Helper methods
//-------------------------------------------------------------------------
/**
* Returns the type of the property with the given name.
*
* @param name the name of the property
* @return the type of the property, or <code>null</code> if no such
* property exists
*/
public Class getType(String name) {
// populated during initialise() from the bean's PropertyDescriptors
return (Class) types.get( name );
}
/**
* Convenience method for getting an iterator over the keys.
* <p>
* Write-only properties will not be returned in the iterator.
*
* @return an iterator over the keys
*/
public Iterator keyIterator() {
// iterates the names of readable properties
return readMethods.keySet().iterator();
}
/**
* Convenience method for getting an iterator over the values.
*
* @return an iterator over the values
*/
public Iterator valueIterator() {
// lazy view: each value is fetched from the bean at iteration time
final Iterator iter = keyIterator();
return new Iterator() {
public boolean hasNext() {
return iter.hasNext();
}
public Object next() {
Object key = iter.next();
return get(key);
}
public void remove() {
// properties are fixed by the bean type and cannot be removed
throw new UnsupportedOperationException( "remove() not supported for BeanMap" );
}
};
}
/**
* Convenience method for getting an iterator over the entries.
*
* @return an iterator over the entries
*/
public Iterator entryIterator() {
// lazy view: entries are materialised on demand from the current bean state
final Iterator iter = keyIterator();
return new Iterator() {
public boolean hasNext() {
return iter.hasNext();
}
public Object next() {
Object key = iter.next();
Object value = get(key);
return new MyMapEntry( BeanMap.this, key, value );
}
public void remove() {
// properties are fixed by the bean type and cannot be removed
throw new UnsupportedOperationException( "remove() not supported for BeanMap" );
}
};
}
// Properties
//-------------------------------------------------------------------------
/**
* Returns the bean currently being operated on. The return value may
* be null if this map is empty.
*
* @return the bean being operated on by this map
*/
public Object getBean() {
// may be null, in which case this map is empty
return bean;
}
/**
* Sets the bean to be operated on by this map. The given value may
* be null, in which case this map will be empty.
*
* @param newBean the new bean to operate on
*/
public void setBean( Object newBean ) {
bean = newBean;
// rebuild the read/write/type caches for the new bean
reinitialise();
}
/**
* Returns the accessor for the property with the given name.
*
* @param name the name of the property
* @return the accessor method for the property, or null
*/
public Method getReadMethod(String name) {
// cache lookup; populated during initialise()
return (Method) readMethods.get(name);
}
/**
* Returns the mutator for the property with the given name.
*
* @param name the name of the property
* @return the mutator method for the property, or null
*/
public Method getWriteMethod(String name) {
// cache lookup; populated during initialise()
return (Method) writeMethods.get(name);
}
// Implementation methods
//-------------------------------------------------------------------------
/**
* Returns the accessor for the property with the given name.
*
* @param name the name of the property
* @return null if the name is null; null if the name is not a
* {@link String}; null if no such property exists; or the accessor
* method for that property
*/
protected Method getReadMethod( Object name ) {
// Object-keyed variant used internally by get()/containsKey()
return (Method) readMethods.get( name );
}
/**
* Returns the mutator for the property with the given name.
*
* @param name the name of the
* @return null if the name is null; null if the name is not a
* {@link String}; null if no such property exists; null if the
* property is read-only; or the mutator method for that property
*/
protected Method getWriteMethod( Object name ) {
// Object-keyed variant used internally by put() and the clone/copy paths
return (Method) writeMethods.get( name );
}
/**
* Reinitializes this bean. Called during {@link #setBean(Object)}.
* Does introspection to find properties.
*/
protected void reinitialise() {
// drop all cached introspection state before re-introspecting
readMethods.clear();
writeMethods.clear();
types.clear();
initialise();
}
private void initialise() {
    // Introspects the current bean and caches each property's read method,
    // write method, and type, keyed by property name.
    if(getBean() == null) return;
    Class beanClass = getBean().getClass();
    try {
        //BeanInfo beanInfo = Introspector.getBeanInfo( bean, null );
        BeanInfo beanInfo = Introspector.getBeanInfo( beanClass );
        PropertyDescriptor[] propertyDescriptors = beanInfo.getPropertyDescriptors();
        if ( propertyDescriptors != null ) {
            for ( int i = 0; i < propertyDescriptors.length; i++ ) {
                PropertyDescriptor propertyDescriptor = propertyDescriptors[i];
                if ( propertyDescriptor != null ) {
                    String name = propertyDescriptor.getName();
                    Method readMethod = propertyDescriptor.getReadMethod();
                    Method writeMethod = propertyDescriptor.getWriteMethod();
                    Class aType = propertyDescriptor.getPropertyType();
                    if ( readMethod != null ) {
                        readMethods.put( name, readMethod );
                    }
                    // bug fix: the original tested the (never-null)
                    // writeMethods map instead of the local writeMethod,
                    // which cached null mutators for read-only properties
                    if ( writeMethod != null ) {
                        writeMethods.put( name, writeMethod );
                    }
                    types.put( name, aType );
                }
            }
        }
    }
    catch ( IntrospectionException e ) {
        logWarn( e );
    }
}
/**
* Called during a successful {@link #put(Object,Object)} operation.
* Default implementation does nothing. Override to be notified of
* property changes in the bean caused by this map.
*
* @param key the name of the property that changed
* @param oldValue the old value for that property
* @param newValue the new value for that property
*/
protected void firePropertyChange( Object key, Object oldValue, Object newValue ) {
    // Intentionally a no-op; subclasses override to observe property changes.
}
// Implementation classes
//-------------------------------------------------------------------------
/**
* Map entry used by {@link BeanMap}.
*/
/**
 * Map entry used by {@link BeanMap}; writes through to the owning map.
 */
protected static class MyMapEntry extends AbstractMapEntry {

    /** The BeanMap that this entry reads from and writes through to. */
    private BeanMap owner;

    /**
     * Constructs a new <code>MyMapEntry</code>.
     *
     * @param owner the BeanMap this entry belongs to
     * @param key the key for this entry
     * @param value the value for this entry
     */
    protected MyMapEntry( BeanMap owner, Object key, Object value ) {
        super( key, value );
        this.owner = owner;
    }

    /**
     * Sets the value by writing through to the owning map.
     *
     * @param value the new value for the entry
     * @return the old value for the entry
     */
    public Object setValue(Object value) {
        Object entryKey = getKey();
        Object previous = owner.get( entryKey );
        owner.put( entryKey, value );
        // Re-read rather than trusting the argument: the bean's mutator may
        // have converted or normalised the stored value.
        Object stored = owner.get( entryKey );
        super.setValue( stored );
        return previous;
    }
}
/**
* Creates an array of parameters to pass to the given mutator method.
* If the given object is not the right type to pass to the method
* directly, it will be converted using {@link #convertType(Class,Object)}.
*
* @param method the mutator method
* @param value the value to pass to the mutator method
* @return an array containing one object that is either the given value
* or a transformed value
* @throws IllegalAccessException if {@link #convertType(Class,Object)}
* raises it
* @throws IllegalArgumentException if any other exception is raised
* by {@link #convertType(Class,Object)}
*/
/**
 * Creates an array of parameters to pass to the given mutator method.
 * If the given object is not the right type to pass to the method
 * directly, it will be converted using {@link #convertType(Class,Object)}.
 *
 * @param method the mutator method
 * @param value the value to pass to the mutator method
 * @return an array containing one object that is either the given value
 * or a transformed value
 * @throws IllegalAccessException if {@link #convertType(Class,Object)}
 * raises it
 * @throws IllegalArgumentException if any other exception is raised
 * by {@link #convertType(Class,Object)}
 */
protected Object[] createWriteMethodArguments( Method method, Object value ) throws IllegalAccessException, ClassCastException {
    try {
        if ( value != null ) {
            Class[] types = method.getParameterTypes();
            if ( types != null && types.length > 0 ) {
                Class paramType = types[0];
                if ( ! paramType.isAssignableFrom( value.getClass() ) ) {
                    value = convertType( paramType, value );
                }
            }
        }
        Object[] answer = { value };
        return answer;
    }
    catch ( InvocationTargetException e ) {
        logInfo( e );
        // FIX: chain the cause instead of discarding it — the message alone
        // loses the underlying stack trace.
        throw new IllegalArgumentException( e.getMessage(), e );
    }
    catch ( InstantiationException e ) {
        logInfo( e );
        throw new IllegalArgumentException( e.getMessage(), e );
    }
}
/**
* Converts the given value to the given type. First, reflection is
* is used to find a public constructor declared by the given class
* that takes one argument, which must be the precise type of the
* given value. If such a constructor is found, a new object is
* created by passing the given value to that constructor, and the
* newly constructed object is returned.<P>
*
* If no such constructor exists, and the given type is a primitive
* type, then the given value is converted to a string using its
* {@link Object#toString() toString()} method, and that string is
* parsed into the correct primitve type using, for instance,
* {@link Integer#valueOf(String)} to convert the string into an
* <code>int</code>.<P>
*
* If no special constructor exists and the given type is not a
* primitive type, this method returns the original value.
*
* @param newType the type to convert the value to
* @param value the value to convert
* @return the converted value
* @throws NumberFormatException if newType is a primitive type, and
* the string representation of the given value cannot be converted
* to that type
* @throws InstantiationException if the constructor found with
* reflection raises it
* @throws InvocationTargetException if the constructor found with
* reflection raises it
* @throws IllegalAccessException never
* @throws IllegalArgumentException never
*/
/**
 * Converts the given value to the given type, first via a public
 * single-argument constructor of the target type taking the value's exact
 * class, then via a registered transformer, and finally by returning the
 * value unchanged when neither applies.
 *
 * @param newType the type to convert the value to
 * @param value the value to convert
 * @return the converted value
 * @throws NumberFormatException if newType is a primitive type, and
 * the string representation of the given value cannot be converted
 * to that type
 * @throws InstantiationException if the constructor found with
 * reflection raises it
 * @throws InvocationTargetException if the constructor found with
 * reflection raises it
 * @throws IllegalAccessException never
 * @throws IllegalArgumentException never
 */
protected Object convertType( Class newType, Object value )
    throws InstantiationException, IllegalAccessException, IllegalArgumentException, InvocationTargetException {
    // First choice: a constructor on the target type taking exactly the
    // value's runtime class.
    Class[] signature = { value.getClass() };
    try {
        Constructor constructor = newType.getConstructor( signature );
        Object[] arguments = { value };
        return constructor.newInstance( arguments );
    }
    catch ( NoSuchMethodException e ) {
        // No matching constructor: fall back to a registered transformer,
        // or hand the value back untouched when there is none.
        Transformer transformer = getTypeTransformer( newType );
        return ( transformer == null ) ? value : transformer.transform( value );
    }
}
/**
* Returns a transformer for the given primitive type.
*
* @param aType the primitive type whose transformer to return
* @return a transformer that will convert strings into that type,
* or null if the given type is not a primitive type
*/
protected Transformer getTypeTransformer( Class aType ) {
    // Lookup in the shared primitive-type registry; null for non-primitives.
    Object transformer = defaultTransformers.get( aType );
    return (Transformer) transformer;
}
/**
* Logs the given exception to <code>System.out</code>. Used to display
* warnings while accessing/mutating the bean.
*
* @param ex the exception to log
*/
protected void logInfo(Exception ex) {
    // Deliberately avoids LOG4J / Commons Logging so BeanMap stays
    // dependency-free.
    String message = "INFO: Exception: " + ex;
    System.out.println( message );
}
/**
 * Logs the given exception's message to <code>System.out</code> and its
 * stack trace to <code>System.err</code> (via printStackTrace). Used to
 * display errors while accessing/mutating the bean.
 *
 * @param ex the exception to log
 */
protected void logWarn(Exception ex) {
    // Deliberately do not use LOG4J or Commons Logging to avoid dependencies
    System.out.println( "WARN: Exception: " + ex );
    ex.printStackTrace();
}
}
@@ -0,0 +1,242 @@
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.commons.collections.map;
import java.io.IOException;
import java.io.ObjectInputStream;
import java.io.ObjectOutputStream;
import java.io.Serializable;
import java.util.Map;
import org.apache.commons.collections.IterableMap;
import org.apache.commons.collections.Transformer;
/**
* Decorates another <code>Map</code> to transform objects that are added.
* <p>
* The Map put methods and Map.Entry setValue method are affected by this class.
* Thus objects must be removed or searched for using their transformed form.
* For example, if the transformation converts Strings to Integers, you must
* use the Integer form to remove objects.
* <p>
* <strong>Note that TransformedMap is not synchronized and is not thread-safe.</strong>
* If you wish to use this map from multiple threads concurrently, you must use
* appropriate synchronization. The simplest approach is to wrap this map
* using {@link java.util.Collections#synchronizedMap(Map)}. This class may throw
* exceptions when accessed by concurrent threads without synchronization.
* <p>
* This class is Serializable from Commons Collections 3.1.
* <p>
* @see org.apache.commons.collections.splitmap.TransformedMap
*
* @since Commons Collections 3.0
* @version $Revision$ $Date$
*
* @author Stephen Colebourne
*/
public class TransformedMap<K, V>
        extends AbstractInputCheckedMapDecorator<K, V>
        implements Serializable {

    /** Serialization version */
    private static final long serialVersionUID = 7023152376788900464L;

    /** The transformer to use for the key (null means no key transformation) */
    protected final Transformer<? super K, ? extends K> keyTransformer;
    /** The transformer to use for the value (null means no value transformation) */
    protected final Transformer<? super V, ? extends V> valueTransformer;

    /**
     * Factory method to create a transforming map.
     * <p>
     * If there are any elements already in the map being decorated, they
     * are NOT transformed.
     * Contrast this with {@link #decorateTransform}.
     *
     * @param map  the map to decorate, must not be null
     * @param keyTransformer  the transformer to use for key conversion, null means no transformation
     * @param valueTransformer  the transformer to use for value conversion, null means no transformation
     * @return a transforming decorator around the given map
     * @throws IllegalArgumentException if map is null
     */
    public static <K, V> IterableMap<K, V> decorate(Map<K, V> map,
            Transformer<? super K, ? extends K> keyTransformer,
            Transformer<? super V, ? extends V> valueTransformer) {
        return new TransformedMap<K, V>(map, keyTransformer, valueTransformer);
    }

    /**
     * Factory method to create a transforming map that will transform
     * existing contents of the specified map.
     * <p>
     * If there are any elements already in the map being decorated, they
     * will be transformed by this method.
     * Contrast this with {@link #decorate}.
     *
     * @param map  the map to decorate, must not be null
     * @param keyTransformer  the transformer to use for key conversion, null means no transformation
     * @param valueTransformer  the transformer to use for value conversion, null means no transformation
     * @return a transforming decorator whose existing contents have been transformed
     * @throws IllegalArgumentException if map is null
     * @since Commons Collections 3.2
     */
    public static <K, V> Map<K, V> decorateTransform(Map<K, V> map,
            Transformer<? super K, ? extends K> keyTransformer,
            Transformer<? super V, ? extends V> valueTransformer) {
        TransformedMap<K, V> decorated = new TransformedMap<K, V>(map, keyTransformer, valueTransformer);
        if (map.size() > 0) {
            Map<K, V> transformed = decorated.transformMap(map);
            decorated.clear();
            decorated.decorated().putAll(transformed); // avoids double transformation
        }
        return decorated;
    }

    //-----------------------------------------------------------------------
    /**
     * Constructor that wraps (not copies).
     * <p>
     * If there are any elements already in the collection being decorated, they
     * are NOT transformed.
     *
     * @param map  the map to decorate, must not be null
     * @param keyTransformer  the transformer to use for key conversion, null means no conversion
     * @param valueTransformer  the transformer to use for value conversion, null means no conversion
     * @throws IllegalArgumentException if map is null
     */
    protected TransformedMap(Map<K, V> map, Transformer<? super K, ? extends K> keyTransformer,
            Transformer<? super V, ? extends V> valueTransformer) {
        super(map);
        this.keyTransformer = keyTransformer;
        this.valueTransformer = valueTransformer;
    }

    //-----------------------------------------------------------------------
    /**
     * Write the map out using a custom routine.
     *
     * @param out  the output stream
     * @throws IOException if an error occurs while writing to the stream
     * @since Commons Collections 3.1
     */
    private void writeObject(ObjectOutputStream out) throws IOException {
        out.defaultWriteObject();
        out.writeObject(map);
    }

    /**
     * Read the map in using a custom routine.
     *
     * @param in  the input stream
     * @throws IOException if an error occurs while reading from the stream
     * @throws ClassNotFoundException if a class of a serialized object cannot be found
     * @since Commons Collections 3.1
     */
    @SuppressWarnings("unchecked")
    private void readObject(ObjectInputStream in) throws IOException, ClassNotFoundException {
        in.defaultReadObject();
        map = (Map) in.readObject();
    }

    //-----------------------------------------------------------------------
    /**
     * Transforms a key.
     * <p>
     * The transformer itself may throw an exception if necessary.
     *
     * @param object  the object to transform
     * @return the transformed object
     */
    protected K transformKey(K object) {
        if (keyTransformer == null) {
            return object;
        }
        return keyTransformer.transform(object);
    }

    /**
     * Transforms a value.
     * <p>
     * The transformer itself may throw an exception if necessary.
     *
     * @param object  the object to transform
     * @return the transformed object
     */
    protected V transformValue(V object) {
        if (valueTransformer == null) {
            return object;
        }
        return valueTransformer.transform(object);
    }

    /**
     * Transforms a map.
     * <p>
     * The transformer itself may throw an exception if necessary.
     *
     * @param map  the map to transform
     * @return the transformed map
     */
    @SuppressWarnings("unchecked")
    protected Map<K, V> transformMap(Map<? extends K, ? extends V> map) {
        if (map.isEmpty()) {
            return (Map<K, V>) map;
        }
        Map<K, V> result = new LinkedMap<K, V>(map.size());
        for (Map.Entry<? extends K, ? extends V> entry : map.entrySet()) {
            result.put(transformKey(entry.getKey()), transformValue(entry.getValue()));
        }
        return result;
    }

    /**
     * Override to transform the value when using <code>setValue</code>.
     * Only invoked when {@link #isSetValueChecking()} is true, i.e. when a
     * value transformer is present.
     *
     * @param value  the value to transform
     * @return the transformed value
     * @since Commons Collections 3.1
     */
    @Override
    protected V checkSetValue(V value) {
        return valueTransformer.transform(value);
    }

    /**
     * Override to only return true when there is a value transformer.
     *
     * @return true if a value transformer is in use
     * @since Commons Collections 3.1
     */
    @Override
    protected boolean isSetValueChecking() {
        return (valueTransformer != null);
    }

    //-----------------------------------------------------------------------
    @Override
    public V put(K key, V value) {
        key = transformKey(key);
        value = transformValue(value);
        return decorated().put(key, value);
    }

    @Override
    public void putAll(Map<? extends K, ? extends V> mapToCopy) {
        mapToCopy = transformMap(mapToCopy);
        decorated().putAll(mapToCopy);
    }
}
@@ -0,0 +1,525 @@
/*
* $Header: /home/jerenkrantz/tmp/commons/commons-convert/cvs/home/cvs/jakarta-commons//collections/src/java/org/apache/commons/collections/FastHashMap.java,v 1.3 2001/05/18 00:12:45 rwaldhoff Exp $
* $Revision: 1.3 $
* $Date: 2001/05/18 00:12:45 $
*
* ====================================================================
*
* The Apache Software License, Version 1.1
*
* Copyright (c) 1999-2001 The Apache Software Foundation. All rights
* reserved.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions
* are met:
*
* 1. Redistributions of source code must retain the above copyright
* notice, this list of conditions and the following disclaimer.
*
* 2. Redistributions in binary form must reproduce the above copyright
* notice, this list of conditions and the following disclaimer in
* the documentation and/or other materials provided with the
* distribution.
*
* 3. The end-user documentation included with the redistribution, if
* any, must include the following acknowlegement:
* "This product includes software developed by the
* Apache Software Foundation (http://www.apache.org/)."
* Alternately, this acknowlegement may appear in the software itself,
* if and wherever such third-party acknowlegements normally appear.
*
* 4. The names "The Jakarta Project", "Commons", and "Apache Software
* Foundation" must not be used to endorse or promote products derived
* from this software without prior written permission. For written
* permission, please contact apache@apache.org.
*
* 5. Products derived from this software may not be called "Apache"
* nor may "Apache" appear in their names without prior written
* permission of the Apache Group.
*
* THIS SOFTWARE IS PROVIDED ``AS IS'' AND ANY EXPRESSED OR IMPLIED
* WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES
* OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
* DISCLAIMED. IN NO EVENT SHALL THE APACHE SOFTWARE FOUNDATION OR
* ITS CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
* SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
* LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF
* USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND
* ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
* OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT
* OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF
* SUCH DAMAGE.
* ====================================================================
*
* This software consists of voluntary contributions made by many
* individuals on behalf of the Apache Software Foundation. For more
* information on the Apache Software Foundation, please see
* <http://www.apache.org/>.
*
*/
package org.apache.commons.collections;
import java.io.Serializable;
import java.util.Collection;
import java.util.HashMap;
import java.util.Iterator;
import java.util.Map;
import java.util.Map.Entry;
import java.util.Set;
/**
* <p>A customized implementation of <code>java.util.HashMap</code> designed
* to operate in a multithreaded environment where the large majority of
* method calls are read-only, instead of structural changes. When operating
* in "fast" mode, read calls are non-synchronized and write calls perform the
* following steps:</p>
* <ul>
* <li>Clone the existing collection
* <li>Perform the modification on the clone
* <li>Replace the existing collection with the (modified) clone
* </ul>
* <p>When first created, objects of this class default to "slow" mode, where
* all accesses of any type are synchronized but no cloning takes place. This
* is appropriate for initially populating the collection, followed by a switch
* to "fast" mode (by calling <code>setFast(true)</code>) after initialization
* is complete.</p>
*
* <p><strong>NOTE</strong>: If you are creating and accessing a
* <code>HashMap</code> only within a single thread, you should use
* <code>java.util.HashMap</code> directly (with no synchronization), for
* maximum performance.</p>
*
* @author Craig R. McClanahan
* @version $Revision: 1.3 $ $Date: 2001/05/18 00:12:45 $
*/
public class FastHashMap extends HashMap {

    // ----------------------------------------------------------- Constructors

    /**
     * Construct an empty map.
     */
    public FastHashMap() {
        super();
        this.map = new HashMap();
    }

    /**
     * Construct an empty map with the specified capacity.
     *
     * @param capacity The initial capacity of the empty map
     */
    public FastHashMap(int capacity) {
        super();
        this.map = new HashMap(capacity);
    }

    /**
     * Construct an empty map with the specified capacity and load factor.
     *
     * @param capacity The initial capacity of the empty map
     * @param factor The load factor of the new map
     */
    public FastHashMap(int capacity, float factor) {
        super();
        this.map = new HashMap(capacity, factor);
    }

    /**
     * Construct a new map with the same mappings as the specified map.
     *
     * @param map The map whose mappings are to be copied
     */
    public FastHashMap(Map map) {
        super();
        this.map = new HashMap(map);
    }

    // ----------------------------------------------------- Instance Variables

    /**
     * The underlying map we are managing.
     * NOTE(review): not volatile — in "fast" mode unsynchronized readers may
     * observe a stale reference after a writer publishes a modified clone;
     * confirm this weak visibility guarantee is intended.
     */
    protected HashMap map = null;

    // ------------------------------------------------------------- Properties

    /**
     * Are we operating in "fast" (copy-on-write) mode?
     */
    protected boolean fast = false;

    public boolean getFast() {
        return (this.fast);
    }

    public void setFast(boolean fast) {
        this.fast = fast;
    }

    // --------------------------------------------------------- Public Methods

    /**
     * Remove all mappings from this map.
     */
    public void clear() {
        if (fast) {
            synchronized (this) {
                // copy-on-write: mutate a clone, then publish it
                HashMap temp = (HashMap) map.clone();
                temp.clear();
                map = temp;
            }
        } else {
            synchronized (map) {
                map.clear();
            }
        }
    }

    /**
     * Return a shallow copy of this <code>FastHashMap</code> instance.
     * The keys and values themselves are not copied.
     */
    public Object clone() {
        FastHashMap results = null;
        if (fast) {
            results = new FastHashMap(map);
        } else {
            synchronized (map) {
                results = new FastHashMap(map);
            }
        }
        results.setFast(getFast());
        return (results);
    }

    /**
     * Return <code>true</code> if this map contains a mapping for the
     * specified key.
     *
     * @param key Key to be searched for
     */
    public boolean containsKey(Object key) {
        if (fast) {
            return (map.containsKey(key));
        } else {
            synchronized (map) {
                return (map.containsKey(key));
            }
        }
    }

    /**
     * Return <code>true</code> if this map contains one or more keys mapping
     * to the specified value.
     *
     * @param value Value to be searched for
     */
    public boolean containsValue(Object value) {
        if (fast) {
            return (map.containsValue(value));
        } else {
            synchronized (map) {
                return (map.containsValue(value));
            }
        }
    }

    /**
     * Return a collection view of the mappings contained in this map. Each
     * element in the returned collection is a <code>Map.Entry</code>.
     */
    public Set entrySet() {
        if (fast) {
            return (map.entrySet());
        } else {
            synchronized (map) {
                return (map.entrySet());
            }
        }
    }

    /**
     * Compare the specified object with this map for equality. This
     * implementation uses exactly the code that is used to define the
     * map equals function in the documentation for the
     * <code>Map.equals</code> method.
     *
     * @param o Object to be compared to this map
     */
    public boolean equals(Object o) {
        // Simple tests that require no synchronization
        if (o == this)
            return (true);
        else if (!(o instanceof Map))
            return (false);
        Map mo = (Map) o;
        // Compare the two maps for equality
        if (fast) {
            if (mo.size() != map.size())
                return (false);
            Iterator i = map.entrySet().iterator();
            while (i.hasNext()) {
                Map.Entry e = (Map.Entry) i.next();
                Object key = e.getKey();
                Object value = e.getValue();
                if (value == null) {
                    // null value: the other map must map this key to null too
                    if (!(mo.get(key) == null && mo.containsKey(key)))
                        return (false);
                } else {
                    if (!value.equals(mo.get(key)))
                        return (false);
                }
            }
            return (true);
        } else {
            synchronized (map) {
                if (mo.size() != map.size())
                    return (false);
                Iterator i = map.entrySet().iterator();
                while (i.hasNext()) {
                    Map.Entry e = (Map.Entry) i.next();
                    Object key = e.getKey();
                    Object value = e.getValue();
                    if (value == null) {
                        if (!(mo.get(key) == null && mo.containsKey(key)))
                            return (false);
                    } else {
                        if (!value.equals(mo.get(key)))
                            return (false);
                    }
                }
                return (true);
            }
        }
    }

    /**
     * Return the value to which this map maps the specified key. Returns
     * <code>null</code> if the map contains no mapping for this key, or if
     * there is a mapping with a value of <code>null</code>. Use the
     * <code>containsKey()</code> method to disambiguate these cases.
     *
     * @param key Key whose value is to be returned
     */
    public Object get(Object key) {
        if (fast) {
            return (map.get(key));
        } else {
            synchronized (map) {
                return (map.get(key));
            }
        }
    }

    /**
     * Return the hash code value for this map. This implementation uses
     * exactly the code that is used to define the map hash function in the
     * documentation for the <code>Map.hashCode</code> method.
     */
    public int hashCode() {
        if (fast) {
            int h = 0;
            Iterator i = map.entrySet().iterator();
            while (i.hasNext())
                h += i.next().hashCode();
            return (h);
        } else {
            synchronized (map) {
                int h = 0;
                Iterator i = map.entrySet().iterator();
                while (i.hasNext())
                    h += i.next().hashCode();
                return (h);
            }
        }
    }

    /**
     * Return <code>true</code> if this map contains no mappings.
     */
    public boolean isEmpty() {
        if (fast) {
            return (map.isEmpty());
        } else {
            synchronized (map) {
                return (map.isEmpty());
            }
        }
    }

    /**
     * Return a set view of the keys contained in this map.
     */
    public Set keySet() {
        if (fast) {
            return (map.keySet());
        } else {
            synchronized (map) {
                return (map.keySet());
            }
        }
    }

    /**
     * Associate the specified value with the specified key in this map.
     * If the map previously contained a mapping for this key, the old
     * value is replaced and returned.
     *
     * @param key The key with which the value is to be associated
     * @param value The value to be associated with this key
     */
    public Object put(Object key, Object value) {
        if (fast) {
            synchronized (this) {
                // copy-on-write: mutate a clone, then publish it
                HashMap temp = (HashMap) map.clone();
                Object result = temp.put(key, value);
                map = temp;
                return (result);
            }
        } else {
            synchronized (map) {
                return (map.put(key, value));
            }
        }
    }

    /**
     * Copy all of the mappings from the specified map to this one, replacing
     * any mappings with the same keys.
     *
     * @param in Map whose mappings are to be copied
     */
    public void putAll(Map in) {
        if (fast) {
            synchronized (this) {
                HashMap temp = (HashMap) map.clone();
                temp.putAll(in);
                map = temp;
            }
        } else {
            synchronized (map) {
                map.putAll(in);
            }
        }
    }

    /**
     * Remove any mapping for this key, and return any previously
     * mapped value.
     *
     * @param key Key whose mapping is to be removed
     */
    public Object remove(Object key) {
        if (fast) {
            synchronized (this) {
                HashMap temp = (HashMap) map.clone();
                Object result = temp.remove(key);
                map = temp;
                return (result);
            }
        } else {
            synchronized (map) {
                return (map.remove(key));
            }
        }
    }

    /**
     * Return the number of key-value mappings in this map.
     */
    public int size() {
        if (fast) {
            return (map.size());
        } else {
            synchronized (map) {
                return (map.size());
            }
        }
    }

    /**
     * Return a collection view of the values contained in this map.
     */
    public Collection values() {
        if (fast) {
            return (map.values());
        } else {
            synchronized (map) {
                return (map.values());
            }
        }
    }
}
@@ -0,0 +1,289 @@
/*
* Copyright 1999-2004 The Apache Software Foundation
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.commons.collections.iterators;
import java.util.ArrayList;
import java.util.Collection;
import java.util.Iterator;
import java.util.List;
import org.apache.commons.collections.list.UnmodifiableList;
/**
* An IteratorChain is an Iterator that wraps a number of Iterators.
* <p>
* This class makes multiple iterators look like one to the caller
* When any method from the Iterator interface is called, the IteratorChain
* will delegate to a single underlying Iterator. The IteratorChain will
* invoke the Iterators in sequence until all Iterators are exhausted.
* <p>
* Under many circumstances, linking Iterators together in this manner is
* more efficient (and convenient) than reading out the contents of each
* Iterator into a List and creating a new Iterator.
* <p>
 * Calling a method that adds a new Iterator <i>after a method in the Iterator
* interface has been called</i> will result in an UnsupportedOperationException.
* Subclasses should <i>take care</i> to not alter the underlying List of Iterators.
* <p>
* NOTE: As from version 3.0, the IteratorChain may contain no
* iterators. In this case the class will function as an empty iterator.
*
* @since Commons Collections 2.1
* @version $Revision$ $Date$
*
* @author Morgan Delagrange
* @author Stephen Colebourne
*/
public class IteratorChain implements Iterator {
/** The chain of iterators */
protected final List iteratorChain = new ArrayList();
/** The index of the current iterator */
protected int currentIteratorIndex = 0;
/** The current iterator */
protected Iterator currentIterator = null;
/**
* The "last used" Iterator is the Iterator upon which
* next() or hasNext() was most recently called
* used for the remove() operation only
*/
protected Iterator lastUsedIterator = null;
/**
* ComparatorChain is "locked" after the first time
* compare(Object,Object) is called
*/
protected boolean isLocked = false;
//-----------------------------------------------------------------------
/**
* Construct an IteratorChain with no Iterators.
* <p>
* You will normally use {@link #addIterator(Iterator)} to add
* some iterators after using this constructor.
*/
public IteratorChain() {
super();
}
/**
* Construct an IteratorChain with a single Iterator.
*
* @param iterator first Iterator in the IteratorChain
* @throws NullPointerException if the iterator is null
*/
public IteratorChain(Iterator iterator) {
super();
addIterator(iterator);
}
/**
* Constructs a new <code>IteratorChain</code> over the two
* given iterators.
*
* @param a the first child iterator
* @param b the second child iterator
* @throws NullPointerException if either iterator is null
*/
public IteratorChain(Iterator a, Iterator b) {
super();
addIterator(a);
addIterator(b);
}
/**
* Constructs a new <code>IteratorChain</code> over the array
* of iterators.
*
* @param iterators the array of iterators
* @throws NullPointerException if iterators array is or contains null
*/
public IteratorChain(Iterator[] iterators) {
super();
for (int i = 0; i < iterators.length; i++) {
addIterator(iterators[i]);
}
}
/**
* Constructs a new <code>IteratorChain</code> over the collection
* of iterators.
*
* @param iterators the collection of iterators
* @throws NullPointerException if iterators collection is or contains null
* @throws ClassCastException if iterators collection doesn't contain an iterator
*/
public IteratorChain(Collection iterators) {
super();
for (Iterator it = iterators.iterator(); it.hasNext();) {
Iterator item = (Iterator) it.next();
addIterator(item);
}
}
//-----------------------------------------------------------------------
/**
* Add an Iterator to the end of the chain
*
* @param iterator Iterator to add
* @throws IllegalStateException if I've already started iterating
* @throws NullPointerException if the iterator is null
*/
public void addIterator(Iterator iterator) {
checkLocked();
if (iterator == null) {
throw new NullPointerException("Iterator must not be null");
}
iteratorChain.add(iterator);
}
/**
* Set the Iterator at the given index
*
* @param index index of the Iterator to replace
* @param iterator Iterator to place at the given index
* @throws IndexOutOfBoundsException if index &lt; 0 or index &gt; size()
* @throws IllegalStateException if I've already started iterating
* @throws NullPointerException if the iterator is null
*/
public void setIterator(int index, Iterator iterator) throws IndexOutOfBoundsException {
checkLocked();
if (iterator == null) {
throw new NullPointerException("Iterator must not be null");
}
iteratorChain.set(index, iterator);
}
/**
* Get the list of Iterators (unmodifiable)
*
* @return the unmodifiable list of iterators added
*/
public List getIterators() {
return UnmodifiableList.decorate(iteratorChain);
}
/**
* Number of Iterators in the current IteratorChain.
*
* @return Iterator count
*/
public int size() {
return iteratorChain.size();
}
/**
* Determine if modifications can still be made to the IteratorChain.
* IteratorChains cannot be modified once they have executed a method
* from the Iterator interface.
*
* @return true if IteratorChain cannot be modified, false if it can
*/
public boolean isLocked() {
return isLocked;
}
/**
* Checks whether the iterator chain is now locked and in use.
*/
private void checkLocked() {
if (isLocked == true) {
throw new UnsupportedOperationException("IteratorChain cannot be changed after the first use of a method from the Iterator interface");
}
}
/**
* Lock the chain so no more iterators can be added.
* This must be called from all Iterator interface methods.
*/
private void lockChain() {
if (isLocked == false) {
isLocked = true;
}
}
/**
 * Updates the current iterator field to ensure that the current Iterator
 * is not exhausted
 */
protected void updateCurrentIterator() {
    if (currentIterator == null) {
        // First use: start with the first chained iterator, or an empty
        // iterator when nothing was ever added to the chain.
        if (iteratorChain.isEmpty()) {
            currentIterator = EmptyIterator.INSTANCE;
        } else {
            currentIterator = (Iterator) iteratorChain.get(0);
        }
        // set last used iterator here, in case the user calls remove
        // before calling hasNext() or next() (although they shouldn't)
        lastUsedIterator = currentIterator;
    }
    // Skip forward over exhausted iterators until one with remaining
    // elements is found, or the last iterator in the chain is reached.
    while (currentIterator.hasNext() == false && currentIteratorIndex < iteratorChain.size() - 1) {
        currentIteratorIndex++;
        currentIterator = (Iterator) iteratorChain.get(currentIteratorIndex);
    }
}
//-----------------------------------------------------------------------
/**
 * Return true if any Iterator in the IteratorChain has a remaining element.
 *
 * @return true if elements remain
 */
public boolean hasNext() {
    // The first Iterator-interface call locks the chain against modification.
    lockChain();
    // Advance past exhausted iterators before delegating the query.
    updateCurrentIterator();
    lastUsedIterator = currentIterator;
    return currentIterator.hasNext();
}
/**
 * Returns the next Object of the current Iterator
 *
 * @return Object from the current Iterator
 * @throws java.util.NoSuchElementException if all the Iterators are exhausted
 */
public Object next() {
    // The first Iterator-interface call locks the chain against modification.
    lockChain();
    // Advance past exhausted iterators before delegating.
    updateCurrentIterator();
    lastUsedIterator = currentIterator;
    return currentIterator.next();
}
/**
 * Removes from the underlying collection the last element
 * returned by the Iterator. As with next() and hasNext(),
 * this method calls remove() on the underlying Iterator.
 * Therefore, this method may throw an
 * UnsupportedOperationException if the underlying
 * Iterator does not support this method.
 *
 * @throws UnsupportedOperationException
 *   if the remove operator is not supported by the underlying Iterator
 * @throws IllegalStateException
 *   if the next method has not yet been called, or the remove method has
 *   already been called after the last call to the next method.
 */
public void remove() {
    lockChain();
    // Guard for remove() being called before hasNext()/next(): initialise
    // currentIterator (and thus lastUsedIterator) without advancing past
    // the iterator that produced the last element.
    if (currentIterator == null) {
        updateCurrentIterator();
    }
    lastUsedIterator.remove();
}
}
@@ -0,0 +1,888 @@
/*
* $Header: /home/jerenkrantz/tmp/commons/commons-convert/cvs/home/cvs/jakarta-commons//collections/src/java/org/apache/commons/collections/SequencedHashMap.java,v 1.4 2002/02/21 13:46:25 mas Exp $
* $Revision: 1.4 $
* $Date: 2002/02/21 13:46:25 $
*
* ====================================================================
*
* The Apache Software License, Version 1.1
*
* Copyright (c) 1999-2002 The Apache Software Foundation. All rights
* reserved.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions
* are met:
*
* 1. Redistributions of source code must retain the above copyright
* notice, this list of conditions and the following disclaimer.
*
* 2. Redistributions in binary form must reproduce the above copyright
* notice, this list of conditions and the following disclaimer in
* the documentation and/or other materials provided with the
* distribution.
*
* 3. The end-user documentation included with the redistribution, if
* any, must include the following acknowlegement:
* "This product includes software developed by the
* Apache Software Foundation (http://www.apache.org/)."
* Alternately, this acknowlegement may appear in the software itself,
* if and wherever such third-party acknowlegements normally appear.
*
* 4. The names "The Jakarta Project", "Commons", and "Apache Software
* Foundation" must not be used to endorse or promote products derived
* from this software without prior written permission. For written
* permission, please contact apache@apache.org.
*
* 5. Products derived from this software may not be called "Apache"
* nor may "Apache" appear in their names without prior written
* permission of the Apache Group.
*
* THIS SOFTWARE IS PROVIDED ``AS IS'' AND ANY EXPRESSED OR IMPLIED
* WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES
* OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
* DISCLAIMED. IN NO EVENT SHALL THE APACHE SOFTWARE FOUNDATION OR
* ITS CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
* SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
* LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF
* USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND
* ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
* OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT
* OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF
* SUCH DAMAGE.
* ====================================================================
*
* This software consists of voluntary contributions made by many
* individuals on behalf of the Apache Software Foundation. For more
* information on the Apache Software Foundation, please see
* <http://www.apache.org/>.
*
*/
package org.apache.commons.collections;
import java.util.Collection;
import java.util.Collections;
import java.util.HashMap;
import java.util.Iterator;
import java.util.AbstractCollection;
import java.util.AbstractMap;
import java.util.AbstractSet;
import java.util.ArrayList;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.NoSuchElementException;
/**
* A map of objects whose mapping entries are sequenced based on the order in
* which they were added. This data structure has fast <I>O(1)</I> search
* time, deletion time, and insertion time.
*
* This class inherits from {@link java.util.HashMap} purely for backwards
* compatibility. It should really be inheriting from {@link
* java.util.AbstractMap}, or with a tiny extra bit of work, implement the
* full map interface on its own. APIs should not rely on this class being an
* actual {@link java.util.HashMap}, and instead should recognize it only as a
* generic {@link java.util.Map} (unless, of course, you need the sequencing
* functionality, but even in that case, this class should not be referred to
* as a java.util.HashMap).
*
* <P>Although this map is sequenced, it cannot implement {@link
* java.util.List} because of incompatible interface definitions. The remove
* methods in List and Map have different return values (see: {@link
* java.util.List#remove(Object)} and {@link java.util.Map#remove(Object)}).
*
* <P>This class is not thread safe. When a thread safe implementation is
* required, use {@link Collections#synchronizedMap(Map)} as it is documented,
* or use explicit synchronization controls.
*
* @author <a href="mailto:michael@iammichael.org">Michael A. Smith</A>
* @author <a href="mailto:dlr@collab.net">Daniel Rall</a>
* @author <a href="mailto:hps@intermeta.de">Henning P. Schmiedehausen</a>
*/
public class SequencedHashMap extends HashMap {
/**
 * {@link java.util.Map.Entry} that doubles as a node in the linked list
 * of sequenced mappings.
 **/
private static class Entry implements Map.Entry {
    // Note: This class cannot easily be made clonable. While the actual
    // implementation of a clone would be simple, defining the semantics is
    // difficult. If a shallow clone is implemented, then entry.next.prev !=
    // entry, which is unintuitive and probably breaks all sorts of assumptions
    // in code that uses this implementation. If a deep clone is
    // implementated, then what happens when the linked list is cyclical (as is
    // the case with SequencedHashMap)? It's impossible to know in the clone
    // when to stop cloning, and thus you end up in a recursive loop,
    // continuously cloning the "next" in the list.
    // The key never changes after construction; only the value is mutable.
    private final Object key;
    private Object value;
    // package private to allow the SequencedHashMap to access and manipulate
    // them.
    Entry next = null;
    Entry prev = null;
    public Entry(Object key, Object value) {
        this.key = key;
        this.value = value;
    }
    // per Map.Entry.getKey()
    public Object getKey() {
        return this.key;
    }
    // per Map.Entry.getValue()
    public Object getValue() {
        return this.value;
    }
    // per Map.Entry.setValue()
    public Object setValue(Object value) {
        Object oldValue = this.value;
        this.value = value;
        return oldValue;
    }
    public int hashCode() {
        // implemented per api docs for Map.Entry.hashCode()
        return ((getKey() == null ? 0 : getKey().hashCode()) ^
                (getValue()==null ? 0 : getValue().hashCode()));
    }
    public boolean equals(Object obj) {
        if(obj == null) return false;
        if(obj == this) return true;
        if(!(obj instanceof Map.Entry)) return false;
        Map.Entry other = (Map.Entry)obj;
        // implemented per api docs for Map.Entry.equals(Object)
        return((getKey() == null ?
                other.getKey() == null :
                getKey().equals(other.getKey())) &&
               (getValue() == null ?
                other.getValue() == null :
                getValue().equals(other.getValue())));
    }
    public String toString() {
        return "[" + getKey() + "=" + getValue() + "]";
    }
}
/**
 * Construct an empty sentinel used to hold the head (sentinel.next) and the
 * tail (sentinel.prev) of the list. The sentinal has a <code>null</code>
 * key and value.
 **/
private static final Entry createSentinel() {
    Entry s = new Entry(null, null);
    // Self-linked: an empty list is represented by sentinel.next == sentinel.
    s.prev = s;
    s.next = s;
    return s;
}
/**
* Sentinel used to hold the head and tail of the list of entries.
**/
private Entry sentinel;
/**
* Map of keys to entries
**/
private HashMap entries;
/**
 * Construct a new sequenced hash map with default initial size and load
 * factor.
 **/
public SequencedHashMap() {
    // The inherited HashMap storage is unused; all state lives in the
    // sentinel list and the private key-to-entry map below.
    sentinel = createSentinel();
    entries = new HashMap();
}
/**
 * Construct a new sequenced hash map with the specified initial size and
 * default load factor.
 *
 * @param initialSize the initial size for the hash table
 *
 * @see HashMap#HashMap(int)
 **/
public SequencedHashMap(int initialSize) {
    sentinel = createSentinel();
    // The sizing hint applies to the internal key-to-entry map only.
    entries = new HashMap(initialSize);
}
/**
 * Construct a new sequenced hash map with the specified initial size and
 * load factor.
 *
 * @param initialSize the initial size for the hash table
 *
 * @param loadFactor the load factor for the hash table.
 *
 * @see HashMap#HashMap(int,float)
 **/
public SequencedHashMap(int initialSize, float loadFactor) {
    sentinel = createSentinel();
    // Both tuning parameters apply to the internal key-to-entry map only.
    entries = new HashMap(initialSize, loadFactor);
}
/**
 * Construct a new sequenced hash map and add all the elements in the
 * specified map. The order in which the mappings in the specified map are
 * added is defined by {@link #putAll(Map)}.
 *
 * @param m the map whose mappings are copied into this map
 **/
public SequencedHashMap(Map m) {
    this();
    putAll(m);
}
/**
 * Removes an internal entry from the linked list. This does not remove
 * it from the underlying map.
 **/
private void removeEntry(Entry entry) {
    // Unlink by re-routing the neighbours; the removed entry's own
    // next/prev references are deliberately left untouched.
    entry.next.prev = entry.prev;
    entry.prev.next = entry.next;
}
/**
 * Inserts a new internal entry to the tail of the linked list. This does
 * not add the entry to the underlying map.
 **/
private void insertEntry(Entry entry) {
    // Append at the tail: sentinel.prev is the current last element.
    entry.next = sentinel;
    entry.prev = sentinel.prev;
    sentinel.prev.next = entry;
    sentinel.prev = entry;
}
// per Map.size()
public int size() {
    // use the underlying Map's size since size is not maintained here.
    return entries.size();
}
// per Map.isEmpty()
public boolean isEmpty() {
    // for quick check whether the map is entry, we can check the linked list
    // and see if there's anything in it (a self-linked sentinel means empty).
    return sentinel.next == sentinel;
}
// per Map.containsKey(Object)
public boolean containsKey(Object key) {
    // pass on to underlying map implementation
    return entries.containsKey(key);
}
// per Map.containsValue(Object)
public boolean containsValue(Object value) {
    // The internal map stores Entry nodes rather than raw values, so it
    // cannot answer this query directly; walk the sequence instead and
    // compare each stored value, treating null specially.
    for (Entry pos = sentinel.next; pos != sentinel; pos = pos.next) {
        Object candidate = pos.getValue();
        if (value == null ? candidate == null : value.equals(candidate)) {
            return true;
        }
    }
    return false;
}
// per Map.get(Object)
public Object get(Object o) {
    // Look up the internal entry for the key; absent keys yield null.
    Entry entry = (Entry) entries.get(o);
    return (entry == null) ? null : entry.getValue();
}
/**
 * Returns the entry for the "oldest" mapping -- the head of the sequence.
 * Equivalent to <code>entrySet().iterator().next()</code>, but O(1).
 *
 * @return The first entry in the sequence, or <code>null</code> if the
 *         map is empty.
 **/
public Map.Entry getFirst() {
    // Never hand out the sentinel: an empty list must yield null.
    if (isEmpty()) {
        return null;
    }
    return sentinel.next;
}
/**
 * Returns the key for the "oldest" mapping.  Equivalent to
 * <code>getFirst().getKey()</code>, but slightly faster.
 *
 * @return The first key in the sequence, or <code>null</code> if the
 *         map is empty.
 **/
public Object getFirstKey() {
    // No emptiness test needed: on an empty map sentinel.next is the
    // sentinel itself, whose key is null -- exactly the desired result.
    Entry head = sentinel.next;
    return head.getKey();
}
/**
 * Returns the value for the "oldest" mapping.  Equivalent to
 * <code>getFirst().getValue()</code>, but slightly faster.
 *
 * @return The first value in the sequence, or <code>null</code> if the
 *         map is empty.
 **/
public Object getFirstValue() {
    // No emptiness test needed: on an empty map sentinel.next is the
    // sentinel itself, whose value is null -- exactly the desired result.
    Entry head = sentinel.next;
    return head.getValue();
}
/**
 * Returns the entry for the "newest" mapping -- the tail of the sequence.
 * Equivalent to iterating <code>entrySet()</code> to its last element,
 * but O(1) instead of O(n).
 *
 * @return The last entry in the sequence, or <code>null</code> if the map
 *         is empty.
 **/
public Map.Entry getLast() {
    // Never hand out the sentinel: an empty list must yield null.
    if (isEmpty()) {
        return null;
    }
    return sentinel.prev;
}
/**
 * Returns the key for the "newest" mapping.  Equivalent to
 * <code>getLast().getKey()</code>, but slightly faster.
 *
 * @return The last key in the sequence, or <code>null</code> if the map is
 *         empty.
 **/
public Object getLastKey() {
    // No emptiness test needed: on an empty map sentinel.prev is the
    // sentinel itself, whose key is null -- exactly the desired result.
    Entry tail = sentinel.prev;
    return tail.getKey();
}
/**
 * Returns the value for the "newest" mapping.  Equivalent to
 * <code>getLast().getValue()</code>, but slightly faster.
 *
 * @return The last value in the sequence, or <code>null</code> if the map
 *         is empty.
 **/
public Object getLastValue() {
    // No emptiness test needed: on an empty map sentinel.prev is the
    // sentinel itself, whose value is null -- exactly the desired result.
    Entry tail = sentinel.prev;
    return tail.getValue();
}
// per Map.put(Object,Object)
// Re-putting an existing key moves its entry to the end of the sequence.
public Object put(Object key, Object value) {
    Object oldValue = null;
    // lookup the entry for the specified key
    Entry e = (Entry)entries.get(key);
    // check to see if it already exists
    if(e != null) {
        // remove from list so the entry gets "moved" to the end of list
        removeEntry(e);
        // update value in map
        oldValue = e.setValue(value);
        // Note: We do not update the key here because its unnecessary. We only
        // do comparisons using equals(Object) and we know the specified key and
        // that in the map are equal in that sense. This may cause a problem if
        // someone does not implement their hashCode() and/or equals(Object)
        // method properly and then use it as a key in this map.
    } else {
        // add new entry
        e = new Entry(key, value);
        entries.put(key, e);
    }
    // assert(entry in map, but not list)
    // add to list
    insertEntry(e);
    return oldValue;
}
// per Map.remove(Object)
public Object remove(Object key) {
    // Detach from the underlying map first; a null result means the key
    // was never present, so there is no list node to unlink.
    Entry e = (Entry) entries.remove(key);
    if (e != null) {
        removeEntry(e);
        return e.getValue();
    }
    return null;
}
/**
 * Adds every mapping in the specified map to this map, replacing any
 * mappings that already exist (as per {@link Map#putAll(Map)}).  Entries
 * are added in the order produced by the iterator of
 * {@link Map#entrySet()} on the specified map.
 *
 * @param t the mappings that should be added to this map.
 *
 * @exception NullPointerException if <code>t</code> is <code>null</code>
 **/
public void putAll(Map t) {
    for (Iterator iter = t.entrySet().iterator(); iter.hasNext(); ) {
        Map.Entry entry = (Map.Entry) iter.next();
        put(entry.getKey(), entry.getValue());
    }
}
// per Map.clear()
public void clear() {
    // remove all from the underlying map
    entries.clear();
    // and the list: re-linking the sentinel to itself drops every node.
    sentinel.next = sentinel;
    sentinel.prev = sentinel;
}
// per Map.keySet()
// Returns a live view backed by this map; its iterator yields keys in
// sequence order and removals write through to the map.
public Set keySet() {
    return new AbstractSet() {
        // required impls
        public Iterator iterator() { return new OrderedIterator(KEY); }
        public boolean remove(Object o) {
            return SequencedHashMap.this.remove(o) != null;
        }
        // more efficient impls than abstract set
        public void clear() {
            SequencedHashMap.this.clear();
        }
        public int size() {
            return SequencedHashMap.this.size();
        }
        public boolean isEmpty() {
            return SequencedHashMap.this.isEmpty();
        }
        public boolean contains(Object o) {
            return SequencedHashMap.this.containsKey(o);
        }
    };
}
// per Map.values()
// Returns a live view backed by this map; its iterator yields values in
// sequence order.  remove(Object) deletes the first (oldest) mapping whose
// value matches.
public Collection values() {
    return new AbstractCollection() {
        // required impl
        public Iterator iterator() { return new OrderedIterator(VALUE); }
        public boolean remove(Object value) {
            // do null comparison outside loop so we only need to do it once. This
            // provides a tighter, more efficient loop at the expense of slight
            // code duplication.
            if(value == null) {
                for(Entry pos = sentinel.next; pos != sentinel; pos = pos.next) {
                    if(pos.getValue() == null) {
                        SequencedHashMap.this.remove(pos.getKey());
                        return true;
                    }
                }
            } else {
                for(Entry pos = sentinel.next; pos != sentinel; pos = pos.next) {
                    if(value.equals(pos.getValue())) {
                        SequencedHashMap.this.remove(pos.getKey());
                        return true;
                    }
                }
            }
            return false;
        }
        // more efficient impls than abstract collection
        public void clear() {
            SequencedHashMap.this.clear();
        }
        public int size() {
            return SequencedHashMap.this.size();
        }
        public boolean isEmpty() {
            return SequencedHashMap.this.isEmpty();
        }
        public boolean contains(Object o) {
            return SequencedHashMap.this.containsValue(o);
        }
    };
}
// per Map.entrySet()
// Returns a live view backed by this map; its iterator yields entries in
// sequence order.  remove(Object) only removes a mapping when both the key
// and the value of the supplied entry match.
public Set entrySet() {
    return new AbstractSet() {
        // helper: resolve an arbitrary object to this map's internal Entry,
        // or null if it is not a Map.Entry that matches key AND value.
        private Entry findEntry(Object o) {
            if(o == null) return null;
            if(!(o instanceof Map.Entry)) return null;
            Map.Entry e = (Map.Entry)o;
            Entry entry = (Entry)entries.get(e.getKey());
            if(entry != null && entry.equals(e)) return entry;
            else return null;
        }
        // required impl
        public Iterator iterator() {
            return new OrderedIterator(ENTRY);
        }
        public boolean remove(Object o) {
            Entry e = findEntry(o);
            if(e == null) return false;
            return SequencedHashMap.this.remove(e.getKey()) != null;
        }
        // more efficient impls than abstract collection
        public void clear() {
            SequencedHashMap.this.clear();
        }
        public int size() {
            return SequencedHashMap.this.size();
        }
        public boolean isEmpty() {
            return SequencedHashMap.this.isEmpty();
        }
        public boolean contains(Object o) {
            return findEntry(o) != null;
        }
    };
}
// constants to define what the iterator should return on "next"
private static final int KEY = 0;
private static final int VALUE = 1;
private static final int ENTRY = 2;
private static final int REMOVED_MASK = 0x80000000;
/**
 * Iterator over the sequenced entries that returns keys, values, or
 * entries depending on the return type fixed at construction time.
 * NOTE(review): there is no modCount-style concurrent-modification check
 * here; modifying the map during iteration other than through this
 * iterator's own remove() is not detected.
 **/
private class OrderedIterator implements Iterator {
    /**
     * Holds the type that should be returned from the iterator. The value
     * should be either {@link #KEY}, {@link #VALUE}, or {@link #ENTRY}. To
     * save a tiny bit of memory, this field is also used as a marker for when
     * remove has been called on the current object to prevent a second remove
     * on the same element. Essientially, if this value is negative (i.e. the
     * bit specified by {@link #REMOVED_MASK} is set), the current position
     * has been removed. If positive, remove can still be called.
     **/
    private int returnType;
    /**
     * Holds the "current" position in the iterator. when pos.next is the
     * sentinel, we've reached the end of the list.
     **/
    private Entry pos = sentinel;
    /**
     * Construct an iterator over the sequenced elements in the order in which
     * they were added. The {@link #next()} method returns the type specified
     * by <code>returnType</code> which must be either {@link #KEY}, {@link
     * #VALUE}, or {@link #ENTRY}.
     **/
    public OrderedIterator(int returnType) {
        //// Since this is a private inner class, nothing else should have
        //// access to the constructor. Since we know the rest of the outer
        //// class uses the iterator correctly, we can leave of the following
        //// check:
        //if(returnType >= 0 && returnType <= 2) {
        //  throw new IllegalArgumentException("Invalid iterator type");
        //}
        // Set the "removed" bit so that the iterator starts in a state where
        // "next" must be called before "remove" will succeed.
        this.returnType = returnType | REMOVED_MASK;
    }
    /**
     * Returns whether there is any additional elements in the iterator to be
     * returned.
     *
     * @return <code>true</code> if there are more elements left to be
     * returned from the iterator; <code>false</code> otherwise.
     **/
    public boolean hasNext() {
        return pos.next != sentinel;
    }
    /**
     * Returns the next element from the iterator.
     *
     * @return the next element from the iterator.
     *
     * @exception NoSuchElementException if there are no more elements in the
     * iterator.
     **/
    public Object next() {
        if(pos.next == sentinel) {
            throw new NoSuchElementException();
        }
        // clear the "removed" flag so remove() becomes legal again
        returnType = returnType & ~REMOVED_MASK;
        pos = pos.next;
        switch(returnType) {
        case KEY:
            return pos.getKey();
        case VALUE:
            return pos.getValue();
        case ENTRY:
            return pos;
        default:
            // should never happen
            throw new Error("bad iterator type: " + returnType);
        }
    }
    /**
     * Removes the last element returned from the {@link #next()} method from
     * the sequenced map.
     *
     * @exception IllegalStateException if there isn't a "last element" to be
     * removed. That is, if {@link #next()} has never been called, or if
     * {@link #remove()} was already called on the element.
     **/
    public void remove() {
        if((returnType & REMOVED_MASK) != 0) {
            throw new IllegalStateException("remove() must follow next()");
        }
        // remove the entry
        SequencedHashMap.this.remove(pos.getKey());
        // set the removed flag
        returnType = returnType | REMOVED_MASK;
    }
}
// APIs maintained from previous version of SequencedHashMap for backwards
// compatibility
/**
 * Creates a shallow copy of this object, preserving the internal structure
 * by copying only references. The keys and values themselves are not
 * <code>clone()</code>'d. The cloned object maintains the same sequence.
 *
 * @return A clone of this instance.
 */
public Object clone () {
    // yes, calling super.clone() silly since we're just blowing away all
    // the stuff that super might be doing anyway, but for motivations on
    // this, see:
    // http://www.javaworld.com/javaworld/jw-01-1999/jw-01-object.html
    SequencedHashMap map = (SequencedHashMap)super.clone();
    // create new, empty sentinel
    map.sentinel = createSentinel();
    // create a new, empty entry map
    // note: this does not preserve the initial capacity and load factor.
    map.entries = new HashMap();
    // add all the mappings in sequence order via putAll
    map.putAll(this);
    // Note: We cannot just clone the hashmap and sentinel because we must
    // duplicate our internal structures. Cloning those two will not clone all
    // the other entries they reference, and so the cloned hash map will not be
    // able to maintain internal consistency because there are two objects with
    // the same entries. See discussion in the Entry implementation on why we
    // cannot implement a clone of the Entry (and thus why we need to recreate
    // everything).
    return map;
}
/**
 * Returns the Map.Entry at the specified index.
 *
 * @exception ArrayIndexOutOfBoundsException if the specified index is
 * <code>&lt; 0</code> or <code>&gt;=</code> the size of the map.
 **/
private Map.Entry getEntry(int index) {
    if (index < 0) {
        throw new ArrayIndexOutOfBoundsException(index + " < 0");
    }
    // Walk until pos.next is the requested element; i tracks the index of
    // pos itself, starting at -1 for the sentinel.
    Entry pos = sentinel;
    int i = -1;
    while (i < index - 1 && pos.next != sentinel) {
        i++;
        pos = pos.next;
    }
    // Reaching the sentinel means the index is past the end of the list.
    if (pos.next == sentinel) {
        throw new ArrayIndexOutOfBoundsException(index + " >= " + (i + 1));
    }
    return pos.next;
}
/**
 * Returns the key at the specified index.
 *
 * @exception ArrayIndexOutOfBoundsException if the <code>index</code> is
 * <code>&lt; 0</code> or <code>&gt;=</code> the size of the map.
 */
public Object get (int index)
{
    Map.Entry entry = getEntry(index);
    return entry.getKey();
}
/**
 * Returns the value at the specified index.
 *
 * @exception ArrayIndexOutOfBoundsException if the <code>index</code> is
 * <code>&lt; 0</code> or <code>&gt;=</code> the size of the map.
 */
public Object getValue (int index)
{
    Map.Entry entry = getEntry(index);
    return entry.getValue();
}
/**
 * Returns the index of the specified key, or <code>-1</code> if the key
 * is not present in the map.
 *
 * <p>Note: the previous implementation dereferenced the internal entry
 * without a null check and therefore threw a NullPointerException for an
 * absent key; returning -1 follows the conventional indexOf contract and
 * only changes behaviour on inputs that previously crashed.</p>
 */
public int indexOf (Object key)
{
    Entry e = (Entry)entries.get(key);
    // entries.get returns null for an unknown key; guard before walking.
    if(e == null) {
        return -1;
    }
    // Count the hops back to the head of the sequence.
    int pos = 0;
    while(e.prev != sentinel) {
        pos++;
        e = e.prev;
    }
    return pos;
}
/**
 * Returns a key iterator.
 */
public Iterator iterator ()
{
    // Keys are produced in sequence (insertion) order via the keySet view.
    return keySet().iterator();
}
/**
 * Returns the last index of the specified key.
 */
public int lastIndexOf (Object key)
{
    // keys in a map are guaranteed to be unique, so the last index always
    // equals the first
    return indexOf(key);
}
/**
 * Returns a List view of the keys rather than a set view. The returned
 * list is unmodifiable. This is required because changes to the values of
 * the list (using {@link java.util.ListIterator#set(Object)}) would
 * effectively remove the value from the list and reinsert it at the end,
 * an unexpected side effect of mutating a list: changing a key changes
 * when the mapping is added to the map and thus where it appears in the
 * sequence.
 *
 * <P>An alternative to this method is to use {@link #keySet()}
 *
 * @see #keySet()
 * @return The ordered list of keys.
 */
public List sequence()
{
    // Snapshot (not a live view): copy the keys in sequence order, then
    // wrap the copy so callers cannot mutate it.
    List keys = new ArrayList(size());
    for (Iterator iter = keySet().iterator(); iter.hasNext(); ) {
        keys.add(iter.next());
    }
    return Collections.unmodifiableList(keys);
}
/**
 * Removes the element at the specified index.
 *
 * @param index The index of the object to remove.
 * @return The previous value coressponding the <code>key</code>, or
 * <code>null</code> if none existed.
 *
 * @exception ArrayIndexOutOfBoundsException if the <code>index</code> is
 * <code>&lt; 0</code> or <code>&gt;=</code> the size of the map.
 */
public Object remove (int index)
{
    // Resolve the key at the index, then remove by key (note: this walks
    // the sequence once for get(int) and again inside indexOf-style lookup
    // in the backing map).
    return remove(get(index));
}
}
@@ -0,0 +1,250 @@
/*
* $Header: /home/jerenkrantz/tmp/commons/commons-convert/cvs/home/cvs/jakarta-commons//collections/src/java/org/apache/commons/collections/iterators/ArrayListIterator.java,v 1.5 2003/09/29 03:56:12 psteitz Exp $
* ====================================================================
*
* The Apache Software License, Version 1.1
*
* Copyright (c) 1999-2003 The Apache Software Foundation. All rights
* reserved.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions
* are met:
*
* 1. Redistributions of source code must retain the above copyright
* notice, this list of conditions and the following disclaimer.
*
* 2. Redistributions in binary form must reproduce the above copyright
* notice, this list of conditions and the following disclaimer in
* the documentation and/or other materials provided with the
* distribution.
*
* 3. The end-user documentation included with the redistribution, if
* any, must include the following acknowledgement:
* "This product includes software developed by the
* Apache Software Foundation (http://www.apache.org/)."
* Alternately, this acknowledgement may appear in the software itself,
* if and wherever such third-party acknowledgements normally appear.
*
* 4. The names "The Jakarta Project", "Commons", and "Apache Software
* Foundation" must not be used to endorse or promote products derived
* from this software without prior written permission. For written
* permission, please contact apache@apache.org.
*
* 5. Products derived from this software may not be called "Apache"
* nor may "Apache" appear in their names without prior written
* permission of the Apache Software Foundation.
*
* THIS SOFTWARE IS PROVIDED ``AS IS'' AND ANY EXPRESSED OR IMPLIED
* WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES
* OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
* DISCLAIMED. IN NO EVENT SHALL THE APACHE SOFTWARE FOUNDATION OR
* ITS CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
* SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
* LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF
* USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND
* ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
* OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT
* OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF
* SUCH DAMAGE.
* ====================================================================
*
* This software consists of voluntary contributions made by many
* individuals on behalf of the Apache Software Foundation. For more
* information on the Apache Software Foundation, please see
* <http://www.apache.org/>.
*
*/
package org.apache.commons.collections.iterators;
import java.lang.reflect.Array;
import java.util.NoSuchElementException;
/**
 * A {@link java.util.ListIterator ListIterator} over an array of objects
 * or primitives.
 * <p>
 * If the array is known to contain objects,
 * {@link org.apache.commons.collections.iterators.ObjectArrayListIterator ObjectArrayListIterator}
 * is a better choice, as it will perform better.
 * <p>
 * Because an array cannot change size, {@link #add(Object)} and
 * {@link #remove()} are not supported; {@link #set(Object)} is.
 *
 * @see org.apache.commons.collections.iterators.ArrayIterator
 * @see java.util.Iterator
 * @see java.util.ListIterator
 *
 * @since Commons Collections 2.2
 * @version $Revision: 1.5 $ $Date: 2003/09/29 03:56:12 $
 *
 * @author <a href="mailto:neilotoole@users.sourceforge.net">Neil O'Toole</a>
 * @author Stephen Colebourne
 * @author Phil Steitz
 */
public class ArrayListIterator extends ArrayIterator implements ResetableListIterator {

    /**
     * Index of the element most recently returned by <code>next()</code> or
     * <code>previous()</code>, or <code>-1</code> if neither has been invoked
     * yet. Required to implement the {@link #set} method.
     */
    protected int lastItemIndex = -1;

    /**
     * Creates an iterator equivalent to an empty iterator; call
     * {@link #setArray(Object)} before use to supply the array to iterate over.
     */
    public ArrayListIterator() {
        super();
    }

    /**
     * Creates an iterator over every element of the given array.
     *
     * @param array the array to iterate over
     * @throws IllegalArgumentException if <code>array</code> is not an array.
     * @throws NullPointerException if <code>array</code> is <code>null</code>
     */
    public ArrayListIterator(Object array) {
        super(array);
    }

    /**
     * Creates an iterator over the given array, starting at index
     * <code>start</code>.
     *
     * @param array the array to iterate over
     * @param start the index to start iterating at
     * @throws IllegalArgumentException if <code>array</code> is not an array.
     * @throws NullPointerException if <code>array</code> is <code>null</code>
     * @throws IndexOutOfBoundsException if the start index is out of bounds
     */
    public ArrayListIterator(Object array, int start) {
        super(array, start);
        this.startIndex = start;
    }

    /**
     * Creates an iterator over the half-open index range
     * [<code>start</code>, <code>end</code>) of the given array.
     *
     * @param array the array to iterate over
     * @param start the index to start iterating at
     * @param end the index (exclusive) to finish iterating at
     * @throws IllegalArgumentException if <code>array</code> is not an array.
     * @throws IndexOutOfBoundsException if the start or end index is out of bounds
     * @throws IllegalArgumentException if end index is before the start
     * @throws NullPointerException if <code>array</code> is <code>null</code>
     */
    public ArrayListIterator(Object array, int start, int end) {
        super(array, start, end);
        this.startIndex = start;
    }

    // ListIterator interface
    //-------------------------------------------------------------------------

    /**
     * Tells whether a call to {@link #previous()} would succeed.
     *
     * @return true if there is a previous element to return
     */
    public boolean hasPrevious() {
        return index > startIndex;
    }

    /**
     * Returns the element before the cursor and moves the cursor back one.
     *
     * @return the previous element
     * @throws NoSuchElementException if there is no previous element
     */
    public Object previous() {
        if (!hasPrevious()) {
            throw new NoSuchElementException();
        }
        lastItemIndex = --index;
        return Array.get(array, index);
    }

    /**
     * Returns the element at the cursor and advances the cursor by one.
     *
     * @return the next element
     * @throws NoSuchElementException if there is no next element
     */
    public Object next() {
        if (!hasNext()) {
            throw new NoSuchElementException();
        }
        lastItemIndex = index;
        return Array.get(array, index++);
    }

    /**
     * Returns the index, relative to the start index, of the element a call
     * to {@link #next()} would return.
     *
     * @return the index of the item to be retrieved next
     */
    public int nextIndex() {
        return index - startIndex;
    }

    /**
     * Returns the index, relative to the start index, of the element a call
     * to {@link #previous()} would return.
     *
     * @return the index of the item to be retrieved next
     */
    public int previousIndex() {
        return index - startIndex - 1;
    }

    /**
     * Unsupported: the backing array cannot change size.
     *
     * @throws UnsupportedOperationException always thrown.
     * @see java.util.ListIterator#set
     */
    public void add(Object o) {
        throw new UnsupportedOperationException("add() method is not supported");
    }

    /**
     * Replaces, in the array, the element last returned by {@link #next()}
     * or {@link #previous()}.
     * <p>
     * Unlike {@link ListIterator} implementations that also support
     * <code>add()</code> and <code>remove()</code>, this implementation
     * allows <code>set()</code> to be called any number of times per call
     * to <code>next()</code> or <code>previous()</code>.
     *
     * @see java.util.ListIterator#set
     */
    public void set(Object o) {
        if (lastItemIndex == -1) {
            throw new IllegalStateException("must call next() or previous() before a call to set()");
        }
        Array.set(array, lastItemIndex, o);
    }

    /**
     * Rewinds the iterator to its start index.
     */
    public void reset() {
        super.reset();
        lastItemIndex = -1;
    }

}
@@ -0,0 +1,286 @@
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.commons.compress.compressors.snappy;
import java.io.IOException;
import java.io.InputStream;
import org.apache.commons.compress.compressors.lz77support.AbstractLZ77CompressorInputStream;
import org.apache.commons.compress.utils.ByteUtils;
/**
 * CompressorInputStream for the raw Snappy format.
 *
 * <p>This implementation uses an internal buffer in order to handle
 * the back-references that are at the heart of the LZ77 algorithm.
 * The size of the buffer must be at least as big as the biggest
 * offset used in the compressed stream. The current version of the
 * Snappy algorithm as defined by Google works on 32k blocks and
 * doesn't contain offsets bigger than 32k which is the default block
 * size used by this class.</p>
 *
 * @see <a href="https://github.com/google/snappy/blob/master/format_description.txt">Snappy compressed format description</a>
 * @since 1.7
 */
public class SnappyCompressorInputStream extends AbstractLZ77CompressorInputStream {
    /** Mask used to determine the type of "tag" being processed */
    private static final int TAG_MASK = 0x03;

    /** Default block size */
    public static final int DEFAULT_BLOCK_SIZE = 32768;

    /** The size of the uncompressed data */
    private final int size;

    /** Number of uncompressed bytes still to be read. */
    private int uncompressedBytesRemaining;

    /** Current state of the stream */
    private State state = State.NO_BLOCK;

    /** Set once all uncompressed data has been delivered. */
    private boolean endReached = false;

    /**
     * Constructor using the default buffer size of 32k.
     *
     * @param is
     *            An InputStream to read compressed data from
     *
     * @throws IOException if reading fails
     */
    public SnappyCompressorInputStream(final InputStream is) throws IOException {
        this(is, DEFAULT_BLOCK_SIZE);
    }

    /**
     * Constructor using a configurable buffer size.
     *
     * @param is
     *            An InputStream to read compressed data from
     * @param blockSize
     *            The block size used in compression
     *
     * @throws IOException if reading fails
     */
    public SnappyCompressorInputStream(final InputStream is, final int blockSize)
            throws IOException {
        super(is, blockSize);
        // NOTE(review): the format allows sizes up to 2^32-1 but this cast
        // truncates to int — sizes >= 2 GiB are not representable here.
        uncompressedBytesRemaining = size = (int) readSize();
    }

    /**
     * {@inheritDoc}
     */
    @Override
    public int read(final byte[] b, final int off, final int len) throws IOException {
        if (endReached) {
            return -1;
        }
        switch (state) {
        case NO_BLOCK:
            // Parse the next tag byte, then retry the read in the new state.
            fill();
            return read(b, off, len);
        case IN_LITERAL:
            int litLen = readLiteral(b, off, len);
            if (!hasMoreDataInBlock()) {
                state = State.NO_BLOCK;
            }
            return litLen;
        case IN_BACK_REFERENCE:
            int backReferenceLen = readBackReference(b, off, len);
            if (!hasMoreDataInBlock()) {
                state = State.NO_BLOCK;
            }
            return backReferenceLen;
        default:
            throw new IOException("Unknown stream state " + state);
        }
    }

    /**
     * Try to fill the buffer with the next block of data by decoding the
     * next element's tag byte and setting up either a literal or a
     * back-reference.
     */
    private void fill() throws IOException {
        if (uncompressedBytesRemaining == 0) {
            endReached = true;
            return;
        }

        int b = readOneByte();
        if (b == -1) {
            throw new IOException("Premature end of stream reading block start");
        }
        int length = 0;
        int offset = 0;

        // The low two bits of the tag byte select the element type.
        switch (b & TAG_MASK) {
        case 0x00:
            length = readLiteralLength(b);
            uncompressedBytesRemaining -= length;
            startLiteral(length);
            state = State.IN_LITERAL;
            break;
        case 0x01:
            /*
             * These elements can encode lengths between [4..11] bytes and
             * offsets between [0..2047] bytes. (len-4) occupies three bits
             * and is stored in bits [2..4] of the tag byte. The offset
             * occupies 11 bits, of which the upper three are stored in the
             * upper three bits ([5..7]) of the tag byte, and the lower
             * eight are stored in a byte following the tag byte.
             */
            length = 4 + ((b >> 2) & 0x07);
            uncompressedBytesRemaining -= length;
            offset = (b & 0xE0) << 3;
            b = readOneByte();
            if (b == -1) {
                throw new IOException("Premature end of stream reading back-reference length");
            }
            offset |= b;

            startBackReference(offset, length);
            state = State.IN_BACK_REFERENCE;
            break;
        case 0x02:
            /*
             * These elements can encode lengths between [1..64] and offsets
             * from [0..65535]. (len-1) occupies six bits and is stored in
             * the upper six bits ([2..7]) of the tag byte. The offset is
             * stored as a little-endian 16-bit integer in the two bytes
             * following the tag byte.
             */
            length = (b >> 2) + 1;
            uncompressedBytesRemaining -= length;

            offset = (int) ByteUtils.fromLittleEndian(supplier, 2);

            startBackReference(offset, length);
            state = State.IN_BACK_REFERENCE;
            break;
        case 0x03:
            /*
             * These are like the copies with 2-byte offsets (see previous
             * subsection), except that the offset is stored as a 32-bit
             * integer instead of a 16-bit integer (and thus will occupy
             * four bytes).
             */
            length = (b >> 2) + 1;
            uncompressedBytesRemaining -= length;

            offset = (int) ByteUtils.fromLittleEndian(supplier, 4) & 0x7fffffff;

            startBackReference(offset, length);
            state = State.IN_BACK_REFERENCE;
            break;
        default:
            // impossible as TAG_MASK is two bits and all four possible cases have been covered
            break;
        }
    }

    /**
     * Decodes the length of a literal element.
     *
     * For literals up to and including 60 bytes in length, the
     * upper six bits of the tag byte contain (len-1). The literal
     * follows immediately thereafter in the bytestream. - For
     * longer literals, the (len-1) value is stored after the tag
     * byte, little-endian. The upper six bits of the tag byte
     * describe how many bytes are used for the length; 60, 61, 62
     * or 63 for 1-4 bytes, respectively. The literal itself follows
     * after the length.
     */
    private int readLiteralLength(final int b) throws IOException {
        int length;
        switch (b >> 2) {
        case 60:
            length = readOneByte();
            if (length == -1) {
                throw new IOException("Premature end of stream reading literal length");
            }
            break;
        case 61:
            length = (int) ByteUtils.fromLittleEndian(supplier, 2);
            break;
        case 62:
            length = (int) ByteUtils.fromLittleEndian(supplier, 3);
            break;
        case 63:
            length = (int) ByteUtils.fromLittleEndian(supplier, 4);
            break;
        default:
            length = b >> 2;
            break;
        }

        return length + 1;
    }

    /**
     * The stream starts with the uncompressed length (up to a maximum of 2^32 -
     * 1), stored as a little-endian varint. Varints consist of a series of
     * bytes, where the lower 7 bits are data and the upper bit is set iff there
     * are more bytes to be read. In other words, an uncompressed length of 64
     * would be stored as 0x40, and an uncompressed length of 2097150 (0x1FFFFE)
     * would be stored as 0xFE 0xFF 0x7F.
     *
     * @return The size of the uncompressed data
     *
     * @throws IOException
     *             Could not read a byte
     */
    private long readSize() throws IOException {
        int index = 0;
        long sz = 0;
        int b = 0;

        do {
            b = readOneByte();
            if (b == -1) {
                throw new IOException("Premature end of stream reading size");
            }
            // BUGFIX: widen to long BEFORE shifting. The fifth varint byte is
            // shifted by 28; with an int shift a set data bit 3 lands in the
            // int sign bit and sign-extension would corrupt the upper half of
            // sz when OR-ed in.
            sz |= ((long) (b & 0x7f)) << (index++ * 7);
        } while (0 != (b & 0x80));
        return sz;
    }

    /**
     * Get the uncompressed size of the stream
     *
     * @return the uncompressed size
     */
    @Override
    public int getSize() {
        return size;
    }

    /** Parsing state: between elements, inside a literal, or inside a back-reference. */
    private enum State {
        NO_BLOCK, IN_LITERAL, IN_BACK_REFERENCE
    }
}
@@ -0,0 +1,217 @@
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*/
package org.apache.commons.compress.archivers.sevenz;
import java.io.IOException;
import java.io.InputStream;
import java.security.GeneralSecurityException;
import java.security.MessageDigest;
import java.security.NoSuchAlgorithmException;
import java.util.Arrays;
import javax.crypto.Cipher;
import javax.crypto.CipherInputStream;
import javax.crypto.SecretKey;
import javax.crypto.spec.IvParameterSpec;
import javax.crypto.spec.SecretKeySpec;
import org.apache.commons.compress.compressors.bzip2.BZip2CompressorInputStream;
import org.tukaani.xz.LZMA2InputStream;
/**
 * Registry of the supported 7z decompression/decryption methods and the
 * logic to wrap a packed input stream with the matching decoder.
 */
class Coders {

    /**
     * Wraps {@code is} with the decoder identified by the coder's
     * decompression method id.
     *
     * @param is the raw (packed) input stream
     * @param coder the coder entry taken from the archive header
     * @param password password for encrypted content, may be {@code null}
     * @return a stream producing the decoded data
     * @throws IOException if the method id is not supported
     */
    static InputStream addDecoder(final InputStream is,
            final Coder coder, final String password) throws IOException {
        for (final CoderId coderId : coderTable) {
            if (Arrays.equals(coderId.id, coder.decompressionMethodId)) {
                return coderId.coder.decode(is, coder, password);
            }
        }
        throw new IOException("Unsupported compression method " +
                Arrays.toString(coder.decompressionMethodId));
    }

    /** Known raw method ids mapped to their decoder implementations. */
    static CoderId[] coderTable = new CoderId[] {
        new CoderId(new byte[] { (byte)0x00 }, new CopyDecoder()),
        new CoderId(new byte[] { (byte)0x21 }, new LZMA2Decoder()),
        // FIXME: gives corrupt output
        //new CoderId(new byte[] { (byte)0x04, (byte)0x01, (byte)0x08 }, new DeflateDecoder()),
        new CoderId(new byte[] { (byte)0x04, (byte)0x02, (byte)0x02 }, new BZIP2Decoder()),
        new CoderId(new byte[] { (byte)0x06, (byte)0xf1, (byte)0x07, (byte)0x01 }, new AES256SHA256Decoder())
    };

    /** Pairs a raw method id with its decoder. */
    static class CoderId {
        CoderId(final byte[] id, final CoderBase coder) {
            this.id = id;
            this.coder = coder;
        }

        final byte[] id;
        final CoderBase coder;
    }

    /** Base class of all decoders. */
    static abstract class CoderBase {
        /**
         * Wraps {@code in} so its contents are decoded according to
         * {@code coder}'s method and properties.
         */
        abstract InputStream decode(final InputStream in, final Coder coder,
                String password) throws IOException;
    }

    /** "Stored" entries: passes the input through untouched. */
    static class CopyDecoder extends CoderBase {
        @Override
        InputStream decode(final InputStream in, final Coder coder,
                String password) throws IOException {
            return in;
        }
    }

    /** LZMA2-compressed entries, decoded via XZ for Java. */
    static class LZMA2Decoder extends CoderBase {
        @Override
        InputStream decode(final InputStream in, final Coder coder,
                String password) throws IOException {
            // The single property byte encodes the dictionary size:
            // size = (2 | (bits & 1)) << (bits / 2 + 11); 40 means 4 GiB - 1.
            final int dictionarySizeBits = 0xff & coder.properties[0];
            if ((dictionarySizeBits & (~0x3f)) != 0) {
                throw new IOException("Unsupported LZMA2 property bits");
            }
            if (dictionarySizeBits > 40) {
                throw new IOException("Dictionary larger than 4GiB maximum size");
            }
            final int dictionarySize;
            if (dictionarySizeBits == 40) {
                dictionarySize = 0xFFFFffff;
            } else {
                dictionarySize = (2 | (dictionarySizeBits & 0x1)) << (dictionarySizeBits / 2 + 11);
            }
            return new LZMA2InputStream(in, dictionarySize);
        }
    }

    // static class DeflateDecoder extends CoderBase {
    //     @Override
    //     InputStream decode(final InputStream in, final Coder coder, final String password)
    //         throws IOException {
    //         System.out.println("deflate prop count = " + (coder.properties == null ? -1 : coder.properties.length));
    //         return new DeflaterInputStream(in, new Deflater(Deflater.DEFAULT_COMPRESSION, true));
    //         //return new GZIPInputStream(in);
    //     }
    // }

    /** BZIP2-compressed entries. */
    static class BZIP2Decoder extends CoderBase {
        @Override
        InputStream decode(final InputStream in, final Coder coder, final String password)
                throws IOException {
            return new BZip2CompressorInputStream(in);
        }
    }

    /**
     * AES-256 decryption with an SHA-256 based key-derivation from the
     * password, per the 7z "6F10701" method. Decryption is initialized
     * lazily on the first read so opening an archive does not require a
     * password until encrypted content is actually accessed.
     */
    static class AES256SHA256Decoder extends CoderBase {
        @Override
        InputStream decode(final InputStream in, final Coder coder,
                final String password) throws IOException {
            return new InputStream() {
                private boolean isInitialized = false;
                private CipherInputStream cipherInputStream = null;

                /** Derives the AES key from the password and wraps {@code in} with the cipher. */
                private CipherInputStream init() throws IOException {
                    if (isInitialized) {
                        return cipherInputStream;
                    }
                    // Property layout: byte0 = cyclesPower + size flag bits,
                    // byte1 = additional IV/salt size nibbles, then salt, then IV.
                    final int byte0 = 0xff & coder.properties[0];
                    final int numCyclesPower = byte0 & 0x3f;
                    final int byte1 = 0xff & coder.properties[1];
                    final int ivSize = ((byte0 >> 6) & 1) + (byte1 & 0x0f);
                    final int saltSize = ((byte0 >> 7) & 1) + (byte1 >> 4);
                    //debug("numCyclesPower=" + numCyclesPower + ", saltSize=" + saltSize + ", ivSize=" + ivSize);
                    if (2 + saltSize + ivSize > coder.properties.length) {
                        throw new IOException("Salt size + IV size too long");
                    }
                    final byte[] salt = new byte[saltSize];
                    System.arraycopy(coder.properties, 2, salt, 0, saltSize);
                    final byte[] iv = new byte[16];
                    System.arraycopy(coder.properties, 2 + saltSize, iv, 0, ivSize);

                    if (password == null) {
                        throw new IOException("Cannot read encrypted files without a password");
                    }
                    final byte[] passwordBytes = password.getBytes("UTF-16LE");
                    final byte[] aesKeyBytes;
                    if (numCyclesPower == 0x3f) {
                        // Special case: no hashing, key is salt + raw password bytes.
                        aesKeyBytes = new byte[32];
                        System.arraycopy(salt, 0, aesKeyBytes, 0, saltSize);
                        System.arraycopy(passwordBytes, 0, aesKeyBytes, saltSize,
                                Math.min(passwordBytes.length, aesKeyBytes.length - saltSize));
                    } else {
                        final MessageDigest digest;
                        try {
                            digest = MessageDigest.getInstance("SHA-256");
                        } catch (NoSuchAlgorithmException noSuchAlgorithmException) {
                            IOException ioe = new IOException("SHA-256 is unsupported by your Java implementation");
                            ioe.initCause(noSuchAlgorithmException);
                            throw ioe;
                            // TODO: simplify when Compress requires Java 1.6
                            // throw new IOException("SHA-256 is unsupported by your Java implementation",
                            //    noSuchAlgorithmException);
                        }
                        // Key stretching: hash salt+password+counter 2^cyclesPower times.
                        final byte[] extra = new byte[8];
                        for (long j = 0; j < (1L << numCyclesPower); j++) {
                            digest.update(salt);
                            digest.update(passwordBytes);
                            digest.update(extra);
                            for (int k = 0; k < extra.length; k++) {
                                ++extra[k];
                                if (extra[k] != 0) {
                                    break;
                                }
                            }
                        }
                        aesKeyBytes = digest.digest();
                    }

                    final SecretKey aesKey = new SecretKeySpec(aesKeyBytes, "AES");
                    try {
                        final Cipher cipher = Cipher.getInstance("AES/CBC/NoPadding");
                        cipher.init(Cipher.DECRYPT_MODE, aesKey, new IvParameterSpec(iv));
                        cipherInputStream = new CipherInputStream(in, cipher);
                        isInitialized = true;
                        return cipherInputStream;
                    } catch (GeneralSecurityException generalSecurityException) {
                        IOException ioe = new IOException("Decryption error " +
                                "(do you have the JCE Unlimited Strength Jurisdiction Policy Files installed?)");
                        ioe.initCause(generalSecurityException);
                        throw ioe;
                        // TODO: simplify when Compress requires Java 1.6
                        // throw new IOException("Decryption error " +
                        //    "(do you have the JCE Unlimited Strength Jurisdiction Policy Files installed?)",
                        //    generalSecurityException);
                    }
                }

                @Override
                public int read() throws IOException {
                    return init().read();
                }

                @Override
                public int read(byte[] b, int off, int len) throws IOException {
                    // BUGFIX: delegate the whole requested range. The previous
                    // implementation called init().read() — it consumed a single
                    // byte, never stored it into b, and returned the byte VALUE
                    // as the count, corrupting all bulk reads.
                    return init().read(b, off, len);
                }

                @Override
                public void close() {
                    // Deliberately empty: the underlying stream is shared with
                    // the rest of the decoder stack and must not be closed here.
                }
            };
        }
    }
}
@@ -0,0 +1,968 @@
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*/
package org.apache.commons.compress.archivers.sevenz;
import java.io.ByteArrayInputStream;
import java.io.DataInput;
import java.io.DataInputStream;
import java.io.File;
import java.io.IOException;
import java.io.InputStream;
import java.io.RandomAccessFile;
import java.util.Arrays;
import java.util.BitSet;
import java.util.zip.CRC32;
import org.apache.commons.compress.utils.CRC32VerifyingInputStream;
/**
* Reads a 7z file, using RandomAccessFile under
* the covers.
* <p>
* The 7z file format is a flexible container
* that can contain many compression and
* encryption types, but at the moment only
* only Copy, LZMA2, BZIP2, and AES-256 + SHA-256
* are supported, and archive header compression
* (when it uses the unsupported LZMA
* compression) isn't. So the only archives
* that can be read are the following:
* <pre>
* 7z a -mhc=off [-mhe=on] -mx=0 [-ppassword] archive.7z files
* 7z a -mhc=off [-mhe=on] -m0=LZMA2 [-ppassword] archive.7z files
* 7z a -mhc=off [-mhe=on] -m0=BZIP2 [-ppassword] archive.7z files
* </pre>
* <p>
* The format is very Windows/Intel specific,
* so it uses little-endian byte order,
* doesn't store user/group or permission bits,
* and represents times using NTFS timestamps
* (100 nanosecond units since 1 January 1601).
* Hence the official tools recommend against
* using it for backup purposes on *nix, and
* recommend .tar.7z or .tar.lzma or .tar.xz
* instead.
* <p>
* Both the header and file contents may be
* compressed and/or encrypted. With both
* encrypted, neither file names nor file
* contents can be read, but the use of
* encryption isn't plausibly deniable.
*
* @NotThreadSafe
*/
public class SevenZFile {
private static final boolean DEBUG = false;
private static final int SIGNATURE_HEADER_SIZE = 32;
private RandomAccessFile file;
private final Archive archive;
private int currentEntryIndex = -1;
private int currentFolderIndex = -1;
private InputStream currentFolderInputStream = null;
private InputStream currentEntryInputStream = null;
private String password;
private static final byte[] sevenZSignature = {
(byte)'7', (byte)'z', (byte)0xBC, (byte)0xAF, (byte)0x27, (byte)0x1C
};
public SevenZFile(final File filename, final String password) throws IOException {
boolean succeeded = false;
this.password = password;
this.file = new RandomAccessFile(filename, "r");
try {
archive = readHeaders();
succeeded = true;
} finally {
if (!succeeded) {
this.file.close();
}
}
}
/**
 * Opens the given unencrypted 7z archive for reading.
 *
 * @param filename the archive file
 * @throws IOException if the file cannot be opened or its headers parsed
 */
public SevenZFile(final File filename) throws IOException {
    this(filename, null);
}
/**
 * Closes the underlying file, swallowing any close error; safe to call
 * more than once.
 */
public void close() {
    if (file == null) {
        return;
    }
    try {
        file.close();
    } catch (IOException ignored) { // NOPMD - best-effort close
    }
    file = null;
}
/** Prints {@code str} when compile-time debugging is enabled. */
private static void debug(String str) {
    if (!DEBUG) {
        return;
    }
    System.out.println(str);
}
/** Prints a formatted message when compile-time debugging is enabled. */
private static void debug(String fmt, Object... args) {
    if (!DEBUG) {
        return;
    }
    System.out.format(fmt, args);
}
/**
 * Advances to the next archive entry and prepares its decoding stream.
 *
 * @return the next entry, or null when all entries have been returned
 * @throws IOException if preparing the decoding stream fails
 */
public SevenZArchiveEntry getNextEntry() throws IOException {
    final int lastIndex = archive.files.length - 1;
    if (currentEntryIndex >= lastIndex) {
        return null;
    }
    currentEntryIndex++;
    final SevenZArchiveEntry entry = archive.files[currentEntryIndex];
    buildDecodingStream();
    return entry;
}
/**
 * Reads and validates the signature header, then locates, CRC-checks and
 * parses the "next header" to build the in-memory {@code Archive} model.
 *
 * @return the parsed archive metadata
 * @throws IOException on a bad signature, unsupported version, CRC
 *         mismatch, or a broken/unsupported header
 */
private Archive readHeaders() throws IOException {
    debug("SignatureHeader");
    // Fixed 6-byte magic: '7' 'z' 0xBC 0xAF 0x27 0x1C.
    final byte[] signature = new byte[6];
    file.readFully(signature);
    if (!Arrays.equals(signature, sevenZSignature)) {
        throw new IOException("Bad 7z signature");
    }
    // 7zFormat.txt has it wrong - it's first major then minor
    final byte archiveVersionMajor = file.readByte();
    final byte archiveVersionMinor = file.readByte();
    debug(" archiveVersion major=%d, minor=%d\n",
        archiveVersionMajor, archiveVersionMinor);
    if (archiveVersionMajor != 0) {
        throw new IOException(String.format("Unsupported 7z version (%d,%d)",
            archiveVersionMajor, archiveVersionMinor));
    }
    // The start header is CRC-protected and gives the offset/size of the
    // real ("next") header; values are stored little-endian, hence the
    // reverseBytes on the big-endian DataInput reads.
    final int startHeaderCrc = Integer.reverseBytes(file.readInt());
    final StartHeader startHeader = readStartHeader(startHeaderCrc);
    final int nextHeaderSizeInt = (int) startHeader.nextHeaderSize;
    if (nextHeaderSizeInt != startHeader.nextHeaderSize) {
        // The next header is buffered in memory below, so it must fit in an int.
        throw new IOException("cannot handle nextHeaderSize " + startHeader.nextHeaderSize);
    }
    file.seek(SIGNATURE_HEADER_SIZE + startHeader.nextHeaderOffset);
    final byte[] nextHeader = new byte[nextHeaderSizeInt];
    file.readFully(nextHeader);
    // Verify the next header against its stored CRC before parsing it.
    final CRC32 crc = new CRC32();
    crc.update(nextHeader);
    if (startHeader.nextHeaderCrc != (int) crc.getValue()) {
        throw new IOException("NextHeader CRC mismatch");
    }
    final ByteArrayInputStream byteStream = new ByteArrayInputStream(nextHeader);
    DataInputStream nextHeaderInputStream = new DataInputStream(
        byteStream);
    Archive archive = new Archive();
    int nid = nextHeaderInputStream.readUnsignedByte();
    if (nid == NID.kEncodedHeader) {
        // The header itself is compressed/encrypted: decode it, then parse
        // the decoded bytes as a plain header.
        nextHeaderInputStream = readEncodedHeader(nextHeaderInputStream, archive);
        // Archive gets rebuilt with the new header
        archive = new Archive();
        nid = nextHeaderInputStream.readUnsignedByte();
    }
    if (nid == NID.kHeader) {
        readHeader(nextHeaderInputStream, archive);
    } else {
        throw new IOException("Broken or unsupported archive: no Header");
    }
    return archive;
}
/**
 * Reads the 20-byte start header (next-header offset, size and CRC),
 * verifying it against the given CRC while reading.
 *
 * @param startHeaderCrc expected CRC of the 20 header bytes
 * @return the populated start header
 * @throws IOException on read failure or CRC mismatch
 */
private StartHeader readStartHeader(final int startHeaderCrc) throws IOException {
    final StartHeader result = new StartHeader();
    DataInputStream in = null;
    try {
        in = new DataInputStream(new CRC32VerifyingInputStream(
                new BoundedRandomAccessFileInputStream(file, 20), 20, startHeaderCrc));
        // Values are stored little-endian; DataInput reads big-endian.
        result.nextHeaderOffset = Long.reverseBytes(in.readLong());
        result.nextHeaderSize = Long.reverseBytes(in.readLong());
        result.nextHeaderCrc = Integer.reverseBytes(in.readInt());
        return result;
    } finally {
        if (in != null) {
            in.close();
        }
    }
}
/**
 * Parses the top-level Header section: optional archive properties, the
 * main streams info and the files info, terminated by kEnd.
 *
 * @throws IOException on unsupported sections or a badly terminated header
 */
private void readHeader(final DataInput header, final Archive archive) throws IOException {
    debug("Header");
    int sectionId = header.readUnsignedByte();
    if (sectionId == NID.kArchiveProperties) {
        readArchiveProperties(header);
        sectionId = header.readUnsignedByte();
    }
    if (sectionId == NID.kAdditionalStreamsInfo) {
        // Legal in the format but not handled by this reader.
        throw new IOException("Additional streams unsupported");
    }
    if (sectionId == NID.kMainStreamsInfo) {
        readStreamsInfo(header, archive);
        sectionId = header.readUnsignedByte();
    }
    if (sectionId == NID.kFilesInfo) {
        readFilesInfo(header, archive);
        sectionId = header.readUnsignedByte();
    }
    if (sectionId != NID.kEnd) {
        throw new IOException("Badly terminated header");
    }
}
/**
 * Skips over the ArchiveProperties section, reading each property's size
 * and payload until the kEnd marker.
 */
private void readArchiveProperties(final DataInput input) throws IOException {
    // FIXME: the reference implementation just throws them away?
    debug("ArchiveProperties");
    for (int nid = input.readUnsignedByte(); nid != NID.kEnd; nid = input.readUnsignedByte()) {
        final int propertySize = (int) readUint64(input);
        final byte[] property = new byte[propertySize];
        input.readFully(property);
    }
}
/**
 * Decodes a compressed (and possibly encrypted) archive header.
 * <p>
 * Reads the streams info describing the packed header, builds the decoder
 * stack for the first folder, decodes the header bytes into memory, and
 * returns a stream over the decoded bytes for normal header parsing.
 *
 * @return a stream over the fully decoded header bytes
 * @throws IOException on read failure, unsupported coders, or CRC mismatch
 */
private DataInputStream readEncodedHeader(final DataInputStream header, final Archive archive) throws IOException {
    debug("EncodedHeader");
    readStreamsInfo(header, archive);
    // FIXME: merge with buildDecodingStream()/buildDecoderStack() at some stage?
    // Only the first folder/pack stream is used for the header.
    final Folder folder = archive.folders[0];
    final int firstPackStreamIndex = 0;
    final long folderOffset = SIGNATURE_HEADER_SIZE + archive.packPos +
        0;
    file.seek(folderOffset);
    InputStream inputStreamStack = new BoundedRandomAccessFileInputStream(file,
        archive.packSizes[firstPackStreamIndex]);
    // Chain one decoder per coder; only simple 1-in/1-out coders supported.
    for (final Coder coder : folder.coders) {
        if (coder.numInStreams != 1 || coder.numOutStreams != 1) {
            throw new IOException("Multi input/output stream coders are not yet supported");
        }
        inputStreamStack = Coders.addDecoder(inputStreamStack, coder, password);
    }
    if (folder.hasCrc) {
        // Verify the decoded header bytes against the folder's CRC while reading.
        inputStreamStack = new CRC32VerifyingInputStream(inputStreamStack,
            folder.getUnpackSize(), folder.crc);
    }
    final byte[] nextHeader = new byte[(int)folder.getUnpackSize()];
    final DataInputStream nextHeaderInputStream = new DataInputStream(inputStreamStack);
    try {
        nextHeaderInputStream.readFully(nextHeader);
    } finally {
        nextHeaderInputStream.close();
    }
    return new DataInputStream(new ByteArrayInputStream(nextHeader));
    //throw new IOException("LZMA compression unsupported, so files with compressed header cannot be read");
    // FIXME: this extracts the header to an LZMA file which can then be
    // manually decompressed.
    //        long offset = SIGNATURE_HEADER_SIZE + archive.packPos;
    //        file.seek(offset);
    //        long unpackSize = archive.folders[0].getUnpackSize();
    //        byte[] packed = new byte[(int)archive.packSizes[0]];
    //        file.readFully(packed);
    //
    //        FileOutputStream fos = new FileOutputStream(new File("/tmp/encodedHeader.7z"));
    //        fos.write(archive.folders[0].coders[0].properties);
    //        // size - assuming < 256
    //        fos.write((int)(unpackSize & 0xff));
    //        fos.write(0);
    //        fos.write(0);
    //        fos.write(0);
    //        fos.write(0);
    //        fos.write(0);
    //        fos.write(0);
    //        fos.write(0);
    //        fos.write(packed);
    //        fos.close();
}
/**
 * Parses a StreamsInfo block: the optional pack info, unpack info and
 * sub-streams info sections in order, terminated by kEnd.
 *
 * @throws IOException if the block is badly terminated
 */
private void readStreamsInfo(final DataInput header, final Archive archive) throws IOException {
    debug("StreamsInfo");
    int sectionId = header.readUnsignedByte();
    if (sectionId == NID.kPackInfo) {
        readPackInfo(header, archive);
        sectionId = header.readUnsignedByte();
    }
    if (sectionId == NID.kUnpackInfo) {
        readUnpackInfo(header, archive);
        sectionId = header.readUnsignedByte();
    }
    if (sectionId == NID.kSubStreamsInfo) {
        readSubStreamsInfo(header, archive);
        sectionId = header.readUnsignedByte();
    }
    if (sectionId != NID.kEnd) {
        throw new IOException("Badly terminated StreamsInfo");
    }
}
/**
 * Parses the PackInfo section: the absolute position of the packed
 * streams, their count and sizes, and (optionally) their CRCs.
 *
 * @throws IOException if the section is badly terminated
 */
private void readPackInfo(final DataInput header, final Archive archive) throws IOException {
    debug("PackInfo");
    archive.packPos = readUint64(header);
    final long numPackStreams = readUint64(header);
    debug(" " + numPackStreams + " pack streams");
    int nid = header.readUnsignedByte();
    if (nid == NID.kSize) {
        // One compressed size per pack stream.
        archive.packSizes = new long[(int)numPackStreams];
        for (int i = 0; i < archive.packSizes.length; i++) {
            archive.packSizes[i] = readUint64(header);
            debug(" pack size %d is %d\n", i, archive.packSizes[i]);
        }
        nid = header.readUnsignedByte();
    }
    if (nid == NID.kCRC) {
        // CRCs may be present for all streams or only a bit-flagged subset.
        archive.packCrcsDefined = readAllOrBits(header, (int)numPackStreams);
        archive.packCrcs = new int[(int)numPackStreams];
        for (int i = 0; i < (int)numPackStreams; i++) {
            if (archive.packCrcsDefined.get(i)) {
                // Stored little-endian; DataInput reads big-endian.
                archive.packCrcs[i] = Integer.reverseBytes(header.readInt());
            }
        }
        nid = header.readUnsignedByte();
    }
    if (nid != NID.kEnd) {
        throw new IOException("Badly terminated PackInfo (" + nid + ")");
    }
}
/**
 * Parses the UnpackInfo section: the folder (coder-chain) definitions,
 * each folder's uncompressed output sizes, and optional folder CRCs.
 *
 * @throws IOException on unexpected section ids or unsupported layouts
 */
private void readUnpackInfo(final DataInput header, final Archive archive) throws IOException {
    debug("UnpackInfo");
    int nid = header.readUnsignedByte();
    if (nid != NID.kFolder) {
        throw new IOException("Expected kFolder, got " + nid);
    }
    final long numFolders = readUint64(header);
    debug(" " + numFolders + " folders");
    final Folder[] folders = new Folder[(int)numFolders];
    archive.folders = folders;
    // external != 0 would mean folder data lives in a separate stream.
    final int external = header.readUnsignedByte();
    if (external != 0) {
        throw new IOException("External unsupported");
    } else {
        for (int i = 0; i < (int)numFolders; i++) {
            folders[i] = readFolder(header);
        }
    }
    nid = header.readUnsignedByte();
    if (nid != NID.kCodersUnpackSize) {
        throw new IOException("Expected kCodersUnpackSize, got " + nid);
    }
    // One uncompressed size per coder output stream in each folder.
    for (final Folder folder : folders) {
        folder.unpackSizes = new long[(int)folder.totalOutputStreams];
        for (int i = 0; i < folder.totalOutputStreams; i++) {
            folder.unpackSizes[i] = readUint64(header);
        }
    }
    nid = header.readUnsignedByte();
    if (nid == NID.kCRC) {
        // CRCs may be defined for all folders or only a bit-flagged subset.
        final BitSet crcsDefined = readAllOrBits(header, (int)numFolders);
        for (int i = 0; i < (int)numFolders; i++) {
            if (crcsDefined.get(i)) {
                folders[i].hasCrc = true;
                // Stored little-endian; DataInput reads big-endian.
                folders[i].crc = Integer.reverseBytes(header.readInt());
            } else {
                folders[i].hasCrc = false;
            }
        }
        nid = header.readUnsignedByte();
    }
    if (nid != NID.kEnd) {
        throw new IOException("Badly terminated UnpackInfo");
    }
}
/**
 * Reads the SubStreamsInfo part of the 7z header: how many unpacked
 * sub-streams (files) each folder contains, their individual sizes and CRCs.
 *
 * @param header  the header to read from
 * @param archive provides the folders; receives the parsed SubStreamsInfo
 * @throws IOException if the section is malformed
 */
private void readSubStreamsInfo(final DataInput header, final Archive archive) throws IOException {
debug("SubStreamsInfo");
// Default: one sub-stream per folder unless kNumUnpackStream says otherwise.
for (final Folder folder : archive.folders) {
folder.numUnpackSubStreams = 1;
}
int totalUnpackStreams = archive.folders.length;
int nid = header.readUnsignedByte();
if (nid == NID.kNumUnpackStream) {
totalUnpackStreams = 0;
for (final Folder folder : archive.folders) {
final long numStreams = readUint64(header);
folder.numUnpackSubStreams = (int)numStreams;
totalUnpackStreams += numStreams;
}
nid = header.readUnsignedByte();
}
final SubStreamsInfo subStreamsInfo = new SubStreamsInfo();
subStreamsInfo.unpackSizes = new long[totalUnpackStreams];
subStreamsInfo.hasCrc = new BitSet(totalUnpackStreams);
subStreamsInfo.crcs = new int[totalUnpackStreams];
int nextUnpackStream = 0;
for (final Folder folder : archive.folders) {
if (folder.numUnpackSubStreams == 0) {
continue;
}
long sum = 0;
// kSize stores sizes only for the first n-1 sub-streams of a folder;
// the last one is implied by the folder's total unpacked size.
if (nid == NID.kSize) {
for (int i = 0; i < (folder.numUnpackSubStreams - 1); i++) {
final long size = readUint64(header);
subStreamsInfo.unpackSizes[nextUnpackStream++] = size;
sum += size;
}
}
subStreamsInfo.unpackSizes[nextUnpackStream++] = folder.getUnpackSize() - sum;
}
if (nid == NID.kSize) {
nid = header.readUnsignedByte();
}
// CRCs are stored only for sub-streams whose digest is not already known
// from the folder level (i.e. folders with != 1 sub-stream or no folder CRC).
int numDigests = 0;
for (final Folder folder : archive.folders) {
if (folder.numUnpackSubStreams != 1 || !folder.hasCrc) {
numDigests += folder.numUnpackSubStreams;
}
}
if (nid == NID.kCRC) {
final BitSet hasMissingCrc = readAllOrBits(header, numDigests);
final int[] missingCrcs = new int[numDigests];
for (int i = 0; i < numDigests; i++) {
if (hasMissingCrc.get(i)) {
// CRCs are stored little-endian.
missingCrcs[i] = Integer.reverseBytes(header.readInt());
}
}
// Merge folder-level and newly read CRCs into one flat per-sub-stream view.
int nextCrc = 0;
int nextMissingCrc = 0;
for (final Folder folder: archive.folders) {
if (folder.numUnpackSubStreams == 1 && folder.hasCrc) {
subStreamsInfo.hasCrc.set(nextCrc, true);
subStreamsInfo.crcs[nextCrc] = folder.crc;
++nextCrc;
} else {
for (int i = 0; i < folder.numUnpackSubStreams; i++) {
subStreamsInfo.hasCrc.set(nextCrc, hasMissingCrc.get(nextMissingCrc));
subStreamsInfo.crcs[nextCrc] = missingCrcs[nextMissingCrc];
++nextCrc;
++nextMissingCrc;
}
}
}
nid = header.readUnsignedByte();
}
if (nid != NID.kEnd) {
throw new IOException("Badly terminated SubStreamsInfo");
}
archive.subStreamsInfo = subStreamsInfo;
}
/**
 * Reads a single Folder description from the UnpackInfo section: the coders
 * the folder is made of, the bind pairs connecting coder outputs to coder
 * inputs, and the indexes of the packed (physical) input streams.
 *
 * @param header the header to read from
 * @return the parsed folder
 * @throws IOException if the folder is malformed or uses unsupported features
 */
private Folder readFolder(final DataInput header) throws IOException {
    final Folder folder = new Folder();

    final long numCoders = readUint64(header);
    final Coder[] coders = new Coder[(int)numCoders];
    long totalInStreams = 0;
    long totalOutStreams = 0;
    for (int i = 0; i < coders.length; i++) {
        coders[i] = new Coder();
        int bits = header.readUnsignedByte();
        // Flag byte layout: low nibble = method-id length, 0x10 = complex coder,
        // 0x20 = attributes follow, 0x80 = alternative methods follow.
        final int idSize = bits & 0xf;
        final boolean isSimple = ((bits & 0x10) == 0);
        final boolean hasAttributes = ((bits & 0x20) != 0);
        final boolean moreAlternativeMethods = ((bits & 0x80) != 0);
        coders[i].decompressionMethodId = new byte[idSize];
        header.readFully(coders[i].decompressionMethodId);
        if (isSimple) {
            // Simple coders always have exactly one input and one output stream.
            coders[i].numInStreams = 1;
            coders[i].numOutStreams = 1;
        } else {
            coders[i].numInStreams = readUint64(header);
            coders[i].numOutStreams = readUint64(header);
        }
        totalInStreams += coders[i].numInStreams;
        totalOutStreams += coders[i].numOutStreams;
        if (hasAttributes) {
            final long propertiesSize = readUint64(header);
            coders[i].properties = new byte[(int)propertiesSize];
            header.readFully(coders[i].properties);
        }
        if (DEBUG) {
            final StringBuilder methodStr = new StringBuilder();
            for (final byte b : coders[i].decompressionMethodId) {
                methodStr.append(String.format("%02X", 0xff & b));
            }
            debug(" coder entry %d numInStreams=%d, numOutStreams=%d, method=%s, properties=%s\n", i,
                    coders[i].numInStreams, coders[i].numOutStreams, methodStr.toString(),
                    Arrays.toString(coders[i].properties));
        }
        // Supporting alternative methods would require another read loop here;
        // neither this code nor the reference implementation supports them.
        // (Was a degenerate "while" loop that always threw on its first pass.)
        if (moreAlternativeMethods) {
            throw new IOException("Alternative methods are unsupported, please report. " +
                    "The reference implementation doesn't support them either.");
        }
    }
    folder.coders = coders;
    folder.totalInputStreams = totalInStreams;
    folder.totalOutputStreams = totalOutStreams;
    if (totalOutStreams == 0) {
        throw new IOException("Total output streams can't be 0");
    }

    // Each bind pair connects one coder's output to another coder's input;
    // a folder with n output streams always has n - 1 bind pairs.
    final long numBindPairs = totalOutStreams - 1;
    final BindPair[] bindPairs = new BindPair[(int)numBindPairs];
    for (int i = 0; i < bindPairs.length; i++) {
        bindPairs[i] = new BindPair();
        bindPairs[i].inIndex = readUint64(header);
        bindPairs[i].outIndex = readUint64(header);
        debug(" bind pair in=%d out=%d\n", bindPairs[i].inIndex, bindPairs[i].outIndex);
    }
    folder.bindPairs = bindPairs;
    if (totalInStreams < numBindPairs) {
        throw new IOException("Total input streams can't be less than the number of bind pairs");
    }

    // Input streams not fed by a bind pair are read from packed archive streams.
    final long numPackedStreams = totalInStreams - numBindPairs;
    final long[] packedStreams = new long[(int)numPackedStreams];
    if (numPackedStreams == 1) {
        // With a single packed stream its index is implicit: it is the one
        // input stream that no bind pair produces.
        int i;
        for (i = 0; i < (int)totalInStreams; i++) {
            if (folder.findBindPairForInStream(i) < 0) {
                break;
            }
        }
        if (i == (int)totalInStreams) {
            throw new IOException("Couldn't find stream's bind pair index");
        }
        packedStreams[0] = i;
    } else {
        for (int i = 0; i < (int)numPackedStreams; i++) {
            packedStreams[i] = readUint64(header);
        }
    }
    folder.packedStreams = packedStreams;

    return folder;
}
/**
 * Reads a vector of {@code size} boolean flags that is either stored as an
 * explicit bit field or abbreviated as "all defined".
 *
 * @param header the header to read from
 * @param size   number of flags expected
 * @return the flags as a {@link BitSet}
 * @throws IOException if reading fails
 */
private BitSet readAllOrBits(final DataInput header, final int size) throws IOException {
    // A zero lead byte means explicit bit data follows; anything else means
    // every flag is implicitly set and no further data is stored.
    if (header.readUnsignedByte() == 0) {
        return readBits(header, size);
    }
    final BitSet allDefined = new BitSet(size);
    allDefined.set(0, size);
    return allDefined;
}
/**
 * Reads {@code size} bits from the header, most significant bit of each
 * byte first, into a {@link BitSet}.
 *
 * @param header the header to read from
 * @param size   number of bits to read
 * @return the bits read
 * @throws IOException if reading fails
 */
private BitSet readBits(final DataInput header, final int size) throws IOException {
    final BitSet result = new BitSet(size);
    int cachedByte = 0;
    for (int bitIndex = 0; bitIndex < size; bitIndex++) {
        final int posInByte = bitIndex & 7;
        if (posInByte == 0) {
            // Fetch the next byte every eighth bit.
            cachedByte = header.readUnsignedByte();
        }
        // Bits are stored MSB-first within each byte.
        result.set(bitIndex, ((cachedByte >>> (7 - posInByte)) & 1) != 0);
    }
    return result;
}
/**
 * Reads the FilesInfo part of the header and builds one
 * {@code SevenZArchiveEntry} per archived item, populating names,
 * timestamps, attributes and the empty-stream/empty-file/anti flags, then
 * distributes the previously parsed sub-stream sizes and CRCs to the
 * entries and derives the stream map.
 *
 * @param header  the header to read from
 * @param archive receives the parsed entries and stream map
 * @throws IOException if the section is malformed or uses unsupported features
 */
private void readFilesInfo(final DataInput header, final Archive archive) throws IOException {
    debug("FilesInfo");
    final long numFiles = readUint64(header);
    final SevenZArchiveEntry[] files = new SevenZArchiveEntry[(int)numFiles];
    for (int i = 0; i < files.length; i++) {
        files[i] = new SevenZArchiveEntry();
    }
    BitSet isEmptyStream = null;
    BitSet isEmptyFile = null;
    BitSet isAnti = null;
    // Properties are a sequence of (id, size, payload) records terminated by id 0.
    while (true) {
        final int propertyType = header.readUnsignedByte();
        if (propertyType == 0) {
            break;
        }
        long size = readUint64(header);
        switch (propertyType) {
            case NID.kEmptyStream: {
                debug(" kEmptyStream");
                isEmptyStream = readBits(header, files.length);
                break;
            }
            case NID.kEmptyFile: {
                debug(" kEmptyFile");
                if (isEmptyStream == null) { // protect against NPE
                    throw new IOException("Header format error: kEmptyStream must appear before kEmptyFile");
                }
                // Indexed over the empty-stream entries only.
                isEmptyFile = readBits(header, isEmptyStream.cardinality());
                break;
            }
            case NID.kAnti: {
                debug(" kAnti");
                if (isEmptyStream == null) { // protect against NPE
                    throw new IOException("Header format error: kEmptyStream must appear before kAnti");
                }
                isAnti = readBits(header, isEmptyStream.cardinality());
                break;
            }
            case NID.kName: {
                debug(" kNames");
                final int external = header.readUnsignedByte();
                if (external != 0) {
                    throw new IOException("Not implemented");
                } else {
                    // Names are NUL-terminated UTF-16LE strings, so the payload
                    // (after the external flag byte) must have an even length.
                    if (((size - 1) & 1) != 0) {
                        throw new IOException("File names length invalid");
                    }
                    final byte[] names = new byte[(int)(size - 1)];
                    header.readFully(names);
                    int nextFile = 0;
                    int nextName = 0;
                    for (int i = 0; i < names.length; i += 2) {
                        // A 16-bit NUL terminates one file name.
                        if (names[i] == 0 && names[i+1] == 0) {
                            files[nextFile++].setName(new String(names, nextName, i-nextName, "UTF-16LE"));
                            nextName = i + 2;
                        }
                    }
                    if (nextName != names.length || nextFile != files.length) {
                        throw new IOException("Error parsing file names");
                    }
                }
                break;
            }
            case NID.kCTime: {
                debug(" kCreationTime");
                final BitSet timesDefined = readAllOrBits(header, files.length);
                final int external = header.readUnsignedByte();
                if (external != 0) {
                    throw new IOException("Unimplemented");
                } else {
                    for (int i = 0; i < files.length; i++) {
                        files[i].setHasCreationDate(timesDefined.get(i));
                        if (files[i].getHasCreationDate()) {
                            // Timestamps are stored little-endian.
                            files[i].setCreationDate(Long.reverseBytes(header.readLong()));
                        }
                    }
                }
                break;
            }
            case NID.kATime: {
                debug(" kLastAccessTime");
                final BitSet timesDefined = readAllOrBits(header, files.length);
                final int external = header.readUnsignedByte();
                if (external != 0) {
                    throw new IOException("Unimplemented");
                } else {
                    for (int i = 0; i < files.length; i++) {
                        files[i].setHasAcessDate(timesDefined.get(i));
                        if (files[i].getHasAcessDate()) {
                            files[i].setAccessDate(Long.reverseBytes(header.readLong()));
                        }
                    }
                }
                break;
            }
            case NID.kMTime: {
                debug(" kLastWriteTime");
                final BitSet timesDefined = readAllOrBits(header, files.length);
                final int external = header.readUnsignedByte();
                if (external != 0) {
                    throw new IOException("Unimplemented");
                } else {
                    for (int i = 0; i < files.length; i++) {
                        files[i].setHasLastModifiedDate(timesDefined.get(i));
                        if (files[i].getHasLastModifiedDate()) {
                            files[i].setLastModifiedDate(Long.reverseBytes(header.readLong()));
                        }
                    }
                }
                break;
            }
            case NID.kWinAttributes: {
                debug(" kWinAttributes");
                final BitSet attributesDefined = readAllOrBits(header, files.length);
                final int external = header.readUnsignedByte();
                if (external != 0) {
                    throw new IOException("Unimplemented");
                } else {
                    for (int i = 0; i < files.length; i++) {
                        files[i].setHasWindowsAttributes(attributesDefined.get(i));
                        if (files[i].getHasWindowsAttributes()) {
                            files[i].setWindowsAttributes(Integer.reverseBytes(header.readInt()));
                        }
                    }
                }
                break;
            }
            case NID.kStartPos: {
                debug(" kStartPos");
                throw new IOException("kStartPos is unsupported, please report");
            }
            case NID.kDummy: {
                debug(" kDummy");
                throw new IOException("kDummy is unsupported, please report");
            }
            default: {
                // Skip the payload of unknown property types instead of
                // aborting (resolves the former FIXME): the format allows
                // readers to ignore properties they don't understand.
                int toSkip = (int) size;
                while (toSkip > 0) {
                    final int skipped = header.skipBytes(toSkip);
                    if (skipped <= 0) {
                        throw new IOException("Truncated property " + propertyType + " of size " + size);
                    }
                    toSkip -= skipped;
                }
                break;
            }
        }
    }
    // Distribute the flags and the parsed sub-stream sizes/CRCs to the entries.
    int nonEmptyFileCounter = 0;
    int emptyFileCounter = 0;
    for (int i = 0; i < files.length; i++) {
        files[i].setHasStream((isEmptyStream == null) ? true : !isEmptyStream.get(i));
        if (files[i].hasStream()) {
            files[i].setDirectory(false);
            files[i].setAntiItem(false);
            files[i].setHasCrc(archive.subStreamsInfo.hasCrc.get(nonEmptyFileCounter));
            files[i].setCrc(archive.subStreamsInfo.crcs[nonEmptyFileCounter]);
            files[i].setSize(archive.subStreamsInfo.unpackSizes[nonEmptyFileCounter]);
            ++nonEmptyFileCounter;
        } else {
            // Empty-stream entries are directories unless kEmptyFile marks
            // them as zero-length files.
            files[i].setDirectory((isEmptyFile == null) ? true : !isEmptyFile.get(emptyFileCounter));
            files[i].setAntiItem((isAnti == null) ? false : isAnti.get(emptyFileCounter));
            files[i].setHasCrc(false);
            files[i].setSize(0);
            ++emptyFileCounter;
        }
    }
    archive.files = files;
    calculateStreamMap(archive);
}
/**
 * Derives the index tables that connect files, folders and packed streams:
 * each folder's first packed-stream index, each packed stream's byte offset,
 * each folder's first file index and each file's folder index (-1 for
 * entries without a stream).
 *
 * @param archive the fully parsed archive; receives the stream map
 * @throws IOException if there are fewer folders than files with streams require
 */
private void calculateStreamMap(final Archive archive) throws IOException {
final StreamMap streamMap = new StreamMap();
// Folders consume consecutive runs of packed streams.
int nextFolderPackStreamIndex = 0;
final int numFolders = (archive.folders != null) ? archive.folders.length : 0;
streamMap.folderFirstPackStreamIndex = new int[numFolders];
for (int i = 0; i < numFolders; i++) {
streamMap.folderFirstPackStreamIndex[i] = nextFolderPackStreamIndex;
nextFolderPackStreamIndex += archive.folders[i].packedStreams.length;
}
// Packed streams are laid out back-to-back; prefix-sum their sizes.
long nextPackStreamOffset = 0;
final int numPackSizes = (archive.packSizes != null) ? archive.packSizes.length : 0;
streamMap.packStreamOffsets = new long[numPackSizes];
for (int i = 0; i < numPackSizes; i++) {
streamMap.packStreamOffsets[i] = nextPackStreamOffset;
nextPackStreamOffset += archive.packSizes[i];
}
streamMap.folderFirstFileIndex = new int[numFolders];
streamMap.fileFolderIndex = new int[archive.files.length];
int nextFolderIndex = 0;
// Position within the current folder's run of sub-streams.
int nextFolderUnpackStreamIndex = 0;
for (int i = 0; i < archive.files.length; i++) {
// Stream-less entries between folders belong to no folder at all.
if (!archive.files[i].hasStream() && nextFolderUnpackStreamIndex == 0) {
streamMap.fileFolderIndex[i] = -1;
continue;
}
if (nextFolderUnpackStreamIndex == 0) {
// Starting a new folder: skip folders that contain no sub-streams.
for (; nextFolderIndex < archive.folders.length; ++nextFolderIndex) {
streamMap.folderFirstFileIndex[nextFolderIndex] = i;
if (archive.folders[nextFolderIndex].numUnpackSubStreams > 0) {
break;
}
}
if (nextFolderIndex >= archive.folders.length) {
throw new IOException("Too few folders in archive");
}
}
streamMap.fileFolderIndex[i] = nextFolderIndex;
if (!archive.files[i].hasStream()) {
continue;
}
++nextFolderUnpackStreamIndex;
// Advance to the next folder once its sub-streams are exhausted.
if (nextFolderUnpackStreamIndex >= archive.folders[nextFolderIndex].numUnpackSubStreams) {
++nextFolderIndex;
nextFolderUnpackStreamIndex = 0;
}
}
archive.streamMap = streamMap;
}
/**
 * Prepares {@code currentEntryInputStream} for the entry at
 * {@code currentEntryIndex}: reuses (and drains) the current folder's
 * decoder stack when possible, otherwise builds a fresh one, then bounds
 * the stream to the entry's size and optionally wraps it in a CRC check.
 *
 * @throws IOException if building or advancing the decoder stack fails
 */
private void buildDecodingStream() throws IOException {
final int folderIndex = archive.streamMap.fileFolderIndex[currentEntryIndex];
if (folderIndex < 0) {
// Entry without a stream (e.g. a directory): serve zero bytes.
currentEntryInputStream = new BoundedInputStream(
new ByteArrayInputStream(new byte[0]), 0);
return;
}
if (currentFolderIndex == folderIndex) {
// need to advance the folder input stream past the current file
drainPreviousEntry();
} else {
// Switching folders: dispose of the old decoder stack and build a new
// one positioned at the folder's first packed stream.
currentFolderIndex = folderIndex;
if (currentFolderInputStream != null) {
currentFolderInputStream.close();
currentFolderInputStream = null;
}
final Folder folder = archive.folders[folderIndex];
final int firstPackStreamIndex = archive.streamMap.folderFirstPackStreamIndex[folderIndex];
final long folderOffset = SIGNATURE_HEADER_SIZE + archive.packPos +
archive.streamMap.packStreamOffsets[firstPackStreamIndex];
currentFolderInputStream = buildDecoderStack(folder, folderOffset, firstPackStreamIndex);
}
final SevenZArchiveEntry file = archive.files[currentEntryIndex];
// Limit the shared folder stream to this entry's bytes.
final InputStream fileStream = new BoundedInputStream(
currentFolderInputStream, file.getSize());
if (file.getHasCrc()) {
currentEntryInputStream = new CRC32VerifyingInputStream(
fileStream, file.getSize(), file.getCrc());
} else {
currentEntryInputStream = fileStream;
}
}
/**
 * Reads and discards whatever remains of the current entry so the shared
 * folder stream is positioned at the start of the next entry, then closes
 * and clears the entry stream. No-op when there is no current entry.
 *
 * @throws IOException if draining the stream fails
 */
private void drainPreviousEntry() throws IOException {
    if (currentEntryInputStream == null) {
        return;
    }
    final byte[] scratch = new byte[64 * 1024];
    int bytesRead;
    do {
        bytesRead = currentEntryInputStream.read(scratch); // NOPMD
    } while (bytesRead >= 0);
    currentEntryInputStream.close();
    currentEntryInputStream = null;
}
/**
 * Builds the chain of decoder streams for one folder: seeks the archive
 * file to the folder's data, bounds it to the first packed stream's size
 * and wraps it with one decoder per coder, innermost first. The result is
 * optionally wrapped in a CRC-verifying stream.
 *
 * @param folder               the folder to decode
 * @param folderOffset         absolute file offset of the folder's packed data
 * @param firstPackStreamIndex index of the folder's first packed stream
 * @return the fully stacked decoding stream
 * @throws IOException if a coder is unsupported or seeking fails
 */
private InputStream buildDecoderStack(final Folder folder, final long folderOffset,
final int firstPackStreamIndex) throws IOException {
file.seek(folderOffset);
InputStream inputStreamStack = new BoundedRandomAccessFileInputStream(file,
archive.packSizes[firstPackStreamIndex]);
for (final Coder coder : folder.coders) {
// Only simple 1-in/1-out coder chains are supported here.
if (coder.numInStreams != 1 || coder.numOutStreams != 1) {
throw new IOException("Multi input/output stream coders are not yet supported");
}
inputStreamStack = Coders.addDecoder(inputStreamStack, coder, password);
}
if (folder.hasCrc) {
return new CRC32VerifyingInputStream(inputStreamStack,
folder.getUnpackSize(), folder.crc);
} else {
return inputStreamStack;
}
}
/**
 * Reads one byte of the current entry's decoded content.
 *
 * @return the byte read, or -1 at the end of the entry's data
 * @throws IOException if reading fails
 */
public int read() throws IOException {
return currentEntryInputStream.read();
}
/**
 * Reads decoded content of the current entry into {@code b}; delegates to
 * {@link #read(byte[], int, int)}.
 *
 * @param b destination buffer
 * @return number of bytes read, or -1 at the end of the entry's data
 * @throws IOException if reading fails
 */
public int read(byte[] b) throws IOException {
return read(b, 0, b.length);
}
/**
 * Reads up to {@code len} bytes of the current entry's decoded content
 * into {@code b} starting at {@code off}.
 *
 * @param b   destination buffer
 * @param off start offset within {@code b}
 * @param len maximum number of bytes to read
 * @return number of bytes read, or -1 at the end of the entry's data
 * @throws IOException if reading fails
 */
public int read(byte[] b, int off, int len) throws IOException {
return currentEntryInputStream.read(b, off, len);
}
/**
 * Reads a 7z variable-length NUMBER: the first byte's high bits flag how
 * many extra little-endian bytes follow, and its remaining low bits become
 * the value's most significant bits.
 *
 * <p>Fix: the accumulator was an {@code int} and the first-byte shift was
 * performed in int arithmetic, so values of 2^31 and above were truncated
 * or sign-extended when widened to the {@code long} return type. Accumulate
 * in {@code long} and widen before shifting.</p>
 *
 * @param in the input to read from
 * @return the decoded unsigned 64-bit value (as a signed long)
 * @throws IOException if reading fails
 */
private static long readUint64(final DataInput in) throws IOException {
    final int firstByte = in.readUnsignedByte();
    int mask = 0x80;
    long value = 0;
    for (int i = 0; i < 8; i++) {
        if ((firstByte & mask) == 0) {
            // Remaining low bits of the first byte are the highest-order bits;
            // widen to long before shifting (shift may be up to 56 bits).
            return value | ((long) (firstByte & (mask - 1)) << (8 * i));
        }
        final long nextByte = in.readUnsignedByte();
        value |= nextByte << (8 * i);
        mask >>>= 1;
    }
    return value;
}
/**
 * InputStream view that limits reading from a wrapped stream to a fixed
 * number of bytes, reporting end-of-stream once that budget is consumed.
 * Closing this stream deliberately does NOT close the wrapped stream,
 * because several bounded entry views share one folder stream.
 */
private static class BoundedInputStream extends InputStream {
    /** The shared underlying stream. */
    private final InputStream in;
    /** Bytes of the wrapped stream still available through this view. */
    private long bytesRemaining;

    public BoundedInputStream(final InputStream in, final long size) {
        this.in = in;
        bytesRemaining = size;
    }

    @Override
    public int read() throws IOException {
        if (bytesRemaining <= 0) {
            return -1;
        }
        final int b = in.read();
        if (b >= 0) {
            // Fix: only count bytes actually delivered; the previous version
            // decremented the budget even when the underlying stream was
            // already exhausted.
            --bytesRemaining;
        }
        return b;
    }

    @Override
    public int read(byte[] b, int off, int len) throws IOException {
        if (len == 0) {
            // Fix: InputStream contract requires a zero-length request to
            // return 0; the previous version returned -1 once exhausted.
            return 0;
        }
        if (bytesRemaining == 0) {
            return -1;
        }
        int bytesToRead = len;
        if (bytesToRead > bytesRemaining) {
            bytesToRead = (int) bytesRemaining;
        }
        final int bytesRead = in.read(b, off, bytesToRead);
        if (bytesRead >= 0) {
            bytesRemaining -= bytesRead;
        }
        return bytesRead;
    }

    @Override
    public void close() {
        // Intentionally empty: the wrapped folder stream outlives this view.
    }
}
}
@@ -0,0 +1,118 @@
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*/
package org.apache.commons.compress.archivers.sevenz;
import java.io.IOException;
import java.io.InputStream;
import java.security.GeneralSecurityException;
import java.security.MessageDigest;
import java.security.NoSuchAlgorithmException;
import javax.crypto.Cipher;
import javax.crypto.CipherInputStream;
import javax.crypto.SecretKey;
import javax.crypto.spec.IvParameterSpec;
import javax.crypto.spec.SecretKeySpec;
import org.apache.commons.compress.PasswordRequiredException;
/**
 * Decoder for the 7z AES-256 + SHA-256 method: derives the AES key from the
 * password (salted, iterated SHA-256 unless the header requests the raw-key
 * mode) and decrypts the wrapped stream in CBC mode without padding.
 * Cipher setup is deferred until the first read.
 */
class AES256SHA256Decoder extends CoderBase {
    @Override
    InputStream decode(final String archiveName, final InputStream in, final long uncompressedLength,
            final Coder coder, final byte[] passwordBytes, int maxMemoryLimitInKb) throws IOException {
        return new InputStream() {
            /** Whether the cipher stream has been set up (done lazily on first read). */
            private boolean isInitialized = false;
            private CipherInputStream cipherInputStream = null;

            /**
             * Parses the coder properties (iteration count, salt, IV), derives
             * the AES key and builds the decrypting stream.
             */
            private CipherInputStream init() throws IOException {
                if (isInitialized) {
                    return cipherInputStream;
                }
                // Property byte 0: low 6 bits = log2 of the KDF iteration count;
                // bits 6/7 extend the IV and salt sizes taken from byte 1.
                final int byte0 = 0xff & coder.properties[0];
                final int numCyclesPower = byte0 & 0x3f;
                final int byte1 = 0xff & coder.properties[1];
                final int ivSize = ((byte0 >> 6) & 1) + (byte1 & 0x0f);
                final int saltSize = ((byte0 >> 7) & 1) + (byte1 >> 4);
                if (2 + saltSize + ivSize > coder.properties.length) {
                    throw new IOException("Salt size + IV size too long in " + archiveName);
                }
                final byte[] salt = new byte[saltSize];
                System.arraycopy(coder.properties, 2, salt, 0, saltSize);
                final byte[] iv = new byte[16];
                System.arraycopy(coder.properties, 2 + saltSize, iv, 0, ivSize);
                if (passwordBytes == null) {
                    throw new PasswordRequiredException(archiveName);
                }
                final byte[] aesKeyBytes;
                if (numCyclesPower == 0x3f) {
                    // Special value: the key is salt + password, no hashing.
                    aesKeyBytes = new byte[32];
                    System.arraycopy(salt, 0, aesKeyBytes, 0, saltSize);
                    System.arraycopy(passwordBytes, 0, aesKeyBytes, saltSize,
                            Math.min(passwordBytes.length, aesKeyBytes.length - saltSize));
                } else {
                    final MessageDigest digest;
                    try {
                        digest = MessageDigest.getInstance("SHA-256");
                    } catch (final NoSuchAlgorithmException noSuchAlgorithmException) {
                        throw new IOException("SHA-256 is unsupported by your Java implementation",
                                noSuchAlgorithmException);
                    }
                    // Iterated hash of salt || password || 64-bit counter.
                    final byte[] extra = new byte[8];
                    for (long j = 0; j < (1L << numCyclesPower); j++) {
                        digest.update(salt);
                        digest.update(passwordBytes);
                        digest.update(extra);
                        for (int k = 0; k < extra.length; k++) {
                            ++extra[k];
                            if (extra[k] != 0) {
                                break;
                            }
                        }
                    }
                    aesKeyBytes = digest.digest();
                }
                final SecretKey aesKey = new SecretKeySpec(aesKeyBytes, "AES");
                try {
                    final Cipher cipher = Cipher.getInstance("AES/CBC/NoPadding");
                    cipher.init(Cipher.DECRYPT_MODE, aesKey, new IvParameterSpec(iv));
                    cipherInputStream = new CipherInputStream(in, cipher);
                    isInitialized = true;
                    return cipherInputStream;
                } catch (final GeneralSecurityException generalSecurityException) {
                    throw new IOException("Decryption error " +
                            "(do you have the JCE Unlimited Strength Jurisdiction Policy Files installed?)",
                            generalSecurityException);
                }
            }

            @Override
            public int read() throws IOException {
                return init().read();
            }

            @Override
            public int read(final byte[] b, final int off, final int len) throws IOException {
                return init().read(b, off, len);
            }

            @Override
            public void close() throws IOException {
                // Fix: propagate close to the cipher stream (and through it the
                // wrapped stream); the previous empty close() leaked both.
                if (cipherInputStream != null) {
                    cipherInputStream.close();
                }
            }
        };
    }
}
@@ -0,0 +1,286 @@
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.commons.compress.compressors.snappy;
import java.io.IOException;
import java.io.InputStream;
import org.apache.commons.compress.compressors.lz77support.AbstractLZ77CompressorInputStream;
import org.apache.commons.compress.utils.ByteUtils;
/**
* CompressorInputStream for the raw Snappy format.
*
* <p>This implementation uses an internal buffer in order to handle
* the back-references that are at the heart of the LZ77 algorithm.
* The size of the buffer must be at least as big as the biggest
* offset used in the compressed stream. The current version of the
* Snappy algorithm as defined by Google works on 32k blocks and
* doesn't contain offsets bigger than 32k which is the default block
* size used by this class.</p>
*
* @see <a href="https://github.com/google/snappy/blob/master/format_description.txt">Snappy compressed format description</a>
* @since 1.7
*/
public class SnappyCompressorInputStream extends AbstractLZ77CompressorInputStream {

    /** Mask used to determine the type of "tag" is being processed */
    private static final int TAG_MASK = 0x03;

    /** Default block size */
    public static final int DEFAULT_BLOCK_SIZE = 32768;

    /** The size of the uncompressed data */
    private final int size;

    /** Number of uncompressed bytes still to be read. */
    private int uncompressedBytesRemaining;

    /** Current state of the stream */
    private State state = State.NO_BLOCK;

    /** Set once the declared uncompressed size has been fully delivered. */
    private boolean endReached = false;

    /**
     * Constructor using the default buffer size of 32k.
     *
     * @param is
     *            An InputStream to read compressed data from
     *
     * @throws IOException if reading fails
     */
    public SnappyCompressorInputStream(final InputStream is) throws IOException {
        this(is, DEFAULT_BLOCK_SIZE);
    }

    /**
     * Constructor using a configurable buffer size.
     *
     * @param is
     *            An InputStream to read compressed data from
     * @param blockSize
     *            The block size used in compression
     *
     * @throws IOException if reading fails
     */
    public SnappyCompressorInputStream(final InputStream is, final int blockSize)
            throws IOException {
        super(is, blockSize);
        uncompressedBytesRemaining = size = (int) readSize();
    }

    /**
     * {@inheritDoc}
     */
    @Override
    public int read(final byte[] b, final int off, final int len) throws IOException {
        if (endReached) {
            return -1;
        }
        switch (state) {
        case NO_BLOCK:
            fill();
            return read(b, off, len);
        case IN_LITERAL:
            int litLen = readLiteral(b, off, len);
            if (!hasMoreDataInBlock()) {
                state = State.NO_BLOCK;
            }
            return litLen > 0 ? litLen : read(b, off, len);
        case IN_BACK_REFERENCE:
            int backReferenceLen = readBackReference(b, off, len);
            if (!hasMoreDataInBlock()) {
                state = State.NO_BLOCK;
            }
            return backReferenceLen > 0 ? backReferenceLen : read(b, off, len);
        default:
            throw new IOException("Unknown stream state " + state);
        }
    }

    /**
     * Try to fill the buffer with the next block of data.
     */
    private void fill() throws IOException {
        if (uncompressedBytesRemaining == 0) {
            endReached = true;
            return;
        }

        int b = readOneByte();
        if (b == -1) {
            throw new IOException("Premature end of stream reading block start");
        }
        int length = 0;
        int offset = 0;

        switch (b & TAG_MASK) {
        case 0x00:
            length = readLiteralLength(b);
            uncompressedBytesRemaining -= length;
            startLiteral(length);
            state = State.IN_LITERAL;
            break;
        case 0x01:
            /*
             * These elements can encode lengths between [4..11] bytes and
             * offsets between [0..2047] bytes. (len-4) occupies three bits
             * and is stored in bits [2..4] of the tag byte. The offset
             * occupies 11 bits, of which the upper three are stored in the
             * upper three bits ([5..7]) of the tag byte, and the lower
             * eight are stored in a byte following the tag byte.
             */
            length = 4 + ((b >> 2) & 0x07);
            uncompressedBytesRemaining -= length;
            offset = (b & 0xE0) << 3;
            b = readOneByte();
            if (b == -1) {
                throw new IOException("Premature end of stream reading back-reference length");
            }
            offset |= b;
            startBackReference(offset, length);
            state = State.IN_BACK_REFERENCE;
            break;
        case 0x02:
            /*
             * These elements can encode lengths between [1..64] and offsets
             * from [0..65535]. (len-1) occupies six bits and is stored in
             * the upper six bits ([2..7]) of the tag byte. The offset is
             * stored as a little-endian 16-bit integer in the two bytes
             * following the tag byte.
             */
            length = (b >> 2) + 1;
            uncompressedBytesRemaining -= length;
            offset = (int) ByteUtils.fromLittleEndian(supplier, 2);
            startBackReference(offset, length);
            state = State.IN_BACK_REFERENCE;
            break;
        case 0x03:
            /*
             * These are like the copies with 2-byte offsets (see previous
             * subsection), except that the offset is stored as a 32-bit
             * integer instead of a 16-bit integer (and thus will occupy
             * four bytes).
             */
            length = (b >> 2) + 1;
            uncompressedBytesRemaining -= length;
            offset = (int) ByteUtils.fromLittleEndian(supplier, 4) & 0x7fffffff;
            startBackReference(offset, length);
            state = State.IN_BACK_REFERENCE;
            break;
        default:
            // impossible as TAG_MASK is two bits and all four possible cases have been covered
            break;
        }
    }

    /*
     * For literals up to and including 60 bytes in length, the
     * upper six bits of the tag byte contain (len-1). The literal
     * follows immediately thereafter in the bytestream. - For
     * longer literals, the (len-1) value is stored after the tag
     * byte, little-endian. The upper six bits of the tag byte
     * describe how many bytes are used for the length; 60, 61, 62
     * or 63 for 1-4 bytes, respectively. The literal itself follows
     * after the length.
     */
    private int readLiteralLength(final int b) throws IOException {
        int length;
        switch (b >> 2) {
        case 60:
            length = readOneByte();
            if (length == -1) {
                throw new IOException("Premature end of stream reading literal length");
            }
            break;
        case 61:
            length = (int) ByteUtils.fromLittleEndian(supplier, 2);
            break;
        case 62:
            length = (int) ByteUtils.fromLittleEndian(supplier, 3);
            break;
        case 63:
            length = (int) ByteUtils.fromLittleEndian(supplier, 4);
            break;
        default:
            length = b >> 2;
            break;
        }

        return length + 1;
    }

    /**
     * The stream starts with the uncompressed length (up to a maximum of 2^32 -
     * 1), stored as a little-endian varint. Varints consist of a series of
     * bytes, where the lower 7 bits are data and the upper bit is set iff there
     * are more bytes to be read. In other words, an uncompressed length of 64
     * would be stored as 0x40, and an uncompressed length of 2097150 (0x1FFFFE)
     * would be stored as 0xFE 0xFF 0x7F.
     *
     * @return The size of the uncompressed data
     *
     * @throws IOException
     *             Could not read a byte
     */
    private long readSize() throws IOException {
        int index = 0;
        long sz = 0;
        int b = 0;

        do {
            b = readOneByte();
            if (b == -1) {
                throw new IOException("Premature end of stream reading size");
            }
            // Fix: widen to long BEFORE shifting. For 5-byte varints the shift
            // is 28 bits; performed in int arithmetic the result could set the
            // sign bit and sign-extend when OR'd into the long accumulator,
            // corrupting sizes >= 2^31.
            sz |= ((long) (b & 0x7f)) << (index++ * 7);
        } while (0 != (b & 0x80));
        return sz;
    }

    /**
     * Get the uncompressed size of the stream
     *
     * @return the uncompressed size
     */
    @Override
    public int getSize() {
        return size;
    }

    private enum State {
        NO_BLOCK, IN_LITERAL, IN_BACK_REFERENCE
    }
}
@@ -0,0 +1,217 @@
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*/
package org.apache.commons.compress.archivers.sevenz;
import java.io.IOException;
import java.io.InputStream;
import java.security.GeneralSecurityException;
import java.security.MessageDigest;
import java.security.NoSuchAlgorithmException;
import java.util.Arrays;
import javax.crypto.Cipher;
import javax.crypto.CipherInputStream;
import javax.crypto.SecretKey;
import javax.crypto.spec.IvParameterSpec;
import javax.crypto.spec.SecretKeySpec;
import org.apache.commons.compress.compressors.bzip2.BZip2CompressorInputStream;
import org.tukaani.xz.LZMA2InputStream;
class Coders {
/**
 * Wraps {@code is} with the decoder registered for the coder's
 * decompression method id.
 *
 * @param is       the stream delivering the coder's compressed input
 * @param coder    the coder whose method id selects the decoder
 * @param password password for encrypted content, may be null
 * @return the decoding stream
 * @throws IOException if the method id matches no registered decoder
 */
static InputStream addDecoder(final InputStream is,
        final Coder coder, final String password) throws IOException {
    final byte[] wantedId = coder.decompressionMethodId;
    for (final CoderId candidate : coderTable) {
        if (!Arrays.equals(candidate.id, wantedId)) {
            continue;
        }
        return candidate.coder.decode(is, coder, password);
    }
    throw new IOException("Unsupported compression method " +
            Arrays.toString(wantedId));
}
// Registry mapping 7z decompression method ids to decoder implementations:
// 0x00 = copy (stored), 0x21 = LZMA2, 04 02 02 = bzip2,
// 06 F1 07 01 = AES-256 with SHA-256 key derivation.
static CoderId[] coderTable = new CoderId[] {
new CoderId(new byte[] { (byte)0x00 }, new CopyDecoder()),
new CoderId(new byte[] { (byte)0x21 }, new LZMA2Decoder()),
// FIXME: gives corrupt output
//new CoderId(new byte[] { (byte)0x04, (byte)0x01, (byte)0x08 }, new DeflateDecoder()),
new CoderId(new byte[] { (byte)0x04, (byte)0x02, (byte)0x02 }, new BZIP2Decoder()),
new CoderId(new byte[] { (byte)0x06, (byte)0xf1, (byte)0x07, (byte)0x01 }, new AES256SHA256Decoder())
};
/** Pairs a 7z method id with the decoder implementation that handles it. */
static class CoderId {
    final byte[] id;
    final CoderBase coder;

    CoderId(final byte[] id, final CoderBase coder) {
        this.id = id;
        this.coder = coder;
    }
}
// Base type for all decoders: wraps the stream of a coder's compressed
// input in a stream that yields the decoded bytes.
static abstract class CoderBase {
abstract InputStream decode(final InputStream in, final Coder coder,
String password) throws IOException;
}
// Decoder for the "copy" method (id 0x00): content is stored uncompressed,
// so the input stream is returned unchanged.
static class CopyDecoder extends CoderBase {
@Override
InputStream decode(final InputStream in, final Coder coder,
String password) throws IOException {
return in;
}
}
// Decoder for LZMA2 (method id 0x21). The single property byte encodes the
// dictionary size: low 6 bits d give size = (2 | (d & 1)) << (d/2 + 11),
// with the special value 40 meaning the 4 GiB - 1 maximum.
static class LZMA2Decoder extends CoderBase {
@Override
InputStream decode(final InputStream in, final Coder coder,
String password) throws IOException {
final int dictionarySizeBits = 0xff & coder.properties[0];
// Bits above the low 6 are reserved and must be zero.
if ((dictionarySizeBits & (~0x3f)) != 0) {
throw new IOException("Unsupported LZMA2 property bits");
}
if (dictionarySizeBits > 40) {
throw new IOException("Dictionary larger than 4GiB maximum size");
}
final int dictionarySize;
if (dictionarySizeBits == 40) {
// 0xFFFFFFFF == 4 GiB - 1, the largest dictionary LZMA2 allows.
dictionarySize = 0xFFFFffff;
} else {
// Mantissa (2 or 3) shifted by the encoded exponent.
dictionarySize = (2 | (dictionarySizeBits & 0x1)) << (dictionarySizeBits / 2 + 11);
}
return new LZMA2InputStream(in, dictionarySize);
}
}
// static class DeflateDecoder extends CoderBase {
// @Override
// InputStream decode(final InputStream in, final Coder coder, final String password)
// throws IOException {
// System.out.println("deflate prop count = " + (coder.properties == null ? -1 : coder.properties.length));
// return new DeflaterInputStream(in, new Deflater(Deflater.DEFAULT_COMPRESSION, true));
// //return new GZIPInputStream(in);
// }
// }
// Decoder for bzip2 (method id 04 02 02); delegates to the Commons Compress
// bzip2 stream implementation.
static class BZIP2Decoder extends CoderBase {
@Override
InputStream decode(final InputStream in, final Coder coder, final String password)
throws IOException {
return new BZip2CompressorInputStream(in);
}
}
/**
 * Decoder for AES-256-encrypted streams. The AES key is derived from the
 * password by iterated SHA-256 hashing (or taken directly from salt+password
 * when numCyclesPower has the special value 0x3f).
 */
static class AES256SHA256Decoder extends CoderBase {
    @Override
    InputStream decode(final InputStream in, final Coder coder,
            final String password) throws IOException {
        // Cipher setup is deferred to the first read() so that merely opening
        // the archive does not fail on a missing password.
        return new InputStream() {
            private boolean isInitialized = false;
            private CipherInputStream cipherInputStream = null;
            // Lazily derives the key and builds the AES/CBC cipher stream
            // from coder.properties; cached after the first call.
            private CipherInputStream init() throws IOException {
                if (isInitialized) {
                    return cipherInputStream;
                }
                // properties[0]: low 6 bits = numCyclesPower; bits 6/7 extend
                // the IV and salt sizes stored in the nibbles of properties[1].
                final int byte0 = 0xff & coder.properties[0];
                final int numCyclesPower = byte0 & 0x3f;
                final int byte1 = 0xff & coder.properties[1];
                final int ivSize = ((byte0 >> 6) & 1) + (byte1 & 0x0f);
                final int saltSize = ((byte0 >> 7) & 1) + (byte1 >> 4);
                //debug("numCyclesPower=" + numCyclesPower + ", saltSize=" + saltSize + ", ivSize=" + ivSize);
                if (2 + saltSize + ivSize > coder.properties.length) {
                    throw new IOException("Salt size + IV size too long");
                }
                final byte[] salt = new byte[saltSize];
                System.arraycopy(coder.properties, 2, salt, 0, saltSize);
                // IV is zero-padded to the 16-byte AES block size.
                final byte[] iv = new byte[16];
                System.arraycopy(coder.properties, 2 + saltSize, iv, 0, ivSize);
                if (password == null) {
                    throw new IOException("Cannot read encrypted files without a password");
                }
                final byte[] passwordBytes = password.getBytes("UTF-16LE");
                final byte[] aesKeyBytes;
                if (numCyclesPower == 0x3f) {
                    // Special value: no key stretching, key is salt + raw password bytes.
                    aesKeyBytes = new byte[32];
                    System.arraycopy(salt, 0, aesKeyBytes, 0, saltSize);
                    System.arraycopy(passwordBytes, 0, aesKeyBytes, saltSize,
                            Math.min(passwordBytes.length, aesKeyBytes.length - saltSize));
                } else {
                    final MessageDigest digest;
                    try {
                        digest = MessageDigest.getInstance("SHA-256");
                    } catch (NoSuchAlgorithmException noSuchAlgorithmException) {
                        IOException ioe = new IOException("SHA-256 is unsupported by your Java implementation");
                        ioe.initCause(noSuchAlgorithmException);
                        throw ioe;
                        // TODO: simplify when Compress requires Java 1.6
                        // throw new IOException("SHA-256 is unsupported by your Java implementation",
                        // noSuchAlgorithmException);
                    }
                    // Hash salt+password+counter 2^numCyclesPower times; 'extra'
                    // is an 8-byte round counter incremented with carry from
                    // byte 0 upward (i.e. little-endian).
                    final byte[] extra = new byte[8];
                    for (long j = 0; j < (1L << numCyclesPower); j++) {
                        digest.update(salt);
                        digest.update(passwordBytes);
                        digest.update(extra);
                        for (int k = 0; k < extra.length; k++) {
                            ++extra[k];
                            if (extra[k] != 0) {
                                break;
                            }
                        }
                    }
                    aesKeyBytes = digest.digest();
                }
                final SecretKey aesKey = new SecretKeySpec(aesKeyBytes, "AES");
                try {
                    final Cipher cipher = Cipher.getInstance("AES/CBC/NoPadding");
                    cipher.init(Cipher.DECRYPT_MODE, aesKey, new IvParameterSpec(iv));
                    cipherInputStream = new CipherInputStream(in, cipher);
                    isInitialized = true;
                    return cipherInputStream;
                } catch (GeneralSecurityException generalSecurityException) {
                    IOException ioe = new IOException("Decryption error " +
                            "(do you have the JCE Unlimited Strength Jurisdiction Policy Files installed?)");
                    ioe.initCause(generalSecurityException);
                    throw ioe;
                    // TODO: simplify when Compress requires Java 1.6
                    // throw new IOException("Decryption error " +
                    // "(do you have the JCE Unlimited Strength Jurisdiction Policy Files installed?)",
                    // generalSecurityException);
                }
            }
            @Override
            public int read() throws IOException {
                return init().read();
            }
            @Override
            public int read(byte[] b, int off, int len) throws IOException {
                return init().read(b, off, len);
            }
            @Override
            public void close() {
                // NOTE(review): deliberately does not close the wrapped stream;
                // confirm the caller owns and closes 'in'.
            }
        };
    }
}
}
@@ -0,0 +1,968 @@
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*/
package org.apache.commons.compress.archivers.sevenz;
import java.io.ByteArrayInputStream;
import java.io.DataInput;
import java.io.DataInputStream;
import java.io.File;
import java.io.IOException;
import java.io.InputStream;
import java.io.RandomAccessFile;
import java.util.Arrays;
import java.util.BitSet;
import java.util.zip.CRC32;
import org.apache.commons.compress.utils.CRC32VerifyingInputStream;
/**
* Reads a 7z file, using RandomAccessFile under
* the covers.
* <p>
* The 7z file format is a flexible container
* that can contain many compression and
* encryption types, but at the moment only
* only Copy, LZMA2, BZIP2, and AES-256 + SHA-256
* are supported, and archive header compression
* (when it uses the unsupported LZMA
* compression) isn't. So the only archives
* that can be read are the following:
* <pre>
* 7z a -mhc=off [-mhe=on] -mx=0 [-ppassword] archive.7z files
* 7z a -mhc=off [-mhe=on] -m0=LZMA2 [-ppassword] archive.7z files
* 7z a -mhc=off [-mhe=on] -m0=BZIP2 [-ppassword] archive.7z files
* </pre>
* <p>
* The format is very Windows/Intel specific,
* so it uses little-endian byte order,
* doesn't store user/group or permission bits,
* and represents times using NTFS timestamps
* (100 nanosecond units since 1 January 1601).
* Hence the official tools recommend against
* using it for backup purposes on *nix, and
* recommend .tar.7z or .tar.lzma or .tar.xz
* instead.
* <p>
* Both the header and file contents may be
* compressed and/or encrypted. With both
* encrypted, neither file names nor file
* contents can be read, but the use of
* encryption isn't plausibly deniable.
*
* @NotThreadSafe
*/
public class SevenZFile {
private static final boolean DEBUG = false;
// Size in bytes of the fixed signature header at the start of the archive.
private static final int SIGNATURE_HEADER_SIZE = 32;
// Underlying archive file; set to null once close() has run.
private RandomAccessFile file;
private final Archive archive;
// Sequential-read cursor state; -1 means "before the first entry/folder".
private int currentEntryIndex = -1;
private int currentFolderIndex = -1;
private InputStream currentFolderInputStream = null;
private InputStream currentEntryInputStream = null;
private String password;
// The 7z magic bytes: '7', 'z', BC AF 27 1C.
private static final byte[] sevenZSignature = {
    (byte)'7', (byte)'z', (byte)0xBC, (byte)0xAF, (byte)0x27, (byte)0x1C
};
/**
 * Opens the given file as a 7z archive and parses its headers.
 *
 * @param filename the archive file to read
 * @param password password for encrypted content, or null
 * @throws IOException if the file is not a valid or supported 7z archive
 */
public SevenZFile(final File filename, final String password) throws IOException {
    boolean succeeded = false;
    this.password = password;
    this.file = new RandomAccessFile(filename, "r");
    try {
        archive = readHeaders();
        succeeded = true;
    } finally {
        // Don't leak the file handle when header parsing throws.
        if (!succeeded) {
            this.file.close();
        }
    }
}
/** Opens the archive without a password (encrypted entries will fail on read). */
public SevenZFile(final File filename) throws IOException {
    this(filename, null);
}
/**
 * Closes the underlying archive file. Safe to call more than once;
 * an IOException raised while closing is intentionally suppressed.
 */
public void close() {
    if (file == null) {
        return;
    }
    try {
        file.close();
    } catch (IOException ignored) { // NOPMD
        // best effort - nothing sensible to do on close failure
    }
    file = null;
}
/** Writes the message to stdout when compile-time DEBUG tracing is enabled. */
private static void debug(String str) {
    if (!DEBUG) {
        return;
    }
    System.out.println(str);
}
/** printf-style companion to {@link #debug(String)}. */
private static void debug(String fmt, Object... args) {
    if (!DEBUG) {
        return;
    }
    System.out.format(fmt, args);
}
/**
 * Advances to the next archive entry and prepares its decoding stream.
 *
 * @return the next entry, or null once the archive is exhausted
 * @throws IOException if building the decoding stream fails
 */
public SevenZArchiveEntry getNextEntry() throws IOException {
    final int nextIndex = currentEntryIndex + 1;
    if (nextIndex >= archive.files.length) {
        return null;
    }
    currentEntryIndex = nextIndex;
    final SevenZArchiveEntry entry = archive.files[nextIndex];
    buildDecodingStream();
    return entry;
}
/**
 * Parses the fixed signature header and the (possibly encoded) "next header"
 * and builds the in-memory Archive model.
 *
 * @throws IOException if the signature, version or CRC checks fail
 */
private Archive readHeaders() throws IOException {
    debug("SignatureHeader");
    final byte[] signature = new byte[6];
    file.readFully(signature);
    if (!Arrays.equals(signature, sevenZSignature)) {
        throw new IOException("Bad 7z signature");
    }
    // 7zFormat.txt has it wrong - it's first major then minor
    final byte archiveVersionMajor = file.readByte();
    final byte archiveVersionMinor = file.readByte();
    debug(" archiveVersion major=%d, minor=%d\n",
            archiveVersionMajor, archiveVersionMinor);
    if (archiveVersionMajor != 0) {
        throw new IOException(String.format("Unsupported 7z version (%d,%d)",
                archiveVersionMajor, archiveVersionMinor));
    }
    // 7z stores multi-byte values little-endian; RandomAccessFile reads
    // big-endian, hence the reverseBytes calls throughout.
    final int startHeaderCrc = Integer.reverseBytes(file.readInt());
    final StartHeader startHeader = readStartHeader(startHeaderCrc);
    final int nextHeaderSizeInt = (int) startHeader.nextHeaderSize;
    if (nextHeaderSizeInt != startHeader.nextHeaderSize) {
        throw new IOException("cannot handle nextHeaderSize " + startHeader.nextHeaderSize);
    }
    file.seek(SIGNATURE_HEADER_SIZE + startHeader.nextHeaderOffset);
    final byte[] nextHeader = new byte[nextHeaderSizeInt];
    file.readFully(nextHeader);
    // Verify the next header against the CRC recorded in the start header.
    final CRC32 crc = new CRC32();
    crc.update(nextHeader);
    if (startHeader.nextHeaderCrc != (int) crc.getValue()) {
        throw new IOException("NextHeader CRC mismatch");
    }
    final ByteArrayInputStream byteStream = new ByteArrayInputStream(nextHeader);
    DataInputStream nextHeaderInputStream = new DataInputStream(
            byteStream);
    Archive archive = new Archive();
    int nid = nextHeaderInputStream.readUnsignedByte();
    if (nid == NID.kEncodedHeader) {
        // The header itself is compressed/encrypted; decode it first.
        nextHeaderInputStream = readEncodedHeader(nextHeaderInputStream, archive);
        // Archive gets rebuilt with the new header
        archive = new Archive();
        nid = nextHeaderInputStream.readUnsignedByte();
    }
    if (nid == NID.kHeader) {
        readHeader(nextHeaderInputStream, archive);
    } else {
        throw new IOException("Broken or unsupported archive: no Header");
    }
    return archive;
}
/**
 * Reads the 20-byte start header (offset, size and CRC of the next header),
 * verifying the bytes against the given CRC while reading.
 */
private StartHeader readStartHeader(final int startHeaderCrc) throws IOException {
    final StartHeader startHeader = new StartHeader();
    DataInputStream dataInputStream = null;
    try {
        dataInputStream = new DataInputStream(new CRC32VerifyingInputStream(
                new BoundedRandomAccessFileInputStream(file, 20), 20, startHeaderCrc));
        // All three fields are stored little-endian.
        startHeader.nextHeaderOffset = Long.reverseBytes(dataInputStream.readLong());
        startHeader.nextHeaderSize = Long.reverseBytes(dataInputStream.readLong());
        startHeader.nextHeaderCrc = Integer.reverseBytes(dataInputStream.readInt());
        return startHeader;
    } finally {
        if (dataInputStream != null) {
            dataInputStream.close();
        }
    }
}
/**
 * Reads the top-level Header structure: optional archive properties,
 * the main streams info and the files info, terminated by kEnd.
 * Additional-streams sections are rejected as unsupported.
 */
private void readHeader(final DataInput header, final Archive archive) throws IOException {
    debug("Header");
    int nid = header.readUnsignedByte();
    if (nid == NID.kArchiveProperties) {
        readArchiveProperties(header);
        nid = header.readUnsignedByte();
    }
    if (nid == NID.kAdditionalStreamsInfo) {
        throw new IOException("Additional streams unsupported");
        //nid = header.readUnsignedByte();
    }
    if (nid == NID.kMainStreamsInfo) {
        readStreamsInfo(header, archive);
        nid = header.readUnsignedByte();
    }
    if (nid == NID.kFilesInfo) {
        readFilesInfo(header, archive);
        nid = header.readUnsignedByte();
    }
    if (nid != NID.kEnd) {
        throw new IOException("Badly terminated header");
    }
}
/**
 * Skips the archive-properties section: each property (type byte plus
 * sized payload) is read and discarded until kEnd. FIXME kept from the
 * original: the reference implementation throws them away as well.
 */
private void readArchiveProperties(final DataInput input) throws IOException {
    debug("ArchiveProperties");
    for (int nid = input.readUnsignedByte(); nid != NID.kEnd; nid = input.readUnsignedByte()) {
        final byte[] property = new byte[(int) readUint64(input)];
        input.readFully(property);
    }
}
/**
 * Decompresses/decrypts an encoded header (NID.kEncodedHeader) and returns
 * a stream over the decoded header bytes. Only single-input/single-output
 * coders are supported.
 */
private DataInputStream readEncodedHeader(final DataInputStream header, final Archive archive) throws IOException {
    debug("EncodedHeader");
    readStreamsInfo(header, archive);
    // FIXME: merge with buildDecodingStream()/buildDecoderStack() at some stage?
    final Folder folder = archive.folders[0];
    final int firstPackStreamIndex = 0;
    final long folderOffset = SIGNATURE_HEADER_SIZE + archive.packPos +
            0;
    file.seek(folderOffset);
    InputStream inputStreamStack = new BoundedRandomAccessFileInputStream(file,
            archive.packSizes[firstPackStreamIndex]);
    // Stack one decoder per coder, innermost first.
    for (final Coder coder : folder.coders) {
        if (coder.numInStreams != 1 || coder.numOutStreams != 1) {
            throw new IOException("Multi input/output stream coders are not yet supported");
        }
        inputStreamStack = Coders.addDecoder(inputStreamStack, coder, password);
    }
    if (folder.hasCrc) {
        inputStreamStack = new CRC32VerifyingInputStream(inputStreamStack,
                folder.getUnpackSize(), folder.crc);
    }
    // Fully materialize the decoded header in memory.
    final byte[] nextHeader = new byte[(int)folder.getUnpackSize()];
    final DataInputStream nextHeaderInputStream = new DataInputStream(inputStreamStack);
    try {
        nextHeaderInputStream.readFully(nextHeader);
    } finally {
        nextHeaderInputStream.close();
    }
    return new DataInputStream(new ByteArrayInputStream(nextHeader));
    //throw new IOException("LZMA compression unsupported, so files with compressed header cannot be read");
    // FIXME: this extracts the header to an LZMA file which can then be
    // manually decompressed.
    // long offset = SIGNATURE_HEADER_SIZE + archive.packPos;
    // file.seek(offset);
    // long unpackSize = archive.folders[0].getUnpackSize();
    // byte[] packed = new byte[(int)archive.packSizes[0]];
    // file.readFully(packed);
    //
    // FileOutputStream fos = new FileOutputStream(new File("/tmp/encodedHeader.7z"));
    // fos.write(archive.folders[0].coders[0].properties);
    // // size - assuming < 256
    // fos.write((int)(unpackSize & 0xff));
    // fos.write(0);
    // fos.write(0);
    // fos.write(0);
    // fos.write(0);
    // fos.write(0);
    // fos.write(0);
    // fos.write(0);
    // fos.write(packed);
    // fos.close();
}
/**
 * Reads a StreamsInfo structure: optional PackInfo, UnpackInfo and
 * SubStreamsInfo sections, in that order, terminated by kEnd.
 */
private void readStreamsInfo(final DataInput header, final Archive archive) throws IOException {
    debug("StreamsInfo");
    int nid = header.readUnsignedByte();
    if (nid == NID.kPackInfo) {
        readPackInfo(header, archive);
        nid = header.readUnsignedByte();
    }
    if (nid == NID.kUnpackInfo) {
        readUnpackInfo(header, archive);
        nid = header.readUnsignedByte();
    }
    if (nid == NID.kSubStreamsInfo) {
        readSubStreamsInfo(header, archive);
        nid = header.readUnsignedByte();
    }
    if (nid != NID.kEnd) {
        throw new IOException("Badly terminated StreamsInfo");
    }
}
/**
 * Reads the PackInfo section: the position of the packed streams, their
 * sizes and (optionally) their CRCs, stored into the archive model.
 */
private void readPackInfo(final DataInput header, final Archive archive) throws IOException {
    debug("PackInfo");
    archive.packPos = readUint64(header);
    final long numPackStreams = readUint64(header);
    debug(" " + numPackStreams + " pack streams");
    int nid = header.readUnsignedByte();
    if (nid == NID.kSize) {
        archive.packSizes = new long[(int)numPackStreams];
        for (int i = 0; i < archive.packSizes.length; i++) {
            archive.packSizes[i] = readUint64(header);
            debug(" pack size %d is %d\n", i, archive.packSizes[i]);
        }
        nid = header.readUnsignedByte();
    }
    if (nid == NID.kCRC) {
        // CRCs may be present for only a subset of the pack streams.
        archive.packCrcsDefined = readAllOrBits(header, (int)numPackStreams);
        archive.packCrcs = new int[(int)numPackStreams];
        for (int i = 0; i < (int)numPackStreams; i++) {
            if (archive.packCrcsDefined.get(i)) {
                archive.packCrcs[i] = Integer.reverseBytes(header.readInt());
            }
        }
        nid = header.readUnsignedByte();
    }
    if (nid != NID.kEnd) {
        throw new IOException("Badly terminated PackInfo (" + nid + ")");
    }
}
/**
 * Reads the UnpackInfo section: the folder (coder-chain) definitions,
 * the per-output-stream unpacked sizes, and optional folder CRCs.
 */
private void readUnpackInfo(final DataInput header, final Archive archive) throws IOException {
    debug("UnpackInfo");
    int nid = header.readUnsignedByte();
    if (nid != NID.kFolder) {
        throw new IOException("Expected kFolder, got " + nid);
    }
    final long numFolders = readUint64(header);
    debug(" " + numFolders + " folders");
    final Folder[] folders = new Folder[(int)numFolders];
    archive.folders = folders;
    final int external = header.readUnsignedByte();
    if (external != 0) {
        // "External" folders (stored in a separate stream) are not supported.
        throw new IOException("External unsupported");
    } else {
        for (int i = 0; i < (int)numFolders; i++) {
            folders[i] = readFolder(header);
        }
    }
    nid = header.readUnsignedByte();
    if (nid != NID.kCodersUnpackSize) {
        throw new IOException("Expected kCodersUnpackSize, got " + nid);
    }
    // One unpacked size per coder output stream in each folder.
    for (final Folder folder : folders) {
        folder.unpackSizes = new long[(int)folder.totalOutputStreams];
        for (int i = 0; i < folder.totalOutputStreams; i++) {
            folder.unpackSizes[i] = readUint64(header);
        }
    }
    nid = header.readUnsignedByte();
    if (nid == NID.kCRC) {
        final BitSet crcsDefined = readAllOrBits(header, (int)numFolders);
        for (int i = 0; i < (int)numFolders; i++) {
            if (crcsDefined.get(i)) {
                folders[i].hasCrc = true;
                folders[i].crc = Integer.reverseBytes(header.readInt());
            } else {
                folders[i].hasCrc = false;
            }
        }
        nid = header.readUnsignedByte();
    }
    if (nid != NID.kEnd) {
        throw new IOException("Badly terminated UnpackInfo");
    }
}
/**
 * Reads the SubStreamsInfo section: how many unpacked sub-streams (files)
 * each folder contains, their sizes, and their CRCs. Defaults to one
 * sub-stream per folder when no kNumUnpackStream record is present.
 */
private void readSubStreamsInfo(final DataInput header, final Archive archive) throws IOException {
    debug("SubStreamsInfo");
    for (final Folder folder : archive.folders) {
        folder.numUnpackSubStreams = 1;
    }
    int totalUnpackStreams = archive.folders.length;
    int nid = header.readUnsignedByte();
    if (nid == NID.kNumUnpackStream) {
        totalUnpackStreams = 0;
        for (final Folder folder : archive.folders) {
            final long numStreams = readUint64(header);
            folder.numUnpackSubStreams = (int)numStreams;
            totalUnpackStreams += numStreams;
        }
        nid = header.readUnsignedByte();
    }
    final SubStreamsInfo subStreamsInfo = new SubStreamsInfo();
    subStreamsInfo.unpackSizes = new long[totalUnpackStreams];
    subStreamsInfo.hasCrc = new BitSet(totalUnpackStreams);
    subStreamsInfo.crcs = new int[totalUnpackStreams];
    int nextUnpackStream = 0;
    for (final Folder folder : archive.folders) {
        if (folder.numUnpackSubStreams == 0) {
            continue;
        }
        // Only the first n-1 sizes are stored; the last sub-stream gets
        // whatever remains of the folder's total unpacked size.
        long sum = 0;
        if (nid == NID.kSize) {
            for (int i = 0; i < (folder.numUnpackSubStreams - 1); i++) {
                final long size = readUint64(header);
                subStreamsInfo.unpackSizes[nextUnpackStream++] = size;
                sum += size;
            }
        }
        subStreamsInfo.unpackSizes[nextUnpackStream++] = folder.getUnpackSize() - sum;
    }
    if (nid == NID.kSize) {
        nid = header.readUnsignedByte();
    }
    // CRC records exist only for sub-streams whose CRC isn't already known
    // from the folder (i.e. folders with one sub-stream and a folder CRC).
    int numDigests = 0;
    for (final Folder folder : archive.folders) {
        if (folder.numUnpackSubStreams != 1 || !folder.hasCrc) {
            numDigests += folder.numUnpackSubStreams;
        }
    }
    if (nid == NID.kCRC) {
        final BitSet hasMissingCrc = readAllOrBits(header, numDigests);
        final int[] missingCrcs = new int[numDigests];
        for (int i = 0; i < numDigests; i++) {
            if (hasMissingCrc.get(i)) {
                missingCrcs[i] = Integer.reverseBytes(header.readInt());
            }
        }
        int nextCrc = 0;
        int nextMissingCrc = 0;
        for (final Folder folder: archive.folders) {
            if (folder.numUnpackSubStreams == 1 && folder.hasCrc) {
                // Single sub-stream: reuse the folder's own CRC.
                subStreamsInfo.hasCrc.set(nextCrc, true);
                subStreamsInfo.crcs[nextCrc] = folder.crc;
                ++nextCrc;
            } else {
                for (int i = 0; i < folder.numUnpackSubStreams; i++) {
                    subStreamsInfo.hasCrc.set(nextCrc, hasMissingCrc.get(nextMissingCrc));
                    subStreamsInfo.crcs[nextCrc] = missingCrcs[nextMissingCrc];
                    ++nextCrc;
                    ++nextMissingCrc;
                }
            }
        }
        nid = header.readUnsignedByte();
    }
    if (nid != NID.kEnd) {
        throw new IOException("Badly terminated SubStreamsInfo");
    }
    archive.subStreamsInfo = subStreamsInfo;
}
/**
 * Reads a single Folder definition: its coder chain, the bind pairs that
 * connect coder outputs to coder inputs, and the indices of the packed
 * streams feeding the chain.
 *
 * Changes from the original: the misleading {@code while (moreAlternativeMethods)}
 * around an unconditional throw is now an {@code if} (identical behavior), and
 * the C-style array declarator {@code long packedStreams[]} is now Java-style.
 *
 * @throws IOException on malformed or unsupported folder records
 */
private Folder readFolder(final DataInput header) throws IOException {
    final Folder folder = new Folder();
    final long numCoders = readUint64(header);
    final Coder[] coders = new Coder[(int)numCoders];
    long totalInStreams = 0;
    long totalOutStreams = 0;
    for (int i = 0; i < coders.length; i++) {
        coders[i] = new Coder();
        // Flags byte: low nibble = method-id length; 0x10 clear = simple
        // (1-in/1-out) coder; 0x20 = attributes follow; 0x80 = alternative methods.
        int bits = header.readUnsignedByte();
        final int idSize = bits & 0xf;
        final boolean isSimple = ((bits & 0x10) == 0);
        final boolean hasAttributes = ((bits & 0x20) != 0);
        final boolean moreAlternativeMethods = ((bits & 0x80) != 0);
        coders[i].decompressionMethodId = new byte[idSize];
        header.readFully(coders[i].decompressionMethodId);
        if (isSimple) {
            coders[i].numInStreams = 1;
            coders[i].numOutStreams = 1;
        } else {
            coders[i].numInStreams = readUint64(header);
            coders[i].numOutStreams = readUint64(header);
        }
        totalInStreams += coders[i].numInStreams;
        totalOutStreams += coders[i].numOutStreams;
        if (hasAttributes) {
            final long propertiesSize = readUint64(header);
            coders[i].properties = new byte[(int)propertiesSize];
            header.readFully(coders[i].properties);
        }
        if (DEBUG) {
            final StringBuilder methodStr = new StringBuilder();
            for (final byte b : coders[i].decompressionMethodId) {
                methodStr.append(String.format("%02X", 0xff & b));
            }
            debug(" coder entry %d numInStreams=%d, numOutStreams=%d, method=%s, properties=%s\n", i,
                    coders[i].numInStreams, coders[i].numOutStreams, methodStr.toString(),
                    Arrays.toString(coders[i].properties));
        }
        // Supporting alternative methods would need to keep looping as above.
        if (moreAlternativeMethods) {
            throw new IOException("Alternative methods are unsupported, please report. " +
                    "The reference implementation doesn't support them either.");
        }
    }
    folder.coders = coders;
    folder.totalInputStreams = totalInStreams;
    folder.totalOutputStreams = totalOutStreams;
    if (totalOutStreams == 0) {
        throw new IOException("Total output streams can't be 0");
    }
    // Every output stream except the folder's final output is bound to
    // some input stream, so there are totalOutStreams - 1 bind pairs.
    final long numBindPairs = totalOutStreams - 1;
    final BindPair[] bindPairs = new BindPair[(int)numBindPairs];
    for (int i = 0; i < bindPairs.length; i++) {
        bindPairs[i] = new BindPair();
        bindPairs[i].inIndex = readUint64(header);
        bindPairs[i].outIndex = readUint64(header);
        debug(" bind pair in=%d out=%d\n", bindPairs[i].inIndex, bindPairs[i].outIndex);
    }
    folder.bindPairs = bindPairs;
    if (totalInStreams < numBindPairs) {
        throw new IOException("Total input streams can't be less than the number of bind pairs");
    }
    final long numPackedStreams = totalInStreams - numBindPairs;
    final long[] packedStreams = new long[(int)numPackedStreams];
    if (numPackedStreams == 1) {
        // Exactly one unbound input: find it by elimination.
        int i;
        for (i = 0; i < (int)totalInStreams; i++) {
            if (folder.findBindPairForInStream(i) < 0) {
                break;
            }
        }
        if (i == (int)totalInStreams) {
            throw new IOException("Couldn't find stream's bind pair index");
        }
        packedStreams[0] = i;
    } else {
        for (int i = 0; i < (int)numPackedStreams; i++) {
            packedStreams[i] = readUint64(header);
        }
    }
    folder.packedStreams = packedStreams;
    return folder;
}
/**
 * Reads a bit vector that may use the "all defined" shorthand: a leading
 * non-zero byte means every bit is set and no packed vector follows;
 * otherwise the vector is read via {@link #readBits}.
 */
private BitSet readAllOrBits(final DataInput header, final int size) throws IOException {
    if (header.readUnsignedByte() == 0) {
        return readBits(header, size);
    }
    final BitSet allSet = new BitSet(size);
    allSet.set(0, size);
    return allSet;
}
/**
 * Reads {@code size} bits, most significant bit first, consuming whole
 * bytes from the header (trailing pad bits of the last byte are ignored).
 */
private BitSet readBits(final DataInput header, final int size) throws IOException {
    final BitSet bits = new BitSet(size);
    int cached = 0;
    int bitsLeftInCache = 0;
    for (int i = 0; i < size; i++) {
        if (bitsLeftInCache == 0) {
            cached = header.readUnsignedByte();
            bitsLeftInCache = 8;
        }
        bits.set(i, (cached & (1 << (bitsLeftInCache - 1))) != 0);
        bitsLeftInCache--;
    }
    return bits;
}
/**
 * Reads the FilesInfo section: per-file names, empty-stream/empty-file/anti
 * flags, timestamps and Windows attributes, then attaches the sub-stream
 * sizes and CRCs from the streams info to the entries.
 */
private void readFilesInfo(final DataInput header, final Archive archive) throws IOException {
    debug("FilesInfo");
    final long numFiles = readUint64(header);
    final SevenZArchiveEntry[] files = new SevenZArchiveEntry[(int)numFiles];
    for (int i = 0; i < files.length; i++) {
        files[i] = new SevenZArchiveEntry();
    }
    BitSet isEmptyStream = null;
    BitSet isEmptyFile = null;
    BitSet isAnti = null;
    // Property records: one type byte, a size, then the payload; type 0 ends the list.
    while (true) {
        final int propertyType = header.readUnsignedByte();
        if (propertyType == 0) {
            break;
        }
        long size = readUint64(header);
        switch (propertyType) {
            case NID.kEmptyStream: {
                debug(" kEmptyStream");
                isEmptyStream = readBits(header, files.length);
                break;
            }
            case NID.kEmptyFile: {
                debug(" kEmptyFile");
                if (isEmptyStream == null) { // protect against NPE
                    throw new IOException("Header format error: kEmptyStream must appear before kEmptyFile");
                }
                // Indexed over the empty-stream entries only.
                isEmptyFile = readBits(header, isEmptyStream.cardinality());
                break;
            }
            case NID.kAnti: {
                debug(" kAnti");
                if (isEmptyStream == null) { // protect against NPE
                    throw new IOException("Header format error: kEmptyStream must appear before kAnti");
                }
                isAnti = readBits(header, isEmptyStream.cardinality());
                break;
            }
            case NID.kName: {
                debug(" kNames");
                final int external = header.readUnsignedByte();
                if (external != 0) {
                    throw new IOException("Not implemented");
                } else {
                    if (((size - 1) & 1) != 0) {
                        throw new IOException("File names length invalid");
                    }
                    // Names are NUL-terminated UTF-16LE strings packed back to back.
                    final byte[] names = new byte[(int)(size - 1)];
                    header.readFully(names);
                    int nextFile = 0;
                    int nextName = 0;
                    for (int i = 0; i < names.length; i += 2) {
                        if (names[i] == 0 && names[i+1] == 0) {
                            files[nextFile++].setName(new String(names, nextName, i-nextName, "UTF-16LE"));
                            nextName = i + 2;
                        }
                    }
                    if (nextName != names.length || nextFile != files.length) {
                        throw new IOException("Error parsing file names");
                    }
                }
                break;
            }
            case NID.kCTime: {
                debug(" kCreationTime");
                final BitSet timesDefined = readAllOrBits(header, files.length);
                final int external = header.readUnsignedByte();
                if (external != 0) {
                    throw new IOException("Unimplemented");
                } else {
                    // Timestamps are stored as little-endian 64-bit values.
                    for (int i = 0; i < files.length; i++) {
                        files[i].setHasCreationDate(timesDefined.get(i));
                        if (files[i].getHasCreationDate()) {
                            files[i].setCreationDate(Long.reverseBytes(header.readLong()));
                        }
                    }
                }
                break;
            }
            case NID.kATime: {
                debug(" kLastAccessTime");
                final BitSet timesDefined = readAllOrBits(header, files.length);
                final int external = header.readUnsignedByte();
                if (external != 0) {
                    throw new IOException("Unimplemented");
                } else {
                    for (int i = 0; i < files.length; i++) {
                        files[i].setHasAccessDate(timesDefined.get(i));
                        if (files[i].getHasAccessDate()) {
                            files[i].setAccessDate(Long.reverseBytes(header.readLong()));
                        }
                    }
                }
                break;
            }
            case NID.kMTime: {
                debug(" kLastWriteTime");
                final BitSet timesDefined = readAllOrBits(header, files.length);
                final int external = header.readUnsignedByte();
                if (external != 0) {
                    throw new IOException("Unimplemented");
                } else {
                    for (int i = 0; i < files.length; i++) {
                        files[i].setHasLastModifiedDate(timesDefined.get(i));
                        if (files[i].getHasLastModifiedDate()) {
                            files[i].setLastModifiedDate(Long.reverseBytes(header.readLong()));
                        }
                    }
                }
                break;
            }
            case NID.kWinAttributes: {
                debug(" kWinAttributes");
                final BitSet attributesDefined = readAllOrBits(header, files.length);
                final int external = header.readUnsignedByte();
                if (external != 0) {
                    throw new IOException("Unimplemented");
                } else {
                    for (int i = 0; i < files.length; i++) {
                        files[i].setHasWindowsAttributes(attributesDefined.get(i));
                        if (files[i].getHasWindowsAttributes()) {
                            files[i].setWindowsAttributes(Integer.reverseBytes(header.readInt()));
                        }
                    }
                }
                break;
            }
            case NID.kStartPos: {
                debug(" kStartPos");
                throw new IOException("kStartPos is unsupported, please report");
            }
            case NID.kDummy: {
                debug(" kDummy");
                throw new IOException("kDummy is unsupported, please report");
            }
            default: {
                throw new IOException("Unknown property " + propertyType);
                // FIXME: Should actually:
                //header.skipBytes((int)size);
            }
        }
    }
    // Entries with a stream consume the sub-stream size/CRC slots in order;
    // streamless entries consume the empty-file/anti flag slots in order.
    int nonEmptyFileCounter = 0;
    int emptyFileCounter = 0;
    for (int i = 0; i < files.length; i++) {
        files[i].setHasStream((isEmptyStream == null) ? true : !isEmptyStream.get(i));
        if (files[i].hasStream()) {
            files[i].setDirectory(false);
            files[i].setAntiItem(false);
            files[i].setHasCrc(archive.subStreamsInfo.hasCrc.get(nonEmptyFileCounter));
            files[i].setCrc(archive.subStreamsInfo.crcs[nonEmptyFileCounter]);
            files[i].setSize(archive.subStreamsInfo.unpackSizes[nonEmptyFileCounter]);
            ++nonEmptyFileCounter;
        } else {
            files[i].setDirectory((isEmptyFile == null) ? true : !isEmptyFile.get(emptyFileCounter));
            files[i].setAntiItem((isAnti == null) ? false : isAnti.get(emptyFileCounter));
            files[i].setHasCrc(false);
            files[i].setSize(0);
            ++emptyFileCounter;
        }
    }
    archive.files = files;
    calculateStreamMap(archive);
}
/**
 * Builds the stream map: per-folder first pack-stream index, per-pack-stream
 * byte offsets, per-folder first file index, and per-file folder index
 * (-1 for files without a stream that precede any stream-bearing file).
 */
private void calculateStreamMap(final Archive archive) throws IOException {
    final StreamMap streamMap = new StreamMap();
    int nextFolderPackStreamIndex = 0;
    final int numFolders = (archive.folders != null) ? archive.folders.length : 0;
    streamMap.folderFirstPackStreamIndex = new int[numFolders];
    for (int i = 0; i < numFolders; i++) {
        streamMap.folderFirstPackStreamIndex[i] = nextFolderPackStreamIndex;
        nextFolderPackStreamIndex += archive.folders[i].packedStreams.length;
    }
    // Running byte offsets of each pack stream within the packed area.
    long nextPackStreamOffset = 0;
    final int numPackSizes = (archive.packSizes != null) ? archive.packSizes.length : 0;
    streamMap.packStreamOffsets = new long[numPackSizes];
    for (int i = 0; i < numPackSizes; i++) {
        streamMap.packStreamOffsets[i] = nextPackStreamOffset;
        nextPackStreamOffset += archive.packSizes[i];
    }
    streamMap.folderFirstFileIndex = new int[numFolders];
    streamMap.fileFolderIndex = new int[archive.files.length];
    int nextFolderIndex = 0;
    int nextFolderUnpackStreamIndex = 0;
    for (int i = 0; i < archive.files.length; i++) {
        if (!archive.files[i].hasStream() && nextFolderUnpackStreamIndex == 0) {
            streamMap.fileFolderIndex[i] = -1;
            continue;
        }
        if (nextFolderUnpackStreamIndex == 0) {
            // Skip folders that contain no sub-streams.
            for (; nextFolderIndex < archive.folders.length; ++nextFolderIndex) {
                streamMap.folderFirstFileIndex[nextFolderIndex] = i;
                if (archive.folders[nextFolderIndex].numUnpackSubStreams > 0) {
                    break;
                }
            }
            if (nextFolderIndex >= archive.folders.length) {
                throw new IOException("Too few folders in archive");
            }
        }
        streamMap.fileFolderIndex[i] = nextFolderIndex;
        if (!archive.files[i].hasStream()) {
            continue;
        }
        ++nextFolderUnpackStreamIndex;
        // Advance to the next folder once all its sub-streams are assigned.
        if (nextFolderUnpackStreamIndex >= archive.folders[nextFolderIndex].numUnpackSubStreams) {
            ++nextFolderIndex;
            nextFolderUnpackStreamIndex = 0;
        }
    }
    archive.streamMap = streamMap;
}
/**
 * Prepares currentEntryInputStream for the entry at currentEntryIndex,
 * reusing the open folder stream when the entry lives in the same folder.
 */
private void buildDecodingStream() throws IOException {
    final int folderIndex = archive.streamMap.fileFolderIndex[currentEntryIndex];
    if (folderIndex < 0) {
        // Entry has no associated stream: expose an empty stream.
        currentEntryInputStream = new BoundedInputStream(
                new ByteArrayInputStream(new byte[0]), 0);
        return;
    }
    if (currentFolderIndex == folderIndex) {
        // need to advance the folder input stream past the current file
        drainPreviousEntry();
    } else {
        currentFolderIndex = folderIndex;
        if (currentFolderInputStream != null) {
            currentFolderInputStream.close();
            currentFolderInputStream = null;
        }
        final Folder folder = archive.folders[folderIndex];
        final int firstPackStreamIndex = archive.streamMap.folderFirstPackStreamIndex[folderIndex];
        final long folderOffset = SIGNATURE_HEADER_SIZE + archive.packPos +
                archive.streamMap.packStreamOffsets[firstPackStreamIndex];
        currentFolderInputStream = buildDecoderStack(folder, folderOffset, firstPackStreamIndex);
    }
    // Bound the entry's view of the folder stream to its own size, and
    // verify its CRC while reading when one is recorded.
    final SevenZArchiveEntry file = archive.files[currentEntryIndex];
    final InputStream fileStream = new BoundedInputStream(
            currentFolderInputStream, file.getSize());
    if (file.getHasCrc()) {
        currentEntryInputStream = new CRC32VerifyingInputStream(
                fileStream, file.getSize(), file.getCrc());
    } else {
        currentEntryInputStream = fileStream;
    }
}
/**
 * Consumes and closes the previous entry's stream so that the shared
 * folder stream is positioned at the start of the next entry.
 */
private void drainPreviousEntry() throws IOException {
    if (currentEntryInputStream == null) {
        return;
    }
    final byte[] scratch = new byte[64 * 1024];
    int bytesRead;
    do { // NOPMD
        bytesRead = currentEntryInputStream.read(scratch);
    } while (bytesRead >= 0);
    currentEntryInputStream.close();
    currentEntryInputStream = null;
}
private InputStream buildDecoderStack(final Folder folder, final long folderOffset,
final int firstPackStreamIndex) throws IOException {
file.seek(folderOffset);
InputStream inputStreamStack = new BoundedRandomAccessFileInputStream(file,
archive.packSizes[firstPackStreamIndex]);
for (final Coder coder : folder.coders) {
if (coder.numInStreams != 1 || coder.numOutStreams != 1) {
throw new IOException("Multi input/output stream coders are not yet supported");
}
inputStreamStack = Coders.addDecoder(inputStreamStack, coder, password);
}
if (folder.hasCrc) {
return new CRC32VerifyingInputStream(inputStreamStack,
folder.getUnpackSize(), folder.crc);
} else {
return inputStreamStack;
}
}
/** Reads one byte of the current entry; -1 at end of entry. */
public int read() throws IOException {
    return currentEntryInputStream.read();
}
/** Reads bytes of the current entry into {@code b}; delegates to {@link #read(byte[], int, int)}. */
public int read(byte[] b) throws IOException {
    return read(b, 0, b.length);
}
/** Reads up to {@code len} bytes of the current entry; -1 at end of entry. */
public int read(byte[] b, int off, int len) throws IOException {
    return currentEntryInputStream.read(b, off, len);
}
/**
 * Reads a 7z variable-length unsigned 64-bit integer (7zFormat.txt UINT64):
 * the count of leading 1-bits in the first byte gives the number of
 * little-endian payload bytes that follow, and the remaining low bits of
 * the first byte form the most significant part of the value.
 *
 * Bug fix vs. the original: the accumulator was an {@code int}, so values
 * needing more than 31 bits were truncated and, because Java masks an int
 * shift distance to 5 bits, {@code nextByte << (8 * i)} wrapped for i >= 4.
 * The accumulator and shifts are now 64-bit.
 *
 * @param in the header input
 * @return the decoded value (as a signed long carrying the unsigned bits)
 * @throws IOException on premature end of input
 */
private static long readUint64(final DataInput in) throws IOException {
    final int firstByte = in.readUnsignedByte();
    int mask = 0x80;
    long value = 0;
    for (int i = 0; i < 8; i++) {
        if ((firstByte & mask) == 0) {
            return value | ((long) (firstByte & (mask - 1)) << (8 * i));
        }
        final long nextByte = in.readUnsignedByte();
        value |= nextByte << (8 * i);
        mask >>>= 1;
    }
    return value;
}
/**
 * Wraps another stream and limits reads to a fixed number of bytes.
 * close() is a no-op because the wrapped folder stream is shared
 * between consecutive entries.
 */
private static class BoundedInputStream extends InputStream {
    private final InputStream in;
    private long bytesRemaining;
    public BoundedInputStream(final InputStream in, final long size) {
        this.in = in;
        bytesRemaining = size;
    }
    @Override
    public int read() throws IOException {
        if (bytesRemaining <= 0) {
            return -1;
        }
        --bytesRemaining;
        return in.read();
    }
    @Override
    public int read(byte[] b, int off, int len) throws IOException {
        if (bytesRemaining == 0) {
            return -1;
        }
        // Never ask the underlying stream for more than the remaining budget.
        final int bytesToRead = bytesRemaining < len ? (int) bytesRemaining : len;
        final int bytesRead = in.read(b, off, bytesToRead);
        if (bytesRead >= 0) {
            bytesRemaining -= bytesRead;
        }
        return bytesRead;
    }
    @Override
    public void close() {
        // intentionally does not close the shared underlying stream
    }
}
}
@@ -0,0 +1,121 @@
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*/
package org.apache.commons.compress.archivers.sevenz;
import java.io.IOException;
import java.io.InputStream;
import java.security.GeneralSecurityException;
import java.security.MessageDigest;
import java.security.NoSuchAlgorithmException;
import javax.crypto.Cipher;
import javax.crypto.CipherInputStream;
import javax.crypto.SecretKey;
import javax.crypto.spec.IvParameterSpec;
import javax.crypto.spec.SecretKeySpec;
import org.apache.commons.compress.PasswordRequiredException;
/**
 * Decoder for the 7z AES-256 + SHA-256 encryption method. The returned stream
 * defers all property parsing, key derivation and cipher construction until
 * the first read, so opening an encrypted entry is cheap.
 */
class AES256SHA256Decoder extends CoderBase {
@Override
InputStream decode(final String archiveName, final InputStream in, final long uncompressedLength,
final Coder coder, final byte[] passwordBytes, int maxMemoryLimitInKb) throws IOException {
return new InputStream() {
// Guards against re-deriving the key on every read.
private boolean isInitialized = false;
private CipherInputStream cipherInputStream = null;
// Parses coder properties, derives the AES key from the password and
// builds the decrypting stream. Called lazily from the read methods.
private CipherInputStream init() throws IOException {
if (isInitialized) {
return cipherInputStream;
}
// Property byte 0: low 6 bits are the key-derivation cycle count
// expressed as a power of two; top 2 bits extend the IV/salt sizes.
final int byte0 = 0xff & coder.properties[0];
final int numCyclesPower = byte0 & 0x3f;
final int byte1 = 0xff & coder.properties[1];
final int ivSize = ((byte0 >> 6) & 1) + (byte1 & 0x0f);
final int saltSize = ((byte0 >> 7) & 1) + (byte1 >> 4);
// Salt and IV must fit inside the properties array after the 2 header bytes.
if (2 + saltSize + ivSize > coder.properties.length) {
throw new IOException("Salt size + IV size too long in " + archiveName);
}
final byte[] salt = new byte[saltSize];
System.arraycopy(coder.properties, 2, salt, 0, saltSize);
// IV buffer is always 16 bytes (AES block size); bytes beyond ivSize stay zero.
final byte[] iv = new byte[16];
System.arraycopy(coder.properties, 2 + saltSize, iv, 0, ivSize);
if (passwordBytes == null) {
throw new PasswordRequiredException(archiveName);
}
final byte[] aesKeyBytes;
if (numCyclesPower == 0x3f) {
// Special value 0x3f: no hashing — the key is salt followed by the
// password, zero-padded to 32 bytes.
aesKeyBytes = new byte[32];
System.arraycopy(salt, 0, aesKeyBytes, 0, saltSize);
System.arraycopy(passwordBytes, 0, aesKeyBytes, saltSize,
Math.min(passwordBytes.length, aesKeyBytes.length - saltSize));
} else {
final MessageDigest digest;
try {
digest = MessageDigest.getInstance("SHA-256");
} catch (final NoSuchAlgorithmException noSuchAlgorithmException) {
throw new IOException("SHA-256 is unsupported by your Java implementation",
noSuchAlgorithmException);
}
// Key stretching: hash (salt | password | counter) for 2^numCyclesPower
// rounds; "extra" is an 8-byte little-endian round counter.
final byte[] extra = new byte[8];
for (long j = 0; j < (1L << numCyclesPower); j++) {
digest.update(salt);
digest.update(passwordBytes);
digest.update(extra);
// Increment the little-endian counter, propagating the carry.
for (int k = 0; k < extra.length; k++) {
++extra[k];
if (extra[k] != 0) {
break;
}
}
}
aesKeyBytes = digest.digest();
}
final SecretKey aesKey = new SecretKeySpec(aesKeyBytes, "AES");
try {
final Cipher cipher = Cipher.getInstance("AES/CBC/NoPadding");
cipher.init(Cipher.DECRYPT_MODE, aesKey, new IvParameterSpec(iv));
cipherInputStream = new CipherInputStream(in, cipher);
isInitialized = true;
return cipherInputStream;
} catch (final GeneralSecurityException generalSecurityException) {
throw new IOException("Decryption error " +
"(do you have the JCE Unlimited Strength Jurisdiction Policy Files installed?)",
generalSecurityException);
}
}
@Override
public int read() throws IOException {
return init().read();
}
@Override
public int read(final byte[] b, final int off, final int len) throws IOException {
return init().read(b, off, len);
}
@Override
public void close() throws IOException {
// Only the cipher stream is closed; a never-read entry leaves "in" untouched.
if (cipherInputStream != null) {
cipherInputStream.close();
}
}
};
}
}
@@ -0,0 +1,72 @@
/*
* Copyright 2001-2006 The Apache Software Foundation.
*
* Licensed under the Apache License, Version 2.0 (the "License")
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.commons.configuration;
import javax.xml.transform.Transformer;
import javax.xml.transform.TransformerFactory;
import javax.xml.transform.dom.DOMResult;
import javax.xml.transform.sax.SAXSource;
import org.apache.commons.jxpath.JXPathContext;
import org.w3c.dom.Document;
import org.w3c.dom.Node;
import org.xml.sax.InputSource;
import junit.framework.TestCase;
/**
 * Test class for HierarchicalConfigurationXMLReader: loads a hierarchical XML
 * configuration, transforms the SAX events emitted by the reader into a DOM
 * tree and verifies its structure with JXPath queries.
 *
 * @version $Id$
 */
public class TestHierarchicalConfigurationXMLReader extends TestCase {
    private static final String TEST_FILE = "conf/testHierarchicalXMLConfiguration.xml";

    /** The reader under test, backed by the loaded configuration. */
    private HierarchicalConfigurationXMLReader parser;

    protected void setUp() throws Exception {
        XMLConfiguration config = new XMLConfiguration();
        config.setFileName(TEST_FILE);
        config.load();
        parser = new HierarchicalConfigurationXMLReader(config);
    }

    public void testParse() throws Exception {
        // Run an identity transform: SAX events from the parser -> DOM tree.
        SAXSource source = new SAXSource(parser, new InputSource());
        DOMResult result = new DOMResult();
        Transformer transformer = TransformerFactory.newInstance().newTransformer();
        transformer.transform(source, result);

        // Inspect the produced document via XPath.
        Document document = (Document) result.getNode();
        Node root = document.getDocumentElement();
        JXPathContext ctx = JXPathContext.newContext(root);
        assertEquals("Wrong name of root element", "config", root.getNodeName());
        assertEquals("Wrong number of children of root", 1,
                ctx.selectNodes("/*").size());
        assertEquals("Wrong number of tables", 2,
                ctx.selectNodes("/tables/table").size());
        assertEquals("Wrong name of first table", "users",
                ctx.getValue("/tables/table[1]/name"));
        assertEquals("Wrong number of fields in first table", 5,
                ctx.selectNodes("/tables/table[1]/fields/field").size());
        assertEquals("Wrong attribute value", "system",
                ctx.getValue("/tables/table[1]/@tableType"));
    }
}
@@ -0,0 +1,81 @@
/*
* Copyright 2001-2006 The Apache Software Foundation.
*
* Licensed under the Apache License, Version 2.0 (the "License")
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.commons.configuration;
import javax.xml.transform.Transformer;
import javax.xml.transform.TransformerFactory;
import javax.xml.transform.dom.DOMResult;
import javax.xml.transform.sax.SAXSource;
import org.apache.commons.jxpath.JXPathContext;
import org.w3c.dom.Document;
import org.w3c.dom.Node;
import org.xml.sax.InputSource;
import junit.framework.TestCase;
/**
 * Test class for HierarchicalConfigurationXMLReader.
 *
 * <p>Unlike the plain variant of this test, {@code testParse()} tolerates the
 * {@code NoSuchMethodError} thrown deep inside the Xalan version bundled with
 * JDK 1.3 and simply skips the assertions in that case.</p>
 *
 * @version $Id$
 */
public class TestHierarchicalConfigurationXMLReader extends TestCase
{
private static final String TEST_FILE = "conf/testHierarchicalXMLConfiguration.xml";
// Reader under test; created from the loaded configuration in setUp().
private HierarchicalConfigurationXMLReader parser;
protected void setUp() throws Exception
{
XMLConfiguration config =
new XMLConfiguration();
config.setFileName(TEST_FILE);
config.load();
parser = new HierarchicalConfigurationXMLReader(config);
}
public void testParse() throws Exception
{
// Identity transform: SAX events produced by the reader -> DOM tree.
SAXSource source = new SAXSource(parser, new InputSource());
DOMResult result = new DOMResult();
Transformer trans = TransformerFactory.newInstance().newTransformer();
try
{
//When executed on a JDK 1.3 this line throws a NoSuchMethodError
//somewhere deep in Xalan. We simply ignore this.
trans.transform(source, result);
}
catch(NoSuchMethodError ex)
{
return;
}
// Verify the structure of the generated document via JXPath queries.
Node root = ((Document) result.getNode()).getDocumentElement();
JXPathContext ctx = JXPathContext.newContext(root);
assertEquals("Wrong name of root element", "config", root.getNodeName());
assertEquals("Wrong number of children of root", 1, ctx.selectNodes(
"/*").size());
assertEquals("Wrong number of tables", 2, ctx.selectNodes(
"/tables/table").size());
assertEquals("Wrong name of first table", "users", ctx
.getValue("/tables/table[1]/name"));
assertEquals("Wrong number of fields in first table", 5, ctx
.selectNodes("/tables/table[1]/fields/field").size());
assertEquals("Wrong attribute value", "system", ctx
.getValue("/tables/table[1]/@tableType"));
}
}
@@ -0,0 +1,56 @@
/**
* Copyright (C) FuseSource, Inc.
* http://fusesource.com
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.fusesource.insight.camel.audit;
import org.codehaus.jackson.map.ObjectMapper;
import java.text.SimpleDateFormat;
import java.util.Date;
import java.util.Map;
/**
 * Utility methods used by the audit module to render dates as ISO-8601
 * timestamps and arbitrary objects as JSON.
 */
public final class ScriptUtils {
    // SimpleDateFormat is NOT thread-safe: every direct use of this shared
    // instance must synchronize on it (see toIso()).
    private static final SimpleDateFormat format;
    private static final ObjectMapper mapper;
    static {
        format = new SimpleDateFormat("yyyy-MM-dd'T'HH:mm:ss.SSSX");
        mapper = new ObjectMapper();
        // NOTE(review): assumes Jackson copies the DateFormat per serialization,
        // making the shared mapper safe to use concurrently — confirm.
        mapper.setSerializationConfig(mapper.getSerializationConfig().withDateFormat(format));
    }

    /**
     * Formats the given date as an ISO-8601 timestamp.
     *
     * @param d the date to format
     * @return the formatted timestamp
     */
    public static String toIso(Date d) {
        // Fix: the previous unsynchronized call could return corrupted output
        // under concurrent use, because SimpleDateFormat keeps mutable
        // internal state across format() calls.
        synchronized (format) {
            return format.format(d);
        }
    }

    /**
     * Serializes an object to its JSON representation.
     *
     * @param o the object to serialize
     * @return the JSON string
     * @throws IllegalArgumentException if the object cannot be serialized
     */
    public static String toJson(Object o) {
        try {
            return mapper.writeValueAsString(o);
        } catch (Exception e) {
            throw new IllegalArgumentException("Could not serialize " + o, e);
        }
    }

    /**
     * Parses a JSON document into a map.
     *
     * @param str the JSON text to parse
     * @return the parsed key/value structure
     * @throws IllegalArgumentException if the text cannot be parsed
     */
    public static Map parseJson(String str) {
        try {
            return mapper.readValue(str, Map.class);
        } catch (Exception e) {
            throw new IllegalArgumentException("Could not deserialize " + str, e);
        }
    }
}
@@ -0,0 +1,190 @@
/**
* Copyright (C) FuseSource, Inc.
* http://fusesource.com
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.fusesource.fabric.service.jclouds;
import java.io.UnsupportedEncodingException;
import java.net.MalformedURLException;
import java.net.URISyntaxException;
import java.net.URLDecoder;
import java.util.*;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.ConcurrentMap;
import com.google.common.base.Strings;
import com.google.common.collect.ImmutableSet;
import com.google.inject.Module;
import org.fusesource.fabric.api.*;
import org.fusesource.fabric.api.CreateJCloudsContainerOptions;
import org.jclouds.compute.ComputeService;
import org.jclouds.compute.ComputeServiceContext;
import org.jclouds.compute.ComputeServiceContextFactory;
import org.jclouds.compute.RunNodesException;
import org.jclouds.compute.domain.NodeMetadata;
import org.jclouds.compute.domain.TemplateBuilder;
import org.jclouds.compute.options.RunScriptOptions;
import org.jclouds.domain.Credentials;
import org.jclouds.rest.RestContextFactory;
import static org.fusesource.fabric.internal.ContainerProviderUtils.buildStartupScript;
/**
 * A concrete {@link org.fusesource.fabric.api.ContainerProvider} that creates {@link org.fusesource.fabric.api.Container}s via jclouds {@link ComputeService}.
 */
public class JcloudsContainerProvider implements ContainerProvider<CreateJCloudsContainerOptions, CreateJCloudsContainerMetadata> {
// Compute services registered via bind(), keyed by their jclouds provider id.
private final ConcurrentMap<String, ComputeService> computeServiceMap = new ConcurrentHashMap<String, ComputeService>();
/**
 * Registers a compute service under its provider id (service bind callback).
 */
public void bind(ComputeService computeService) {
if(computeService != null) {
String providerName = computeService.getContext().getProviderSpecificContext().getId();
if(providerName != null) {
computeServiceMap.put(providerName,computeService);
}
}
}
/**
 * Removes a previously registered compute service (service unbind callback).
 */
public void unbind(ComputeService computeService) {
if(computeService != null) {
String providerName = computeService.getContext().getProviderSpecificContext().getId();
if(providerName != null) {
computeServiceMap.remove(providerName);
}
}
}
public ConcurrentMap<String, ComputeService> getComputeServiceMap() {
return computeServiceMap;
}
/**
 * Provisions one or more cloud nodes per the given options, runs the fabric
 * startup script on each node and returns the metadata describing them.
 */
public Set<CreateJCloudsContainerMetadata> create(CreateJCloudsContainerOptions options) throws MalformedURLException, RunNodesException, URISyntaxException {
Set<CreateJCloudsContainerMetadata> result = new LinkedHashSet<CreateJCloudsContainerMetadata>();
ComputeService computeService = computeServiceMap.get(options.getProviderName());
// No pre-registered service for this provider: build an ad-hoc context
// from the credentials carried by the options.
if (computeService == null) {
Iterable<? extends Module> modules = ImmutableSet.of();
Properties props = new Properties();
props.put("provider", options.getProviderName());
props.put("identity", options.getIdentity());
props.put("credential", options.getCredential());
if (!Strings.isNullOrEmpty(options.getOwner()) && options.getProviderName().equals("aws-ec2")) {
props.put("jclouds.ec2.ami-owners", options.getOwner());
}
RestContextFactory restFactory = new RestContextFactory();
ComputeServiceContext context = new ComputeServiceContextFactory(restFactory).createContext(options.getProviderName(), options.getIdentity(), options.getCredential(), modules, props);
computeService = context.getComputeService();
}
// Translate the requested instance characteristics into a jclouds template.
TemplateBuilder builder = computeService.templateBuilder();
builder.any();
switch (options.getInstanceType()) {
case Smallest:
builder.smallest();
break;
case Biggest:
builder.biggest();
break;
case Fastest:
builder.fastest();
}
if (options.getLocationId() != null) {
builder.locationId(options.getLocationId());
}
if (options.getImageId() != null) {
builder.imageId(options.getImageId());
}
if (options.getHardwareId() != null) {
builder.hardwareId(options.getHardwareId());
}
Set<? extends NodeMetadata> metadatas = null;
metadatas = computeService.createNodesInGroup(options.getGroup(), options.getNumber(), builder.build());
int suffix = 1;
// NOTE(review): "buffer" and "first" accumulate a host:port list that is
// never read afterwards — confirm whether this is dead code.
StringBuilder buffer = new StringBuilder();
boolean first = true;
if (metadatas != null) {
for (NodeMetadata nodeMetadata : metadatas) {
Credentials credentials = null;
// Some cloud providers do not allow shell access as root, so the user
// from the options overrides the node-provided credentials when set.
if (options.getUser() != null) {
credentials = new Credentials(options.getUser(), nodeMetadata.getCredentials().credential);
} else {
credentials = nodeMetadata.getCredentials();
}
String id = nodeMetadata.getId();
Set<String> publicAddresses = nodeMetadata.getPublicAddresses();
for (String pa: publicAddresses) {
if (first) {
first = false;
} else {
buffer.append(",");
}
buffer.append(pa + ":" + options.getServicePort());
}
// When several nodes are created, suffix the container name with a counter.
String containerName = options.getName();
if(options.getNumber() > 1) {
containerName+=suffix++;
}
String script = buildStartupScript(options.name(containerName));
if (credentials != null) {
computeService.runScriptOnNode(id, script, RunScriptOptions.Builder.overrideCredentialsWith(credentials).runAsRoot(false));
} else {
computeService.runScriptOnNode(id, script);
}
CreateJCloudsContainerMetadata jCloudsContainerMetadata = new CreateJCloudsContainerMetadata();
jCloudsContainerMetadata.setNodeId(nodeMetadata.getId());
jCloudsContainerMetadata.setContainerName(containerName);
jCloudsContainerMetadata.setPublicAddresses(nodeMetadata.getPublicAddresses());
jCloudsContainerMetadata.setHostname(nodeMetadata.getHostname());
result.add(jCloudsContainerMetadata);
}
}
return result;
}
/**
 * Splits a query string of the form {@code a=1&b=2} into a map; parameters
 * without a value map to {@code null}.
 */
public Map<String, String> parseQuery(String uri) throws URISyntaxException {
//TODO: This is copied from URISupport. We should move URISupport to core so that we don't have to copy stuff around.
try {
Map<String, String> rc = new HashMap<String, String>();
if (uri != null) {
String[] parameters = uri.split("&");
for (int i = 0; i < parameters.length; i++) {
int p = parameters[i].indexOf("=");
if (p >= 0) {
String name = URLDecoder.decode(parameters[i].substring(0, p), "UTF-8");
String value = URLDecoder.decode(parameters[i].substring(p + 1), "UTF-8");
rc.put(name, value);
} else {
rc.put(parameters[i], null);
}
}
}
return rc;
} catch (UnsupportedEncodingException e) {
// Wrap as URISyntaxException so callers only deal with one exception type.
throw (URISyntaxException) new URISyntaxException(e.toString(), "Invalid encoding").initCause(e);
}
}
}
@@ -0,0 +1,476 @@
/**
* Copyright (C) FuseSource, Inc.
* http://fusesource.com
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.fusesource.fabric.agent.download;
import java.io.File;
import java.io.FileOutputStream;
import java.io.IOException;
import java.io.InputStream;
import java.io.OutputStream;
import java.net.MalformedURLException;
import java.net.URL;
import java.util.ArrayList;
import java.util.Collections;
import java.util.Comparator;
import java.util.List;
import java.util.Set;
import java.util.TreeSet;
import java.util.concurrent.ExecutorService;
import javax.xml.parsers.ParserConfigurationException;
import org.fusesource.fabric.agent.mvn.DownloadableArtifact;
import org.fusesource.fabric.agent.mvn.MavenConfiguration;
import org.fusesource.fabric.agent.mvn.MavenRepositoryURL;
import org.fusesource.fabric.agent.mvn.Parser;
import org.fusesource.fabric.agent.mvn.Version;
import org.fusesource.fabric.agent.mvn.VersionRange;
import org.fusesource.fabric.agent.utils.URLUtils;
import org.fusesource.fabric.agent.utils.XmlUtils;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.w3c.dom.Document;
import org.w3c.dom.Element;
import org.xml.sax.SAXException;
/**
 * Download task that resolves a {@code mvn:} URL against the configured Maven
 * repositories and copies the resolved artifact into the system (local)
 * repository, trying candidate locations in priority order.
 */
public class MavenDownloadTask extends AbstractDownloadTask implements Runnable {
/**
 * Logger.
 */
private static final Logger LOG = LoggerFactory.getLogger(AbstractDownloadTask.class);
/**
 * 2 spaces indent;
 */
private static final String Ix2 = " ";
/**
 * 4 spaces indent;
 */
private static final String Ix4 = " ";
// Target (system/local) repository the downloaded artifact is written to.
private final MavenRepositoryURL system;
// Maven settings: remote repositories, proxy and certificate-check options.
private final MavenConfiguration configuration;
/**
 * Creates a download task.
 *
 * @param url the mvn: url to resolve
 * @param system the system (local) repository to store the artifact in
 * @param configuration maven configuration to use for resolution
 * @param executor executor the task is scheduled on
 */
public MavenDownloadTask(String url, MavenRepositoryURL system, MavenConfiguration configuration, ExecutorService executor) {
super(url, executor);
this.system = system;
this.configuration = configuration;
}
/**
 * Resolves and downloads the artifact referenced by the task's url.
 *
 * @return the downloaded (or already locally present) artifact file
 * @throws Exception if the url cannot be resolved from any repository
 */
protected File download() throws Exception {
Parser parser = new Parser(url.substring("mvn:".length()));
Set<DownloadableArtifact> downloadables;
// For release versions, first check whether the artifact already exists
// in the system repository and short-circuit the remote download.
if (!parser.getVersion().contains("SNAPSHOT")) {
downloadables = doCollectPossibleDownloads(parser, Collections.singletonList(system));
if (!downloadables.isEmpty()) {
DownloadableArtifact artifact = downloadables.iterator().next();
URL url = artifact.getArtifactURL();
File file = new File(url.getFile());
if (file.exists()) {
return file;
}
}
}
downloadables = collectPossibleDownloads(parser);
if (LOG.isTraceEnabled()) {
LOG.trace("Possible download locations for [" + url + "]");
for (DownloadableArtifact artifact : downloadables) {
LOG.trace(" " + artifact);
}
}
// Try each candidate in order; the first successful download wins.
for (DownloadableArtifact artifact : downloadables) {
LOG.trace("Downloading [" + artifact + "]");
try {
configuration.enableProxy(artifact.getArtifactURL());
String repository = system.getFile().getAbsolutePath();
if (!repository.endsWith(Parser.FILE_SEPARATOR)) {
repository = repository + Parser.FILE_SEPARATOR;
}
// Write to a ".tmp" sibling first, then swap it into place.
InputStream is = artifact.getInputStream();
File file = new File(repository + parser.getArtifactPath());
File tmp = new File(file.getAbsolutePath() + ".tmp");
tmp.getParentFile().mkdirs();
OutputStream os = new FileOutputStream(tmp);
copy(is, os);
file.delete();
tmp.renameTo(file);
return file;
} catch (IOException ignore) {
// go on with next repository
LOG.debug(Ix2 + "Could not download [" + artifact + "]");
LOG.trace(Ix2 + "Reason [" + ignore.getClass().getName() + ": " + ignore.getMessage() + "]");
}
}
// no artifact found
throw new IOException("URL [" + url + "] could not be resolved.");
}
/**
 * Searches all available repositories for possible artifacts to download. The returned set of downloadable
 * artifacts (never null, but maybe empty) will be sorted descending by version of the artifact and by position of
 * repository in the list of repositories to be searched.
 *
 * @return a non null sorted set of artifacts
 * @throws java.net.MalformedURLException re-thrown
 */
private Set<DownloadableArtifact> collectPossibleDownloads(final Parser parser)
throws MalformedURLException {
final List<MavenRepositoryURL> repositories = new ArrayList<MavenRepositoryURL>();
repositories.addAll(configuration.getRepositories());
repositories.add(system);
repositories.add(configuration.getLocalRepository());
// if the url contains a preferred repository add that repository as the first repository to be searched
if (parser.getRepositoryURL() != null) {
repositories.add(
repositories.size() == 0 ? 0 : 1,
parser.getRepositoryURL()
);
}
return doCollectPossibleDownloads(parser, repositories);
}
/**
 * Search the default repositories for possible artifacts to download.
 * NOTE(review): this private method appears to have no callers within this
 * class — confirm whether it is dead code.
 */
private Set<DownloadableArtifact> collectDefaultPossibleDownloads(final Parser parser)
throws MalformedURLException {
return doCollectPossibleDownloads(parser, configuration.getDefaultRepositories());
}
/**
 * Collects downloadable artifact candidates for the parsed url from the
 * given repositories, dispatching on the version kind (exact, LATEST,
 * SNAPSHOT or version range).
 */
private Set<DownloadableArtifact> doCollectPossibleDownloads(final Parser parser,
final List<MavenRepositoryURL> repositories)
throws MalformedURLException {
final Set<DownloadableArtifact> downloadables = new TreeSet<DownloadableArtifact>(new DownloadComparator());
// find artifact type
final boolean isLatest = parser.getVersion().contains("LATEST");
final boolean isSnapshot = parser.getVersion().endsWith("SNAPSHOT");
VersionRange versionRange = null;
if (!isLatest && !isSnapshot) {
try {
versionRange = new VersionRange(parser.getVersion());
} catch (Exception ignore) {
// well, we do not have a range of versions
}
}
final boolean isVersionRange = versionRange != null;
final boolean isExactVersion = !(isLatest || isSnapshot || isVersionRange);
// Priority increases with position in the list, so earlier repositories
// sort ahead of later ones for equal versions (see DownloadComparator).
int priority = 0;
for (MavenRepositoryURL repositoryURL : repositories) {
LOG.debug("Collecting versions from repository [" + repositoryURL + "]");
priority++;
try {
if (isExactVersion) {
downloadables.add(resolveExactVersion(parser, repositoryURL, priority));
} else if (isSnapshot) {
final DownloadableArtifact snapshot =
resolveSnapshotVersion(parser, repositoryURL, priority, parser.getVersion());
downloadables.add(snapshot);
// if we have a local built snapshot we skip the rest of repositories
if (snapshot.isLocalSnapshotBuild()) {
break;
}
} else {
final Document metadata = getMetadata(repositoryURL.getURL(),
new String[]
{
parser.getArtifactLocalMetdataPath(),
parser.getArtifactMetdataPath()
}
);
if (isLatest) {
downloadables.add(resolveLatestVersion(parser, metadata, repositoryURL, priority));
} else {
downloadables.addAll(resolveRangeVersions(parser, metadata, repositoryURL, priority, versionRange));
}
}
} catch (IOException ignore) {
// if metadata cannot be found we go on with the next repository. Maybe we have better luck.
LOG.debug(Ix2 + "Skipping repository [" + repositoryURL + "], reason: " + ignore.getMessage());
}
}
return downloadables;
}
/**
 * Returns maven metadata by looking first for a local metadata xml file and then for a remote one.
 * If no metadata file is found or cannot be used an IOException is thrown.
 *
 * @param repositoryURL url of the repository from where the metadata should be parsed
 * @param metadataLocations array of location paths to try as metadata
 * @return parsed xml document for the metadata file
 * @throws java.io.IOException if:
 * metadata file cannot be located
 */
private Document getMetadata(final URL repositoryURL,
final String[] metadataLocations)
throws IOException {
LOG.debug(Ix2 + "Resolving metadata");
InputStream inputStream = null;
String foundLocation = null;
for (String location : metadataLocations) {
try {
// first try to get the artifact local metadata
inputStream = prepareInputStream(repositoryURL, location);
// get out at first found location
foundLocation = location;
LOG.trace(Ix4 + "Metadata found: [" + location + "]");
break;
} catch (IOException ignore) {
LOG.trace(Ix4 + "Metadata not found: [" + location + "]");
}
}
if (inputStream == null) {
throw new IOException("Metadata not found in repository [" + repositoryURL + "]");
}
try {
return XmlUtils.parseDoc(inputStream);
} catch (ParserConfigurationException e) {
throw initIOException("Metadata [" + foundLocation + "] could not be parsed.", e);
} catch (SAXException e) {
throw initIOException("Metadata [" + foundLocation + "] could not be parsed.", e);
}
}
/**
 * Returns a downloadable artifact where the version is fully specified.
 *
 * @param repositoryURL the url of the repository to download from
 * @param priority repository priority
 * @return a downloadable artifact
 * @throws IOException re-thrown
 */
private DownloadableArtifact resolveExactVersion(final Parser parser,
final MavenRepositoryURL repositoryURL,
final int priority)
throws IOException {
if (!repositoryURL.isReleasesEnabled()) {
throw new IOException("Releases not enabled");
}
LOG.debug(Ix2 + "Resolving exact version");
return new DownloadableArtifact(
parser.getVersion(),
priority,
repositoryURL.getURL(),
parser.getArtifactPath(),
false, // no local built snapshot
configuration.getCertificateCheck()
);
}
/**
 * Resolves the latest version of the artifact.
 *
 * @param metadata parsed metadata xml
 * @param repositoryURL the url of the repository to download from
 * @param priority repository priority
 * @return a downloadable artifact or throw an IOException if latest version cannot be determined.
 * @throws IOException if the artifact could not be resolved
 */
private DownloadableArtifact resolveLatestVersion(final Parser parser,
final Document metadata,
final MavenRepositoryURL repositoryURL,
final int priority)
throws IOException {
LOG.debug(Ix2 + "Resolving latest version");
final String version = XmlUtils.getTextContentOfElement(metadata, "versioning/versions/version[last]");
if (version != null) {
if (version.endsWith("SNAPSHOT")) {
return resolveSnapshotVersion(parser, repositoryURL, priority, version);
} else {
return new DownloadableArtifact(
version,
priority,
repositoryURL.getURL(),
parser.getArtifactPath(version),
false, // no local built snapshot
configuration.getCertificateCheck()
);
}
}
throw new IOException("LATEST version could not be resolved.");
}
/**
 * Resolves snapshot version of the artifact.
 * Snapshot versions are resolved by parsing the metadata within the directory that contains the version as:
 * 1. if the metadata contains entries like "versioning/snapshot/timestamp (most likely on remote repos) it will
 * use the timestamp and buildnumber to point the real version
 * 2. if the metadata does not contain the above (most likely a local repo) it will use as version the
 * versioning/lastUpdated
 *
 * @param repositoryURL the url of the repository to download from
 * @param priority repository priority
 * @param version snapshot version to resolve
 * @return an input stream to the artifact
 * @throws IOException if the artifact could not be resolved
 */
private DownloadableArtifact resolveSnapshotVersion(final Parser parser,
final MavenRepositoryURL repositoryURL,
final int priority,
final String version)
throws IOException {
if (!repositoryURL.isSnapshotsEnabled()) {
throw new IOException("Snapshots not enabled");
}
LOG.debug(Ix2 + "Resolving snapshot version [" + version + "]");
try {
final Document snapshotMetadata = getMetadata(repositoryURL.getURL(),
new String[]
{
parser.getVersionLocalMetadataPath(version),
parser.getVersionMetadataPath(version)
}
);
final String timestamp =
XmlUtils.getTextContentOfElement(snapshotMetadata, "versioning/snapshot/timestamp");
final String buildNumber =
XmlUtils.getTextContentOfElement(snapshotMetadata, "versioning/snapshot/buildNumber");
final String localSnapshot =
XmlUtils.getTextContentOfElement(snapshotMetadata, "versioning/snapshot/localCopy");
if (timestamp != null && buildNumber != null) {
return new DownloadableArtifact(
parser.getSnapshotVersion(version, timestamp, buildNumber),
priority,
repositoryURL.getURL(),
parser.getSnapshotPath(version, timestamp, buildNumber),
localSnapshot != null,
configuration.getCertificateCheck()
);
} else {
String lastUpdated = XmlUtils.getTextContentOfElement(snapshotMetadata, "versioning/lastUpdated");
if (lastUpdated != null) {
// last updated should contain in the first 8 chars the date and then the time,
// fact that is not compatible with timeStamp from remote repos which has a "." after date
if (lastUpdated.length() > 8) {
lastUpdated = lastUpdated.substring(0, 8) + "." + lastUpdated.substring(8);
return new DownloadableArtifact(
parser.getSnapshotVersion(version, lastUpdated, "0"),
priority,
repositoryURL.getURL(),
parser.getArtifactPath(version),
localSnapshot != null,
configuration.getCertificateCheck()
);
}
}
}
} catch (IOException ignore) {
// in this case we could not find any metadata so try to get the *-SNAPSHOT file directly
}
return new DownloadableArtifact(
parser.getVersion(),
priority,
repositoryURL.getURL(),
parser.getArtifactPath(),
false, // no local built snapshot
configuration.getCertificateCheck()
);
}
/**
 * Resolves all versions that fits the provided range.
 *
 * @param metadata parsed metadata xml
 * @param repositoryURL the url of the repository to download from
 * @param priority repository priority
 * @param versionRange version range to fulfill
 * @return list of downloadable artifacts that match the range
 * @throws IOException re-thrown
 */
private List<DownloadableArtifact> resolveRangeVersions(final Parser parser,
final Document metadata,
final MavenRepositoryURL repositoryURL,
final int priority,
final VersionRange versionRange)
throws IOException {
LOG.debug(Ix2 + "Resolving versions in range [" + versionRange + "]");
final List<DownloadableArtifact> downladables = new ArrayList<DownloadableArtifact>();
final List<Element> elements = XmlUtils.getElements(metadata, "versioning/versions/version");
if (elements != null && elements.size() > 0) {
for (Element element : elements) {
final String versionString = XmlUtils.getTextContent(element);
if (versionString != null) {
final Version version = new Version(versionString);
if (versionRange.includes(version)) {
if (versionString.endsWith("SNAPSHOT")) {
downladables.add(
resolveSnapshotVersion(parser, repositoryURL, priority, versionString)
);
} else {
downladables.add(
new DownloadableArtifact(
versionString,
priority,
repositoryURL.getURL(),
parser.getArtifactPath(versionString),
false, // no local built snapshot
configuration.getCertificateCheck()
)
);
}
}
}
}
}
return downladables;
}
/**
 * @param repositoryURL url to repository
 * @param path a path to the artifact jar file
 * @return prepared input stream
 * @throws IOException re-thrown
 * @see org.ops4j.net.URLUtils#prepareInputStream(java.net.URL, boolean)
 */
private InputStream prepareInputStream(URL repositoryURL, final String path)
throws IOException {
String repository = repositoryURL.toExternalForm();
if (!repository.endsWith(org.ops4j.pax.url.mvn.internal.Parser.FILE_SEPARATOR)) {
repository = repository + org.ops4j.pax.url.mvn.internal.Parser.FILE_SEPARATOR;
}
configuration.enableProxy(repositoryURL);
final URL url = new URL(repository + path);
LOG.trace("Reading " + url.toExternalForm());
return URLUtils.prepareInputStream(url, !configuration.getCertificateCheck());
}
/**
 * Sorting comparator for downloadable artifacts.
 * The sorting is done by:
 * 1. descending version
 * 2. ascending priority.
 */
private static class DownloadComparator
implements Comparator<DownloadableArtifact> {
public int compare(final DownloadableArtifact first,
final DownloadableArtifact second) {
// first descending by version
int result = -1 * first.getVersion().compareTo(second.getVersion());
if (result == 0) {
// then ascending by priority
if (first.getPriority() < second.getPriority()) {
result = -1;
} else if (first.getPriority() > second.getPriority()) {
result = 1;
}
}
return result;
}
}
}
@@ -0,0 +1,189 @@
/**
* Copyright (C) FuseSource, Inc.
* http://fusesource.com
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.fusesource.fabric.service;
import java.util.Dictionary;
import java.util.HashMap;
import java.util.Hashtable;
import java.util.List;
import java.util.Map;
import java.util.Properties;
import java.util.concurrent.CopyOnWriteArrayList;
import org.apache.felix.scr.annotations.Activate;
import org.apache.felix.scr.annotations.Component;
import org.apache.felix.scr.annotations.Deactivate;
import org.apache.felix.scr.annotations.Reference;
import org.apache.felix.scr.annotations.ReferencePolicy;
import org.apache.felix.scr.annotations.Service;
import org.fusesource.fabric.api.DataStore;
import org.fusesource.fabric.api.DataStorePlugin;
import org.fusesource.fabric.api.DataStoreRegistrationHandler;
import org.fusesource.fabric.api.DataStoreTemplate;
import org.fusesource.fabric.api.FabricException;
import org.fusesource.fabric.api.PlaceholderResolver;
import org.osgi.framework.BundleContext;
import org.osgi.framework.ServiceRegistration;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import static org.apache.felix.scr.annotations.ReferenceCardinality.OPTIONAL_MULTIPLE;
import static org.fusesource.fabric.zookeeper.utils.ZooKeeperUtils.getProperties;
/**
 * Manager of {@link DataStore} using configuration to decide which
 * implementation to export.
 * <p>
 * Listens for {@link DataStorePlugin} services; when the plugin matching the
 * configured type is available, its {@link DataStore} is started and
 * registered in the OSGi service registry.
 */
@Component(name = "org.fusesource.fabric.datastore.manager",
        description = "Configured DataStore Factory",
        immediate = true)
@Service(DataStoreRegistrationHandler.class)
public class DataStoreManager implements DataStoreRegistrationHandler {

    private static final transient Logger LOG = LoggerFactory.getLogger(DataStoreManager.class);

    public static final String DATASTORE_TYPE_PID = "org.fusesource.fabric.datastore";
    public static final String DATASTORE_TYPE_PROPERTY = "type";
    public static final String DEFAULT_DATASTORE_TYPE = "git";

    @Reference(cardinality = OPTIONAL_MULTIPLE,
            referenceInterface = DataStorePlugin.class,
            bind = "bindDataStore", unbind = "unbindDataStore",
            policy = ReferencePolicy.DYNAMIC)
    private final Map<String, DataStorePlugin> dataStorePlugins = new HashMap<String, DataStorePlugin>();

    private BundleContext bundleContext;
    private Map<String, String> configuration;
    // Configured datastore type (e.g. "git"); may still be null when a plugin
    // is bound by SCR before activate() has run.
    private String type;
    private DataStore dataStore;
    private Dictionary<String, String> properties = new Hashtable<String, String>();
    private ServiceRegistration<DataStore> registration;
    // Callbacks to run once against the next registered DataStore.
    private final List<DataStoreTemplate> registrationCallbacks = new CopyOnWriteArrayList<DataStoreTemplate>();

    @Activate
    public synchronized void init(BundleContext bundleContext, Map<String, String> configuration) throws Exception {
        this.bundleContext = bundleContext;
        this.configuration = configuration;
        this.type = readType(configuration);
        updateServiceRegistration();
    }

    @Deactivate
    public synchronized void destroy() {
        unregister();
    }

    /**
     * (Re-)registers the {@link DataStore} matching the configured type.
     * Synchronized: mutates {@link #dataStore} and {@link #registration},
     * which every other mutator already guards with {@code synchronized}.
     */
    public synchronized void updateServiceRegistration() {
        unregister();
        if (dataStorePlugins.containsKey(type)) {
            dataStore = dataStorePlugins.get(type).getDataStore();
            Properties dataStoreProperties = new Properties();
            dataStoreProperties.putAll(configuration);
            dataStore.setDataStoreProperties(dataStoreProperties);
            dataStore.start();
            // CopyOnWriteArrayList iterates over a snapshot, so removing while
            // iterating is safe; each callback runs at most once.
            for (DataStoreTemplate callback : registrationCallbacks) {
                registrationCallbacks.remove(callback);
                try {
                    callback.doWith(dataStore);
                } catch (Exception e) {
                    throw new FabricException(e);
                }
            }
            properties.put(DATASTORE_TYPE_PROPERTY, type);
            registration = bundleContext.registerService(DataStore.class, dataStore, properties);
            LOG.info("Registered DataStore " + dataStore + " with " + properties);
        }
    }

    /**
     * Unregisters and stops the currently exported {@link DataStore}, if any.
     */
    private void unregister() {
        if (registration != null) {
            registration.unregister();
            registration = null;
        }
        if (dataStore != null) {
            dataStore.stop();
            // Clear the reference so a subsequent unregister() cannot stop the
            // same store twice.
            dataStore = null;
        }
    }

    /**
     * Extracts the type from the specified map or System configuration.
     *
     * @param configuration The map to use as a source.
     * @return the configured type, falling back to the system property or
     *         {@link #DEFAULT_DATASTORE_TYPE}
     */
    private static String readType(Map<String, String> configuration) {
        if (configuration.containsKey(DATASTORE_TYPE_PROPERTY)) {
            return configuration.get(DATASTORE_TYPE_PROPERTY);
        } else {
            return System.getProperty(DATASTORE_TYPE_PID + "." + DATASTORE_TYPE_PROPERTY, DEFAULT_DATASTORE_TYPE);
        }
    }

    // Properties
    //-------------------------------------------------------------------------

    public BundleContext getBundleContext() {
        return bundleContext;
    }

    public void setBundleContext(BundleContext bundleContext) {
        this.bundleContext = bundleContext;
    }

    public synchronized void bindDataStore(DataStorePlugin dataStorePlugin) {
        if (dataStorePlugin != null) {
            dataStorePlugins.put(dataStorePlugin.getName(), dataStorePlugin);
            // Compare plugin-name-first: 'type' may still be null when SCR
            // binds a plugin before activate() ran (avoids NullPointerException).
            if (dataStorePlugin.getName().equals(type)) {
                updateServiceRegistration();
            }
        }
    }

    public synchronized void unbindDataStore(DataStorePlugin dataStorePlugin) {
        if (dataStorePlugin != null) {
            dataStorePlugins.remove(dataStorePlugin.getName());
            // Null-safe ordering, see bindDataStore.
            if (dataStorePlugin.getName().equals(type)) {
                updateServiceRegistration();
            }
        }
    }

    public Map<String, String> getConfiguration() {
        return configuration;
    }

    public void setConfiguration(Map<String, String> configuration) {
        this.configuration = configuration;
    }

    @Override
    public void addRegistrationCallback(DataStoreTemplate template) {
        this.registrationCallbacks.add(template);
    }

    @Override
    public void removeRegistrationCallback(DataStoreTemplate template) {
        this.registrationCallbacks.remove(template);
    }
}
@@ -0,0 +1,713 @@
/**
* Copyright (C) FuseSource, Inc.
* http://fusesource.com
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.fusesource.fabric.service;
import org.apache.curator.framework.CuratorFramework;
import org.apache.curator.framework.recipes.cache.PathChildrenCacheEvent;
import org.apache.curator.framework.recipes.cache.PathChildrenCacheListener;
import org.apache.curator.framework.recipes.cache.TreeCache;
import org.apache.zookeeper.KeeperException;
import org.fusesource.fabric.api.CreateContainerMetadata;
import org.fusesource.fabric.api.CreateContainerOptions;
import org.fusesource.fabric.api.DataStore;
import org.fusesource.fabric.api.DynamicReference;
import org.fusesource.fabric.api.FabricException;
import org.fusesource.fabric.api.PlaceholderResolver;
import org.fusesource.fabric.api.jcip.GuardedBy;
import org.fusesource.fabric.api.jcip.ThreadSafe;
import org.fusesource.fabric.api.scr.AbstractComponent;
import org.fusesource.fabric.api.scr.InvalidComponentException;
import org.fusesource.fabric.api.scr.ValidatingReference;
import org.fusesource.fabric.internal.DataStoreHelpers;
import org.fusesource.fabric.utils.Base64Encoder;
import org.fusesource.fabric.utils.Closeables;
import org.fusesource.fabric.utils.ObjectUtils;
import org.fusesource.fabric.zookeeper.ZkDefs;
import org.fusesource.fabric.zookeeper.ZkPath;
import org.fusesource.fabric.zookeeper.utils.InterpolationHelper;
import org.osgi.framework.BundleContext;
import org.osgi.framework.FrameworkUtil;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.io.ByteArrayInputStream;
import java.io.IOException;
import java.io.InvalidClassException;
import java.io.ObjectInputStream;
import java.io.ObjectStreamClass;
import java.util.Arrays;
import java.util.Collections;
import java.util.HashMap;
import java.util.LinkedHashSet;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.ConcurrentMap;
import java.util.concurrent.CopyOnWriteArrayList;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import java.util.concurrent.atomic.AtomicBoolean;
import static org.fusesource.fabric.internal.DataStoreHelpers.substituteBundleProperty;
import static org.fusesource.fabric.internal.PlaceholderResolverHelpers.getSchemesForProfileConfigurations;
import static org.fusesource.fabric.internal.PlaceholderResolverHelpers.waitForPlaceHolderResolvers;
import static org.fusesource.fabric.zookeeper.utils.ZooKeeperUtils.deleteSafe;
import static org.fusesource.fabric.zookeeper.utils.ZooKeeperUtils.exists;
import static org.fusesource.fabric.zookeeper.utils.ZooKeeperUtils.getByteData;
import static org.fusesource.fabric.zookeeper.utils.ZooKeeperUtils.getChildren;
import static org.fusesource.fabric.zookeeper.utils.ZooKeeperUtils.getStringData;
import static org.fusesource.fabric.zookeeper.utils.ZooKeeperUtils.getSubstitutedPath;
import static org.fusesource.fabric.zookeeper.utils.ZooKeeperUtils.setData;
/**
 * Base class for {@link DataStore} implementations. Provides the shared
 * ZooKeeper (Curator) plumbing: a TreeCache over the configs subtree,
 * configuration-change callbacks, placeholder substitution, and the
 * container-related registry operations.
 */
@ThreadSafe
public abstract class AbstractDataStore extends AbstractComponent implements DataStore, PathChildrenCacheListener {

    private static final transient Logger LOG = LoggerFactory.getLogger(AbstractDataStore.class);

    public static final String REQUIREMENTS_JSON_PATH = "/fabric/configs/org.fusesource.fabric.requirements.json";
    public static final String JVM_OPTIONS_PATH = "/fabric/configs/org.fusesource.fabric.containers.jvmOptions";

    private final ValidatingReference<CuratorFramework> curator = new ValidatingReference<CuratorFramework>();

    // Single-threaded executors keep callback and cache event processing ordered.
    private final ExecutorService callbacksExecutor = Executors.newSingleThreadExecutor();
    private final ExecutorService cacheExecutor = Executors.newSingleThreadExecutor();
    private final ExecutorService placeholderExecutor = Executors.newCachedThreadPool();

    // Flipped by start()/stop(); guards the treeCache lifecycle.
    private final AtomicBoolean active = new AtomicBoolean(false);

    // Placeholder resolvers keyed by URI scheme (e.g. "zk"); DynamicReference
    // lets consumers wait for a resolver that has not been bound yet.
    @GuardedBy("ConcurrentHashMap") private final ConcurrentMap<String, DynamicReference<PlaceholderResolver>> placeholderResolvers = new ConcurrentHashMap<String, DynamicReference<PlaceholderResolver>>();
    @GuardedBy("CopyOnWriteArrayList") private final CopyOnWriteArrayList<Runnable> callbacks = new CopyOnWriteArrayList<Runnable>();
    @GuardedBy("this") private Map<String, String> dataStoreProperties;
    @GuardedBy("active") private volatile TreeCache treeCache;

    @Override
    public abstract void importFromFileSystem(String from);

    /** Starts the store: builds and starts the TreeCache over the configs path. */
    @Override
    public void start() {
        try {
            if (active.compareAndSet(false, true)) {
                LOG.info("Starting up DataStore " + this);
                treeCache = new TreeCache(getCurator(), ZkPath.CONFIGS.getPath(), true, false, true, cacheExecutor);
                treeCache.start(TreeCache.StartMode.NORMAL);
                treeCache.getListenable().addListener(this);
            }
        } catch (FabricException ex) {
            throw ex;
        } catch (Exception ex) {
            throw new FabricException("Failed to start data store.", ex);
        }
    }

    /** Stops the store: tears down the TreeCache and shuts down all executors. */
    @Override
    public void stop() {
        if (active.compareAndSet(true, false)) {
            treeCache.getListenable().removeListener(this);
            Closeables.closeQuitely(treeCache);
            treeCache = null;
            callbacksExecutor.shutdownNow();
            cacheExecutor.shutdownNow();
            placeholderExecutor.shutdownNow();
        }
    }

    // Returns the cache, failing fast once the store has been stopped.
    protected TreeCache getTreeCache() {
        if (!active.get())
            throw new InvalidComponentException();
        return treeCache;
    }

    /** Returns an unmodifiable view of the properties set via setDataStoreProperties. */
    @Override
    public synchronized Map<String, String> getDataStoreProperties() {
        assertValid();
        return Collections.unmodifiableMap(dataStoreProperties);
    }

    /** Stores a defensive copy of the given properties. */
    @Override
    public synchronized void setDataStoreProperties(Map<String, String> dataStoreProperties) {
        assertValid();
        this.dataStoreProperties = new HashMap<String, String>(dataStoreProperties);
    }

    /**
     * TreeCache listener entry point: any child change under the configs path
     * triggers the registered configuration callbacks.
     */
    @Override
    public void childEvent(CuratorFramework client, PathChildrenCacheEvent event) throws Exception {
        if (isValid()) {
            switch (event.getType()) {
                case CHILD_ADDED:
                case CHILD_REMOVED:
                case CHILD_UPDATED:
                case INITIALIZED:
                    runCallbacks();
                    break;
            }
        }
    }

    // Schedules callback execution on the single-threaded callbacks executor,
    // so callbacks never run concurrently with each other.
    protected void runCallbacks() {
        callbacksExecutor.submit(new Runnable() {
            @Override
            public void run() {
                doRunCallbacks();
            }
        });
    }

    // Runs every registered callback; a failing callback is logged and does
    // not prevent the remaining ones from running.
    protected void doRunCallbacks() {
        assertValid();
        for (Runnable callback : callbacks) {
            try {
                if (LOG.isDebugEnabled()) {
                    LOG.debug("Running callback " + callback);
                }
                callback.run();
            } catch (Throwable e) {
                LOG.warn("Caught: " + e, e);
            }
        }
    }

    /** Registers a callback to run on configuration changes (at most once per callback). */
    @Override
    public void trackConfiguration(Runnable callback) {
        if (isValid()) {
            callbacks.addIfAbsent(callback);
        }
    }

    /** Unregisters a previously tracked configuration callback. */
    @Override
    public void untrackConfiguration(Runnable callback) {
        callbacks.remove(callback);
    }

    // PlaceholderResolver stuff
    //-------------------------------------------------------------------------

    /**
     * Performs substitution to configuration based on the registered {@link PlaceholderResolver} instances.
     * Mutates the inner maps of {@code configs} in place. Blocks until a
     * resolver is available for every scheme referenced by the configurations.
     */
    public void substituteConfigurations(final Map<String, Map<String, String>> configs) {
        assertValid();
        //Check for all required resolver schemes.
        Set<String> requiredSchemes = getSchemesForProfileConfigurations(configs);
        for (String scheme : requiredSchemes) {
            placeholderResolvers.putIfAbsent(scheme, new DynamicReference<PlaceholderResolver>());
        }
        //Wait for resolvers before starting to resolve.
        final Map<String, PlaceholderResolver> availableResolvers = waitForPlaceHolderResolvers(placeholderExecutor, requiredSchemes, getPlaceholderResolvers());
        for (Map.Entry<String, Map<String, String>> entry : configs.entrySet()) {
            final String pid = entry.getKey();
            Map<String, String> props = entry.getValue();
            for (Map.Entry<String, String> e : props.entrySet()) {
                final String key = e.getKey();
                final String value = e.getValue();
                props.put(key, InterpolationHelper.substVars(value, key, null, props, new InterpolationHelper.SubstitutionCallback() {
                    public String getValue(String toSubstitute) {
                        // Values of the form "scheme:rest" are handed to the
                        // resolver registered for that scheme; everything else
                        // falls back to bundle-property substitution.
                        if (toSubstitute != null && toSubstitute.contains(":")) {
                            String scheme = toSubstitute.substring(0, toSubstitute.indexOf(":"));
                            if (availableResolvers.containsKey(scheme)) {
                                return availableResolvers.get(scheme).resolve(pid, key, toSubstitute);
                            }
                        }
                        return substituteBundleProperty(toSubstitute, getBundleContext());
                    }
                }));
            }
        }
    }

    private Map<String, DynamicReference<PlaceholderResolver>> getPlaceholderResolvers() {
        return Collections.unmodifiableMap(placeholderResolvers);
    }

    // Best-effort lookup of our bundle context; returns null outside OSGi.
    private BundleContext getBundleContext() {
        try {
            return FrameworkUtil.getBundle(AbstractDataStore.class).getBundleContext();
        } catch (Throwable t) {
            return null;
        }
    }

    // Container stuff
    //-------------------------------------------------------------------------

    /** Lists the ids of all containers known in the registry. */
    @Override
    public List<String> getContainers() {
        assertValid();
        try {
            return getChildren(getCurator(), ZkPath.CONFIGS_CONTAINERS.getPath());
        } catch (Exception e) {
            throw FabricException.launderThrowable(e);
        }
    }

    @Override
    public boolean hasContainer(String containerId) {
        assertValid();
        return getContainers().contains(containerId);
    }

    /** Returns the parent container id, or "" for root containers / missing nodes. */
    @Override
    public String getContainerParent(String containerId) {
        assertValid();
        try {
            String parentName = getStringData(getCurator(), ZkPath.CONTAINER_PARENT.getPath(containerId));
            return parentName != null ? parentName.trim() : "";
        } catch (KeeperException.NoNodeException e) {
            // Ignore
            return "";
        } catch (Throwable e) {
            throw FabricException.launderThrowable(e);
        }
    }

    /** Removes every registry entry related to the given container, across all versions. */
    @Override
    public void deleteContainer(String containerId) {
        assertValid();
        try {
            if (getCurator() == null) {
                throw new IllegalStateException("Zookeeper service not available");
            }
            //Wipe all config entries that are related to the container for all versions.
            for (String version : getVersions()) {
                deleteSafe(getCurator(), ZkPath.CONFIG_VERSIONS_CONTAINER.getPath(version, containerId));
            }
            deleteSafe(getCurator(), ZkPath.CONFIG_CONTAINER.getPath(containerId));
            deleteSafe(getCurator(), ZkPath.CONTAINER.getPath(containerId));
            deleteSafe(getCurator(), ZkPath.CONTAINER_DOMAINS.getPath(containerId));
            deleteSafe(getCurator(), ZkPath.CONTAINER_PROVISION.getPath(containerId));
        } catch (Exception e) {
            throw FabricException.launderThrowable(e);
        }
    }

    /**
     * Writes the initial registry entries for a container being created:
     * its version, its space-separated profile list, and its parent.
     */
    @Override
    public void createContainerConfig(CreateContainerOptions options) {
        assertValid();
        try {
            String parent = options.getParent();
            String containerId = options.getName();
            String versionId = options.getVersion();
            Set<String> profileIds = options.getProfiles();
            StringBuilder sb = new StringBuilder();
            for (String profileId : profileIds) {
                if (sb.length() > 0) {
                    sb.append(" ");
                }
                sb.append(profileId);
            }
            setData(getCurator(), ZkPath.CONFIG_CONTAINER.getPath(containerId), versionId);
            setData(getCurator(), ZkPath.CONFIG_VERSIONS_CONTAINER.getPath(versionId, containerId), sb.toString());
            setData(getCurator(), ZkPath.CONTAINER_PARENT.getPath(containerId), parent);
        } catch (Exception e) {
            throw FabricException.launderThrowable(e);
        }
    }

    /**
     * Persists the creation metadata of a container: the serialized metadata
     * blob, the per-container configuration entries, and the resolver choice.
     */
    @Override
    public void createContainerConfig(CreateContainerMetadata metadata) {
        assertValid();
        try {
            CreateContainerOptions options = metadata.getCreateOptions();
            String containerId = metadata.getContainerName();
            //        String parent = options.getParent();
            //        String versionId = options.getVersion() != null ? options.getVersion() : getDefaultVersion();
            //        Set<String> profileIds = options.getProfiles();
            //        if (profileIds == null || profileIds.isEmpty()) {
            //            profileIds = new LinkedHashSet<String>();
            //            profileIds.add("default");
            //        }
            //        StringBuilder sb = new StringBuilder();
            //        for (String profileId : profileIds) {
            //            if (sb.length() > 0) {
            //                sb.append(" ");
            //            }
            //            sb.append(profileId);
            //        }
            //
            //        setData(getCurator(), ZkPath.CONFIG_CONTAINER.getPath(containerId), versionId);
            //        setData(getCurator(), ZkPath.CONFIG_VERSIONS_CONTAINER.getPath(versionId, containerId), sb.toString());
            //        setData(getCurator(), ZkPath.CONTAINER_PARENT.getPath(containerId), parent);
            setContainerMetadata(metadata);
            Map<String, String> configuration = metadata.getContainerConfiguration();
            for (Map.Entry<String, String> entry : configuration.entrySet()) {
                String key = entry.getKey();
                String value = entry.getValue();
                setData(getCurator(), ZkPath.CONTAINER_ENTRY.getPath(metadata.getContainerName(), key), value);
            }
            // If no resolver specified but a resolver is already present in the registry, use the registry value
            String resolver = metadata.getOverridenResolver() != null ? metadata.getOverridenResolver() : options.getResolver();
            if (resolver == null && exists(getCurator(), ZkPath.CONTAINER_RESOLVER.getPath(containerId)) != null) {
                resolver = getStringData(getCurator(), ZkPath.CONTAINER_RESOLVER.getPath(containerId));
            } else if (options.getResolver() != null) {
                // Use the resolver specified in the options and do nothing.
            } else if (exists(getCurator(), ZkPath.POLICIES.getPath(ZkDefs.RESOLVER)) != null) {
                // If there is a globlal resolver specified use it.
                resolver = getStringData(getCurator(), ZkPath.POLICIES.getPath(ZkDefs.RESOLVER));
            } else {
                // Fallback to the default resolver
                resolver = ZkDefs.DEFAULT_RESOLVER;
            }
            // Set the resolver if not already set
            setData(getCurator(), ZkPath.CONTAINER_RESOLVER.getPath(containerId), resolver);
        } catch (Exception e) {
            throw FabricException.launderThrowable(e);
        }
    }

    /**
     * Reads back the serialized creation metadata of a container, resolving
     * classes against the supplied class loader. Returns null when the node is
     * missing or the blob cannot be deserialized with the given loader.
     */
    @Override
    public CreateContainerMetadata getContainerMetadata(String containerId, final ClassLoader classLoader) {
        assertValid();
        try {
            byte[] encoded = getByteData(getTreeCache(), ZkPath.CONTAINER_METADATA.getPath(containerId));
            if (encoded == null) {
                return null;
            }
            byte[] decoded = Base64Encoder.decode(encoded);
            ObjectInputStream ois = new ObjectInputStream(new ByteArrayInputStream(decoded)) {
                @Override
                protected Class<?> resolveClass(ObjectStreamClass desc) throws IOException, ClassNotFoundException {
                    // Resolve against the caller's loader, not this bundle's.
                    return classLoader.loadClass(desc.getName());
                }
            };
            return (CreateContainerMetadata) ois.readObject();
        } catch (ClassNotFoundException e) {
            return null;
        } catch (InvalidClassException e) {
            return null;
        } catch (KeeperException.NoNodeException e) {
            return null;
        } catch (Exception e) {
            throw FabricException.launderThrowable(e);
        }
    }

    @Override
    public void setContainerMetadata(CreateContainerMetadata metadata) {
        assertValid();
        //We encode the metadata so that they are more friendly to import/export.
        try {
            setData(getCurator(), ZkPath.CONTAINER_METADATA.getPath(metadata.getContainerName()), Base64Encoder.encode(ObjectUtils.toBytes(metadata)));
        } catch (Exception e) {
            throw FabricException.launderThrowable(e);
        }
    }

    @Override
    public String getContainerVersion(String containerId) {
        assertValid();
        try {
            return getStringData(getTreeCache(), ZkPath.CONFIG_CONTAINER.getPath(containerId));
        } catch (Exception e) {
            throw FabricException.launderThrowable(e);
        }
    }

    /** Moves a container to a new version, carrying its profile list over. */
    @Override
    public void setContainerVersion(String containerId, String versionId) {
        assertValid();
        try {
            String oldVersionId = getStringData(getCurator(), ZkPath.CONFIG_CONTAINER.getPath(containerId));
            String oldProfileIds = getStringData(getCurator(), ZkPath.CONFIG_VERSIONS_CONTAINER.getPath(oldVersionId, containerId));
            setData(getCurator(), ZkPath.CONFIG_VERSIONS_CONTAINER.getPath(versionId, containerId), oldProfileIds);
            setData(getCurator(), ZkPath.CONFIG_CONTAINER.getPath(containerId), versionId);
        } catch (Exception e) {
            throw FabricException.launderThrowable(e);
        }
    }

    /** Returns the container's profile ids (stored as a space-separated string). */
    @Override
    public List<String> getContainerProfiles(String containerId) {
        assertValid();
        try {
            String versionId = getStringData(getTreeCache(), ZkPath.CONFIG_CONTAINER.getPath(containerId));
            String str = getStringData(getTreeCache(), ZkPath.CONFIG_VERSIONS_CONTAINER.getPath(versionId, containerId));
            return str == null || str.isEmpty() ? Collections.<String> emptyList() : Arrays.asList(str.trim().split(" +"));
        } catch (Exception e) {
            throw FabricException.launderThrowable(e);
        }
    }

    @Override
    public void setContainerProfiles(String containerId, List<String> profileIds) {
        assertValid();
        try {
            String versionId = getStringData(getCurator(), ZkPath.CONFIG_CONTAINER.getPath(containerId));
            StringBuilder sb = new StringBuilder();
            for (String profileId : profileIds) {
                if (sb.length() > 0) {
                    sb.append(" ");
                }
                sb.append(profileId);
            }
            setData(getCurator(), ZkPath.CONFIG_VERSIONS_CONTAINER.getPath(versionId, containerId), sb.toString());
        } catch (Exception e) {
            throw FabricException.launderThrowable(e);
        }
    }

    /** A container is alive when its ephemeral "alive" node exists. */
    @Override
    public boolean isContainerAlive(String id) {
        assertValid();
        try {
            return exists(getCurator(), ZkPath.CONTAINER_ALIVE.getPath(id)) != null;
        } catch (KeeperException.NoNodeException e) {
            return false;
        } catch (Exception e) {
            throw FabricException.launderThrowable(e);
        }
    }

    /**
     * Reads a container attribute. Domains is special-cased as a sorted,
     * newline-joined list of child nodes; other attributes map to a single
     * registry node (optionally with placeholder substitution applied).
     */
    @Override
    public String getContainerAttribute(String containerId, ContainerAttribute attribute, String def, boolean mandatory, boolean substituted) {
        assertValid();
        if (attribute == ContainerAttribute.Domains) {
            try {
                List<String> list = getCurator().getChildren().forPath(ZkPath.CONTAINER_DOMAINS.getPath(containerId));
                Collections.sort(list);
                StringBuilder sb = new StringBuilder();
                for (String l : list) {
                    if (sb.length() > 0) {
                        sb.append("\n");
                    }
                    sb.append(l);
                }
                return sb.toString();
            } catch (Exception e) {
                return "";
            }
        } else {
            try {
                if (substituted) {
                    return getSubstitutedPath(getCurator(), getAttributePath(containerId, attribute));
                } else {
                    return getStringData(getCurator(), getAttributePath(containerId, attribute));
                }
            } catch (KeeperException.NoNodeException e) {
                if (mandatory) {
                    throw FabricException.launderThrowable(e);
                }
                return def;
            } catch (Exception e) {
                throw FabricException.launderThrowable(e);
            }
        }
    }

    @Override
    public void setContainerAttribute(String containerId, ContainerAttribute attribute, String value) {
        assertValid();
        // Special case for resolver
        // TODO: we could use a double indirection on the ip so that it does not need to change
        // TODO: something like ${zk:container/${zk:container/resolver}}
        if (attribute == ContainerAttribute.Resolver) {
            try {
                setData(getCurator(), ZkPath.CONTAINER_IP.getPath(containerId), "${zk:" + containerId + "/" + value + "}");
                setData(getCurator(), ZkPath.CONTAINER_RESOLVER.getPath(containerId), value);
            } catch (Exception e) {
                throw FabricException.launderThrowable(e);
            }
        } else {
            try {
                //                if (value == null) {
                //                    deleteSafe(zk, getAttributePath(containerId, attribute));
                //                } else {
                setData(getCurator(), getAttributePath(containerId, attribute), value);
                //                }
            } catch (KeeperException.NoNodeException e) {
                // Ignore
            } catch (Exception e) {
                throw FabricException.launderThrowable(e);
            }
        }
    }

    /**
     * Returns the default version id, lazily creating the registry nodes with
     * {@code ZkDefs.DEFAULT_VERSION} on first access.
     */
    @Override
    public String getDefaultVersion() {
        assertValid();
        try {
            String version = null;
            if (getTreeCache().getCurrentData(ZkPath.CONFIG_DEFAULT_VERSION.getPath()) != null) {
                version = getStringData(getTreeCache(), ZkPath.CONFIG_DEFAULT_VERSION.getPath());
            }
            if (version == null || version.isEmpty()) {
                version = ZkDefs.DEFAULT_VERSION;
                setData(getCurator(), ZkPath.CONFIG_DEFAULT_VERSION.getPath(), version);
                setData(getCurator(), ZkPath.CONFIG_VERSION.getPath(version), (String) null);
            }
            return version;
        } catch (Exception e) {
            throw FabricException.launderThrowable(e);
        }
    }

    @Override
    public void setDefaultVersion(String versionId) {
        assertValid();
        try {
            setData(getCurator(), ZkPath.CONFIG_DEFAULT_VERSION.getPath(), versionId);
        } catch (Exception e) {
            throw FabricException.launderThrowable(e);
        }
    }

    // Profile methods
    //-------------------------------------------------------------------------

    @Override
    public boolean hasProfile(String version, String profile) {
        assertValid();
        return getProfile(version, profile, false) != null;
    }

    // Implementation
    //-------------------------------------------------------------------------

    // Maps a container attribute to its registry node path.
    private String getAttributePath(String containerId, ContainerAttribute attribute) {
        switch (attribute) {
            case BlueprintStatus:
                return ZkPath.CONTAINER_EXTENDER_STATUS.getPath(containerId, "blueprint");
            case SpringStatus:
                return ZkPath.CONTAINER_EXTENDER_STATUS.getPath(containerId, "spring");
            case ProvisionStatus:
                return ZkPath.CONTAINER_PROVISION_RESULT.getPath(containerId);
            case ProvisionException:
                return ZkPath.CONTAINER_PROVISION_EXCEPTION.getPath(containerId);
            case ProvisionList:
                return ZkPath.CONTAINER_PROVISION_LIST.getPath(containerId);
            case Location:
                return ZkPath.CONTAINER_LOCATION.getPath(containerId);
            case GeoLocation:
                return ZkPath.CONTAINER_GEOLOCATION.getPath(containerId);
            case Resolver:
                return ZkPath.CONTAINER_RESOLVER.getPath(containerId);
            case Ip:
                return ZkPath.CONTAINER_IP.getPath(containerId);
            case LocalIp:
                return ZkPath.CONTAINER_LOCAL_IP.getPath(containerId);
            case LocalHostName:
                return ZkPath.CONTAINER_LOCAL_HOSTNAME.getPath(containerId);
            case PublicIp:
                return ZkPath.CONTAINER_PUBLIC_IP.getPath(containerId);
            case PublicHostName:
                return ZkPath.CONTAINER_PUBLIC_HOSTNAME.getPath(containerId);
            case ManualIp:
                return ZkPath.CONTAINER_MANUAL_IP.getPath(containerId);
            case BindAddress:
                return ZkPath.CONTAINER_BINDADDRESS.getPath(containerId);
            case JmxUrl:
                return ZkPath.CONTAINER_JMX.getPath(containerId);
            case JolokiaUrl:
                return ZkPath.CONTAINER_JOLOKIA.getPath(containerId);
            case HttpUrl:
                return ZkPath.CONTAINER_HTTP.getPath(containerId);
            case SshUrl:
                return ZkPath.CONTAINER_SSH.getPath(containerId);
            case PortMin:
                return ZkPath.CONTAINER_PORT_MIN.getPath(containerId);
            case PortMax:
                return ZkPath.CONTAINER_PORT_MAX.getPath(containerId);
            case ProcessId:
                return ZkPath.CONTAINER_PROCESS_ID.getPath(containerId);
            case OpenShift:
                return ZkPath.CONTAINER_OPENSHIFT.getPath(containerId);
            default:
                throw new IllegalArgumentException("Unsupported container attribute " + attribute);
        }
    }

    /**
     * Extracts profile attributes from the agent PID configuration: every key
     * starting with ATTRIBUTE_PREFIX becomes an attribute (prefix stripped).
     */
    @Override
    public Map<String, String> getProfileAttributes(String version, String profile) {
        assertValid();
        Map<String, String> attributes = new HashMap<String, String>();
        Map<String, String> config = getConfiguration(version, profile, AGENT_PID);
        for (Map.Entry<String, String> entry : config.entrySet()) {
            String key = entry.getKey();
            if (key.startsWith(ATTRIBUTE_PREFIX)) {
                String attribute = key.substring(ATTRIBUTE_PREFIX.length());
                String value = entry.getValue();
                attributes.put(attribute, value);
            }
        }
        return attributes;
    }

    @Override
    public void setProfileAttribute(final String version, final String profile, final String key, final String value) {
        assertValid();
        Map<String, String> config = getConfiguration(version, profile, AGENT_PID);
        if (value != null) {
            config.put(ATTRIBUTE_PREFIX + key, value);
        } else {
            // NOTE(review): the put above prefixes the key with ATTRIBUTE_PREFIX
            // but this remove uses the bare key — looks like it should remove
            // ATTRIBUTE_PREFIX + key; confirm before changing.
            config.remove(key);
        }
        setConfiguration(version, profile, AGENT_PID, config);
    }

    /** Parses every "*.properties" file configuration into a pid -> key/value map. */
    @Override
    public Map<String, Map<String, String>> getConfigurations(String version, String profile) {
        assertValid();
        try {
            Map<String, Map<String, String>> configurations = new HashMap<String, Map<String, String>>();
            Map<String, byte[]> configs = getFileConfigurations(version, profile);
            for (Map.Entry<String, byte[]> entry : configs.entrySet()) {
                if (entry.getKey().endsWith(".properties")) {
                    String pid = DataStoreHelpers.stripSuffix(entry.getKey(), ".properties");
                    configurations.put(pid, DataStoreHelpers.toMap(DataStoreHelpers.toProperties(entry.getValue())));
                }
            }
            return configurations;
        } catch (Exception e) {
            throw FabricException.launderThrowable(e);
        }
    }

    public CuratorFramework getCurator() {
        return curator.get();
    }

    // [FIXME] Test case pollutes public API
    public void bindCuratorForTesting(CuratorFramework curator) {
        bindCurator(curator);
    }

    protected void bindCurator(CuratorFramework curator) {
        this.curator.bind(curator);
    }

    protected void unbindCurator(CuratorFramework curator) {
        this.curator.unbind(curator);
    }

    // Resolver bind/unbind callbacks keyed by scheme; putIfAbsent keeps any
    // DynamicReference that consumers are already waiting on.
    protected void bindPlaceholderResolver(PlaceholderResolver resolver) {
        placeholderResolvers.putIfAbsent(resolver.getScheme(), new DynamicReference<PlaceholderResolver>());
        placeholderResolvers.get(resolver.getScheme()).bind(resolver);
    }

    protected void unbindPlaceholderResolver(PlaceholderResolver resolver) {
        placeholderResolvers.get(resolver.getScheme()).unbind();
    }
}
@@ -0,0 +1,399 @@
package org.fusesource.fabric.features;
import java.net.URI;
import java.net.URISyntaxException;
import java.util.Collection;
import java.util.EnumSet;
import java.util.HashMap;
import java.util.HashSet;
import java.util.LinkedHashSet;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.TreeMap;
import org.apache.karaf.features.Feature;
import org.apache.karaf.features.FeaturesService;
import org.apache.karaf.features.Repository;
import org.apache.karaf.features.internal.FeatureValidationUtil;
import org.apache.karaf.features.internal.FeaturesServiceImpl;
import org.apache.karaf.features.internal.RepositoryImpl;
import org.apache.zookeeper.KeeperException;
import org.fusesource.fabric.api.Container;
import org.fusesource.fabric.api.FabricService;
import org.fusesource.fabric.api.Profile;
import org.fusesource.fabric.api.Version;
import org.fusesource.fabric.zookeeper.IZKClient;
import org.fusesource.fabric.zookeeper.ZkPath;
import org.linkedin.zookeeper.client.LifecycleListener;
import org.linkedin.zookeeper.tracker.NodeEvent;
import org.linkedin.zookeeper.tracker.NodeEventsListener;
import org.linkedin.zookeeper.tracker.ZKStringDataReader;
import org.linkedin.zookeeper.tracker.ZooKeeperTreeTracker;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import static org.fusesource.fabric.utils.features.FeatureUtils.search;
/**
* A FeaturesService implementation for Fabric managed containers.
*/
public class FabricFeaturesServiceImpl extends FeaturesServiceImpl implements FeaturesService, NodeEventsListener<String>, LifecycleListener {
private static final Logger LOGGER = LoggerFactory.getLogger(FeaturesService.class);
private FabricService fabricService;
private IZKClient zooKeeper;
private ZooKeeperTreeTracker<String> profilesTracker;
private final Set<Repository> repositories = new HashSet<Repository>();
private final Set<Feature> allfeatures = new HashSet<Feature>();
private final Set<Feature> installed = new HashSet<Feature>();
public void init() throws Exception {
}
public void destroy() throws Exception {
profilesTracker.destroy();
}
@Override
public void onEvents(Collection<NodeEvent<String>> nodeEvents) {
try {
repositories.clear();
listRepositories();
allfeatures.clear();
listFeatures();
installed.clear();
listInstalledFeatures();
} catch (Exception e) {
LOGGER.error("Error while updating FeaturesService information from Fabric Registry.", e);
}
}
@Override
public void onConnected() {
profilesTracker = new ZooKeeperTreeTracker<String>(zooKeeper, new ZKStringDataReader(), ZkPath.CONFIG_VERSIONS.getPath());
try {
profilesTracker.track(this);
} catch (InterruptedException e) {
LOGGER.error("Error while setting tracker for Fabric Features Service.", e);
} catch (KeeperException e) {
LOGGER.error("Error while setting tracker for Fabric Features Service.", e);
}
onEvents(null);
}
@Override
public void onDisconnected() {
}
@Override
public void validateRepository(URI uri) throws Exception {
FeatureValidationUtil.validate(uri);
}
@Override
public void addRepository(URI uri) throws Exception {
addRepository(uri, true);
}
@Override
public void addRepository(URI uri, boolean b) throws Exception {
throw new UnsupportedOperationException(String.format("The container is managed by fabric, please use fabric:profile-edit --repositories %s target-profile instead. See fabric:profile-edit for more information.", uri.toString()));
}
@Override
public void removeRepository(URI uri) throws Exception {
removeRepository(uri, true);
}
@Override
public void removeRepository(URI uri, boolean b) throws Exception {
throw new UnsupportedOperationException(String.format("The container is managed by fabric, please use fabric:profile-edit --delete --repositories %s target-profile instead. See fabric:profile-edit for more information.", uri.toString()));
}
@Override
public void restoreRepository(URI uri) throws Exception {
}
/**
* Lists all {@link Repository} entries found in any {@link Profile} of the current {@link Container} {@link Version}.
*
* @return
*/
@Override
public Repository[] listRepositories() {
if (repositories.isEmpty()) {
Set<String> repositoryUris = new LinkedHashSet<String>();
Container container = fabricService.getCurrentContainer();
Version version = container.getVersion();
Profile[] profiles = fabricService.getProfiles(version.getName());
if (profiles != null) {
for (Profile profile : profiles) {
if (profile.getRepositories() != null) {
for (String uri : profile.getRepositories()) {
repositoryUris.add(uri);
addRepositoryUri(uri, repositoryUris);
}
}
}
}
for (String uri : repositoryUris) {
try {
repositories.add(new RepositoryImpl(new URI(uri)));
} catch (URISyntaxException e) {
LOGGER.debug("Error while adding repository with uri {}.", uri);
}
}
}
return repositories.toArray(new Repository[repositories.size()]);
}
@Override
public void installFeature(String s) throws Exception {
installFeature(s, (EnumSet) null);
}
@Override
public void installFeature(String s, EnumSet<Option> options) throws Exception {
throw new UnsupportedOperationException(String.format("The container is managed by fabric, pleas use fabric:profile-edit --features %s target-profile instead. See fabric:profile-edit for more information.", s));
}
@Override
public void installFeature(String s, String s2) throws Exception {
installFeature(s, s2, null);
}
@Override
public void installFeature(String s, String s2, EnumSet<Option> options) throws Exception {
String featureName = s;
if (s2 != null && s2.equals("0.0.0")) {
featureName = s + "/" + s2;
}
throw new UnsupportedOperationException(String.format("The container is managed by fabric, pleas use fabric:profile-edit --features %s target-profile instead. See fabric:profile-edit for more information.", featureName));
}
@Override
public void installFeature(Feature feature, EnumSet<Option> options) throws Exception {
throw new UnsupportedOperationException(String.format("The container is managed by fabric, pleas use fabric:profile-edit --features %s target-profile instead. See fabric:profile-edit for more information.", feature.getName()));
}
@Override
public void installFeatures(Set<Feature> features, EnumSet<Option> options) throws Exception {
StringBuffer sb = new StringBuffer();
for (Feature feature : features) {
sb.append("--feature ").append(feature.getName());
}
throw new UnsupportedOperationException(String.format("The container is managed by fabric, pleas use fabric:profile-edit --features %s target-profile instead. See fabric:profile-edit for more information.", sb.toString()));
}
@Override
public void uninstallFeature(String s) throws Exception {
throw new UnsupportedOperationException(String.format("The container is managed by fabric, pleas use fabric:profile-edit --delete --features %s target-profile instead. See fabric:profile-edit for more information.", s));
}
@Override
public void uninstallFeature(String s, String s2) throws Exception {
String featureName = s;
if (s2 != null && s2.equals("0.0.0")) {
featureName = s + "/" + s2;
}
throw new UnsupportedOperationException(String.format("The container is managed by fabric, pleas use fabric:profile-edit --features %s target-profile instead. See fabric:profile-edit for more information.", featureName));
}
@Override
public Feature[] listFeatures() throws Exception {
if (allfeatures.isEmpty()) {
Repository[] repositories = listRepositories();
for (Repository repository : repositories) {
for (Feature feature : repository.getFeatures()) {
if (!allfeatures.contains(feature)) {
allfeatures.add(feature);
}
}
}
}
return allfeatures.toArray(new Feature[allfeatures.size()]);
}
@Override
public Feature[] listInstalledFeatures() {
if (installed.isEmpty()) {
try {
Map<String, Map<String, Feature>> allFeatures = getFeatures(listProfileRepositories());
Container container = fabricService.getCurrentContainer();
Profile[] profiles = container.getProfiles();
if (profiles != null) {
for (Profile profile : profiles) {
List<String> featureNames = profile.getFeatures();
for (String featureName : featureNames) {
try {
Feature f;
if (featureName.contains("/")) {
String[] parts = featureName.split("/");
String name = parts[0];
String version = parts[1];
f = allFeatures.get(name).get(version);
} else {
TreeMap<String, Feature> versionMap = (TreeMap<String, Feature>) allFeatures.get(featureName);
f = versionMap.lastEntry().getValue();
}
addFeatures(f, installed);
} catch (Exception ex) {
LOGGER.debug("Error while adding {} to the features list");
}
}
}
}
} catch (Exception e) {
LOGGER.error("Error retrieveing features.", e);
}
}
return installed.toArray(new Feature[installed.size()]);
}
@Override
public boolean isInstalled(Feature feature) {
if (installed.isEmpty()) {
listInstalledFeatures();
}
return installed.contains(feature);
}
protected Map<String, Map<String, Feature>> getFeatures(Repository[] repositories) throws Exception {
Map<String, Map<String, Feature>> features = new HashMap<String, Map<String, Feature>>();
for (Repository repo : repositories) {
for (Feature f : repo.getFeatures()) {
if (features.get(f.getName()) == null) {
Map<String, Feature> versionMap = new TreeMap<String, Feature>();
versionMap.put(f.getVersion(), f);
features.put(f.getName(), versionMap);
} else {
features.get(f.getName()).put(f.getVersion(), f);
}
}
}
return features;
}
/**
* Lists all {@link Repository} enties found in the {@link Profile}s assigned to the current {@link Container}.
*
* @return
*/
private Repository[] listProfileRepositories() {
Set<String> repositoryUris = new LinkedHashSet<String>();
Set<Repository> repositories = new LinkedHashSet<Repository>();
Container container = fabricService.getCurrentContainer();
Set<Profile> profilesWithParents = new HashSet<Profile>();
Profile[] profiles = container.getProfiles();
if (profiles != null) {
for (Profile profile : profiles) {
addProfiles(profile, profilesWithParents);
}
for (Profile profile : profilesWithParents) {
if (profile.getRepositories() != null) {
for (String uri : profile.getRepositories()) {
repositoryUris.add(uri);
addRepositoryUri(uri, repositoryUris);
}
}
}
}
for (String uri : repositoryUris) {
try {
repositories.add(new RepositoryImpl(new URI(uri)));
} catch (URISyntaxException e) {
LOGGER.debug("Error while adding repository with uri {}.", uri);
}
}
return repositories.toArray(new Repository[repositories.size()]);
}
/**
* Adds the {@link URI} of {@link Feature} {@link Repository} and its internals to the set of repositories {@link URI}s.
*
* @param uri
* @param repositoryUris
*/
protected void addRepositoryUri(String uri, Set<String> repositoryUris) {
if (repositoryUris.contains(uri)) {
return;
}
repositoryUris.add(uri);
try {
Repository repository = new RepositoryImpl(new URI(uri));
URI[] internalUris = repository.getRepositories();
if (internalUris != null) {
for (URI u : internalUris) {
addRepositoryUri(u.toString(), repositoryUris);
}
}
} catch (Exception e) {
LOGGER.debug("Error while adding internal repositories of {}.", uri);
}
}
/**
* Adds {@link Profile} and its parents to the set of {@link Profile}s.
*
* @param profile
* @param profiles
*/
protected void addProfiles(Profile profile, Set<Profile> profiles) {
if (profiles.contains(profile)) {
return;
}
profiles.add(profile);
for (Profile parent : profile.getParents()) {
addProfiles(parent, profiles);
}
}
/**
* Adds {@link Feature} and its dependencies to the set of {@link Feature}s.
*
* @param feature
* @param features
*/
protected void addFeatures(Feature feature, Set<Feature> features) {
if (features.contains(feature)) {
return;
}
features.add(feature);
for (Feature dependency : feature.getDependencies()) {
addFeatures(search(dependency.getName(), dependency.getVersion(), repositories), features);
}
}
public FabricService getFabricService() {
return fabricService;
}
public void setFabricService(FabricService fabricService) {
this.fabricService = fabricService;
}
public IZKClient getZooKeeper() {
return zooKeeper;
}
public void setZooKeeper(IZKClient zooKeeper) {
this.zooKeeper = zooKeeper;
}
}
@@ -0,0 +1,158 @@
/**
* Copyright (C) 2011, FuseSource Corp. All rights reserved.
* http://fusesource.com
*
* The software in this package is published under the terms of the
* CDDL license a copy of which has been included with this distribution
* in the license.txt file.
*/
package org.fusesource.fabric.zookeeper.commands;
import org.apache.felix.gogo.commands.Argument;
import org.apache.felix.gogo.commands.Command;
import org.apache.felix.gogo.commands.Option;
import java.io.File;
import java.io.FileWriter;
import java.io.IOException;
import java.util.*;
import java.util.List;
import java.util.regex.Pattern;
import static org.fusesource.fabric.zookeeper.commands.RegexSupport.getPatterns;
import static org.fusesource.fabric.zookeeper.commands.RegexSupport.matches;
import static org.fusesource.fabric.zookeeper.commands.RegexSupport.merge;
@Command(name = "export", scope = "zk", description = "Export data from zookeeper")
public class Export extends ZooKeeperCommandSupport {

    @Argument(description="path of the directory to export to")
    String target = "." + File.separator + "export";

    @Option(name="-f", aliases={"--regex"}, description="regex to filter on what paths to export, can specify this option more than once for additional filters", multiValued=true)
    String regex[];

    @Option(name="-rf", aliases={"--reverse-regex"}, description="regex to filter what paths to exclude from the export, can specify this option more than once for additional filters", multiValued=true)
    String nregex[];

    @Option(name="-p", aliases={"--path"}, description="Top level context to export")
    String topLevel = "/";

    @Option(name="-d", aliases={"--delete"}, description="Clear target directory before exporting (CAUTION! Performs recursive delete!)")
    boolean delete;

    @Option(name="--dry-run", description="Runs the export but instead prints out what's going to happen rather than performing the action")
    boolean dryRun = false;

    // Optional per-user filter files merged into the include/exclude regex lists.
    File ignore = new File(".fabricignore");
    File include = new File(".fabricinclude");

    @Override
    protected Object doExecute() throws Exception {
        if (ignore.exists() && ignore.isFile()) {
            nregex = merge(ignore, nregex);
        }
        if (include.exists() && include.isFile()) {
            regex = merge(include, regex);
        }
        export(topLevel);
        System.out.printf("Export to %s completed successfully\n", target);
        return null;
    }

    /**
     * Recursively deletes a file or directory tree; a non-existent path is a no-op.
     */
    private void delete(File parent) throws Exception {
        if (!parent.exists()) {
            return;
        }
        if (parent.isDirectory()) {
            File[] children = parent.listFiles();
            // listFiles() returns null on I/O error or if the directory vanished.
            if (children != null) {
                for (File f : children) {
                    delete(f);
                }
            }
        }
        parent.delete();
    }

    /**
     * Exports the ZooKeeper subtree rooted at {@code path} into the target directory:
     * nodes with data become {@code .cfg} files, data-less nodes become directories.
     * Honors the include/exclude regex filters and the dry-run flag.
     *
     * @param path the ZooKeeper path to export (normalized to start and end with "/")
     */
    protected void export(String path) throws Exception {
        if (!path.endsWith("/")) {
            path = path + "/";
        }
        if (!path.startsWith("/")) {
            path = "/" + path;
        }
        List<Pattern> include = getPatterns(regex);
        List<Pattern> exclude = getPatterns(nregex);
        List<String> paths = getZooKeeper().getAllChildren(path);
        SortedSet<File> directories = new TreeSet<File>();
        Map<File, String> settings = new HashMap<File, String>();
        for(String p : paths) {
            p = path + p;
            if (!matches(include, p) || matches(exclude, p)) {
                continue;
            }
            byte[] data = getZooKeeper().getData(p);
            if (data != null) {
                settings.put(new File(target + File.separator + p + ".cfg"), new String(data));
            } else {
                directories.add(new File(target + File.separator + p));
            }
        }
        if (delete) {
            if (!dryRun) {
                delete(new File(target));
            } else {
                System.out.printf("Deleting %s and everything under it\n", new File(target));
            }
        }
        for (File d : directories) {
            if (d.exists() && !d.isDirectory()) {
                throw new IllegalArgumentException("Directory " + d + " exists but is not a directory");
            }
            if (!d.exists()) {
                if (!dryRun) {
                    if (!d.mkdirs()) {
                        throw new RuntimeException("Failed to create directory " + d);
                    }
                } else {
                    System.out.printf("Creating directory path : %s\n", d);
                }
            }
        }
        for (File f : settings.keySet()) {
            if (f.exists() && !f.isFile()) {
                throw new IllegalArgumentException("File " + f + " exists but is not a file");
            }
            if (!f.getParentFile().exists()) {
                if (!dryRun) {
                    if (!f.getParentFile().mkdirs()) {
                        throw new RuntimeException("Failed to create directory " + f.getParentFile());
                    }
                } else {
                    // Report the directory being created, not the file inside it.
                    System.out.printf("Creating directory path : %s\n", f.getParentFile());
                }
            }
            if (!f.exists()) {
                try {
                    if (!dryRun) {
                        if (!f.createNewFile()) {
                            throw new RuntimeException("Failed to create file " + f);
                        }
                    } else {
                        System.out.printf("Creating file : %s\n", f);
                    }
                } catch (IOException io) {
                    throw new RuntimeException("Failed to create file " + f + " : " + io);
                }
            }
            if (!dryRun) {
                // Close the writer even if write() throws, so the handle is not leaked.
                FileWriter writer = new FileWriter(f, false);
                try {
                    writer.write(settings.get(f));
                } finally {
                    writer.close();
                }
            } else {
                System.out.printf("Writing value \"%s\" to file : %s\n", settings.get(f), f);
            }
        }
    }
}
@@ -0,0 +1,204 @@
/**
* Copyright (C) 2011, FuseSource Corp. All rights reserved.
* http://fusesource.com
*
* The software in this package is published under the terms of the
* CDDL license a copy of which has been included with this distribution
* in the license.txt file.
*/
package org.fusesource.fabric.zookeeper.commands;
import org.apache.felix.gogo.commands.Argument;
import org.apache.felix.gogo.commands.Command;
import org.apache.felix.gogo.commands.Option;
import org.apache.zookeeper.CreateMode;
import org.apache.zookeeper.ZooDefs;
import java.io.*;
import java.net.URL;
import java.util.*;
import java.util.List;
import java.util.regex.Pattern;
import static org.fusesource.fabric.zookeeper.commands.RegexSupport.getPatterns;
import static org.fusesource.fabric.zookeeper.commands.RegexSupport.matches;
import static org.fusesource.fabric.zookeeper.commands.RegexSupport.merge;
@Command(name = "import", scope = "zk", description = "Import data into zookeeper")
public class Import extends ZooKeeperCommandSupport {

    @Argument(description = "Location of the file or filesystem to load")
    protected String source = "." + File.separator + "import";

    @Option(name="-d", aliases={"--delete"}, description="Delete any paths not in the tree being imported, ignored when importing a properties file (CAUTION!)")
    boolean delete = false;

    @Option(name="-t", aliases={"--target"}, description="Target location in ZooKeeper tree to import to")
    String target = "/";

    @Option(name="-props", aliases={"--properties"}, description="Argument is URL pointing to a properties file")
    boolean properties = false;

    @Option(name="-fs", aliases={"--filesystem"}, description="Argument is the top level directory of a local filesystem tree")
    boolean filesystem = true;

    @Option(name="-f", aliases={"--regex"}, description="regex to filter on what paths to import, can specify this option more than once for additional filters", multiValued=true)
    String regex[];

    @Option(name="-rf", aliases={"--reverse-regex"}, description="regex to filter what paths to exclude, can specify this option more than once for additional filters", multiValued=true)
    protected String[] nregex;

    @Option(name="--dry-run", description="Runs the import but prints out what's going to happen instead of making any changes")
    boolean dryRun = false;

    // Optional per-user filter files merged into the include/exclude regex lists.
    File ignore = new File(".fabricignore");
    File include = new File(".fabricinclude");

    @Override
    protected Object doExecute() throws Exception {
        if (ignore.exists() && ignore.isFile()) {
            nregex = merge(ignore, nregex);
        }
        if (include.exists() && include.isFile()) {
            regex = merge(include, regex);
        }
        // The two source modes are mutually exclusive; an explicit -props wins.
        if (properties == true) {
            filesystem = false;
        }
        if (filesystem == true) {
            properties = false;
        }
        if (properties) {
            readPropertiesFile();
        }
        if (filesystem) {
            readFileSystem();
        }
        System.out.println("Successfully imported settings from " + source);
        return null;
    }

    /**
     * Strips the source prefix and the ".cfg" suffix from a candidate file path.
     */
    private String stripPath(String path) {
        // Pattern.quote: "source" is a filesystem path, not a regex — without quoting,
        // separators ('\' on Windows) and dots would act as regex metacharacters.
        String strs[] = path.split(Pattern.quote(source));
        if (strs.length == 0) {
            return "";
        }
        // Use the same (last) element for both the value and its length; the original
        // mixed strs[strs.length - 1] with strs[1].length().
        String last = strs[strs.length - 1];
        return last.substring(0, last.length() - ".cfg".length());
    }

    /**
     * Builds the ZooKeeper path of {@code current} relative to {@code parent}
     * by walking up the file hierarchy.
     */
    private String buildZKPath(File parent, File current) {
        String rc = "";
        if (current != null && !parent.equals(current)) {
            rc = buildZKPath(parent, current.getParentFile()) + "/" + current.getName();
        }
        return rc;
    }

    /**
     * Recursively collects import candidates: directories map to {@code null} values,
     * {@code .cfg} files map to their contents (suffix stripped from the key).
     */
    private void getCandidates(File parent, File current, Map<String, String> settings) throws Exception {
        if (current.isDirectory()) {
            for (File child : current.listFiles(new FileFilter() {
                @Override
                public boolean accept(File file) {
                    if (file.isDirectory() || file.getName().endsWith(".cfg")) {
                        return true;
                    }
                    return false;
                }
            })) {
                getCandidates(parent, child, settings);
            }
            String p = buildZKPath(parent, current).replaceFirst("/", "");
            settings.put(p, null);
        } else {
            // Read the whole file: InputStream.available() is only an estimate and must
            // not be used to size the buffer; also close the stream even on error.
            BufferedInputStream in = new BufferedInputStream(new FileInputStream(current));
            ByteArrayOutputStream contents = new ByteArrayOutputStream();
            try {
                byte[] buf = new byte[4096];
                int len;
                while ((len = in.read(buf)) != -1) {
                    contents.write(buf, 0, len);
                }
            } finally {
                in.close();
            }
            String p = buildZKPath(parent, current).replaceFirst("/", "");
            if (p.endsWith(".cfg")) {
                p = p.substring(0, p.length() - ".cfg".length());
            }
            settings.put(p, new String(contents.toByteArray()));
        }
    }

    /**
     * Imports a local directory tree into ZooKeeper, applying the include/exclude
     * filters; optionally deletes ZooKeeper paths not present in the imported tree.
     */
    private void readFileSystem() throws Exception {
        Map<String, String> settings = new TreeMap<String, String>();
        File s = new File(source);
        getCandidates(s, s, settings);
        List<Pattern> include = getPatterns(regex);
        List<Pattern> exclude = getPatterns(nregex);
        if (!target.endsWith("/")) {
            target = target + "/";
        }
        if (!target.startsWith("/")) {
            target = "/" + target;
        }
        List<String> paths = new ArrayList<String>();
        for(String key : settings.keySet()) {
            String data = settings.get(key);
            key = target + key;
            paths.add(key);
            if (!matches(include, key) || matches(exclude, key)) {
                continue;
            }
            if (!dryRun) {
                getZooKeeper().createOrSetWithParents(key, data, ZooDefs.Ids.OPEN_ACL_UNSAFE, CreateMode.PERSISTENT);
            } else {
                System.out.printf("Creating path \"%s\" with value \"%s\"\n", key, data);
            }
        }
        if (delete) {
            deletePathsNotIn(paths);
        }
    }

    /**
     * Deletes every ZooKeeper path under the target that is not in {@code paths}.
     */
    private void deletePathsNotIn(List<String> paths) throws Exception {
        List<String> zkPaths = getZooKeeper().getAllChildren(target);
        for (String path : zkPaths) {
            path = "/" + path;
            if (!paths.contains(path)) {
                if (!dryRun) {
                    getZooKeeper().deleteWithChildren(path);
                } else {
                    System.out.printf("Deleting path %s and everything under it\n", path);
                }
            }
        }
    }

    /**
     * Imports key/value pairs from a properties file (referenced by URL) into
     * ZooKeeper, applying the include/exclude filters.
     */
    private void readPropertiesFile() throws Exception {
        List<Pattern> includes = getPatterns(regex);
        List<Pattern> excludes = getPatterns(nregex);
        InputStream in = new BufferedInputStream(new URL(source).openStream());
        try {
            Properties props = new Properties();
            props.load(in);
            for (Enumeration names = props.propertyNames(); names.hasMoreElements();) {
                String name = (String) names.nextElement();
                String value = props.getProperty(name);
                // An empty property means "node without data".
                if (value != null && value.isEmpty()) {
                    value = null;
                }
                if (!name.startsWith("/")) {
                    name = "/" + name;
                }
                name = target + name;
                if (!matches(includes, name) || matches(excludes, name)) {
                    continue;
                }
                if (!dryRun) {
                    getZooKeeper().createOrSetWithParents(name, value, ZooDefs.Ids.OPEN_ACL_UNSAFE, CreateMode.PERSISTENT);
                } else {
                    System.out.printf("Creating path \"%s\" with value \"%s\"\n", name, value);
                }
            }
        } finally {
            // Always release the stream, even if props.load() or ZooKeeper calls fail.
            in.close();
        }
    }
}
@@ -0,0 +1,91 @@
/**
* Copyright (C) FuseSource, Inc.
* http://fusesource.com
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.fusesource.insight.camel.audit;
import org.codehaus.jackson.map.ObjectMapper;
import org.codehaus.jackson.map.SerializationConfig;
import java.text.SimpleDateFormat;
import java.util.Collection;
import java.util.Date;
import java.util.Map;
public final class ScriptUtils {

    // SimpleDateFormat is NOT thread-safe: direct use goes through the
    // synchronized toIso(). The mapper is also configured with this format;
    // presumably Jackson clones the DateFormat per serialization — TODO confirm.
    private static final SimpleDateFormat format;
    private static final ObjectMapper mapper;

    static {
        format = new SimpleDateFormat("yyyy-MM-dd'T'HH:mm:ss.SSSX");
        mapper = new ObjectMapper();
        mapper.setSerializationConfig(
                mapper.getSerializationConfig()
                        .withDateFormat(format)
        );
    }

    // Utility class: no instances.
    private ScriptUtils() {
    }

    /**
     * Formats a date in ISO-8601 form. Synchronized because the shared
     * SimpleDateFormat is not thread-safe.
     */
    public static synchronized String toIso(Date d) {
        return format.format(d);
    }

    /**
     * Serializes an object to JSON: collections and maps are walked recursively,
     * dates use the ISO format, nulls become "null", and anything else is
     * serialized from its toString() representation.
     *
     * @throws IllegalArgumentException if serialization fails
     */
    public static String toJson(Object o) {
        try {
            if (o instanceof Collection) {
                StringBuilder sb = new StringBuilder();
                sb.append("[");
                for (Object c : (Collection) o) {
                    // length() > 1 means at least one element was already appended.
                    if (sb.length() > 1) {
                        sb.append(",");
                    }
                    sb.append(toJson(c));
                }
                sb.append("]");
                return sb.toString();
            } else if (o instanceof Map) {
                StringBuilder sb = new StringBuilder();
                sb.append("{");
                for (Map.Entry<Object, Object> e : ((Map<Object, Object>) o).entrySet()) {
                    if (sb.length() > 1) {
                        sb.append(",");
                    }
                    sb.append(toJson(e.getKey().toString()));
                    sb.append(":");
                    sb.append(toJson(e.getValue()));
                }
                sb.append("}");
                return sb.toString();
            } else if (o == null) {
                return "null";
            } else if (o instanceof Date) {
                return "\"" + toIso((Date) o) + "\"";
            } else {
                return mapper.writeValueAsString(o.toString());
            }
        } catch (Exception e) {
            throw new IllegalArgumentException("Could not serialize " + o, e);
        }
    }

    /**
     * Parses a JSON object string into a Map.
     *
     * @throws IllegalArgumentException if parsing fails
     */
    public static Map parseJson(String str) {
        try {
            return mapper.readValue(str, Map.class);
        } catch (Exception e) {
            throw new IllegalArgumentException("Could not deserialize " + str, e);
        }
    }
}
@@ -0,0 +1,184 @@
/**
* Copyright (C) FuseSource, Inc.
* http://fusesource.com
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.fusesource.fabric.service.jclouds;
import java.io.UnsupportedEncodingException;
import java.net.MalformedURLException;
import java.net.URISyntaxException;
import java.net.URLDecoder;
import java.util.*;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.ConcurrentMap;
import com.google.common.base.Strings;
import com.google.common.collect.ImmutableSet;
import com.google.inject.Module;
import org.fusesource.fabric.api.*;
import org.fusesource.fabric.api.CreateJCloudsContainerOptions;
import org.jclouds.compute.ComputeService;
import org.jclouds.compute.ComputeServiceContext;
import org.jclouds.compute.ComputeServiceContextFactory;
import org.jclouds.compute.RunNodesException;
import org.jclouds.compute.domain.NodeMetadata;
import org.jclouds.compute.domain.TemplateBuilder;
import org.jclouds.compute.options.RunScriptOptions;
import org.jclouds.domain.Credentials;
import org.jclouds.rest.RestContextFactory;
import static org.fusesource.fabric.internal.ContainerProviderUtils.buildStartupScript;
/**
 * A concrete {@link org.fusesource.fabric.api.ContainerProvider} that creates
 * {@link org.fusesource.fabric.api.Container}s via jclouds {@link ComputeService}.
 */
public class JcloudsContainerProvider implements ContainerProvider<CreateJCloudsContainerOptions, CreateJCloudsContainerMetadata> {

    // Provider id -> ComputeService, populated via OSGi bind/unbind callbacks.
    private final ConcurrentMap<String, ComputeService> computeServiceMap = new ConcurrentHashMap<String, ComputeService>();

    public void bind(ComputeService computeService) {
        if(computeService != null) {
            String providerName = computeService.getContext().getProviderSpecificContext().getId();
            if(providerName != null) {
                computeServiceMap.put(providerName,computeService);
            }
        }
    }

    public void unbind(ComputeService computeService) {
        if(computeService != null) {
            String providerName = computeService.getContext().getProviderSpecificContext().getId();
            if(providerName != null) {
                computeServiceMap.remove(providerName);
            }
        }
    }

    public ConcurrentMap<String, ComputeService> getComputeServiceMap() {
        return computeServiceMap;
    }

    /**
     * Provisions one or more cloud nodes per the given options, runs the Fabric
     * startup script on each, and returns the metadata of the created containers.
     * When several containers are requested, their names get a numeric suffix.
     */
    public Set<CreateJCloudsContainerMetadata> create(CreateJCloudsContainerOptions options) throws MalformedURLException, RunNodesException, URISyntaxException, InterruptedException {
        final Set<CreateJCloudsContainerMetadata> result = new LinkedHashSet<CreateJCloudsContainerMetadata>();
        ComputeService computeService = computeServiceMap.get(options.getProviderName());
        if (computeService == null) {
            // No bound service for this provider: build an ad-hoc jclouds context.
            Iterable<? extends Module> modules = ImmutableSet.of();
            Properties props = new Properties();
            props.put("provider", options.getProviderName());
            props.put("identity", options.getIdentity());
            props.put("credential", options.getCredential());
            if (!Strings.isNullOrEmpty(options.getOwner()) && options.getProviderName().equals("aws-ec2")) {
                props.put("jclouds.ec2.ami-owners", options.getOwner());
            }
            RestContextFactory restFactory = new RestContextFactory();
            ComputeServiceContext context = new ComputeServiceContextFactory(restFactory).createContext(options.getProviderName(), options.getIdentity(), options.getCredential(), modules, props);
            computeService = context.getComputeService();
        }
        TemplateBuilder builder = computeService.templateBuilder();
        builder.any();
        switch (options.getInstanceType()) {
            case Smallest:
                builder.smallest();
                break;
            case Biggest:
                builder.biggest();
                break;
            case Fastest:
                builder.fastest();
        }
        if (options.getLocationId() != null) {
            builder.locationId(options.getLocationId());
        }
        if (options.getImageId() != null) {
            builder.imageId(options.getImageId());
        }
        if (options.getHardwareId() != null) {
            builder.hardwareId(options.getHardwareId());
        }
        Set<? extends NodeMetadata> metadatas = computeService.createNodesInGroup(options.getGroup(), options.getNumber(), builder.build());
        // NOTE(review): fixed delay before running scripts — presumably to let the
        // nodes finish booting; confirm whether a readiness check would be better.
        Thread.sleep(5000);
        int suffix = 1;
        if (metadatas != null) {
            for (NodeMetadata nodeMetadata : metadatas) {
                Credentials credentials = null;
                // Some cloud providers do not allow shell access as root, so the user
                // may need to be overridden via the options.
                if (options.getUser() != null) {
                    credentials = new Credentials(options.getUser(), nodeMetadata.getCredentials().credential);
                } else {
                    credentials = nodeMetadata.getCredentials();
                }
                String id = nodeMetadata.getId();
                Set<String> publicAddresses = nodeMetadata.getPublicAddresses();
                String containerName = options.getName();
                if(options.getNumber() > 1) {
                    containerName+=suffix++;
                }
                String script = buildStartupScript(options.name(containerName));
                if (credentials != null) {
                    computeService.runScriptOnNode(id, script, RunScriptOptions.Builder.overrideCredentialsWith(credentials).runAsRoot(false));
                } else {
                    computeService.runScriptOnNode(id, script);
                }
                CreateJCloudsContainerMetadata jCloudsContainerMetadata = new CreateJCloudsContainerMetadata();
                jCloudsContainerMetadata.setNodeId(nodeMetadata.getId());
                jCloudsContainerMetadata.setContainerName(containerName);
                jCloudsContainerMetadata.setPublicAddresses(nodeMetadata.getPublicAddresses());
                jCloudsContainerMetadata.setHostname(nodeMetadata.getHostname());
                result.add(jCloudsContainerMetadata);
            }
        }
        return result;
    }

    /**
     * Parses a query string into a name/value map; parameters without "=" map to null.
     *
     * @throws URISyntaxException if the values cannot be URL-decoded
     */
    public Map<String, String> parseQuery(String uri) throws URISyntaxException {
        //TODO: This is copied from URISupport. We should move URISupport to core so that we don't have to copy stuff around.
        try {
            Map<String, String> rc = new HashMap<String, String>();
            if (uri != null) {
                String[] parameters = uri.split("&");
                for (int i = 0; i < parameters.length; i++) {
                    int p = parameters[i].indexOf("=");
                    if (p >= 0) {
                        String name = URLDecoder.decode(parameters[i].substring(0, p), "UTF-8");
                        String value = URLDecoder.decode(parameters[i].substring(p + 1), "UTF-8");
                        rc.put(name, value);
                    } else {
                        rc.put(parameters[i], null);
                    }
                }
            }
            return rc;
        } catch (UnsupportedEncodingException e) {
            throw (URISyntaxException) new URISyntaxException(e.toString(), "Invalid encoding").initCause(e);
        }
    }
}
@@ -0,0 +1,486 @@
/**
* Copyright (C) FuseSource, Inc.
* http://fusesource.com
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.fusesource.fabric.agent.download;
import java.io.File;
import java.io.FileOutputStream;
import java.io.IOException;
import java.io.InputStream;
import java.io.OutputStream;
import java.net.MalformedURLException;
import java.net.URL;
import java.util.ArrayList;
import java.util.Collections;
import java.util.Comparator;
import java.util.List;
import java.util.Set;
import java.util.TreeSet;
import java.util.concurrent.ExecutorService;
import javax.xml.parsers.ParserConfigurationException;
import org.fusesource.fabric.agent.mvn.DownloadableArtifact;
import org.fusesource.fabric.agent.mvn.MavenConfiguration;
import org.fusesource.fabric.agent.mvn.MavenRepositoryURL;
import org.fusesource.fabric.agent.mvn.Parser;
import org.fusesource.fabric.agent.mvn.Version;
import org.fusesource.fabric.agent.mvn.VersionRange;
import org.fusesource.fabric.agent.utils.URLUtils;
import org.fusesource.fabric.agent.utils.XmlUtils;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.w3c.dom.Document;
import org.w3c.dom.Element;
import org.xml.sax.SAXException;
public class MavenDownloadTask extends AbstractDownloadTask implements Runnable {
/**
* Logger.
*/
private static final Logger LOG = LoggerFactory.getLogger(AbstractDownloadTask.class);
/**
* 2 spaces indent;
*/
private static final String Ix2 = " ";
/**
* 4 spaces indent;
*/
private static final String Ix4 = " ";
private final MavenRepositoryURL system;
private final MavenConfiguration configuration;
/**
 * Creates a download task for a {@code mvn:} URL.
 *
 * @param url           the mvn: URL of the artifact to download
 * @param system        the local (system) Maven repository the artifact is stored into
 * @param configuration the Maven configuration (repositories, proxy settings)
 * @param executor      the executor the task is scheduled on (passed to the superclass)
 */
public MavenDownloadTask(String url, MavenRepositoryURL system, MavenConfiguration configuration, ExecutorService executor) {
super(url, executor);
this.system = system;
this.configuration = configuration;
}
protected File download() throws Exception {
Parser parser = new Parser(url.substring("mvn:".length()));
Set<DownloadableArtifact> downloadables;
if (!parser.getVersion().contains("SNAPSHOT")) {
downloadables = doCollectPossibleDownloads(parser, Collections.singletonList(system));
if (!downloadables.isEmpty()) {
DownloadableArtifact artifact = downloadables.iterator().next();
URL url = artifact.getArtifactURL();
File file = new File(url.getFile());
if (file.exists()) {
return file;
}
}
}
downloadables = collectPossibleDownloads(parser);
if (LOG.isTraceEnabled()) {
LOG.trace("Possible download locations for [" + url + "]");
for (DownloadableArtifact artifact : downloadables) {
LOG.trace(" " + artifact);
}
}
for (DownloadableArtifact artifact : downloadables) {
LOG.trace("Downloading [" + artifact + "]");
try {
configuration.enableProxy(artifact.getArtifactURL());
String repository = system.getFile().getAbsolutePath();
if (!repository.endsWith(Parser.FILE_SEPARATOR)) {
repository = repository + Parser.FILE_SEPARATOR;
}
InputStream is = artifact.getInputStream();
File file = new File(repository + parser.getArtifactPath());
file.getParentFile().mkdirs();
if (!file.getParentFile().isDirectory()) {
throw new IOException("Unable to create directory " + file.getParentFile().toString());
}
File tmp = File.createTempFile("fabric-agent-", null, file.getParentFile());
OutputStream os = new FileOutputStream(tmp);
copy(is, os);
is.close();
os.close();
if (file.exists() && !file.delete()) {
throw new IOException("Unable to delete file: " + file.toString());
}
if (!tmp.renameTo(file)) {
throw new IOException("Unable to rename file " + tmp.toString() + " to " + file.toString());
}
return file;
} catch (IOException ignore) {
// go on with next repository
LOG.debug(Ix2 + "Could not download [" + artifact + "]");
LOG.trace(Ix2 + "Reason [" + ignore.getClass().getName() + ": " + ignore.getMessage() + "]");
}
}
// no artifact found
throw new IOException("URL [" + url + "] could not be resolved.");
}
/**
* Searches all available repositories for possible artifacts to download. The returned set of downloadable
* artifacts (never null, but maybe empty) will be sorted descending by version of the artifact and by positon of
* repository in the list of repositories to be searched.
*
* @return a non null sorted set of artifacts
* @throws java.net.MalformedURLException re-thrown
*/
private Set<DownloadableArtifact> collectPossibleDownloads(final Parser parser)
throws MalformedURLException {
final List<MavenRepositoryURL> repositories = new ArrayList<MavenRepositoryURL>();
repositories.addAll(configuration.getRepositories());
repositories.add(system);
repositories.add(configuration.getLocalRepository());
// if the url contains a prefered repository add that repository as the first repository to be searched
if (parser.getRepositoryURL() != null) {
repositories.add(
repositories.size() == 0 ? 0 : 1,
parser.getRepositoryURL()
);
}
return doCollectPossibleDownloads(parser, repositories);
}
/**
* Search the default repositories for possible artifacts to download.
*/
private Set<DownloadableArtifact> collectDefaultPossibleDownloads(final Parser parser)
throws MalformedURLException {
return doCollectPossibleDownloads(parser, configuration.getDefaultRepositories());
}
private Set<DownloadableArtifact> doCollectPossibleDownloads(final Parser parser,
final List<MavenRepositoryURL> repositories)
throws MalformedURLException {
final Set<DownloadableArtifact> downloadables = new TreeSet<DownloadableArtifact>(new DownloadComparator());
// find artifact type
final boolean isLatest = parser.getVersion().contains("LATEST");
final boolean isSnapshot = parser.getVersion().endsWith("SNAPSHOT");
VersionRange versionRange = null;
if (!isLatest && !isSnapshot) {
try {
versionRange = new VersionRange(parser.getVersion());
} catch (Exception ignore) {
// well, we do not have a range of versions
}
}
final boolean isVersionRange = versionRange != null;
final boolean isExactVersion = !(isLatest || isSnapshot || isVersionRange);
int priority = 0;
for (MavenRepositoryURL repositoryURL : repositories) {
LOG.debug("Collecting versions from repository [" + repositoryURL + "]");
priority++;
try {
if (isExactVersion) {
downloadables.add(resolveExactVersion(parser, repositoryURL, priority));
} else if (isSnapshot) {
final DownloadableArtifact snapshot =
resolveSnapshotVersion(parser, repositoryURL, priority, parser.getVersion());
downloadables.add(snapshot);
// if we have a local built snapshot we skip the rest of repositories
if (snapshot.isLocalSnapshotBuild()) {
break;
}
} else {
final Document metadata = getMetadata(repositoryURL.getURL(),
new String[]
{
parser.getArtifactLocalMetdataPath(),
parser.getArtifactMetdataPath()
}
);
if (isLatest) {
downloadables.add(resolveLatestVersion(parser, metadata, repositoryURL, priority));
} else {
downloadables.addAll(resolveRangeVersions(parser, metadata, repositoryURL, priority, versionRange));
}
}
} catch (IOException ignore) {
// if metadata cannot be found we go on with the next repository. Maybe we have better luck.
LOG.debug(Ix2 + "Skipping repository [" + repositoryURL + "], reason: " + ignore.getMessage());
}
}
return downloadables;
}
/**
* Returns maven metadata by looking first for a local metatdata xml file and then for a remote one.
* If no metadata file is found or cannot be used an IOException is thrown.
*
* @param repositoryURL url of the repository from where the metadata should be parsed
* @param metadataLocations array of location paths to try as metadata
* @return parsed xml document for the metadata file
* @throws java.io.IOException if:
* metadata file cannot be located
*/
private Document getMetadata(final URL repositoryURL,
final String[] metadataLocations)
throws IOException {
LOG.debug(Ix2 + "Resolving metadata");
InputStream inputStream = null;
String foundLocation = null;
for (String location : metadataLocations) {
try {
// first try to get the artifact local metadata
inputStream = prepareInputStream(repositoryURL, location);
// get out at first found location
foundLocation = location;
LOG.trace(Ix4 + "Metadata found: [" + location + "]");
break;
} catch (IOException ignore) {
LOG.trace(Ix4 + "Metadata not found: [" + location + "]");
}
}
if (inputStream == null) {
throw new IOException("Metadata not found in repository [" + repositoryURL + "]");
}
try {
return XmlUtils.parseDoc(inputStream);
} catch (ParserConfigurationException e) {
throw initIOException("Metadata [" + foundLocation + "] could not be parsed.", e);
} catch (SAXException e) {
throw initIOException("Metadata [" + foundLocation + "] could not be parsed.", e);
}
}
/**
* Returns a downloadable artifact where the version is fully specified.
*
* @param repositoryURL the url of the repository to download from
* @param priority repository priority
* @return a downloadable artifact
* @throws IOException re-thrown
*/
private DownloadableArtifact resolveExactVersion(final Parser parser,
final MavenRepositoryURL repositoryURL,
final int priority)
throws IOException {
if (!repositoryURL.isReleasesEnabled()) {
throw new IOException("Releases not enabled");
}
LOG.debug(Ix2 + "Resolving exact version");
return new DownloadableArtifact(
parser.getVersion(),
priority,
repositoryURL.getURL(),
parser.getArtifactPath(),
false, // no local built snapshot
configuration.getCertificateCheck()
);
}
/**
* Resolves the latest version of the artifact.
*
* @param metadata parsed metadata xml
* @param repositoryURL the url of the repository to download from
* @param priority repository priority
* @return a downloadable artifact or throw an IOException if latest version cannot be determined.
* @throws IOException if the artifact could not be resolved
*/
private DownloadableArtifact resolveLatestVersion(final Parser parser,
final Document metadata,
final MavenRepositoryURL repositoryURL,
final int priority)
throws IOException {
LOG.debug(Ix2 + "Resolving latest version");
final String version = XmlUtils.getTextContentOfElement(metadata, "versioning/versions/version[last]");
if (version != null) {
if (version.endsWith("SNAPSHOT")) {
return resolveSnapshotVersion(parser, repositoryURL, priority, version);
} else {
return new DownloadableArtifact(
version,
priority,
repositoryURL.getURL(),
parser.getArtifactPath(version),
false, // no local built snapshot
configuration.getCertificateCheck()
);
}
}
throw new IOException("LATEST version could not be resolved.");
}
/**
* Resolves snapshot version of the artifact.
* Snapshot versions are resolved by parsing the metadata within the directory that contains the version as:
* 1. if the metadata containes entries like "versioning/snapshot/timestamp (most likely on remote repos) it will
* use the timestamp and buildnumber to point the real version
* 2. if the metatdata does not contain the above (most likely a local repo) it will use as version the
* versioning/lastUpdated
*
* @param repositoryURL the url of the repository to download from
* @param priority repository priority
* @param version snapshot version to resolve
* @return an input stream to the artifact
* @throws IOException if the artifact could not be resolved
*/
private DownloadableArtifact resolveSnapshotVersion(final Parser parser,
final MavenRepositoryURL repositoryURL,
final int priority,
final String version)
throws IOException {
if (!repositoryURL.isSnapshotsEnabled()) {
throw new IOException("Snapshots not enabled");
}
LOG.debug(Ix2 + "Resolving snapshot version [" + version + "]");
try {
final Document snapshotMetadata = getMetadata(repositoryURL.getURL(),
new String[]
{
parser.getVersionLocalMetadataPath(version),
parser.getVersionMetadataPath(version)
}
);
final String timestamp =
XmlUtils.getTextContentOfElement(snapshotMetadata, "versioning/snapshot/timestamp");
final String buildNumber =
XmlUtils.getTextContentOfElement(snapshotMetadata, "versioning/snapshot/buildNumber");
final String localSnapshot =
XmlUtils.getTextContentOfElement(snapshotMetadata, "versioning/snapshot/localCopy");
if (timestamp != null && buildNumber != null) {
return new DownloadableArtifact(
parser.getSnapshotVersion(version, timestamp, buildNumber),
priority,
repositoryURL.getURL(),
parser.getSnapshotPath(version, timestamp, buildNumber),
localSnapshot != null,
configuration.getCertificateCheck()
);
} else {
String lastUpdated = XmlUtils.getTextContentOfElement(snapshotMetadata, "versioning/lastUpdated");
if (lastUpdated != null) {
// last updated should contain in the first 8 chars the date and then the time,
// fact that is not compatible with timeStamp from remote repos which has a "." after date
if (lastUpdated.length() > 8) {
lastUpdated = lastUpdated.substring(0, 8) + "." + lastUpdated.substring(8);
return new DownloadableArtifact(
parser.getSnapshotVersion(version, lastUpdated, "0"),
priority,
repositoryURL.getURL(),
parser.getArtifactPath(version),
localSnapshot != null,
configuration.getCertificateCheck()
);
}
}
}
} catch (IOException ignore) {
// in this case we could not find any metadata so try to get the *-SNAPSHOT file directly
}
return new DownloadableArtifact(
parser.getVersion(),
priority,
repositoryURL.getURL(),
parser.getArtifactPath(),
false, // no local built snapshot
configuration.getCertificateCheck()
);
}
/**
* Resolves all versions that fits the provided range.
*
* @param metadata parsed metadata xml
* @param repositoryURL the url of the repository to download from
* @param priority repository priority
* @param versionRange version range to fulfill
* @return list of downloadable artifacts that match the range
* @throws IOException re-thrown
*/
private List<DownloadableArtifact> resolveRangeVersions(final Parser parser,
final Document metadata,
final MavenRepositoryURL repositoryURL,
final int priority,
final VersionRange versionRange)
throws IOException {
LOG.debug(Ix2 + "Resolving versions in range [" + versionRange + "]");
final List<DownloadableArtifact> downladables = new ArrayList<DownloadableArtifact>();
final List<Element> elements = XmlUtils.getElements(metadata, "versioning/versions/version");
if (elements != null && elements.size() > 0) {
for (Element element : elements) {
final String versionString = XmlUtils.getTextContent(element);
if (versionString != null) {
final Version version = new Version(versionString);
if (versionRange.includes(version)) {
if (versionString.endsWith("SNAPSHOT")) {
downladables.add(
resolveSnapshotVersion(parser, repositoryURL, priority, versionString)
);
} else {
downladables.add(
new DownloadableArtifact(
versionString,
priority,
repositoryURL.getURL(),
parser.getArtifactPath(versionString),
false, // no local built snapshot
configuration.getCertificateCheck()
)
);
}
}
}
}
}
return downladables;
}
/**
* @param repositoryURL url to reporsitory
* @param path a path to the artifact jar file
* @return prepared input stream
* @throws IOException re-thrown
* @see org.ops4j.net.URLUtils#prepareInputStream(java.net.URL, boolean)
*/
private InputStream prepareInputStream(URL repositoryURL, final String path)
throws IOException {
String repository = repositoryURL.toExternalForm();
if (!repository.endsWith(org.ops4j.pax.url.mvn.internal.Parser.FILE_SEPARATOR)) {
repository = repository + org.ops4j.pax.url.mvn.internal.Parser.FILE_SEPARATOR;
}
configuration.enableProxy(repositoryURL);
final URL url = new URL(repository + path);
LOG.trace("Reading " + url.toExternalForm());
return URLUtils.prepareInputStream(url, !configuration.getCertificateCheck());
}
/**
* Sorting comparator for downladable artifacts.
* The sorting is done by:
* 1. descending version
* 2. ascending priority.
*/
private static class DownloadComparator
implements Comparator<DownloadableArtifact> {
public int compare(final DownloadableArtifact first,
final DownloadableArtifact second) {
// first descending by version
int result = -1 * first.getVersion().compareTo(second.getVersion());
if (result == 0) {
// then ascending by priority
if (first.getPriority() < second.getPriority()) {
result = -1;
} else if (first.getPriority() > second.getPriority()) {
result = 1;
}
}
return result;
}
}
}
@@ -0,0 +1,189 @@
/**
* Copyright (C) FuseSource, Inc.
* http://fusesource.com
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.fusesource.fabric.service;
import java.util.Dictionary;
import java.util.HashMap;
import java.util.Hashtable;
import java.util.List;
import java.util.Map;
import java.util.Properties;
import java.util.concurrent.CopyOnWriteArrayList;
import org.apache.felix.scr.annotations.Activate;
import org.apache.felix.scr.annotations.Component;
import org.apache.felix.scr.annotations.Deactivate;
import org.apache.felix.scr.annotations.Reference;
import org.apache.felix.scr.annotations.ReferencePolicy;
import org.apache.felix.scr.annotations.Service;
import org.fusesource.fabric.api.DataStore;
import org.fusesource.fabric.api.DataStorePlugin;
import org.fusesource.fabric.api.DataStoreRegistrationHandler;
import org.fusesource.fabric.api.DataStoreTemplate;
import org.fusesource.fabric.api.FabricException;
import org.fusesource.fabric.api.PlaceholderResolver;
import org.osgi.framework.BundleContext;
import org.osgi.framework.ServiceRegistration;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import static org.apache.felix.scr.annotations.ReferenceCardinality.OPTIONAL_MULTIPLE;
import static org.fusesource.fabric.zookeeper.utils.ZooKeeperUtils.getProperties;
/**
 * Manager of {@link DataStore} using configuration to decide which
 * implementation to export.
 */
@Component(name = "org.fusesource.fabric.datastore.manager",
        description = "Configured DataStore Factory",
        immediate = true)
@Service(DataStoreRegistrationHandler.class)
public class DataStoreManager implements DataStoreRegistrationHandler {

    private static final transient Logger LOG = LoggerFactory.getLogger(DataStoreManager.class);

    public static final String DATASTORE_TYPE_PID = "org.fusesource.fabric.datastore";
    public static final String DATASTORE_TYPE_PROPERTY = "type";
    public static final String DEFAULT_DATASTORE_TYPE = "git";

    // Available plugins keyed by plugin name (== datastore type), maintained by
    // the SCR bind/unbind callbacks below.
    @Reference(cardinality = OPTIONAL_MULTIPLE,
            referenceInterface = DataStorePlugin.class,
            bind = "bindDataStore", unbind = "unbindDataStore",
            policy = ReferencePolicy.DYNAMIC)
    private final Map<String, DataStorePlugin> dataStorePlugins = new HashMap<String, DataStorePlugin>();

    private BundleContext bundleContext;
    private Map<String, String> configuration;
    // Configured datastore type (e.g. "git"); selects which plugin is exported.
    private String type;
    // Currently exported store, if any; null when no matching plugin is bound.
    private DataStore dataStore;
    private Dictionary<String, String> properties = new Hashtable<String, String>();
    private ServiceRegistration<DataStore> registration;
    // Callbacks to run once against the DataStore when it gets registered.
    private final List<DataStoreTemplate> registrationCallbacks = new CopyOnWriteArrayList<DataStoreTemplate>();

    @Activate
    public synchronized void init(BundleContext bundleContext, Map<String, String> configuration) throws Exception {
        this.bundleContext = bundleContext;
        this.configuration = configuration;
        this.type = readType(configuration);
        updateServiceRegistration();
    }

    @Deactivate
    public synchronized void destroy() {
        unregister();
    }

    /**
     * (Re-)exports the {@link DataStore} matching the configured type, if the
     * corresponding plugin is bound. Any previous registration is torn down first.
     * Synchronized for consistency with init/destroy/bind/unbind, which guard the
     * same mutable state.
     */
    public synchronized void updateServiceRegistration() {
        unregister();
        if (dataStorePlugins.containsKey(type)) {
            dataStore = dataStorePlugins.get(type).getDataStore();
            Properties dataStoreProperties = new Properties();
            dataStoreProperties.putAll(configuration);
            dataStore.setDataStoreProperties(dataStoreProperties);
            dataStore.start();
            // Each callback runs at most once: remove it before invoking so a
            // later re-registration does not replay it.
            for (DataStoreTemplate callback : registrationCallbacks) {
                registrationCallbacks.remove(callback);
                try {
                    callback.doWith(dataStore);
                } catch (Exception e) {
                    throw new FabricException(e);
                }
            }
            properties.put(DATASTORE_TYPE_PROPERTY, type);
            registration = bundleContext.registerService(DataStore.class, dataStore, properties);
            LOG.info("Registered DataStore " + dataStore + " with " + properties);
        }
    }

    /**
     * Unregisters the {@link DataStore}.
     */
    private void unregister() {
        if (registration != null) {
            registration.unregister();
            registration = null;
        }
        if (dataStore != null) {
            dataStore.stop();
            // Drop the reference so a subsequent unregister() cannot stop an
            // already-stopped store a second time.
            dataStore = null;
        }
    }

    /**
     * Extracts the type from the specified map or System configuration.
     * @param configuration The map to use as a source.
     * @return the configured datastore type, falling back to the
     *         {@code org.fusesource.fabric.datastore.type} system property and
     *         finally to {@link #DEFAULT_DATASTORE_TYPE}
     */
    private static String readType(Map<String, String> configuration) {
        if (configuration.containsKey(DATASTORE_TYPE_PROPERTY)) {
            return configuration.get(DATASTORE_TYPE_PROPERTY);
        } else {
            return System.getProperty(DATASTORE_TYPE_PID + "." + DATASTORE_TYPE_PROPERTY, DEFAULT_DATASTORE_TYPE);
        }
    }

    // Properties
    //-------------------------------------------------------------------------

    public BundleContext getBundleContext() {
        return bundleContext;
    }

    public void setBundleContext(BundleContext bundleContext) {
        this.bundleContext = bundleContext;
    }

    public synchronized void bindDataStore(DataStorePlugin dataStorePlugin) {
        if (dataStorePlugin != null) {
            dataStorePlugins.put(dataStorePlugin.getName(), dataStorePlugin);
            // If the newly bound plugin is the configured one, export it now.
            if (type != null && type.equals(dataStorePlugin.getName())) {
                updateServiceRegistration();
            }
        }
    }

    public synchronized void unbindDataStore(DataStorePlugin dataStorePlugin) {
        if (dataStorePlugin != null) {
            dataStorePlugins.remove(dataStorePlugin.getName());
            // If the departing plugin is the exported one, tear the export down.
            if (type != null && type.equals(dataStorePlugin.getName())) {
                updateServiceRegistration();
            }
        }
    }

    public Map<String, String> getConfiguration() {
        return configuration;
    }

    public void setConfiguration(Map<String, String> configuration) {
        this.configuration = configuration;
    }

    @Override
    public void addRegistrationCallback(DataStoreTemplate template) {
        this.registrationCallbacks.add(template);
    }

    @Override
    public void removeRegistrationCallback(DataStoreTemplate template) {
        this.registrationCallbacks.remove(template);
    }
}
@@ -0,0 +1,713 @@
/**
* Copyright (C) FuseSource, Inc.
* http://fusesource.com
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.fusesource.fabric.service;
import org.apache.curator.framework.CuratorFramework;
import org.apache.curator.framework.recipes.cache.PathChildrenCacheEvent;
import org.apache.curator.framework.recipes.cache.PathChildrenCacheListener;
import org.apache.curator.framework.recipes.cache.TreeCache;
import org.apache.zookeeper.KeeperException;
import org.fusesource.fabric.api.CreateContainerMetadata;
import org.fusesource.fabric.api.CreateContainerOptions;
import org.fusesource.fabric.api.DataStore;
import org.fusesource.fabric.api.DynamicReference;
import org.fusesource.fabric.api.FabricException;
import org.fusesource.fabric.api.PlaceholderResolver;
import org.fusesource.fabric.api.jcip.GuardedBy;
import org.fusesource.fabric.api.jcip.ThreadSafe;
import org.fusesource.fabric.api.scr.AbstractComponent;
import org.fusesource.fabric.api.scr.InvalidComponentException;
import org.fusesource.fabric.api.scr.ValidatingReference;
import org.fusesource.fabric.internal.DataStoreHelpers;
import org.fusesource.fabric.utils.Base64Encoder;
import org.fusesource.fabric.utils.Closeables;
import org.fusesource.fabric.utils.ObjectUtils;
import org.fusesource.fabric.zookeeper.ZkDefs;
import org.fusesource.fabric.zookeeper.ZkPath;
import org.fusesource.fabric.zookeeper.utils.InterpolationHelper;
import org.osgi.framework.BundleContext;
import org.osgi.framework.FrameworkUtil;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.io.ByteArrayInputStream;
import java.io.IOException;
import java.io.InvalidClassException;
import java.io.ObjectInputStream;
import java.io.ObjectStreamClass;
import java.util.Arrays;
import java.util.Collections;
import java.util.HashMap;
import java.util.LinkedHashSet;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.ConcurrentMap;
import java.util.concurrent.CopyOnWriteArrayList;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import java.util.concurrent.atomic.AtomicBoolean;
import static org.fusesource.fabric.internal.DataStoreHelpers.substituteBundleProperty;
import static org.fusesource.fabric.internal.PlaceholderResolverHelpers.getSchemesForProfileConfigurations;
import static org.fusesource.fabric.internal.PlaceholderResolverHelpers.waitForPlaceHolderResolvers;
import static org.fusesource.fabric.zookeeper.utils.ZooKeeperUtils.deleteSafe;
import static org.fusesource.fabric.zookeeper.utils.ZooKeeperUtils.exists;
import static org.fusesource.fabric.zookeeper.utils.ZooKeeperUtils.getByteData;
import static org.fusesource.fabric.zookeeper.utils.ZooKeeperUtils.getChildren;
import static org.fusesource.fabric.zookeeper.utils.ZooKeeperUtils.getStringData;
import static org.fusesource.fabric.zookeeper.utils.ZooKeeperUtils.getSubstitutedPath;
import static org.fusesource.fabric.zookeeper.utils.ZooKeeperUtils.setData;
@ThreadSafe
public abstract class AbstractDataStore extends AbstractComponent implements DataStore, PathChildrenCacheListener {
private static final transient Logger LOG = LoggerFactory.getLogger(AbstractDataStore.class);
public static final String REQUIREMENTS_JSON_PATH = "/fabric/configs/org.fusesource.fabric.requirements.json";
public static final String JVM_OPTIONS_PATH = "/fabric/configs/org.fusesource.fabric.containers.jvmOptions";
private final ValidatingReference<CuratorFramework> curator = new ValidatingReference<CuratorFramework>();
private final ExecutorService callbacksExecutor = Executors.newSingleThreadExecutor();
private final ExecutorService cacheExecutor = Executors.newSingleThreadExecutor();
private final ExecutorService placeholderExecutor = Executors.newCachedThreadPool();
private final AtomicBoolean active = new AtomicBoolean(false);
@GuardedBy("ConcurrentHashMap") private final ConcurrentMap<String, DynamicReference<PlaceholderResolver>> placeholderResolvers = new ConcurrentHashMap<String, DynamicReference<PlaceholderResolver>>();
@GuardedBy("CopyOnWriteArrayList") private final CopyOnWriteArrayList<Runnable> callbacks = new CopyOnWriteArrayList<Runnable>();
@GuardedBy("this") private Map<String, String> dataStoreProperties;
@GuardedBy("active") private volatile TreeCache treeCache;
@Override
public abstract void importFromFileSystem(String from);
@Override
public void start() {
    // Idempotent: only the first inactive -> active transition does any work.
    if (!active.compareAndSet(false, true)) {
        return;
    }
    try {
        LOG.info("Starting up DataStore " + this);
        treeCache = new TreeCache(getCurator(), ZkPath.CONFIGS.getPath(), true, false, true, cacheExecutor);
        treeCache.start(TreeCache.StartMode.NORMAL);
        treeCache.getListenable().addListener(this);
    } catch (FabricException ex) {
        // Already a domain exception -- propagate untouched.
        throw ex;
    } catch (Exception ex) {
        throw new FabricException("Failed to start data store.", ex);
    }
}
@Override
public void stop() {
    // Idempotent: only the first active -> inactive transition tears down.
    if (!active.compareAndSet(true, false)) {
        return;
    }
    treeCache.getListenable().removeListener(this);
    Closeables.closeQuitely(treeCache);
    treeCache = null;
    // Stop all background work owned by this component.
    callbacksExecutor.shutdownNow();
    cacheExecutor.shutdownNow();
    placeholderExecutor.shutdownNow();
}
/** Returns the config tree cache, rejecting access once the store is stopped. */
protected TreeCache getTreeCache() {
    if (active.get()) {
        return treeCache;
    }
    throw new InvalidComponentException();
}
// Returns a read-only view of the datastore configuration; the backing map is
// only ever replaced wholesale by the setter below (both are synchronized).
@Override
public synchronized Map<String, String> getDataStoreProperties() {
assertValid();
return Collections.unmodifiableMap(dataStoreProperties);
}
// Stores the supplied configuration. A defensive copy is taken so later
// mutation of the caller's map cannot leak into this component.
@Override
public synchronized void setDataStoreProperties(Map<String, String> dataStoreProperties) {
assertValid();
this.dataStoreProperties = new HashMap<String, String>(dataStoreProperties);
}
@Override
public void childEvent(CuratorFramework client, PathChildrenCacheEvent event) throws Exception {
    if (!isValid()) {
        return;
    }
    // Any structural or content change under the config tree (or the initial
    // population of the cache) re-runs the registered callbacks.
    PathChildrenCacheEvent.Type type = event.getType();
    boolean relevant = type == PathChildrenCacheEvent.Type.CHILD_ADDED
            || type == PathChildrenCacheEvent.Type.CHILD_REMOVED
            || type == PathChildrenCacheEvent.Type.CHILD_UPDATED
            || type == PathChildrenCacheEvent.Type.INITIALIZED;
    if (relevant) {
        runCallbacks();
    }
}
/** Schedules the callbacks on the single-threaded executor, off the ZK event thread. */
protected void runCallbacks() {
    Runnable task = new Runnable() {
        @Override
        public void run() {
            doRunCallbacks();
        }
    };
    callbacksExecutor.submit(task);
}

/** Runs every tracked callback; one failing callback never blocks the rest. */
protected void doRunCallbacks() {
    assertValid();
    for (Runnable cb : callbacks) {
        try {
            if (LOG.isDebugEnabled()) {
                LOG.debug("Running callback " + cb);
            }
            cb.run();
        } catch (Throwable e) {
            LOG.warn("Caught: " + e, e);
        }
    }
}
// Registers a callback to be re-run on config-tree changes. addIfAbsent keeps
// a callback from being registered (and therefore invoked) twice. New
// registrations are refused once the component is no longer valid.
@Override
public void trackConfiguration(Runnable callback) {
if (isValid()) {
callbacks.addIfAbsent(callback);
}
}
// Stops notifying the given callback; safe to call even if it was never tracked.
@Override
public void untrackConfiguration(Runnable callback) {
callbacks.remove(callback);
}
// PlaceholderResolver stuff
//-------------------------------------------------------------------------
/**
* Performs substitution to configuration based on the registered {@link PlaceholderResolver} instances.
* Values in {@code configs} are substituted in place. Placeholders of the form
* scheme:rest are dispatched to the resolver registered for that scheme; any
* other placeholder falls back to bundle-property substitution.
*/
public void substituteConfigurations(final Map<String, Map<String, String>> configs) {
assertValid();
//Check for all required resolver schemes.
Set<String> requiredSchemes = getSchemesForProfileConfigurations(configs);
for (String scheme : requiredSchemes) {
// Ensure a (possibly still unbound) reference slot exists per scheme.
placeholderResolvers.putIfAbsent(scheme, new DynamicReference<PlaceholderResolver>());
}
//Wait for resolvers before starting to resolve.
final Map<String, PlaceholderResolver> availableResolvers = waitForPlaceHolderResolvers(placeholderExecutor, requiredSchemes, getPlaceholderResolvers());
for (Map.Entry<String, Map<String, String>> entry : configs.entrySet()) {
final String pid = entry.getKey();
Map<String, String> props = entry.getValue();
for (Map.Entry<String, String> e : props.entrySet()) {
final String key = e.getKey();
final String value = e.getValue();
// In-place substitution: each value is rewritten through the callback below.
props.put(key, InterpolationHelper.substVars(value, key, null, props, new InterpolationHelper.SubstitutionCallback() {
public String getValue(String toSubstitute) {
if (toSubstitute != null && toSubstitute.contains(":")) {
// Dispatch on the scheme prefix (text before the first ':').
String scheme = toSubstitute.substring(0, toSubstitute.indexOf(":"));
if (availableResolvers.containsKey(scheme)) {
return availableResolvers.get(scheme).resolve(configs, pid, key, toSubstitute);
}
}
// No scheme-specific resolver: fall back to bundle properties.
return substituteBundleProperty(toSubstitute, getBundleContext());
}
}));
}
}
}
// Read-only snapshot of the per-scheme resolver references passed to the
// wait-for-resolvers helper.
private Map<String, DynamicReference<PlaceholderResolver>> getPlaceholderResolvers() {
return Collections.unmodifiableMap(placeholderResolvers);
}
// Best-effort lookup of this bundle's context. Returns null when not running
// inside an OSGi framework (e.g. plain unit tests) -- callers must tolerate null.
private BundleContext getBundleContext() {
try {
return FrameworkUtil.getBundle(AbstractDataStore.class).getBundleContext();
} catch (Throwable t) {
// Deliberately broad: any framework lookup failure means "no context".
return null;
}
}
// Container stuff
//-------------------------------------------------------------------------
// Lists all known container ids. These are the child znodes under the
// containers configuration path in the registry.
@Override
public List<String> getContainers() {
assertValid();
try {
return getChildren(getCurator(), ZkPath.CONFIGS_CONTAINERS.getPath());
} catch (Exception e) {
throw FabricException.launderThrowable(e);
}
}
// True if a container with the given id is currently present in the registry.
// Note: performs a full listing per call.
@Override
public boolean hasContainer(String containerId) {
assertValid();
return getContainers().contains(containerId);
}
/**
 * Returns the id of the given container's parent, or an empty string when the
 * container has no parent (root container) or the node is absent.
 */
@Override
public String getContainerParent(String containerId) {
    assertValid();
    try {
        String parentName = getStringData(getCurator(), ZkPath.CONTAINER_PARENT.getPath(containerId));
        if (parentName == null) {
            return "";
        }
        return parentName.trim();
    } catch (KeeperException.NoNodeException e) {
        // A missing parent node simply means "no parent".
        return "";
    } catch (Throwable e) {
        throw FabricException.launderThrowable(e);
    }
}
// Removes every registry entry belonging to the given container: its
// per-version configuration first, then its config, runtime, JMX-domain and
// provisioning nodes. deleteSafe presumably tolerates missing nodes -- verify.
@Override
public void deleteContainer(String containerId) {
assertValid();
try {
if (getCurator() == null) {
throw new IllegalStateException("Zookeeper service not available");
}
//Wipe all config entries that are related to the container for all versions.
for (String version : getVersions()) {
deleteSafe(getCurator(), ZkPath.CONFIG_VERSIONS_CONTAINER.getPath(version, containerId));
}
deleteSafe(getCurator(), ZkPath.CONFIG_CONTAINER.getPath(containerId));
deleteSafe(getCurator(), ZkPath.CONTAINER.getPath(containerId));
deleteSafe(getCurator(), ZkPath.CONTAINER_DOMAINS.getPath(containerId));
deleteSafe(getCurator(), ZkPath.CONTAINER_PROVISION.getPath(containerId));
} catch (Exception e) {
throw FabricException.launderThrowable(e);
}
}
/**
 * Records a new container in the registry from explicit creation options:
 * its version, its (space-separated) profile list, and its parent.
 */
@Override
public void createContainerConfig(CreateContainerOptions options) {
    assertValid();
    try {
        String parent = options.getParent();
        String containerId = options.getName();
        String versionId = options.getVersion();
        Set<String> profileIds = options.getProfiles();
        // Profiles are stored as one space-separated string.
        StringBuilder profileList = new StringBuilder();
        for (String profileId : profileIds) {
            if (profileList.length() > 0) {
                profileList.append(" ");
            }
            profileList.append(profileId);
        }
        setData(getCurator(), ZkPath.CONFIG_CONTAINER.getPath(containerId), versionId);
        setData(getCurator(), ZkPath.CONFIG_VERSIONS_CONTAINER.getPath(versionId, containerId), profileList.toString());
        setData(getCurator(), ZkPath.CONTAINER_PARENT.getPath(containerId), parent);
    } catch (Exception e) {
        throw FabricException.launderThrowable(e);
    }
}
/**
 * Records a freshly created container in the registry from its creation
 * metadata: stores the serialized metadata, mirrors the container's
 * configuration entries, and settles which resolver the container uses.
 */
@Override
public void createContainerConfig(CreateContainerMetadata metadata) {
    assertValid();
    try {
        CreateContainerOptions options = metadata.getCreateOptions();
        String containerId = metadata.getContainerName();
        setContainerMetadata(metadata);
        // Mirror the per-container configuration entries into the registry.
        Map<String, String> configuration = metadata.getContainerConfiguration();
        for (Map.Entry<String, String> entry : configuration.entrySet()) {
            String key = entry.getKey();
            String value = entry.getValue();
            setData(getCurator(), ZkPath.CONTAINER_ENTRY.getPath(metadata.getContainerName(), key), value);
        }
        // Resolver precedence: metadata override, then the explicit option, then a
        // value already present in the registry, then the global policy, and
        // finally the built-in default.
        // Fixed: the previous if/else-if chain let the global policy overwrite a
        // non-null overridden resolver whenever options.getResolver() was null.
        String resolver = metadata.getOverridenResolver() != null ? metadata.getOverridenResolver() : options.getResolver();
        if (resolver == null) {
            if (exists(getCurator(), ZkPath.CONTAINER_RESOLVER.getPath(containerId)) != null) {
                // If no resolver specified but a resolver is already present in the registry, use the registry value
                resolver = getStringData(getCurator(), ZkPath.CONTAINER_RESOLVER.getPath(containerId));
            } else if (exists(getCurator(), ZkPath.POLICIES.getPath(ZkDefs.RESOLVER)) != null) {
                // If there is a global resolver specified use it.
                resolver = getStringData(getCurator(), ZkPath.POLICIES.getPath(ZkDefs.RESOLVER));
            } else {
                // Fallback to the default resolver
                resolver = ZkDefs.DEFAULT_RESOLVER;
            }
        }
        // Set the resolver if not already set
        setData(getCurator(), ZkPath.CONTAINER_RESOLVER.getPath(containerId), resolver);
    } catch (Exception e) {
        throw FabricException.launderThrowable(e);
    }
}
/**
 * Loads and deserializes the creation metadata stored for a container,
 * resolving classes through the supplied class loader.
 *
 * Returns {@code null} when the metadata node is missing or the payload
 * cannot be deserialized (unknown or incompatible classes).
 */
@Override
public CreateContainerMetadata getContainerMetadata(String containerId, final ClassLoader classLoader) {
    assertValid();
    try {
        byte[] encoded = getByteData(getTreeCache(), ZkPath.CONTAINER_METADATA.getPath(containerId));
        if (encoded == null) {
            return null;
        }
        // Payload is Base64-wrapped Java serialization (see setContainerMetadata).
        byte[] decoded = Base64Encoder.decode(encoded);
        ObjectInputStream ois = new ObjectInputStream(new ByteArrayInputStream(decoded)) {
            @Override
            protected Class<?> resolveClass(ObjectStreamClass desc) throws IOException, ClassNotFoundException {
                // Resolve against the caller-provided loader (e.g. a bundle loader).
                return classLoader.loadClass(desc.getName());
            }
        };
        // NOTE(review): ois is never closed; harmless for an in-memory stream.
        return (CreateContainerMetadata) ois.readObject();
    } catch (ClassNotFoundException e) {
        return null;
    } catch (InvalidClassException e) {
        return null;
    } catch (KeeperException.NoNodeException e) {
        return null;
    } catch (Exception e) {
        throw FabricException.launderThrowable(e);
    }
}
/**
 * Serializes and stores a container's creation metadata in the registry.
 * The payload is Base64-encoded so it survives import/export round trips.
 */
@Override
public void setContainerMetadata(CreateContainerMetadata metadata) {
    assertValid();
    //We encode the metadata so that they are more friendly to import/export.
    try {
        String path = ZkPath.CONTAINER_METADATA.getPath(metadata.getContainerName());
        String payload = Base64Encoder.encode(ObjectUtils.toBytes(metadata));
        setData(getCurator(), path, payload);
    } catch (Exception e) {
        throw FabricException.launderThrowable(e);
    }
}
/** Returns the version id currently assigned to the given container. */
@Override
public String getContainerVersion(String containerId) {
    assertValid();
    try {
        // Served from the local tree cache rather than a direct ZooKeeper read.
        String path = ZkPath.CONFIG_CONTAINER.getPath(containerId);
        return getStringData(getTreeCache(), path);
    } catch (Exception e) {
        throw FabricException.launderThrowable(e);
    }
}
/**
 * Moves a container to another version, carrying its profile assignment
 * over from the old version node before switching the version pointer.
 */
@Override
public void setContainerVersion(String containerId, String versionId) {
    assertValid();
    try {
        // Read the current assignment first so nothing is lost on the switch.
        String previousVersion = getStringData(getCurator(), ZkPath.CONFIG_CONTAINER.getPath(containerId));
        String assignedProfiles = getStringData(getCurator(), ZkPath.CONFIG_VERSIONS_CONTAINER.getPath(previousVersion, containerId));
        setData(getCurator(), ZkPath.CONFIG_VERSIONS_CONTAINER.getPath(versionId, containerId), assignedProfiles);
        setData(getCurator(), ZkPath.CONFIG_CONTAINER.getPath(containerId), versionId);
    } catch (Exception e) {
        throw FabricException.launderThrowable(e);
    }
}
/**
 * Returns the profiles assigned to a container, parsed from the
 * space-separated string stored under the container's current version.
 */
@Override
public List<String> getContainerProfiles(String containerId) {
    assertValid();
    try {
        String versionId = getStringData(getTreeCache(), ZkPath.CONFIG_CONTAINER.getPath(containerId));
        String assigned = getStringData(getTreeCache(), ZkPath.CONFIG_VERSIONS_CONTAINER.getPath(versionId, containerId));
        if (assigned == null || assigned.isEmpty()) {
            return Collections.<String> emptyList();
        }
        return Arrays.asList(assigned.trim().split(" +"));
    } catch (Exception e) {
        throw FabricException.launderThrowable(e);
    }
}
/**
 * Stores a container's profile assignment as a single space-separated
 * string under its current version node.
 */
@Override
public void setContainerProfiles(String containerId, List<String> profileIds) {
    assertValid();
    try {
        String versionId = getStringData(getCurator(), ZkPath.CONFIG_CONTAINER.getPath(containerId));
        StringBuilder joined = new StringBuilder();
        String separator = "";
        for (String profileId : profileIds) {
            joined.append(separator).append(profileId);
            separator = " ";
        }
        setData(getCurator(), ZkPath.CONFIG_VERSIONS_CONTAINER.getPath(versionId, containerId), joined.toString());
    } catch (Exception e) {
        throw FabricException.launderThrowable(e);
    }
}
/** Returns true while the container's "alive" registry node exists. */
@Override
public boolean isContainerAlive(String id) {
    assertValid();
    try {
        Object stat = exists(getCurator(), ZkPath.CONTAINER_ALIVE.getPath(id));
        return stat != null;
    } catch (KeeperException.NoNodeException e) {
        // The container path itself is gone -- definitely not alive.
        return false;
    } catch (Exception e) {
        throw FabricException.launderThrowable(e);
    }
}
/**
 * Reads one container attribute from the registry.
 *
 * The Domains attribute is special-cased: it is the sorted, newline-joined
 * list of children under the container's domains node, and any failure
 * yields an empty string. All other attributes are plain node reads; a
 * missing node returns {@code def} unless {@code mandatory} is set, and
 * {@code substituted} requests placeholder substitution on the value.
 */
@Override
public String getContainerAttribute(String containerId, ContainerAttribute attribute, String def, boolean mandatory, boolean substituted) {
    assertValid();
    if (attribute == ContainerAttribute.Domains) {
        try {
            List<String> list = getCurator().getChildren().forPath(ZkPath.CONTAINER_DOMAINS.getPath(containerId));
            Collections.sort(list);
            StringBuilder sb = new StringBuilder();
            for (String l : list) {
                if (sb.length() > 0) {
                    sb.append("\n");
                }
                sb.append(l);
            }
            return sb.toString();
        } catch (Exception e) {
            // Best effort: domain listing failures degrade to an empty result.
            return "";
        }
    } else {
        try {
            if (substituted) {
                return getSubstitutedPath(getCurator(), getAttributePath(containerId, attribute));
            } else {
                return getStringData(getCurator(), getAttributePath(containerId, attribute));
            }
        } catch (KeeperException.NoNodeException e) {
            // Missing node: fatal only when the caller marked it mandatory.
            if (mandatory) {
                throw FabricException.launderThrowable(e);
            }
            return def;
        } catch (Exception e) {
            throw FabricException.launderThrowable(e);
        }
    }
}
/**
 * Stores a single container attribute. The Resolver attribute is special:
 * it also rewrites the container's ip node to an indirection that follows
 * the selected resolver. For other attributes a missing node is silently
 * ignored.
 */
@Override
public void setContainerAttribute(String containerId, ContainerAttribute attribute, String value) {
    assertValid();
    // Special case for resolver
    // TODO: we could use a double indirection on the ip so that it does not need to change
    // TODO: something like ${zk:container/${zk:container/resolver}}
    if (attribute == ContainerAttribute.Resolver) {
        try {
            setData(getCurator(), ZkPath.CONTAINER_IP.getPath(containerId), "${zk:" + containerId + "/" + value + "}");
            setData(getCurator(), ZkPath.CONTAINER_RESOLVER.getPath(containerId), value);
        } catch (Exception e) {
            throw FabricException.launderThrowable(e);
        }
    } else {
        try {
            // if (value == null) {
            // deleteSafe(zk, getAttributePath(containerId, attribute));
            // } else {
            setData(getCurator(), getAttributePath(containerId, attribute), value);
            // }
        } catch (KeeperException.NoNodeException e) {
            // Ignore: the attribute node does not exist for this container.
        } catch (Exception e) {
            throw FabricException.launderThrowable(e);
        }
    }
}
/**
 * Returns the default version id, lazily initializing the registry with
 * {@code ZkDefs.DEFAULT_VERSION} the first time no value is stored.
 */
@Override
public String getDefaultVersion() {
    assertValid();
    try {
        String version = null;
        // Read through the tree cache when the node is present.
        if (getTreeCache().getCurrentData(ZkPath.CONFIG_DEFAULT_VERSION.getPath()) != null) {
            version = getStringData(getTreeCache(), ZkPath.CONFIG_DEFAULT_VERSION.getPath());
        }
        if (version == null || version.isEmpty()) {
            // First use: seed the default-version pointer and an empty version node.
            version = ZkDefs.DEFAULT_VERSION;
            setData(getCurator(), ZkPath.CONFIG_DEFAULT_VERSION.getPath(), version);
            setData(getCurator(), ZkPath.CONFIG_VERSION.getPath(version), (String) null);
        }
        return version;
    } catch (Exception e) {
        throw FabricException.launderThrowable(e);
    }
}
/** Points the registry's default-version node at the given version id. */
@Override
public void setDefaultVersion(String versionId) {
    assertValid();
    try {
        String path = ZkPath.CONFIG_DEFAULT_VERSION.getPath();
        setData(getCurator(), path, versionId);
    } catch (Exception e) {
        throw FabricException.launderThrowable(e);
    }
}
// Profile methods
//-------------------------------------------------------------------------
/** Returns true when the profile exists in the given version (non-required lookup). */
@Override
public boolean hasProfile(String version, String profile) {
    assertValid();
    Object found = getProfile(version, profile, false);
    return found != null;
}
// Implementation
//-------------------------------------------------------------------------
private String getAttributePath(String containerId, ContainerAttribute attribute) {
switch (attribute) {
case BlueprintStatus:
return ZkPath.CONTAINER_EXTENDER_STATUS.getPath(containerId, "blueprint");
case SpringStatus:
return ZkPath.CONTAINER_EXTENDER_STATUS.getPath(containerId, "spring");
case ProvisionStatus:
return ZkPath.CONTAINER_PROVISION_RESULT.getPath(containerId);
case ProvisionException:
return ZkPath.CONTAINER_PROVISION_EXCEPTION.getPath(containerId);
case ProvisionList:
return ZkPath.CONTAINER_PROVISION_LIST.getPath(containerId);
case Location:
return ZkPath.CONTAINER_LOCATION.getPath(containerId);
case GeoLocation:
return ZkPath.CONTAINER_GEOLOCATION.getPath(containerId);
case Resolver:
return ZkPath.CONTAINER_RESOLVER.getPath(containerId);
case Ip:
return ZkPath.CONTAINER_IP.getPath(containerId);
case LocalIp:
return ZkPath.CONTAINER_LOCAL_IP.getPath(containerId);
case LocalHostName:
return ZkPath.CONTAINER_LOCAL_HOSTNAME.getPath(containerId);
case PublicIp:
return ZkPath.CONTAINER_PUBLIC_IP.getPath(containerId);
case PublicHostName:
return ZkPath.CONTAINER_PUBLIC_HOSTNAME.getPath(containerId);
case ManualIp:
return ZkPath.CONTAINER_MANUAL_IP.getPath(containerId);
case BindAddress:
return ZkPath.CONTAINER_BINDADDRESS.getPath(containerId);
case JmxUrl:
return ZkPath.CONTAINER_JMX.getPath(containerId);
case JolokiaUrl:
return ZkPath.CONTAINER_JOLOKIA.getPath(containerId);
case HttpUrl:
return ZkPath.CONTAINER_HTTP.getPath(containerId);
case SshUrl:
return ZkPath.CONTAINER_SSH.getPath(containerId);
case PortMin:
return ZkPath.CONTAINER_PORT_MIN.getPath(containerId);
case PortMax:
return ZkPath.CONTAINER_PORT_MAX.getPath(containerId);
case ProcessId:
return ZkPath.CONTAINER_PROCESS_ID.getPath(containerId);
case OpenShift:
return ZkPath.CONTAINER_OPENSHIFT.getPath(containerId);
default:
throw new IllegalArgumentException("Unsupported container attribute " + attribute);
}
}
/**
 * Returns a profile's attributes: the agent-PID configuration entries whose
 * keys carry {@code ATTRIBUTE_PREFIX}, with the prefix stripped.
 */
@Override
public Map<String, String> getProfileAttributes(String version, String profile) {
    assertValid();
    Map<String, String> attributes = new HashMap<String, String>();
    for (Map.Entry<String, String> entry : getConfiguration(version, profile, AGENT_PID).entrySet()) {
        String key = entry.getKey();
        if (!key.startsWith(ATTRIBUTE_PREFIX)) {
            continue;
        }
        attributes.put(key.substring(ATTRIBUTE_PREFIX.length()), entry.getValue());
    }
    return attributes;
}
/**
 * Sets (value non-null) or removes (value null) a single profile attribute
 * in the agent-PID configuration.
 */
@Override
public void setProfileAttribute(final String version, final String profile, final String key, final String value) {
    assertValid();
    Map<String, String> config = getConfiguration(version, profile, AGENT_PID);
    if (value != null) {
        config.put(ATTRIBUTE_PREFIX + key, value);
    } else {
        // BUG FIX: attributes are stored under ATTRIBUTE_PREFIX + key (see the
        // put above and getProfileAttributes), so removal must target the
        // prefixed key too; removing the bare key left the attribute in place.
        config.remove(ATTRIBUTE_PREFIX + key);
    }
    setConfiguration(version, profile, AGENT_PID, config);
}
/**
 * Returns all *.properties configurations of a profile as a map of
 * PID -> (key -> value); non-properties files are skipped.
 */
@Override
public Map<String, Map<String, String>> getConfigurations(String version, String profile) {
    assertValid();
    try {
        Map<String, Map<String, String>> result = new HashMap<String, Map<String, String>>();
        for (Map.Entry<String, byte[]> entry : getFileConfigurations(version, profile).entrySet()) {
            String fileName = entry.getKey();
            if (!fileName.endsWith(".properties")) {
                continue;
            }
            String pid = DataStoreHelpers.stripSuffix(fileName, ".properties");
            result.put(pid, DataStoreHelpers.toMap(DataStoreHelpers.toProperties(entry.getValue())));
        }
        return result;
    } catch (Exception e) {
        throw FabricException.launderThrowable(e);
    }
}
/** Returns the currently bound CuratorFramework client (dynamic reference). */
public CuratorFramework getCurator() {
    return curator.get();
}
// [FIXME] Test case pollutes public API
// Public entry point used by tests to inject a CuratorFramework; delegates
// to the protected SCR bind callback below.
public void bindCuratorForTesting(CuratorFramework curator) {
    bindCurator(curator);
}
// SCR bind callback for the CuratorFramework dynamic reference.
protected void bindCurator(CuratorFramework curator) {
    this.curator.bind(curator);
}
// SCR unbind callback for the CuratorFramework dynamic reference.
protected void unbindCurator(CuratorFramework curator) {
    this.curator.unbind(curator);
}
// SCR bind callback: registers a PlaceholderResolver under its scheme.
// putIfAbsent guarantees a single DynamicReference per scheme even when
// two binds race on the same scheme.
protected void bindPlaceholderResolver(PlaceholderResolver resolver) {
    placeholderResolvers.putIfAbsent(resolver.getScheme(), new DynamicReference<PlaceholderResolver>());
    placeholderResolvers.get(resolver.getScheme()).bind(resolver);
}
// SCR unbind callback: clears the resolver registered for this scheme.
// Guards against an unbind for a scheme that was never bound, which would
// otherwise throw a NullPointerException on the map lookup.
protected void unbindPlaceholderResolver(PlaceholderResolver resolver) {
    DynamicReference<PlaceholderResolver> reference = placeholderResolvers.get(resolver.getScheme());
    if (reference != null) {
        reference.unbind();
    }
}
}
@@ -0,0 +1,407 @@
package org.fusesource.fabric.features;
import java.net.URI;
import java.net.URISyntaxException;
import java.util.Collection;
import java.util.EnumSet;
import java.util.HashMap;
import java.util.HashSet;
import java.util.LinkedHashSet;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.TreeMap;
import org.apache.karaf.features.Feature;
import org.apache.karaf.features.FeaturesService;
import org.apache.karaf.features.Repository;
import org.apache.karaf.features.internal.FeatureValidationUtil;
import org.apache.karaf.features.internal.FeaturesServiceImpl;
import org.apache.karaf.features.internal.RepositoryImpl;
import org.apache.zookeeper.KeeperException;
import org.fusesource.fabric.api.Container;
import org.fusesource.fabric.api.FabricService;
import org.fusesource.fabric.api.Profile;
import org.fusesource.fabric.api.Version;
import org.fusesource.fabric.zookeeper.IZKClient;
import org.fusesource.fabric.zookeeper.ZkPath;
import org.linkedin.zookeeper.client.LifecycleListener;
import org.linkedin.zookeeper.tracker.NodeEvent;
import org.linkedin.zookeeper.tracker.NodeEventsListener;
import org.linkedin.zookeeper.tracker.ZKStringDataReader;
import org.linkedin.zookeeper.tracker.ZooKeeperTreeTracker;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import static org.fusesource.fabric.utils.features.FeatureUtils.search;
/**
* A FeaturesService implementation for Fabric managed containers.
*/
public class FabricFeaturesServiceImpl extends FeaturesServiceImpl implements FeaturesService, NodeEventsListener<String>, LifecycleListener {
private static final Logger LOGGER = LoggerFactory.getLogger(FeaturesService.class);
private FabricService fabricService;
private IZKClient zooKeeper;
private ZooKeeperTreeTracker<String> profilesTracker;
private final Set<Repository> repositories = new HashSet<Repository>();
private final Set<Feature> allfeatures = new HashSet<Feature>();
private final Set<Feature> installed = new HashSet<Feature>();
public void init() throws Exception {
}
public void destroy() throws Exception {
profilesTracker.destroy();
}
@Override
public void onEvents(Collection<NodeEvent<String>> nodeEvents) {
try {
repositories.clear();
listRepositories();
allfeatures.clear();
listFeatures();
installed.clear();
listInstalledFeatures();
} catch (Exception e) {
LOGGER.error("Error while updating FeaturesService information from Fabric Registry.", e);
}
}
@Override
public void onConnected() {
profilesTracker = new ZooKeeperTreeTracker<String>(zooKeeper, new ZKStringDataReader(), ZkPath.CONFIG_VERSIONS.getPath());
try {
profilesTracker.track(this);
} catch (InterruptedException e) {
LOGGER.error("Error while setting tracker for Fabric Features Service.", e);
} catch (KeeperException e) {
LOGGER.error("Error while setting tracker for Fabric Features Service.", e);
}
onEvents(null);
}
@Override
public void onDisconnected() {
}
@Override
public void validateRepository(URI uri) throws Exception {
FeatureValidationUtil.validate(uri);
}
@Override
public void addRepository(URI uri) throws Exception {
addRepository(uri, true);
}
@Override
public void addRepository(URI uri, boolean b) throws Exception {
throw new UnsupportedOperationException(String.format("The container is managed by fabric, please use fabric:profile-edit --repositories %s target-profile instead. See fabric:profile-edit for more information.", uri.toString()));
}
@Override
public void removeRepository(URI uri) throws Exception {
removeRepository(uri, true);
}
@Override
public void removeRepository(URI uri, boolean b) throws Exception {
throw new UnsupportedOperationException(String.format("The container is managed by fabric, please use fabric:profile-edit --delete --repositories %s target-profile instead. See fabric:profile-edit for more information.", uri.toString()));
}
@Override
public void restoreRepository(URI uri) throws Exception {
}
/**
* Lists all {@link Repository} entries found in any {@link Profile} of the current {@link Container} {@link Version}.
*
* @return
*/
@Override
public Repository[] listRepositories() {
if (repositories.isEmpty()) {
Set<String> repositoryUris = new LinkedHashSet<String>();
Container container = fabricService.getCurrentContainer();
Version version = container.getVersion();
Profile[] profiles = fabricService.getProfiles(version.getName());
if (profiles != null) {
for (Profile profile : profiles) {
if (profile.getRepositories() != null) {
for (String uri : profile.getRepositories()) {
repositoryUris.add(uri);
addRepositoryUri(uri, repositoryUris);
}
}
}
}
for (String uri : repositoryUris) {
try {
repositories.add(new RepositoryImpl(new URI(uri)));
} catch (URISyntaxException e) {
LOGGER.debug("Error while adding repository with uri {}.", uri);
}
}
}
return repositories.toArray(new Repository[repositories.size()]);
}
@Override
public void installFeature(String s) throws Exception {
installFeature(s, (EnumSet) null);
}
@Override
public void installFeature(String s, EnumSet<Option> options) throws Exception {
throw new UnsupportedOperationException(String.format("The container is managed by fabric, pleas use fabric:profile-edit --features %s target-profile instead. See fabric:profile-edit for more information.", s));
}
@Override
public void installFeature(String s, String s2) throws Exception {
installFeature(s, s2, null);
}
@Override
public void installFeature(String s, String s2, EnumSet<Option> options) throws Exception {
String featureName = s;
if (s2 != null && s2.equals("0.0.0")) {
featureName = s + "/" + s2;
}
throw new UnsupportedOperationException(String.format("The container is managed by fabric, pleas use fabric:profile-edit --features %s target-profile instead. See fabric:profile-edit for more information.", featureName));
}
@Override
public void installFeature(Feature feature, EnumSet<Option> options) throws Exception {
throw new UnsupportedOperationException(String.format("The container is managed by fabric, pleas use fabric:profile-edit --features %s target-profile instead. See fabric:profile-edit for more information.", feature.getName()));
}
@Override
public void installFeatures(Set<Feature> features, EnumSet<Option> options) throws Exception {
StringBuffer sb = new StringBuffer();
for (Feature feature : features) {
sb.append("--feature ").append(feature.getName());
}
throw new UnsupportedOperationException(String.format("The container is managed by fabric, pleas use fabric:profile-edit --features %s target-profile instead. See fabric:profile-edit for more information.", sb.toString()));
}
@Override
public void uninstallFeature(String s) throws Exception {
throw new UnsupportedOperationException(String.format("The container is managed by fabric, pleas use fabric:profile-edit --delete --features %s target-profile instead. See fabric:profile-edit for more information.", s));
}
@Override
public void uninstallFeature(String s, String s2) throws Exception {
String featureName = s;
if (s2 != null && s2.equals("0.0.0")) {
featureName = s + "/" + s2;
}
throw new UnsupportedOperationException(String.format("The container is managed by fabric, pleas use fabric:profile-edit --features %s target-profile instead. See fabric:profile-edit for more information.", featureName));
}
@Override
public Feature[] listFeatures() throws Exception {
if (allfeatures.isEmpty()) {
Repository[] repositories = listRepositories();
for (Repository repository : repositories) {
try {
for (Feature feature : repository.getFeatures()) {
if (!allfeatures.contains(feature)) {
allfeatures.add(feature);
}
}
} catch (Exception ex) {
LOGGER.debug("Could not load features from %s.", repository.getURI());
}
}
}
return allfeatures.toArray(new Feature[allfeatures.size()]);
}
@Override
public Feature[] listInstalledFeatures() {
if (installed.isEmpty()) {
try {
Map<String, Map<String, Feature>> allFeatures = getFeatures(listProfileRepositories());
Container container = fabricService.getCurrentContainer();
Profile[] profiles = container.getProfiles();
if (profiles != null) {
for (Profile profile : profiles) {
List<String> featureNames = profile.getFeatures();
for (String featureName : featureNames) {
try {
Feature f;
if (featureName.contains("/")) {
String[] parts = featureName.split("/");
String name = parts[0];
String version = parts[1];
f = allFeatures.get(name).get(version);
} else {
TreeMap<String, Feature> versionMap = (TreeMap<String, Feature>) allFeatures.get(featureName);
f = versionMap.lastEntry().getValue();
}
addFeatures(f, installed);
} catch (Exception ex) {
LOGGER.debug("Error while adding {} to the features list");
}
}
}
}
} catch (Exception e) {
LOGGER.error("Error retrieveing features.", e);
}
}
return installed.toArray(new Feature[installed.size()]);
}
@Override
public boolean isInstalled(Feature feature) {
if (installed.isEmpty()) {
listInstalledFeatures();
}
return installed.contains(feature);
}
protected Map<String, Map<String, Feature>> getFeatures(Repository[] repositories) throws Exception {
Map<String, Map<String, Feature>> features = new HashMap<String, Map<String, Feature>>();
for (Repository repo : repositories) {
try {
for (Feature f : repo.getFeatures()) {
if (features.get(f.getName()) == null) {
Map<String, Feature> versionMap = new TreeMap<String, Feature>();
versionMap.put(f.getVersion(), f);
features.put(f.getName(), versionMap);
} else {
features.get(f.getName()).put(f.getVersion(), f);
}
}
} catch (Exception ex) {
LOGGER.debug("Could not load features from %s.", repo.getURI());
}
}
return features;
}
/**
* Lists all {@link Repository} enties found in the {@link Profile}s assigned to the current {@link Container}.
*
* @return
*/
private Repository[] listProfileRepositories() {
Set<String> repositoryUris = new LinkedHashSet<String>();
Set<Repository> repositories = new LinkedHashSet<Repository>();
Container container = fabricService.getCurrentContainer();
Set<Profile> profilesWithParents = new HashSet<Profile>();
Profile[] profiles = container.getProfiles();
if (profiles != null) {
for (Profile profile : profiles) {
addProfiles(profile, profilesWithParents);
}
for (Profile profile : profilesWithParents) {
if (profile.getRepositories() != null) {
for (String uri : profile.getRepositories()) {
repositoryUris.add(uri);
addRepositoryUri(uri, repositoryUris);
}
}
}
}
for (String uri : repositoryUris) {
try {
repositories.add(new RepositoryImpl(new URI(uri)));
} catch (URISyntaxException e) {
LOGGER.debug("Error while adding repository with uri {}.", uri);
}
}
return repositories.toArray(new Repository[repositories.size()]);
}
/**
* Adds the {@link URI} of {@link Feature} {@link Repository} and its internals to the set of repositories {@link URI}s.
*
* @param uri
* @param repositoryUris
*/
protected void addRepositoryUri(String uri, Set<String> repositoryUris) {
if (repositoryUris.contains(uri)) {
return;
}
repositoryUris.add(uri);
try {
Repository repository = new RepositoryImpl(new URI(uri));
URI[] internalUris = repository.getRepositories();
if (internalUris != null) {
for (URI u : internalUris) {
addRepositoryUri(u.toString(), repositoryUris);
}
}
} catch (Exception e) {
LOGGER.debug("Error while adding internal repositories of {}.", uri);
}
}
/**
* Adds {@link Profile} and its parents to the set of {@link Profile}s.
*
* @param profile
* @param profiles
*/
protected void addProfiles(Profile profile, Set<Profile> profiles) {
if (profiles.contains(profile)) {
return;
}
profiles.add(profile);
for (Profile parent : profile.getParents()) {
addProfiles(parent, profiles);
}
}
/**
* Adds {@link Feature} and its dependencies to the set of {@link Feature}s.
*
* @param feature
* @param features
*/
protected void addFeatures(Feature feature, Set<Feature> features) {
if (features.contains(feature)) {
return;
}
features.add(feature);
for (Feature dependency : feature.getDependencies()) {
addFeatures(search(dependency.getName(), dependency.getVersion(), repositories), features);
}
}
public FabricService getFabricService() {
return fabricService;
}
public void setFabricService(FabricService fabricService) {
this.fabricService = fabricService;
}
public IZKClient getZooKeeper() {
return zooKeeper;
}
public void setZooKeeper(IZKClient zooKeeper) {
this.zooKeeper = zooKeeper;
}
}
@@ -0,0 +1,158 @@
/**
* Copyright (C) 2011, FuseSource Corp. All rights reserved.
* http://fusesource.com
*
* The software in this package is published under the terms of the
* CDDL license a copy of which has been included with this distribution
* in the license.txt file.
*/
package org.fusesource.fabric.zookeeper.commands;
import org.apache.felix.gogo.commands.Argument;
import org.apache.felix.gogo.commands.Command;
import org.apache.felix.gogo.commands.Option;
import java.io.File;
import java.io.FileWriter;
import java.io.IOException;
import java.util.*;
import java.util.List;
import java.util.regex.Pattern;
import static org.fusesource.fabric.zookeeper.commands.RegexSupport.getPatterns;
import static org.fusesource.fabric.zookeeper.commands.RegexSupport.matches;
import static org.fusesource.fabric.zookeeper.commands.RegexSupport.merge;
/**
 * Karaf shell command that exports a ZooKeeper subtree to the local
 * filesystem: nodes with data become "&lt;path&gt;.cfg" files, data-less nodes
 * become directories. Paths can be filtered with include/exclude regexes
 * (optionally seeded from .fabricinclude / .fabricignore files).
 */
@Command(name = "export", scope = "zk", description = "Export data from zookeeper")
public class Export extends ZooKeeperCommandSupport {
    @Argument(description="path of the directory to export to")
    String target = "." + File.separator + "export";
    @Option(name="-f", aliases={"--regex"}, description="regex to filter on what paths to export, can specify this option more than once for additional filters", multiValued=true)
    String regex[];
    @Option(name="-rf", aliases={"--reverse-regex"}, description="regex to filter what paths to exclude from the export, can specify this option more than once for additional filters", multiValued=true)
    String nregex[];
    @Option(name="-p", aliases={"--path"}, description="Top level context to export")
    String topLevel = "/";
    @Option(name="-d", aliases={"--delete"}, description="Clear target directory before exporting (CAUTION! Performs recursive delete!)")
    boolean delete;
    @Option(name="--dry-run", description="Runs the export but instead prints out what's going to happen rather than performing the action")
    boolean dryRun = false;
    File ignore = new File(".fabricignore");
    File include = new File(".fabricinclude");

    @Override
    protected Object doExecute() throws Exception {
        // Pattern files in the working directory extend the CLI filters.
        if (ignore.exists() && ignore.isFile()) {
            nregex = merge(ignore, nregex);
        }
        if (include.exists() && include.isFile()) {
            regex = merge(include, regex);
        }
        export(topLevel);
        System.out.printf("Export to %s completed successfully\n", target);
        return null;
    }

    // Recursively deletes a file or directory tree; no-op when absent.
    private void delete(File parent) throws Exception {
        if (!parent.exists()) {
            return;
        }
        if (parent.isDirectory()) {
            // listFiles() returns null on I/O or permission errors; guard
            // against the NullPointerException the previous code could throw.
            File[] children = parent.listFiles();
            if (children != null) {
                for (File f : children) {
                    delete(f);
                }
            }
        }
        parent.delete();
    }

    /**
     * Walks the subtree under {@code path}, partitions nodes into files
     * (with data) and directories (without), then materializes them under
     * {@code target} -- or just prints the plan when --dry-run is set.
     */
    protected void export(String path) throws Exception {
        if (!path.endsWith("/")) {
            path = path + "/";
        }
        if (!path.startsWith("/")) {
            path = "/" + path;
        }
        List<Pattern> include = getPatterns(regex);
        List<Pattern> exclude = getPatterns(nregex);
        List<String> paths = getZooKeeper().getAllChildren(path);
        SortedSet<File> directories = new TreeSet<File>();
        Map<File, String> settings = new HashMap<File, String>();
        for (String p : paths) {
            p = path + p;
            if (!matches(include, p, true) || matches(exclude, p, false)) {
                continue;
            }
            byte[] data = getZooKeeper().getData(p);
            if (data != null) {
                settings.put(new File(target + File.separator + p + ".cfg"), new String(data));
            } else {
                directories.add(new File(target + File.separator + p));
            }
        }
        if (delete) {
            if (!dryRun) {
                delete(new File(target));
            } else {
                System.out.printf("Deleting %s and everything under it\n", new File(target));
            }
        }
        for (File d : directories) {
            if (d.exists() && !d.isDirectory()) {
                throw new IllegalArgumentException("Directory " + d + " exists but is not a directory");
            }
            if (!d.exists()) {
                if (!dryRun) {
                    if (!d.mkdirs()) {
                        throw new RuntimeException("Failed to create directory " + d);
                    }
                } else {
                    System.out.printf("Creating directory path : %s\n", d);
                }
            }
        }
        for (File f : settings.keySet()) {
            if (f.exists() && !f.isFile()) {
                throw new IllegalArgumentException("File " + f + " exists but is not a file");
            }
            if (!f.getParentFile().exists()) {
                if (!dryRun) {
                    if (!f.getParentFile().mkdirs()) {
                        throw new RuntimeException("Failed to create directory " + f.getParentFile());
                    }
                } else {
                    // BUG FIX: report the directory being created, not the file.
                    System.out.printf("Creating directory path : %s\n", f.getParentFile());
                }
            }
            if (!f.exists()) {
                try {
                    if (!dryRun) {
                        if (!f.createNewFile()) {
                            throw new RuntimeException("Failed to create file " + f);
                        }
                    } else {
                        System.out.printf("Creating file : %s\n", f);
                    }
                } catch (IOException io) {
                    throw new RuntimeException("Failed to create file " + f + " : " + io);
                }
            }
            if (!dryRun) {
                // BUG FIX: close the writer even when write() throws
                // (the previous code leaked the FileWriter on failure).
                FileWriter writer = new FileWriter(f, false);
                try {
                    writer.write(settings.get(f));
                } finally {
                    writer.close();
                }
            } else {
                System.out.printf("Writing value \"%s\" to file : %s\n", settings.get(f), f);
            }
        }
    }
}
@@ -0,0 +1,204 @@
/**
* Copyright (C) 2011, FuseSource Corp. All rights reserved.
* http://fusesource.com
*
* The software in this package is published under the terms of the
* CDDL license a copy of which has been included with this distribution
* in the license.txt file.
*/
package org.fusesource.fabric.zookeeper.commands;
import org.apache.felix.gogo.commands.Argument;
import org.apache.felix.gogo.commands.Command;
import org.apache.felix.gogo.commands.Option;
import org.apache.zookeeper.CreateMode;
import org.apache.zookeeper.ZooDefs;
import java.io.*;
import java.net.URL;
import java.util.*;
import java.util.List;
import java.util.regex.Pattern;
import static org.fusesource.fabric.zookeeper.commands.RegexSupport.getPatterns;
import static org.fusesource.fabric.zookeeper.commands.RegexSupport.matches;
import static org.fusesource.fabric.zookeeper.commands.RegexSupport.merge;
@Command(name = "import", scope = "zk", description = "Import data into zookeeper")
public class Import extends ZooKeeperCommandSupport {
@Argument(description = "Location of the file or filesystem to load")
protected String source = "." + File.separator + "import";
@Option(name="-d", aliases={"--delete"}, description="Delete any paths not in the tree being imported, ignored when importing a properties file (CAUTION!)")
boolean delete = false;
@Option(name="-t", aliases={"--target"}, description="Target location in ZooKeeper tree to import to")
String target = "/";
@Option(name="-props", aliases={"--properties"}, description="Argument is URL pointing to a properties file")
boolean properties = false;
@Option(name="-fs", aliases={"--filesystem"}, description="Argument is the top level directory of a local filesystem tree")
boolean filesystem = true;
@Option(name="-f", aliases={"--regex"}, description="regex to filter on what paths to import, can specify this option more than once for additional filters", multiValued=true)
String regex[];
@Option(name="-rf", aliases={"--reverse-regex"}, description="regex to filter what paths to exclude, can specify this option more than once for additional filters", multiValued=true)
protected String[] nregex;
@Option(name="--dry-run", description="Runs the import but prints out what's going to happen instead of making any changes")
boolean dryRun = false;
File ignore = new File(".fabricignore");
File include = new File(".fabricinclude");
/**
 * Entry point of the zk:import command: merges filter files into the CLI
 * filters, normalizes the mutually exclusive -props/-fs flags, then runs
 * the selected import mode.
 */
@Override
protected Object doExecute() throws Exception {
    // Pattern files in the working directory extend the CLI filters.
    if (ignore.exists() && ignore.isFile()) {
        nregex = merge(ignore, nregex);
    }
    if (include.exists() && include.isFile()) {
        regex = merge(include, regex);
    }
    // -props wins over the (default-on) filesystem mode.
    if (properties) {
        filesystem = false;
    } else if (filesystem) {
        properties = false;
    }
    if (properties) {
        readPropertiesFile();
    }
    if (filesystem) {
        readFileSystem();
    }
    System.out.println("Successfully imported settings from " + source);
    return null;
}
/**
 * Converts an on-disk path under {@code source} into its node name by
 * dropping everything up to and including the source prefix and the
 * trailing ".cfg" extension.
 *
 * BUG FIX: the previous implementation used String.split(source), which
 * (a) treated the filesystem path as a regular expression (breaking on
 * paths containing regex metacharacters such as '.' or Windows '\\'), and
 * (b) measured the suffix length from strs[1] while returning the last
 * element, corrupting results whenever the two differed.
 */
private String stripPath(String path) {
    int at = path.lastIndexOf(source);
    String tail = at < 0 ? path : path.substring(at + source.length());
    if (tail.isEmpty()) {
        return "";
    }
    if (tail.endsWith(".cfg")) {
        tail = tail.substring(0, tail.length() - ".cfg".length());
    }
    return tail;
}
/**
 * Builds the "/a/b/c"-style node path of {@code current} relative to
 * {@code parent}. Iterative equivalent of the original recursion: walk
 * from current up to (but excluding) parent, prepending "/name" segments;
 * stops early if the parent chain ends (null) before reaching parent.
 */
private String buildZKPath(File parent, File current) {
    StringBuilder rc = new StringBuilder();
    for (File f = current; f != null && !parent.equals(f); f = f.getParentFile()) {
        rc.insert(0, f.getName()).insert(0, "/");
    }
    return rc.toString();
}
/**
 * Recursively collects candidate ZooKeeper entries under {@code current}.
 * Directories become keys mapped to null; ".cfg" files become keys (with the
 * suffix stripped) mapped to the file contents.
 *
 * <p>Fixes: {@code listFiles} can return null on I/O/permission errors (was an
 * NPE); the file was read with {@code available()} plus a single {@code read}
 * call, which may under-size the buffer and short-read — replaced by a
 * length-sized buffer filled in a loop; the stream is now closed in a finally
 * block.
 *
 * @param parent   root of the scan; keys in {@code settings} are relative to it
 * @param current  file or directory being visited
 * @param settings accumulator of path -> contents (null for directories)
 * @throws Exception on I/O failure
 */
private void getCandidates(File parent, File current, Map<String, String> settings) throws Exception {
    if (current.isDirectory()) {
        File[] children = current.listFiles(new FileFilter() {
            @Override
            public boolean accept(File file) {
                // Recurse into directories; only ".cfg" files are imported.
                return file.isDirectory() || file.getName().endsWith(".cfg");
            }
        });
        // listFiles returns null on an I/O error or when the directory vanished.
        if (children != null) {
            for (File child : children) {
                getCandidates(parent, child, settings);
            }
        }
        String p = buildZKPath(parent, current).replaceFirst("/", "");
        settings.put(p, null);
    } else {
        // Size the buffer from the file length and loop until EOF: available()
        // is only an estimate and read() may return fewer bytes than requested.
        byte[] contents = new byte[(int) current.length()];
        BufferedInputStream in = new BufferedInputStream(new FileInputStream(current));
        try {
            int off = 0;
            while (off < contents.length) {
                int n = in.read(contents, off, contents.length - off);
                if (n < 0) {
                    break;
                }
                off += n;
            }
        } finally {
            in.close();
        }
        String p = buildZKPath(parent, current).replaceFirst("/", "");
        if (p.endsWith(".cfg")) {
            p = p.substring(0, p.length() - ".cfg".length());
        }
        // NOTE(review): decodes with the platform default charset, as before —
        // consider UTF-8 explicitly.
        settings.put(p, new String(contents));
    }
}
/**
 * Imports the local filesystem tree rooted at {@code source} into ZooKeeper
 * under {@code target}, honouring the include/exclude regex filters and the
 * dry-run and delete options.
 */
private void readFileSystem() throws Exception {
    Map<String, String> settings = new TreeMap<String, String>();
    File root = new File(source);
    getCandidates(root, root, settings);
    List<Pattern> include = getPatterns(regex);
    List<Pattern> exclude = getPatterns(nregex);
    // Normalise the target so it always has the form "/…/".
    if (!target.endsWith("/")) {
        target = target + "/";
    }
    if (!target.startsWith("/")) {
        target = "/" + target;
    }
    List<String> paths = new ArrayList<String>();
    for (Map.Entry<String, String> entry : settings.entrySet()) {
        String path = target + entry.getKey();
        // Record every candidate (filtered or not) so deletePathsNotIn keeps it.
        paths.add(path);
        if (!matches(include, path, true) || matches(exclude, path, false)) {
            continue;
        }
        if (dryRun) {
            System.out.printf("Creating path \"%s\" with value \"%s\"\n", path, entry.getValue());
        } else {
            getZooKeeper().createOrSetWithParents(path, entry.getValue(), ZooDefs.Ids.OPEN_ACL_UNSAFE, CreateMode.PERSISTENT);
        }
    }
    if (delete) {
        deletePathsNotIn(paths);
    }
}
/**
 * Deletes every ZooKeeper path under {@code target} that is not present in
 * {@code paths} — or only prints what would be deleted when dry-running.
 */
private void deletePathsNotIn(List<String> paths) throws Exception {
    for (String child : getZooKeeper().getAllChildren(target)) {
        String candidate = "/" + child;
        if (paths.contains(candidate)) {
            continue;
        }
        if (dryRun) {
            System.out.printf("Deleting path %s and everything under it\n", candidate);
        } else {
            getZooKeeper().deleteWithChildren(candidate);
        }
    }
}
/**
 * Imports key/value pairs from the properties file addressed by {@code source}
 * (a URL) into ZooKeeper under {@code target}, honouring the include/exclude
 * regex filters and the dry-run option.
 *
 * <p>Fixes: the input stream was never closed — it is now closed in a finally
 * block so the connection is released even when loading fails; the unused
 * local {@code paths} list was removed; the raw {@code Enumeration} is now
 * wildcarded.
 *
 * @throws Exception on I/O or ZooKeeper failure
 */
private void readPropertiesFile() throws Exception {
    List<Pattern> includes = getPatterns(regex);
    List<Pattern> excludes = getPatterns(nregex);
    Properties props = new Properties();
    InputStream in = new BufferedInputStream(new URL(source).openStream());
    try {
        props.load(in);
    } finally {
        in.close();
    }
    for (Enumeration<?> names = props.propertyNames(); names.hasMoreElements();) {
        String name = (String) names.nextElement();
        String value = props.getProperty(name);
        // An empty value means "create the node with no data".
        if (value != null && value.isEmpty()) {
            value = null;
        }
        if (!name.startsWith("/")) {
            name = "/" + name;
        }
        name = target + name;
        if (!matches(includes, name, true) || matches(excludes, name, false)) {
            continue;
        }
        if (!dryRun) {
            getZooKeeper().createOrSetWithParents(name, value, ZooDefs.Ids.OPEN_ACL_UNSAFE, CreateMode.PERSISTENT);
        } else {
            System.out.printf("Creating path \"%s\" with value \"%s\"\n", name, value);
        }
    }
}
}
@@ -0,0 +1,73 @@
/*
* JBoss, Home of Professional Open Source.
* Copyright 2010, Red Hat, Inc., and individual contributors
* as indicated by the @author tags. See the copyright.txt file in the
* distribution for a full listing of individual contributors.
*
* This is free software; you can redistribute it and/or modify it
* under the terms of the GNU Lesser General Public License as
* published by the Free Software Foundation; either version 2.1 of
* the License, or (at your option) any later version.
*
* This software is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* Lesser General Public License for more details.
*
* You should have received a copy of the GNU Lesser General Public
* License along with this software; if not, write to the Free
* Software Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA
* 02110-1301 USA, or see the FSF site: http://www.fsf.org.
*/
package org.jboss.metadata.parser.jbossweb;
import java.util.ArrayList;
import java.util.List;
import javax.xml.stream.XMLStreamException;
import javax.xml.stream.XMLStreamReader;
import org.jboss.metadata.javaee.spec.ParamValueMetaData;
import org.jboss.metadata.parser.ee.ParamValueMetaDataParser;
import org.jboss.metadata.parser.util.MetaDataElementParser;
import org.jboss.metadata.web.jboss.ContainerListenerMetaData;
import org.jboss.metadata.web.jboss.ContainerListenerType;
/**
* @author Remy Maucherat
*/
public class ContainerListenerMetaDataParser extends MetaDataElementParser {
    /**
     * Parses a &lt;container-listener&gt; element into a {@link ContainerListenerMetaData}.
     *
     * <p>Fix: an unknown listener-type previously escaped as a bare
     * {@link IllegalArgumentException} from {@code Enum.valueOf}; it is now
     * reported as a parse error via {@code unexpectedValue}, consistent with
     * the sibling version of this parser.
     *
     * @param reader reader positioned on the container-listener start element
     * @return the populated metadata
     * @throws XMLStreamException on malformed input or an unknown child element
     */
    public static ContainerListenerMetaData parse(XMLStreamReader reader) throws XMLStreamException {
        ContainerListenerMetaData containerListener = new ContainerListenerMetaData();
        // Handle child elements until the closing tag.
        while (reader.hasNext() && reader.nextTag() != END_ELEMENT) {
            final Element element = Element.forName(reader.getLocalName());
            switch (element) {
                case CLASS_NAME:
                    containerListener.setListenerClass(getElementText(reader));
                    break;
                case MODULE:
                    containerListener.setModule(getElementText(reader));
                    break;
                case LISTENER_TYPE:
                    try {
                        containerListener.setListenerType(ContainerListenerType.valueOf(getElementText(reader)));
                    } catch (IllegalArgumentException e) {
                        throw unexpectedValue(reader, e);
                    }
                    break;
                case PARAM:
                    // Params list is created lazily on first <param>.
                    List<ParamValueMetaData> params = containerListener.getParams();
                    if (params == null) {
                        params = new ArrayList<ParamValueMetaData>();
                        containerListener.setParams(params);
                    }
                    params.add(ParamValueMetaDataParser.parse(reader));
                    break;
                default: throw unexpectedElement(reader);
            }
        }
        return containerListener;
    }
}
@@ -0,0 +1,140 @@
/*
* JBoss, Home of Professional Open Source.
* Copyright 2010, Red Hat, Inc., and individual contributors
* as indicated by the @author tags. See the copyright.txt file in the
* distribution for a full listing of individual contributors.
*
* This is free software; you can redistribute it and/or modify it
* under the terms of the GNU Lesser General Public License as
* published by the Free Software Foundation; either version 2.1 of
* the License, or (at your option) any later version.
*
* This software is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* Lesser General Public License for more details.
*
* You should have received a copy of the GNU Lesser General Public
* License along with this software; if not, write to the Free
* Software Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA
* 02110-1301 USA, or see the FSF site: http://www.fsf.org.
*/
package org.jboss.metadata.parser.ee;
import java.util.ArrayList;
import java.util.List;
import javax.xml.namespace.QName;
import javax.xml.stream.XMLStreamException;
import javax.xml.stream.XMLStreamReader;
import org.jboss.metadata.parser.util.MetaDataElementParser;
import org.jboss.metadata.javaee.jboss.JBossPortComponentRef;
import org.jboss.metadata.javaee.jboss.JBossServiceReferenceMetaData;
import org.jboss.metadata.javaee.spec.DescriptionGroupMetaData;
import org.jboss.metadata.javaee.spec.DescriptionsImpl;
import org.jboss.metadata.javaee.spec.EJBReferenceMetaData;
import org.jboss.metadata.javaee.spec.EJBReferenceType;
import org.jboss.metadata.javaee.spec.EnvironmentEntryMetaData;
import org.jboss.metadata.javaee.spec.MessageDestinationMetaData;
import org.jboss.metadata.javaee.spec.PortComponentRef;
import org.jboss.metadata.javaee.spec.ServiceReferenceHandlerChainMetaData;
import org.jboss.metadata.javaee.spec.ServiceReferenceHandlerChainsMetaData;
import org.jboss.metadata.javaee.spec.ServiceReferenceHandlerMetaData;
import org.jboss.metadata.javaee.spec.ServiceReferenceHandlersMetaData;
import org.jboss.metadata.javaee.spec.ServiceReferenceMetaData;
/**
* @author Remy Maucherat
*/
public class ServiceReferenceMetaDataParser extends MetaDataElementParser {
/**
 * Parses a &lt;service-ref&gt; element into a JBoss service reference.
 *
 * @param reader reader positioned on the service-ref start element
 * @return the populated {@code ServiceReferenceMetaData}
 * @throws XMLStreamException on malformed input or unknown attributes/elements
 */
public static ServiceReferenceMetaData parse(XMLStreamReader reader) throws XMLStreamException {
JBossServiceReferenceMetaData serviceReference = new JBossServiceReferenceMetaData();
// Handle attributes: only "id" is recognised; namespaced attributes are skipped.
final int count = reader.getAttributeCount();
for (int i = 0; i < count; i ++) {
final String value = reader.getAttributeValue(i);
if (attributeHasNamespace(reader, i)) {
continue;
}
final Attribute attribute = Attribute.forName(reader.getAttributeLocalName(i));
switch (attribute) {
case ID: {
serviceReference.setId(value);
break;
}
default: throw unexpectedAttribute(reader, i);
}
}
DescriptionGroupMetaData descriptionGroup = new DescriptionGroupMetaData();
// Handle child elements; shared description-group and resource-injection
// elements are delegated to their dedicated parsers first.
while (reader.hasNext() && reader.nextTag() != END_ELEMENT) {
if (DescriptionGroupMetaDataParser.parse(reader, descriptionGroup)) {
// Attach the description group only once, on its first occurrence.
if (serviceReference.getDescriptionGroup() == null) {
serviceReference.setDescriptionGroup(descriptionGroup);
}
continue;
}
if (ResourceInjectionMetaDataParser.parse(reader, serviceReference)) {
continue;
}
final Element element = Element.forName(reader.getLocalName());
switch (element) {
case SERVICE_REF_NAME:
serviceReference.setServiceRefName(getElementText(reader));
break;
case SERVICE_INTERFACE:
serviceReference.setServiceInterface(getElementText(reader));
break;
case SERVICE_REF_TYPE:
serviceReference.setServiceRefType(getElementText(reader));
break;
case WSDL_FILE:
serviceReference.setWsdlFile(getElementText(reader));
break;
case JAXRPC_MAPPING_FILE:
serviceReference.setJaxrpcMappingFile(getElementText(reader));
break;
case SERVICE_QNAME:
// NOTE(review): QName.valueOf cannot resolve namespace prefixes against
// the document context — a sibling version of this parser uses
// parseQName(reader, ...) here instead; confirm which is intended.
serviceReference.setServiceQname(QName.valueOf(getElementText(reader)));
break;
case PORT_COMPONENT_REF:
// NOTE(review): unchecked cast — assumes getPortComponentRef() holds
// JBossPortComponentRef elements; confirm against the metadata class.
List<JBossPortComponentRef> portComponentRefs = (List<JBossPortComponentRef>) serviceReference.getPortComponentRef();
if (portComponentRefs == null) {
portComponentRefs = new ArrayList<JBossPortComponentRef>();
serviceReference.setJBossPortComponentRef(portComponentRefs);
}
portComponentRefs.add(PortComponentRefParser.parse(reader));
break;
case HANDLER:
// Handlers container is created lazily on the first <handler>.
ServiceReferenceHandlersMetaData handlers = serviceReference.getHandlers();
if (handlers == null) {
handlers = new ServiceReferenceHandlersMetaData();
serviceReference.setHandlers(handlers);
}
handlers.add(ServiceReferenceHandlerMetaDataParser.parse(reader));
break;
case HANDLER_CHAIN:
// Handler-chains container (and its backing list) is created lazily.
ServiceReferenceHandlerChainsMetaData handlerChains = serviceReference.getHandlerChains();
if (handlerChains == null) {
handlerChains = new ServiceReferenceHandlerChainsMetaData();
handlerChains.setHandlers(new ArrayList<ServiceReferenceHandlerChainMetaData>());
serviceReference.setHandlerChains(handlerChains);
}
handlerChains.getHandlers().add(ServiceReferenceHandlerChainMetaDataParser.parse(reader));
break;
case WSDL_OVERRIDE:
serviceReference.setWsdlOverride(getElementText(reader));
break;
default: throw unexpectedElement(reader);
}
}
return serviceReference;
}
}
@@ -0,0 +1,130 @@
/*
* JBoss, Home of Professional Open Source.
* Copyright 2010, Red Hat, Inc., and individual contributors
* as indicated by the @author tags. See the copyright.txt file in the
* distribution for a full listing of individual contributors.
*
* This is free software; you can redistribute it and/or modify it
* under the terms of the GNU Lesser General Public License as
* published by the Free Software Foundation; either version 2.1 of
* the License, or (at your option) any later version.
*
* This software is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* Lesser General Public License for more details.
*
* You should have received a copy of the GNU Lesser General Public
* License along with this software; if not, write to the Free
* Software Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA
* 02110-1301 USA, or see the FSF site: http://www.fsf.org.
*/
package org.jboss.metadata.appclient.parser.spec;
import org.jboss.metadata.appclient.spec.AppClientEnvironmentRefsGroupMetaData;
import org.jboss.metadata.appclient.spec.ApplicationClientMetaData;
import org.jboss.metadata.javaee.spec.DescriptionGroupMetaData;
import org.jboss.metadata.parser.ee.DescriptionGroupMetaDataParser;
import org.jboss.metadata.parser.ee.EnvironmentRefsGroupMetaDataParser;
import org.jboss.metadata.parser.util.MetaDataElementParser;
import javax.xml.stream.XMLStreamException;
import javax.xml.stream.XMLStreamReader;
/**
* Parses an application-client.xml file and creates metadata out of it
* <p/>
* @author Stuart Douglas
*/
public class ApplicationClientMetaDataParser extends MetaDataElementParser {

    /** Shared stateless instance. */
    public static final ApplicationClientMetaDataParser INSTANCE = new ApplicationClientMetaDataParser();

    /**
     * Parses an application-client.xml document and returns the resulting metadata.
     *
     * @param reader reader positioned at the start of the document
     * @return the populated {@code ApplicationClientMetaData}
     * @throws XMLStreamException on malformed input
     */
    public ApplicationClientMetaData parse(XMLStreamReader reader) throws XMLStreamException {
        reader.require(START_DOCUMENT, null, null);
        // Skip everything (whitespace, comments, PIs) up to the root start element.
        while (reader.hasNext() && reader.next() != START_ELEMENT) {
        }
        ApplicationClientMetaData appClientMetadata = new ApplicationClientMetaData();
        processAttributes(appClientMetadata, reader);
        // Parse and create metadata out of the elements under the root application-client element.
        processElements(appClientMetadata, reader);
        return appClientMetadata;
    }

    /**
     * Handles one attribute of the root element.
     *
     * <p>Fixes two defects: the VERSION case was missing its {@code break} and
     * fell through into METADATA_COMPLETE, wrongly marking every versioned
     * descriptor metadata-complete; and METADATA_COMPLETE hard-coded
     * {@code true} instead of parsing the attribute's actual boolean value.
     *
     * @throws XMLStreamException if the attribute is not recognized
     */
    protected void processAttribute(ApplicationClientMetaData metaData, XMLStreamReader reader, int i) throws XMLStreamException {
        final String value = reader.getAttributeValue(i);
        if (attributeHasNamespace(reader, i)) {
            return;
        }
        final ApplicationClientAttribute ejbJarAttribute = ApplicationClientAttribute.forName(reader.getAttributeLocalName(i));
        switch (ejbJarAttribute) {
            case ID: {
                metaData.setId(value);
                break;
            }
            case VERSION: {
                metaData.setVersion(value);
                break; // was missing: fell through into METADATA_COMPLETE
            }
            case METADATA_COMPLETE: {
                metaData.setMetadataComplete(Boolean.parseBoolean(value));
                break;
            }
            default:
                throw unexpectedAttribute(reader, i);
        }
    }

    /** Applies {@link #processAttribute} to every attribute of the root element. */
    protected void processAttributes(final ApplicationClientMetaData applicationClientMetaData, XMLStreamReader reader) throws XMLStreamException {
        final int count = reader.getAttributeCount();
        for (int i = 0; i < count; i++) {
            processAttribute(applicationClientMetaData, reader, i);
        }
    }

    /**
     * Parses the child elements of the root element, delegating shared
     * description-group and environment-ref groups to their dedicated parsers.
     *
     * @throws XMLStreamException on an unrecognized child element
     */
    protected void processElements(final ApplicationClientMetaData applicationClientMetaData, XMLStreamReader reader) throws XMLStreamException {
        final DescriptionGroupMetaData descriptionGroup = new DescriptionGroupMetaData();
        final AppClientEnvironmentRefsGroupMetaData environmentRefsGroupMetaData = new AppClientEnvironmentRefsGroupMetaData();
        while (reader.hasNext() && reader.nextTag() != END_ELEMENT) {
            if (DescriptionGroupMetaDataParser.parse(reader, descriptionGroup)) {
                continue;
            }
            if (EnvironmentRefsGroupMetaDataParser.parseRemote(reader, environmentRefsGroupMetaData)) {
                continue;
            }
            final AppClientElement element = AppClientElement.forName(reader.getLocalName());
            switch (element) {
                case CALLBACK_HANDLER: {
                    applicationClientMetaData.setCallbackHandler(getElementText(reader));
                    break;
                }
                default:
                    throw unexpectedElement(reader);
            }
        }
        applicationClientMetaData.setDescriptionGroup(descriptionGroup);
        applicationClientMetaData.setEnvironmentRefsGroupMetaData(environmentRefsGroupMetaData);
    }
}
@@ -0,0 +1,77 @@
/*
* JBoss, Home of Professional Open Source.
* Copyright 2010, Red Hat, Inc., and individual contributors
* as indicated by the @author tags. See the copyright.txt file in the
* distribution for a full listing of individual contributors.
*
* This is free software; you can redistribute it and/or modify it
* under the terms of the GNU Lesser General Public License as
* published by the Free Software Foundation; either version 2.1 of
* the License, or (at your option) any later version.
*
* This software is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* Lesser General Public License for more details.
*
* You should have received a copy of the GNU Lesser General Public
* License along with this software; if not, write to the Free
* Software Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA
* 02110-1301 USA, or see the FSF site: http://www.fsf.org.
*/
package org.jboss.metadata.parser.jbossweb;
import java.util.ArrayList;
import java.util.List;
import javax.xml.stream.XMLStreamException;
import javax.xml.stream.XMLStreamReader;
import org.jboss.metadata.javaee.spec.ParamValueMetaData;
import org.jboss.metadata.parser.ee.ParamValueMetaDataParser;
import org.jboss.metadata.parser.util.MetaDataElementParser;
import org.jboss.metadata.web.jboss.ContainerListenerMetaData;
import org.jboss.metadata.web.jboss.ContainerListenerType;
/**
* @author Remy Maucherat
*/
public class ContainerListenerMetaDataParser extends MetaDataElementParser {
    /**
     * Builds a {@link ContainerListenerMetaData} from the child elements of the
     * current &lt;container-listener&gt; element.
     *
     * @param reader reader positioned on the container-listener start element
     * @return the populated metadata
     * @throws XMLStreamException on malformed input, an unknown child element,
     *         or an invalid listener-type value
     */
    public static ContainerListenerMetaData parse(XMLStreamReader reader) throws XMLStreamException {
        final ContainerListenerMetaData metaData = new ContainerListenerMetaData();
        // Consume child elements until the closing tag of <container-listener>.
        while (reader.hasNext() && reader.nextTag() != END_ELEMENT) {
            final Element child = Element.forName(reader.getLocalName());
            switch (child) {
                case PARAM: {
                    // The params list is created lazily on the first <param>.
                    List<ParamValueMetaData> paramList = metaData.getParams();
                    if (paramList == null) {
                        paramList = new ArrayList<ParamValueMetaData>();
                        metaData.setParams(paramList);
                    }
                    paramList.add(ParamValueMetaDataParser.parse(reader));
                    break;
                }
                case CLASS_NAME: {
                    metaData.setListenerClass(getElementText(reader));
                    break;
                }
                case MODULE: {
                    metaData.setModule(getElementText(reader));
                    break;
                }
                case LISTENER_TYPE: {
                    // An invalid enum name is reported as a parse error, not an IAE.
                    try {
                        metaData.setListenerType(ContainerListenerType.valueOf(getElementText(reader)));
                    } catch (IllegalArgumentException e) {
                        throw unexpectedValue(reader, e);
                    }
                    break;
                }
                default:
                    throw unexpectedElement(reader);
            }
        }
        return metaData;
    }
}
@@ -0,0 +1,140 @@
/*
* JBoss, Home of Professional Open Source.
* Copyright 2010, Red Hat, Inc., and individual contributors
* as indicated by the @author tags. See the copyright.txt file in the
* distribution for a full listing of individual contributors.
*
* This is free software; you can redistribute it and/or modify it
* under the terms of the GNU Lesser General Public License as
* published by the Free Software Foundation; either version 2.1 of
* the License, or (at your option) any later version.
*
* This software is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* Lesser General Public License for more details.
*
* You should have received a copy of the GNU Lesser General Public
* License along with this software; if not, write to the Free
* Software Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA
* 02110-1301 USA, or see the FSF site: http://www.fsf.org.
*/
package org.jboss.metadata.parser.ee;
import java.util.ArrayList;
import java.util.List;
import javax.xml.namespace.QName;
import javax.xml.stream.XMLStreamException;
import javax.xml.stream.XMLStreamReader;
import org.jboss.metadata.parser.util.MetaDataElementParser;
import org.jboss.metadata.javaee.jboss.JBossPortComponentRef;
import org.jboss.metadata.javaee.jboss.JBossServiceReferenceMetaData;
import org.jboss.metadata.javaee.spec.DescriptionGroupMetaData;
import org.jboss.metadata.javaee.spec.DescriptionsImpl;
import org.jboss.metadata.javaee.spec.EJBReferenceMetaData;
import org.jboss.metadata.javaee.spec.EJBReferenceType;
import org.jboss.metadata.javaee.spec.EnvironmentEntryMetaData;
import org.jboss.metadata.javaee.spec.MessageDestinationMetaData;
import org.jboss.metadata.javaee.spec.PortComponentRef;
import org.jboss.metadata.javaee.spec.ServiceReferenceHandlerChainMetaData;
import org.jboss.metadata.javaee.spec.ServiceReferenceHandlerChainsMetaData;
import org.jboss.metadata.javaee.spec.ServiceReferenceHandlerMetaData;
import org.jboss.metadata.javaee.spec.ServiceReferenceHandlersMetaData;
import org.jboss.metadata.javaee.spec.ServiceReferenceMetaData;
/**
* @author Remy Maucherat
*/
public class ServiceReferenceMetaDataParser extends MetaDataElementParser {
/**
 * Parses a &lt;service-ref&gt; element into a JBoss service reference.
 *
 * @param reader reader positioned on the service-ref start element
 * @return the populated {@code ServiceReferenceMetaData}
 * @throws XMLStreamException on malformed input or unknown attributes/elements
 */
public static ServiceReferenceMetaData parse(XMLStreamReader reader) throws XMLStreamException {
JBossServiceReferenceMetaData serviceReference = new JBossServiceReferenceMetaData();
// Handle attributes: only "id" is recognised; namespaced attributes are skipped.
final int count = reader.getAttributeCount();
for (int i = 0; i < count; i ++) {
final String value = reader.getAttributeValue(i);
if (attributeHasNamespace(reader, i)) {
continue;
}
final Attribute attribute = Attribute.forName(reader.getAttributeLocalName(i));
switch (attribute) {
case ID: {
serviceReference.setId(value);
break;
}
default: throw unexpectedAttribute(reader, i);
}
}
DescriptionGroupMetaData descriptionGroup = new DescriptionGroupMetaData();
// Handle child elements; shared description-group and resource-injection
// elements are delegated to their dedicated parsers first.
while (reader.hasNext() && reader.nextTag() != END_ELEMENT) {
if (DescriptionGroupMetaDataParser.parse(reader, descriptionGroup)) {
// Attach the description group only once, on its first occurrence.
if (serviceReference.getDescriptionGroup() == null) {
serviceReference.setDescriptionGroup(descriptionGroup);
}
continue;
}
if (ResourceInjectionMetaDataParser.parse(reader, serviceReference)) {
continue;
}
final Element element = Element.forName(reader.getLocalName());
switch (element) {
case SERVICE_REF_NAME:
serviceReference.setServiceRefName(getElementText(reader));
break;
case SERVICE_INTERFACE:
serviceReference.setServiceInterface(getElementText(reader));
break;
case SERVICE_REF_TYPE:
serviceReference.setServiceRefType(getElementText(reader));
break;
case WSDL_FILE:
serviceReference.setWsdlFile(getElementText(reader));
break;
case JAXRPC_MAPPING_FILE:
serviceReference.setJaxrpcMappingFile(getElementText(reader));
break;
case SERVICE_QNAME:
// parseQName resolves the value against the reader's namespace context
// (unlike plain QName.valueOf).
serviceReference.setServiceQname(parseQName(reader, getElementText(reader)));
break;
case PORT_COMPONENT_REF:
// NOTE(review): unchecked cast — assumes getPortComponentRef() holds
// JBossPortComponentRef elements; confirm against the metadata class.
List<JBossPortComponentRef> portComponentRefs = (List<JBossPortComponentRef>) serviceReference.getPortComponentRef();
if (portComponentRefs == null) {
portComponentRefs = new ArrayList<JBossPortComponentRef>();
serviceReference.setJBossPortComponentRef(portComponentRefs);
}
portComponentRefs.add(PortComponentRefParser.parse(reader));
break;
case HANDLER:
// Handlers container is created lazily on the first <handler>.
ServiceReferenceHandlersMetaData handlers = serviceReference.getHandlers();
if (handlers == null) {
handlers = new ServiceReferenceHandlersMetaData();
serviceReference.setHandlers(handlers);
}
handlers.add(ServiceReferenceHandlerMetaDataParser.parse(reader));
break;
case HANDLER_CHAIN:
// Handler-chains container (and its backing list) is created lazily.
ServiceReferenceHandlerChainsMetaData handlerChains = serviceReference.getHandlerChains();
if (handlerChains == null) {
handlerChains = new ServiceReferenceHandlerChainsMetaData();
handlerChains.setHandlers(new ArrayList<ServiceReferenceHandlerChainMetaData>());
serviceReference.setHandlerChains(handlerChains);
}
handlerChains.getHandlers().add(ServiceReferenceHandlerChainMetaDataParser.parse(reader));
break;
case WSDL_OVERRIDE:
serviceReference.setWsdlOverride(getElementText(reader));
break;
default: throw unexpectedElement(reader);
}
}
return serviceReference;
}
}
@@ -0,0 +1,131 @@
/*
* JBoss, Home of Professional Open Source.
* Copyright 2010, Red Hat, Inc., and individual contributors
* as indicated by the @author tags. See the copyright.txt file in the
* distribution for a full listing of individual contributors.
*
* This is free software; you can redistribute it and/or modify it
* under the terms of the GNU Lesser General Public License as
* published by the Free Software Foundation; either version 2.1 of
* the License, or (at your option) any later version.
*
* This software is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* Lesser General Public License for more details.
*
* You should have received a copy of the GNU Lesser General Public
* License along with this software; if not, write to the Free
* Software Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA
* 02110-1301 USA, or see the FSF site: http://www.fsf.org.
*/
package org.jboss.metadata.appclient.parser.spec;
import org.jboss.metadata.appclient.spec.AppClientEnvironmentRefsGroupMetaData;
import org.jboss.metadata.appclient.spec.ApplicationClientMetaData;
import org.jboss.metadata.javaee.spec.DescriptionGroupMetaData;
import org.jboss.metadata.parser.ee.DescriptionGroupMetaDataParser;
import org.jboss.metadata.parser.ee.EnvironmentRefsGroupMetaDataParser;
import org.jboss.metadata.parser.util.MetaDataElementParser;
import javax.xml.stream.XMLStreamException;
import javax.xml.stream.XMLStreamReader;
/**
* Parses an application-client.xml file and creates metadata out of it
* <p/>
* @author Stuart Douglas
*/
public class ApplicationClientMetaDataParser extends MetaDataElementParser
{
// Shared stateless instance.
public static final ApplicationClientMetaDataParser INSTANCE = new ApplicationClientMetaDataParser();
/**
 * Parses an application-client.xml document and returns the resulting metadata.
 *
 * @param reader reader positioned at the start of the document
 * @return the populated metadata
 * @throws XMLStreamException on malformed input
 */
public ApplicationClientMetaData parse(XMLStreamReader reader) throws XMLStreamException
{
reader.require(START_DOCUMENT, null, null);
// Read until the first start element (skips whitespace, comments, PIs).
while (reader.hasNext() && reader.next() != START_ELEMENT)
{
}
ApplicationClientMetaData appClientMetadata = new ApplicationClientMetaData();
processAttributes(appClientMetadata, reader);
// parse and create metadata out of the elements under the root application-client element
processElements(appClientMetadata, reader);
return appClientMetadata;
}
/**
 * Handles a single attribute of the root element (id, version,
 * metadata-complete); anything else is rejected.
 *
 * @throws XMLStreamException if the attribute is not recognized
 */
protected void processAttribute(ApplicationClientMetaData metaData, XMLStreamReader reader, int i) throws XMLStreamException
{
final String value = reader.getAttributeValue(i);
if (attributeHasNamespace(reader, i))
{
return;
}
final ApplicationClientAttribute ejbJarAttribute = ApplicationClientAttribute.forName(reader.getAttributeLocalName(i));
switch (ejbJarAttribute)
{
case ID:
{
metaData.setId(value);
break;
}
case VERSION:
{
metaData.setVersion(value);
break;
}
case METADATA_COMPLETE:
{
metaData.setMetadataComplete(Boolean.parseBoolean(value));
break;
}
default:
throw unexpectedAttribute(reader, i);
}
}
// Applies processAttribute to every attribute of the root element.
protected void processAttributes(final ApplicationClientMetaData applicationClientMetaData, XMLStreamReader reader) throws XMLStreamException
{
// Handle attributes and set them in the metadata
final int count = reader.getAttributeCount();
for (int i = 0; i < count; i++)
{
processAttribute(applicationClientMetaData, reader, i);
}
}
/**
 * Parses the child elements of the root element, delegating shared
 * description-group and environment-ref groups to their dedicated parsers.
 *
 * @throws XMLStreamException on an unrecognized child element
 */
protected void processElements(final ApplicationClientMetaData applicationClientMetaData, XMLStreamReader reader) throws XMLStreamException
{
final DescriptionGroupMetaData descriptionGroup = new DescriptionGroupMetaData();
final AppClientEnvironmentRefsGroupMetaData environmentRefsGroupMetaData = new AppClientEnvironmentRefsGroupMetaData();
// Handle elements
while (reader.hasNext() && reader.nextTag() != END_ELEMENT) {
if (DescriptionGroupMetaDataParser.parse(reader, descriptionGroup)) {
continue;
}
if (EnvironmentRefsGroupMetaDataParser.parseRemote(reader, environmentRefsGroupMetaData)) {
continue;
}
final AppClientElement element = AppClientElement.forName(reader.getLocalName());
switch (element) {
case CALLBACK_HANDLER: {
applicationClientMetaData.setCallbackHandler(getElementText(reader));
break;
}
default:
throw unexpectedElement(reader);
}
}
applicationClientMetaData.setDescriptionGroup(descriptionGroup);
applicationClientMetaData.setEnvironmentRefsGroupMetaData(environmentRefsGroupMetaData);
}
}
@@ -4,11 +4,15 @@ package edu.lu.uni.serval;
import edu.lu.uni.serval.richedit.ediff.EDiffHunkParser;
import edu.lu.uni.serval.richedit.ediff.HierarchicalActionSet;
import edu.lu.uni.serval.utils.CallShell;
import org.yaml.snakeyaml.Yaml;
import java.io.File;
import java.io.FileInputStream;
import java.io.FileNotFoundException;
import java.io.IOException;
import java.util.List;
import java.util.Map;
import java.util.Properties;
public class BaseTest {
@@ -46,4 +50,53 @@ public class BaseTest {
}
}
/**
 * Parses the prev/rev fixture pair identified by {@code s}
 * ("&lt;project&gt;_&lt;#-encoded-filename&gt;") with GumTree and returns the
 * resulting hierarchical action sets.
 *
 * <p>When a fixture file is missing locally (signalled by a
 * NullPointerException from the parser), the file is copied from the configured
 * dataset input path into the local test tree and {@code null} is returned so
 * the test can be re-run.
 *
 * <p>Fixes: removed the unused {@code Properties} local and the dead
 * commented-out code; the configuration stream is now closed via
 * try-with-resources.
 *
 * @param s project-prefixed, '#'-encoded name of the revised file
 * @return parsed action sets, or null when the fixture had to be fetched first
 * @throws IOException if the YAML configuration cannot be read
 */
public List<HierarchicalActionSet> getHierarchicalActionSets4java(String s) throws IOException {
    // Resolve tool and dataset paths from the shared YAML configuration.
    Yaml yaml = new Yaml();
    Map<String, Object> obj;
    try (FileInputStream config = new FileInputStream("src/main/resource/config.yml")) {
        obj = yaml.load(config);
    }
    Map<String, Object> fixminer = (Map<String, Object>) obj.get("fixminer");
    Map<String, Object> dataset = (Map<String, Object>) obj.get("dataset");
    String srcMLPath = (String) fixminer.get("srcMLPath");
    String inputPath = (String) dataset.get("inputPath");
    String root = "src/main/resource/testFiles";
    String project = s.split("_")[0];
    root = root + "/" + project + "/";
    String filename = s.replace(project + "_", "");
    try {
        File revFile = new File(root + "revFiles/" + filename);
        File prevFile = new File(root + "prevFiles/prev_" + filename);
        EDiffHunkParser parser = new EDiffHunkParser();
        return parser.parseChangedSourceCodeWithGumTree2(prevFile, revFile, srcMLPath, true);
    } catch (NullPointerException n) {
        // NOTE(review): the NPE message is used as a "fixture missing" signal
        // carrying the missing path — fetch the file from the dataset so the
        // caller can retry. Consider a dedicated exception type instead.
        String cmd1 = "mkdir -p " + System.getProperty("user.dir") + "/" + root + n.getMessage().split(root)[1].split("/")[0];
        String cmd = "cp " + inputPath + "/" + project + "/" + n.getMessage().split(root)[1] + " " + System.getProperty("user.dir") + "/" + n.getMessage();
        CallShell cs = new CallShell();
        try {
            cs.runShell(cmd1);
            cs.runShell(cmd);
        } catch (Exception e) {
            e.printStackTrace();
        }
        return null;
    }
}
}
@@ -0,0 +1,842 @@
package edu.lu.uni.serval;
import edu.lu.uni.serval.richedit.ediff.HierarchicalActionSet;
import org.junit.Assert;
import org.junit.Ignore;
import org.junit.Test;
import java.io.IOException;
import java.util.List;
public class TestJavaMiner extends BaseTest {
// Fixture: commons-codec_55a865_ecec1c_src#main#java#org#apache#commons#codec#StringEncoderComparator.java.txt_0
// NOTE(review): the method name references a gzip commit but the fixture is
// commons-codec — confirm the intended name.
@Test
public void test_gzip_051ed8_8b83dc() throws IOException {
// Regression case that previously triggered a null pointer in the miner;
// expects a single UPD action adding a @SuppressWarnings annotation.
List<HierarchicalActionSet> hierarchicalActionSets = getHierarchicalActionSets4java("commons-codec_55a865_ecec1c_src#main#java#org#apache#commons#codec#StringEncoderComparator.java");
Assert.assertEquals(hierarchicalActionSets.size(),1);
Assert.assertEquals(hierarchicalActionSets.get(0).toString(),"UPD TypeDeclaration@@[public]StringEncoderComparator, [Comparator] @TO@ [@SuppressWarnings(\"rawtypes\"), public]StringEncoderComparator, [Comparator] @AT@ 1206 @LENGTH@ 1756\n" +
"---INS SingleMemberAnnotation@@@SuppressWarnings(\"rawtypes\") @TO@ TypeDeclaration@@[public]StringEncoderComparator, [Comparator] @AT@ 1206 @LENGTH@ 29\n");
}
// Fixture: commons-configuration TestHierarchicalConfigurationXMLReader.java (b24b7b -> 3ac9d8).
@Test
public void test_commons_configuration_b24b7b_3ac9d8() throws IOException {
// Regression case that previously triggered a null pointer in the miner;
// expects one INS action wrapping a transform call in a try/catch.
List<HierarchicalActionSet> hierarchicalActionSets = getHierarchicalActionSets4java("commons-configuration_b24b7b_3ac9d8_src#test#org#apache#commons#configuration#TestHierarchicalConfigurationXMLReader.java");
Assert.assertEquals(hierarchicalActionSets.size(),1);
Assert.assertEquals(hierarchicalActionSets.get(0).toString(),"INS TryStatement@@try { trans.transform(source,result);} catch (NoSuchMethodError ex) { return;} @TO@ MethodDeclaration@@public, void, MethodName:testParse, Exception, @AT@ 1804 @LENGTH@ 284\n" +
"---MOV ExpressionStatement@@MethodInvocation:trans.transform(source,result) @TO@ TryStatement@@try { trans.transform(source,result);} catch (NoSuchMethodError ex) { return;} @AT@ 1804 @LENGTH@ 32\n" +
"---INS CatchClause@@catch (NoSuchMethodError ex) { return;} @TO@ TryStatement@@try { trans.transform(source,result);} catch (NoSuchMethodError ex) { return;} @AT@ 2021 @LENGTH@ 67\n" +
"------INS SingleVariableDeclaration@@NoSuchMethodError ex @TO@ CatchClause@@catch (NoSuchMethodError ex) { return;} @AT@ 2027 @LENGTH@ 20\n" +
"---------INS SimpleType@@NoSuchMethodError @TO@ SingleVariableDeclaration@@NoSuchMethodError ex @AT@ 2027 @LENGTH@ 17\n" +
"---------INS SimpleName@@ex @TO@ SingleVariableDeclaration@@NoSuchMethodError ex @AT@ 2045 @LENGTH@ 2\n" +
"------INS ReturnStatement@@ @TO@ CatchClause@@catch (NoSuchMethodError ex) { return;} @AT@ 2071 @LENGTH@ 7\n");
}
//commons-collections_05b6c7_eedfe8_src#java#org#apache#commons#collections#BeanMap.java
@Test
public void test_commons_collections_05b6c7_eedfe8() throws IOException {
//null pointer
List<HierarchicalActionSet> hierarchicalActionSets = getHierarchicalActionSets4java("commons-collections_05b6c7_eedfe8_src#java#org#apache#commons#collections#BeanMap.java");
Assert.assertEquals(hierarchicalActionSets.size(),1);
Assert.assertEquals(hierarchicalActionSets.get(0).toString(),"UPD ReturnStatement@@MethodInvocation:get((String)key) @TO@ MethodInvocation:get(key) @AT@ 18236 @LENGTH@ 27\n" +
"---UPD MethodInvocation@@get((String)key) @TO@ get(key) @AT@ 18243 @LENGTH@ 19\n" +
"------UPD SimpleName@@MethodName:get:[(String)key] @TO@ MethodName:get:[key] @AT@ 18243 @LENGTH@ 19\n" +
"---------INS SimpleName@@key @TO@ SimpleName@@MethodName:get:[(String)key] @AT@ 18247 @LENGTH@ 3\n" +
"---------DEL CastExpression@@(String)key @AT@ 18248 @LENGTH@ 12\n" +
"------------DEL SimpleType@@String @AT@ 18249 @LENGTH@ 6\n" +
"------------DEL SimpleName@@key @AT@ 18257 @LENGTH@ 3\n");
}
//commons-collections_906468_31bc59_src#java#org#apache#commons#collections#SequencedHashMap.java
@Test
public void test_commons_collections_906468_31bc59() throws IOException {
//null pointer
List<HierarchicalActionSet> hierarchicalActionSets = getHierarchicalActionSets4java("commons-collections_906468_31bc59_src#java#org#apache#commons#collections#SequencedHashMap.java");
Assert.assertEquals(hierarchicalActionSets.size(),1);
Assert.assertEquals(hierarchicalActionSets.get(0).toString(),"UPD IfStatement@@if (entry.equals(e)) return entry; else return null; @TO@ if (entry != null && entry.equals(e)) return entry; else return null; @AT@ 21117 @LENGTH@ 59\n" +
"---INS InfixExpression@@entry != null && entry.equals(e) @TO@ IfStatement@@if (entry.equals(e)) return entry; else return null; @AT@ 21116 @LENGTH@ 32\n" +
"------INS InfixExpression@@entry != null @TO@ InfixExpression@@entry != null && entry.equals(e) @AT@ 21116 @LENGTH@ 13\n" +
"---------INS SimpleName@@entry @TO@ InfixExpression@@entry != null @AT@ 21116 @LENGTH@ 5\n" +
"---------INS Operator@@!= @TO@ InfixExpression@@entry != null @AT@ 21121 @LENGTH@ 2\n" +
"---------INS NullLiteral@@null @TO@ InfixExpression@@entry != null @AT@ 21125 @LENGTH@ 4\n" +
"------MOV MethodInvocation@@entry.equals(e) @TO@ InfixExpression@@entry != null && entry.equals(e) @AT@ 21120 @LENGTH@ 15\n" +
"------INS Operator@@&& @TO@ InfixExpression@@entry != null && entry.equals(e) @AT@ 21129 @LENGTH@ 2\n");
}
//commons-compress_5273bd_c25c8d_src#main#java#org#apache#commons#compress#archivers#sevenz#Coders.java
@Test
public void test_commons_compress_5273bd_c25c8d() throws IOException {
//null pointer
List<HierarchicalActionSet> hierarchicalActionSets = getHierarchicalActionSets4java("commons-compress_5273bd_c25c8d_src#main#java#org#apache#commons#compress#archivers#sevenz#Coders.java");
Assert.assertEquals(hierarchicalActionSets.size(),1);
Assert.assertEquals(hierarchicalActionSets.get(0).toString(),"UPD ReturnStatement@@MethodInvocation:init().read() @TO@ MethodInvocation:init().read(b,off,len) @AT@ 9806 @LENGTH@ 21\n" +
"---UPD MethodInvocation@@init().read() @TO@ init().read(b,off,len) @AT@ 9813 @LENGTH@ 13\n" +
"------UPD SimpleName@@MethodName:read:[] @TO@ MethodName:read:[b, off, len] @AT@ 9820 @LENGTH@ 6\n" +
"---------INS SimpleName@@b @TO@ SimpleName@@MethodName:read:[] @AT@ 9825 @LENGTH@ 1\n" +
"---------INS SimpleName@@off @TO@ SimpleName@@MethodName:read:[] @AT@ 9828 @LENGTH@ 3\n" +
"---------INS SimpleName@@len @TO@ SimpleName@@MethodName:read:[] @AT@ 9833 @LENGTH@ 3\n");
}
//commons-compress_d8ca98_2ed556_src#main#java#org#apache#commons#compress#archivers#sevenz#AES256SHA256Decoder.java
@Test
public void test_commons_compress_d8ca98_2ed556() throws IOException {
//null pointer
List<HierarchicalActionSet> hierarchicalActionSets = getHierarchicalActionSets4java("commons-compress_d8ca98_2ed556_src#main#java#org#apache#commons#compress#archivers#sevenz#AES256SHA256Decoder.java");
Assert.assertEquals(hierarchicalActionSets.size(),1);
Assert.assertEquals(hierarchicalActionSets.get(0).toString(),"UPD MethodDeclaration@@public, void, MethodName:close, @TO@ public, void, MethodName:close, IOException, @AT@ 5337 @LENGTH@ 35\n" +
"---INS SimpleType@@IOException @TO@ MethodDeclaration@@public, void, MethodName:close, @AT@ 5364 @LENGTH@ 11\n" +
"---INS IfStatement@@if (cipherInputStream != null) { cipherInputStream.close();} @TO@ MethodDeclaration@@public, void, MethodName:close, @AT@ 5394 @LENGTH@ 97\n" +
"------INS InfixExpression@@cipherInputStream != null @TO@ IfStatement@@if (cipherInputStream != null) { cipherInputStream.close();} @AT@ 5398 @LENGTH@ 25\n" +
"---------INS SimpleName@@cipherInputStream @TO@ InfixExpression@@cipherInputStream != null @AT@ 5398 @LENGTH@ 17\n" +
"---------INS Operator@@!= @TO@ InfixExpression@@cipherInputStream != null @AT@ 5415 @LENGTH@ 2\n" +
"---------INS NullLiteral@@null @TO@ InfixExpression@@cipherInputStream != null @AT@ 5419 @LENGTH@ 4\n" +
"------INS Block@@ThenBody:{ cipherInputStream.close();} @TO@ IfStatement@@if (cipherInputStream != null) { cipherInputStream.close();} @AT@ 5425 @LENGTH@ 66\n" +
"---------INS ExpressionStatement@@MethodInvocation:cipherInputStream.close() @TO@ Block@@ThenBody:{ cipherInputStream.close();} @AT@ 5447 @LENGTH@ 26\n" +
"------------INS MethodInvocation@@cipherInputStream.close() @TO@ ExpressionStatement@@MethodInvocation:cipherInputStream.close() @AT@ 5447 @LENGTH@ 25\n" +
"---------------INS SimpleName@@Name:cipherInputStream @TO@ MethodInvocation@@cipherInputStream.close() @AT@ 5447 @LENGTH@ 17\n" +
"---------------INS SimpleName@@MethodName:close:[] @TO@ MethodInvocation@@cipherInputStream.close() @AT@ 5465 @LENGTH@ 7\n");
}
//fuse_2006f48_4dad2f_insight#insight-camel#src#main#java#org#fusesource#insight#camel#audit#ScriptUtils.java
@Test
public void test_fuse_2006f48_4dad2f() throws IOException {
//null pointer
List<HierarchicalActionSet> hierarchicalActionSets = getHierarchicalActionSets4java("fuse_2006f48_4dad2f_insight#insight-camel#src#main#java#org#fusesource#insight#camel#audit#ScriptUtils.java");
Assert.assertEquals(hierarchicalActionSets.size(),1);
Assert.assertEquals(hierarchicalActionSets.get(0).toString(),"UPD TryStatement@@try { return mapper.writeValueAsString(o);} catch (Exception e) { throw new IllegalArgumentException(\"Could not serialize \" + o,e);} @TO@ try { if (o instanceof Collection) { StringBuilder sb=new StringBuilder(); sb.append(\"[\"); for ( Object c : (Collection)o) { if (sb.length() > 1) { sb.append(\",\"); } sb.append(toJson(c)); } sb.append(\"]\"); return sb.toString(); } else if (o instanceof Map) { StringBuilder sb=new StringBuilder(); sb.append(\"{\"); for ( Map.Entry<Object,Object> e : ((Map<Object,Object>)o).entrySet()) { if (sb.length() > 1) { sb.append(\",\"); } sb.append(toJson(e.getKey().toString())); sb.append(\":\"); sb.append(toJson(e.getValue())); } sb.append(\"}\"); return sb.toString(); } else if (o == null) { return \"null\"; } else if (o instanceof Date) { return \"\\\"\" + toIso((Date)o) + \"\\\"\"; } else { return mapper.writeValueAsString(o.toString()); }} catch (Exception e) { throw new IllegalArgumentException(\"Could not serialize \" + o,e);} @AT@ 1282 @LENGTH@ 175\n" +
"---DEL ReturnStatement@@MethodInvocation:mapper.writeValueAsString(o) @AT@ 1300 @LENGTH@ 36\n" +
"---INS IfStatement@@if (o instanceof Collection) { StringBuilder sb=new StringBuilder(); sb.append(\"[\"); for ( Object c : (Collection)o) { if (sb.length() > 1) { sb.append(\",\"); } sb.append(toJson(c)); } sb.append(\"]\"); return sb.toString();} else if (o instanceof Map) { StringBuilder sb=new StringBuilder(); sb.append(\"{\"); for ( Map.Entry<Object,Object> e : ((Map<Object,Object>)o).entrySet()) { if (sb.length() > 1) { sb.append(\",\"); } sb.append(toJson(e.getKey().toString())); sb.append(\":\"); sb.append(toJson(e.getValue())); } sb.append(\"}\"); return sb.toString();} else if (o == null) { return \"null\";} else if (o instanceof Date) { return \"\\\"\" + toIso((Date)o) + \"\\\"\";} else { return mapper.writeValueAsString(o.toString());} @TO@ TryStatement@@try { return mapper.writeValueAsString(o);} catch (Exception e) { throw new IllegalArgumentException(\"Could not serialize \" + o,e);} @AT@ 1431 @LENGTH@ 1233\n" +
"------INS InstanceofExpression@@o instanceof Collection @TO@ IfStatement@@if (o instanceof Collection) { StringBuilder sb=new StringBuilder(); sb.append(\"[\"); for ( Object c : (Collection)o) { if (sb.length() > 1) { sb.append(\",\"); } sb.append(toJson(c)); } sb.append(\"]\"); return sb.toString();} else if (o instanceof Map) { StringBuilder sb=new StringBuilder(); sb.append(\"{\"); for ( Map.Entry<Object,Object> e : ((Map<Object,Object>)o).entrySet()) { if (sb.length() > 1) { sb.append(\",\"); } sb.append(toJson(e.getKey().toString())); sb.append(\":\"); sb.append(toJson(e.getValue())); } sb.append(\"}\"); return sb.toString();} else if (o == null) { return \"null\";} else if (o instanceof Date) { return \"\\\"\" + toIso((Date)o) + \"\\\"\";} else { return mapper.writeValueAsString(o.toString());} @AT@ 1435 @LENGTH@ 23\n" +
"---------INS SimpleName@@o @TO@ InstanceofExpression@@o instanceof Collection @AT@ 1435 @LENGTH@ 1\n" +
"---------INS Instanceof@@instanceof @TO@ InstanceofExpression@@o instanceof Collection @AT@ 1437 @LENGTH@ 10\n" +
"---------INS SimpleType@@Collection @TO@ InstanceofExpression@@o instanceof Collection @AT@ 1448 @LENGTH@ 10\n" +
"------INS Block@@ThenBody:{ StringBuilder sb=new StringBuilder(); sb.append(\"[\"); for ( Object c : (Collection)o) { if (sb.length() > 1) { sb.append(\",\"); } sb.append(toJson(c)); } sb.append(\"]\"); return sb.toString();} @TO@ IfStatement@@if (o instanceof Collection) { StringBuilder sb=new StringBuilder(); sb.append(\"[\"); for ( Object c : (Collection)o) { if (sb.length() > 1) { sb.append(\",\"); } sb.append(toJson(c)); } sb.append(\"]\"); return sb.toString();} else if (o instanceof Map) { StringBuilder sb=new StringBuilder(); sb.append(\"{\"); for ( Map.Entry<Object,Object> e : ((Map<Object,Object>)o).entrySet()) { if (sb.length() > 1) { sb.append(\",\"); } sb.append(toJson(e.getKey().toString())); sb.append(\":\"); sb.append(toJson(e.getValue())); } sb.append(\"}\"); return sb.toString();} else if (o == null) { return \"null\";} else if (o instanceof Date) { return \"\\\"\" + toIso((Date)o) + \"\\\"\";} else { return mapper.writeValueAsString(o.toString());} @AT@ 1460 @LENGTH@ 388\n" +
"---------INS VariableDeclarationStatement@@StringBuilder sb=new StringBuilder(); @TO@ Block@@ThenBody:{ StringBuilder sb=new StringBuilder(); sb.append(\"[\"); for ( Object c : (Collection)o) { if (sb.length() > 1) { sb.append(\",\"); } sb.append(toJson(c)); } sb.append(\"]\"); return sb.toString();} @AT@ 1478 @LENGTH@ 39\n" +
"------------INS SimpleType@@StringBuilder @TO@ VariableDeclarationStatement@@StringBuilder sb=new StringBuilder(); @AT@ 1478 @LENGTH@ 13\n" +
"------------INS VariableDeclarationFragment@@sb=new StringBuilder() @TO@ VariableDeclarationStatement@@StringBuilder sb=new StringBuilder(); @AT@ 1492 @LENGTH@ 24\n" +
"---------------INS SimpleName@@sb @TO@ VariableDeclarationFragment@@sb=new StringBuilder() @AT@ 1492 @LENGTH@ 2\n" +
"---------------INS ClassInstanceCreation@@StringBuilder[] @TO@ VariableDeclarationFragment@@sb=new StringBuilder() @AT@ 1497 @LENGTH@ 19\n" +
"------------------INS New@@new @TO@ ClassInstanceCreation@@StringBuilder[] @AT@ 1497 @LENGTH@ 3\n" +
"------------------INS SimpleType@@StringBuilder @TO@ ClassInstanceCreation@@StringBuilder[] @AT@ 1501 @LENGTH@ 13\n" +
"---------INS ExpressionStatement@@MethodInvocation:sb.append(\"[\") @TO@ Block@@ThenBody:{ StringBuilder sb=new StringBuilder(); sb.append(\"[\"); for ( Object c : (Collection)o) { if (sb.length() > 1) { sb.append(\",\"); } sb.append(toJson(c)); } sb.append(\"]\"); return sb.toString();} @AT@ 1534 @LENGTH@ 15\n" +
"------------INS MethodInvocation@@sb.append(\"[\") @TO@ ExpressionStatement@@MethodInvocation:sb.append(\"[\") @AT@ 1534 @LENGTH@ 14\n" +
"---------------INS SimpleName@@Name:sb @TO@ MethodInvocation@@sb.append(\"[\") @AT@ 1534 @LENGTH@ 2\n" +
"---------------INS SimpleName@@MethodName:append:[\"[\"] @TO@ MethodInvocation@@sb.append(\"[\") @AT@ 1537 @LENGTH@ 11\n" +
"------------------INS StringLiteral@@\"[\" @TO@ SimpleName@@MethodName:append:[\"[\"] @AT@ 1544 @LENGTH@ 3\n" +
"---------INS EnhancedForStatement@@for (Object c : (Collection)o) { if (sb.length() > 1) { sb.append(\",\"); } sb.append(toJson(c));} @TO@ Block@@ThenBody:{ StringBuilder sb=new StringBuilder(); sb.append(\"[\"); for ( Object c : (Collection)o) { if (sb.length() > 1) { sb.append(\",\"); } sb.append(toJson(c)); } sb.append(\"]\"); return sb.toString();} @AT@ 1566 @LENGTH@ 198\n" +
"------------INS SingleVariableDeclaration@@Object c @TO@ EnhancedForStatement@@for (Object c : (Collection)o) { if (sb.length() > 1) { sb.append(\",\"); } sb.append(toJson(c));} @AT@ 1571 @LENGTH@ 8\n" +
"---------------INS SimpleType@@Object @TO@ SingleVariableDeclaration@@Object c @AT@ 1571 @LENGTH@ 6\n" +
"---------------INS SimpleName@@c @TO@ SingleVariableDeclaration@@Object c @AT@ 1578 @LENGTH@ 1\n" +
"------------INS CastExpression@@(Collection)o @TO@ EnhancedForStatement@@for (Object c : (Collection)o) { if (sb.length() > 1) { sb.append(\",\"); } sb.append(toJson(c));} @AT@ 1582 @LENGTH@ 14\n" +
"---------------INS SimpleType@@Collection @TO@ CastExpression@@(Collection)o @AT@ 1583 @LENGTH@ 10\n" +
"---------------INS SimpleName@@o @TO@ CastExpression@@(Collection)o @AT@ 1595 @LENGTH@ 1\n" +
"------------INS IfStatement@@if (sb.length() > 1) { sb.append(\",\");} @TO@ EnhancedForStatement@@for (Object c : (Collection)o) { if (sb.length() > 1) { sb.append(\",\"); } sb.append(toJson(c));} @AT@ 1620 @LENGTH@ 84\n" +
"---------------INS InfixExpression@@sb.length() > 1 @TO@ IfStatement@@if (sb.length() > 1) { sb.append(\",\");} @AT@ 1624 @LENGTH@ 15\n" +
"------------------INS MethodInvocation@@sb.length() @TO@ InfixExpression@@sb.length() > 1 @AT@ 1624 @LENGTH@ 11\n" +
"---------------------INS SimpleName@@Name:sb @TO@ MethodInvocation@@sb.length() @AT@ 1624 @LENGTH@ 2\n" +
"---------------------INS SimpleName@@MethodName:length:[] @TO@ MethodInvocation@@sb.length() @AT@ 1627 @LENGTH@ 8\n" +
"------------------INS Operator@@> @TO@ InfixExpression@@sb.length() > 1 @AT@ 1635 @LENGTH@ 1\n" +
"------------------INS NumberLiteral@@1 @TO@ InfixExpression@@sb.length() > 1 @AT@ 1638 @LENGTH@ 1\n" +
"---------------INS Block@@ThenBody:{ sb.append(\",\");} @TO@ IfStatement@@if (sb.length() > 1) { sb.append(\",\");} @AT@ 1641 @LENGTH@ 63\n" +
"------------------INS ExpressionStatement@@MethodInvocation:sb.append(\",\") @TO@ Block@@ThenBody:{ sb.append(\",\");} @AT@ 1667 @LENGTH@ 15\n" +
"---------------------INS MethodInvocation@@sb.append(\",\") @TO@ ExpressionStatement@@MethodInvocation:sb.append(\",\") @AT@ 1667 @LENGTH@ 14\n" +
"------------------------INS SimpleName@@Name:sb @TO@ MethodInvocation@@sb.append(\",\") @AT@ 1667 @LENGTH@ 2\n" +
"------------------------INS SimpleName@@MethodName:append:[\",\"] @TO@ MethodInvocation@@sb.append(\",\") @AT@ 1670 @LENGTH@ 11\n" +
"---------------------------INS StringLiteral@@\",\" @TO@ SimpleName@@MethodName:append:[\",\"] @AT@ 1677 @LENGTH@ 3\n" +
"------------INS ExpressionStatement@@MethodInvocation:sb.append(toJson(c)) @TO@ EnhancedForStatement@@for (Object c : (Collection)o) { if (sb.length() > 1) { sb.append(\",\"); } sb.append(toJson(c));} @AT@ 1725 @LENGTH@ 21\n" +
"---------------INS MethodInvocation@@sb.append(toJson(c)) @TO@ ExpressionStatement@@MethodInvocation:sb.append(toJson(c)) @AT@ 1725 @LENGTH@ 20\n" +
"------------------INS SimpleName@@Name:sb @TO@ MethodInvocation@@sb.append(toJson(c)) @AT@ 1725 @LENGTH@ 2\n" +
"------------------INS SimpleName@@MethodName:append:[toJson(c)] @TO@ MethodInvocation@@sb.append(toJson(c)) @AT@ 1728 @LENGTH@ 17\n" +
"---------------------INS MethodInvocation@@toJson(c) @TO@ SimpleName@@MethodName:append:[toJson(c)] @AT@ 1735 @LENGTH@ 9\n" +
"------------------------INS SimpleName@@MethodName:toJson:[c] @TO@ MethodInvocation@@toJson(c) @AT@ 1735 @LENGTH@ 9\n" +
"---------------------------INS SimpleName@@c @TO@ SimpleName@@MethodName:toJson:[c] @AT@ 1742 @LENGTH@ 1\n" +
"---------INS ExpressionStatement@@MethodInvocation:sb.append(\"]\") @TO@ Block@@ThenBody:{ StringBuilder sb=new StringBuilder(); sb.append(\"[\"); for ( Object c : (Collection)o) { if (sb.length() > 1) { sb.append(\",\"); } sb.append(toJson(c)); } sb.append(\"]\"); return sb.toString();} @AT@ 1781 @LENGTH@ 15\n" +
"------------INS MethodInvocation@@sb.append(\"]\") @TO@ ExpressionStatement@@MethodInvocation:sb.append(\"]\") @AT@ 1781 @LENGTH@ 14\n" +
"---------------INS SimpleName@@Name:sb @TO@ MethodInvocation@@sb.append(\"]\") @AT@ 1781 @LENGTH@ 2\n" +
"---------------INS SimpleName@@MethodName:append:[\"]\"] @TO@ MethodInvocation@@sb.append(\"]\") @AT@ 1784 @LENGTH@ 11\n" +
"------------------INS StringLiteral@@\"]\" @TO@ SimpleName@@MethodName:append:[\"]\"] @AT@ 1791 @LENGTH@ 3\n" +
"---------INS ReturnStatement@@MethodInvocation:sb.toString() @TO@ Block@@ThenBody:{ StringBuilder sb=new StringBuilder(); sb.append(\"[\"); for ( Object c : (Collection)o) { if (sb.length() > 1) { sb.append(\",\"); } sb.append(toJson(c)); } sb.append(\"]\"); return sb.toString();} @AT@ 1813 @LENGTH@ 21\n" +
"------------INS MethodInvocation@@sb.toString() @TO@ ReturnStatement@@MethodInvocation:sb.toString() @AT@ 1820 @LENGTH@ 13\n" +
"---------------INS SimpleName@@Name:sb @TO@ MethodInvocation@@sb.toString() @AT@ 1820 @LENGTH@ 2\n" +
"---------------INS SimpleName@@MethodName:toString:[] @TO@ MethodInvocation@@sb.toString() @AT@ 1823 @LENGTH@ 10\n" +
"------INS Block@@ElseBody:if (o instanceof Map) { StringBuilder sb=new StringBuilder(); sb.append(\"{\"); for ( Map.Entry<Object,Object> e : ((Map<Object,Object>)o).entrySet()) { if (sb.length() > 1) { sb.append(\",\"); } sb.append(toJson(e.getKey().toString())); sb.append(\":\"); sb.append(toJson(e.getValue())); } sb.append(\"}\"); return sb.toString();} else if (o == null) { return \"null\";} else if (o instanceof Date) { return \"\\\"\" + toIso((Date)o) + \"\\\"\";} else { return mapper.writeValueAsString(o.toString());} @TO@ IfStatement@@if (o instanceof Collection) { StringBuilder sb=new StringBuilder(); sb.append(\"[\"); for ( Object c : (Collection)o) { if (sb.length() > 1) { sb.append(\",\"); } sb.append(toJson(c)); } sb.append(\"]\"); return sb.toString();} else if (o instanceof Map) { StringBuilder sb=new StringBuilder(); sb.append(\"{\"); for ( Map.Entry<Object,Object> e : ((Map<Object,Object>)o).entrySet()) { if (sb.length() > 1) { sb.append(\",\"); } sb.append(toJson(e.getKey().toString())); sb.append(\":\"); sb.append(toJson(e.getValue())); } sb.append(\"}\"); return sb.toString();} else if (o == null) { return \"null\";} else if (o instanceof Date) { return \"\\\"\" + toIso((Date)o) + \"\\\"\";} else { return mapper.writeValueAsString(o.toString());} @AT@ 1854 @LENGTH@ 810\n" +
"---------INS IfStatement@@if (o instanceof Map) { StringBuilder sb=new StringBuilder(); sb.append(\"{\"); for ( Map.Entry<Object,Object> e : ((Map<Object,Object>)o).entrySet()) { if (sb.length() > 1) { sb.append(\",\"); } sb.append(toJson(e.getKey().toString())); sb.append(\":\"); sb.append(toJson(e.getValue())); } sb.append(\"}\"); return sb.toString();} else if (o == null) { return \"null\";} else if (o instanceof Date) { return \"\\\"\" + toIso((Date)o) + \"\\\"\";} else { return mapper.writeValueAsString(o.toString());} @TO@ Block@@ElseBody:if (o instanceof Map) { StringBuilder sb=new StringBuilder(); sb.append(\"{\"); for ( Map.Entry<Object,Object> e : ((Map<Object,Object>)o).entrySet()) { if (sb.length() > 1) { sb.append(\",\"); } sb.append(toJson(e.getKey().toString())); sb.append(\":\"); sb.append(toJson(e.getValue())); } sb.append(\"}\"); return sb.toString();} else if (o == null) { return \"null\";} else if (o instanceof Date) { return \"\\\"\" + toIso((Date)o) + \"\\\"\";} else { return mapper.writeValueAsString(o.toString());} @AT@ 1854 @LENGTH@ 810\n" +
"------------INS InstanceofExpression@@o instanceof Map @TO@ IfStatement@@if (o instanceof Map) { StringBuilder sb=new StringBuilder(); sb.append(\"{\"); for ( Map.Entry<Object,Object> e : ((Map<Object,Object>)o).entrySet()) { if (sb.length() > 1) { sb.append(\",\"); } sb.append(toJson(e.getKey().toString())); sb.append(\":\"); sb.append(toJson(e.getValue())); } sb.append(\"}\"); return sb.toString();} else if (o == null) { return \"null\";} else if (o instanceof Date) { return \"\\\"\" + toIso((Date)o) + \"\\\"\";} else { return mapper.writeValueAsString(o.toString());} @AT@ 1858 @LENGTH@ 16\n" +
"---------------INS SimpleName@@o @TO@ InstanceofExpression@@o instanceof Map @AT@ 1858 @LENGTH@ 1\n" +
"---------------INS Instanceof@@instanceof @TO@ InstanceofExpression@@o instanceof Map @AT@ 1860 @LENGTH@ 10\n" +
"---------------INS SimpleType@@Map @TO@ InstanceofExpression@@o instanceof Map @AT@ 1871 @LENGTH@ 3\n" +
"------------INS Block@@ThenBody:{ StringBuilder sb=new StringBuilder(); sb.append(\"{\"); for ( Map.Entry<Object,Object> e : ((Map<Object,Object>)o).entrySet()) { if (sb.length() > 1) { sb.append(\",\"); } sb.append(toJson(e.getKey().toString())); sb.append(\":\"); sb.append(toJson(e.getValue())); } sb.append(\"}\"); return sb.toString();} @TO@ IfStatement@@if (o instanceof Map) { StringBuilder sb=new StringBuilder(); sb.append(\"{\"); for ( Map.Entry<Object,Object> e : ((Map<Object,Object>)o).entrySet()) { if (sb.length() > 1) { sb.append(\",\"); } sb.append(toJson(e.getKey().toString())); sb.append(\":\"); sb.append(toJson(e.getValue())); } sb.append(\"}\"); return sb.toString();} else if (o == null) { return \"null\";} else if (o instanceof Date) { return \"\\\"\" + toIso((Date)o) + \"\\\"\";} else { return mapper.writeValueAsString(o.toString());} @AT@ 1876 @LENGTH@ 538\n" +
"---------------INS VariableDeclarationStatement@@StringBuilder sb=new StringBuilder(); @TO@ Block@@ThenBody:{ StringBuilder sb=new StringBuilder(); sb.append(\"{\"); for ( Map.Entry<Object,Object> e : ((Map<Object,Object>)o).entrySet()) { if (sb.length() > 1) { sb.append(\",\"); } sb.append(toJson(e.getKey().toString())); sb.append(\":\"); sb.append(toJson(e.getValue())); } sb.append(\"}\"); return sb.toString();} @AT@ 1894 @LENGTH@ 39\n" +
"------------------INS SimpleType@@StringBuilder @TO@ VariableDeclarationStatement@@StringBuilder sb=new StringBuilder(); @AT@ 1894 @LENGTH@ 13\n" +
"------------------INS VariableDeclarationFragment@@sb=new StringBuilder() @TO@ VariableDeclarationStatement@@StringBuilder sb=new StringBuilder(); @AT@ 1908 @LENGTH@ 24\n" +
"---------------------INS SimpleName@@sb @TO@ VariableDeclarationFragment@@sb=new StringBuilder() @AT@ 1908 @LENGTH@ 2\n" +
"---------------------INS ClassInstanceCreation@@StringBuilder[] @TO@ VariableDeclarationFragment@@sb=new StringBuilder() @AT@ 1913 @LENGTH@ 19\n" +
"------------------------INS New@@new @TO@ ClassInstanceCreation@@StringBuilder[] @AT@ 1913 @LENGTH@ 3\n" +
"------------------------INS SimpleType@@StringBuilder @TO@ ClassInstanceCreation@@StringBuilder[] @AT@ 1917 @LENGTH@ 13\n" +
"---------------INS ExpressionStatement@@MethodInvocation:sb.append(\"{\") @TO@ Block@@ThenBody:{ StringBuilder sb=new StringBuilder(); sb.append(\"{\"); for ( Map.Entry<Object,Object> e : ((Map<Object,Object>)o).entrySet()) { if (sb.length() > 1) { sb.append(\",\"); } sb.append(toJson(e.getKey().toString())); sb.append(\":\"); sb.append(toJson(e.getValue())); } sb.append(\"}\"); return sb.toString();} @AT@ 1950 @LENGTH@ 15\n" +
"------------------INS MethodInvocation@@sb.append(\"{\") @TO@ ExpressionStatement@@MethodInvocation:sb.append(\"{\") @AT@ 1950 @LENGTH@ 14\n" +
"---------------------INS SimpleName@@Name:sb @TO@ MethodInvocation@@sb.append(\"{\") @AT@ 1950 @LENGTH@ 2\n" +
"---------------------INS SimpleName@@MethodName:append:[\"{\"] @TO@ MethodInvocation@@sb.append(\"{\") @AT@ 1953 @LENGTH@ 11\n" +
"------------------------INS StringLiteral@@\"{\" @TO@ SimpleName@@MethodName:append:[\"{\"] @AT@ 1960 @LENGTH@ 3\n" +
"---------------INS EnhancedForStatement@@for (Map.Entry<Object,Object> e : ((Map<Object,Object>)o).entrySet()) { if (sb.length() > 1) { sb.append(\",\"); } sb.append(toJson(e.getKey().toString())); sb.append(\":\"); sb.append(toJson(e.getValue()));} @TO@ Block@@ThenBody:{ StringBuilder sb=new StringBuilder(); sb.append(\"{\"); for ( Map.Entry<Object,Object> e : ((Map<Object,Object>)o).entrySet()) { if (sb.length() > 1) { sb.append(\",\"); } sb.append(toJson(e.getKey().toString())); sb.append(\":\"); sb.append(toJson(e.getValue())); } sb.append(\"}\"); return sb.toString();} @AT@ 1982 @LENGTH@ 348\n" +
"------------------INS SingleVariableDeclaration@@Map.Entry<Object,Object> e @TO@ EnhancedForStatement@@for (Map.Entry<Object,Object> e : ((Map<Object,Object>)o).entrySet()) { if (sb.length() > 1) { sb.append(\",\"); } sb.append(toJson(e.getKey().toString())); sb.append(\":\"); sb.append(toJson(e.getValue()));} @AT@ 1987 @LENGTH@ 27\n" +
"---------------------INS ParameterizedType@@Map.Entry<Object,Object> @TO@ SingleVariableDeclaration@@Map.Entry<Object,Object> e @AT@ 1987 @LENGTH@ 25\n" +
"------------------------INS SimpleType@@Map.Entry @TO@ ParameterizedType@@Map.Entry<Object,Object> @AT@ 1987 @LENGTH@ 9\n" +
"------------------------INS SimpleType@@Object @TO@ ParameterizedType@@Map.Entry<Object,Object> @AT@ 1997 @LENGTH@ 6\n" +
"------------------------INS SimpleType@@Object @TO@ ParameterizedType@@Map.Entry<Object,Object> @AT@ 2005 @LENGTH@ 6\n" +
"---------------------INS SimpleName@@e @TO@ SingleVariableDeclaration@@Map.Entry<Object,Object> e @AT@ 2013 @LENGTH@ 1\n" +
"------------------INS MethodInvocation@@((Map<Object,Object>)o).entrySet() @TO@ EnhancedForStatement@@for (Map.Entry<Object,Object> e : ((Map<Object,Object>)o).entrySet()) { if (sb.length() > 1) { sb.append(\",\"); } sb.append(toJson(e.getKey().toString())); sb.append(\":\"); sb.append(toJson(e.getValue()));} @AT@ 2017 @LENGTH@ 36\n" +
"---------------------INS ParenthesizedExpression@@((Map<Object,Object>)o) @TO@ MethodInvocation@@((Map<Object,Object>)o).entrySet() @AT@ 2017 @LENGTH@ 25\n" +
"------------------------INS CastExpression@@(Map<Object,Object>)o @TO@ ParenthesizedExpression@@((Map<Object,Object>)o) @AT@ 2018 @LENGTH@ 23\n" +
"---------------------------INS ParameterizedType@@Map<Object,Object> @TO@ CastExpression@@(Map<Object,Object>)o @AT@ 2019 @LENGTH@ 19\n" +
"------------------------------INS SimpleType@@Map @TO@ ParameterizedType@@Map<Object,Object> @AT@ 2019 @LENGTH@ 3\n" +
"------------------------------INS SimpleType@@Object @TO@ ParameterizedType@@Map<Object,Object> @AT@ 2023 @LENGTH@ 6\n" +
"------------------------------INS SimpleType@@Object @TO@ ParameterizedType@@Map<Object,Object> @AT@ 2031 @LENGTH@ 6\n" +
"---------------------------INS SimpleName@@o @TO@ CastExpression@@(Map<Object,Object>)o @AT@ 2040 @LENGTH@ 1\n" +
"---------------------INS SimpleName@@MethodName:entrySet:[] @TO@ MethodInvocation@@((Map<Object,Object>)o).entrySet() @AT@ 2043 @LENGTH@ 10\n" +
"------------------INS IfStatement@@if (sb.length() > 1) { sb.append(\",\");} @TO@ EnhancedForStatement@@for (Map.Entry<Object,Object> e : ((Map<Object,Object>)o).entrySet()) { if (sb.length() > 1) { sb.append(\",\"); } sb.append(toJson(e.getKey().toString())); sb.append(\":\"); sb.append(toJson(e.getValue()));} @AT@ 2077 @LENGTH@ 84\n" +
"---------------------INS InfixExpression@@sb.length() > 1 @TO@ IfStatement@@if (sb.length() > 1) { sb.append(\",\");} @AT@ 2081 @LENGTH@ 15\n" +
"------------------------INS MethodInvocation@@sb.length() @TO@ InfixExpression@@sb.length() > 1 @AT@ 2081 @LENGTH@ 11\n" +
"---------------------------INS SimpleName@@Name:sb @TO@ MethodInvocation@@sb.length() @AT@ 2081 @LENGTH@ 2\n" +
"---------------------------INS SimpleName@@MethodName:length:[] @TO@ MethodInvocation@@sb.length() @AT@ 2084 @LENGTH@ 8\n" +
"------------------------INS Operator@@> @TO@ InfixExpression@@sb.length() > 1 @AT@ 2092 @LENGTH@ 1\n" +
"------------------------INS NumberLiteral@@1 @TO@ InfixExpression@@sb.length() > 1 @AT@ 2095 @LENGTH@ 1\n" +
"---------------------INS Block@@ThenBody:{ sb.append(\",\");} @TO@ IfStatement@@if (sb.length() > 1) { sb.append(\",\");} @AT@ 2098 @LENGTH@ 63\n" +
"------------------------INS ExpressionStatement@@MethodInvocation:sb.append(\",\") @TO@ Block@@ThenBody:{ sb.append(\",\");} @AT@ 2124 @LENGTH@ 15\n" +
"---------------------------INS MethodInvocation@@sb.append(\",\") @TO@ ExpressionStatement@@MethodInvocation:sb.append(\",\") @AT@ 2124 @LENGTH@ 14\n" +
"------------------------------INS SimpleName@@Name:sb @TO@ MethodInvocation@@sb.append(\",\") @AT@ 2124 @LENGTH@ 2\n" +
"------------------------------INS SimpleName@@MethodName:append:[\",\"] @TO@ MethodInvocation@@sb.append(\",\") @AT@ 2127 @LENGTH@ 11\n" +
"---------------------------------INS StringLiteral@@\",\" @TO@ SimpleName@@MethodName:append:[\",\"] @AT@ 2134 @LENGTH@ 3\n" +
"------------------INS ExpressionStatement@@MethodInvocation:sb.append(toJson(e.getKey().toString())) @TO@ EnhancedForStatement@@for (Map.Entry<Object,Object> e : ((Map<Object,Object>)o).entrySet()) { if (sb.length() > 1) { sb.append(\",\"); } sb.append(toJson(e.getKey().toString())); sb.append(\":\"); sb.append(toJson(e.getValue()));} @AT@ 2182 @LENGTH@ 41\n" +
"---------------------INS MethodInvocation@@sb.append(toJson(e.getKey().toString())) @TO@ ExpressionStatement@@MethodInvocation:sb.append(toJson(e.getKey().toString())) @AT@ 2182 @LENGTH@ 40\n" +
"------------------------INS SimpleName@@Name:sb @TO@ MethodInvocation@@sb.append(toJson(e.getKey().toString())) @AT@ 2182 @LENGTH@ 2\n" +
"------------------------INS SimpleName@@MethodName:append:[toJson(e.getKey().toString())] @TO@ MethodInvocation@@sb.append(toJson(e.getKey().toString())) @AT@ 2185 @LENGTH@ 37\n" +
"---------------------------INS MethodInvocation@@toJson(e.getKey().toString()) @TO@ SimpleName@@MethodName:append:[toJson(e.getKey().toString())] @AT@ 2192 @LENGTH@ 29\n" +
"------------------------------INS SimpleName@@MethodName:toJson:[e.getKey().toString()] @TO@ MethodInvocation@@toJson(e.getKey().toString()) @AT@ 2192 @LENGTH@ 29\n" +
"---------------------------------INS MethodInvocation@@e.getKey().toString() @TO@ SimpleName@@MethodName:toJson:[e.getKey().toString()] @AT@ 2199 @LENGTH@ 21\n" +
"------------------------------------INS MethodInvocation@@MethodName:getKey:[] @TO@ MethodInvocation@@e.getKey().toString() @AT@ 2199 @LENGTH@ 10\n" +
"------------------------------------INS SimpleName@@Name:e @TO@ MethodInvocation@@e.getKey().toString() @AT@ 2199 @LENGTH@ 1\n" +
"------------------------------------INS SimpleName@@MethodName:toString:[] @TO@ MethodInvocation@@e.getKey().toString() @AT@ 2210 @LENGTH@ 10\n" +
"------------------------------INS SimpleName@@MethodName:toJson:[e.getKey().toString()] @TO@ MethodInvocation@@toJson(e.getKey().toString()) @AT@ 2192 @LENGTH@ 29\n" +
"---------------------------------INS MethodInvocation@@e.getKey().toString() @TO@ SimpleName@@MethodName:toJson:[e.getKey().toString()] @AT@ 2199 @LENGTH@ 21\n" +
"------------------------------------INS MethodInvocation@@MethodName:getKey:[] @TO@ MethodInvocation@@e.getKey().toString() @AT@ 2199 @LENGTH@ 10\n" +
"------------------------------------INS SimpleName@@Name:e @TO@ MethodInvocation@@e.getKey().toString() @AT@ 2199 @LENGTH@ 1\n" +
"------------------------------------INS SimpleName@@MethodName:toString:[] @TO@ MethodInvocation@@e.getKey().toString() @AT@ 2210 @LENGTH@ 10\n" +
"------------------INS ExpressionStatement@@MethodInvocation:sb.append(\":\") @TO@ EnhancedForStatement@@for (Map.Entry<Object,Object> e : ((Map<Object,Object>)o).entrySet()) { if (sb.length() > 1) { sb.append(\",\"); } sb.append(toJson(e.getKey().toString())); sb.append(\":\"); sb.append(toJson(e.getValue()));} @AT@ 2244 @LENGTH@ 15\n" +
"---------------------INS MethodInvocation@@sb.append(\":\") @TO@ ExpressionStatement@@MethodInvocation:sb.append(\":\") @AT@ 2244 @LENGTH@ 14\n" +
"------------------------INS SimpleName@@Name:sb @TO@ MethodInvocation@@sb.append(\":\") @AT@ 2244 @LENGTH@ 2\n" +
"------------------------INS SimpleName@@MethodName:append:[\":\"] @TO@ MethodInvocation@@sb.append(\":\") @AT@ 2247 @LENGTH@ 11\n" +
"---------------------------INS StringLiteral@@\":\" @TO@ SimpleName@@MethodName:append:[\":\"] @AT@ 2254 @LENGTH@ 3\n" +
"------------------INS ExpressionStatement@@MethodInvocation:sb.append(toJson(e.getValue())) @TO@ EnhancedForStatement@@for (Map.Entry<Object,Object> e : ((Map<Object,Object>)o).entrySet()) { if (sb.length() > 1) { sb.append(\",\"); } sb.append(toJson(e.getKey().toString())); sb.append(\":\"); sb.append(toJson(e.getValue()));} @AT@ 2280 @LENGTH@ 32\n" +
"---------------------INS MethodInvocation@@sb.append(toJson(e.getValue())) @TO@ ExpressionStatement@@MethodInvocation:sb.append(toJson(e.getValue())) @AT@ 2280 @LENGTH@ 31\n" +
"------------------------INS SimpleName@@Name:sb @TO@ MethodInvocation@@sb.append(toJson(e.getValue())) @AT@ 2280 @LENGTH@ 2\n" +
"------------------------INS SimpleName@@MethodName:append:[toJson(e.getValue())] @TO@ MethodInvocation@@sb.append(toJson(e.getValue())) @AT@ 2283 @LENGTH@ 28\n" +
"---------------------------INS MethodInvocation@@toJson(e.getValue()) @TO@ SimpleName@@MethodName:append:[toJson(e.getValue())] @AT@ 2290 @LENGTH@ 20\n" +
"------------------------------INS SimpleName@@MethodName:toJson:[e.getValue()] @TO@ MethodInvocation@@toJson(e.getValue()) @AT@ 2290 @LENGTH@ 20\n" +
"---------------------------------INS MethodInvocation@@e.getValue() @TO@ SimpleName@@MethodName:toJson:[e.getValue()] @AT@ 2297 @LENGTH@ 12\n" +
"------------------------------------INS SimpleName@@Name:e @TO@ MethodInvocation@@e.getValue() @AT@ 2297 @LENGTH@ 1\n" +
"------------------------------------INS SimpleName@@MethodName:getValue:[] @TO@ MethodInvocation@@e.getValue() @AT@ 2299 @LENGTH@ 10\n" +
"------------------------------INS SimpleName@@MethodName:toJson:[e.getValue()] @TO@ MethodInvocation@@toJson(e.getValue()) @AT@ 2290 @LENGTH@ 20\n" +
"---------------------------------INS MethodInvocation@@e.getValue() @TO@ SimpleName@@MethodName:toJson:[e.getValue()] @AT@ 2297 @LENGTH@ 12\n" +
"------------------------------------INS SimpleName@@Name:e @TO@ MethodInvocation@@e.getValue() @AT@ 2297 @LENGTH@ 1\n" +
"------------------------------------INS SimpleName@@MethodName:getValue:[] @TO@ MethodInvocation@@e.getValue() @AT@ 2299 @LENGTH@ 10\n" +
"---------------INS ExpressionStatement@@MethodInvocation:sb.append(\"}\") @TO@ Block@@ThenBody:{ StringBuilder sb=new StringBuilder(); sb.append(\"{\"); for ( Map.Entry<Object,Object> e : ((Map<Object,Object>)o).entrySet()) { if (sb.length() > 1) { sb.append(\",\"); } sb.append(toJson(e.getKey().toString())); sb.append(\":\"); sb.append(toJson(e.getValue())); } sb.append(\"}\"); return sb.toString();} @AT@ 2347 @LENGTH@ 15\n" +
"------------------INS MethodInvocation@@sb.append(\"}\") @TO@ ExpressionStatement@@MethodInvocation:sb.append(\"}\") @AT@ 2347 @LENGTH@ 14\n" +
"---------------------INS SimpleName@@Name:sb @TO@ MethodInvocation@@sb.append(\"}\") @AT@ 2347 @LENGTH@ 2\n" +
"---------------------INS SimpleName@@MethodName:append:[\"}\"] @TO@ MethodInvocation@@sb.append(\"}\") @AT@ 2350 @LENGTH@ 11\n" +
"------------------------INS StringLiteral@@\"}\" @TO@ SimpleName@@MethodName:append:[\"}\"] @AT@ 2357 @LENGTH@ 3\n" +
"---------------INS ReturnStatement@@MethodInvocation:sb.toString() @TO@ Block@@ThenBody:{ StringBuilder sb=new StringBuilder(); sb.append(\"{\"); for ( Map.Entry<Object,Object> e : ((Map<Object,Object>)o).entrySet()) { if (sb.length() > 1) { sb.append(\",\"); } sb.append(toJson(e.getKey().toString())); sb.append(\":\"); sb.append(toJson(e.getValue())); } sb.append(\"}\"); return sb.toString();} @AT@ 2379 @LENGTH@ 21\n" +
"------------------INS MethodInvocation@@sb.toString() @TO@ ReturnStatement@@MethodInvocation:sb.toString() @AT@ 2386 @LENGTH@ 13\n" +
"---------------------INS SimpleName@@Name:sb @TO@ MethodInvocation@@sb.toString() @AT@ 2386 @LENGTH@ 2\n" +
"---------------------INS SimpleName@@MethodName:toString:[] @TO@ MethodInvocation@@sb.toString() @AT@ 2389 @LENGTH@ 10\n" +
"------------INS Block@@ElseBody:if (o == null) { return \"null\";} else if (o instanceof Date) { return \"\\\"\" + toIso((Date)o) + \"\\\"\";} else { return mapper.writeValueAsString(o.toString());} @TO@ IfStatement@@if (o instanceof Map) { StringBuilder sb=new StringBuilder(); sb.append(\"{\"); for ( Map.Entry<Object,Object> e : ((Map<Object,Object>)o).entrySet()) { if (sb.length() > 1) { sb.append(\",\"); } sb.append(toJson(e.getKey().toString())); sb.append(\":\"); sb.append(toJson(e.getValue())); } sb.append(\"}\"); return sb.toString();} else if (o == null) { return \"null\";} else if (o instanceof Date) { return \"\\\"\" + toIso((Date)o) + \"\\\"\";} else { return mapper.writeValueAsString(o.toString());} @AT@ 2420 @LENGTH@ 244\n" +
"---------------INS IfStatement@@if (o == null) { return \"null\";} else if (o instanceof Date) { return \"\\\"\" + toIso((Date)o) + \"\\\"\";} else { return mapper.writeValueAsString(o.toString());} @TO@ Block@@ElseBody:if (o == null) { return \"null\";} else if (o instanceof Date) { return \"\\\"\" + toIso((Date)o) + \"\\\"\";} else { return mapper.writeValueAsString(o.toString());} @AT@ 2420 @LENGTH@ 244\n" +
"------------------INS InfixExpression@@o == null @TO@ IfStatement@@if (o == null) { return \"null\";} else if (o instanceof Date) { return \"\\\"\" + toIso((Date)o) + \"\\\"\";} else { return mapper.writeValueAsString(o.toString());} @AT@ 2424 @LENGTH@ 9\n" +
"---------------------INS SimpleName@@o @TO@ InfixExpression@@o == null @AT@ 2424 @LENGTH@ 1\n" +
"---------------------INS Operator@@== @TO@ InfixExpression@@o == null @AT@ 2425 @LENGTH@ 2\n" +
"---------------------INS NullLiteral@@null @TO@ InfixExpression@@o == null @AT@ 2429 @LENGTH@ 4\n" +
"------------------INS Block@@ThenBody:{ return \"null\";} @TO@ IfStatement@@if (o == null) { return \"null\";} else if (o instanceof Date) { return \"\\\"\" + toIso((Date)o) + \"\\\"\";} else { return mapper.writeValueAsString(o.toString());} @AT@ 2435 @LENGTH@ 46\n" +
"---------------------INS ReturnStatement@@StringLiteral:\"null\" @TO@ Block@@ThenBody:{ return \"null\";} @AT@ 2453 @LENGTH@ 14\n" +
"------------------------INS StringLiteral@@\"null\" @TO@ ReturnStatement@@StringLiteral:\"null\" @AT@ 2460 @LENGTH@ 6\n" +
"------------------INS Block@@ElseBody:if (o instanceof Date) { return \"\\\"\" + toIso((Date)o) + \"\\\"\";} else { return mapper.writeValueAsString(o.toString());} @TO@ IfStatement@@if (o == null) { return \"null\";} else if (o instanceof Date) { return \"\\\"\" + toIso((Date)o) + \"\\\"\";} else { return mapper.writeValueAsString(o.toString());} @AT@ 2487 @LENGTH@ 177\n" +
"---------------------INS IfStatement@@if (o instanceof Date) { return \"\\\"\" + toIso((Date)o) + \"\\\"\";} else { return mapper.writeValueAsString(o.toString());} @TO@ Block@@ElseBody:if (o instanceof Date) { return \"\\\"\" + toIso((Date)o) + \"\\\"\";} else { return mapper.writeValueAsString(o.toString());} @AT@ 2487 @LENGTH@ 177\n" +
"------------------------INS InstanceofExpression@@o instanceof Date @TO@ IfStatement@@if (o instanceof Date) { return \"\\\"\" + toIso((Date)o) + \"\\\"\";} else { return mapper.writeValueAsString(o.toString());} @AT@ 2491 @LENGTH@ 17\n" +
"---------------------------INS SimpleName@@o @TO@ InstanceofExpression@@o instanceof Date @AT@ 2491 @LENGTH@ 1\n" +
"---------------------------INS Instanceof@@instanceof @TO@ InstanceofExpression@@o instanceof Date @AT@ 2493 @LENGTH@ 10\n" +
"---------------------------INS SimpleType@@Date @TO@ InstanceofExpression@@o instanceof Date @AT@ 2504 @LENGTH@ 4\n" +
"------------------------INS Block@@ThenBody:{ return \"\\\"\" + toIso((Date)o) + \"\\\"\";} @TO@ IfStatement@@if (o instanceof Date) { return \"\\\"\" + toIso((Date)o) + \"\\\"\";} else { return mapper.writeValueAsString(o.toString());} @AT@ 2510 @LENGTH@ 69\n" +
"---------------------------INS ReturnStatement@@InfixExpression:\"\\\"\" + toIso((Date)o) + \"\\\"\" @TO@ Block@@ThenBody:{ return \"\\\"\" + toIso((Date)o) + \"\\\"\";} @AT@ 2528 @LENGTH@ 37\n" +
"------------------------------INS InfixExpression@@\"\\\"\" + toIso((Date)o) + \"\\\"\" @TO@ ReturnStatement@@InfixExpression:\"\\\"\" + toIso((Date)o) + \"\\\"\" @AT@ 2535 @LENGTH@ 29\n" +
"---------------------------------INS StringLiteral@@\"\\\"\" @TO@ InfixExpression@@\"\\\"\" + toIso((Date)o) + \"\\\"\" @AT@ 2535 @LENGTH@ 4\n" +
"---------------------------------INS Operator@@+ @TO@ InfixExpression@@\"\\\"\" + toIso((Date)o) + \"\\\"\" @AT@ 2539 @LENGTH@ 1\n" +
"---------------------------------INS MethodInvocation@@toIso((Date)o) @TO@ InfixExpression@@\"\\\"\" + toIso((Date)o) + \"\\\"\" @AT@ 2542 @LENGTH@ 15\n" +
"------------------------------------INS SimpleName@@MethodName:toIso:[(Date)o] @TO@ MethodInvocation@@toIso((Date)o) @AT@ 2542 @LENGTH@ 15\n" +
"---------------------------------------INS CastExpression@@(Date)o @TO@ SimpleName@@MethodName:toIso:[(Date)o] @AT@ 2548 @LENGTH@ 8\n" +
"------------------------------------------INS SimpleType@@Date @TO@ CastExpression@@(Date)o @AT@ 2549 @LENGTH@ 4\n" +
"------------------------------------------INS SimpleName@@o @TO@ CastExpression@@(Date)o @AT@ 2555 @LENGTH@ 1\n" +
"------------------------------------INS SimpleName@@MethodName:toIso:[(Date)o] @TO@ MethodInvocation@@toIso((Date)o) @AT@ 2542 @LENGTH@ 15\n" +
"---------------------------------------INS CastExpression@@(Date)o @TO@ SimpleName@@MethodName:toIso:[(Date)o] @AT@ 2548 @LENGTH@ 8\n" +
"------------------------------------------INS SimpleType@@Date @TO@ CastExpression@@(Date)o @AT@ 2549 @LENGTH@ 4\n" +
"------------------------------------------INS SimpleName@@o @TO@ CastExpression@@(Date)o @AT@ 2555 @LENGTH@ 1\n" +
"---------------------------------INS StringLiteral@@\"\\\"\" @TO@ InfixExpression@@\"\\\"\" + toIso((Date)o) + \"\\\"\" @AT@ 2560 @LENGTH@ 4\n" +
"------------------------INS Block@@ElseBody:{ return mapper.writeValueAsString(o.toString());} @TO@ IfStatement@@if (o instanceof Date) { return \"\\\"\" + toIso((Date)o) + \"\\\"\";} else { return mapper.writeValueAsString(o.toString());} @AT@ 2585 @LENGTH@ 79\n" +
"---------------------------INS ReturnStatement@@MethodInvocation:mapper.writeValueAsString(o.toString()) @TO@ Block@@ElseBody:{ return mapper.writeValueAsString(o.toString());} @AT@ 2603 @LENGTH@ 47\n" +
"------------------------------MOV MethodInvocation@@mapper.writeValueAsString(o) @TO@ ReturnStatement@@MethodInvocation:mapper.writeValueAsString(o.toString()) @AT@ 1307 @LENGTH@ 28\n" +
"---------------------------------UPD SimpleName@@MethodName:writeValueAsString:[o] @TO@ MethodName:writeValueAsString:[o.toString()] @AT@ 1314 @LENGTH@ 21\n" +
"------------------------------------DEL SimpleName@@o @AT@ 1333 @LENGTH@ 1\n" +
"------------------------------------INS MethodInvocation@@o.toString() @TO@ SimpleName@@MethodName:writeValueAsString:[o] @AT@ 2636 @LENGTH@ 12\n" +
"---------------------------------------INS SimpleName@@Name:o @TO@ MethodInvocation@@o.toString() @AT@ 2636 @LENGTH@ 1\n" +
"---------------------------------------INS SimpleName@@MethodName:toString:[] @TO@ MethodInvocation@@o.toString() @AT@ 2638 @LENGTH@ 10\n");
}
//commons-collections_045fda_add3a9_src#main#java#org#apache#commons#collections#map#AbstractHashedMap.java
/**
 * Verifies the action set mined for a CatchClause change in which
 * {@code return null;} is replaced by {@code throw new InternalError();}.
 * Exactly one root action set is expected, and its string rendering must
 * match the recorded edit script byte for byte.
 */
@Test
public void test_collections_045fda_add3a9() throws IOException {
    //null pointer
    List<HierarchicalActionSet> hierarchicalActionSets = getHierarchicalActionSets4java("commons-collections_045fda_add3a9_src#main#java#org#apache#commons#collections#map#AbstractHashedMap.java");
    // JUnit's assertEquals takes (expected, actual); passing them in that
    // order produces meaningful failure messages.
    Assert.assertEquals(1, hierarchicalActionSets.size());
    Assert.assertEquals("UPD CatchClause@@catch (final CloneNotSupportedException ex) { return null;} @TO@ catch (final CloneNotSupportedException ex) { throw new InternalError();} @AT@ 44401 @LENGTH@ 104\n" +
            "---DEL ReturnStatement@@NullLiteral:null @AT@ 44459 @LENGTH@ 12\n" +
            "------DEL NullLiteral@@null @AT@ 44466 @LENGTH@ 4\n" +
            "---INS ThrowStatement@@ClassInstanceCreation:new InternalError() @TO@ CatchClause@@catch (final CloneNotSupportedException ex) { return null;} @AT@ 44530 @LENGTH@ 26\n" +
            "------INS ClassInstanceCreation@@InternalError[] @TO@ ThrowStatement@@ClassInstanceCreation:new InternalError() @AT@ 44536 @LENGTH@ 19\n" +
            "---------INS New@@new @TO@ ClassInstanceCreation@@InternalError[] @AT@ 44536 @LENGTH@ 3\n" +
            "---------INS SimpleType@@InternalError @TO@ ClassInstanceCreation@@InternalError[] @AT@ 44540 @LENGTH@ 13\n",
            hierarchicalActionSets.get(0).toString());
}
//metadata_a82665_e8bff4_appclient#src#main#java#org#jboss#metadata#appclient#parser#spec#ApplicationClientMetaDataParser.java
/**
 * Verifies the action set mined for a SwitchStatement change: a missing
 * {@code break} is inserted in the VERSION case and
 * {@code setMetadataComplete(true)} becomes
 * {@code setMetadataComplete(Boolean.parseBoolean(value))}.
 */
@Test
public void test_metadata_a82665_e8bff4() throws IOException {
    //null pointer
    List<HierarchicalActionSet> hierarchicalActionSets = getHierarchicalActionSets4java("metadata_a82665_e8bff4_appclient#src#main#java#org#jboss#metadata#appclient#parser#spec#ApplicationClientMetaDataParser.java");
    // JUnit's assertEquals takes (expected, actual); passing them in that
    // order produces meaningful failure messages.
    Assert.assertEquals(1, hierarchicalActionSets.size());
    Assert.assertEquals("UPD SwitchStatement@@switch (ejbJarAttribute) {case ID:{ metaData.setId(value); break; }case VERSION:{ metaData.setVersion(value);}case METADATA_COMPLETE:{metaData.setMetadataComplete(true);break;}default :throw unexpectedAttribute(reader,i);} @TO@ switch (ejbJarAttribute) {case ID:{ metaData.setId(value); break; }case VERSION:{ metaData.setVersion(value); break;}case METADATA_COMPLETE:{metaData.setMetadataComplete(Boolean.parseBoolean(value));break;}default :throw unexpectedAttribute(reader,i);} @AT@ 2859 @LENGTH@ 410\n" +
            "---INS BreakStatement@@ @TO@ SwitchStatement@@switch (ejbJarAttribute) {case ID:{ metaData.setId(value); break; }case VERSION:{ metaData.setVersion(value);}case METADATA_COMPLETE:{metaData.setMetadataComplete(true);break;}default :throw unexpectedAttribute(reader,i);} @AT@ 3074 @LENGTH@ 6\n" +
            "---UPD ExpressionStatement@@MethodInvocation:metaData.setMetadataComplete(true) @TO@ MethodInvocation:metaData.setMetadataComplete(Boolean.parseBoolean(value)) @AT@ 3128 @LENGTH@ 35\n" +
            "------UPD MethodInvocation@@metaData.setMetadataComplete(true) @TO@ metaData.setMetadataComplete(Boolean.parseBoolean(value)) @AT@ 3128 @LENGTH@ 34\n" +
            "---------UPD SimpleName@@MethodName:setMetadataComplete:[true] @TO@ MethodName:setMetadataComplete:[Boolean.parseBoolean(value)] @AT@ 3137 @LENGTH@ 25\n" +
            "------------DEL BooleanLiteral@@true @AT@ 3157 @LENGTH@ 4\n" +
            "------------INS MethodInvocation@@Boolean.parseBoolean(value) @TO@ SimpleName@@MethodName:setMetadataComplete:[true] @AT@ 3177 @LENGTH@ 27\n" +
            "---------------INS SimpleName@@Name:Boolean @TO@ MethodInvocation@@Boolean.parseBoolean(value) @AT@ 3177 @LENGTH@ 7\n" +
            "---------------INS SimpleName@@MethodName:parseBoolean:[value] @TO@ MethodInvocation@@Boolean.parseBoolean(value) @AT@ 3185 @LENGTH@ 19\n" +
            "------------------INS SimpleName@@value @TO@ SimpleName@@MethodName:parseBoolean:[value] @AT@ 3198 @LENGTH@ 5\n",
            hierarchicalActionSets.get(0).toString());
}
//fuse_6baf6f_ad4e95_fabric#fabric-core-agent-jclouds#src#main#java#org#fusesource#fabric#service#jclouds#JcloudsContainerProvider.java
/**
 * Verifies the action set mined for a MethodDeclaration change: a new
 * InterruptedException is added to the throws clause, a local becomes final,
 * a Thread.sleep(5000) call is inserted, and an inner EnhancedForStatement
 * (building an address buffer) is deleted from the large IfStatement.
 * Exactly one root action set is expected.
 */
@Test
public void test_fuse_6baf6f_ad4e95() throws IOException {
//null pointer
List<HierarchicalActionSet> hierarchicalActionSets = getHierarchicalActionSets4java("fuse_6baf6f_ad4e95_fabric#fabric-core-agent-jclouds#src#main#java#org#fusesource#fabric#service#jclouds#JcloudsContainerProvider.java");
Assert.assertEquals(hierarchicalActionSets.size(),1);
// NOTE(review): assertEquals arguments are (actual, expected) here; the JUnit
// convention is (expected, actual) — failure messages will be swapped.
Assert.assertEquals(hierarchicalActionSets.get(0).toString(),"UPD MethodDeclaration@@public, Set<CreateJCloudsContainerMetadata>, MethodName:create, CreateJCloudsContainerOptions options, MalformedURLException, RunNodesException, URISyntaxException, @TO@ public, Set<CreateJCloudsContainerMetadata>, MethodName:create, CreateJCloudsContainerOptions options, MalformedURLException, RunNodesException, URISyntaxException, InterruptedException, @AT@ 2863 @LENGTH@ 4271\n" +
"---INS SimpleType@@InterruptedException @TO@ MethodDeclaration@@public, Set<CreateJCloudsContainerMetadata>, MethodName:create, CreateJCloudsContainerOptions options, MalformedURLException, RunNodesException, URISyntaxException, @AT@ 3021 @LENGTH@ 20\n" +
"---UPD VariableDeclarationStatement@@Set<CreateJCloudsContainerMetadata> result=new LinkedHashSet<CreateJCloudsContainerMetadata>(); @TO@ final Set<CreateJCloudsContainerMetadata> result=new LinkedHashSet<CreateJCloudsContainerMetadata>(); @AT@ 3030 @LENGTH@ 97\n" +
"------INS Modifier@@final @TO@ VariableDeclarationStatement@@Set<CreateJCloudsContainerMetadata> result=new LinkedHashSet<CreateJCloudsContainerMetadata>(); @AT@ 3051 @LENGTH@ 5\n" +
"---INS ExpressionStatement@@MethodInvocation:Thread.sleep(5000) @TO@ MethodDeclaration@@public, Set<CreateJCloudsContainerMetadata>, MethodName:create, CreateJCloudsContainerOptions options, MalformedURLException, RunNodesException, URISyntaxException, @AT@ 5007 @LENGTH@ 19\n" +
"------INS MethodInvocation@@Thread.sleep(5000) @TO@ ExpressionStatement@@MethodInvocation:Thread.sleep(5000) @AT@ 5007 @LENGTH@ 18\n" +
"---------INS SimpleName@@Name:Thread @TO@ MethodInvocation@@Thread.sleep(5000) @AT@ 5007 @LENGTH@ 6\n" +
"---------INS SimpleName@@MethodName:sleep:[5000] @TO@ MethodInvocation@@Thread.sleep(5000) @AT@ 5014 @LENGTH@ 11\n" +
"------------INS NumberLiteral@@5000 @TO@ SimpleName@@MethodName:sleep:[5000] @AT@ 5020 @LENGTH@ 4\n" +
// NOTE(review): the next three expected-string literals appear to contain raw
// line breaks inside a Java string literal, which would not compile as shown —
// possibly a paste/extraction artifact; verify against the committed file.
"---UPD IfStatement@@if (metadatas != null) { for ( NodeMetadata nodeMetadata : metadatas) { Credentials credentials=null; if (options.getUser() != null) { credentials=new Credentials(options.getUser(),nodeMetadata.getCredentials().credential); } else { credentials=nodeMetadata.getCredentials(); } String id=nodeMetadata.getId(); Set<String> publicAddresses=nodeMetadata.getPublicAddresses(); for ( String pa : publicAddresses) { if (first) { first=false; } else { buffer.append(\",\"); } buffer.append(pa + \":\" + options.getServicePort()); } String containerName=options.getName(); if (options.getNumber() > 1) { containerName+=suffix++; } String script=buildStartupScript(options.name(containerName)); if (credentials != null) { computeService.runScriptOnNode(id,script,RunScriptOptions.Builder.overrideCredentialsWith(credentials).runAsRoot(false)); } else { computeService.runScriptOnNode(id,script); } CreateJCloudsContainerMetadata jCloudsContainerMetadata=new CreateJCloudsContainerMetadata(); jCloudsContainerMetadata.setNodeId(nodeMetadata.getId()); jCloudsContainerMetadata.setContainerName(containerName); jCloudsContainerMetadata.setPublicAddresses(nodeMetadata.getPublicAddresses()); jCloudsContainerMetadata.setHostname(nodeMetadata.getHostname()); result.add(jCloudsContainerMetadata); }} @TO@ if (metadatas != null) { for ( NodeMetadata nodeMetadata : metadatas) { Credentials credentials=null; if (options.getUser() != null) { credentials=new Credentials(options.getUser(),nodeMetadata.getCredentials().credential); } else { credentials=nodeMetadata.getCredentials(); } String id=nodeMetadata.getId(); Set<String> publicAddresses=nodeMetadata.getPublicAddresses(); String containerName=options.getName(); if (options.getNumber() > 1) { containerName+=suffix++; } String script=buildStartupScript(options.name(containerName)); if (credentials != null) { computeService.runScriptOnNode(id,script,RunScriptOptions.Builder.overrideCredentialsWith(credentials).runAsRoot(false)); } 
else { computeService.runScriptOnNode(id,script); } CreateJCloudsContainerMetadata jCloudsContainerMetadata=new CreateJCloudsContainerMetadata(); jCloudsContainerMetadata.setNodeId(nodeMetadata.getId()); jCloudsContainerMetadata.setContainerName(containerName); jCloudsContainerMetadata.setPublicAddresses(nodeMetadata.getPublicAddresses()); jCloudsContainerMetadata.setHostname(nodeMetadata.getHostname()); result.add(jCloudsContainerMetadata); }} @AT@ 5086 @LENGTH@ 2018\n" +
"------UPD Block@@ThenBody:{ for ( NodeMetadata nodeMetadata : metadatas) { Credentials credentials=null; if (options.getUser() != null) { credentials=new Credentials(options.getUser(),nodeMetadata.getCredentials().credential); } else { credentials=nodeMetadata.getCredentials(); } String id=nodeMetadata.getId(); Set<String> publicAddresses=nodeMetadata.getPublicAddresses(); for ( String pa : publicAddresses) { if (first) { first=false; } else { buffer.append(\",\"); } buffer.append(pa + \":\" + options.getServicePort()); } String containerName=options.getName(); if (options.getNumber() > 1) { containerName+=suffix++; } String script=buildStartupScript(options.name(containerName)); if (credentials != null) { computeService.runScriptOnNode(id,script,RunScriptOptions.Builder.overrideCredentialsWith(credentials).runAsRoot(false)); } else { computeService.runScriptOnNode(id,script); } CreateJCloudsContainerMetadata jCloudsContainerMetadata=new CreateJCloudsContainerMetadata(); jCloudsContainerMetadata.setNodeId(nodeMetadata.getId()); jCloudsContainerMetadata.setContainerName(containerName); jCloudsContainerMetadata.setPublicAddresses(nodeMetadata.getPublicAddresses()); jCloudsContainerMetadata.setHostname(nodeMetadata.getHostname()); result.add(jCloudsContainerMetadata); }} @TO@ ThenBody:{ for ( NodeMetadata nodeMetadata : metadatas) { Credentials credentials=null; if (options.getUser() != null) { credentials=new Credentials(options.getUser(),nodeMetadata.getCredentials().credential); } else { credentials=nodeMetadata.getCredentials(); } String id=nodeMetadata.getId(); Set<String> publicAddresses=nodeMetadata.getPublicAddresses(); String containerName=options.getName(); if (options.getNumber() > 1) { containerName+=suffix++; } String script=buildStartupScript(options.name(containerName)); if (credentials != null) { computeService.runScriptOnNode(id,script,RunScriptOptions.Builder.overrideCredentialsWith(credentials).runAsRoot(false)); } else { 
computeService.runScriptOnNode(id,script); } CreateJCloudsContainerMetadata jCloudsContainerMetadata=new CreateJCloudsContainerMetadata(); jCloudsContainerMetadata.setNodeId(nodeMetadata.getId()); jCloudsContainerMetadata.setContainerName(containerName); jCloudsContainerMetadata.setPublicAddresses(nodeMetadata.getPublicAddresses()); jCloudsContainerMetadata.setHostname(nodeMetadata.getHostname()); result.add(jCloudsContainerMetadata); }} @AT@ 5109 @LENGTH@ 1995\n" +
"---------UPD EnhancedForStatement@@for (NodeMetadata nodeMetadata : metadatas) { Credentials credentials=null; if (options.getUser() != null) { credentials=new Credentials(options.getUser(),nodeMetadata.getCredentials().credential); } else { credentials=nodeMetadata.getCredentials(); } String id=nodeMetadata.getId(); Set<String> publicAddresses=nodeMetadata.getPublicAddresses(); for ( String pa : publicAddresses) { if (first) { first=false; } else { buffer.append(\",\"); } buffer.append(pa + \":\" + options.getServicePort()); } String containerName=options.getName(); if (options.getNumber() > 1) { containerName+=suffix++; } String script=buildStartupScript(options.name(containerName)); if (credentials != null) { computeService.runScriptOnNode(id,script,RunScriptOptions.Builder.overrideCredentialsWith(credentials).runAsRoot(false)); } else { computeService.runScriptOnNode(id,script); } CreateJCloudsContainerMetadata jCloudsContainerMetadata=new CreateJCloudsContainerMetadata(); jCloudsContainerMetadata.setNodeId(nodeMetadata.getId()); jCloudsContainerMetadata.setContainerName(containerName); jCloudsContainerMetadata.setPublicAddresses(nodeMetadata.getPublicAddresses()); jCloudsContainerMetadata.setHostname(nodeMetadata.getHostname()); result.add(jCloudsContainerMetadata);} @TO@ for (NodeMetadata nodeMetadata : metadatas) { Credentials credentials=null; if (options.getUser() != null) { credentials=new Credentials(options.getUser(),nodeMetadata.getCredentials().credential); } else { credentials=nodeMetadata.getCredentials(); } String id=nodeMetadata.getId(); Set<String> publicAddresses=nodeMetadata.getPublicAddresses(); String containerName=options.getName(); if (options.getNumber() > 1) { containerName+=suffix++; } String script=buildStartupScript(options.name(containerName)); if (credentials != null) { computeService.runScriptOnNode(id,script,RunScriptOptions.Builder.overrideCredentialsWith(credentials).runAsRoot(false)); } else { 
computeService.runScriptOnNode(id,script); } CreateJCloudsContainerMetadata jCloudsContainerMetadata=new CreateJCloudsContainerMetadata(); jCloudsContainerMetadata.setNodeId(nodeMetadata.getId()); jCloudsContainerMetadata.setContainerName(containerName); jCloudsContainerMetadata.setPublicAddresses(nodeMetadata.getPublicAddresses()); jCloudsContainerMetadata.setHostname(nodeMetadata.getHostname()); result.add(jCloudsContainerMetadata);} @AT@ 5123 @LENGTH@ 1971\n" +
"------------DEL EnhancedForStatement@@for (String pa : publicAddresses) { if (first) { first=false; } else { buffer.append(\",\"); } buffer.append(pa + \":\" + options.getServicePort());} @AT@ 5753 @LENGTH@ 291\n" +
"---------------DEL SingleVariableDeclaration@@String pa @AT@ 5758 @LENGTH@ 9\n" +
"------------------DEL SimpleType@@String @AT@ 5758 @LENGTH@ 6\n" +
"------------------DEL SimpleName@@pa @AT@ 5765 @LENGTH@ 2\n" +
"---------------DEL SimpleName@@publicAddresses @AT@ 5769 @LENGTH@ 15\n" +
"---------------DEL IfStatement@@if (first) { first=false;} else { buffer.append(\",\");} @AT@ 5808 @LENGTH@ 146\n" +
"------------------DEL SimpleName@@first @AT@ 5812 @LENGTH@ 5\n" +
"------------------DEL Block@@ThenBody:{ first=false;} @AT@ 5819 @LENGTH@ 62\n" +
"---------------------DEL ExpressionStatement@@Assignment:first=false @AT@ 5845 @LENGTH@ 14\n" +
"------------------------DEL Assignment@@first=false @AT@ 5845 @LENGTH@ 13\n" +
"---------------------------DEL SimpleName@@first @AT@ 5845 @LENGTH@ 5\n" +
"---------------------------DEL Operator@@= @AT@ 5850 @LENGTH@ 1\n" +
"---------------------------DEL BooleanLiteral@@false @AT@ 5853 @LENGTH@ 5\n" +
"------------------DEL Block@@ElseBody:{ buffer.append(\",\");} @AT@ 5887 @LENGTH@ 67\n" +
"---------------------DEL ExpressionStatement@@MethodInvocation:buffer.append(\",\") @AT@ 5913 @LENGTH@ 19\n" +
"------------------------DEL MethodInvocation@@buffer.append(\",\") @AT@ 5913 @LENGTH@ 18\n" +
"---------------------------DEL SimpleName@@Name:buffer @AT@ 5913 @LENGTH@ 6\n" +
"---------------------------DEL SimpleName@@MethodName:append:[\",\"] @AT@ 5920 @LENGTH@ 11\n" +
"------------------------------DEL StringLiteral@@\",\" @AT@ 5927 @LENGTH@ 3\n" +
"---------------DEL ExpressionStatement@@MethodInvocation:buffer.append(pa + \":\" + options.getServicePort()) @AT@ 5975 @LENGTH@ 51\n" +
"------------------DEL MethodInvocation@@buffer.append(pa + \":\" + options.getServicePort()) @AT@ 5975 @LENGTH@ 50\n" +
"---------------------DEL SimpleName@@Name:buffer @AT@ 5975 @LENGTH@ 6\n" +
"---------------------DEL SimpleName@@MethodName:append:[pa + \":\" + options.getServicePort()] @AT@ 5982 @LENGTH@ 43\n" +
"------------------------DEL InfixExpression@@pa + \":\" + options.getServicePort() @AT@ 5989 @LENGTH@ 35\n" +
"---------------------------DEL SimpleName@@pa @AT@ 5989 @LENGTH@ 2\n" +
"---------------------------DEL Operator@@+ @AT@ 5991 @LENGTH@ 1\n" +
"---------------------------DEL StringLiteral@@\":\" @AT@ 5994 @LENGTH@ 3\n" +
"---------------------------DEL MethodInvocation@@options.getServicePort() @AT@ 6000 @LENGTH@ 24\n" +
"------------------------------DEL SimpleName@@Name:options @AT@ 6000 @LENGTH@ 7\n" +
"------------------------------DEL SimpleName@@MethodName:getServicePort:[] @AT@ 6008 @LENGTH@ 16\n");
}
//commons-collections_51c9ef_d65e29_src#java#org#apache#commons#collections#iterators#IteratorChain.java
/**
 * Verifies the action set mined for an inserted null-guard in remove():
 * a new IfStatement wraps a moved updateCurrentIterator() invocation
 * (a MOV-under-DEL two-action example).
 */
@Test
public void test_collections_51c9ef_d65e29() throws IOException {
    //MOVE - DEL 2 action example
    List<HierarchicalActionSet> hierarchicalActionSets = getHierarchicalActionSets4java("commons-collections_51c9ef_d65e29_src#java#org#apache#commons#collections#iterators#IteratorChain.java");
    // JUnit's assertEquals takes (expected, actual); passing them in that
    // order produces meaningful failure messages.
    Assert.assertEquals(1, hierarchicalActionSets.size());
    Assert.assertEquals("INS IfStatement@@if (currentIterator == null) { updateCurrentIterator();} @TO@ MethodDeclaration@@public, void, MethodName:remove, @AT@ 9501 @LENGTH@ 78\n" +
            "---INS InfixExpression@@currentIterator == null @TO@ IfStatement@@if (currentIterator == null) { updateCurrentIterator();} @AT@ 9505 @LENGTH@ 23\n" +
            "------INS SimpleName@@currentIterator @TO@ InfixExpression@@currentIterator == null @AT@ 9505 @LENGTH@ 15\n" +
            "------INS Operator@@== @TO@ InfixExpression@@currentIterator == null @AT@ 9520 @LENGTH@ 2\n" +
            "------INS NullLiteral@@null @TO@ InfixExpression@@currentIterator == null @AT@ 9524 @LENGTH@ 4\n" +
            "---INS Block@@ThenBody:{ updateCurrentIterator();} @TO@ IfStatement@@if (currentIterator == null) { updateCurrentIterator();} @AT@ 9531 @LENGTH@ 48\n" +
            "------INS ExpressionStatement@@MethodInvocation:updateCurrentIterator() @TO@ Block@@ThenBody:{ updateCurrentIterator();} @AT@ 9545 @LENGTH@ 24\n" +
            "---------MOV MethodInvocation@@MethodName:updateCurrentIterator:[] @TO@ ExpressionStatement@@MethodInvocation:updateCurrentIterator() @AT@ 9522 @LENGTH@ 23\n",
            hierarchicalActionSets.get(0).toString());
}
//fuse_d6d40b_b37c85_fabric-zookeeper-commands#src#main#java#org#fusesource#fabric#zookeeper#commands#Export.java
/**
 * Verifies the action set mined for an IfStatement condition change in
 * Export.java: boolean arguments are appended to both matches(...) calls
 * (matches(include,p) -> matches(include,p,true) and
 * matches(exclude,p) -> matches(exclude,p,false)).
 */
@Test
public void test_fuse_d6d40b_b37c85() throws IOException {
    List<HierarchicalActionSet> hierarchicalActionSets = getHierarchicalActionSets4java("fuse_d6d40b_b37c85_fabric-zookeeper-commands#src#main#java#org#fusesource#fabric#zookeeper#commands#Export.java");
    // JUnit's assertEquals takes (expected, actual); passing them in that
    // order produces meaningful failure messages.
    Assert.assertEquals(1, hierarchicalActionSets.size());
    Assert.assertEquals("UPD IfStatement@@if (!matches(include,p) || matches(exclude,p)) { continue;} @TO@ if (!matches(include,p,true) || matches(exclude,p,false)) { continue;} @AT@ 3334 @LENGTH@ 90\n" +
            "---UPD InfixExpression@@!matches(include,p) || matches(exclude,p) @TO@ !matches(include,p,true) || matches(exclude,p,false) @AT@ 3338 @LENGTH@ 43\n" +
            "------UPD PrefixExpression@@!matches(include,p) @TO@ !matches(include,p,true) @AT@ 3338 @LENGTH@ 20\n" +
            "---------UPD MethodInvocation@@matches(include,p) @TO@ matches(include,p,true) @AT@ 3339 @LENGTH@ 19\n" +
            "------------UPD SimpleName@@MethodName:matches:[include, p] @TO@ MethodName:matches:[include, p, true] @AT@ 3339 @LENGTH@ 19\n" +
            "---------------INS BooleanLiteral@@true @TO@ SimpleName@@MethodName:matches:[include, p] @AT@ 3359 @LENGTH@ 4\n" +
            "------UPD MethodInvocation@@matches(exclude,p) @TO@ matches(exclude,p,false) @AT@ 3362 @LENGTH@ 19\n" +
            "---------UPD SimpleName@@MethodName:matches:[exclude, p] @TO@ MethodName:matches:[exclude, p, false] @AT@ 3362 @LENGTH@ 19\n" +
            "------------INS BooleanLiteral@@false @TO@ SimpleName@@MethodName:matches:[exclude, p] @AT@ 3388 @LENGTH@ 5\n",
            hierarchicalActionSets.get(0).toString());
}
//fuse_d6d40b_b37c85_fabric-zookeeper-commands#src#main#java#org#fusesource#fabric#zookeeper#commands#Import.java
@Test
public void test_fuse_d6d40b_b37c85_() throws IOException {
    // Import.java diff: two independent call sites of matches(...) are updated,
    // hence two action sets are expected (size asserted below as 2).
    List<HierarchicalActionSet> hierarchicalActionSets = getHierarchicalActionSets4java("fuse_d6d40b_b37c85_fabric-zookeeper-commands#src#main#java#org#fusesource#fabric#zookeeper#commands#Import.java");
    Assert.assertEquals(hierarchicalActionSets.size(), 2);
    // First action set: boolean arguments added to both matches(...) calls.
    Assert.assertEquals(hierarchicalActionSets.get(0).toString(), "UPD IfStatement@@if (!matches(include,key) || matches(exclude,key)) { continue;} @TO@ if (!matches(include,key,true) || matches(exclude,key,false)) { continue;} @AT@ 5493 @LENGTH@ 94\n" +
            "---UPD InfixExpression@@!matches(include,key) || matches(exclude,key) @TO@ !matches(include,key,true) || matches(exclude,key,false) @AT@ 5497 @LENGTH@ 47\n" +
            "------UPD PrefixExpression@@!matches(include,key) @TO@ !matches(include,key,true) @AT@ 5497 @LENGTH@ 22\n" +
            "---------UPD MethodInvocation@@matches(include,key) @TO@ matches(include,key,true) @AT@ 5498 @LENGTH@ 21\n" +
            "------------UPD SimpleName@@MethodName:matches:[include, key] @TO@ MethodName:matches:[include, key, true] @AT@ 5498 @LENGTH@ 21\n" +
            "---------------INS BooleanLiteral@@true @TO@ SimpleName@@MethodName:matches:[include, key] @AT@ 5520 @LENGTH@ 4\n" +
            "------UPD MethodInvocation@@matches(exclude,key) @TO@ matches(exclude,key,false) @AT@ 5523 @LENGTH@ 21\n" +
            "---------UPD SimpleName@@MethodName:matches:[exclude, key] @TO@ MethodName:matches:[exclude, key, false] @AT@ 5523 @LENGTH@ 21\n" +
            "------------INS BooleanLiteral@@false @TO@ SimpleName@@MethodName:matches:[exclude, key] @AT@ 5551 @LENGTH@ 5\n");
    // BUG FIX: this assertion previously re-checked get(0), so the second action
    // set was never tested. It now checks get(1). NOTE(review): the expected
    // string below is the one the original author duplicated from set 0 (it even
    // repeats the same @AT@ offsets) — TODO confirm it matches the actual content
    // of the second action set and update offsets/labels if they differ.
    Assert.assertEquals(hierarchicalActionSets.get(1).toString(), "UPD IfStatement@@if (!matches(include,key) || matches(exclude,key)) { continue;} @TO@ if (!matches(include,key,true) || matches(exclude,key,false)) { continue;} @AT@ 5493 @LENGTH@ 94\n" +
            "---UPD InfixExpression@@!matches(include,key) || matches(exclude,key) @TO@ !matches(include,key,true) || matches(exclude,key,false) @AT@ 5497 @LENGTH@ 47\n" +
            "------UPD PrefixExpression@@!matches(include,key) @TO@ !matches(include,key,true) @AT@ 5497 @LENGTH@ 22\n" +
            "---------UPD MethodInvocation@@matches(include,key) @TO@ matches(include,key,true) @AT@ 5498 @LENGTH@ 21\n" +
            "------------UPD SimpleName@@MethodName:matches:[include, key] @TO@ MethodName:matches:[include, key, true] @AT@ 5498 @LENGTH@ 21\n" +
            "---------------INS BooleanLiteral@@true @TO@ SimpleName@@MethodName:matches:[include, key] @AT@ 5520 @LENGTH@ 4\n" +
            "------UPD MethodInvocation@@matches(exclude,key) @TO@ matches(exclude,key,false) @AT@ 5523 @LENGTH@ 21\n" +
            "---------UPD SimpleName@@MethodName:matches:[exclude, key] @TO@ MethodName:matches:[exclude, key, false] @AT@ 5523 @LENGTH@ 21\n" +
            "------------INS BooleanLiteral@@false @TO@ SimpleName@@MethodName:matches:[exclude, key] @AT@ 5551 @LENGTH@ 5\n");
}
//fuse_6d0e56_998321_fabric#fabric-agent#src#main#java#org#fusesource#fabric#agent#DeploymentAgent.java
@Test
public void test_fuse_6d0e56_998321() throws IOException {
    // DeploymentAgent.java diff: the caught exception type is widened from
    // IOException to Throwable; the catch body itself is unchanged.
    final List<HierarchicalActionSet> actionSets = getHierarchicalActionSets4java("fuse_6d0e56_998321_fabric#fabric-agent#src#main#java#org#fusesource#fabric#agent#DeploymentAgent.java");
    Assert.assertEquals(actionSets.size(), 1);
    final String expected =
            "UPD CatchClause@@catch (IOException e) { errors.add(e);} @TO@ catch (Throwable e) { errors.add(e);} @AT@ 42072 @LENGTH@ 84\n"
            + "---UPD SingleVariableDeclaration@@IOException e @TO@ Throwable e @AT@ 42079 @LENGTH@ 13\n"
            + "------UPD SimpleType@@IOException @TO@ Throwable @AT@ 42079 @LENGTH@ 11\n";
    Assert.assertEquals(actionSets.get(0).toString(), expected);
}
//fuse_b0e8c7_602ea9_fabric#fabric-core#src#main#java#org#fusesource#fabric#service#AbstractDataStore.java
@Test
public void test_fuse_b0e8c7_602ea9() throws IOException {
    // AbstractDataStore.java diff: resolve(...) gains a leading "configs"
    // argument inside the return expression.
    final List<HierarchicalActionSet> actionSets = getHierarchicalActionSets4java("fuse_b0e8c7_602ea9_fabric#fabric-core#src#main#java#org#fusesource#fabric#service#AbstractDataStore.java");
    Assert.assertEquals(actionSets.size(), 1);
    final String expected =
            "UPD ReturnStatement@@MethodInvocation:availableResolvers.get(scheme).resolve(pid,key,toSubstitute) @TO@ MethodInvocation:availableResolvers.get(scheme).resolve(configs,pid,key,toSubstitute) @AT@ 9506 @LENGTH@ 70\n"
            + "---UPD MethodInvocation@@availableResolvers.get(scheme).resolve(pid,key,toSubstitute) @TO@ availableResolvers.get(scheme).resolve(configs,pid,key,toSubstitute) @AT@ 9513 @LENGTH@ 62\n"
            + "------UPD SimpleName@@MethodName:resolve:[pid, key, toSubstitute] @TO@ MethodName:resolve:[configs, pid, key, toSubstitute] @AT@ 9544 @LENGTH@ 31\n"
            + "---------INS SimpleName@@configs @TO@ SimpleName@@MethodName:resolve:[pid, key, toSubstitute] @AT@ 9552 @LENGTH@ 7\n";
    Assert.assertEquals(actionSets.get(0).toString(), expected);
}
//commons-collections_2d2aef_fb3daa_src#java#org#apache#commons#collections#map#TransformedMap.java
@Test
public void test_collections_2d2aef_fb3daa() throws IOException {
    // TransformedMap.java diff: the (K) cast around transformKey(...) is removed
    // (DEL CastExpression + MOV of the inner MethodInvocation back into put).
    final List<HierarchicalActionSet> actionSets = getHierarchicalActionSets4java("commons-collections_2d2aef_fb3daa_src#java#org#apache#commons#collections#map#TransformedMap.java");
    Assert.assertEquals(actionSets.size(), 1);
    final String expected =
            "UPD ExpressionStatement@@MethodInvocation:result.put((K)transformKey(entry.getKey()),transformValue(entry.getValue())) @TO@ MethodInvocation:result.put(transformKey(entry.getKey()),transformValue(entry.getValue())) @AT@ 7727 @LENGTH@ 79\n"
            + "---UPD MethodInvocation@@result.put((K)transformKey(entry.getKey()),transformValue(entry.getValue())) @TO@ result.put(transformKey(entry.getKey()),transformValue(entry.getValue())) @AT@ 7727 @LENGTH@ 78\n"
            + "------UPD SimpleName@@MethodName:put:[(K)transformKey(entry.getKey()), transformValue(entry.getValue())] @TO@ MethodName:put:[transformKey(entry.getKey()), transformValue(entry.getValue())] @AT@ 7734 @LENGTH@ 71\n"
            + "---------DEL CastExpression@@(K)transformKey(entry.getKey()) @AT@ 7738 @LENGTH@ 32\n"
            + "------------DEL SimpleType@@K @AT@ 7739 @LENGTH@ 1\n"
            + "---------MOV MethodInvocation@@transformKey(entry.getKey()) @TO@ SimpleName@@MethodName:put:[(K)transformKey(entry.getKey()), transformValue(entry.getValue())] @AT@ 7742 @LENGTH@ 28\n";
    Assert.assertEquals(actionSets.get(0).toString(), expected);
}
//fuse_6d0e56_998321_fabric#fabric-agent#src#main#java#org#fusesource#fabric#agent#download#MavenDownloadTask.java
@Test
public void test_fuse_6d0e56_998321_() throws IOException {
// MavenDownloadTask.java diff: the download try-block is rewritten — the ".tmp"
// temp-file construction is replaced by File.createTempFile, the streams are
// explicitly closed, and the delete/rename calls are wrapped in if-statements
// that throw IOException on failure (all visible in the expected string below).
List<HierarchicalActionSet> hierarchicalActionSets = getHierarchicalActionSets4java("fuse_6d0e56_998321_fabric#fabric-agent#src#main#java#org#fusesource#fabric#agent#download#MavenDownloadTask.java");
Assert.assertEquals(hierarchicalActionSets.size(),1);
// Single root action set: an UPD on the whole TryStatement; each "---" prefix
// level below is a nested child action (INS/DEL/MOV/UPD) at increasing depth,
// with @AT@/@LENGTH@ giving the source offset and span of each node.
Assert.assertEquals(hierarchicalActionSets.get(0).toString(),"UPD TryStatement@@try { configuration.enableProxy(artifact.getArtifactURL()); String repository=system.getFile().getAbsolutePath(); if (!repository.endsWith(Parser.FILE_SEPARATOR)) { repository=repository + Parser.FILE_SEPARATOR; } InputStream is=artifact.getInputStream(); File file=new File(repository + parser.getArtifactPath()); File tmp=new File(file.getAbsolutePath() + \".tmp\"); tmp.getParentFile().mkdirs(); OutputStream os=new FileOutputStream(tmp); copy(is,os); file.delete(); tmp.renameTo(file); return file;} catch (IOException ignore) { LOG.debug(Ix2 + \"Could not download [\" + artifact+ \"]\"); LOG.trace(Ix2 + \"Reason [\" + ignore.getClass().getName()+ \": \"+ ignore.getMessage()+ \"]\");} @TO@ try { configuration.enableProxy(artifact.getArtifactURL()); String repository=system.getFile().getAbsolutePath(); if (!repository.endsWith(Parser.FILE_SEPARATOR)) { repository=repository + Parser.FILE_SEPARATOR; } InputStream is=artifact.getInputStream(); File file=new File(repository + parser.getArtifactPath()); file.getParentFile().mkdirs(); if (!file.getParentFile().isDirectory()) { throw new IOException(\"Unable to create directory \" + file.getParentFile().toString()); } File tmp=File.createTempFile(\"fabric-agent-\",null,file.getParentFile()); OutputStream os=new FileOutputStream(tmp); copy(is,os); is.close(); os.close(); if (file.exists() && !file.delete()) { throw new IOException(\"Unable to delete file: \" + file.toString()); } if (!tmp.renameTo(file)) { throw new IOException(\"Unable to rename file \" + tmp.toString() + \" to \"+ file.toString()); } return file;} catch (IOException ignore) { LOG.debug(Ix2 + \"Could not download [\" + artifact+ \"]\"); LOG.trace(Ix2 + \"Reason [\" + ignore.getClass().getName()+ \": \"+ ignore.getMessage()+ \"]\");} @AT@ 3516 @LENGTH@ 1028\n" +
"---DEL VariableDeclarationStatement@@File tmp=new File(file.getAbsolutePath() + \".tmp\"); @AT@ 3971 @LENGTH@ 53\n" +
"------DEL VariableDeclarationFragment@@tmp=new File(file.getAbsolutePath() + \".tmp\") @AT@ 3976 @LENGTH@ 47\n" +
"---------DEL ClassInstanceCreation@@File[file.getAbsolutePath() + \".tmp\"] @AT@ 3982 @LENGTH@ 41\n" +
"---INS ExpressionStatement@@MethodInvocation:file.getParentFile().mkdirs() @TO@ TryStatement@@try { configuration.enableProxy(artifact.getArtifactURL()); String repository=system.getFile().getAbsolutePath(); if (!repository.endsWith(Parser.FILE_SEPARATOR)) { repository=repository + Parser.FILE_SEPARATOR; } InputStream is=artifact.getInputStream(); File file=new File(repository + parser.getArtifactPath()); File tmp=new File(file.getAbsolutePath() + \".tmp\"); tmp.getParentFile().mkdirs(); OutputStream os=new FileOutputStream(tmp); copy(is,os); file.delete(); tmp.renameTo(file); return file;} catch (IOException ignore) { LOG.debug(Ix2 + \"Could not download [\" + artifact+ \"]\"); LOG.trace(Ix2 + \"Reason [\" + ignore.getClass().getName()+ \": \"+ ignore.getMessage()+ \"]\");} @AT@ 3972 @LENGTH@ 30\n" +
"------INS MethodInvocation@@file.getParentFile().mkdirs() @TO@ ExpressionStatement@@MethodInvocation:file.getParentFile().mkdirs() @AT@ 3972 @LENGTH@ 29\n" +
"---------INS MethodInvocation@@MethodName:getParentFile:[] @TO@ MethodInvocation@@file.getParentFile().mkdirs() @AT@ 3972 @LENGTH@ 20\n" +
"---------INS SimpleName@@Name:file @TO@ MethodInvocation@@file.getParentFile().mkdirs() @AT@ 3972 @LENGTH@ 4\n" +
"---------INS SimpleName@@MethodName:mkdirs:[] @TO@ MethodInvocation@@file.getParentFile().mkdirs() @AT@ 3993 @LENGTH@ 8\n" +
"---INS IfStatement@@if (!file.getParentFile().isDirectory()) { throw new IOException(\"Unable to create directory \" + file.getParentFile().toString());} @TO@ TryStatement@@try { configuration.enableProxy(artifact.getArtifactURL()); String repository=system.getFile().getAbsolutePath(); if (!repository.endsWith(Parser.FILE_SEPARATOR)) { repository=repository + Parser.FILE_SEPARATOR; } InputStream is=artifact.getInputStream(); File file=new File(repository + parser.getArtifactPath()); File tmp=new File(file.getAbsolutePath() + \".tmp\"); tmp.getParentFile().mkdirs(); OutputStream os=new FileOutputStream(tmp); copy(is,os); file.delete(); tmp.renameTo(file); return file;} catch (IOException ignore) { LOG.debug(Ix2 + \"Could not download [\" + artifact+ \"]\"); LOG.trace(Ix2 + \"Reason [\" + ignore.getClass().getName()+ \": \"+ ignore.getMessage()+ \"]\");} @AT@ 4019 @LENGTH@ 168\n" +
"------INS PrefixExpression@@!file.getParentFile().isDirectory() @TO@ IfStatement@@if (!file.getParentFile().isDirectory()) { throw new IOException(\"Unable to create directory \" + file.getParentFile().toString());} @AT@ 4023 @LENGTH@ 35\n" +
"---------INS Operator@@! @TO@ PrefixExpression@@!file.getParentFile().isDirectory() @AT@ 4023 @LENGTH@ 1\n" +
"---------INS MethodInvocation@@file.getParentFile().isDirectory() @TO@ PrefixExpression@@!file.getParentFile().isDirectory() @AT@ 4024 @LENGTH@ 34\n" +
"------------INS MethodInvocation@@MethodName:getParentFile:[] @TO@ MethodInvocation@@file.getParentFile().isDirectory() @AT@ 4024 @LENGTH@ 20\n" +
"------------INS SimpleName@@Name:file @TO@ MethodInvocation@@file.getParentFile().isDirectory() @AT@ 4024 @LENGTH@ 4\n" +
"------------INS SimpleName@@MethodName:isDirectory:[] @TO@ MethodInvocation@@file.getParentFile().isDirectory() @AT@ 4045 @LENGTH@ 13\n" +
"------INS Block@@ThenBody:{ throw new IOException(\"Unable to create directory \" + file.getParentFile().toString());} @TO@ IfStatement@@if (!file.getParentFile().isDirectory()) { throw new IOException(\"Unable to create directory \" + file.getParentFile().toString());} @AT@ 4060 @LENGTH@ 127\n" +
"---------INS ThrowStatement@@ClassInstanceCreation:new IOException(\"Unable to create directory \" + file.getParentFile().toString()) @TO@ Block@@ThenBody:{ throw new IOException(\"Unable to create directory \" + file.getParentFile().toString());} @AT@ 4082 @LENGTH@ 87\n" +
"------------INS ClassInstanceCreation@@IOException[\"Unable to create directory \" + file.getParentFile().toString()] @TO@ ThrowStatement@@ClassInstanceCreation:new IOException(\"Unable to create directory \" + file.getParentFile().toString()) @AT@ 4088 @LENGTH@ 80\n" +
"---------------INS New@@new @TO@ ClassInstanceCreation@@IOException[\"Unable to create directory \" + file.getParentFile().toString()] @AT@ 4088 @LENGTH@ 3\n" +
"---------------INS SimpleType@@IOException @TO@ ClassInstanceCreation@@IOException[\"Unable to create directory \" + file.getParentFile().toString()] @AT@ 4092 @LENGTH@ 11\n" +
"---------------INS InfixExpression@@\"Unable to create directory \" + file.getParentFile().toString() @TO@ ClassInstanceCreation@@IOException[\"Unable to create directory \" + file.getParentFile().toString()] @AT@ 4104 @LENGTH@ 63\n" +
"------------------INS StringLiteral@@\"Unable to create directory \" @TO@ InfixExpression@@\"Unable to create directory \" + file.getParentFile().toString() @AT@ 4104 @LENGTH@ 29\n" +
"------------------INS Operator@@+ @TO@ InfixExpression@@\"Unable to create directory \" + file.getParentFile().toString() @AT@ 4133 @LENGTH@ 1\n" +
"------------------INS MethodInvocation@@file.getParentFile().toString() @TO@ InfixExpression@@\"Unable to create directory \" + file.getParentFile().toString() @AT@ 4136 @LENGTH@ 31\n" +
"---------------------INS MethodInvocation@@MethodName:getParentFile:[] @TO@ MethodInvocation@@file.getParentFile().toString() @AT@ 4136 @LENGTH@ 20\n" +
"---------------------INS SimpleName@@Name:file @TO@ MethodInvocation@@file.getParentFile().toString() @AT@ 4136 @LENGTH@ 4\n" +
"---------------------INS SimpleName@@MethodName:toString:[] @TO@ MethodInvocation@@file.getParentFile().toString() @AT@ 4157 @LENGTH@ 10\n" +
"---DEL ExpressionStatement@@MethodInvocation:tmp.getParentFile().mkdirs() @AT@ 4041 @LENGTH@ 29\n" +
"------DEL MethodInvocation@@tmp.getParentFile().mkdirs() @AT@ 4041 @LENGTH@ 28\n" +
"---------DEL MethodInvocation@@MethodName:getParentFile:[] @AT@ 4041 @LENGTH@ 19\n" +
"---DEL ExpressionStatement@@MethodInvocation:file.delete() @AT@ 4178 @LENGTH@ 14\n" +
"------DEL MethodInvocation@@file.delete() @AT@ 4178 @LENGTH@ 13\n" +
"---INS VariableDeclarationStatement@@File tmp=File.createTempFile(\"fabric-agent-\",null,file.getParentFile()); @TO@ TryStatement@@try { configuration.enableProxy(artifact.getArtifactURL()); String repository=system.getFile().getAbsolutePath(); if (!repository.endsWith(Parser.FILE_SEPARATOR)) { repository=repository + Parser.FILE_SEPARATOR; } InputStream is=artifact.getInputStream(); File file=new File(repository + parser.getArtifactPath()); File tmp=new File(file.getAbsolutePath() + \".tmp\"); tmp.getParentFile().mkdirs(); OutputStream os=new FileOutputStream(tmp); copy(is,os); file.delete(); tmp.renameTo(file); return file;} catch (IOException ignore) { LOG.debug(Ix2 + \"Could not download [\" + artifact+ \"]\"); LOG.trace(Ix2 + \"Reason [\" + ignore.getClass().getName()+ \": \"+ ignore.getMessage()+ \"]\");} @AT@ 4204 @LENGTH@ 76\n" +
"------MOV SimpleType@@File @TO@ VariableDeclarationStatement@@File tmp=File.createTempFile(\"fabric-agent-\",null,file.getParentFile()); @AT@ 3971 @LENGTH@ 4\n" +
"------INS VariableDeclarationFragment@@tmp=File.createTempFile(\"fabric-agent-\",null,file.getParentFile()) @TO@ VariableDeclarationStatement@@File tmp=File.createTempFile(\"fabric-agent-\",null,file.getParentFile()); @AT@ 4209 @LENGTH@ 70\n" +
"---------MOV SimpleName@@tmp @TO@ VariableDeclarationFragment@@tmp=File.createTempFile(\"fabric-agent-\",null,file.getParentFile()) @AT@ 3976 @LENGTH@ 3\n" +
"---------INS MethodInvocation@@File.createTempFile(\"fabric-agent-\",null,file.getParentFile()) @TO@ VariableDeclarationFragment@@tmp=File.createTempFile(\"fabric-agent-\",null,file.getParentFile()) @AT@ 4215 @LENGTH@ 64\n" +
"------------INS SimpleName@@Name:File @TO@ MethodInvocation@@File.createTempFile(\"fabric-agent-\",null,file.getParentFile()) @AT@ 4215 @LENGTH@ 4\n" +
"------------INS SimpleName@@MethodName:createTempFile:[\"fabric-agent-\", null, file.getParentFile()] @TO@ MethodInvocation@@File.createTempFile(\"fabric-agent-\",null,file.getParentFile()) @AT@ 4220 @LENGTH@ 59\n" +
"---------------INS StringLiteral@@\"fabric-agent-\" @TO@ SimpleName@@MethodName:createTempFile:[\"fabric-agent-\", null, file.getParentFile()] @AT@ 4235 @LENGTH@ 15\n" +
"---------------INS NullLiteral@@null @TO@ SimpleName@@MethodName:createTempFile:[\"fabric-agent-\", null, file.getParentFile()] @AT@ 4252 @LENGTH@ 4\n" +
"---------------INS MethodInvocation@@file.getParentFile() @TO@ SimpleName@@MethodName:createTempFile:[\"fabric-agent-\", null, file.getParentFile()] @AT@ 4258 @LENGTH@ 20\n" +
"------------------INS SimpleName@@Name:file @TO@ MethodInvocation@@file.getParentFile() @AT@ 4258 @LENGTH@ 4\n" +
"------------------INS SimpleName@@MethodName:getParentFile:[] @TO@ MethodInvocation@@file.getParentFile() @AT@ 4263 @LENGTH@ 15\n" +
"---DEL ExpressionStatement@@MethodInvocation:tmp.renameTo(file) @AT@ 4209 @LENGTH@ 19\n" +
"---INS ExpressionStatement@@MethodInvocation:is.close() @TO@ TryStatement@@try { configuration.enableProxy(artifact.getArtifactURL()); String repository=system.getFile().getAbsolutePath(); if (!repository.endsWith(Parser.FILE_SEPARATOR)) { repository=repository + Parser.FILE_SEPARATOR; } InputStream is=artifact.getInputStream(); File file=new File(repository + parser.getArtifactPath()); File tmp=new File(file.getAbsolutePath() + \".tmp\"); tmp.getParentFile().mkdirs(); OutputStream os=new FileOutputStream(tmp); copy(is,os); file.delete(); tmp.renameTo(file); return file;} catch (IOException ignore) { LOG.debug(Ix2 + \"Could not download [\" + artifact+ \"]\"); LOG.trace(Ix2 + \"Reason [\" + ignore.getClass().getName()+ \": \"+ ignore.getMessage()+ \"]\");} @AT@ 4388 @LENGTH@ 11\n" +
"------INS MethodInvocation@@is.close() @TO@ ExpressionStatement@@MethodInvocation:is.close() @AT@ 4388 @LENGTH@ 10\n" +
"---------INS SimpleName@@Name:is @TO@ MethodInvocation@@is.close() @AT@ 4388 @LENGTH@ 2\n" +
"---------INS SimpleName@@MethodName:close:[] @TO@ MethodInvocation@@is.close() @AT@ 4391 @LENGTH@ 7\n" +
"---INS ExpressionStatement@@MethodInvocation:os.close() @TO@ TryStatement@@try { configuration.enableProxy(artifact.getArtifactURL()); String repository=system.getFile().getAbsolutePath(); if (!repository.endsWith(Parser.FILE_SEPARATOR)) { repository=repository + Parser.FILE_SEPARATOR; } InputStream is=artifact.getInputStream(); File file=new File(repository + parser.getArtifactPath()); File tmp=new File(file.getAbsolutePath() + \".tmp\"); tmp.getParentFile().mkdirs(); OutputStream os=new FileOutputStream(tmp); copy(is,os); file.delete(); tmp.renameTo(file); return file;} catch (IOException ignore) { LOG.debug(Ix2 + \"Could not download [\" + artifact+ \"]\"); LOG.trace(Ix2 + \"Reason [\" + ignore.getClass().getName()+ \": \"+ ignore.getMessage()+ \"]\");} @AT@ 4416 @LENGTH@ 11\n" +
"------INS MethodInvocation@@os.close() @TO@ ExpressionStatement@@MethodInvocation:os.close() @AT@ 4416 @LENGTH@ 10\n" +
"---------INS SimpleName@@Name:os @TO@ MethodInvocation@@os.close() @AT@ 4416 @LENGTH@ 2\n" +
"---------INS SimpleName@@MethodName:close:[] @TO@ MethodInvocation@@os.close() @AT@ 4419 @LENGTH@ 7\n" +
"---INS IfStatement@@if (file.exists() && !file.delete()) { throw new IOException(\"Unable to delete file: \" + file.toString());} @TO@ TryStatement@@try { configuration.enableProxy(artifact.getArtifactURL()); String repository=system.getFile().getAbsolutePath(); if (!repository.endsWith(Parser.FILE_SEPARATOR)) { repository=repository + Parser.FILE_SEPARATOR; } InputStream is=artifact.getInputStream(); File file=new File(repository + parser.getArtifactPath()); File tmp=new File(file.getAbsolutePath() + \".tmp\"); tmp.getParentFile().mkdirs(); OutputStream os=new FileOutputStream(tmp); copy(is,os); file.delete(); tmp.renameTo(file); return file;} catch (IOException ignore) { LOG.debug(Ix2 + \"Could not download [\" + artifact+ \"]\"); LOG.trace(Ix2 + \"Reason [\" + ignore.getClass().getName()+ \": \"+ ignore.getMessage()+ \"]\");} @AT@ 4444 @LENGTH@ 144\n" +
"------INS InfixExpression@@file.exists() && !file.delete() @TO@ IfStatement@@if (file.exists() && !file.delete()) { throw new IOException(\"Unable to delete file: \" + file.toString());} @AT@ 4448 @LENGTH@ 31\n" +
"---------INS MethodInvocation@@file.exists() @TO@ InfixExpression@@file.exists() && !file.delete() @AT@ 4448 @LENGTH@ 13\n" +
"------------INS SimpleName@@Name:file @TO@ MethodInvocation@@file.exists() @AT@ 4448 @LENGTH@ 4\n" +
"------------INS SimpleName@@MethodName:exists:[] @TO@ MethodInvocation@@file.exists() @AT@ 4453 @LENGTH@ 8\n" +
"---------INS Operator@@&& @TO@ InfixExpression@@file.exists() && !file.delete() @AT@ 4461 @LENGTH@ 2\n" +
"---------INS PrefixExpression@@!file.delete() @TO@ InfixExpression@@file.exists() && !file.delete() @AT@ 4465 @LENGTH@ 14\n" +
"------------INS Operator@@! @TO@ PrefixExpression@@!file.delete() @AT@ 4465 @LENGTH@ 1\n" +
"------------INS MethodInvocation@@file.delete() @TO@ PrefixExpression@@!file.delete() @AT@ 4466 @LENGTH@ 13\n" +
"---------------INS SimpleName@@Name:file @TO@ MethodInvocation@@file.delete() @AT@ 4466 @LENGTH@ 4\n" +
"---------------INS SimpleName@@MethodName:delete:[] @TO@ MethodInvocation@@file.delete() @AT@ 4471 @LENGTH@ 8\n" +
"------INS Block@@ThenBody:{ throw new IOException(\"Unable to delete file: \" + file.toString());} @TO@ IfStatement@@if (file.exists() && !file.delete()) { throw new IOException(\"Unable to delete file: \" + file.toString());} @AT@ 4481 @LENGTH@ 107\n" +
"---------INS ThrowStatement@@ClassInstanceCreation:new IOException(\"Unable to delete file: \" + file.toString()) @TO@ Block@@ThenBody:{ throw new IOException(\"Unable to delete file: \" + file.toString());} @AT@ 4503 @LENGTH@ 67\n" +
"------------INS ClassInstanceCreation@@IOException[\"Unable to delete file: \" + file.toString()] @TO@ ThrowStatement@@ClassInstanceCreation:new IOException(\"Unable to delete file: \" + file.toString()) @AT@ 4509 @LENGTH@ 60\n" +
"---------------MOV New@@new @TO@ ClassInstanceCreation@@IOException[\"Unable to delete file: \" + file.toString()] @AT@ 3982 @LENGTH@ 3\n" +
"---------------MOV SimpleType@@File @TO@ ClassInstanceCreation@@IOException[\"Unable to delete file: \" + file.toString()] @AT@ 3986 @LENGTH@ 4\n" +
"---------------MOV InfixExpression@@file.getAbsolutePath() + \".tmp\" @TO@ ClassInstanceCreation@@IOException[\"Unable to delete file: \" + file.toString()] @AT@ 3991 @LENGTH@ 31\n" +
"------------------UPD MethodInvocation@@file.getAbsolutePath() @TO@ file.toString() @AT@ 3991 @LENGTH@ 22\n" +
"---------------------UPD SimpleName@@MethodName:getAbsolutePath:[] @TO@ MethodName:toString:[] @AT@ 3996 @LENGTH@ 17\n" +
"------------------DEL Operator@@+ @AT@ 4013 @LENGTH@ 1\n" +
"------------------DEL StringLiteral@@\".tmp\" @AT@ 4016 @LENGTH@ 6\n" +
"------------------INS StringLiteral@@\"Unable to delete file: \" @TO@ InfixExpression@@file.getAbsolutePath() + \".tmp\" @AT@ 4525 @LENGTH@ 25\n" +
"------------------INS Operator@@+ @TO@ InfixExpression@@file.getAbsolutePath() + \".tmp\" @AT@ 4550 @LENGTH@ 1\n" +
"---INS IfStatement@@if (!tmp.renameTo(file)) { throw new IOException(\"Unable to rename file \" + tmp.toString() + \" to \"+ file.toString());} @TO@ TryStatement@@try { configuration.enableProxy(artifact.getArtifactURL()); String repository=system.getFile().getAbsolutePath(); if (!repository.endsWith(Parser.FILE_SEPARATOR)) { repository=repository + Parser.FILE_SEPARATOR; } InputStream is=artifact.getInputStream(); File file=new File(repository + parser.getArtifactPath()); File tmp=new File(file.getAbsolutePath() + \".tmp\"); tmp.getParentFile().mkdirs(); OutputStream os=new FileOutputStream(tmp); copy(is,os); file.delete(); tmp.renameTo(file); return file;} catch (IOException ignore) { LOG.debug(Ix2 + \"Could not download [\" + artifact+ \"]\"); LOG.trace(Ix2 + \"Reason [\" + ignore.getClass().getName()+ \": \"+ ignore.getMessage()+ \"]\");} @AT@ 4605 @LENGTH@ 157\n" +
"------INS PrefixExpression@@!tmp.renameTo(file) @TO@ IfStatement@@if (!tmp.renameTo(file)) { throw new IOException(\"Unable to rename file \" + tmp.toString() + \" to \"+ file.toString());} @AT@ 4609 @LENGTH@ 19\n" +
"---------MOV MethodInvocation@@tmp.renameTo(file) @TO@ PrefixExpression@@!tmp.renameTo(file) @AT@ 4209 @LENGTH@ 18\n" +
"---------INS Operator@@! @TO@ PrefixExpression@@!tmp.renameTo(file) @AT@ 4609 @LENGTH@ 1\n" +
"------INS Block@@ThenBody:{ throw new IOException(\"Unable to rename file \" + tmp.toString() + \" to \"+ file.toString());} @TO@ IfStatement@@if (!tmp.renameTo(file)) { throw new IOException(\"Unable to rename file \" + tmp.toString() + \" to \"+ file.toString());} @AT@ 4630 @LENGTH@ 132\n" +
"---------INS ThrowStatement@@ClassInstanceCreation:new IOException(\"Unable to rename file \" + tmp.toString() + \" to \"+ file.toString()) @TO@ Block@@ThenBody:{ throw new IOException(\"Unable to rename file \" + tmp.toString() + \" to \"+ file.toString());} @AT@ 4652 @LENGTH@ 92\n" +
"------------INS ClassInstanceCreation@@IOException[\"Unable to rename file \" + tmp.toString() + \" to \"+ file.toString()] @TO@ ThrowStatement@@ClassInstanceCreation:new IOException(\"Unable to rename file \" + tmp.toString() + \" to \"+ file.toString()) @AT@ 4658 @LENGTH@ 85\n" +
"---------------INS New@@new @TO@ ClassInstanceCreation@@IOException[\"Unable to rename file \" + tmp.toString() + \" to \"+ file.toString()] @AT@ 4658 @LENGTH@ 3\n" +
"---------------INS SimpleType@@IOException @TO@ ClassInstanceCreation@@IOException[\"Unable to rename file \" + tmp.toString() + \" to \"+ file.toString()] @AT@ 4662 @LENGTH@ 11\n" +
"---------------INS InfixExpression@@\"Unable to rename file \" + tmp.toString() + \" to \"+ file.toString() @TO@ ClassInstanceCreation@@IOException[\"Unable to rename file \" + tmp.toString() + \" to \"+ file.toString()] @AT@ 4674 @LENGTH@ 68\n" +
"------------------INS StringLiteral@@\"Unable to rename file \" @TO@ InfixExpression@@\"Unable to rename file \" + tmp.toString() + \" to \"+ file.toString() @AT@ 4674 @LENGTH@ 24\n" +
"------------------INS Operator@@+ @TO@ InfixExpression@@\"Unable to rename file \" + tmp.toString() + \" to \"+ file.toString() @AT@ 4698 @LENGTH@ 1\n" +
"------------------INS MethodInvocation@@tmp.toString() @TO@ InfixExpression@@\"Unable to rename file \" + tmp.toString() + \" to \"+ file.toString() @AT@ 4701 @LENGTH@ 14\n" +
"---------------------MOV SimpleName@@Name:tmp @TO@ MethodInvocation@@tmp.toString() @AT@ 4041 @LENGTH@ 3\n" +
"---------------------MOV SimpleName@@MethodName:mkdirs:[] @TO@ MethodInvocation@@tmp.toString() @AT@ 4061 @LENGTH@ 8\n" +
"------------------INS StringLiteral@@\" to \" @TO@ InfixExpression@@\"Unable to rename file \" + tmp.toString() + \" to \"+ file.toString() @AT@ 4718 @LENGTH@ 6\n" +
"------------------INS MethodInvocation@@file.toString() @TO@ InfixExpression@@\"Unable to rename file \" + tmp.toString() + \" to \"+ file.toString() @AT@ 4727 @LENGTH@ 15\n" +
"---------------------MOV SimpleName@@Name:file @TO@ MethodInvocation@@file.toString() @AT@ 4178 @LENGTH@ 4\n" +
"---------------------MOV SimpleName@@MethodName:delete:[] @TO@ MethodInvocation@@file.toString() @AT@ 4183 @LENGTH@ 8\n");
}
//commons-collections_2d6bc8_506966_src#java#org#apache#commons#collections#ExtendedProperties.java
@Test
public void test_collections_2d6bc8_506966() throws IOException {
// TODO(review): the original comment here read "seems ok but" and was left
// unfinished — verify the three expected action sets below are complete.
List<HierarchicalActionSet> hierarchicalActionSets = getHierarchicalActionSets4java("commons-collections_2d6bc8_506966_src#java#org#apache#commons#collections#ExtendedProperties.java");
Assert.assertEquals(hierarchicalActionSets.size(),3);
// Set 0: the private init(ExtendedProperties) method declaration is deleted.
Assert.assertEquals(hierarchicalActionSets.get(0).toString(),"DEL MethodDeclaration@@private, void, MethodName:init, ExtendedProperties exp, IOException, @AT@ 13436 @LENGTH@ 104\n" +
"---DEL Modifier@@private @AT@ 13436 @LENGTH@ 7\n" +
"---DEL PrimitiveType@@void @AT@ 13444 @LENGTH@ 4\n" +
"---DEL SimpleName@@MethodName:init @AT@ 13449 @LENGTH@ 4\n" +
"---DEL SingleVariableDeclaration@@ExtendedProperties exp @AT@ 13455 @LENGTH@ 22\n" +
"------DEL SimpleType@@ExtendedProperties @AT@ 13455 @LENGTH@ 18\n" +
"------DEL SimpleName@@exp @AT@ 13474 @LENGTH@ 3\n" +
"---DEL SimpleType@@IOException @AT@ 13487 @LENGTH@ 11\n");
// Set 1: the isInitialized=true assignment is moved into addProperty(...).
Assert.assertEquals(hierarchicalActionSets.get(1).toString(),"MOV ExpressionStatement@@Assignment:isInitialized=true @TO@ MethodDeclaration@@public, void, MethodName:addProperty, String key, Object token, @AT@ 13513 @LENGTH@ 21\n");
// Set 2: a finally block setting isInitialized=true is added to the big
// property-reading try/catch (the TryStatement UPD below).
Assert.assertEquals(hierarchicalActionSets.get(2).toString(),"UPD TryStatement@@try { while (true) { String line=reader.readProperty(); int equalSign=line.indexOf('='); if (equalSign > 0) { String key=line.substring(0,equalSign).trim(); String value=line.substring(equalSign + 1).trim(); if (\"\".equals(value)) continue; if (getInclude() != null && key.equalsIgnoreCase(getInclude())) { File file=null; if (value.startsWith(fileSeparator)) { file=new File(value); } else { if (value.startsWith(\".\" + fileSeparator)) { value=value.substring(2); } file=new File(basePath + value); } if (file != null && file.exists() && file.canRead()) { load(new FileInputStream(file)); } } else { addProperty(key,value); } } }} catch (NullPointerException e) { return;} @TO@ try { while (true) { String line=reader.readProperty(); int equalSign=line.indexOf('='); if (equalSign > 0) { String key=line.substring(0,equalSign).trim(); String value=line.substring(equalSign + 1).trim(); if (\"\".equals(value)) continue; if (getInclude() != null && key.equalsIgnoreCase(getInclude())) { File file=null; if (value.startsWith(fileSeparator)) { file=new File(value); } else { if (value.startsWith(\".\" + fileSeparator)) { value=value.substring(2); } file=new File(basePath + value); } if (file != null && file.exists() && file.canRead()) { load(new FileInputStream(file)); } } else { addProperty(key,value); } } }} catch (NullPointerException e) { return;} finally { isInitialized=true;} @AT@ 15296 @LENGTH@ 2508\n" +
"---INS Block@@FinallyBody:{ isInitialized=true;} @TO@ TryStatement@@try { while (true) { String line=reader.readProperty(); int equalSign=line.indexOf('='); if (equalSign > 0) { String key=line.substring(0,equalSign).trim(); String value=line.substring(equalSign + 1).trim(); if (\"\".equals(value)) continue; if (getInclude() != null && key.equalsIgnoreCase(getInclude())) { File file=null; if (value.startsWith(fileSeparator)) { file=new File(value); } else { if (value.startsWith(\".\" + fileSeparator)) { value=value.substring(2); } file=new File(basePath + value); } if (file != null && file.exists() && file.canRead()) { load(new FileInputStream(file)); } } else { addProperty(key,value); } } }} catch (NullPointerException e) { return;} @AT@ 17503 @LENGTH@ 115\n" +
"------INS ExpressionStatement@@Assignment:isInitialized=true @TO@ Block@@FinallyBody:{ isInitialized=true;} @AT@ 17587 @LENGTH@ 21\n" +
"---------INS Assignment@@isInitialized=true @TO@ ExpressionStatement@@Assignment:isInitialized=true @AT@ 17587 @LENGTH@ 20\n" +
"------------INS SimpleName@@isInitialized @TO@ Assignment@@isInitialized=true @AT@ 17587 @LENGTH@ 13\n" +
"------------INS Operator@@= @TO@ Assignment@@isInitialized=true @AT@ 17600 @LENGTH@ 1\n" +
"------------INS BooleanLiteral@@true @TO@ Assignment@@isInitialized=true @AT@ 17603 @LENGTH@ 4\n");
}
//commons-compress_b7de91_5273bd_src#main#java#org#apache#commons#compress#archivers#sevenz#SevenZFile.java
@Test
public void test_compress_b7de91_5273bd() throws IOException {
    // Verifies the edit script for a method-name typo fix
    // (setHasAcessDate/getHasAcessDate -> setHasAccessDate/getHasAccessDate).
    List<HierarchicalActionSet> hierarchicalActionSets = getHierarchicalActionSets4java("commons-compress_b7de91_5273bd_src#main#java#org#apache#commons#compress#archivers#sevenz#SevenZFile.java");
    // JUnit's assertEquals is (expected, actual); this order keeps failure messages accurate.
    Assert.assertEquals(1, hierarchicalActionSets.size());
    Assert.assertEquals("UPD ForStatement@@for (int i=0; i < files.length; i++) { files[i].setHasAcessDate(timesDefined.get(i)); if (files[i].getHasAcessDate()) { files[i].setAccessDate(Long.reverseBytes(header.readLong())); }} @TO@ for (int i=0; i < files.length; i++) { files[i].setHasAccessDate(timesDefined.get(i)); if (files[i].getHasAccessDate()) { files[i].setAccessDate(Long.reverseBytes(header.readLong())); }} @AT@ 27930 @LENGTH@ 327\n" +
            "---UPD ExpressionStatement@@MethodInvocation:files[i].setHasAcessDate(timesDefined.get(i)) @TO@ MethodInvocation:files[i].setHasAccessDate(timesDefined.get(i)) @AT@ 27999 @LENGTH@ 46\n" +
            "------UPD MethodInvocation@@files[i].setHasAcessDate(timesDefined.get(i)) @TO@ files[i].setHasAccessDate(timesDefined.get(i)) @AT@ 27999 @LENGTH@ 45\n" +
            "---------UPD SimpleName@@MethodName:setHasAcessDate:[timesDefined.get(i)] @TO@ MethodName:setHasAccessDate:[timesDefined.get(i)] @AT@ 28008 @LENGTH@ 36\n" +
            "---UPD IfStatement@@if (files[i].getHasAcessDate()) { files[i].setAccessDate(Long.reverseBytes(header.readLong()));} @TO@ if (files[i].getHasAccessDate()) { files[i].setAccessDate(Long.reverseBytes(header.readLong()));} @AT@ 28074 @LENGTH@ 157\n" +
            "------UPD MethodInvocation@@files[i].getHasAcessDate() @TO@ files[i].getHasAccessDate() @AT@ 28078 @LENGTH@ 26\n" +
            "---------UPD SimpleName@@MethodName:getHasAcessDate:[] @TO@ MethodName:getHasAccessDate:[] @AT@ 28087 @LENGTH@ 17\n",
            hierarchicalActionSets.get(0).toString());
}
//metadata_017a3f_580eac_web#src#main#java#org#jboss#metadata#parser#jbossweb#ContainerListenerMetaDataParser.java
@Test
public void test_metadata_017a3f_580eac() throws IOException {
    // Verifies the edit script for wrapping a switch-case statement in a try/catch
    // (IllegalArgumentException -> unexpectedValue) inside a SwitchStatement.
    List<HierarchicalActionSet> hierarchicalActionSets = getHierarchicalActionSets4java("metadata_017a3f_580eac_web#src#main#java#org#jboss#metadata#parser#jbossweb#ContainerListenerMetaDataParser.java");
    // JUnit's assertEquals is (expected, actual); this order keeps failure messages accurate.
    Assert.assertEquals(1, hierarchicalActionSets.size());
    Assert.assertEquals("UPD SwitchStatement@@switch (element) {case CLASS_NAME: containerListener.setListenerClass(getElementText(reader));break;case MODULE:containerListener.setModule(getElementText(reader));break;case LISTENER_TYPE:containerListener.setListenerType(ContainerListenerType.valueOf(getElementText(reader)));break;case PARAM:List<ParamValueMetaData> params=containerListener.getParams();if (params == null) {params=new ArrayList<ParamValueMetaData>();containerListener.setParams(params);}params.add(ParamValueMetaDataParser.parse(reader));break;default :throw unexpectedElement(reader);} @TO@ switch (element) {case CLASS_NAME: containerListener.setListenerClass(getElementText(reader));break;case MODULE:containerListener.setModule(getElementText(reader));break;case LISTENER_TYPE:try {containerListener.setListenerType(ContainerListenerType.valueOf(getElementText(reader)));} catch (IllegalArgumentException e) {throw unexpectedValue(reader,e);}break;case PARAM:List<ParamValueMetaData> params=containerListener.getParams();if (params == null) {params=new ArrayList<ParamValueMetaData>();containerListener.setParams(params);}params.add(ParamValueMetaDataParser.parse(reader));break;default :throw unexpectedElement(reader);} @AT@ 1995 @LENGTH@ 924\n" +
            "---INS TryStatement@@try { containerListener.setListenerType(ContainerListenerType.valueOf(getElementText(reader)));} catch (IllegalArgumentException e) { throw unexpectedValue(reader,e);} @TO@ SwitchStatement@@switch (element) {case CLASS_NAME: containerListener.setListenerClass(getElementText(reader));break;case MODULE:containerListener.setModule(getElementText(reader));break;case LISTENER_TYPE:containerListener.setListenerType(ContainerListenerType.valueOf(getElementText(reader)));break;case PARAM:List<ParamValueMetaData> params=containerListener.getParams();if (params == null) {params=new ArrayList<ParamValueMetaData>();containerListener.setParams(params);}params.add(ParamValueMetaDataParser.parse(reader));break;default :throw unexpectedElement(reader);} @AT@ 2333 @LENGTH@ 258\n" +
            "------MOV ExpressionStatement@@MethodInvocation:containerListener.setListenerType(ContainerListenerType.valueOf(getElementText(reader))) @TO@ TryStatement@@try { containerListener.setListenerType(ContainerListenerType.valueOf(getElementText(reader)));} catch (IllegalArgumentException e) { throw unexpectedValue(reader,e);} @AT@ 2330 @LENGTH@ 89\n" +
            "------INS CatchClause@@catch (IllegalArgumentException e) { throw unexpectedValue(reader,e);} @TO@ TryStatement@@try { containerListener.setListenerType(ContainerListenerType.valueOf(getElementText(reader)));} catch (IllegalArgumentException e) { throw unexpectedValue(reader,e);} @AT@ 2475 @LENGTH@ 116\n" +
            "---------INS SingleVariableDeclaration@@IllegalArgumentException e @TO@ CatchClause@@catch (IllegalArgumentException e) { throw unexpectedValue(reader,e);} @AT@ 2482 @LENGTH@ 26\n" +
            "------------INS SimpleType@@IllegalArgumentException @TO@ SingleVariableDeclaration@@IllegalArgumentException e @AT@ 2482 @LENGTH@ 24\n" +
            "------------INS SimpleName@@e @TO@ SingleVariableDeclaration@@IllegalArgumentException e @AT@ 2507 @LENGTH@ 1\n" +
            "---------INS ThrowStatement@@MethodInvocation:unexpectedValue(reader,e) @TO@ CatchClause@@catch (IllegalArgumentException e) { throw unexpectedValue(reader,e);} @AT@ 2536 @LENGTH@ 33\n" +
            "------------INS MethodInvocation@@unexpectedValue(reader,e) @TO@ ThrowStatement@@MethodInvocation:unexpectedValue(reader,e) @AT@ 2542 @LENGTH@ 26\n" +
            "---------------INS SimpleName@@MethodName:unexpectedValue:[reader, e] @TO@ MethodInvocation@@unexpectedValue(reader,e) @AT@ 2542 @LENGTH@ 26\n" +
            "------------------INS SimpleName@@reader @TO@ SimpleName@@MethodName:unexpectedValue:[reader, e] @AT@ 2558 @LENGTH@ 6\n" +
            "------------------INS SimpleName@@e @TO@ SimpleName@@MethodName:unexpectedValue:[reader, e] @AT@ 2566 @LENGTH@ 1\n",
            hierarchicalActionSets.get(0).toString());
}
//metadata_674226_73eff3_common#src#main#java#org#jboss#metadata#parser#ee#ServiceReferenceMetaDataParser.java
@Test
public void test_metadata_674226_73eff3() throws IOException {
    // Verifies the edit script for replacing QName.valueOf(...) with parseQName(reader, ...).
    List<HierarchicalActionSet> hierarchicalActionSets = getHierarchicalActionSets4java("metadata_674226_73eff3_common#src#main#java#org#jboss#metadata#parser#ee#ServiceReferenceMetaDataParser.java");
    // JUnit's assertEquals is (expected, actual); this order keeps failure messages accurate.
    Assert.assertEquals(1, hierarchicalActionSets.size());
    Assert.assertEquals("UPD ExpressionStatement@@MethodInvocation:serviceReference.setServiceQname(QName.valueOf(getElementText(reader))) @TO@ MethodInvocation:serviceReference.setServiceQname(parseQName(reader,getElementText(reader))) @AT@ 4613 @LENGTH@ 72\n" +
            "---UPD MethodInvocation@@serviceReference.setServiceQname(QName.valueOf(getElementText(reader))) @TO@ serviceReference.setServiceQname(parseQName(reader,getElementText(reader))) @AT@ 4613 @LENGTH@ 71\n" +
            "------UPD SimpleName@@MethodName:setServiceQname:[QName.valueOf(getElementText(reader))] @TO@ MethodName:setServiceQname:[parseQName(reader,getElementText(reader))] @AT@ 4630 @LENGTH@ 54\n" +
            "---------UPD MethodInvocation@@QName.valueOf(getElementText(reader)) @TO@ parseQName(reader,getElementText(reader)) @AT@ 4646 @LENGTH@ 37\n" +
            "------------DEL SimpleName@@Name:QName @AT@ 4646 @LENGTH@ 5\n" +
            "------------UPD SimpleName@@MethodName:valueOf:[getElementText(reader)] @TO@ MethodName:parseQName:[reader, getElementText(reader)] @AT@ 4652 @LENGTH@ 31\n" +
            "---------------INS SimpleName@@reader @TO@ SimpleName@@MethodName:valueOf:[getElementText(reader)] @AT@ 4657 @LENGTH@ 6\n",
            hierarchicalActionSets.get(0).toString());
}
//commons-configuration_bad272_1c720e_src#java#org#apache#commons#configuration#DataConfiguration.java
@Test
public void test_configuration_bad272_1c720e() throws IOException {
    // Verifies the edit scripts for splitting string literals inside two exception messages.
    List<HierarchicalActionSet> hierarchicalActionSets = getHierarchicalActionSets4java("commons-configuration_bad272_1c720e_src#java#org#apache#commons#configuration#DataConfiguration.java");
    // JUnit's assertEquals is (expected, actual); this order keeps failure messages accurate.
    Assert.assertEquals(2, hierarchicalActionSets.size());
    Assert.assertEquals("UPD ThrowStatement@@ClassInstanceCreation:new IllegalArgumentException(\"The type of the default value (\" + defaultValue.getClass() + \") is not an array of the specified class (\"+ cls+ \")\") @TO@ ClassInstanceCreation:new IllegalArgumentException(\"The type of the default value (\" + defaultValue.getClass() + \")\"+ \" is not an array of the specified class (\"+ cls+ \")\") @AT@ 13001 @LENGTH@ 155\n" +
            "---UPD ClassInstanceCreation@@IllegalArgumentException[\"The type of the default value (\" + defaultValue.getClass() + \") is not an array of the specified class (\"+ cls+ \")\"] @TO@ IllegalArgumentException[\"The type of the default value (\" + defaultValue.getClass() + \")\"+ \" is not an array of the specified class (\"+ cls+ \")\"] @AT@ 13007 @LENGTH@ 148\n" +
            "------UPD InfixExpression@@\"The type of the default value (\" + defaultValue.getClass() + \") is not an array of the specified class (\"+ cls+ \")\" @TO@ \"The type of the default value (\" + defaultValue.getClass() + \")\"+ \" is not an array of the specified class (\"+ cls+ \")\" @AT@ 13036 @LENGTH@ 118\n" +
            "---------UPD StringLiteral@@\") is not an array of the specified class (\" @TO@ \" is not an array of the specified class (\" @AT@ 13098 @LENGTH@ 44\n" +
            "---------INS StringLiteral@@\")\" @TO@ InfixExpression@@\"The type of the default value (\" + defaultValue.getClass() + \") is not an array of the specified class (\"+ cls+ \")\" @AT@ 13122 @LENGTH@ 3\n",
            hierarchicalActionSets.get(0).toString());
    Assert.assertEquals("UPD ThrowStatement@@ClassInstanceCreation:new ConversionException('\\'' + key + \"' (\"+ arrayType+ \") doesn't map to a compatible array of \"+ cls) @TO@ ClassInstanceCreation:new ConversionException('\\'' + key + \"' (\"+ arrayType+ \")\"+ \" doesn't map to a compatible array of \"+ cls) @AT@ 15560 @LENGTH@ 112\n" +
            "---UPD ClassInstanceCreation@@ConversionException['\\'' + key + \"' (\"+ arrayType+ \") doesn't map to a compatible array of \"+ cls] @TO@ ConversionException['\\'' + key + \"' (\"+ arrayType+ \")\"+ \" doesn't map to a compatible array of \"+ cls] @AT@ 15566 @LENGTH@ 105\n" +
            "------UPD InfixExpression@@'\\'' + key + \"' (\"+ arrayType+ \") doesn't map to a compatible array of \"+ cls @TO@ '\\'' + key + \"' (\"+ arrayType+ \")\"+ \" doesn't map to a compatible array of \"+ cls @AT@ 15590 @LENGTH@ 80\n" +
            "---------UPD StringLiteral@@\") doesn't map to a compatible array of \" @TO@ \" doesn't map to a compatible array of \" @AT@ 15623 @LENGTH@ 41\n" +
            "---------INS StringLiteral@@\")\" @TO@ InfixExpression@@'\\'' + key + \"' (\"+ arrayType+ \") doesn't map to a compatible array of \"+ cls @AT@ 15672 @LENGTH@ 3\n",
            hierarchicalActionSets.get(1).toString());
}
//commons-compress_4ac67b_0eccda_src#main#java#org#apache#commons#compress#compressors#snappy#SnappyCompressorInputStream.java
@Test
public void test_compress_4ac67b_0eccda() throws IOException {
    // Verifies the edit script for replacing two plain return statements with
    // conditional expressions (litLen / backReferenceLen > 0 ? ... : read(b,off,len)).
    List<HierarchicalActionSet> hierarchicalActionSets = getHierarchicalActionSets4java("commons-compress_4ac67b_0eccda_src#main#java#org#apache#commons#compress#compressors#snappy#SnappyCompressorInputStream.java");
    // JUnit's assertEquals is (expected, actual); this order keeps failure messages accurate.
    Assert.assertEquals(1, hierarchicalActionSets.size());
    Assert.assertEquals("UPD SwitchStatement@@switch (state) {case NO_BLOCK: fill();return read(b,off,len);case IN_LITERAL:int litLen=readLiteral(b,off,len);if (!hasMoreDataInBlock()) {state=State.NO_BLOCK;}return litLen;case IN_BACK_REFERENCE:int backReferenceLen=readBackReference(b,off,len);if (!hasMoreDataInBlock()) {state=State.NO_BLOCK;}return backReferenceLen;default :throw new IOException(\"Unknown stream state \" + state);} @TO@ switch (state) {case NO_BLOCK: fill();return read(b,off,len);case IN_LITERAL:int litLen=readLiteral(b,off,len);if (!hasMoreDataInBlock()) {state=State.NO_BLOCK;}return litLen > 0 ? litLen : read(b,off,len);case IN_BACK_REFERENCE:int backReferenceLen=readBackReference(b,off,len);if (!hasMoreDataInBlock()) {state=State.NO_BLOCK;}return backReferenceLen > 0 ? backReferenceLen : read(b,off,len);default :throw new IOException(\"Unknown stream state \" + state);} @AT@ 3346 @LENGTH@ 621\n" +
            "---UPD ReturnStatement@@SimpleName:litLen @TO@ ConditionalExpression:litLen > 0 ? litLen : read(b,off,len) @AT@ 3627 @LENGTH@ 14\n" +
            "------INS ConditionalExpression@@litLen > 0 ? litLen : read(b,off,len) @TO@ ReturnStatement@@SimpleName:litLen @AT@ 3634 @LENGTH@ 39\n" +
            "---------INS InfixExpression@@litLen > 0 @TO@ ConditionalExpression@@litLen > 0 ? litLen : read(b,off,len) @AT@ 3634 @LENGTH@ 10\n" +
            "------------INS SimpleName@@litLen @TO@ InfixExpression@@litLen > 0 @AT@ 3634 @LENGTH@ 6\n" +
            "------------INS Operator@@> @TO@ InfixExpression@@litLen > 0 @AT@ 3640 @LENGTH@ 1\n" +
            "------------INS NumberLiteral@@0 @TO@ InfixExpression@@litLen > 0 @AT@ 3643 @LENGTH@ 1\n" +
            "---------INS SimpleName@@litLen @TO@ ConditionalExpression@@litLen > 0 ? litLen : read(b,off,len) @AT@ 3647 @LENGTH@ 6\n" +
            "---------INS MethodInvocation@@read(b,off,len) @TO@ ConditionalExpression@@litLen > 0 ? litLen : read(b,off,len) @AT@ 3656 @LENGTH@ 17\n" +
            "------------INS SimpleName@@MethodName:read:[b, off, len] @TO@ MethodInvocation@@read(b,off,len) @AT@ 3656 @LENGTH@ 17\n" +
            "---------------INS SimpleName@@b @TO@ SimpleName@@MethodName:read:[b, off, len] @AT@ 3661 @LENGTH@ 1\n" +
            "---------------INS SimpleName@@off @TO@ SimpleName@@MethodName:read:[b, off, len] @AT@ 3664 @LENGTH@ 3\n" +
            "---------------INS SimpleName@@len @TO@ SimpleName@@MethodName:read:[b, off, len] @AT@ 3669 @LENGTH@ 3\n" +
            "------DEL SimpleName@@litLen @AT@ 3634 @LENGTH@ 6\n" +
            "---UPD ReturnStatement@@SimpleName:backReferenceLen @TO@ ConditionalExpression:backReferenceLen > 0 ? backReferenceLen : read(b,off,len) @AT@ 3848 @LENGTH@ 24\n" +
            "------DEL SimpleName@@backReferenceLen @AT@ 3855 @LENGTH@ 16\n" +
            "------INS ConditionalExpression@@backReferenceLen > 0 ? backReferenceLen : read(b,off,len) @TO@ ReturnStatement@@SimpleName:backReferenceLen @AT@ 3888 @LENGTH@ 59\n" +
            "---------INS InfixExpression@@backReferenceLen > 0 @TO@ ConditionalExpression@@backReferenceLen > 0 ? backReferenceLen : read(b,off,len) @AT@ 3888 @LENGTH@ 20\n" +
            "------------INS SimpleName@@backReferenceLen @TO@ InfixExpression@@backReferenceLen > 0 @AT@ 3888 @LENGTH@ 16\n" +
            "------------INS Operator@@> @TO@ InfixExpression@@backReferenceLen > 0 @AT@ 3904 @LENGTH@ 1\n" +
            "------------INS NumberLiteral@@0 @TO@ InfixExpression@@backReferenceLen > 0 @AT@ 3907 @LENGTH@ 1\n" +
            "---------INS SimpleName@@backReferenceLen @TO@ ConditionalExpression@@backReferenceLen > 0 ? backReferenceLen : read(b,off,len) @AT@ 3911 @LENGTH@ 16\n" +
            "---------INS MethodInvocation@@read(b,off,len) @TO@ ConditionalExpression@@backReferenceLen > 0 ? backReferenceLen : read(b,off,len) @AT@ 3930 @LENGTH@ 17\n" +
            "------------INS SimpleName@@MethodName:read:[b, off, len] @TO@ MethodInvocation@@read(b,off,len) @AT@ 3930 @LENGTH@ 17\n" +
            "---------------INS SimpleName@@b @TO@ SimpleName@@MethodName:read:[b, off, len] @AT@ 3935 @LENGTH@ 1\n" +
            "---------------INS SimpleName@@off @TO@ SimpleName@@MethodName:read:[b, off, len] @AT@ 3938 @LENGTH@ 3\n" +
            "---------------INS SimpleName@@len @TO@ SimpleName@@MethodName:read:[b, off, len] @AT@ 3943 @LENGTH@ 3\n",
            hierarchicalActionSets.get(0).toString());
}
//commons-collections_b6d038_4bdb89_src#java#org#apache#commons#collections#iterators#ArrayListIterator.java
@Test
public void test_collections_b6d038_4bdb89() throws IOException {
    // Verifies the edit scripts for offsetting index returns by this.startIndex.
    List<HierarchicalActionSet> hierarchicalActionSets = getHierarchicalActionSets4java("commons-collections_b6d038_4bdb89_src#java#org#apache#commons#collections#iterators#ArrayListIterator.java");
    // JUnit's assertEquals is (expected, actual); this order keeps failure messages accurate.
    Assert.assertEquals(2, hierarchicalActionSets.size());
    Assert.assertEquals("UPD ReturnStatement@@FieldAccess:this.index @TO@ InfixExpression:this.index - this.startIndex @AT@ 7714 @LENGTH@ 18\n" +
            "---DEL FieldAccess@@this.index @AT@ 7721 @LENGTH@ 10\n" +
            "---INS InfixExpression@@this.index - this.startIndex @TO@ ReturnStatement@@FieldAccess:this.index @AT@ 7740 @LENGTH@ 28\n" +
            "------INS FieldAccess@@this.index @TO@ InfixExpression@@this.index - this.startIndex @AT@ 7740 @LENGTH@ 10\n" +
            "---------MOV ThisExpression@@this @TO@ FieldAccess@@this.index @AT@ 7721 @LENGTH@ 4\n" +
            "---------MOV SimpleName@@index @TO@ FieldAccess@@this.index @AT@ 7726 @LENGTH@ 5\n" +
            "------INS Operator@@- @TO@ InfixExpression@@this.index - this.startIndex @AT@ 7750 @LENGTH@ 1\n" +
            "------INS FieldAccess@@this.startIndex @TO@ InfixExpression@@this.index - this.startIndex @AT@ 7753 @LENGTH@ 15\n" +
            "---------INS ThisExpression@@this @TO@ FieldAccess@@this.startIndex @AT@ 7753 @LENGTH@ 4\n" +
            "---------INS SimpleName@@startIndex @TO@ FieldAccess@@this.startIndex @AT@ 7758 @LENGTH@ 10\n",
            hierarchicalActionSets.get(0).toString());
    Assert.assertEquals("UPD ReturnStatement@@InfixExpression:this.index - 1 @TO@ InfixExpression:this.index - this.startIndex - 1 @AT@ 7947 @LENGTH@ 22\n" +
            "---UPD InfixExpression@@this.index - 1 @TO@ this.index - this.startIndex - 1 @AT@ 7954 @LENGTH@ 14\n" +
            "------INS FieldAccess@@this.startIndex @TO@ InfixExpression@@this.index - 1 @AT@ 8004 @LENGTH@ 15\n" +
            "---------INS ThisExpression@@this @TO@ FieldAccess@@this.startIndex @AT@ 8004 @LENGTH@ 4\n" +
            "---------INS SimpleName@@startIndex @TO@ FieldAccess@@this.startIndex @AT@ 8009 @LENGTH@ 10\n",
            hierarchicalActionSets.get(1).toString());
}
//commons-collections_3761b5_3639ab_src#java#org#apache#commons#collections#FastHashMap.java
@Test
public void test_collections_3761b5_3639ab() throws IOException {
    //TODO can be different
    // Verifies the edit script for qualifying Entry as Map.Entry in both branches
    // of the fast/synchronized equals implementation.
    List<HierarchicalActionSet> hierarchicalActionSets = getHierarchicalActionSets4java("commons-collections_3761b5_3639ab_src#java#org#apache#commons#collections#FastHashMap.java");
    // JUnit's assertEquals is (expected, actual); this order keeps failure messages accurate.
    Assert.assertEquals(1, hierarchicalActionSets.size());
    Assert.assertEquals("UPD IfStatement@@if (fast) { if (mo.size() != map.size()) return (false); Iterator i=map.entrySet().iterator(); while (i.hasNext()) { Entry e=(Entry)i.next(); Object key=e.getKey(); Object value=e.getValue(); if (value == null) { if (!(mo.get(key) == null && mo.containsKey(key))) return (false); } else { if (!value.equals(mo.get(key))) return (false); } } return (true);} else {synchronized (map) { if (mo.size() != map.size()) return (false); Iterator i=map.entrySet().iterator(); while (i.hasNext()) { Entry e=(Entry)i.next(); Object key=e.getKey(); Object value=e.getValue(); if (value == null) { if (!(mo.get(key) == null && mo.containsKey(key))) return (false); } else { if (!value.equals(mo.get(key))) return (false); } } return (true); }} @TO@ if (fast) { if (mo.size() != map.size()) return (false); Iterator i=map.entrySet().iterator(); while (i.hasNext()) { Map.Entry e=(Map.Entry)i.next(); Object key=e.getKey(); Object value=e.getValue(); if (value == null) { if (!(mo.get(key) == null && mo.containsKey(key))) return (false); } else { if (!value.equals(mo.get(key))) return (false); } } return (true);} else {synchronized (map) { if (mo.size() != map.size()) return (false); Iterator i=map.entrySet().iterator(); while (i.hasNext()) { Map.Entry e=(Map.Entry)i.next(); Object key=e.getKey(); Object value=e.getValue(); if (value == null) { if (!(mo.get(key) == null && mo.containsKey(key))) return (false); } else { if (!value.equals(mo.get(key))) return (false); } } return (true); }} @AT@ 8632 @LENGTH@ 1375\n" +
            "---UPD Block@@ThenBody:{ if (mo.size() != map.size()) return (false); Iterator i=map.entrySet().iterator(); while (i.hasNext()) { Entry e=(Entry)i.next(); Object key=e.getKey(); Object value=e.getValue(); if (value == null) { if (!(mo.get(key) == null && mo.containsKey(key))) return (false); } else { if (!value.equals(mo.get(key))) return (false); } } return (true);} @TO@ ThenBody:{ if (mo.size() != map.size()) return (false); Iterator i=map.entrySet().iterator(); while (i.hasNext()) { Map.Entry e=(Map.Entry)i.next(); Object key=e.getKey(); Object value=e.getValue(); if (value == null) { if (!(mo.get(key) == null && mo.containsKey(key))) return (false); } else { if (!value.equals(mo.get(key))) return (false); } } return (true);} @AT@ 8642 @LENGTH@ 624\n" +
            "------UPD WhileStatement@@while (i.hasNext()) { Entry e=(Entry)i.next(); Object key=e.getKey(); Object value=e.getValue(); if (value == null) { if (!(mo.get(key) == null && mo.containsKey(key))) return (false); } else { if (!value.equals(mo.get(key))) return (false); }} @TO@ while (i.hasNext()) { Map.Entry e=(Map.Entry)i.next(); Object key=e.getKey(); Object value=e.getValue(); if (value == null) { if (!(mo.get(key) == null && mo.containsKey(key))) return (false); } else { if (!value.equals(mo.get(key))) return (false); }} @AT@ 8781 @LENGTH@ 448\n" +
            "---------UPD Block@@WhileBody:{ Entry e=(Entry)i.next(); Object key=e.getKey(); Object value=e.getValue(); if (value == null) { if (!(mo.get(key) == null && mo.containsKey(key))) return (false); } else { if (!value.equals(mo.get(key))) return (false); }} @TO@ WhileBody:{ Map.Entry e=(Map.Entry)i.next(); Object key=e.getKey(); Object value=e.getValue(); if (value == null) { if (!(mo.get(key) == null && mo.containsKey(key))) return (false); } else { if (!value.equals(mo.get(key))) return (false); }} @AT@ 8801 @LENGTH@ 428\n" +
            "------------UPD VariableDeclarationStatement@@Entry e=(Entry)i.next(); @TO@ Map.Entry e=(Map.Entry)i.next(); @AT@ 8819 @LENGTH@ 27\n" +
            "---------------UPD SimpleType@@Entry @TO@ Map.Entry @AT@ 8819 @LENGTH@ 5\n" +
            "---------------UPD VariableDeclarationFragment@@e=(Entry)i.next() @TO@ e=(Map.Entry)i.next() @AT@ 8825 @LENGTH@ 20\n" +
            "------------------UPD CastExpression@@(Entry)i.next() @TO@ (Map.Entry)i.next() @AT@ 8829 @LENGTH@ 16\n" +
            "---------------------UPD SimpleType@@Entry @TO@ Map.Entry @AT@ 8830 @LENGTH@ 5\n" +
            "---UPD Block@@ElseBody:{synchronized (map) { if (mo.size() != map.size()) return (false); Iterator i=map.entrySet().iterator(); while (i.hasNext()) { Entry e=(Entry)i.next(); Object key=e.getKey(); Object value=e.getValue(); if (value == null) { if (!(mo.get(key) == null && mo.containsKey(key))) return (false); } else { if (!value.equals(mo.get(key))) return (false); } } return (true); }} @TO@ ElseBody:{synchronized (map) { if (mo.size() != map.size()) return (false); Iterator i=map.entrySet().iterator(); while (i.hasNext()) { Map.Entry e=(Map.Entry)i.next(); Object key=e.getKey(); Object value=e.getValue(); if (value == null) { if (!(mo.get(key) == null && mo.containsKey(key))) return (false); } else { if (!value.equals(mo.get(key))) return (false); } } return (true); }} @AT@ 9272 @LENGTH@ 735\n" +
            "------UPD SynchronizedStatement@@synchronized (map) { if (mo.size() != map.size()) return (false); Iterator i=map.entrySet().iterator(); while (i.hasNext()) { Entry e=(Entry)i.next(); Object key=e.getKey(); Object value=e.getValue(); if (value == null) { if (!(mo.get(key) == null && mo.containsKey(key))) return (false); } else { if (!value.equals(mo.get(key))) return (false); } } return (true);} @TO@ synchronized (map) { if (mo.size() != map.size()) return (false); Iterator i=map.entrySet().iterator(); while (i.hasNext()) { Map.Entry e=(Map.Entry)i.next(); Object key=e.getKey(); Object value=e.getValue(); if (value == null) { if (!(mo.get(key) == null && mo.containsKey(key))) return (false); } else { if (!value.equals(mo.get(key))) return (false); } } return (true);} @AT@ 9286 @LENGTH@ 711\n" +
            "---------UPD Block@@SyncBody:{ if (mo.size() != map.size()) return (false); Iterator i=map.entrySet().iterator(); while (i.hasNext()) { Entry e=(Entry)i.next(); Object key=e.getKey(); Object value=e.getValue(); if (value == null) { if (!(mo.get(key) == null && mo.containsKey(key))) return (false); } else { if (!value.equals(mo.get(key))) return (false); } } return (true);} @TO@ SyncBody:{ if (mo.size() != map.size()) return (false); Iterator i=map.entrySet().iterator(); while (i.hasNext()) { Map.Entry e=(Map.Entry)i.next(); Object key=e.getKey(); Object value=e.getValue(); if (value == null) { if (!(mo.get(key) == null && mo.containsKey(key))) return (false); } else { if (!value.equals(mo.get(key))) return (false); } } return (true);} @AT@ 9305 @LENGTH@ 692\n" +
            "------------UPD WhileStatement@@while (i.hasNext()) { Entry e=(Entry)i.next(); Object key=e.getKey(); Object value=e.getValue(); if (value == null) { if (!(mo.get(key) == null && mo.containsKey(key))) return (false); } else { if (!value.equals(mo.get(key))) return (false); }} @TO@ while (i.hasNext()) { Map.Entry e=(Map.Entry)i.next(); Object key=e.getKey(); Object value=e.getValue(); if (value == null) { if (!(mo.get(key) == null && mo.containsKey(key))) return (false); } else { if (!value.equals(mo.get(key))) return (false); }} @AT@ 9460 @LENGTH@ 492\n" +
            "---------------UPD Block@@WhileBody:{ Entry e=(Entry)i.next(); Object key=e.getKey(); Object value=e.getValue(); if (value == null) { if (!(mo.get(key) == null && mo.containsKey(key))) return (false); } else { if (!value.equals(mo.get(key))) return (false); }} @TO@ WhileBody:{ Map.Entry e=(Map.Entry)i.next(); Object key=e.getKey(); Object value=e.getValue(); if (value == null) { if (!(mo.get(key) == null && mo.containsKey(key))) return (false); } else { if (!value.equals(mo.get(key))) return (false); }} @AT@ 9480 @LENGTH@ 472\n" +
            "------------------UPD VariableDeclarationStatement@@Entry e=(Entry)i.next(); @TO@ Map.Entry e=(Map.Entry)i.next(); @AT@ 9502 @LENGTH@ 27\n" +
            "---------------------UPD SimpleType@@Entry @TO@ Map.Entry @AT@ 9502 @LENGTH@ 5\n" +
            "---------------------UPD VariableDeclarationFragment@@e=(Entry)i.next() @TO@ e=(Map.Entry)i.next() @AT@ 9508 @LENGTH@ 20\n" +
            "------------------------UPD CastExpression@@(Entry)i.next() @TO@ (Map.Entry)i.next() @AT@ 9512 @LENGTH@ 16\n" +
            "---------------------------UPD SimpleType@@Entry @TO@ Map.Entry @AT@ 9513 @LENGTH@ 5\n",
            hierarchicalActionSets.get(0).toString());
}
//fuse_74c02c_a4658a_fabric#fabric-core#src#main#java#org#fusesource#fabric#service#DataStoreManager.java
@Test
public void test_fuse_74c02c_a4658a() throws IOException {
    //mov position
    // Verifies the edit scripts for adding a null guard (type != null &&) to two
    // identical if-conditions at different positions in the file.
    List<HierarchicalActionSet> hierarchicalActionSets = getHierarchicalActionSets4java("fuse_74c02c_a4658a_fabric#fabric-core#src#main#java#org#fusesource#fabric#service#DataStoreManager.java");
    // JUnit's assertEquals is (expected, actual); this order keeps failure messages accurate.
    Assert.assertEquals(2, hierarchicalActionSets.size());
    Assert.assertEquals("UPD IfStatement@@if (type.equals(dataStorePlugin.getName())) { updateServiceRegistration();} @TO@ if (type != null && type.equals(dataStorePlugin.getName())) { updateServiceRegistration();} @AT@ 5887 @LENGTH@ 104\n" +
            "---INS InfixExpression@@type != null && type.equals(dataStorePlugin.getName()) @TO@ IfStatement@@if (type.equals(dataStorePlugin.getName())) { updateServiceRegistration();} @AT@ 5891 @LENGTH@ 54\n" +
            "------MOV MethodInvocation@@type.equals(dataStorePlugin.getName()) @TO@ InfixExpression@@type != null && type.equals(dataStorePlugin.getName()) @AT@ 5891 @LENGTH@ 38\n" +
            "------INS InfixExpression@@type != null @TO@ InfixExpression@@type != null && type.equals(dataStorePlugin.getName()) @AT@ 5891 @LENGTH@ 12\n" +
            "---------INS SimpleName@@type @TO@ InfixExpression@@type != null @AT@ 5891 @LENGTH@ 4\n" +
            "---------INS Operator@@!= @TO@ InfixExpression@@type != null @AT@ 5895 @LENGTH@ 2\n" +
            "---------INS NullLiteral@@null @TO@ InfixExpression@@type != null @AT@ 5899 @LENGTH@ 4\n" +
            "------INS Operator@@&& @TO@ InfixExpression@@type != null && type.equals(dataStorePlugin.getName()) @AT@ 5903 @LENGTH@ 2\n",
            hierarchicalActionSets.get(0).toString());
    Assert.assertEquals("UPD IfStatement@@if (type.equals(dataStorePlugin.getName())) { updateServiceRegistration();} @TO@ if (type != null && type.equals(dataStorePlugin.getName())) { updateServiceRegistration();} @AT@ 6204 @LENGTH@ 104\n" +
            "---INS InfixExpression@@type != null && type.equals(dataStorePlugin.getName()) @TO@ IfStatement@@if (type.equals(dataStorePlugin.getName())) { updateServiceRegistration();} @AT@ 6224 @LENGTH@ 54\n" +
            "------MOV MethodInvocation@@type.equals(dataStorePlugin.getName()) @TO@ InfixExpression@@type != null && type.equals(dataStorePlugin.getName()) @AT@ 6208 @LENGTH@ 38\n" +
            "------INS InfixExpression@@type != null @TO@ InfixExpression@@type != null && type.equals(dataStorePlugin.getName()) @AT@ 6224 @LENGTH@ 12\n" +
            "---------INS SimpleName@@type @TO@ InfixExpression@@type != null @AT@ 6224 @LENGTH@ 4\n" +
            "---------INS Operator@@!= @TO@ InfixExpression@@type != null @AT@ 6228 @LENGTH@ 2\n" +
            "---------INS NullLiteral@@null @TO@ InfixExpression@@type != null @AT@ 6232 @LENGTH@ 4\n" +
            "------INS Operator@@&& @TO@ InfixExpression@@type != null && type.equals(dataStorePlugin.getName()) @AT@ 6236 @LENGTH@ 2\n",
            hierarchicalActionSets.get(1).toString());
}
//fuse_cb3362_cfb295_fabric#fabric-features-service#src#main#java#org#fusesource#fabric#features#FabricFeaturesServiceImpl.java
/**
 * Regression test for the FabricFeaturesServiceImpl.java patch: the miner must
 * extract exactly two hierarchical UPD action sets, each wrapping an
 * enhanced-for loop in a try/catch(Exception) that logs via LOGGER.debug.
 *
 * NOTE: JUnit's assertEquals contract is (expected, actual); the expected
 * string literal is passed first so that failure messages label the values
 * correctly.
 *
 * @throws IOException if the patch fixture cannot be read
 */
@Test
public void test_fuse_cb3362_cfb295() throws IOException {
    //todo maybe wrong but also true
    List<HierarchicalActionSet> hierarchicalActionSets = getHierarchicalActionSets4java("fuse_cb3362_cfb295_fabric#fabric-features-service#src#main#java#org#fusesource#fabric#features#FabricFeaturesServiceImpl.java");
    Assert.assertEquals(2, hierarchicalActionSets.size());
    // First action set: try/catch wrapped around the allfeatures-collecting loop.
    Assert.assertEquals("UPD EnhancedForStatement@@for (Repository repository : repositories) { for ( Feature feature : repository.getFeatures()) { if (!allfeatures.contains(feature)) { allfeatures.add(feature); } }} @TO@ for (Repository repository : repositories) { try { for ( Feature feature : repository.getFeatures()) { if (!allfeatures.contains(feature)) { allfeatures.add(feature); } } } catch ( Exception ex) { LOGGER.debug(\"Could not load features from %s.\",repository.getURI()); }} @AT@ 8535 @LENGTH@ 273\n" +
            "---INS TryStatement@@try { for ( Feature feature : repository.getFeatures()) { if (!allfeatures.contains(feature)) { allfeatures.add(feature); } }} catch (Exception ex) { LOGGER.debug(\"Could not load features from %s.\",repository.getURI());} @TO@ EnhancedForStatement@@for (Repository repository : repositories) { for ( Feature feature : repository.getFeatures()) { if (!allfeatures.contains(feature)) { allfeatures.add(feature); } }} @AT@ 8596 @LENGTH@ 390\n" +
            "------MOV EnhancedForStatement@@for (Feature feature : repository.getFeatures()) { if (!allfeatures.contains(feature)) { allfeatures.add(feature); }} @TO@ TryStatement@@try { for ( Feature feature : repository.getFeatures()) { if (!allfeatures.contains(feature)) { allfeatures.add(feature); } }} catch (Exception ex) { LOGGER.debug(\"Could not load features from %s.\",repository.getURI());} @AT@ 8596 @LENGTH@ 198\n" +
            "------INS CatchClause@@catch (Exception ex) { LOGGER.debug(\"Could not load features from %s.\",repository.getURI());} @TO@ TryStatement@@try { for ( Feature feature : repository.getFeatures()) { if (!allfeatures.contains(feature)) { allfeatures.add(feature); } }} catch (Exception ex) { LOGGER.debug(\"Could not load features from %s.\",repository.getURI());} @AT@ 8855 @LENGTH@ 131\n" +
            "---------INS SingleVariableDeclaration@@Exception ex @TO@ CatchClause@@catch (Exception ex) { LOGGER.debug(\"Could not load features from %s.\",repository.getURI());} @AT@ 8862 @LENGTH@ 12\n" +
            "------------INS SimpleType@@Exception @TO@ SingleVariableDeclaration@@Exception ex @AT@ 8862 @LENGTH@ 9\n" +
            "------------INS SimpleName@@ex @TO@ SingleVariableDeclaration@@Exception ex @AT@ 8872 @LENGTH@ 2\n" +
            "---------INS ExpressionStatement@@MethodInvocation:LOGGER.debug(\"Could not load features from %s.\",repository.getURI()) @TO@ CatchClause@@catch (Exception ex) { LOGGER.debug(\"Could not load features from %s.\",repository.getURI());} @AT@ 8898 @LENGTH@ 70\n" +
            "------------INS MethodInvocation@@LOGGER.debug(\"Could not load features from %s.\",repository.getURI()) @TO@ ExpressionStatement@@MethodInvocation:LOGGER.debug(\"Could not load features from %s.\",repository.getURI()) @AT@ 8898 @LENGTH@ 69\n" +
            "---------------INS SimpleName@@Name:LOGGER @TO@ MethodInvocation@@LOGGER.debug(\"Could not load features from %s.\",repository.getURI()) @AT@ 8898 @LENGTH@ 6\n" +
            "---------------INS SimpleName@@MethodName:debug:[\"Could not load features from %s.\", repository.getURI()] @TO@ MethodInvocation@@LOGGER.debug(\"Could not load features from %s.\",repository.getURI()) @AT@ 8905 @LENGTH@ 62\n" +
            "------------------INS StringLiteral@@\"Could not load features from %s.\" @TO@ SimpleName@@MethodName:debug:[\"Could not load features from %s.\", repository.getURI()] @AT@ 8911 @LENGTH@ 34\n" +
            "------------------INS MethodInvocation@@repository.getURI() @TO@ SimpleName@@MethodName:debug:[\"Could not load features from %s.\", repository.getURI()] @AT@ 8947 @LENGTH@ 19\n" +
            "---------------------INS SimpleName@@Name:repository @TO@ MethodInvocation@@repository.getURI() @AT@ 8947 @LENGTH@ 10\n" +
            "---------------------INS SimpleName@@MethodName:getURI:[] @TO@ MethodInvocation@@repository.getURI() @AT@ 8958 @LENGTH@ 8\n",
            hierarchicalActionSets.get(0).toString());
    // Second action set: same try/catch wrapping applied to the versionMap loop.
    Assert.assertEquals("UPD EnhancedForStatement@@for (Repository repo : repositories) { for ( Feature f : repo.getFeatures()) { if (features.get(f.getName()) == null) { Map<String,Feature> versionMap=new TreeMap<String,Feature>(); versionMap.put(f.getVersion(),f); features.put(f.getName(),versionMap); } else { features.get(f.getName()).put(f.getVersion(),f); } }} @TO@ for (Repository repo : repositories) { try { for ( Feature f : repo.getFeatures()) { if (features.get(f.getName()) == null) { Map<String,Feature> versionMap=new TreeMap<String,Feature>(); versionMap.put(f.getVersion(),f); features.put(f.getName(),versionMap); } else { features.get(f.getName()).put(f.getVersion(),f); } } } catch ( Exception ex) { LOGGER.debug(\"Could not load features from %s.\",repo.getURI()); }} @AT@ 11093 @LENGTH@ 483\n" +
            "---INS TryStatement@@try { for ( Feature f : repo.getFeatures()) { if (features.get(f.getName()) == null) { Map<String,Feature> versionMap=new TreeMap<String,Feature>(); versionMap.put(f.getVersion(),f); features.put(f.getName(),versionMap); } else { features.get(f.getName()).put(f.getVersion(),f); } }} catch (Exception ex) { LOGGER.debug(\"Could not load features from %s.\",repo.getURI());} @TO@ EnhancedForStatement@@for (Repository repo : repositories) { for ( Feature f : repo.getFeatures()) { if (features.get(f.getName()) == null) { Map<String,Feature> versionMap=new TreeMap<String,Feature>(); versionMap.put(f.getVersion(),f); features.put(f.getName(),versionMap); } else { features.get(f.getName()).put(f.getVersion(),f); } }} @AT@ 11336 @LENGTH@ 608\n" +
            "------MOV EnhancedForStatement@@for (Feature f : repo.getFeatures()) { if (features.get(f.getName()) == null) { Map<String,Feature> versionMap=new TreeMap<String,Feature>(); versionMap.put(f.getVersion(),f); features.put(f.getName(),versionMap); } else { features.get(f.getName()).put(f.getVersion(),f); }} @TO@ TryStatement@@try { for ( Feature f : repo.getFeatures()) { if (features.get(f.getName()) == null) { Map<String,Feature> versionMap=new TreeMap<String,Feature>(); versionMap.put(f.getVersion(),f); features.put(f.getName(),versionMap); } else { features.get(f.getName()).put(f.getVersion(),f); } }} catch (Exception ex) { LOGGER.debug(\"Could not load features from %s.\",repo.getURI());} @AT@ 11144 @LENGTH@ 422\n" +
            "------INS CatchClause@@catch (Exception ex) { LOGGER.debug(\"Could not load features from %s.\",repo.getURI());} @TO@ TryStatement@@try { for ( Feature f : repo.getFeatures()) { if (features.get(f.getName()) == null) { Map<String,Feature> versionMap=new TreeMap<String,Feature>(); versionMap.put(f.getVersion(),f); features.put(f.getName(),versionMap); } else { features.get(f.getName()).put(f.getVersion(),f); } }} catch (Exception ex) { LOGGER.debug(\"Could not load features from %s.\",repo.getURI());} @AT@ 11827 @LENGTH@ 117\n" +
            "---------INS SingleVariableDeclaration@@Exception ex @TO@ CatchClause@@catch (Exception ex) { LOGGER.debug(\"Could not load features from %s.\",repo.getURI());} @AT@ 11834 @LENGTH@ 12\n" +
            "------------INS SimpleType@@Exception @TO@ SingleVariableDeclaration@@Exception ex @AT@ 11834 @LENGTH@ 9\n" +
            "------------INS SimpleName@@ex @TO@ SingleVariableDeclaration@@Exception ex @AT@ 11844 @LENGTH@ 2\n" +
            "---------INS ExpressionStatement@@MethodInvocation:LOGGER.debug(\"Could not load features from %s.\",repo.getURI()) @TO@ CatchClause@@catch (Exception ex) { LOGGER.debug(\"Could not load features from %s.\",repo.getURI());} @AT@ 11866 @LENGTH@ 64\n" +
            "------------INS MethodInvocation@@LOGGER.debug(\"Could not load features from %s.\",repo.getURI()) @TO@ ExpressionStatement@@MethodInvocation:LOGGER.debug(\"Could not load features from %s.\",repo.getURI()) @AT@ 11866 @LENGTH@ 63\n" +
            "---------------INS SimpleName@@Name:LOGGER @TO@ MethodInvocation@@LOGGER.debug(\"Could not load features from %s.\",repo.getURI()) @AT@ 11866 @LENGTH@ 6\n" +
            "---------------INS SimpleName@@MethodName:debug:[\"Could not load features from %s.\", repo.getURI()] @TO@ MethodInvocation@@LOGGER.debug(\"Could not load features from %s.\",repo.getURI()) @AT@ 11873 @LENGTH@ 56\n" +
            "------------------INS StringLiteral@@\"Could not load features from %s.\" @TO@ SimpleName@@MethodName:debug:[\"Could not load features from %s.\", repo.getURI()] @AT@ 11879 @LENGTH@ 34\n" +
            "------------------INS MethodInvocation@@repo.getURI() @TO@ SimpleName@@MethodName:debug:[\"Could not load features from %s.\", repo.getURI()] @AT@ 11915 @LENGTH@ 13\n" +
            "---------------------INS SimpleName@@Name:repo @TO@ MethodInvocation@@repo.getURI() @AT@ 11915 @LENGTH@ 4\n" +
            "---------------------INS SimpleName@@MethodName:getURI:[] @TO@ MethodInvocation@@repo.getURI() @AT@ 11920 @LENGTH@ 8\n",
            hierarchicalActionSets.get(1).toString());
}
@Ignore
@Test
public void test_() throws IOException {
    // Placeholder template for new miner test cases: fill in the patch
    // fixture name and the expected action-set dump, then drop @Ignore.
    List<HierarchicalActionSet> hierarchicalActionSets = getHierarchicalActionSets4java("");
    // JUnit's assertEquals contract is (expected, actual): expected first so
    // failure messages label the values correctly.
    Assert.assertEquals(1, hierarchicalActionSets.size());
    Assert.assertEquals("", hierarchicalActionSets.get(0).toString());
}
}
+2 -2
View File
@@ -19,10 +19,10 @@ fixminer:
portDumps : 6399
numOfWorkers : 14
hostname : localhost
hunkLimit : 10
hunkLimit : 2
patchSize : 50
projectList : spring-shell,fuse,metadata
projectList : spring-shell,fuse,metadata,commons-codec,commons-collections,commons-compress,commons-configuration,commons-crypto,commons-csv
inputPath : /Users/anilkoyuncu/projects/test/fixminer-data/patches
redisPath : /Users/anilkoyuncu/projects/release/test/fixminer_source/python/data/redis
srcMLPath : /usr/local/bin/srcml