release version;

This commit is contained in:
fixminer
2018-05-03 10:33:14 +02:00
parent 931ce5c04f
commit 3649e7476a
24 changed files with 272 additions and 2370 deletions
+3 -3
View File
@@ -19,9 +19,6 @@
<orderEntry type="library" name="Maven: stax:stax-api:1.0.1" level="project" />
<orderEntry type="library" name="Maven: net.sourceforge.jexcelapi:jxl:2.6.12" level="project" />
<orderEntry type="library" name="Maven: log4j:log4j:1.2.14" level="project" />
<orderEntry type="library" name="Maven: org.slf4j:slf4j-api:1.7.25" level="project" />
<orderEntry type="library" name="Maven: ch.qos.logback:logback-core:1.2.3" level="project" />
<orderEntry type="library" name="Maven: ch.qos.logback:logback-classic:1.2.3" level="project" />
<orderEntry type="library" name="Maven: com.typesafe.akka:akka-actor_2.11:2.4.11" level="project" />
<orderEntry type="library" name="Maven: org.scala-lang:scala-library:2.11.8" level="project" />
<orderEntry type="library" name="Maven: com.typesafe:config:1.3.0" level="project" />
@@ -42,6 +39,9 @@
<orderEntry type="library" name="Maven: org.eclipse.equinox:app:1.3.200-v20130910-1609" level="project" />
<orderEntry type="library" name="Maven: org.eclipse.birt.runtime:org.eclipse.core.resources:3.10.0.v20150423-0755" level="project" />
<orderEntry type="library" name="Maven: org.eclipse.tycho:org.eclipse.jdt.core:3.12.2.v20161117-1814" level="project" />
<orderEntry type="library" name="Maven: org.slf4j:slf4j-api:1.7.7" level="project" />
<orderEntry type="library" name="Maven: ch.qos.logback:logback-classic:1.1.2" level="project" />
<orderEntry type="library" name="Maven: ch.qos.logback:logback-core:1.1.2" level="project" />
<orderEntry type="module" module-name="GitTraveller" />
<orderEntry type="library" name="Maven: org.apache.commons:commons-lang3:3.7" level="project" />
<orderEntry type="library" name="Maven: org.jsoup:jsoup:1.11.2" level="project" />
+21
View File
@@ -13,6 +13,8 @@
<properties>
<project.build.sourceEncoding>UTF-8</project.build.sourceEncoding>
<akka.version>2.4.11</akka.version>
<logback.version>1.1.2</logback.version>
<slf4j-api.version>1.7.7</slf4j-api.version>
</properties>
<dependencies>
@@ -41,6 +43,25 @@
<artifactId>gen.jdt</artifactId>
<version>2.0.0-SNAPSHOT</version>
</dependency>
<!-- SLF4J - API -->
<dependency>
<groupId>org.slf4j</groupId>
<artifactId>slf4j-api</artifactId>
<version>${slf4j-api.version}</version>
</dependency>
<!-- logback -->
<dependency>
<groupId>ch.qos.logback</groupId>
<artifactId>logback-classic</artifactId>
<version>${logback.version}</version>
</dependency>
<dependency>
<groupId>ch.qos.logback</groupId>
<artifactId>logback-core</artifactId>
<version>${logback.version}</version>
</dependency>
<!--<dependency>-->
<!--<groupId>edu.lu.uni.serval</groupId>-->
@@ -31,7 +31,7 @@ public abstract class Parser implements ParserInterface {
protected String originalTree = ""; // Guide of generating patches.
protected String actionSets = ""; // Guide of generating patches.
public abstract void parseFixPatterns(File prevFile, File revFile, File diffEntryFile);
public abstract void parseFixPatterns(File prevFile, File revFile, File diffEntryFile,String project);
protected List<HierarchicalActionSet> parseChangedSourceCodeWithGumTree(File prevFile, File revFile) {
List<HierarchicalActionSet> actionSets = new ArrayList<>();
@@ -8,6 +8,7 @@ public class RunnableParser implements Runnable {
private File revFile;
private File diffentryFile;
private Parser parser;
private String project;
public RunnableParser(File prevFile, File revFile, File diffentryFile, Parser parser) {
this.prevFile = prevFile;
@@ -16,8 +17,16 @@ public class RunnableParser implements Runnable {
this.parser = parser;
}
/**
 * Creates a parsing task that additionally carries the owning project's name;
 * run() forwards it to the 4-argument parseFixPatterns overload.
 *
 * @param prevFile      source file before the fix
 * @param revFile       source file after the fix
 * @param diffentryFile file holding the DiffEntry for this file pair
 * @param parser        parser implementation the task delegates to
 * @param project       name of the project the files belong to
 */
public RunnableParser(File prevFile, File revFile, File diffentryFile, Parser parser,String project) {
this.prevFile = prevFile;
this.revFile = revFile;
this.diffentryFile = diffentryFile;
this.parser = parser;
this.project = project;
}
@Override
public void run() {
parser.parseFixPatterns(prevFile, revFile, diffentryFile);
parser.parseFixPatterns(prevFile, revFile, diffentryFile,project);
}
}
@@ -70,89 +70,22 @@ public class AkkaTreeLoader {
private static Consumer<String> consumer = Assert::assertNotNull;
/**
 * Runs the given shell command (typically a redis-server start script) and blocks
 * until the process exits, piping its stdout through {@code consumer}.
 *
 * Fixes over the previous version:
 * - the single-thread executor is shut down, so its worker thread no longer
 *   leaks on every invocation;
 * - a non-zero exit code is reported through the logger instead of a bare
 *   {@code assert}, which is a no-op unless the JVM runs with -ea;
 * - InterruptedException now restores the thread's interrupt status instead of
 *   being silently swallowed.
 *
 * @param cmd the full command line to execute
 */
public static void loadRedisWait(String cmd){
    try {
        Process process = Runtime.getRuntime().exec(cmd);
        StreamGobbler streamGobbler =
                new StreamGobbler(process.getInputStream(), consumer);
        // fully qualified to avoid touching the file's import block
        java.util.concurrent.ExecutorService executor = Executors.newSingleThreadExecutor();
        try {
            executor.submit(streamGobbler);
            int exitCode = process.waitFor();
            if (exitCode != 0) {
                log.error("Command '{}' exited with code {}", cmd, exitCode);
            }
        } finally {
            executor.shutdown(); // previously leaked: one live thread per call
        }
    } catch (IOException e) {
        e.printStackTrace();
    } catch (InterruptedException e) {
        Thread.currentThread().interrupt(); // preserve interrupt status for callers
        e.printStackTrace();
    }
    log.info("Load done");
}
// public static void main(String[] args) {
public static void main(String portInner,String serverWait,String dbDir,String chunkName,String port, String dumpsName) throws Exception {
// String inputPath;
//// String outputPath;
// String port;
// String portInner;
//// String pairsCSVPath;
// String importScript;
//// String pairsCompletedPath;
// String serverWait;
//// String option;
// String dbDir;
// String chunkName;
// String numOfWorkers;
// if (args.length > 0) {
// inputPath = args[0];
// portInner = args[1];
// serverWait = args[2];
//// option = args[4];
// chunkName = args[3];
// numOfWorkers = args[4];
// dbDir = args[5];
// port = args[6];
//// pairsCSVPath = args[3];
//// importScript = args[4];
//// pairsCompletedPath = args[3];
// } else {
// inputPath = "/Users/anilkoyuncu/bugStudy/dataset/GumTreeOutput2";
//// outputPath = "/Users/anilkoyuncu/bugStudy/dataset/";
// port = "6399";
// portInner = "6380";
// serverWait = "10000";
//// option = "COMP";
//// pairsCSVPath = "/Users/anilkoyuncu/bugStudy/dataset/pairs/test";
//// importScript = "/Users/anilkoyuncu/bugStudy/dataset/pairs/test2.sh";
//// pairsCompletedPath = "/Users/anilkoyuncu/bugStudy/dataset/pairs_completed";
// chunkName ="chunk3.rdb";
// dbDir = "/Users/anilkoyuncu/bugStudy/dataset/redis";
// numOfWorkers = "1";
//
// }
String parameters = String.format("\nportInner %s \nserverWait %s \nchunkName %s \ndbDir %s \ndumpsName %s",portInner,serverWait,chunkName,dbDir,dumpsName);
log.info(parameters);
// if (option.equals("CALC")) {
// calculatePairs(inputPath, port);
// log.info("Calculate pairs done");
// }else {
CallShell cs = new CallShell();
String cmd = "bash "+dbDir + "/" + "startServer.sh" +" %s %s %s";
String cmd1 = String.format(cmd, dbDir,dumpsName,Integer.valueOf(port));
// loadRedis(cmd1,serverWait);
cs.runShell(cmd1,serverWait);
String cmdInner = "bash "+dbDir + "/" + "startServer.sh" +" %s %s %s";
String cmd2 = String.format(cmdInner, dbDir,chunkName,Integer.valueOf(portInner));
// loadRedis(cmd2,serverWait);
cs.runShell(cmd2,serverWait);
JedisPool outerPool = new JedisPool(poolConfig, "127.0.0.1",Integer.valueOf(port),20000000);
JedisPool innerPool = new JedisPool(poolConfig, "127.0.0.1",Integer.valueOf(portInner),20000000);
@@ -163,12 +96,11 @@ public class AkkaTreeLoader {
String stopServer = "bash "+dbDir + "/" + "stopServer.sh" +" %s";
String stopServer1 = String.format(stopServer,Integer.valueOf(portInner));
// loadRedis(stopServer1,serverWait);
cs.runShell(stopServer1,serverWait);
stopServer = "bash "+dbDir + "/" + "stopServer.sh" +" %s";
String stopServer2 = String.format(stopServer,Integer.valueOf(port));
// loadRedis(stopServer2,serverWait);
// }
cs.runShell(stopServer2,serverWait);
@@ -196,20 +128,7 @@ public class AkkaTreeLoader {
List<String> result = scan.getResult();
// ActorSystem system = null;
// ActorRef parsingActor = null;
// final WorkMessage msg = new WorkMessage(0, result,innerPort,inputPath,dbDir,serverWait);
// try {
//
// log.info("Akka begins...");
// system = ActorSystem.create("Tree-System");
// parsingActor = system.actorOf(TreeActor.props(Integer.valueOf(numOfWorkers),dbDir,innerPort,serverWait), "tree-actor");
// parsingActor.tell(msg, ActorRef.noSender());
// } catch (Exception e) {
// system.shutdown();
// e.printStackTrace();
// }
// greeter.tell();
result
.parallelStream()
.forEach(m ->
@@ -316,41 +235,6 @@ public class AkkaTreeLoader {
return parent;
}
/**
 * Recursively rebuilds a GumTree ITree from a HierarchicalActionSet, converting
 * each AST node-type *name* back into its integer type code via ASTNodeMap.map.
 * The root call is made with the action set whose getParent() is null; children
 * are attached as the recursion descends.
 *
 * @param actionSet action set whose node types drive the tree construction
 * @param parent    tree node to attach children under (created at the root call)
 * @param children  scratch slot for the node created at this level
 * @return the root of the reconstructed tree
 */
public static ITree getASTTree(HierarchicalActionSet actionSet, ITree parent, ITree children){
int newType = 0;
String astNodeType = actionSet.getAstNodeType();
// Reverse lookup: map values are type names, keys are the integer codes.
List<Integer> keysByValue = getKeysByValue(ASTNodeMap.map, astNodeType);
if(keysByValue.size() != 1){
// Turkish: "more than one AST-node map mapping" — ambiguous reverse lookup.
// NOTE(review): if keysByValue is EMPTY, get(0) below throws
// IndexOutOfBoundsException — confirm ASTNodeMap.map covers all node types.
log.error("Birden cok astnodemapmapping");
}
newType = keysByValue.get(0);
if(actionSet.getParent() == null){
//root
parent = new Tree(newType,"");
}else{
children = new Tree(newType,"");
parent.addChild(children);
}
List<HierarchicalActionSet> subActions = actionSet.getSubActions();
if (subActions.size() != 0){
for (HierarchicalActionSet subAction : subActions) {
if(actionSet.getParent() == null){
// At the root the freshly created root node is what children attach to.
children = parent;
}
getASTTree(subAction,children,null);
}
}
return parent;
}
static final JedisPoolConfig poolConfig = buildPoolConfig();
@@ -371,31 +255,6 @@ public class AkkaTreeLoader {
return poolConfig;
}
/**
 * Collects one MessageFile per revised source file under
 * {@code gumTreeInput}/revFiles/, pairing it with its matching prevFiles/,
 * DiffEntries/ and positions/ entries (the .txt companions share the .java
 * file's base name).
 *
 * The previous version tested {@code revFiles.length >= 0} — always true — so
 * its null-returning else branch was dead code, and a missing input directory
 * produced a NullPointerException instead. A missing/unreadable directory now
 * returns null as originally intended; an empty directory still yields an
 * empty list.
 *
 * @param gumTreeInput root directory holding prevFiles/ revFiles/ DiffEntries/ positions/
 * @return the message files, an empty list if revFiles/ is empty, or null if it is missing
 */
private static List<edu.lu.uni.serval.MultipleThreadsParser.MessageFile> getMessageFiles(String gumTreeInput) {
    File revFilesPath = new File(gumTreeInput + "revFiles/");
    File[] revFiles = revFilesPath.listFiles(); // null when the directory does not exist
    if (revFiles == null) {
        return null;
    }
    List<edu.lu.uni.serval.MultipleThreadsParser.MessageFile> msgFiles = new ArrayList<>();
    for (File revFile : revFiles) {
        String fileName = revFile.getName();
        File prevFile = new File(gumTreeInput + "prevFiles/prev_" + fileName); // previous file
        fileName = fileName.replace(".java", ".txt");
        File diffentryFile = new File(gumTreeInput + "DiffEntries/" + fileName); // DiffEntry file
        File positionFile = new File(gumTreeInput + "positions/" + fileName); // position file
        edu.lu.uni.serval.MultipleThreadsParser.MessageFile msgFile =
                new edu.lu.uni.serval.MultipleThreadsParser.MessageFile(revFile, prevFile, diffentryFile);
        msgFile.setPositionFile(positionFile);
        msgFiles.add(msgFile);
    }
    return msgFiles;
}
}
@@ -1,9 +1,7 @@
package edu.lu.uni.serval.FixPatternParser.cluster;
import com.github.gumtreediff.tree.ITree;
import edu.lu.uni.serval.FixPatternParser.violations.CallShell;
import edu.lu.uni.serval.utils.FileHelper;
import org.javatuples.Pair;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import redis.clients.jedis.Jedis;
@@ -12,14 +10,8 @@ import redis.clients.jedis.ScanParams;
import redis.clients.jedis.ScanResult;
import java.io.*;
import java.nio.ByteBuffer;
import java.nio.channels.FileChannel;
import java.nio.file.Files;
import java.nio.file.StandardCopyOption;
import java.util.ArrayList;
import java.util.List;
import static edu.lu.uni.serval.FixPatternParser.cluster.AkkaTreeLoader.loadRedis;
import static edu.lu.uni.serval.FixPatternParser.cluster.TreeLoaderClusterL1.poolConfig;
/**
@@ -27,46 +19,17 @@ import static edu.lu.uni.serval.FixPatternParser.cluster.TreeLoaderClusterL1.poo
*/
public class CalculatePairs {
private static Logger log = LoggerFactory.getLogger(CalculatePairs.class);
// public static void main(String[] args) {
public static void main(String serverWait,String dbDir,String chunkName,String port,String outputPath,String pjName) throws Exception {
// String inputPath;
// String port;
// String portInner;
// String serverWait;
// String dbDir;
// String chunkName;
// String outputPath;
//
// if (args.length > 0) {
// inputPath = args[0];
// portInner = args[1];
// serverWait = args[2];
// chunkName = args[3];
//
// dbDir = args[5];
// port = args[6];
// outputPath = args[7];
//
// } else {
// inputPath = "/Users/anilkoyuncu/bugStudy/dataset/GumTreeOutput2";
//
// port = "6399";
// portInner = "6380";
// serverWait = "10000";
// outputPath = "/Users/anilkoyuncu/bugStudy/dataset/pairsImport";
// chunkName ="chunk";
// dbDir = "/Users/anilkoyuncu/bugStudy/dataset/redis";
//
//
// }
String parameters = String.format("\nport %s \nserverWait %s \nchunkName %s \ndbDir %s",port,serverWait,chunkName,dbDir);
log.info(parameters);
CallShell cs =new CallShell();
String cmd = "bash "+dbDir + "/" + "startServer.sh" +" %s %s %s";
cmd = String.format(cmd, dbDir,chunkName,Integer.valueOf(port));
// loadRedis(cmd,serverWait);
cs.runShell(cmd,serverWait);
FileHelper.createDirectory(outputPath);
@@ -96,9 +59,26 @@ public class CalculatePairs {
byte [] buf = new byte[0];
String line = null;
try {
// FileOutputStream fos = new FileOutputStream(outputPath + "/" +pjName+".txt");
// DataOutputStream outStream = new DataOutputStream(new BufferedOutputStream(fos));
//
FileOutputStream fos = new FileOutputStream(outputPath + "/" +pjName+".csv");
DataOutputStream outStream = new DataOutputStream(new BufferedOutputStream(fos));
for (int i = 0; i < result.size(); i++) {
for (int j = i + 1; j < result.size(); j++) {
line = String.valueOf(i) +"," + String.valueOf(j) + "," + result.get(i) + "," + result.get(j)+"\n";
outStream.write(line.getBytes());
}
}
outStream.close();
// int fileCounter = 0;
// FileChannel rwChannel = new RandomAccessFile(outputPath + "/" +pjName +String.valueOf(fileCounter)+".txt", "rw").getChannel();
// int maxSize = 500*500000;
// ByteBuffer wrBuf = rwChannel.map(FileChannel.MapMode.READ_WRITE, 0, maxSize);
//
//
// for (int i = 0; i < result.size(); i++) {
@@ -107,39 +87,22 @@ public class CalculatePairs {
//
//
// line = String.valueOf(i) +"\t" + String.valueOf(j) + "\t" + result.get(i) + "\t" + result.get(j)+"\n";
// outStream.write(line.getBytes());
// buf = line.getBytes();
// if(wrBuf.remaining() > 500) {
// wrBuf.put(buf);
// }else{
// log.info("Next pair dump");
// fileCounter++;
// rwChannel = new RandomAccessFile(outputPath+"/" +pjName+String.valueOf(fileCounter)+".txt", "rw").getChannel();
// wrBuf = rwChannel.map(FileChannel.MapMode.READ_WRITE, 0, maxSize);
// }
//
//
//
//
// }
// }
// outStream.close();
int fileCounter = 0;
FileChannel rwChannel = new RandomAccessFile(outputPath + "/" +pjName +String.valueOf(fileCounter)+".txt", "rw").getChannel();
int maxSize = 500*500000;
ByteBuffer wrBuf = rwChannel.map(FileChannel.MapMode.READ_WRITE, 0, maxSize);
for (int i = 0; i < result.size(); i++) {
for (int j = i + 1; j < result.size(); j++) {
line = String.valueOf(i) +"\t" + String.valueOf(j) + "\t" + result.get(i) + "\t" + result.get(j)+"\n";
buf = line.getBytes();
if(wrBuf.remaining() > 500) {
wrBuf.put(buf);
}else{
log.info("Next pair dump");
fileCounter++;
rwChannel = new RandomAccessFile(outputPath+"/" +pjName+String.valueOf(fileCounter)+".txt", "rw").getChannel();
wrBuf = rwChannel.map(FileChannel.MapMode.READ_WRITE, 0, maxSize);
}
}
}
rwChannel.close();
// rwChannel.close();
} catch (FileNotFoundException e) {
e.printStackTrace();
} catch (IOException e) {
@@ -157,81 +120,4 @@ public class CalculatePairs {
log.info("Done pairs");
}
// comparePairs(inputPath, innerPool,outerPool, serverWait,chunkName,dbDir,numOfWorkers);
// String stopServer = "bash "+dbDir + "/" + "stopServer.sh" +" %s";
// stopServer = String.format(stopServer,Integer.valueOf(portInner));
// loadRedis(stopServer,serverWait);
// }
// public static void corePairs(,ArrayList<Pair<String,String>> list){
// String cmdInner = "bash "+dbDir + "/" + "startServer.sh" +" %s %s %s";
// cmd = String.format(cmdInner, dbDir,chunkName,Integer.valueOf(portInner));
// loadRedis(cmd,serverWait);
// JedisPool innerPool = new JedisPool(poolConfig, "127.0.0.1",Integer.valueOf(portInner),20000000);
//
//
//
// Jedis jedis = null;
// for (Pair<String, String> objects : list) {
//
// try {
// String key = objects.getValue0();
// String value = objects.getValue1();
// jedis = innerPool.getResource();
//
//
// String[] split = value.split(",");
//
//
// jedis.hset(key, "0", split[0]);
// jedis.hset(key, "1", split[1]);
//
//
// //10000000
// if (pairCounter % 10000000 == 0) {
//
// File dbPath = new File(dbDir + "/" + chunkName);
// File savePath = new File(dbDir + "/" + "chunk" + String.valueOf(fileCounter) + ".rdb");
// try {
// jedis.save();
// log.info("saving key {} chunk {}",key,fileCounter);
// while (jedis.ping() == "PONG") {
// log.info("wait");
// }
// Thread.sleep(Integer.valueOf(serverWait));
//
// Files.copy(dbPath.toPath(), savePath.toPath(), StandardCopyOption.REPLACE_EXISTING);
// fileCounter++;
// jedis.flushDB();
// while (jedis.ping() == "PONG") {
// log.info("wait");
// }
// Thread.sleep(Integer.valueOf(serverWait));
//
// } catch (IOException e) {
//
// e.printStackTrace();
// }
//
//
// }
// }catch (Exception e) {
// log.error(e.toString() + " {}", (key));
// }finally {
// if (jedis != null) {
// jedis.close();
// }
// }
// }
// }
}
@@ -20,31 +20,10 @@ import java.util.stream.Stream;
*/
public class ImportPairs2DB {
private static Logger log = LoggerFactory.getLogger(ImportPairs2DB.class);
// public static void main(String[] args) {
public static void main(String csvInputPath,String portInner,String serverWait,String dbDir,String datasetPath) throws Exception {
// String inputPath;
// String portInner;
// String serverWait;
// String dbDir;
// String chunkName;
// String numOfWorkers;
// if (args.length > 0) {
// inputPath = args[0];
// portInner = args[1];
// serverWait = args[2];
// chunkName = args[3];
// numOfWorkers = args[4];
// dbDir = args[5];
// } else {
// inputPath = "/Users/anilkoyuncu/bugStudy/dataset/pairs";
// portInner = "6380";
// serverWait = "10000";
// chunkName ="dumps.rdb";
// dbDir = "/Users/anilkoyuncu/bugStudy/dataset/redis";
// numOfWorkers = "1";
// }
String parameters = String.format("\nInput path %s \nportInner %s \nserverWait %s \ndbDir %s",csvInputPath,portInner,serverWait,dbDir);
log.info(parameters);
@@ -65,7 +44,7 @@ public class ImportPairs2DB {
CallShell cs = new CallShell();
cs.runShell(cmd,serverWait);
cmd = "bash "+datasetPath + "/redisImportSingle.sh" +" %s %s";
cmd = "bash "+datasetPath + "/redisSingleImport.sh" +" %s %s";
cmd = String.format(cmd, pj.getPath(), portInt);
log.info(cmd);
@@ -1,26 +1,14 @@
package edu.lu.uni.serval.FixPatternParser.cluster;
import com.github.gumtreediff.actions.ActionGenerator;
import com.github.gumtreediff.actions.model.*;
import com.github.gumtreediff.matchers.Matcher;
import com.github.gumtreediff.matchers.Matchers;
import com.github.gumtreediff.tree.ITree;
import edu.lu.uni.serval.FixPatternParser.violations.CallShell;
import edu.lu.uni.serval.gumtree.GumTreeComparer;
import edu.lu.uni.serval.gumtree.regroup.HierarchicalActionSet;
import edu.lu.uni.serval.gumtree.regroup.HierarchicalRegrouper;
import edu.lu.uni.serval.utils.FileHelper;
import edu.lu.uni.serval.utils.ListSorter;
import org.javatuples.Pair;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import redis.clients.jedis.*;
import java.io.*;
import java.nio.ByteBuffer;
import java.nio.channels.FileChannel;
import java.io.File;
import java.time.Duration;
import java.util.*;
import java.util.Arrays;
import java.util.List;
import java.util.stream.Collectors;
import java.util.stream.Stream;
@@ -29,49 +17,23 @@ import java.util.stream.Stream;
*/
public class TreeLoaderClusterL1 {
private static int resultType;
private static Logger log = LoggerFactory.getLogger(TreeLoaderClusterL1.class);
// public static void main(String[] args){
public static void main(String portInner,String serverWait,String port,String inputPath,String level1DB,String level1Path,String innerTypePrefix) throws Exception {
// String inputPath;
// String outputPath;
// String port;
// String portInner;
// String serverWait;
// if (args.length > 0) {
// inputPath = args[0];
// outputPath = args[1];
// port = args[2];
// serverWait = args[3];
// portInner = args[4];
//
// } else {
//// inputPath = "/Users/anilkoyuncu/bugStudy/dataset/GumTreeOutput2/";
// inputPath = "/Users/anilkoyuncu/bugStudy/dataset/redis";
// outputPath = "/Users/anilkoyuncu/bugStudy/dataset/";
// port = "6379";
// portInner = "6380";
// serverWait = "10000";
//
//
// }
String parameters = String.format("\nInput path %s \nportInner %s \nserverWait %s \nport %s",inputPath,portInner,serverWait,port);
log.info(parameters);
String cmd = "bash "+inputPath + "/" + "startServer.sh" +" %s %s %s";
cmd = String.format(cmd, inputPath,level1DB,Integer.valueOf(port));
// loadRedis(cmd,serverWait);
CallShell cs = new CallShell();
cs.runShell(cmd,serverWait);
JedisPool outerPool = new JedisPool(poolConfig, "127.0.0.1",Integer.valueOf(port),20000000);
// String level1Path = inputPath + "/level1";
File chunks = new File(level1Path);
File[] listFiles = chunks.listFiles();
Stream<File> stream = Arrays.stream(listFiles);
@@ -82,7 +44,7 @@ public class TreeLoaderClusterL1 {
for (File db : dbs) {
String cmdInner = "bash "+inputPath + "/" + "startServer.sh" +" %s %s %s";
cmdInner = String.format(cmdInner, inputPath,db.getName(),Integer.valueOf(portInner));
// loadRedis(cmdInner,serverWait);
cs.runShell(cmdInner,serverWait);
JedisPool innerPool = new JedisPool(poolConfig, "127.0.0.1",Integer.valueOf(portInner),20000000);
@@ -146,607 +108,9 @@ public class TreeLoaderClusterL1 {
cs.runShell(stopServer1,serverWait);
// calculatePairsOfClusters(inputPath, outputPath);
// mainCompare(inputPath,port,pairsCSVPath,importScript);
// calculatePairs(inputPath, outputPath);
// processMessages(inputPath,outputPath);
// evaluateResults(inputPath,outputPath);
}
/**
 * Scans the redis DB on {@code port} for "pair_<i>_<j>" hashes and compares each
 * pair in parallel via coreCompare, once per matching cluster folder under
 * {@code pairsCSVPath}.
 *
 * NOTE(review): only folders whose name starts with "cluster76" are processed —
 * looks like a leftover of a one-cluster debugging run; confirm before reuse.
 * NOTE(review): {@code cmd}/{@code importScript} are built but never executed
 * (the loadRedis call is commented out), and the JedisPool is never closed.
 *
 * @param inputPath    root of the GumTree input used by coreCompare/getTree
 * @param port         redis port holding the pair hashes
 * @param pairsCSVPath directory containing one folder per cluster
 * @param importScript shell script path (currently unused, see note above)
 */
public static void mainCompare(String inputPath,String port,String pairsCSVPath,String importScript) {
String cmd;
cmd = "bash " + importScript +" %s";
JedisPool jedisPool = new JedisPool(poolConfig, "127.0.0.1",Integer.valueOf(port),20000000);
File folder = new File(pairsCSVPath);
File[] listOfFiles = folder.listFiles();
Stream<File> stream = Arrays.stream(listOfFiles);
List<File> folders = stream
.filter(x -> !x.getName().startsWith("."))
.collect(Collectors.toList());
for (File f:folders){
if(f.getName().startsWith("cluster76")) {
try (Jedis jedis = jedisPool.getResource()) {
// do operations with jedis resource
ScanParams sc = new ScanParams();
// Very large COUNT hint so a single SCAN pass returns (effectively) everything.
sc.count(150000000);
sc.match("pair_[0-9]*");
log.info("Scanning");
ScanResult<String> scan = jedis.scan("0", sc);
int size = scan.getResult().size();
if (size == 0) {
// Retry once if the first scan came back empty (DB may still be loading).
// loadRedis(cmd, f);
scan = jedis.scan("0", sc);
size = scan.getResult().size();
}
log.info("Scanned " + String.valueOf(size));
// Cluster id = the digits of the folder name, e.g. "cluster76" -> "76".
String clusterName = f.getName().replaceAll("[^0-9]+", "");
//76
scan.getResult().parallelStream()
.forEach(m -> coreCompare(m, inputPath, jedisPool, clusterName));
// Persist results written by coreCompare before moving on.
jedis.save();
}
}
}
}
/**
 * Resolves a pair-entry value like "<...>/<cluster>/<filePrefix>" to its GumTree
 * action tree: locates the matching dump file in the cluster folder, decodes
 * project name and action-set index from the file name
 * ("<base>.txt_<index>_<project>"), re-parses the prev/rev file pair, and
 * returns the detached action tree plus the project name.
 *
 * NOTE(review): both base paths are machine-specific absolute paths
 * (/Volumes/..., /Users/anilkoyuncu/...) — must be parameterized before running
 * elsewhere.
 *
 * @param firstValue slash-separated pair value; index 1 = cluster, index 2 = file prefix
 * @return (action tree root, project name)
 */
public static Pair<ITree,String> getTree(String firstValue){
String gumTreeInput = "/Volumes/data/bugStudy_backup/dataset/GumTreeInputBug4/";
String[] split2 = firstValue.split("/");
String cluster = split2[1];
File folder = new File("/Users/anilkoyuncu/bugStudy/code/python/cluster/"+cluster);
File[] listOfFiles = folder.listFiles();
Stream<File> stream = Arrays.stream(listOfFiles);
List<File> folders = stream
.filter(x -> !x.getName().startsWith(".") && x.getName().startsWith(split2[2]))
.collect(Collectors.toList());
// NOTE(review): split() takes a regex, so "." here matches ANY character,
// not just a literal dot — confirm file names make this unambiguous.
String[] split1 = folders.get(0).getName().split(".txt_");
String s = split1[0];
// Suffix is "<actionSetIndex>_<project>".
String[] splitPJ = split1[1].split("_");
String project = splitPJ[1];
String actionSetPosition = splitPJ[0];
File prevFile = new File(gumTreeInput + project+ "/" + "prevFiles/prev_" + s + ".java");// previous file
File revFile = new File(gumTreeInput + project+ "/" + "revFiles/" + s + ".java");//rev file
List<HierarchicalActionSet> actionSets = parseChangedSourceCodeWithGumTree2(prevFile, revFile);
HierarchicalActionSet actionSet = actionSets.get(Integer.valueOf(actionSetPosition));
// for (HierarchicalActionSet actionSet : actionSets) {
// NOTE(review): actionTree and test2 are never used — dead locals.
ITree actionTree= null;
ITree test2 = null;
// Rewrite node types in place to the synthetic action-type codes (100-103).
getActionTree(actionSet);
ITree node = actionSet.getNode();
List<ITree> descendants = node.getDescendants();
for (ITree descendant : descendants) {
// Collapse all non-action node types below 100 to a single code (104).
if(descendant.getType() <= 100){
descendant.setType(104);
}
}
// Detach so the returned subtree is a standalone comparison root.
node.setParent(null);
// }
// }
Pair<ITree, String> pair = new Pair<>(node,project);
return pair;
}
/**
 * Recursively rewrites each node's type in the action-set tree to a synthetic
 * action-kind code: 100 = Insert, 101 = Update, 102 = Move, 103 = Delete.
 * Mutates the nodes in place.
 *
 * Bug fix: the previous version did {@code new Exception("unknow action")}
 * without throwing it, so an unknown action kind silently left the node typed 0.
 * It now fails loudly.
 *
 * @param actionSet the (possibly nested) action set whose nodes are retyped
 * @throws IllegalStateException if the action is of an unrecognized kind
 */
public static void getActionTree(HierarchicalActionSet actionSet){
    int newType;
    Action action = actionSet.getAction();
    if (action instanceof Update) {
        newType = 101;
    } else if (action instanceof Insert) {
        newType = 100;
    } else if (action instanceof Move) {
        newType = 102;
    } else if (action instanceof Delete) {
        newType = 103;
    } else {
        throw new IllegalStateException("unknown action type: " + action);
    }
    actionSet.getNode().setType(newType);
    // actionSet.getNode().setLabel("");
    List<HierarchicalActionSet> subActions = actionSet.getSubActions();
    for (HierarchicalActionSet subAction : subActions) {
        getActionTree(subAction);
    }
}
// public static ITree getActionTree(HierarchicalActionSet actionSet, ITree parent, ITree children){
//
// int newType = 0;
//
// Action action = actionSet.getAction();
// if (action instanceof Update){
// newType = 101;
// }else if(action instanceof Insert){
// newType =100;
// }else if(action instanceof Move){
// newType = 102;
// }else if(action instanceof Delete){
// newType=103;
// }else{
// new Exception("unknow action");
// }
// if(actionSet.getParent() == null){
// //root
//
// parent = new Tree(newType,"");
// }else{
// children = new Tree(newType,"");
// parent.addChild(children);
// }
// List<HierarchicalActionSet> subActions = actionSet.getSubActions();
// if (subActions.size() != 0){
// for (HierarchicalActionSet subAction : subActions) {
//
// if(actionSet.getParent() == null){
// children = parent;
// }
// getActionTree(subAction,children,null);
//
// }
//
//
// }
// return parent;
// }
/**
 * Compares one redis pair entry: loads the hash {@code name} ("pair_<i>_<j>",
 * fields "0" and "1" holding the two dump identifiers), rebuilds both action
 * trees via getTree, runs a GumTree match, and — when any similarity metric is
 * exactly 1.0 or the edit script is empty — records a "match-..." key in redis
 * DB 1. The processed pair key is deleted from DB 0 either way.
 *
 * @param name        redis key of the pair hash, formatted "pair_<i>_<j>"
 * @param inputPath   GumTree input root (currently unused here; see commented block)
 * @param jedisPool   pool to borrow the redis connection from
 * @param clusterName numeric cluster id used in the match key
 */
private static void coreCompare(String name , String inputPath, JedisPool jedisPool,String clusterName) {
try (Jedis jedis = jedisPool.getResource()) {
Map<String, String> resultMap = jedis.hgetAll(name);
// NOTE(review): this bare get("0") discards its result — no visible effect.
resultMap.get("0");
// Extract the pair indices from the key "pair_<i>_<j>".
String[] split = name.split("_");
String i = null;
String j =null;
try {
i = split[1];
j = split[2];
}
catch (Exception e){
e.printStackTrace();
}
String firstValue = resultMap.get("0");
String secondValue = resultMap.get("1");
// firstValue = inputPath + firstValue;
// secondValue = inputPath + secondValue;
// String[] firstValueSplit = firstValue.split("/");
// String[] secondValueSplit = secondValue.split("/");
//
// if (firstValueSplit.length == 1) {
// firstValue = inputPath + firstValueSplit[0];
// } else {
// firstValue = inputPath + firstValueSplit[1];
// }
//
// if (secondValueSplit.length == 1) {
// secondValue = inputPath + secondValueSplit[0];
// } else {
// secondValue = inputPath + secondValueSplit[1];
// }
try {
// Rebuild both action trees plus their owning project names.
Pair<ITree, String> oldPair = getTree(firstValue);
Pair<ITree, String> newPair = getTree(secondValue);
ITree oldTree = oldPair.getValue0();
ITree newTree = newPair.getValue0();
String oldProject = oldPair.getValue1();
String newProject = newPair.getValue1();
// GumTree matching and edit-script generation between the two trees.
Matcher m = Matchers.getInstance().getMatcher(oldTree, newTree);
m.match();
ActionGenerator ag = new ActionGenerator(oldTree, newTree, m.getMappings());
ag.generate();
List<Action> actions = ag.getActions();
String resultKey = "result_" + (String.valueOf(i)) + "_" + String.valueOf(j);
// Three similarity metrics, each also formatted to two decimals for the CSV row.
double chawatheSimilarity1 = m.chawatheSimilarity(oldTree, newTree);
String chawatheSimilarity = String.format("%1.2f", chawatheSimilarity1);
double diceSimilarity1 = m.diceSimilarity(oldTree, newTree);
String diceSimilarity = String.format("%1.2f", diceSimilarity1);
double jaccardSimilarity1 = m.jaccardSimilarity(oldTree, newTree);
String jaccardSimilarity = String.format("%1.2f", jaccardSimilarity1);
String editDistance = String.valueOf(actions.size());
// jedis.select(1);
String result = resultMap.get("0") + "," + oldProject +"," + resultMap.get("1") + "," +newProject+ "," + chawatheSimilarity + "," + diceSimilarity + "," + jaccardSimilarity + "," + editDistance;
// jedis.set(resultKey, result);
// A pair is a "match" when any metric is exactly 1.0 or no edit actions exist.
if (((Double) chawatheSimilarity1).equals(1.0) || ((Double) diceSimilarity1).equals(1.0)
|| ((Double) jaccardSimilarity1).equals(1.0) || actions.size() == 0) {
String matchKey = "match-"+clusterName+"_" + (String.valueOf(i)) + "_" + String.valueOf(j);
jedis.select(1); // matches live in redis DB 1
jedis.set(matchKey, result);
}
jedis.select(0); // back to the pairs DB before deleting the processed key
String pairKey = "pair_" + (String.valueOf(i)) + "_" + String.valueOf(j);
jedis.del(pairKey);
// log.info("Completed " + resultKey);
}catch (Exception e){
// Best-effort: one broken pair must not abort the parallel scan.
log.error(e.toString() + " {}",(name));
}
}
}
/**
 * Runs GumTree on a prev/rev file pair and regroups the raw edit actions into
 * sorted HierarchicalActionSets.
 *
 * Side channel: the static field {@code resultType} records the outcome —
 * 1 = GumTree comparison failed (returns null), 2 = files identical (returns
 * empty list), 3 = regrouping/sorting yielded nothing (returns empty list).
 * NOTE(review): being static, resultType is racy if this is called from
 * multiple threads — confirm single-threaded use.
 *
 * @param prevFile source file before the fix
 * @param revFile  source file after the fix
 * @return sorted action sets, an empty list, or null on comparison failure
 */
protected static List<HierarchicalActionSet> parseChangedSourceCodeWithGumTree2(File prevFile, File revFile) {
List<HierarchicalActionSet> actionSets = new ArrayList<>();
// GumTree results
List<Action> gumTreeResults = new GumTreeComparer().compareTwoFilesWithGumTree(prevFile, revFile);
if (gumTreeResults == null) {
resultType = 1;
return null;
} else if (gumTreeResults.size() == 0){
resultType = 2;
return actionSets;
} else {
// Regroup GumTree results into a hierarchy of action sets.
List<HierarchicalActionSet> allActionSets = new HierarchicalRegrouper().regroupGumTreeResults(gumTreeResults);
// for (HierarchicalActionSet actionSet : allActionSets) {
// String astNodeType = actionSet.getAstNodeType();
// if (astNodeType.endsWith("Statement") || "FieldDeclaration".equals(astNodeType)) {
// actionSets.add(actionSet);
// }
// }
// Filter out modified actions of changing method names, method parameters, variable names and field names in declaration part.
// variable effects range, sub-actions are these kinds of modification?
// actionSets.addAll(new ActionFilter().filterOutUselessActions(allActionSets));
// Sort ascending so action-set indices are stable across runs (getTree relies on them).
ListSorter<HierarchicalActionSet> sorter = new ListSorter<>(allActionSets);
actionSets = sorter.sortAscending();
if (actionSets.size() == 0) {
resultType = 3;
}
return actionSets;
}
}
/*
orginal calculate pairs, from all dumps of the projects
*/
/*
orginal calculate pairs, from all dumps of the projects
*/
/**
 * Original "calculate pairs" entry point: gathers the contents of each
 * project's first ASTDumps* folder under {@code inputPath} and hands them to
 * readMessageFiles.
 *
 * Hardened against a missing/unreadable input directory and against projects
 * without an ASTDumps* folder — both previously crashed (NPE on listFiles()
 * returning null, ArrayIndexOutOfBounds on files[0]). Also drops the leftover
 * System.out.println("a") debug statement.
 *
 * @param inputPath  directory containing one folder per project
 * @param outputPath destination passed through to readMessageFiles
 */
public static void calculatePairs(String inputPath, String outputPath) {
    File folder = new File(inputPath);
    File[] listOfFiles = folder.listFiles();
    if (listOfFiles == null) {
        log.error("Input path {} is not a readable directory", inputPath);
        return;
    }
    List<File> pjs = Arrays.stream(listOfFiles)
            .filter(x -> !x.getName().startsWith("."))
            .collect(Collectors.toList());
    List<File> fileToCompare = new ArrayList<>();
    for (File pj : pjs) {
        File[] files = pj.listFiles((dir, name) -> name.startsWith("ASTDumps"));
        if (files == null || files.length == 0) {
            continue; // project without an ASTDumps folder — previously files[0] crashed
        }
        File[] dumps = files[0].listFiles();
        if (dumps != null) {
            Collections.addAll(fileToCompare, dumps);
        }
    }
    // compareAll(fileToCompare);
    readMessageFiles(fileToCompare, outputPath);
}
/*
pairs of each cluster
*/
/**
 * Emits one pair-index file per cluster: every non-hidden directory directly
 * under {@code inputPath} is treated as a cluster and its member files are
 * forwarded to readMessageFilesCluster together with the cluster's name.
 *
 * @param inputPath  directory containing one folder per cluster
 * @param outputPath destination root; a "pairs/" subdirectory is created
 */
public static void calculatePairsOfClusters(String inputPath, String outputPath) {
    File[] entries = new File(inputPath).listFiles();
    List<File> clusterDirs = new ArrayList<>();
    for (File entry : entries) {
        if (!entry.getName().startsWith(".")) {
            clusterDirs.add(entry);
        }
    }
    FileHelper.createDirectory(outputPath + "pairs/");
    for (File clusterDir : clusterDirs) {
        List<File> members = Arrays.asList(clusterDir.listFiles());
        readMessageFilesCluster(members, outputPath, inputPath, clusterDir.getName());
    }
}
/**
 * Processes every split pair file under {@code outputPath}pairs_splitted/ in
 * parallel via coreLoop, which writes its results under comparison_splitted/.
 *
 * @param inputPath  forwarded to coreLoop for resolving tree dump paths
 * @param outputPath root containing pairs_splitted/ and receiving comparison_splitted/
 */
public static void processMessages(String inputPath, String outputPath) {
    File splitDir = new File(outputPath + "pairs_splitted/");
    List<File> pairFiles = Arrays.stream(splitDir.listFiles())
            .filter(f -> !f.getName().startsWith("."))
            .collect(Collectors.toList());
    FileHelper.createDirectory(outputPath + "comparison_splitted/");
    pairFiles.parallelStream()
            .forEach(pairFile -> coreLoop(pairFile, outputPath, inputPath));
}
/**
 * Writes the all-pairs index for one cluster: every unordered pair (i, j) of
 * the given files as a tab-separated line
 * "i \t j \t relPath(i) \t relPath(j)" into
 * {@code outputPath}pairs/cluster&lt;cluster&gt;.txt, where paths are made
 * relative by stripping {@code inputPath}.
 *
 * Fix: the output stream is now opened with try-with-resources, so it is
 * closed (and flushed) even when a write fails mid-loop — previously an
 * IOException leaked the stream. The unused zero-length {@code buf} scratch
 * array was removed.
 *
 * @param folders    member files of the cluster
 * @param outputPath destination root (must contain a pairs/ directory)
 * @param inputPath  prefix stripped from file paths in the output
 * @param cluster    cluster name used in the output file name
 */
private static void readMessageFilesCluster(List<File> folders, String outputPath,String inputPath,String cluster) {
    List<String> treesFileNames = new ArrayList<>();
    for (File target : folders) {
        treesFileNames.add(target.toString());
    }
    log.info("Calculating pairs");
    // treesFileNames = treesFileNames.subList(0,100);
    String filename = "cluster" + cluster;
    String line = null;
    try (DataOutputStream outStream = new DataOutputStream(new BufferedOutputStream(
            new FileOutputStream(outputPath + "pairs/" + filename + ".txt")))) {
        for (int i = 0; i < treesFileNames.size(); i++) {
            for (int j = i + 1; j < treesFileNames.size(); j++) {
                line = String.valueOf(i) +"\t" + String.valueOf(j) + "\t" + treesFileNames.get(i).replace(inputPath,"") + "\t" + treesFileNames.get(j).replace(inputPath,"")+"\n";
                outStream.write(line.getBytes());
            }
        }
    } catch (IOException e) {
        // Covers FileNotFoundException as well (it is an IOException subclass).
        e.printStackTrace();
    } catch (java.nio.BufferOverflowException e) {
        // Kept from the original mapped-buffer variant; identifies the offending line.
        log.error(line);
        e.printStackTrace();
    }
    log.info("Done pairs");
}
/**
 * Deserializes a previously dumped (simplified) GumTree {@code ITree} from the
 * file at {@code fn} and detaches it from any serialized parent.
 *
 * NOTE(review): Java native deserialization — the dump files are assumed to be
 * trusted, locally generated artifacts; do not feed untrusted data here.
 *
 * @param fn path of the serialized tree dump
 * @return the deserialized tree, or {@code null} when the file is missing or
 *         unreadable (the original code threw an NPE in that case)
 */
public static ITree getSimpliedTree(String fn) {
    ITree tree = null;
    // try-with-resources closes both streams even if readObject fails
    // (the original leaked them on any exception).
    try (FileInputStream fi = new FileInputStream(new File(fn));
         ObjectInputStream oi = new ObjectInputStream(fi)) {
        tree = (ITree) oi.readObject();
    } catch (FileNotFoundException e) {
        log.error("File not found");
        e.printStackTrace();
    } catch (IOException e) {
        log.error("Error initializing stream");
        e.printStackTrace();
    } catch (ClassNotFoundException e) {
        e.printStackTrace();
    }
    if (tree != null) {
        // Detach from any serialized parent so the tree is treated as a root.
        tree.setParent(null);
    }
    return tree;
}
/**
 * Processes one pair file: for each tab-separated line {@code i, j, pathA, pathB},
 * loads both serialized trees, runs the GumTree matcher, and appends a result line
 * (indices, three similarity scores, action count, both paths) to
 * {@code outputPath/comparison_splitted/output_<name>}.
 *
 * @param mes        pair file to process
 * @param outputPath root output directory
 * @param inputPath  local root that replaces the path prefix up to "GumTreeOutput2"
 */
private static void coreLoop(File mes, String outputPath, String inputPath) {
    log.info("Starting in coreLoop");
    // try-with-resources closes both files even on failure; the original never
    // closed the reader at all and leaked the writer on exceptions.
    try (BufferedReader br = new BufferedReader(new FileReader(mes));
         BufferedWriter writer = new BufferedWriter(new FileWriter(
                 outputPath + "comparison_splitted/" + "output_" + mes.getName()))) {
        String sCurrentLine;
        while ((sCurrentLine = br.readLine()) != null) {
            String[] split = sCurrentLine.split("\t");
            String i = split[0];
            String j = split[1];
            // Re-anchor each dump path below the local GumTreeOutput2 root.
            String firstValue = inputPath + split[2].split("GumTreeOutput2")[1];
            String secondValue = inputPath + split[3].split("GumTreeOutput2")[1];
            ITree oldTree = getSimpliedTree(firstValue);
            ITree newTree = getSimpliedTree(secondValue);
            Matcher m = Matchers.getInstance().getMatcher(oldTree, newTree);
            m.match();
            ActionGenerator ag = new ActionGenerator(oldTree, newTree, m.getMappings());
            ag.generate();
            List<Action> actions = ag.getActions();
            writer.write(i);
            writer.write("\t");
            writer.write(j);
            writer.write("\t");
            writer.write(String.format("%1.2f", m.chawatheSimilarity(oldTree, newTree)));
            writer.write("\t");
            writer.write(String.format("%1.2f", m.diceSimilarity(oldTree, newTree)));
            writer.write("\t");
            writer.write(String.format("%1.2f", m.jaccardSimilarity(oldTree, newTree)));
            writer.write("\t");
            writer.write(String.valueOf(actions.size()));
            writer.write("\t");
            writer.write(firstValue);
            writer.write("\t");
            writer.write(secondValue);
            writer.write("\n");
        }
    } catch (FileNotFoundException e) {
        log.error("File not found");
        e.printStackTrace();
    } catch (IOException e) {
        log.error("Error initializing stream");
        e.printStackTrace();
    }
    log.info("Completed output_" + mes.getName());
}
/**
 * Writes every (i, j) pair of AST dump file names into memory-mapped
 * {@code textfile*.txt} files under {@code outputPath/pairs/}, rolling over to a
 * new file whenever the current ~2GB mapping has fewer than 500 bytes left.
 *
 * @param folders    AST dump files to pair up
 * @param outputPath root output directory; "pairs/" is created beneath it
 */
private static void readMessageFiles(List<File> folders, String outputPath) {
    List<String> treesFileNames = new ArrayList<>();
    for (File target : folders) {
        treesFileNames.add(target.toString());
    }
    FileHelper.createDirectory(outputPath + "pairs/");
    log.info("Calculating pairs");
    // treesFileNames = treesFileNames.subList(0,100);
    byte[] buf = new byte[0];
    String line = null;
    FileChannel rwChannel = null;
    try {
        rwChannel = new RandomAccessFile(outputPath + "pairs/" + "textfile.txt", "rw").getChannel();
        ByteBuffer wrBuf = rwChannel.map(FileChannel.MapMode.READ_WRITE, 0, Integer.MAX_VALUE);
        int fileCounter = 0;
        for (int i = 0; i < treesFileNames.size(); i++) {
            for (int j = i + 1; j < treesFileNames.size(); j++) {
                // NOTE(review): the stripped prefix is a hard-coded developer path — confirm it matches the dump root.
                line = String.valueOf(i) + "\t" + String.valueOf(j) + "\t" + treesFileNames.get(i).replace("/Users/anilkoyuncu/bugStudy/dataset/GumTreeOutput2", "") + "\t" + treesFileNames.get(j).replace("/Users/anilkoyuncu/bugStudy/dataset/GumTreeOutput2", "") + "\n";
                buf = line.getBytes();
                if (wrBuf.remaining() <= 500) {
                    // Mapping almost full: close it and roll over to a fresh file.
                    log.info("Next pair dump");
                    fileCounter++;
                    rwChannel.close(); // originally leaked: the exhausted channel was left open
                    rwChannel = new RandomAccessFile(outputPath + "pairs/" + "textfile" + String.valueOf(fileCounter) + ".txt", "rw").getChannel();
                    wrBuf = rwChannel.map(FileChannel.MapMode.READ_WRITE, 0, Integer.MAX_VALUE);
                }
                // Write to whichever mapping is current; the original silently
                // dropped the line that triggered a rollover.
                wrBuf.put(buf);
            }
        }
    } catch (IOException e) {
        // FileNotFoundException is an IOException, so one handler covers both.
        e.printStackTrace();
    } catch (java.nio.BufferOverflowException e) {
        log.error(line);
        log.error(String.valueOf(buf.length));
        e.printStackTrace();
    } finally {
        if (rwChannel != null) {
            try {
                rwChannel.close();
            } catch (IOException e) {
                e.printStackTrace();
            }
        }
    }
    log.info("Done pairs");
}
// Shared Jedis connection-pool configuration, built once at class-load time.
static final JedisPoolConfig poolConfig = buildPoolConfig();
@@ -767,8 +131,6 @@ orginal calculate pairs, from all dumps of the projects
}
// return msgFiles;
}
@@ -98,4 +98,8 @@ public class CommitPatchParser extends Parser{
return buggyStatements + fixedStatements;
}
@Override
public void parseFixPatterns(File prevFile, File revFile, File diffEntryFile, String project) {
}
}
@@ -41,7 +41,7 @@ public class FixedViolationHunkParser extends FixedViolationParser {
public String unfixedViolations = "";
@Override
public void parseFixPatterns(File prevFile, File revFile, File diffentryFile) {
public void parseFixPatterns(File prevFile, File revFile, File diffentryFile,String project) {
List<HierarchicalActionSet> actionSets = parseChangedSourceCodeWithGumTree2(prevFile, revFile);
if (actionSets.size() != 0) {
boolean isUpdate =
@@ -71,12 +71,12 @@ public class FixedViolationHunkParser extends FixedViolationParser {
try {
// String pj = diffentryFile.getParent().split("Defects4J")[1];
String datasetName = diffentryFile.getParent().split("dataset/")[1].split("/")[0];
String datasetName = project;
String[] split1 = diffentryFile.getParent().split(datasetName);
String root = split1[0];
String pj = split1[1].split("/")[1];
String hunkTreeFileName = root + "GumTreeOutput" + datasetName + "/" + pj + folder + diffentryFile.getName() + "_" + String.valueOf(hunkSet);
String hunkTreeFileName = root + "EnhancedASTDiff" + datasetName + "/" + pj + folder + diffentryFile.getName() + "_" + String.valueOf(hunkSet);
f = new FileOutputStream(new File(hunkTreeFileName));
ObjectOutputStream o = new ObjectOutputStream(f);
o.writeObject(actionSet);
@@ -37,7 +37,7 @@ public class FixedViolationParser extends Parser {
protected String violationTypes = "";
@Override
public void parseFixPatterns(File prevFile, File revFile, File diffentryFile) {
public void parseFixPatterns(File prevFile, File revFile, File diffentryFile,String project) {
}
/**
@@ -134,7 +134,12 @@ public class FixedViolationParser extends Parser {
public String getAlarmTypes() {
return violationTypes;
}
@Override
public void parseFixPatterns(File prevFile, File revFile, File diffEntryFile) {
}
// public void setUselessViolations(List<Violation> uselessViolations) {
// this.uselessViolations = uselessViolations;
// }
@@ -259,7 +259,7 @@ public class MultiThreadTreeLoader {
}catch (Exception e){
log.error(e.toString() + " {}",(name));
log.warn(e.toString() + " {}",(name));
}
@@ -5,119 +5,27 @@ import com.github.gumtreediff.actions.model.*;
import com.github.gumtreediff.matchers.Matcher;
import com.github.gumtreediff.matchers.Matchers;
import com.github.gumtreediff.tree.ITree;
import com.github.gumtreediff.tree.Tree;
import com.github.gumtreediff.tree.TreeContext;
import edu.lu.uni.serval.gumtree.GumTreeComparer;
import edu.lu.uni.serval.gumtree.regroup.HierarchicalActionSet;
import edu.lu.uni.serval.gumtree.regroup.HierarchicalRegrouper;
import edu.lu.uni.serval.utils.FileHelper;
import edu.lu.uni.serval.utils.ListSorter;
import org.javatuples.Pair;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import redis.clients.jedis.*;
import java.io.*;
import java.nio.ByteBuffer;
import java.nio.channels.FileChannel;
import java.time.Duration;
import java.util.*;
import java.util.concurrent.Executors;
import java.util.function.Consumer;
import java.util.stream.Collectors;
import java.util.stream.Stream;
import static edu.lu.uni.serval.FixPatternParser.cluster.AkkaTreeLoader.loadRedis;
/**
* Created by anilkoyuncu on 19/03/2018.
*/
public class MultiThreadTreeLoaderCluster {
private static int resultType;
/**
 * Drains an InputStream line by line, feeding every line to the supplied
 * consumer. Used to forward a child process's stdout (e.g. to System.out)
 * so the process cannot block on a full pipe.
 */
private static class StreamGobbler implements Runnable {
    private InputStream inputStream;
    private Consumer<String> consumer;

    public StreamGobbler(InputStream inputStream, Consumer<String> consumer) {
        this.inputStream = inputStream;
        this.consumer = consumer;
    }

    @Override
    public void run() {
        BufferedReader reader = new BufferedReader(new InputStreamReader(inputStream));
        reader.lines().forEach(consumer);
    }
}
private static Logger log = LoggerFactory.getLogger(MultiThreadTreeLoaderCluster.class);
/**
 * Entry point: boots two local Redis servers — one holding the cluster1 pair
 * data on the given port, one holding the raw tree dumps on fixed port 6399 —
 * via the shared startServer.sh script. The actual comparison steps below are
 * commented out and run manually as needed.
 */
public static void main(String[] args){
String inputPath;
String outputPath;
String port;
String pairsCSVPath;
String importScript;
String dbDir;
// Positional CLI arguments override the hard-coded developer defaults below.
if (args.length > 0) {
inputPath = args[0];
outputPath = args[1];
port = args[2];
pairsCSVPath = args[3];
importScript = args[4];
dbDir = args[5];
} else {
// inputPath = "/Users/anilkoyuncu/bugStudy/dataset/GumTreeOutput2/";
// Developer-machine defaults, used only when no arguments are supplied.
inputPath = "/Users/anilkoyuncu/bugStudy/code/python/clusterDumps";
outputPath = "/Users/anilkoyuncu/bugStudy/dataset/";
port = "6381";
pairsCSVPath = "/Users/anilkoyuncu/bugStudy/dataset/pairs-csv/";
importScript = "/Users/anilkoyuncu/bugStudy/dataset/redisSingleImport.sh";
dbDir = "/Users/anilkoyuncu/bugStudy/dataset/redis";
}
// Start the Redis instance serving cluster1.rdb on the requested port.
String cmd = "bash "+dbDir + "/" + "startServer.sh" +" %s %s %s";
cmd = String.format(cmd, dbDir,"cluster1.rdb",Integer.valueOf(port));
edu.lu.uni.serval.FixPatternParser.cluster.AkkaTreeLoader.loadRedis(cmd,"1000");
// Start a second Redis instance serving dumps.rdb on fixed port 6399.
cmd = "bash "+dbDir + "/" + "startServer.sh" +" %s %s %s";
cmd = String.format(cmd, dbDir,"dumps.rdb",Integer.valueOf("6399"));
edu.lu.uni.serval.FixPatternParser.cluster.AkkaTreeLoader.loadRedis(cmd,"10000");
// calculatePairsOfClusters(inputPath, outputPath);
// mainCompare(inputPath,port,pairsCSVPath,importScript);
// calculatePairs(inputPath, outputPath);
// processMessages(inputPath,outputPath);
// evaluateResults(inputPath,outputPath);
}
/**
 * Runs the given shell command (a Redis startup script), streaming its stdout
 * to System.out via a background StreamGobbler, and blocks until it exits.
 *
 * NOTE(review): the single-thread executor is never shut down and the exit-code
 * check relies on {@code assert} (a no-op unless -ea is set) — both inherited
 * from the original; left as-is to preserve behavior.
 *
 * @param cmd full shell command to execute
 */
public static void loadRedis(String cmd) {
    Process process;
    try {
        process = Runtime.getRuntime()
                .exec(cmd);
        StreamGobbler streamGobbler =
                new StreamGobbler(process.getInputStream(), System.out::println);
        Executors.newSingleThreadExecutor().submit(streamGobbler);
        int exitCode = process.waitFor();
        assert exitCode == 0;
    } catch (IOException e) {
        e.printStackTrace();
    } catch (InterruptedException e) {
        // Restore the interrupt flag so callers can observe the interruption
        // (the original swallowed it).
        Thread.currentThread().interrupt();
        e.printStackTrace();
    }
    log.info("Load done");
}
public static void mainCompare(String port,String pairsCSVPath,String importScript,String dbDir,String chunkName,String dumpName,String portInner,String serverWait,String type) throws Exception {
@@ -222,31 +130,11 @@ public class MultiThreadTreeLoaderCluster {
public static Pair<ITree,String> getTree(String firstValue, JedisPool outerPool,String type){
// HierarchicalActionSet actionSet = null;
// try {
// FileInputStream fi = new FileInputStream(new File(dumps + firstValue));
// ObjectInputStream oi = new ObjectInputStream(fi);
// actionSet = (HierarchicalActionSet) oi.readObject();
// oi.close();
// fi.close();
//
//
// } catch (FileNotFoundException e) {
// log.error("File not found");
// e.printStackTrace();
// } catch (IOException e) {
// log.error("Error initializing stream");
// e.printStackTrace();
// } catch (ClassNotFoundException e) {
// // TODO Auto-generated catch block
// e.printStackTrace();
// }
ITree tree = null;
Jedis inner = null;
String[] split2 = firstValue.split("/");
String cluster = split2[1];
String fullFileName = split2[2];
String fullFileName = split2[split2.length-1];
String[] split = fullFileName.split(".txt_");
String pureFileName = split[0];
String[] splitPJ = split[1].split("_");
@@ -269,17 +157,6 @@ public class MultiThreadTreeLoaderCluster {
tree.setParent(null);
tc.validate();
// log.info(tc.toString());
// ITree newTree = ((Update)actionSet.getAction()).getNewNode();
// ITree oldTree = ((Update)actionSet.getAction()).getNode();
//
// Matcher m = Matchers.getInstance().getMatcher(oldTree, newTree);
// m.match();
// ActionGenerator ag = new ActionGenerator(oldTree, newTree, m.getMappings());
// ag.generate();
// List<Action> actions = ag.getActions();
// log.info(actions.toString());
} catch (IOException e) {
e.printStackTrace();
@@ -346,77 +223,7 @@ public class MultiThreadTreeLoaderCluster {
return parent;
}
// public static void getActionTree(HierarchicalActionSet actionSet){
//
//
// int newType = 0;
//
// Action action = actionSet.getAction();
// if (action instanceof Update){
// newType = 101;
// }else if(action instanceof Insert){
// newType =100;
// }else if(action instanceof Move){
// newType = 102;
// }else if(action instanceof Delete){
// newType=103;
// }else{
// new Exception("unknow action");
// }
// actionSet.getNode().setType(newType);
//// actionSet.getNode().setLabel("");
// List<HierarchicalActionSet> subActions = actionSet.getSubActions();
// if (subActions.size() != 0){
// for (HierarchicalActionSet subAction : subActions) {
// getActionTree(subAction);
// }
//
//
// }
//
// }
// public static ITree getActionTree(HierarchicalActionSet actionSet, ITree parent, ITree children){
//
// int newType = 0;
//
// Action action = actionSet.getAction();
// if (action instanceof Update){
// newType = 101;
// }else if(action instanceof Insert){
// newType =100;
// }else if(action instanceof Move){
// newType = 102;
// }else if(action instanceof Delete){
// newType=103;
// }else{
// new Exception("unknow action");
// }
// if(actionSet.getParent() == null){
// //root
//
// parent = new Tree(newType,"");
// }else{
// children = new Tree(newType,"");
// parent.addChild(children);
// }
// List<HierarchicalActionSet> subActions = actionSet.getSubActions();
// if (subActions.size() != 0){
// for (HierarchicalActionSet subAction : subActions) {
//
// if(actionSet.getParent() == null){
// children = parent;
// }
// getActionTree(subAction,children,null);
//
// }
//
//
// }
// return parent;
// }
private static void coreCompare(String name , JedisPool jedisPool,String clusterName,JedisPool outerPool,String type) {
@@ -441,34 +248,6 @@ public class MultiThreadTreeLoaderCluster {
String firstValue = resultMap.get("0");
String secondValue = resultMap.get("1");
// if (firstValue.equals("71d453_0b5934_hbase-server#src#main#java#org#apache#hadoop#hbase#regionserver#RSRpcServices.txt_0")){
// //3f70d6_9ee9c5_camel-core#src#main#java#org#apache#camel#builder#NotifyBuilder.txt_0_CAMEL
// //29ea3e_71c614_spring-batch-core#src#test#java#org#springframework#batch#core#domain#JobExecutionTests.txt_0_BATCH
// log.info(firstValue);
// }
// firstValue = inputPath + firstValue;
// secondValue = inputPath + secondValue;
// String[] firstValueSplit = firstValue.split("/");
// String[] secondValueSplit = secondValue.split("/");
//
// if (firstValueSplit.length == 1) {
// firstValue = inputPath + firstValueSplit[0];
// } else {
// firstValue = inputPath + firstValueSplit[1];
// }
//
// if (secondValueSplit.length == 1) {
// secondValue = inputPath + secondValueSplit[0];
// } else {
// secondValue = inputPath + secondValueSplit[1];
// }
try {
Pair<ITree, String> oldPair = getTree(firstValue, outerPool,type);
@@ -516,7 +295,7 @@ public class MultiThreadTreeLoaderCluster {
// log.info("Completed " + resultKey);
}catch (Exception e){
log.error(e.toString() + " {}",(name));
log.warn(e.toString() + " {}",(name));
}
@@ -529,68 +308,6 @@ public class MultiThreadTreeLoaderCluster {
/**
 * Diffs two source files with GumTree and regroups the raw edit actions into
 * sorted hierarchical action sets.
 *
 * Side effect: records the outcome in the static {@code resultType} field —
 * 1 = GumTree failed (returns null), 2 = no differences (returns empty list),
 * 3 = differences found but nothing left after sorting.
 *
 * @param prevFile buggy/previous version of the file
 * @param revFile  fixed/revised version of the file
 * @return sorted action sets, an empty list, or null when GumTree fails
 */
protected static List<HierarchicalActionSet> parseChangedSourceCodeWithGumTree2(File prevFile, File revFile) {
List<HierarchicalActionSet> actionSets = new ArrayList<>();
// GumTree results
List<Action> gumTreeResults = new GumTreeComparer().compareTwoFilesWithGumTree(prevFile, revFile);
if (gumTreeResults == null) {
resultType = 1;
return null;
} else if (gumTreeResults.size() == 0){
resultType = 2;
return actionSets;
} else {
// Regroup GumTre results.
List<HierarchicalActionSet> allActionSets = new HierarchicalRegrouper().regroupGumTreeResults(gumTreeResults);
// for (HierarchicalActionSet actionSet : allActionSets) {
// String astNodeType = actionSet.getAstNodeType();
// if (astNodeType.endsWith("Statement") || "FieldDeclaration".equals(astNodeType)) {
// actionSets.add(actionSet);
// }
// }
// Filter out modified actions of changing method names, method parameters, variable names and field names in declaration part.
// variable effects range, sub-actions are these kinds of modification?
// actionSets.addAll(new ActionFilter().filterOutUselessActions(allActionSets));
ListSorter<HierarchicalActionSet> sorter = new ListSorter<>(allActionSets);
actionSets = sorter.sortAscending();
if (actionSets.size() == 0) {
resultType = 3;
}
return actionSets;
}
}
/*
original calculate pairs, from all dumps of the projects
*/
/**
 * Collects every AST dump file of every project under {@code inputPath} and
 * hands the combined list to {@link #readMessageFiles} for pair generation.
 *
 * @param inputPath  root directory containing one sub-directory per project
 * @param outputPath root directory where the generated pair files are written
 */
public static void calculatePairs(String inputPath, String outputPath) {
    File root = new File(inputPath);
    File[] entries = root.listFiles();
    Stream<File> entryStream = Arrays.stream(entries);
    List<File> projects = entryStream
            .filter(f -> !f.getName().startsWith("."))
            .collect(Collectors.toList());
    List<File> fileToCompare = new ArrayList<>();
    for (File project : projects) {
        // Only the project's "ASTDumps*" folder is of interest.
        File[] dumpDirs = project.listFiles(new FilenameFilter() {
            public boolean accept(File dir, String name) {
                return name.startsWith("ASTDumps");
            }
        });
        Collections.addAll(fileToCompare, dumpDirs[0].listFiles());
    }
    System.out.println("a");
    // compareAll(fileToCompare);
    readMessageFiles(fileToCompare, outputPath);
}
/*
pairs of each cluster
*/
@@ -615,17 +332,6 @@ orginal calculate pairs, from all dumps of the projects
}
/**
 * Compares all pre-split pair files in parallel: every non-hidden file in
 * {@code outputPath/pairs_splitted/} is fed to {@link #coreLoop}, whose results
 * land in {@code outputPath/comparison_splitted/}.
 *
 * @param inputPath  dump root forwarded to coreLoop
 * @param outputPath root output directory
 */
public static void processMessages(String inputPath, String outputPath) {
    File splitDir = new File(outputPath + "pairs_splitted/");
    File[] entries = splitDir.listFiles();
    List<File> pairFiles = Arrays.stream(entries)
            .filter(entry -> !entry.getName().startsWith("."))
            .collect(Collectors.toList());
    FileHelper.createDirectory(outputPath + "comparison_splitted/");
    pairFiles.parallelStream()
            .forEach(pairFile -> coreLoop(pairFile, outputPath, inputPath));
}
private static void readMessageFilesCluster(List<File> folders, String outputPath,String inputPath,String cluster,String type) {
@@ -645,7 +351,7 @@ orginal calculate pairs, from all dumps of the projects
String line = null;
try {
FileOutputStream fos = new FileOutputStream(outputPath + "/pairs"+type+"/" +filename+".txt");
FileOutputStream fos = new FileOutputStream(outputPath + "/pairs"+type+"/" +filename+".csv");
DataOutputStream outStream = new DataOutputStream(new BufferedOutputStream(fos));
@@ -655,7 +361,7 @@ orginal calculate pairs, from all dumps of the projects
line = String.valueOf(i) +"\t" + String.valueOf(j) + "\t" + treesFileNames.get(i).replace(inputPath,"") + "\t" + treesFileNames.get(j).replace(inputPath,"")+"\n";
line = String.valueOf(i) +"," + String.valueOf(j) + "," + treesFileNames.get(i).replace(inputPath,"") + "," + treesFileNames.get(j).replace(inputPath,"")+"\n";
outStream.write(line.getBytes());
}
@@ -676,159 +382,6 @@ orginal calculate pairs, from all dumps of the projects
/**
 * Deserializes a previously dumped (simplified) GumTree {@code ITree} from the
 * file at {@code fn} and detaches it from any serialized parent.
 *
 * NOTE(review): if deserialization fails, {@code tree} stays null and the
 * unconditional {@code tree.setParent(null)} below throws an NPE; the streams
 * are also not closed on the exception paths. Native Java deserialization here
 * assumes the dump files are trusted, locally generated artifacts.
 *
 * @param fn path of the serialized tree dump
 * @return the deserialized tree with its parent cleared
 */
public static ITree getSimpliedTree(String fn) {
ITree tree = null;
try {
FileInputStream fi = new FileInputStream(new File(fn));
ObjectInputStream oi = new ObjectInputStream(fi);
tree = (ITree) oi.readObject();
oi.close();
fi.close();
} catch (FileNotFoundException e) {
log.error("File not found");
e.printStackTrace();
} catch (IOException e) {
log.error("Error initializing stream");
e.printStackTrace();
} catch (ClassNotFoundException e) {
// TODO Auto-generated catch block
e.printStackTrace();
}
// tree.setLabel("");
// Detach from any serialized parent so the tree is treated as a root.
tree.setParent(null);
// List<ITree> descendants = tree.getDescendants();
// for (ITree descendant : descendants) {
// descendant.setLabel("");
// }
return tree;
}
/**
 * Processes one pair file: for each tab-separated line {@code i, j, pathA, pathB},
 * loads both serialized trees, runs the GumTree matcher and action generator,
 * and appends one result line (indices, chawathe/dice/jaccard similarities,
 * action count, both paths) to
 * {@code outputPath/comparison_splitted/output_<name>}.
 *
 * NOTE(review): the BufferedReader is never closed, and the writer is leaked
 * when an exception occurs mid-loop.
 *
 * @param mes        pair file to process
 * @param outputPath root output directory
 * @param inputPath  local root replacing the path prefix up to "GumTreeOutput2"
 */
private static void coreLoop(File mes, String outputPath, String inputPath) {
try {
log.info("Starting in coreLoop");
BufferedReader br = null;
String sCurrentLine = null;
BufferedWriter writer = new BufferedWriter(new FileWriter(outputPath + "comparison_splitted/" + "output_" + mes.getName()));
br = new BufferedReader(
new FileReader(mes));
while ((sCurrentLine = br.readLine()) != null) {
String currentLine = sCurrentLine;
String[] split = currentLine.split("\t");
String i = split[0];
String j = split[1];
String firstValue = split[2];
String secondValue = split[3];
// Re-anchor each dump path below the local GumTreeOutput2 root.
firstValue = inputPath + firstValue.split("GumTreeOutput2")[1];
secondValue = inputPath + secondValue.split("GumTreeOutput2")[1];
ITree oldTree = getSimpliedTree(firstValue);
ITree newTree = getSimpliedTree(secondValue);
Matcher m = Matchers.getInstance().getMatcher(oldTree, newTree);
m.match();
ActionGenerator ag = new ActionGenerator(oldTree, newTree, m.getMappings());
ag.generate();
List<Action> actions = ag.getActions();
writer.write(String.valueOf(i));
writer.write("\t");
writer.write(String.valueOf(j));
writer.write("\t");
writer.write(String.format("%1.2f", m.chawatheSimilarity(oldTree, newTree)));
writer.write("\t");
writer.write(String.format("%1.2f", m.diceSimilarity(oldTree, newTree)));
writer.write("\t");
writer.write(String.format("%1.2f", m.jaccardSimilarity(oldTree, newTree)));
writer.write("\t");
writer.write(String.valueOf(actions.size()));
writer.write("\t");
writer.write(firstValue);
writer.write("\t");
writer.write(secondValue);
writer.write("\n");
}
writer.close();
} catch (FileNotFoundException e) {
log.error("File not found");
e.printStackTrace();
} catch (IOException e) {
log.error("Error initializing stream");
e.printStackTrace();
}
log.info("Completed output_" + mes.getName());
}
/**
 * Writes every (i, j) pair of AST dump file names into memory-mapped
 * {@code textfile*.txt} files under {@code outputPath/pairs/}, rolling over to
 * a new file when fewer than 500 bytes remain in the current ~2GB mapping.
 *
 * NOTE(review): on rollover the line that triggered it is never written, and
 * the exhausted channel is left open; the stripped prefix is a hard-coded
 * developer path.
 *
 * @param folders    AST dump files to pair up
 * @param outputPath root output directory; "pairs/" is created beneath it
 */
private static void readMessageFiles(List<File> folders, String outputPath) {
List<String> treesFileNames = new ArrayList<>();
for (File target : folders) {
treesFileNames.add(target.toString());
}
FileHelper.createDirectory(outputPath + "pairs/");
log.info("Calculating pairs");
// treesFileNames = treesFileNames.subList(0,100);
byte [] buf = new byte[0];
String line = null;
try {
FileChannel rwChannel = new RandomAccessFile(outputPath + "pairs/" +"textfile.txt", "rw").getChannel();
ByteBuffer wrBuf = rwChannel.map(FileChannel.MapMode.READ_WRITE, 0, Integer.MAX_VALUE);
int fileCounter = 0;
for (int i = 0; i < treesFileNames.size(); i++) {
for (int j = i + 1; j < treesFileNames.size(); j++) {
line = String.valueOf(i) +"\t" + String.valueOf(j) + "\t" + treesFileNames.get(i).replace("/Users/anilkoyuncu/bugStudy/dataset/GumTreeOutput2","") + "\t" + treesFileNames.get(j).replace("/Users/anilkoyuncu/bugStudy/dataset/GumTreeOutput2","")+"\n";
buf = line.getBytes();
if(wrBuf.remaining() > 500) {
wrBuf.put(buf);
}else{
// Rollover: open the next textfileN.txt (current `line` is dropped — see NOTE above).
log.info("Next pair dump");
fileCounter++;
rwChannel = new RandomAccessFile(outputPath+"pairs/" +"textfile"+String.valueOf(fileCounter)+".txt", "rw").getChannel();
wrBuf = rwChannel.map(FileChannel.MapMode.READ_WRITE, 0, Integer.MAX_VALUE);
}
}
}
rwChannel.close();
} catch (FileNotFoundException e) {
e.printStackTrace();
} catch (IOException e) {
e.printStackTrace();
}catch (java.nio.BufferOverflowException e) {
log.error(line);
log.error(String.valueOf(buf.length));
e.printStackTrace();
}
log.info("Done pairs");
}
static final JedisPoolConfig poolConfig = buildPoolConfig();
@@ -38,127 +38,9 @@ import static edu.lu.uni.serval.FixPatternParser.violations.MultiThreadTreeLoade
*/
public class MultiThreadTreeLoaderCluster3 {
private static int resultType;
/**
 * Drains an InputStream line by line, feeding each line to the supplied
 * consumer. Used to forward a child process's stdout so the process cannot
 * block on a full pipe.
 */
private static class StreamGobbler implements Runnable {
private InputStream inputStream;
private Consumer<String> consumer;
public StreamGobbler(InputStream inputStream, Consumer<String> consumer) {
this.inputStream = inputStream;
this.consumer = consumer;
}
@Override
public void run() {
new BufferedReader(new InputStreamReader(inputStream)).lines()
.forEach(consumer);
}
}
private static Logger log = LoggerFactory.getLogger(MultiThreadTreeLoaderCluster3.class);
/**
 * Entry point: boots two local Redis servers — one holding the cluster2 pair
 * data on the given port, one holding the raw tree dumps on fixed port 6399 —
 * via the shared startServer.sh script. The comparison/CSV steps below are
 * commented out and run manually as needed.
 */
public static void main(String[] args){
String inputPath;
String outputPath;
String port;
String pairsCSVPath;
String importScript;
String csvScript;
String dbDir;
// Positional CLI arguments override the hard-coded developer defaults below.
if (args.length > 0) {
inputPath = args[0];
outputPath = args[1];
port = args[2];
pairsCSVPath = args[3];
importScript = args[4];
csvScript = args[5];
dbDir = args[6];
} else {
// inputPath = "/Users/anilkoyuncu/bugStudy/dataset/GumTreeOutput2/";
// Developer-machine defaults, used only when no arguments are supplied.
inputPath = "/Users/anilkoyuncu/bugStudy/code/python/cluster2L";
outputPath = "/Users/anilkoyuncu/bugStudy/dataset/";
port = "6379";
pairsCSVPath = "/Users/anilkoyuncu/bugStudy/dataset/pairs-2l-csv/";
importScript = "/Users/anilkoyuncu/bugStudy/dataset/redisSingleImport.sh";
csvScript = "/Users/anilkoyuncu/bugStudy/dataset/transformCSV.sh";
dbDir = "/Users/anilkoyuncu/bugStudy/dataset/redis";
}
// calculatePairsOfClusters(inputPath, outputPath);
// createCSV(csvScript,outputPath + "pairs-2l/",pairsCSVPath);
//create csv file and move
// Start the Redis instance serving cluster2.rdb on the requested port.
String cmd = "bash "+dbDir + "/" + "startServer.sh" +" %s %s %s";
cmd = String.format(cmd, dbDir,"cluster2.rdb",Integer.valueOf(port));
edu.lu.uni.serval.FixPatternParser.cluster.AkkaTreeLoader.loadRedis(cmd,"1000");
// Start a second Redis instance serving dumps.rdb on fixed port 6399.
cmd = "bash "+dbDir + "/" + "startServer.sh" +" %s %s %s";
cmd = String.format(cmd, dbDir,"dumps.rdb",Integer.valueOf("6399"));
edu.lu.uni.serval.FixPatternParser.cluster.AkkaTreeLoader.loadRedis(cmd,"1000");
// mainCompare(inputPath,port,pairsCSVPath,importScript);
// calculatePairs(inputPath, outputPath);
// processMessages(inputPath,outputPath);
// evaluateResults(inputPath,outputPath);
}
/**
 * Runs the transformCSV shell script to convert the pair files under
 * {@code f1} and move the result to {@code f2}, streaming the script's stdout
 * through a background StreamGobbler and blocking until it exits.
 *
 * @param csvScript path of the conversion shell script
 * @param f1        source directory
 * @param f2        destination directory
 */
public static void createCSV(String csvScript, String f1, String f2) {
    String cmd;
    cmd = "bash " + csvScript + " %s %s";
    Process process = null;
    File source = new File(f1);
    File dest = new File(f2);
    log.info(source.getName());
    log.info(dest.getName());
    try {
        String comd = String.format(cmd, source.getAbsoluteFile(), dest.getAbsoluteFile());
        process = Runtime.getRuntime()
                .exec(comd);
        StreamGobbler streamGobbler =
                new StreamGobbler(process.getInputStream(), System.out::println);
        Executors.newSingleThreadExecutor().submit(streamGobbler);
        int exitCode = process.waitFor();
        assert exitCode == 0;
    } catch (IOException e) {
        e.printStackTrace();
    } catch (InterruptedException e) {
        // Restore the interrupt flag so callers can observe the interruption.
        Thread.currentThread().interrupt();
        e.printStackTrace();
    } finally {
        // The original called destroyForcibly() unconditionally and threw an
        // NPE whenever exec() failed before `process` was assigned.
        if (process != null) {
            process.destroyForcibly();
        }
    }
    log.info("Load done");
}
/**
 * Formats the given command template with the file's absolute path, runs it,
 * streams its stdout through a background StreamGobbler, and blocks until the
 * process exits.
 *
 * @param cmd command template containing a single {@code %s} for the file path
 * @param f   file whose absolute path is substituted into the command
 */
public static void loadRedis(String cmd, File f) {
    Process process;
    log.info(f.getName());
    try {
        String comd = String.format(cmd, f.getAbsoluteFile());
        process = Runtime.getRuntime()
                .exec(comd);
        StreamGobbler streamGobbler =
                new StreamGobbler(process.getInputStream(), System.out::println);
        Executors.newSingleThreadExecutor().submit(streamGobbler);
        int exitCode = process.waitFor();
        assert exitCode == 0;
    } catch (IOException e) {
        e.printStackTrace();
    } catch (InterruptedException e) {
        // Restore the interrupt flag so callers can observe the interruption
        // (the original swallowed it).
        Thread.currentThread().interrupt();
        e.printStackTrace();
    }
    log.info("Load done");
}
// public static void mainCompare(String inputPath,String port,String pairsCSVPath,String importScript) {
public static void mainCompare(String port,String pairsCSVPath,String importScript,String dbDir,String chunkName,String dumpName,String portInner,String serverWait, String type) throws Exception {
@@ -256,9 +138,9 @@ public class MultiThreadTreeLoaderCluster3 {
public static ITree getTree(String firstValue, JedisPool outerPool,String type){
// String gumTreeInput = "/Volumes/data/bugStudy_backup/dataset/GumTreeInputBug4/";
String[] split2 = firstValue.split("/");
String cluster = split2[1];
String subCluster = split2[2];
String filename = split2[3];
String filename = split2[split2.length-1];
// String filename = split2[3];
String[] split1= filename.split(".txt_");
String s = split1[0];
String[] splitPJ = split1[1].split("_");
@@ -347,35 +229,6 @@ public class MultiThreadTreeLoaderCluster3 {
return parent;
}
/**
 * Recursively rewrites each action-set node's type to a synthetic code encoding
 * the edit kind (100 = Insert, 101 = Update, 102 = Move, 103 = Delete), then
 * returns the (mutated) root node of the action tree.
 *
 * @param actionSet root of the hierarchical action set to rewrite in place
 * @return the root node with its type (and its descendants' types) rewritten
 * @throws IllegalStateException if the action is of an unrecognized kind
 */
public static ITree getActionTree(HierarchicalActionSet actionSet) {
    int newType;
    Action action = actionSet.getAction();
    if (action instanceof Update) {
        newType = 101;
    } else if (action instanceof Insert) {
        newType = 100;
    } else if (action instanceof Move) {
        newType = 102;
    } else if (action instanceof Delete) {
        newType = 103;
    } else {
        // The original built `new Exception("unknow action")` without throwing
        // it, silently continuing with newType == 0. Fail loudly instead.
        throw new IllegalStateException("unknown action: " + action);
    }
    actionSet.getNode().setType(newType);
    // actionSet.getNode().setLabel("");
    // Recurse into sub-actions so the whole tree is re-typed.
    List<HierarchicalActionSet> subActions = actionSet.getSubActions();
    for (HierarchicalActionSet subAction : subActions) {
        getActionTree(subAction);
    }
    return actionSet.getNode();
}
private static List<String> getNames(ITree oldTree, List<String> oldTokens){
@@ -618,9 +471,9 @@ public class MultiThreadTreeLoaderCluster3 {
}
}
if (oldTokens.size() == 0 ) {// && (oldTree.getType() != 41 && oldTree.getType() != 21 && oldTree.getType() !=17 && oldTree.getType()!=60 && oldTree.getType() != 46)){
log.info("dur bakalim nereye!???");
}
// if (oldTokens.size() == 0 ) {// && (oldTree.getType() != 41 && oldTree.getType() != 21 && oldTree.getType() !=17 && oldTree.getType()!=60 && oldTree.getType() != 46)){
// log.info("dur bakalim nereye!???");
// }
return oldTokens;
}
@@ -704,9 +557,9 @@ public class MultiThreadTreeLoaderCluster3 {
oldTokens = getNames(oldTree,oldTokens);
newTokens = getNames(newTree,newTokens);
if(oldTokens.size() == 0 || newTokens.size() == 0){
log.error("Cluster {} has no tokens on pair {}",clusterName , name);
}
// if(oldTokens.size() == 0 || newTokens.size() == 0){
// log.error("Cluster {} has no tokens on pair {}",clusterName , name);
// }
// Matcher m = Matchers.getInstance().getMatcher(oldTree, newTree);
// m.match();
CharSequence[] oldSequences = oldTokens.toArray(new CharSequence[oldTokens.size()]);
@@ -732,9 +585,9 @@ public class MultiThreadTreeLoaderCluster3 {
// log.info(firstValue);
// log.info(secondValue);
// log.info("************");
if(!overallSimi.equals(1.0)){
log.info("");
}
// if(!overallSimi.equals(1.0)){
// log.info("");
// }
String matchKey = "match-"+clusterName+"_" + (String.valueOf(i)) + "_" + String.valueOf(j);
String result = firstValue + "," + secondValue + ","+String.join(",", oldTokens);
jedis.select(1);
@@ -747,7 +600,7 @@ public class MultiThreadTreeLoaderCluster3 {
}catch (Exception e){
log.error(e.toString() + " {}",(name));
log.warn(e.toString() + " {}",(name));
}
@@ -759,69 +612,6 @@ public class MultiThreadTreeLoaderCluster3 {
}
/**
 * Diffs two source files with GumTree and regroups the raw edit actions into
 * sorted hierarchical action sets.
 *
 * Side effect: records the outcome in the static {@code resultType} field —
 * 1 = GumTree failed (returns null), 2 = no differences (returns empty list),
 * 3 = differences found but nothing left after sorting.
 *
 * @param prevFile buggy/previous version of the file
 * @param revFile  fixed/revised version of the file
 * @return sorted action sets, an empty list, or null when GumTree fails
 */
protected static List<HierarchicalActionSet> parseChangedSourceCodeWithGumTree2(File prevFile, File revFile) {
List<HierarchicalActionSet> actionSets = new ArrayList<>();
// GumTree results
List<Action> gumTreeResults = new GumTreeComparer().compareTwoFilesWithGumTree(prevFile, revFile);
if (gumTreeResults == null) {
resultType = 1;
return null;
} else if (gumTreeResults.size() == 0){
resultType = 2;
return actionSets;
} else {
// Regroup GumTre results.
List<HierarchicalActionSet> allActionSets = new HierarchicalRegrouper().regroupGumTreeResults(gumTreeResults);
// for (HierarchicalActionSet actionSet : allActionSets) {
// String astNodeType = actionSet.getAstNodeType();
// if (astNodeType.endsWith("Statement") || "FieldDeclaration".equals(astNodeType)) {
// actionSets.add(actionSet);
// }
// }
// Filter out modified actions of changing method names, method parameters, variable names and field names in declaration part.
// variable effects range, sub-actions are these kinds of modification?
// actionSets.addAll(new ActionFilter().filterOutUselessActions(allActionSets));
ListSorter<HierarchicalActionSet> sorter = new ListSorter<>(allActionSets);
actionSets = sorter.sortAscending();
if (actionSets.size() == 0) {
resultType = 3;
}
return actionSets;
}
}
/*
original calculate pairs, from all dumps of the projects
*/
/**
 * Collects every AST dump file of every project under {@code inputPath} and
 * hands the combined list to {@link #readMessageFiles} for pair generation.
 *
 * NOTE(review): {@code listFiles()} may return null and {@code files[0]}
 * throws ArrayIndexOutOfBoundsException when a project has no "ASTDumps*"
 * folder; the println("a") looks like leftover debug output.
 *
 * @param inputPath  root directory containing one sub-directory per project
 * @param outputPath root directory where the generated pair files are written
 */
public static void calculatePairs(String inputPath, String outputPath) {
File folder = new File(inputPath);
File[] listOfFiles = folder.listFiles();
Stream<File> stream = Arrays.stream(listOfFiles);
List<File> pjs = stream
.filter(x -> !x.getName().startsWith("."))
.collect(Collectors.toList());
List<File> fileToCompare = new ArrayList<>();
for (File pj : pjs) {
// Only the project's "ASTDumps*" folder is of interest.
File[] files = pj.listFiles(new FilenameFilter() {
public boolean accept(File dir, String name) {
return name.startsWith("ASTDumps");
}
});
Collections.addAll(fileToCompare, files[0].listFiles());
}
System.out.println("a");
// compareAll(fileToCompare);
readMessageFiles(fileToCompare, outputPath);
}
/*
pairs of each cluster
*/
@@ -856,17 +646,7 @@ orginal calculate pairs, from all dumps of the projects
}
/**
 * Runs {@code coreLoop} in parallel over every split pair file found under
 * {@code outputPath + "pairs_splitted/"}, writing results to
 * {@code outputPath + "comparison_splitted/"}.
 *
 * Fixes: the original dereferenced {@code folder.listFiles()} without a
 * null check (NPE when the pairs_splitted directory is missing).
 *
 * @param inputPath  prefix passed through to coreLoop for path rebuilding
 * @param outputPath root folder holding pairs_splitted/ and comparison_splitted/
 */
public static void processMessages(String inputPath, String outputPath) {
    File folder = new File(outputPath + "pairs_splitted/");
    File[] listOfFiles = folder.listFiles();
    if (listOfFiles == null) {
        log.error("Missing directory: {}", folder);
        return;
    }
    List<File> pjs = Arrays.stream(listOfFiles)
            .filter(x -> !x.getName().startsWith("."))
            .collect(Collectors.toList());
    FileHelper.createDirectory(outputPath + "comparison_splitted/");
    pjs.parallelStream()
            .forEach(m -> coreLoop(m, outputPath, inputPath));
}
private static void readMessageFilesCluster(List<File> folders, String outputPath,String inputPath,String cluster, String subCluster,String type) {
@@ -890,7 +670,7 @@ orginal calculate pairs, from all dumps of the projects
// ByteBuffer wrBuf = rwChannel.map(FileChannel.MapMode.READ_WRITE, 0, 1000*treesFileNames.size()*treesFileNames.size());
// int fileCounter = 0;
FileOutputStream fos = new FileOutputStream(outputPath + "/pairs-2l"+type+"/" +filename+".txt");
FileOutputStream fos = new FileOutputStream(outputPath + "/pairs-2l"+type+"/" +filename+".csv");
DataOutputStream outStream = new DataOutputStream(new BufferedOutputStream(fos));
@@ -900,7 +680,7 @@ orginal calculate pairs, from all dumps of the projects
line = String.valueOf(i) +"\t" + String.valueOf(j) + "\t" + treesFileNames.get(i).replace(inputPath,"") + "\t" + treesFileNames.get(j).replace(inputPath,"")+"\n";
line = String.valueOf(i) +"," + String.valueOf(j) + "," + treesFileNames.get(i).replace(inputPath,"") + "," + treesFileNames.get(j).replace(inputPath,"")+"\n";
outStream.write(line.getBytes());
// buf = line.getBytes();
// if(wrBuf.remaining() > 500) {
@@ -933,159 +713,8 @@ orginal calculate pairs, from all dumps of the projects
/**
 * Deserializes an {@link ITree} previously dumped with ObjectOutputStream
 * from the file {@code fn}, detaches it from any parent, and returns it.
 *
 * Fixes: streams are now opened in try-with-resources (the original leaked
 * both streams when readObject threw), and a failed deserialization now
 * returns null instead of crashing on {@code tree.setParent(null)}.
 *
 * @param fn path to the serialized tree dump
 * @return the deserialized tree with its parent cleared, or null on failure
 */
public static ITree getSimpliedTree(String fn) {
    ITree tree = null;
    try (FileInputStream fi = new FileInputStream(new File(fn));
         ObjectInputStream oi = new ObjectInputStream(fi)) {
        tree = (ITree) oi.readObject();
    } catch (FileNotFoundException e) {
        log.error("File not found");
        e.printStackTrace();
    } catch (IOException e) {
        log.error("Error initializing stream");
        e.printStackTrace();
    } catch (ClassNotFoundException e) {
        e.printStackTrace();
    }
    if (tree == null) {
        // Previously fell through to tree.setParent(null) and threw an NPE.
        return null;
    }
    // tree.setLabel("");
    tree.setParent(null);
    // List<ITree> descendants = tree.getDescendants();
    // for (ITree descendant : descendants) {
    // descendant.setLabel("");
    // }
    return tree;
}
/**
 * Processes one pair file {@code mes}: for every tab-separated line
 * (i, j, firstPath, secondPath) it loads both serialized trees, runs the
 * GumTree matcher, and writes one tab-separated output line containing the
 * chawathe/dice/jaccard similarities, the action count, and both paths.
 *
 * Fixes: reader and writer are now opened in try-with-resources — the
 * original never closed the BufferedReader and left the writer open when
 * an exception escaped the loop.
 *
 * @param mes        one split pair file (tab-separated index/path records)
 * @param outputPath root output folder; results go to comparison_splitted/
 * @param inputPath  prefix used to rebuild absolute tree-dump paths
 */
private static void coreLoop(File mes, String outputPath, String inputPath) {
    log.info("Starting in coreLoop");
    try (BufferedReader br = new BufferedReader(new FileReader(mes));
         BufferedWriter writer = new BufferedWriter(new FileWriter(
                 outputPath + "comparison_splitted/" + "output_" + mes.getName()))) {
        String sCurrentLine;
        while ((sCurrentLine = br.readLine()) != null) {
            String[] split = sCurrentLine.split("\t");
            String i = split[0];
            String j = split[1];
            // Stored paths are relative to ...GumTreeOutput2; rebuild absolute paths.
            String firstValue = inputPath + split[2].split("GumTreeOutput2")[1];
            String secondValue = inputPath + split[3].split("GumTreeOutput2")[1];
            ITree oldTree = getSimpliedTree(firstValue);
            ITree newTree = getSimpliedTree(secondValue);
            Matcher m = Matchers.getInstance().getMatcher(oldTree, newTree);
            m.match();
            ActionGenerator ag = new ActionGenerator(oldTree, newTree, m.getMappings());
            ag.generate();
            List<Action> actions = ag.getActions();
            writer.write(i);
            writer.write("\t");
            writer.write(j);
            writer.write("\t");
            writer.write(String.format("%1.2f", m.chawatheSimilarity(oldTree, newTree)));
            writer.write("\t");
            writer.write(String.format("%1.2f", m.diceSimilarity(oldTree, newTree)));
            writer.write("\t");
            writer.write(String.format("%1.2f", m.jaccardSimilarity(oldTree, newTree)));
            writer.write("\t");
            writer.write(String.valueOf(actions.size()));
            writer.write("\t");
            writer.write(firstValue);
            writer.write("\t");
            writer.write(secondValue);
            writer.write("\n");
        }
    } catch (FileNotFoundException e) {
        log.error("File not found");
        e.printStackTrace();
    } catch (IOException e) {
        log.error("Error initializing stream");
        e.printStackTrace();
    }
    log.info("Completed output_" + mes.getName());
}
/**
 * Writes all (i, j) index pairs over the given tree-dump files into
 * memory-mapped text files under {@code outputPath + "pairs/"}, rolling over
 * to a numbered file whenever the current mapped buffer is nearly full.
 *
 * Fixes: (1) the previous FileChannel was leaked on every rollover — it is
 * now closed before the next one is opened; (2) the line that triggered the
 * rollover was silently dropped — it is now written into the new buffer.
 *
 * NOTE(review): the replace() prefix is a hard-coded user path — presumably
 * it should use the same inputPath mechanism as coreLoop; confirm before
 * changing.
 *
 * @param folders    tree-dump files to pair up
 * @param outputPath root folder; pair files go to pairs/
 */
private static void readMessageFiles(List<File> folders, String outputPath) {
    List<String> treesFileNames = new ArrayList<>();
    for (File target : folders) {
        treesFileNames.add(target.toString());
    }
    FileHelper.createDirectory(outputPath + "pairs/");
    log.info("Calculating pairs");
    // treesFileNames = treesFileNames.subList(0,100);
    byte[] buf = new byte[0];
    String line = null;
    try {
        FileChannel rwChannel = new RandomAccessFile(outputPath + "pairs/" + "textfile.txt", "rw").getChannel();
        ByteBuffer wrBuf = rwChannel.map(FileChannel.MapMode.READ_WRITE, 0, Integer.MAX_VALUE);
        int fileCounter = 0;
        for (int i = 0; i < treesFileNames.size(); i++) {
            for (int j = i + 1; j < treesFileNames.size(); j++) {
                line = String.valueOf(i) + "\t" + String.valueOf(j) + "\t"
                        + treesFileNames.get(i).replace("/Users/anilkoyuncu/bugStudy/dataset/GumTreeOutput2", "")
                        + "\t"
                        + treesFileNames.get(j).replace("/Users/anilkoyuncu/bugStudy/dataset/GumTreeOutput2", "")
                        + "\n";
                buf = line.getBytes();
                if (wrBuf.remaining() > 500) {
                    wrBuf.put(buf);
                } else {
                    log.info("Next pair dump");
                    fileCounter++;
                    // Close the exhausted channel before opening the next (was leaked).
                    rwChannel.close();
                    rwChannel = new RandomAccessFile(
                            outputPath + "pairs/" + "textfile" + String.valueOf(fileCounter) + ".txt", "rw").getChannel();
                    wrBuf = rwChannel.map(FileChannel.MapMode.READ_WRITE, 0, Integer.MAX_VALUE);
                    // Write the line that triggered rollover (was silently dropped).
                    wrBuf.put(buf);
                }
            }
        }
        rwChannel.close();
    } catch (FileNotFoundException e) {
        e.printStackTrace();
    } catch (IOException e) {
        e.printStackTrace();
    } catch (java.nio.BufferOverflowException e) {
        log.error(line);
        log.error(String.valueOf(buf.length));
        e.printStackTrace();
    }
    log.info("Done pairs");
}
static final JedisPoolConfig poolConfig = buildPoolConfig();
@@ -1,18 +1,14 @@
package edu.lu.uni.serval.FixPatternParser.violations;
import com.oracle.tools.packager.Log;
import com.rabbitmq.client.Channel;
import com.rabbitmq.client.Connection;
import com.rabbitmq.client.ConnectionFactory;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import redis.clients.jedis.Jedis;
import redis.clients.jedis.JedisPool;
import java.io.File;
import java.io.FilenameFilter;
import java.io.IOException;
import java.nio.file.Files;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collections;
@@ -5,16 +5,14 @@ import akka.actor.ActorSystem;
import edu.lu.uni.serval.MultipleThreadsParser.MessageFile;
import edu.lu.uni.serval.MultipleThreadsParser.ParseFixPatternActor;
import edu.lu.uni.serval.MultipleThreadsParser.WorkMessage;
import edu.lu.uni.serval.config.Configuration;
import edu.lu.uni.serval.utils.FileHelper;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.io.File;
import java.io.FileInputStream;
import java.io.FileNotFoundException;
import java.io.IOException;
import java.util.*;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;
import java.util.stream.Collectors;
import java.util.stream.Stream;
@@ -22,21 +20,8 @@ public class TestHunkParser {
private static Logger log = LoggerFactory.getLogger(TestHunkParser.class);
// public static void main(String[] args) {
public static void main(String inputPath, String outputPath,String numOfWorkers) {
// input data
public static void main(String inputPath, String outputPath,String numOfWorkers,String project) {
//// String rootPath = "/Users/anilkoyuncu/bugStudy";
// String inputPath;
// String outputPath;
// if(args.length > 0){
// inputPath = args[1];
// outputPath = args[0];
// }else{
//// inputPath = "/Users/anilkoyuncu/bugStudy/dataset/GumTreeInputBug4";
// inputPath = "/Users/anilkoyuncu/bugStudy/dataset/GumTreeInputBug13April";
//// outputPath = "/Users/anilkoyuncu/bugStudy/code/python/GumTreeOutput2/";
// outputPath = "/Users/anilkoyuncu/bugStudy/dataset/GumTreeOutput13April";
// }
String parameters = String.format("\nInput path %s \nOutput path %s",inputPath,outputPath);
log.info(parameters);
@@ -48,51 +33,39 @@ public class TestHunkParser {
.filter(x -> !x.getName().startsWith("."))
.collect(Collectors.toList());
// List<File> targetList = new ArrayList<File>();
// for (File f:folders){
// for(File f1 :f.listFiles()){
// if (!f1.getName().startsWith(".")){
// targetList.add(f1);
// }
// }
// }
// List<String> pjList = Arrays.asList("DATAJPA","ZXing","PDE","SWS","SWT", "SWF", "COLLECTIONS", "JDT");
for (File target : folders) {
String pjName = target.getName();
// if (!pjList.contains(pjName)){
// continue;
// }
final List<MessageFile> msgFiles = getMessageFiles(target.toString() + "/", outputPath); //"/Users/anilkoyuncu/bugStudy/code/python/GumTreeInput/Apache/CAMEL/"
final List<MessageFile> msgFiles = getMessageFiles(target.toString() + "/"); //"/Users/anilkoyuncu/bugStudy/code/python/GumTreeInput/Apache/CAMEL/"
System.out.println(msgFiles.size());
if(msgFiles.size() == 0)
continue;
String GUM_TREE_OUTPUT = outputPath + "/"+ pjName + "/";
final String editScriptsFilePath = GUM_TREE_OUTPUT + "editScripts.list";
final String patchesSourceCodeFilePath =GUM_TREE_OUTPUT + "patchSourceCode.list";
final String buggyTokensFilePath = GUM_TREE_OUTPUT + "tokens.list";
final String editScriptSizesFilePath = GUM_TREE_OUTPUT + "editScriptSizes.csv";
final String alarmTypesFilePath = GUM_TREE_OUTPUT + "alarmTypes.list";
// final String editScriptsFilePath = GUM_TREE_OUTPUT + "editScripts.list";
// final String patchesSourceCodeFilePath =GUM_TREE_OUTPUT + "patchSourceCode.list";
// final String buggyTokensFilePath = GUM_TREE_OUTPUT + "tokens.list";
// final String editScriptSizesFilePath = GUM_TREE_OUTPUT + "editScriptSizes.csv";
// final String alarmTypesFilePath = GUM_TREE_OUTPUT + "alarmTypes.list";
FileHelper.createDirectory(GUM_TREE_OUTPUT + "/UPD");
FileHelper.createDirectory(GUM_TREE_OUTPUT + "/INS");
FileHelper.createDirectory(GUM_TREE_OUTPUT + "/DEL");
FileHelper.createDirectory(GUM_TREE_OUTPUT + "/MOV");
FileHelper.deleteDirectory(editScriptsFilePath);
FileHelper.deleteDirectory(patchesSourceCodeFilePath);
FileHelper.deleteDirectory(buggyTokensFilePath);
FileHelper.deleteDirectory(editScriptSizesFilePath);
FileHelper.deleteDirectory(alarmTypesFilePath);
// FileHelper.deleteDirectory(editScriptsFilePath);
// FileHelper.deleteDirectory(patchesSourceCodeFilePath);
// FileHelper.deleteDirectory(buggyTokensFilePath);
// FileHelper.deleteDirectory(editScriptSizesFilePath);
// FileHelper.deleteDirectory(alarmTypesFilePath);
StringBuilder astEditScripts = new StringBuilder();
StringBuilder tokens = new StringBuilder();
StringBuilder sizes = new StringBuilder();
StringBuilder patches = new StringBuilder();
StringBuilder alarmTypes = new StringBuilder();
// StringBuilder astEditScripts = new StringBuilder();
// StringBuilder tokens = new StringBuilder();
// StringBuilder sizes = new StringBuilder();
// StringBuilder patches = new StringBuilder();
// StringBuilder alarmTypes = new StringBuilder();
int a = 0;
@@ -102,77 +75,32 @@ public class TestHunkParser {
try {
log.info("Akka begins...");
system = ActorSystem.create("Mining-FixPattern-System");
parsingActor = system.actorOf(ParseFixPatternActor.props(Integer.valueOf(numOfWorkers), editScriptsFilePath,
patchesSourceCodeFilePath, buggyTokensFilePath, editScriptSizesFilePath), "mine-fix-pattern-actor");
parsingActor = system.actorOf(ParseFixPatternActor.props(Integer.valueOf(numOfWorkers), project), "mine-fix-pattern-actor");
parsingActor.tell(msg, ActorRef.noSender());
} catch (Exception e) {
system.shutdown();
e.printStackTrace();
}
// int counter = 0;
// for (MessageFile msgFile : msgFiles) {
// FixedViolationHunkParser parser = new FixedViolationHunkParser();
//
// final ExecutorService executor = Executors.newSingleThreadExecutor();
// // schedule the work
// final Future<?> future = executor.submit(new RunnableParser(msgFile.getPrevFile(),
// msgFile.getRevFile(), msgFile.getDiffEntryFile(), parser));
// try {
// // where we wait for task to complete
// future.get(Configuration.SECONDS_TO_WAIT, TimeUnit.SECONDS);
// String editScripts = parser.getAstEditScripts();
// if (!editScripts.equals("")) {
// astEditScripts.append(editScripts);
// tokens.append(parser.getTokensOfSourceCode());
// sizes.append(parser.getSizes());
// patches.append(parser.getPatchesSourceCode());
// alarmTypes.append(parser.getAlarmTypes());
//
// a++;
// if (a % 100 == 0) {
// FileHelper.outputToFile(editScriptsFilePath, astEditScripts, true);
// FileHelper.outputToFile(buggyTokensFilePath, tokens, true);
// FileHelper.outputToFile(editScriptSizesFilePath, sizes, true);
// FileHelper.outputToFile(patchesSourceCodeFilePath, patches, true);
// FileHelper.outputToFile(alarmTypesFilePath, alarmTypes, true);
// astEditScripts.setLength(0);
// tokens.setLength(0);
// sizes.setLength(0);
// patches.setLength(0);
// alarmTypes.setLength(0);
// System.out.println("Finish of parsing " + a + " files......");
// }
// }
// } catch (TimeoutException e) {
// err.println("task timed out");
// future.cancel(true /* mayInterruptIfRunning */);
// } catch (InterruptedException e) {
// err.println("task interrupted");
// } catch (ExecutionException e) {
// err.println("task aborted");
// } finally {
// executor.shutdownNow();
// }
// }
FileHelper.outputToFile(editScriptsFilePath, astEditScripts, true);
FileHelper.outputToFile(buggyTokensFilePath, tokens, true);
FileHelper.outputToFile(editScriptSizesFilePath, sizes, true);
FileHelper.outputToFile(patchesSourceCodeFilePath, patches, true);
FileHelper.outputToFile(alarmTypesFilePath, alarmTypes, true);
astEditScripts.setLength(0);
tokens.setLength(0);
sizes.setLength(0);
patches.setLength(0);
alarmTypes.setLength(0);
System.out.println(a);
// FileHelper.outputToFile(editScriptsFilePath, astEditScripts, true);
// FileHelper.outputToFile(buggyTokensFilePath, tokens, true);
// FileHelper.outputToFile(editScriptSizesFilePath, sizes, true);
// FileHelper.outputToFile(patchesSourceCodeFilePath, patches, true);
// FileHelper.outputToFile(alarmTypesFilePath, alarmTypes, true);
// astEditScripts.setLength(0);
// tokens.setLength(0);
// sizes.setLength(0);
// patches.setLength(0);
// alarmTypes.setLength(0);
// System.out.println(a);
// classifyByAlarmTypes();
}
}
private static List<MessageFile> getMessageFiles(String gumTreeInput,String outputPath) {
private static List<MessageFile> getMessageFiles(String gumTreeInput) {
String inputPath = gumTreeInput; // prevFiles revFiles diffentryFile positionsFile
File revFilesPath = new File(inputPath + "revFiles/");
File[] revFiles = revFilesPath.listFiles(); // project folders
@@ -197,132 +125,6 @@ public class TestHunkParser {
return null;
}
}
/**
 * Splits the four per-patch output files (edit-script sizes, edit scripts,
 * buggy tokens, patch source code) into per-alarm-type files, using the
 * alarm type recorded for each line/patch in Configuration.ALARM_TYPES_FILE.
 */
public static void classifyByAlarmTypes() {
final String alarmTypesFilePath = Configuration.ALARM_TYPES_FILE;
List<String> alarmTypes = readStringList(alarmTypesFilePath);
//edit scripts, sizes of edit scripts, buggy tokens, patches.
classifyByAlarmTypes(alarmTypes, Configuration.EDITSCRIPT_SIZES_FILE);
classifyByAlarmTypes(alarmTypes, Configuration.EDITSCRIPTS_FILE);
classifyByAlarmTypes(alarmTypes, Configuration.BUGGY_CODE_TOKENS_FILE);
// Patch source code is multi-line per record, so it uses the variant parser.
classifyByAlarmTypes2(alarmTypes, Configuration.PATCH_SOURCECODE_FILE);
}
/**
 * Classifies each line of {@code file} under the alarm type with the same
 * index in {@code alarmTypes}, buffering per-type output and flushing every
 * 1000 lines via {@code outputBuilders}.
 *
 * Fixes: streams are opened in try-with-resources — the original's finally
 * block called {@code scanner.close()} on a null reference (NPE) whenever
 * the file could not be opened.
 *
 * @param alarmTypes alarm type per line, parallel to the file's lines
 * @param file       the line-oriented file to split by alarm type
 */
private static void classifyByAlarmTypes(List<String> alarmTypes, String file) {
    Map<String, StringBuilder> buildersMap = new HashMap<>();
    try (FileInputStream fis = new FileInputStream(file);
         Scanner scanner = new Scanner(fis)) {
        int counter = 0;
        while (scanner.hasNextLine()) {
            String alarmType = alarmTypes.get(counter);
            StringBuilder builder = getBuilder(buildersMap, alarmType);
            builder.append(scanner.nextLine() + "\n");
            counter++;
            // Flush periodically to bound memory.
            if (counter % 1000 == 0) {
                outputBuilders(buildersMap, file);
            }
        }
    } catch (FileNotFoundException e) {
        e.printStackTrace();
    } catch (IOException e) {
        e.printStackTrace();
    }
    // Flush whatever remains.
    outputBuilders(buildersMap, file);
}
/**
 * Classifies multi-line patch records from {@code patchSourcecodeFile} by
 * alarm type; records are delimited by Configuration.PATCH_SIGNAL lines.
 * Buffers per-type output and flushes every 2000 records.
 *
 * Fixes: streams are opened in try-with-resources — the original's finally
 * block called {@code scanner.close()} on a null reference (NPE) whenever
 * the file could not be opened. Record-splitting logic is preserved as-is.
 *
 * @param alarmTypes          alarm type per patch record, by record index
 * @param patchSourcecodeFile the multi-line patch file to split
 */
private static void classifyByAlarmTypes2(List<String> alarmTypes, String patchSourcecodeFile) {
    Map<String, StringBuilder> buildersMap = new HashMap<>();
    try (FileInputStream fis = new FileInputStream(patchSourcecodeFile);
         Scanner scanner = new Scanner(fis)) {
        int counter = 0;
        String singlePatch = "";
        while (scanner.hasNextLine()) {
            String line = scanner.nextLine();
            if (Configuration.PATCH_SIGNAL.equals(line)) {
                if (!"".equals(singlePatch)) {
                    String alarmType = alarmTypes.get(counter);
                    StringBuilder builder = getBuilder(buildersMap, alarmType);
                    // NOTE(review): appends the NEXT line rather than the
                    // accumulated singlePatch — looks suspicious, but preserved
                    // as-is; confirm intent before changing.
                    builder.append(scanner.nextLine() + "\n");
                    counter++;
                    if (counter % 2000 == 0) {
                        outputBuilders(buildersMap, patchSourcecodeFile);
                    }
                }
                singlePatch = line + "\n";
            }
            singlePatch += line + "\n";
        }
    } catch (FileNotFoundException e) {
        e.printStackTrace();
    } catch (IOException e) {
        e.printStackTrace();
    }
    // Flush whatever remains.
    outputBuilders(buildersMap, patchSourcecodeFile);
}
/**
 * Appends each alarm type's buffered text to
 * {@code <parent-of-fileNameStr>/<alarmType>/<file-name>} and resets the
 * buffer so the map can keep accumulating.
 *
 * @param map         per-alarm-type buffered output, keyed by alarm type
 * @param fileNameStr the original file whose name and parent dir are reused
 */
private static void outputBuilders(Map<String, StringBuilder> map, String fileNameStr) {
    File target = new File(fileNameStr);
    String baseName = target.getName();
    String parentDir = target.getParent();
    for (Map.Entry<String, StringBuilder> entry : map.entrySet()) {
        StringBuilder pending = entry.getValue();
        // Route each buffer into a per-alarm-type subdirectory (append mode).
        FileHelper.outputToFile(parentDir + "/" + entry.getKey() + "/" + baseName, pending, true);
        pending.setLength(0);
        entry.setValue(pending);
    }
}
/**
 * Reads every line of {@code inputFile} into a list, in file order.
 *
 * Fixes: the resources are opened in try-with-resources — the original's
 * finally block called {@code scanner.close()} on a null reference, so a
 * missing file produced an NPE instead of the intended empty list.
 *
 * @param inputFile path of the text file to read
 * @return the file's lines, or an empty list when the file cannot be opened
 */
public static List<String> readStringList(String inputFile) {
    List<String> list = new ArrayList<>();
    try (FileInputStream fis = new FileInputStream(inputFile);
         Scanner scanner = new Scanner(fis)) {
        while (scanner.hasNextLine()) {
            list.add(scanner.nextLine());
        }
    } catch (FileNotFoundException e) {
        e.printStackTrace();
    } catch (IOException e) {
        e.printStackTrace();
    }
    return list;
}
/**
 * Returns the StringBuilder buffering output for {@code alarmType},
 * creating and registering a fresh one on first use.
 *
 * Idiom fix: the manual containsKey/put dance is exactly
 * {@link Map#computeIfAbsent} — same behavior, one line.
 *
 * @param buildersMap per-alarm-type buffers
 * @param alarmType   the alarm type key
 * @return the (possibly newly created) buffer for that alarm type
 */
private static StringBuilder getBuilder(Map<String, StringBuilder> buildersMap, String alarmType) {
    return buildersMap.computeIfAbsent(alarmType, key -> new StringBuilder());
}
}
@@ -102,8 +102,7 @@ public class TestHunkParserSingleFile {
try {
log.info("Akka begins...");
system = ActorSystem.create("Mining-FixPattern-System");
parsingActor = system.actorOf(ParseFixPatternActor.props(1, editScriptsFilePath,
patchesSourceCodeFilePath, buggyTokensFilePath, editScriptSizesFilePath), "mine-fix-pattern-actor");
parsingActor = system.actorOf(ParseFixPatternActor.props(1, "dataset"), "mine-fix-pattern-actor");
parsingActor.tell(msg, ActorRef.noSender());
} catch (Exception e) {
system.shutdown();
+72 -250
View File
@@ -5,315 +5,137 @@ import edu.lu.uni.serval.FixPatternParser.violations.CallShell;
import edu.lu.uni.serval.FixPatternParser.violations.MultiThreadTreeLoaderCluster;
import edu.lu.uni.serval.FixPatternParser.violations.MultiThreadTreeLoaderCluster3;
import edu.lu.uni.serval.FixPatternParser.violations.TestHunkParser;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.io.FileInputStream;
import java.io.IOException;
import java.util.Properties;
/**
* Created by anilkoyuncu on 14/04/2018.
*/
public class Launcher {
public static void main(String[] args) {
private static Logger log = LoggerFactory.getLogger(Launcher.class);
public static void main(String[] args) throws IOException {
Properties appProps = new Properties();
String appConfigPath = args[0];
// String appConfigPath = "/Users/anilkoyuncu/bugStudy/release/code/app.properties";
appProps.load(new FileInputStream(appConfigPath));
String portInner = appProps.getProperty("portInner","6380");
String serverWait = appProps.getProperty("serverWait", "50000");
String numOfWorkers = appProps.getProperty("numOfWorkers", "10");
String jobType = appProps.getProperty("jobType","ALL");
String port = appProps.getProperty("port","6399");
String pythonPath = appProps.getProperty("pythonPath","/Users/anilkoyuncu/bugStudy/code/python");
String datasetPath = appProps.getProperty("datasetPath","/Users/anilkoyuncu/bugStudy/dataset");
String pjName = appProps.getProperty("pjName","allDataset");
String dbNo = appProps.getProperty("dbNo","0");
String actionType = appProps.getProperty("actionType","UPD");
String threshold = appProps.getProperty("threshold","1");
String parameters = String.format("\nportInner %s " +
"\nserverWait %s \nnumOfWorkers %s " +
"\njobType %s \nport %s " +
"\npythonPath %s \ndatasetPath %s" +
"\npjName %s \ndbNo %s \nactionType %s \nthreshold %s"
, portInner, serverWait, numOfWorkers, jobType, port, pythonPath,datasetPath,pjName,dbNo,actionType,threshold);
log.info(parameters);
mainLaunch(portInner, serverWait, numOfWorkers, jobType, port, pythonPath,datasetPath,pjName,dbNo,actionType,threshold);
}
public static void mainLaunch(String portInner,String serverWait, String numOfWorkers,String jobType,String port, String pythonPath, String datasetPath, String pjName, String dbNo, String actionType,String threshold){
// String inputPath;
String portInner;
String serverWait;
String dbDir;
String chunkName;
String numOfWorkers;
String jobType;
String port;
String pairsPath;
String csvInputPath;
String dumpsName;
String gumInput;
String gumOutput;
String datasetPath;
String pjName;
String pythonPath;
String dbNo;
String actionType;
if (args.length > 0) {
jobType = args[0];
portInner = args[1];
serverWait = args[2];
numOfWorkers = args[3];
port = args[4];
pythonPath = args[5];
datasetPath = args[6];
pjName = args[7];
dbNo = args[8];
actionType = args[9];
// gumInput = args[1];
// chunkName = args[4];
// dbDir = args[6];
// pairsPath = args[8];
// csvInputPath = args[9];
// gumOutput =args[12];
} else {
// inputPath = "/Users/anilkoyuncu/bugStudy/dataset/pairs";
// gumInput = "/Users/anilkoyuncu/bugStudy/dataset/Defects4J/";
portInner = "6380";
serverWait = "50000";
// chunkName = "Bug13April.txt.csv.rdb";
// dbDir = "/Users/anilkoyuncu/bugStudy/dataset/redis";
numOfWorkers = "10";
jobType = "L3DB";
port = "6399";
pythonPath = "/Users/anilkoyuncu/bugStudy/code/python";
// pairsPath = "/Users/anilkoyuncu/bugStudy/dataset/pairsImportDefects4J";
// gumOutput = "/Users/anilkoyuncu/bugStudy/dataset/GumTreeOutputDefects4J";
// csvInputPath = "/Users/anilkoyuncu/bugStudy/dataset/pairsImportDefects4J-CSV";
// dumpsName = "dumps-Bug13April.rdb";
datasetPath = "/Users/anilkoyuncu/bugStudy/dataset";
pjName = "allDataset";
dbNo = "0";
actionType ="UPD";
}
gumInput = datasetPath +"/"+pjName+"/";
gumOutput = datasetPath + "/GumTreeOutput" + pjName;
gumOutput = datasetPath + "/EnhancedASTDiff" + pjName;
dbDir = datasetPath + "/redis";
pairsPath = datasetPath + "/pairsImport"+pjName;
dumpsName = "dumps-"+pjName+".rdb";
// csvInputPath = datasetPath + "/pairsImport"+pjName+"-CSV";
// String parameters = String.format("\nJob %s \nInput path %s \nportInner %s \nserverWait %s \nchunkName %s \nnumOfWorks %s \ndbDir %s", jobType, inputPath, portInner, serverWait, chunkName, numOfWorkers, dbDir);
try {
switch (jobType) {
case "DUMPTREE":
TestHunkParser.main(gumInput, gumOutput, numOfWorkers);
case "ENHANCEDASTDIFF":
TestHunkParser.main(gumInput, gumOutput, numOfWorkers, pjName);
break;
case "STORE":
StoreFile.main(gumOutput, portInner, serverWait, dbDir, "INS"+dumpsName,"INS");
StoreFile.main(gumOutput, portInner, serverWait, dbDir, "DEL"+dumpsName,"DEL");
StoreFile.main(gumOutput, portInner, serverWait, dbDir, "UPD"+dumpsName,"UPD");
StoreFile.main(gumOutput, portInner, serverWait, dbDir, "MOV"+dumpsName,"MOV");
case "CACHE":
StoreFile.main(gumOutput, portInner, serverWait, dbDir, actionType+dumpsName,actionType);
break;
case "CALCPAIRS":
CalculatePairs.main(serverWait, dbDir, "INS"+dumpsName, portInner, pairsPath+"INS", pjName+"INS");
CalculatePairs.main(serverWait, dbDir, "DEL"+dumpsName, portInner, pairsPath+"DEL", pjName+"DEL");
CalculatePairs.main(serverWait, dbDir, "UPD"+dumpsName, portInner, pairsPath+"UPD", pjName+"UPD");
CalculatePairs.main(serverWait, dbDir, "MOV"+dumpsName, portInner, pairsPath+"MOV", pjName+"MOV");
break;
case "TRANSFORM":
CallShell cs =new CallShell();
String cmd = "bash "+datasetPath + "/" + "transformCSV.sh" +" %s %s";
String cmd1 = String.format(cmd, pairsPath+"INS",pairsPath+"INS"+"-CSV");
cs.runShell(cmd1);
String cmd2 = String.format(cmd, pairsPath+"UPD",pairsPath+"UPD"+"-CSV");
cs.runShell(cmd2);
String cmd3 = String.format(cmd, pairsPath+"DEL",pairsPath+"DEL"+"-CSV");
cs.runShell(cmd3);
String cmd4 = String.format(cmd, pairsPath+"MOV",pairsPath+"MOV"+"-CSV");
cs.runShell(cmd4);
case "LEVEL1":
CalculatePairs.main(serverWait, dbDir, actionType+dumpsName, portInner, pairsPath+actionType, pjName+actionType);
break;
case "IMPORTPAIRS":
ImportPairs2DB.main(pairsPath+"INS"+"-CSV", portInner, serverWait, dbDir,datasetPath);
ImportPairs2DB.main(pairsPath+"UPD"+"-CSV", portInner, serverWait, dbDir,datasetPath);
ImportPairs2DB.main(pairsPath+"DEL"+"-CSV", portInner, serverWait, dbDir,datasetPath);
ImportPairs2DB.main(pairsPath+"MOV"+"-CSV", portInner, serverWait, dbDir,datasetPath);
break;
case "AKKA":
ImportPairs2DB.main(pairsPath+actionType, portInner, serverWait, dbDir,datasetPath);
AkkaTreeLoader.main(portInner, serverWait, dbDir, pjName +actionType+dbNo+".txt.csv.rdb" , port, actionType+dumpsName);
// AkkaTreeLoader.main(portInner, serverWait, dbDir, pjName +"DEL"+dbNo+".txt.csv.rdb", port, "DEL"+dumpsName);
// AkkaTreeLoader.main(portInner, serverWait, dbDir, pjName +"UPD"+dbNo+".txt.csv.rdb", port, "UPD"+dumpsName);
// AkkaTreeLoader.main(portInner, serverWait, dbDir, pjName +"MOV"+dbNo+".txt.csv.rdb", port, "MOV"+dumpsName);
break;
AkkaTreeLoader.main(portInner, serverWait, dbDir, pjName +actionType+".csv.rdb" , port, actionType+dumpsName);
TreeLoaderClusterL1.main(portInner, serverWait, port, dbDir, "level1-"+pjName+ actionType+".rdb", dbDir ,pjName + actionType);
case "LEVEL1DB":
TreeLoaderClusterL1.main(portInner, serverWait, port, dbDir, "level1-"+pjName+ "UPD"+".rdb", dbDir ,pjName + "UPD");
TreeLoaderClusterL1.main(portInner, serverWait, port, dbDir, "level1-"+pjName+ "INS"+".rdb", dbDir ,pjName + "INS");
TreeLoaderClusterL1.main(portInner, serverWait, port, dbDir, "level1-"+pjName+ "DEL"+".rdb", dbDir ,pjName + "DEL");
TreeLoaderClusterL1.main(portInner, serverWait, port, dbDir, "level1-"+pjName+ "MOV"+".rdb", dbDir ,pjName + "MOV");
break;
case "L1DB":
CallShell cs1 =new CallShell();
String db1 = "bash "+dbDir + "/" + "startServer.sh" +" %s %s %s";
String db11 = String.format(db1, dbDir,"level1-"+pjName+ "INS"+".rdb" ,Integer.valueOf(port));
String db11 = String.format(db1, dbDir,"level1-"+pjName+ actionType+".rdb" ,Integer.valueOf(port));
cs1.runShell(db11,serverWait);
String runpy = "bash "+datasetPath + "/" + "launchPy.sh" +" %s %s %s %s %s";
String formatRunPy = String.format(runpy,pythonPath +"/abstractPatch.py", gumInput, datasetPath + "/cluster"+pjName+ "INS", port, "matches" + pjName + "INS");
String runpy = "bash "+datasetPath + "/" + "launchPy.sh" +" %s %s %s %s %s %s";
String formatRunPy = String.format(runpy,pythonPath +"/abstractPatch.py", gumInput, datasetPath + "/cluster"+pjName+ actionType, port, "matches" + pjName + actionType, threshold);
cs1.runShell(formatRunPy);
String stopServer = "bash "+dbDir + "/" + "stopServer.sh" +" %s";
stopServer = String.format(stopServer,Integer.valueOf(port));
cs1.runShell(stopServer,serverWait);
String db2 = "bash "+dbDir + "/" + "startServer.sh" +" %s %s %s";
String db12 = String.format(db2, dbDir,"level1-"+pjName+ "DEL"+".rdb" ,Integer.valueOf(port));
cs1.runShell(db12,serverWait);
String formatRunPy1 = String.format(runpy,pythonPath +"/abstractPatch.py", gumInput, datasetPath + "/cluster"+pjName+ "DEL", port, "matches" + pjName + "DEL");
cs1.runShell(formatRunPy1);
String stopServer2 = "bash "+dbDir + "/" + "stopServer.sh" +" %s";
stopServer2 = String.format(stopServer2,Integer.valueOf(port));
cs1.runShell(stopServer2,serverWait);
String db3 = "bash "+dbDir + "/" + "startServer.sh" +" %s %s %s";
String db13 = String.format(db3, dbDir,"level1-"+pjName+ "MOV"+".rdb" ,Integer.valueOf(port));
cs1.runShell(db13,serverWait);
String formatRunPy3 = String.format(runpy,pythonPath +"/abstractPatch.py", gumInput, datasetPath + "/cluster"+pjName+ "MOV", port, "matches" + pjName + "MOV");
cs1.runShell(formatRunPy3);
String stopServer3 = "bash "+dbDir + "/" + "stopServer.sh" +" %s";
stopServer3 = String.format(stopServer3,Integer.valueOf(port));
cs1.runShell(stopServer3,serverWait);
String db4 = "bash "+dbDir + "/" + "startServer.sh" +" %s %s %s";
String db14 = String.format(db4, dbDir,"level1-"+pjName+ "UPD"+".rdb" ,Integer.valueOf(port));
cs1.runShell(db14,serverWait);
String formatRunPy4 = String.format(runpy,pythonPath +"/abstractPatch.py", gumInput, datasetPath + "/cluster"+pjName+ "UPD", port, "matches" + pjName + "UPD");
cs1.runShell(formatRunPy4);
String stopServer4 = "bash "+dbDir + "/" + "stopServer.sh" +" %s";
stopServer4 = String.format(stopServer4,Integer.valueOf(port));
cs1.runShell(stopServer4,serverWait);
break;
//CALC python abstractPatch.py to from cluster folder
case "L2CALCPAIRS":
// MultiThreadTreeLoaderCluster.calculatePairsOfClusters("/Users/anilkoyuncu/bugStudy/code/python/clusterDefect4J","/Users/anilkoyuncu/bugStudy/dataset/");
MultiThreadTreeLoaderCluster.calculatePairsOfClusters(datasetPath + "/cluster"+pjName+ "MOV", datasetPath,"MOV");
MultiThreadTreeLoaderCluster.calculatePairsOfClusters(datasetPath + "/cluster"+pjName+ "INS", datasetPath,"INS");
MultiThreadTreeLoaderCluster.calculatePairsOfClusters(datasetPath + "/cluster"+pjName+ "DEL", datasetPath,"DEL");
MultiThreadTreeLoaderCluster.calculatePairsOfClusters(datasetPath + "/cluster"+pjName+ "UPD", datasetPath,"UPD");
break;
case "LEVEL2":
MultiThreadTreeLoaderCluster.calculatePairsOfClusters(datasetPath + "/cluster"+pjName+ actionType, datasetPath,actionType);
case "TRANSFORM2":
CallShell cs2 =new CallShell();
String cmdL2 = "bash "+datasetPath + "/" + "transformCSV.sh" +" %s %s";
String cmd1a = String.format(cmdL2, datasetPath+"/pairsINS",datasetPath+"/pairsINS"+"-CSV");
cs2.runShell(cmd1a);
String cmd2a = String.format(cmdL2, datasetPath+"/pairsUPD",datasetPath+"/pairsUPD"+"-CSV");
cs2.runShell(cmd2a);
String cmd3a = String.format(cmdL2, datasetPath+"/pairsDEL",datasetPath+"/pairsDEL"+"-CSV");
cs2.runShell(cmd3a);
String cmd4a = String.format(cmdL2, datasetPath+"/pairsMOV",datasetPath+"/pairsMOV"+"-CSV");
cs2.runShell(cmd4a);
MultiThreadTreeLoaderCluster.mainCompare("6300", datasetPath+"/pairs"+actionType, datasetPath + "/redisSingleImport.sh", dbDir, "clusterl1-"+pjName+actionType+".rdb", actionType+dumpsName, "6301",serverWait,actionType);
break;
case "L2PAIRDB":
// MultiThreadTreeLoaderCluster.mainCompare("6300","/Users/anilkoyuncu/bugStudy/dataset/pairs-csv","/Users/anilkoyuncu/bugStudy/dataset/redisSingleImport.sh",dbDir,"clusterl1-d4j.rdb",dumpsName,"6301");
MultiThreadTreeLoaderCluster.mainCompare("6300", datasetPath+"/pairsINS"+"-CSV", datasetPath + "/redisSingleImport.sh", dbDir, "clusterl1-"+pjName+"INS.rdb", "INS"+dumpsName, "6301",serverWait,"INS");
MultiThreadTreeLoaderCluster.mainCompare("6300", datasetPath+"/pairsUPD"+"-CSV", datasetPath + "/redisSingleImport.sh", dbDir, "clusterl1-"+pjName+"UPD.rdb", "UPD"+dumpsName, "6301",serverWait,"UPD");
MultiThreadTreeLoaderCluster.mainCompare("6300", datasetPath+"/pairsDEL"+"-CSV", datasetPath + "/redisSingleImport.sh", dbDir, "clusterl1-"+pjName+"DEL.rdb", "DEL"+dumpsName, "6301",serverWait,"DEL");
MultiThreadTreeLoaderCluster.mainCompare("6300", datasetPath+"/pairsMOV"+"-CSV", datasetPath + "/redisSingleImport.sh", dbDir, "clusterl1-"+pjName+"MOV.rdb", "MOV"+dumpsName, "6301",serverWait,"MOV");
break;
case "L2DB":
CallShell cs3 =new CallShell();
String db22 = "bash "+dbDir + "/" + "startServer.sh" +" %s %s %s";
String db1b = String.format(db22, dbDir,"clusterl1-"+pjName+"INS.rdb",Integer.valueOf(port));
String db1b = String.format(db22, dbDir,"clusterl1-"+pjName+actionType+".rdb",Integer.valueOf(port));
cs3.runShell(db1b,serverWait);
String runpy2 = "bash "+datasetPath + "/" + "launchPy.sh" +" %s %s %s %s %s";
String formatRunPy1a = String.format(runpy2,pythonPath +"/abstractPatchCluster.py", gumInput, datasetPath + "/cluster"+pjName+ "INS", port, datasetPath + "/cluster-2l"+pjName+ "INS");
String runpy2 = "bash "+datasetPath + "/" + "launchPy.sh" +" %s %s %s %s %s %s";
String formatRunPy1a = String.format(runpy2,pythonPath +"/abstractPatchCluster.py", gumInput, datasetPath + "/cluster"+pjName+ actionType, port, datasetPath + "/cluster-2l"+pjName+ actionType,threshold);
cs3.runShell(formatRunPy1a);
String stopServer1a = "bash "+dbDir + "/" + "stopServer.sh" +" %s";
stopServer = String.format(stopServer1a,Integer.valueOf(port));
cs3.runShell(stopServer,serverWait);
String db2b = String.format(db22, dbDir,"clusterl1-"+pjName+"UPD.rdb",Integer.valueOf(port));
cs3.runShell(db2b,serverWait);
String formatRunPy2a = String.format(runpy2,pythonPath +"/abstractPatchCluster.py", gumInput, datasetPath + "/cluster"+pjName+ "UPD", port, datasetPath + "/cluster-2l"+pjName+ "UPD");
cs3.runShell(formatRunPy2a);
stopServer = String.format(stopServer1a,Integer.valueOf(port));
cs3.runShell(stopServer,serverWait);
String db3b = String.format(db22, dbDir,"clusterl1-"+pjName+"DEL.rdb",Integer.valueOf(port));
cs3.runShell(db3b,serverWait);
String formatRunPy3a = String.format(runpy2,pythonPath +"/abstractPatchCluster.py", gumInput, datasetPath + "/cluster"+pjName+ "DEL", port, datasetPath + "/cluster-2l"+pjName+ "DEL");
cs3.runShell(formatRunPy3a);
stopServer = String.format(stopServer1a,Integer.valueOf(port));
cs3.runShell(stopServer,serverWait);
String db4b = String.format(db22, dbDir,"clusterl1-"+pjName+"MOV.rdb",Integer.valueOf(port));
cs3.runShell(db4b,serverWait);
String formatRunPy4a = String.format(runpy2,pythonPath +"/abstractPatchCluster.py", gumInput, datasetPath + "/cluster"+pjName+ "MOV", port, datasetPath + "/cluster-2l"+pjName+ "MOV");
cs3.runShell(formatRunPy4a);
stopServer = String.format(stopServer1a,Integer.valueOf(port));
cs3.runShell(stopServer,serverWait);
break;
//CALC via python
case "L3CALCPAIRS":
// MultiThreadTreeLoaderCluster3.calculatePairsOfClusters("/Users/anilkoyuncu/bugStudy/code/python/clusterDefect4J-2l",datasetPath);
// MultiThreadTreeLoaderCluster3.calculatePairsOfClusters("/Users/anilkoyuncu/bugStudy/code/python/clusterBug13April-2l", datasetPath);
MultiThreadTreeLoaderCluster3.calculatePairsOfClusters(datasetPath + "/cluster-2l"+pjName+ "UPD", datasetPath,"UPD");
MultiThreadTreeLoaderCluster3.calculatePairsOfClusters(datasetPath + "/cluster-2l"+pjName+ "INS", datasetPath,"INS");
MultiThreadTreeLoaderCluster3.calculatePairsOfClusters(datasetPath + "/cluster-2l"+pjName+ "DEL", datasetPath,"DEL");
MultiThreadTreeLoaderCluster3.calculatePairsOfClusters(datasetPath + "/cluster-2l"+pjName+ "MOV", datasetPath,"MOV");
break;
case "LEVEL3":
MultiThreadTreeLoaderCluster3.calculatePairsOfClusters(datasetPath + "/cluster-2l"+pjName+ actionType, datasetPath,actionType);
case "TRANSFORM3":
CallShell cs4 =new CallShell();
String cmdL4 = "bash "+datasetPath + "/" + "transformCSV.sh" +" %s %s";
String cmd5a = String.format(cmdL4, datasetPath+"/pairs-2lINS",datasetPath+"/pairs-2lINS"+"-CSV");
cs4.runShell(cmd5a);
String cmd5b = String.format(cmdL4, datasetPath+"/pairs-2lUPD",datasetPath+"/pairs-2lUPD"+"-CSV");
cs4.runShell(cmd5b);
String cmd5c = String.format(cmdL4, datasetPath+"/pairs-2lDEL",datasetPath+"/pairs-2lDEL"+"-CSV");
cs4.runShell(cmd5c);
String cmd5d = String.format(cmdL4, datasetPath+"/pairs-2lMOV",datasetPath+"/pairs-2lMOV"+"-CSV");
cs4.runShell(cmd5d);
MultiThreadTreeLoaderCluster3.mainCompare("6300", datasetPath+"/pairs-2l"+actionType, datasetPath + "/redisSingleImport.sh", dbDir, "clusterl2-"+pjName+actionType+".rdb", actionType+dumpsName, "6301",serverWait,actionType);
break;
case "L3PAIRDB":
// MultiThreadTreeLoaderCluster3.mainCompare("6300", datasetPath + "/pairs-2l-csv", datasetPath + "/redisSingleImport.sh", dbDir, "clusterl2-13april.rdb", "UPD"+dumpsName, "6301",serverWait,"UPD");
MultiThreadTreeLoaderCluster3.mainCompare("6300", datasetPath+"/pairs-2lMOV"+"-CSV", datasetPath + "/redisSingleImport.sh", dbDir, "clusterl2-"+pjName+"MOV.rdb", "MOV"+dumpsName, "6301",serverWait,"MOV");
MultiThreadTreeLoaderCluster3.mainCompare("6300", datasetPath+"/pairs-2lDEL"+"-CSV", datasetPath + "/redisSingleImport.sh", dbDir, "clusterl2-"+pjName+"DEL.rdb", "DEL"+dumpsName, "6301",serverWait,"DEL");
MultiThreadTreeLoaderCluster3.mainCompare("6300", datasetPath+"/pairs-2lUPD"+"-CSV", datasetPath + "/redisSingleImport.sh", dbDir, "clusterl2-"+pjName+"UPD.rdb", "UPD"+dumpsName, "6301",serverWait,"UPD");
MultiThreadTreeLoaderCluster3.mainCompare("6300", datasetPath+"/pairs-2lINS"+"-CSV", datasetPath + "/redisSingleImport.sh", dbDir, "clusterl2-"+pjName+"INS.rdb", "INS"+dumpsName, "6301",serverWait,"INS");
break;
case "L3DB":
CallShell cs5 =new CallShell();
String dba = "bash "+dbDir + "/" + "startServer.sh" +" %s %s %s";
String dbaa = String.format(dba, dbDir,"clusterl2-"+pjName+"INS.rdb",Integer.valueOf(port));
String dbaa = String.format(dba, dbDir,"clusterl2-"+pjName+actionType+".rdb",Integer.valueOf(port));
cs5.runShell(dbaa,serverWait);
String runpya = "bash "+datasetPath + "/" + "launchPy.sh" +" %s %s %s %s %s";
String formatRunPya = String.format(runpya,pythonPath +"/abstractPatchClusterLevel3.py", gumInput, datasetPath + "/cluster-3l"+pjName+ "INS", port, datasetPath + "/cluster-2l"+pjName+ "INS");
String runpya = "bash "+datasetPath + "/" + "launchPy.sh" +" %s %s %s %s %s %s";
String formatRunPya = String.format(runpya,pythonPath +"/abstractPatchClusterLevel3.py", gumInput, datasetPath + "/cluster-3l"+pjName+ actionType, port, datasetPath + "/cluster-2l"+pjName+ actionType,threshold);
cs5.runShell(formatRunPya);
String stopServera = "bash "+dbDir + "/" + "stopServer.sh" +" %s";
stopServer = String.format(stopServera,Integer.valueOf(port));
cs5.runShell(stopServer,serverWait);
break;
String dbb = String.format(dba, dbDir,"clusterl2-"+pjName+"UPD.rdb",Integer.valueOf(port));
cs5.runShell(dbb,serverWait);
String formatRunPyb = String.format(runpya,pythonPath +"/abstractPatchClusterLevel3.py", gumInput, datasetPath + "/cluster-3l"+pjName+ "UPD", port, datasetPath + "/cluster-2l"+pjName+ "UPD");
cs5.runShell(formatRunPyb);
stopServer = String.format(stopServera,Integer.valueOf(port));
cs5.runShell(stopServer,serverWait);
// String dbc = String.format(dba, dbDir,"clusterl2-"+pjName+"DEL.rdb",Integer.valueOf(port));
// cs5.runShell(dbc,serverWait);
// String formatRunPyc = String.format(runpya,pythonPath +"/abstractPatchClusterLevel3.py", gumInput, datasetPath + "/cluster-3l"+pjName+ "DEL", port, datasetPath + "/cluster-2l"+pjName+ "DEL");
// cs5.runShell(formatRunPyc);
// stopServer = String.format(stopServera,Integer.valueOf(port));
// cs5.runShell(stopServer,serverWait);
////
// String dbd = String.format(dba, dbDir,"clusterl2-"+pjName+"MOV.rdb",Integer.valueOf(port));
// cs5.runShell(dbd,serverWait);
// String formatRunPyd = String.format(runpya,pythonPath +"/abstractPatchClusterLevel3.py", gumInput, datasetPath + "/cluster-3l"+pjName+ "MOV", port, datasetPath + "/cluster-2l"+pjName+ "MOV");
// cs5.runShell(formatRunPyd);
// stopServer = String.format(stopServera,Integer.valueOf(port));
// cs5.runShell(stopServer,serverWait);
}
} catch (Exception e) {
e.printStackTrace();
@@ -80,8 +80,8 @@ public class AkkaParser {
try {
log.info("Akka begins...");
system = ActorSystem.create("Mining-FixPattern-System");
parsingActor = system.actorOf(ParseFixPatternActor.props(numberOfWorkers, editScriptsFilePath,
patchesSourceCodeFilePath, buggyTokensFilePath, editScriptSizesFilePath), "mine-fix-pattern-actor");
// parsingActor = system.actorOf(ParseFixPatternActor.props(numberOfWorkers, editScriptsFilePath,
// patchesSourceCodeFilePath, buggyTokensFilePath, editScriptSizesFilePath), "mine-fix-pattern-actor");
parsingActor.tell(msg, ActorRef.noSender());
} catch (Exception e) {
system.shutdown();
@@ -99,8 +99,8 @@ public class AkkaParser2 {
try {
log.info("Akka begins...");
system = ActorSystem.create("Mining-FixPattern-System");
parsingActor = system.actorOf(ParseFixPatternActor.props(numberOfWorkers, editScriptsFilePath,
patchesSourceCodeFilePath, buggyTokensFilePath, editScriptSizesFilePath), "mine-fix-pattern-actor");
// parsingActor = system.actorOf(ParseFixPatternActor.props(numberOfWorkers, editScriptsFilePath,
// patchesSourceCodeFilePath, buggyTokensFilePath, editScriptSizesFilePath), "mine-fix-pattern-actor");
parsingActor.tell(msg, ActorRef.noSender());
} catch (Exception e) {
system.shutdown();
@@ -19,16 +19,13 @@ public class ParseFixPatternActor extends UntypedActor {
private final int numberOfWorkers;
private int counter = 0;
public ParseFixPatternActor(int numberOfWorkers, String editScriptsFilePath, String patchesSourceCodeFilePath,
String buggyTokensFilePath, String editScriptSizesFilePath) {
public ParseFixPatternActor(int numberOfWorkers, String project) {
mineRouter = this.getContext().actorOf(new RoundRobinPool(numberOfWorkers)
.props(ParseFixPatternWorker.props(editScriptsFilePath, patchesSourceCodeFilePath,
buggyTokensFilePath, editScriptSizesFilePath)), "mine-fix-pattern-router");
.props(ParseFixPatternWorker.props(project)), "mine-fix-pattern-router");
this.numberOfWorkers = numberOfWorkers;
}
public static Props props(final int numberOfWorkers, final String editScriptsFilePath, final String patchesSourceCodeFilePath,
final String buggyTokensFilePath, final String editScriptSizesFilePath) {
public static Props props(final int numberOfWorkers, final String project) {
return Props.create(new Creator<ParseFixPatternActor>() {
@@ -36,8 +33,7 @@ public class ParseFixPatternActor extends UntypedActor {
@Override
public ParseFixPatternActor create() throws Exception {
return new ParseFixPatternActor(numberOfWorkers, editScriptsFilePath, patchesSourceCodeFilePath,
buggyTokensFilePath, editScriptSizesFilePath);
return new ParseFixPatternActor(numberOfWorkers, project);
}
});
@@ -27,29 +27,21 @@ import edu.lu.uni.serval.utils.FileHelper;
public class ParseFixPatternWorker extends UntypedActor {
private static Logger log = LoggerFactory.getLogger(ParseFixPatternActor.class);
private String editScriptsFilePath;
private String patchesSourceCodeFilePath;
private String editScriptSizesFilePath;
private String buggyTokensFilePath;
private String project;
public ParseFixPatternWorker(String editScriptsFilePath, String patchesSourceCodeFilePath,
String buggyTokensFilePath, String editScriptSizesFilePath) {
this.editScriptsFilePath = editScriptsFilePath;
this.patchesSourceCodeFilePath = patchesSourceCodeFilePath;
this.editScriptSizesFilePath = editScriptSizesFilePath;
this.buggyTokensFilePath = buggyTokensFilePath;
public ParseFixPatternWorker(String project) {
this.project = project;
}
public static Props props(final String editScriptsFile, final String patchesSourceCodeFile, final String buggyTokensFilePath,
final String editScriptSizesFilePath) {
public static Props props(final String project) {
return Props.create(new Creator<ParseFixPatternWorker>() {
private static final long serialVersionUID = -7615153844097275009L;
@Override
public ParseFixPatternWorker create() throws Exception {
return new ParseFixPatternWorker(editScriptsFile, patchesSourceCodeFile,
buggyTokensFilePath, editScriptSizesFilePath);
return new ParseFixPatternWorker(project);
}
});
@@ -95,7 +87,7 @@ public class ParseFixPatternWorker extends UntypedActor {
final ExecutorService executor = Executors.newSingleThreadExecutor();
// schedule the work
final Future<?> future = executor.submit(new RunnableParser(prevFile, revFile, diffentryFile, parser));
final Future<?> future = executor.submit(new RunnableParser(prevFile, revFile, diffentryFile, parser,project));
try {
// wait for task to complete
future.get(Configuration.SECONDS_TO_WAIT, TimeUnit.SECONDS);
@@ -126,16 +118,16 @@ public class ParseFixPatternWorker extends UntypedActor {
counter ++;
if (counter % 100 == 0) {
FileHelper.outputToFile(editScriptsFilePath + "edistScripts_" + id + ".list", editScripts, true);
FileHelper.outputToFile(patchesSourceCodeFilePath + "patches_" + id + ".list", patchesSourceCode, true);
FileHelper.outputToFile(editScriptSizesFilePath + "sizes_" + id + ".list", sizes, true);
FileHelper.outputToFile(buggyTokensFilePath + "tokens_" + id + ".list", tokens, true);
// FileHelper.outputToFile(editScriptsFilePath + "edistScripts_" + id + ".list", editScripts, true);
// FileHelper.outputToFile(patchesSourceCodeFilePath + "patches_" + id + ".list", patchesSourceCode, true);
// FileHelper.outputToFile(editScriptSizesFilePath + "sizes_" + id + ".list", sizes, true);
// FileHelper.outputToFile(buggyTokensFilePath + "tokens_" + id + ".list", tokens, true);
editScripts.setLength(0);
patchesSourceCode.setLength(0);
sizes.setLength(0);
tokens.setLength(0);
log.info("Worker #" + id +" finialized parsing " + counter + " files...");
FileHelper.outputToFile("OUTPUT/testingInfo_" + id + ".list", testingInfo, true);
// FileHelper.outputToFile("OUTPUT/testingInfo_" + id + ".list", testingInfo, true);
testingInfo.setLength(0);
}
}
@@ -157,10 +149,10 @@ public class ParseFixPatternWorker extends UntypedActor {
}
if (sizes.length() > 0) {
FileHelper.outputToFile(editScriptsFilePath + "editScripts_" + id + ".list", editScripts, true);
FileHelper.outputToFile(patchesSourceCodeFilePath + "patches_" + id + ".list", patchesSourceCode, true);
FileHelper.outputToFile(editScriptSizesFilePath + "sizes_" + id + ".list", sizes, true);
FileHelper.outputToFile(buggyTokensFilePath + "tokens_" + id + ".list", tokens, true);
// FileHelper.outputToFile(editScriptsFilePath + "editScripts_" + id + ".list", editScripts, true);
// FileHelper.outputToFile(patchesSourceCodeFilePath + "patches_" + id + ".list", patchesSourceCode, true);
// FileHelper.outputToFile(editScriptSizesFilePath + "sizes_" + id + ".list", sizes, true);
// FileHelper.outputToFile(buggyTokensFilePath + "tokens_" + id + ".list", tokens, true);
editScripts.setLength(0);
patchesSourceCode.setLength(0);
sizes.setLength(0);
@@ -184,31 +176,4 @@ public class ParseFixPatternWorker extends UntypedActor {
}
}
private int countAlarms(File positionFile, String type) {//, List<Violation> uselessViolations) {
int counter = 0;
String content = FileHelper.readFile(positionFile);
BufferedReader reader = new BufferedReader(new StringReader(content));
String line = null;
try {
while ((line = reader.readLine()) != null) {
String[] elements = line.split(":");
Violation v = new Violation(Integer.parseInt(elements[1]), Integer.parseInt(elements[2]), elements[0]);
String fileName = positionFile.getName().replace(".txt", ".java");
v.setFileName(fileName);
counter ++;
if (!"".equals(type)) {
System.err.println(type + fileName + ":" + elements[1] + ":" + elements[2] + ":" + elements[0]);
}
}
} catch (IOException e) {
e.printStackTrace();
} finally {
try {
reader.close();
} catch (IOException e) {
e.printStackTrace();
}
}
return counter;
}
}
+15
View File
@@ -0,0 +1,15 @@
jobType = LEVEL2
pjName = BugsDotJar
portInner = 6380
dbNo = 0
port = 6399
serverWait = 10000
numOfWorkers = 100
pythonPath = /Users/anilkoyuncu/bugStudy/code/python
datasetPath = /Users/anilkoyuncu/bugStudy/release/code
actionType =UPD
threshold = 9
# Valid jobType values: ENHANCEDASTDIFF, CACHE, LEVEL1, LEVEL2, LEVEL3
+3 -3
View File
@@ -7,14 +7,14 @@
<appender name="STDOUT" class="ch.qos.logback.core.ConsoleAppender">
<encoder class="ch.qos.logback.classic.encoder.PatternLayoutEncoder">
<pattern>%d{yyyy-MM-dd HH:mm:ss.SSS} [%thread] %-5level %logger{50} - %msg%n</pattern>
<pattern>%d{yyyy-MM-dd HH:mm:ss.SSS} [%thread] %-5level %logger{0} - %msg%n</pattern>
</encoder>
</appender>
<appender name="FILE" class="ch.qos.logback.core.FileAppender">
<file>${LOG_HOME}/myLog.log</file>
<encoder>
<pattern>%d{yyyy-MM-dd HH:mm:ss.SSS} [%thread] %-5level %logger{50} - %msg%n</pattern>
<pattern>%d{yyyy-MM-dd HH:mm:ss.SSS} [%thread] %-5level %logger{0} - %msg%n</pattern>
</encoder>
</appender>
@@ -24,7 +24,7 @@
<MaxHistory>30</MaxHistory>
</rollingPolicy>
<encoder class="ch.qos.logback.classic.encoder.PatternLayoutEncoder">
<pattern>%d{yyyy-MM-dd HH:mm:ss.SSS} [%thread] %-5level %logger{50} - %msg%n</pattern>
<pattern>%d{yyyy-MM-dd HH:mm:ss.SSS} [%thread] %-5level %logger{0} - %msg%n</pattern>
</encoder>
<triggeringPolicy class="ch.qos.logback.core.rolling.SizeBasedTriggeringPolicy">
<MaxFileSize>10MB</MaxFileSize>