new without thread block

This commit is contained in:
Anil Koyuncu
2018-04-18 10:18:32 +02:00
parent bbddbdfe59
commit ec0b90a6b0
13 changed files with 294 additions and 151 deletions
+2 -2
View File
@@ -52,8 +52,8 @@
<orderEntry type="library" name="Maven: org.apache.httpcomponents:httpcore:4.3.3" level="project" />
<orderEntry type="library" name="Maven: commons-logging:commons-logging:1.1.3" level="project" />
<orderEntry type="library" name="Maven: org.javatuples:javatuples:1.2" level="project" />
<orderEntry type="library" scope="TEST" name="Maven: junit:junit:4.12" level="project" />
<orderEntry type="library" scope="TEST" name="Maven: org.hamcrest:hamcrest-core:1.3" level="project" />
<orderEntry type="library" name="Maven: junit:junit:4.11" level="project" />
<orderEntry type="library" name="Maven: org.hamcrest:hamcrest-core:1.3" level="project" />
<orderEntry type="library" name="Maven: redis.clients:jedis:2.8.1" level="project" />
<orderEntry type="library" name="Maven: org.apache.commons:commons-pool2:2.4.2" level="project" />
<orderEntry type="library" name="Maven: org.apache.commons:commons-text:1.3" level="project" />
+5
View File
@@ -79,6 +79,11 @@
<artifactId>amqp-client</artifactId>
<version>4.0.0</version>
</dependency>
<dependency>
<groupId>junit</groupId>
<artifactId>junit</artifactId>
<version>4.11</version>
</dependency>
</dependencies>
@@ -4,6 +4,7 @@ import com.github.gumtreediff.tree.ITree;
import com.github.gumtreediff.tree.Tree;
import com.github.gumtreediff.tree.TreeContext;
import edu.lu.uni.serval.FixPattern.utils.ASTNodeMap;
import edu.lu.uni.serval.FixPatternParser.violations.CallShell;
import edu.lu.uni.serval.gumtree.regroup.HierarchicalActionSet;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
@@ -15,6 +16,7 @@ import java.util.*;
import java.util.concurrent.Executors;
import java.util.function.Consumer;
import java.util.stream.Collectors;
import org.junit.Assert;
/**
* Created by anilkoyuncu on 19/03/2018.
@@ -53,9 +55,9 @@ public class AkkaTreeLoader {
StreamGobbler streamGobbler =
new StreamGobbler(process.getInputStream(), System.out::println);
Executors.newSingleThreadExecutor().submit(streamGobbler);
// int exitCode = process.waitFor();
// assert exitCode == 0;
Thread.sleep(Integer.valueOf(serverWait));
int exitCode = process.waitFor();
assert exitCode == 0;
// Thread.sleep(Integer.valueOf(serverWait));
} catch (IOException e) {
e.printStackTrace();
@@ -66,6 +68,8 @@ public class AkkaTreeLoader {
log.info("Load done");
}
private static Consumer<String> consumer = Assert::assertNotNull;
public static void loadRedisWait(String cmd){
Process process;
@@ -77,7 +81,7 @@ public class AkkaTreeLoader {
StreamGobbler streamGobbler =
new StreamGobbler(process.getInputStream(), System.out::println);
new StreamGobbler(process.getInputStream(), consumer);
Executors.newSingleThreadExecutor().submit(streamGobbler);
int exitCode = process.waitFor();
assert exitCode == 0;
@@ -93,7 +97,7 @@ public class AkkaTreeLoader {
}
// public static void main(String[] args) {
public static void main(String portInner,String serverWait,String dbDir,String chunkName,String port, String dumpsName){
public static void main(String portInner,String serverWait,String dbDir,String chunkName,String port, String dumpsName) throws Exception {
// String inputPath;
//// String outputPath;
@@ -141,14 +145,15 @@ public class AkkaTreeLoader {
// calculatePairs(inputPath, port);
// log.info("Calculate pairs done");
// }else {
CallShell cs = new CallShell();
String cmd = "bash "+dbDir + "/" + "startServer.sh" +" %s %s %s";
String cmd1 = String.format(cmd, dbDir,dumpsName,Integer.valueOf(port));
loadRedis(cmd1,serverWait);
// loadRedis(cmd1,serverWait);
cs.runShell(cmd1,serverWait);
String cmdInner = "bash "+dbDir + "/" + "startServer.sh" +" %s %s %s";
String cmd2 = String.format(cmdInner, dbDir,chunkName,Integer.valueOf(portInner));
loadRedis(cmd2,serverWait);
// loadRedis(cmd2,serverWait);
cs.runShell(cmd2,serverWait);
JedisPool outerPool = new JedisPool(poolConfig, "127.0.0.1",Integer.valueOf(port),20000000);
JedisPool innerPool = new JedisPool(poolConfig, "127.0.0.1",Integer.valueOf(portInner),20000000);
@@ -158,12 +163,13 @@ public class AkkaTreeLoader {
String stopServer = "bash "+dbDir + "/" + "stopServer.sh" +" %s";
String stopServer1 = String.format(stopServer,Integer.valueOf(portInner));
loadRedis(stopServer1,serverWait);
// loadRedis(stopServer1,serverWait);
cs.runShell(stopServer1,serverWait);
stopServer = "bash "+dbDir + "/" + "stopServer.sh" +" %s";
String stopServer2 = String.format(stopServer,Integer.valueOf(port));
loadRedis(stopServer2,serverWait);
// loadRedis(stopServer2,serverWait);
// }
cs.runShell(stopServer2,serverWait);
}
@@ -181,7 +187,7 @@ public class AkkaTreeLoader {
ScanParams sc = new ScanParams();
//150000000
sc.count(150000000);
sc.match("pair_*");
sc.match("pair_[0-9]*");
scan = inner.scan("0", sc);
int size = scan.getResult().size();
@@ -1,6 +1,7 @@
package edu.lu.uni.serval.FixPatternParser.cluster;
import com.github.gumtreediff.tree.ITree;
import edu.lu.uni.serval.FixPatternParser.violations.CallShell;
import edu.lu.uni.serval.utils.FileHelper;
import org.javatuples.Pair;
import org.slf4j.Logger;
@@ -10,10 +11,7 @@ import redis.clients.jedis.JedisPool;
import redis.clients.jedis.ScanParams;
import redis.clients.jedis.ScanResult;
import java.io.File;
import java.io.FileNotFoundException;
import java.io.IOException;
import java.io.RandomAccessFile;
import java.io.*;
import java.nio.ByteBuffer;
import java.nio.channels.FileChannel;
import java.nio.file.Files;
@@ -30,7 +28,7 @@ import static edu.lu.uni.serval.FixPatternParser.cluster.TreeLoaderClusterL1.poo
public class CalculatePairs {
private static Logger log = LoggerFactory.getLogger(CalculatePairs.class);
// public static void main(String[] args) {
public static void main(String serverWait,String dbDir,String chunkName,String port,String outputPath,String pjName){
public static void main(String serverWait,String dbDir,String chunkName,String port,String outputPath,String pjName) throws Exception {
// String inputPath;
// String port;
@@ -65,11 +63,11 @@ public class CalculatePairs {
String parameters = String.format("\nport %s \nserverWait %s \nchunkName %s \ndbDir %s",port,serverWait,chunkName,dbDir);
log.info(parameters);
CallShell cs =new CallShell();
String cmd = "bash "+dbDir + "/" + "startServer.sh" +" %s %s %s";
cmd = String.format(cmd, dbDir,chunkName,Integer.valueOf(port));
loadRedis(cmd,serverWait);
// loadRedis(cmd,serverWait);
cs.runShell(cmd,serverWait);
FileHelper.createDirectory(outputPath);
@@ -98,10 +96,9 @@ public class CalculatePairs {
byte [] buf = new byte[0];
String line = null;
try {
FileOutputStream fos = new FileOutputStream(outputPath + "/" +pjName+".txt");
DataOutputStream outStream = new DataOutputStream(new BufferedOutputStream(fos));
FileChannel rwChannel = new RandomAccessFile(outputPath + "/" +pjName +".txt", "rw").getChannel();
ByteBuffer wrBuf = rwChannel.map(FileChannel.MapMode.READ_WRITE, 0, Integer.MAX_VALUE);
int fileCounter = 0;
for (int i = 0; i < result.size(); i++) {
@@ -110,22 +107,38 @@ public class CalculatePairs {
line = String.valueOf(i) +"\t" + String.valueOf(j) + "\t" + result.get(i) + "\t" + result.get(j)+"\n";
buf = line.getBytes();
if(wrBuf.remaining() > 500) {
wrBuf.put(buf);
}else{
log.info("Next pair dump");
fileCounter++;
rwChannel = new RandomAccessFile(outputPath+"/" +pjName+String.valueOf(fileCounter)+".txt", "rw").getChannel();
wrBuf = rwChannel.map(FileChannel.MapMode.READ_WRITE, 0, Integer.MAX_VALUE);
}
outStream.write(line.getBytes());
}
}
rwChannel.close();
outStream.close();
// FileChannel rwChannel = new RandomAccessFile(outputPath + "/" +pjName +".txt", "rw").getChannel();
// ByteBuffer wrBuf = rwChannel.map(FileChannel.MapMode.READ_WRITE, 0, Integer.MAX_VALUE);
// int fileCounter = 0;
//
//
// for (int i = 0; i < result.size(); i++) {
// for (int j = i + 1; j < result.size(); j++) {
//
//
//
// line = String.valueOf(i) +"\t" + String.valueOf(j) + "\t" + result.get(i) + "\t" + result.get(j)+"\n";
// buf = line.getBytes();
// if(wrBuf.remaining() > 500) {
// wrBuf.put(buf);
// }else{
// log.info("Next pair dump");
// fileCounter++;
// rwChannel = new RandomAccessFile(outputPath+"/" +pjName+String.valueOf(fileCounter)+".txt", "rw").getChannel();
// wrBuf = rwChannel.map(FileChannel.MapMode.READ_WRITE, 0, Integer.MAX_VALUE);
// }
//
//
//
//
// }
// }
// rwChannel.close();
} catch (FileNotFoundException e) {
e.printStackTrace();
} catch (IOException e) {
@@ -136,6 +149,10 @@ public class CalculatePairs {
e.printStackTrace();
}
String stopServer = "bash "+dbDir + "/" + "stopServer.sh" +" %s";
String stopServer2 = String.format(stopServer,Integer.valueOf(port));
// loadRedis(stopServer2,serverWait);
cs.runShell(stopServer2,serverWait);
log.info("Done pairs");
}
@@ -1,5 +1,6 @@
package edu.lu.uni.serval.FixPatternParser.cluster;
import edu.lu.uni.serval.FixPatternParser.violations.CallShell;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
@@ -10,8 +11,8 @@ import java.util.List;
import java.util.stream.Collectors;
import java.util.stream.Stream;
import static edu.lu.uni.serval.FixPatternParser.cluster.AkkaTreeLoader.loadRedis;
import static edu.lu.uni.serval.FixPatternParser.cluster.AkkaTreeLoader.loadRedisWait;
//import static edu.lu.uni.serval.FixPatternParser.cluster.AkkaTreeLoader.loadRedis;
//import static edu.lu.uni.serval.FixPatternParser.cluster.AkkaTreeLoader.loadRedisWait;
/**
@@ -21,7 +22,7 @@ public class ImportPairs2DB {
private static Logger log = LoggerFactory.getLogger(ImportPairs2DB.class);
// public static void main(String[] args) {
public static void main(String csvInputPath,String portInner,String serverWait,String dbDir,String numOfWorkers){
public static void main(String csvInputPath,String portInner,String serverWait,String dbDir) throws Exception {
// String inputPath;
// String portInner;
@@ -44,7 +45,7 @@ public class ImportPairs2DB {
// dbDir = "/Users/anilkoyuncu/bugStudy/dataset/redis";
// numOfWorkers = "1";
// }
String parameters = String.format("\nInput path %s \nportInner %s \nserverWait %s \nnumOfWorks %s \ndbDir %s",csvInputPath,portInner,serverWait,numOfWorkers,dbDir);
String parameters = String.format("\nInput path %s \nportInner %s \nserverWait %s \ndbDir %s",csvInputPath,portInner,serverWait,dbDir);
log.info(parameters);
@@ -61,18 +62,21 @@ public class ImportPairs2DB {
String cmd = "bash "+dbDir + "/" + "startServer.sh" +" %s %s %s";
cmd = String.format(cmd, dbDir,pj.getName() +".rdb", portInt);
log.info(cmd);
loadRedisWait(cmd);
CallShell cs = new CallShell();
cs.runShell(cmd);
cmd = "bash "+dbDir + "/redisImportSingle.sh" +" %s %s";
cmd = String.format(cmd, pj.getPath(), portInt);
log.info(cmd);
loadRedisWait(cmd);
cs.runShell(cmd);
portInt++;
//TODO missing kill server script
String stopServer = "bash "+dbDir + "/" + "stopServer.sh" +" %s";
String stopServer2 = String.format(stopServer,Integer.valueOf(portInner));
cs.runShell(stopServer2);
}
@@ -1,5 +1,6 @@
package edu.lu.uni.serval.FixPatternParser.cluster;
import edu.lu.uni.serval.FixPatternParser.violations.CallShell;
import edu.lu.uni.serval.gumtree.regroup.HierarchicalActionSet;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
@@ -17,6 +18,7 @@ import java.util.stream.Collectors;
import java.util.stream.Stream;
import static edu.lu.uni.serval.FixPatternParser.cluster.AkkaTreeLoader.loadRedis;
import static edu.lu.uni.serval.FixPatternParser.cluster.TreeLoaderClusterL1.poolConfig;
/**
* Created by anilkoyuncu on 03/04/2018.
@@ -26,7 +28,7 @@ public class StoreFile {
private static Logger log = LoggerFactory.getLogger(StoreFile.class);
// public static void main(String[] args) {
public static void main(String inputPath,String portInner,String serverWait,String dbDir,String chunkName){
public static void main(String inputPath,String portInner,String serverWait,String dbDir,String chunkName,String operation) throws Exception {
// String inputPath;
// String portInner;
// String serverWait;
@@ -48,12 +50,13 @@ public class StoreFile {
// dbDir = "/Users/anilkoyuncu/bugStudy/dataset/redis";
// numOfWorkers = "1";
// }
String parameters = String.format("\nInput path %s \nportInner %s \nserverWait %s \nchunkName %s \ndbDir %s",inputPath,portInner,serverWait,chunkName,dbDir);
String parameters = String.format("\nInput path %s \nportInner %s \nserverWait %s \nchunkName %s \ndbDir %s \noperation %s",inputPath,portInner,serverWait,chunkName,dbDir,operation);
log.info(parameters);
CallShell cs = new CallShell();
String cmd = "bash "+dbDir + "/" + "startServer.sh" +" %s %s %s";
cmd = String.format(cmd, dbDir,chunkName,Integer.valueOf(portInner));
loadRedis(cmd,serverWait);
// loadRedis(cmd,serverWait);
cs.runShell(cmd,serverWait);
File folder = new File(inputPath);
File[] subFolders = folder.listFiles();
@@ -67,40 +70,43 @@ public class StoreFile {
File[] files = pj.listFiles();
Stream<File> fileStream = Arrays.stream(files);
List<File> fs = fileStream
.filter(x -> x.getName().startsWith("ActionSetDumps"))
.filter(x -> x.getName().startsWith(operation))
.collect(Collectors.toList());
File[] dumps = fs.get(0).listFiles();
for (File f : dumps) {
String name = f.getName();
String key = pjName + "/"+ "ActionSetDumps/" + name;
String key = pjName + "/"+ operation+"/" + name;
String result = key +","+f.getPath();
workList.add(result);
}
}
log.info(String.valueOf(workList.size()));
JedisPool innerPool = new JedisPool(poolConfig, "127.0.0.1",Integer.valueOf(portInner),20000000);
workList.stream().parallel()
.forEach(m -> storeCore(portInner, m.split(",")[1],m.split(",")[0]));
.forEach(m -> storeCore(innerPool, m.split(",")[1],m.split(",")[0]));
log.info(parameters);
String stopServer = "bash "+dbDir + "/" + "stopServer.sh" +" %s";
String stopServer2 = String.format(stopServer,Integer.valueOf(portInner));
loadRedis(stopServer2,serverWait);
// loadRedis(stopServer2,serverWait);
cs.runShell(stopServer2,serverWait);
}
public static void storeCore(String portInner,String path,String key){
public static void storeCore(JedisPool innerPool,String path,String key){
try {
JedisPool pool = new JedisPool(new JedisPoolConfig(), "127.0.0.1", Integer.valueOf(portInner), 20000000);
ScanResult<String> scan;
HierarchicalActionSet actionSet = null;
HierarchicalActionSet NewactionSet = null;
try {
FileInputStream fi = new FileInputStream(new File(path));
ObjectInputStream oi = new ObjectInputStream(fi);
@@ -120,8 +126,8 @@ public class StoreFile {
e.printStackTrace();
}
try (Jedis inner = innerPool.getResource()) {
try (Jedis inner = pool.getResource()) {
inner.set(key,toString(actionSet));
}
@@ -5,6 +5,7 @@ import com.github.gumtreediff.actions.model.*;
import com.github.gumtreediff.matchers.Matcher;
import com.github.gumtreediff.matchers.Matchers;
import com.github.gumtreediff.tree.ITree;
import edu.lu.uni.serval.FixPatternParser.violations.CallShell;
import edu.lu.uni.serval.gumtree.GumTreeComparer;
import edu.lu.uni.serval.gumtree.regroup.HierarchicalActionSet;
import edu.lu.uni.serval.gumtree.regroup.HierarchicalRegrouper;
@@ -38,7 +39,7 @@ public class TreeLoaderClusterL1 {
private static Logger log = LoggerFactory.getLogger(TreeLoaderClusterL1.class);
// public static void main(String[] args){
public static void main(String portInner,String serverWait,String port,String inputPath,String level1DB,String level1Path){
public static void main(String portInner,String serverWait,String port,String inputPath,String level1DB,String level1Path) throws Exception {
// String inputPath;
// String outputPath;
@@ -68,8 +69,9 @@ public class TreeLoaderClusterL1 {
String cmd = "bash "+inputPath + "/" + "startServer.sh" +" %s %s %s";
cmd = String.format(cmd, inputPath,level1DB,Integer.valueOf(port));
loadRedis(cmd,serverWait);
// loadRedis(cmd,serverWait);
CallShell cs = new CallShell();
cs.runShell(cmd,serverWait);
JedisPool outerPool = new JedisPool(poolConfig, "127.0.0.1",Integer.valueOf(port),20000000);
@@ -83,7 +85,8 @@ public class TreeLoaderClusterL1 {
for (File db : dbs) {
String cmdInner = "bash "+inputPath + "/" + "startServer.sh" +" %s %s %s";
cmdInner = String.format(cmdInner, inputPath,db.getName(),Integer.valueOf(portInner));
loadRedis(cmdInner,serverWait);
// loadRedis(cmdInner,serverWait);
cs.runShell(cmdInner,serverWait);
JedisPool innerPool = new JedisPool(poolConfig, "127.0.0.1",Integer.valueOf(portInner),20000000);
Jedis inner = null;
@@ -136,8 +139,8 @@ public class TreeLoaderClusterL1 {
String stopServer = "bash "+level1Path + "/" + "stopServer.sh" +" %s";
stopServer = String.format(stopServer,Integer.valueOf(portInner));
loadRedis(stopServer,serverWait);
// loadRedis(stopServer,serverWait);
cs.runShell(stopServer,serverWait);
}
@@ -0,0 +1,41 @@
package edu.lu.uni.serval.FixPatternParser.violations;
/**
* Created by anilkoyuncu on 17/04/2018.
*/
import java.io.*;
public class CallShell {

    /**
     * Runs {@code command} as an external process and echoes each line of its
     * standard output to {@code System.out}, prefixed with "Script output: ".
     * Blocks until the process's stdout is exhausted, then reaps the child via
     * {@link Process#waitFor()} so no zombie process is left behind.
     *
     * NOTE(review): {@code Runtime.exec(String)} tokenizes the command on
     * whitespace and is shell-injection prone if {@code command} ever carries
     * untrusted input — presumably callers pass fixed "bash script.sh ..."
     * strings; confirm before widening use.
     *
     * @param command the command line to execute (whitespace-tokenized)
     * @throws Exception if the process cannot be started, its output cannot be
     *                   read, or the wait is interrupted
     */
    public void runShell(String command) throws Exception {
        Process process = Runtime.getRuntime().exec(command);
        // try-with-resources: the reader (and the underlying stdout pipe)
        // was previously leaked on every call.
        try (BufferedReader reader = new BufferedReader(new InputStreamReader(
                process.getInputStream()))) {
            String s;
            while ((s = reader.readLine()) != null) {
                System.out.println("Script output: " + s);
            }
        }
        // Reap the child; the original never waited, accumulating zombies.
        // A non-zero exit is logged rather than thrown so existing callers
        // that tolerate failing scripts keep working.
        int exitCode = process.waitFor();
        if (exitCode != 0) {
            System.err.println("Command exited with code " + exitCode + ": " + command);
        }
    }

    /**
     * Same as {@link #runShell(String)}, then sleeps for {@code serverWait}
     * milliseconds — used to give a just-started redis server time to come up.
     *
     * @param command    the command line to execute
     * @param serverWait sleep duration in milliseconds, as a decimal string
     * @throws Exception if the process fails to run, {@code serverWait} is not
     *                   a valid number, or the sleep is interrupted
     */
    public void runShell(String command, String serverWait) throws Exception {
        // Delegate instead of duplicating the read loop (the original copied
        // it verbatim); parseLong avoids the Integer.valueOf boxing and
        // matches Thread.sleep(long).
        runShell(command);
        Thread.sleep(Long.parseLong(serverWait));
    }
}
@@ -6,6 +6,9 @@ import java.util.HashMap;
import java.util.List;
import java.util.Map;
import com.github.gumtreediff.actions.model.Delete;
import com.github.gumtreediff.actions.model.Insert;
import com.github.gumtreediff.actions.model.Move;
import com.github.gumtreediff.actions.model.Update;
import com.github.gumtreediff.tree.ITree;
@@ -40,20 +43,40 @@ public class FixedViolationHunkParser extends FixedViolationParser {
@Override
public void parseFixPatterns(File prevFile, File revFile, File diffentryFile) {
List<HierarchicalActionSet> actionSets = parseChangedSourceCodeWithGumTree2(prevFile, revFile);
// boolean isUpdate =
// actionSets.stream().allMatch(p -> p.getAction() instanceof Update);
if (actionSets.size() != 0) {
boolean isUpdate =
actionSets.stream().allMatch(p -> p.getAction() instanceof Update);
boolean isInsert =
actionSets.stream().allMatch(p -> p.getAction() instanceof Insert);
boolean isDelete =
actionSets.stream().allMatch(p -> p.getAction() instanceof Delete);
boolean isMove =
actionSets.stream().allMatch(p -> p.getAction() instanceof Move);
int hunkSet = 0;
// if(isUpdate){
if (isUpdate || isInsert || isDelete || isMove) {
for (HierarchicalActionSet actionSet : actionSets) {
String folder = null;
if (isUpdate) {
folder = "/UPD/";
} else if (isDelete) {
folder = "/DEL/";
} else if (isInsert) {
folder = "/INS/";
} else if (isMove) {
folder = "/MOV/";
}
FileOutputStream f = null;
try {
String pj = diffentryFile.getParent().split("Defects4J")[1];
String root = diffentryFile.getParent().split("Defects4J")[0];
String hunkTreeFileName = root+"GumTreeOutputDefects4J/" +pj.replace("DiffEntries","ActionSetDumps/") + diffentryFile.getName() + "_" + String.valueOf(hunkSet);
// String pj = diffentryFile.getParent().split("Defects4J")[1];
String datasetName = diffentryFile.getParent().split("dataset/")[1].split("/")[0];
String[] split1 = diffentryFile.getParent().split(datasetName);
String root = split1[0];
String pj = split1[1].split("/")[1];
String hunkTreeFileName = root + "GumTreeOutput" + datasetName + "/" + pj + folder + diffentryFile.getName() + "_" + String.valueOf(hunkSet);
f = new FileOutputStream(new File(hunkTreeFileName));
ObjectOutputStream o = new ObjectOutputStream(f);
o.writeObject(actionSet);
@@ -68,7 +91,8 @@ public class FixedViolationHunkParser extends FixedViolationParser {
hunkSet++;
}
// }
}
}
}
// public void parseFixPatterns(File prevFile, File revFile, File diffentryFile) {
@@ -126,9 +126,9 @@ public class MultiThreadTreeLoaderCluster {
edu.lu.uni.serval.FixPatternParser.cluster.AkkaTreeLoader.loadRedis(cmd1,"1000");
String cmd2 = "bash "+dbDir + "/" + "startServer.sh" +" %s %s %s";
cmd2 = String.format(cmd2, dbDir,dumpName,Integer.valueOf(port));
edu.lu.uni.serval.FixPatternParser.cluster.AkkaTreeLoader.loadRedis(cmd2,"10000");
// String cmd2 = "bash "+dbDir + "/" + "startServer.sh" +" %s %s %s";
// cmd2 = String.format(cmd2, dbDir,dumpName,Integer.valueOf(port));
// edu.lu.uni.serval.FixPatternParser.cluster.AkkaTreeLoader.loadRedis(cmd2,"10000");
String cmd3;
@@ -779,7 +779,7 @@ original calculate pairs, from all dumps of the projects
.filter(x -> !x.getName().startsWith("."))
.collect(Collectors.toList());
FileHelper.createDirectory(outputPath + "pairs-2l/");
FileHelper.createDirectory(outputPath + "/pairs-2l/");
for (File pj : pjs) {
File[] files = pj.listFiles();
@@ -835,7 +835,7 @@ original calculate pairs, from all dumps of the projects
// ByteBuffer wrBuf = rwChannel.map(FileChannel.MapMode.READ_WRITE, 0, 1000*treesFileNames.size()*treesFileNames.size());
// int fileCounter = 0;
FileOutputStream fos = new FileOutputStream(outputPath + "pairs-2l/" +filename+".txt");
FileOutputStream fos = new FileOutputStream(outputPath + "/pairs-2l/" +filename+".txt");
DataOutputStream outStream = new DataOutputStream(new BufferedOutputStream(fos));
@@ -78,7 +78,10 @@ public class TestHunkParser {
final String alarmTypesFilePath = GUM_TREE_OUTPUT + "alarmTypes.list";
FileHelper.createDirectory(GUM_TREE_OUTPUT + "/ActionSetDumps");
FileHelper.createDirectory(GUM_TREE_OUTPUT + "/UPD");
FileHelper.createDirectory(GUM_TREE_OUTPUT + "/INS");
FileHelper.createDirectory(GUM_TREE_OUTPUT + "/DEL");
FileHelper.createDirectory(GUM_TREE_OUTPUT + "/MOV");
FileHelper.deleteDirectory(editScriptsFilePath);
FileHelper.deleteDirectory(patchesSourceCodeFilePath);
FileHelper.deleteDirectory(buggyTokensFilePath);
+69 -35
View File
@@ -25,72 +25,106 @@ public class Launcher {
String dumpsName;
String gumInput;
String gumOutput;
String datasetPath;
String pjName;
if (args.length > 0) {
jobType = args[0];
gumInput = args[1];
portInner = args[2];
serverWait = args[3];
chunkName = args[4];
numOfWorkers = args[5];
dbDir = args[6];
port = args[7];
pairsPath = args[8];
csvInputPath = args[9];
dumpsName = args[10];
gumOutput =args[12];
portInner = args[1];
serverWait = args[2];
numOfWorkers = args[3];
port = args[4];
dumpsName = args[5];
datasetPath = args[6];
pjName = args[7];
// gumInput = args[1];
// chunkName = args[4];
// dbDir = args[6];
// pairsPath = args[8];
// csvInputPath = args[9];
// gumOutput =args[12];
} else {
// inputPath = "/Users/anilkoyuncu/bugStudy/dataset/pairs";
gumInput = "/Users/anilkoyuncu/bugStudy/dataset/Defects4J/";
// gumInput = "/Users/anilkoyuncu/bugStudy/dataset/Defects4J/";
portInner = "6380";
serverWait = "10000";
chunkName = "textfile.txt.csv.rdb";
dbDir = "/Users/anilkoyuncu/bugStudy/dataset/redis";
numOfWorkers = "1";
jobType = "L3PAIRDB";
serverWait = "50000";
chunkName = "Bug13April.txt.csv.rdb";
// dbDir = "/Users/anilkoyuncu/bugStudy/dataset/redis";
numOfWorkers = "10";
jobType = "AKKA";
port = "6399";
pairsPath = "/Users/anilkoyuncu/bugStudy/dataset/pairsImportDefects4J";
gumOutput = "/Users/anilkoyuncu/bugStudy/dataset/GumTreeOutputDefects4J";
csvInputPath = "/Users/anilkoyuncu/bugStudy/dataset/pairsImportDefects4J-CSV";
dumpsName = "dumpsDefect4J.rdb";
// pairsPath = "/Users/anilkoyuncu/bugStudy/dataset/pairsImportDefects4J";
// gumOutput = "/Users/anilkoyuncu/bugStudy/dataset/GumTreeOutputDefects4J";
// csvInputPath = "/Users/anilkoyuncu/bugStudy/dataset/pairsImportDefects4J-CSV";
dumpsName = "dumps-Bug13April.rdb";
datasetPath = "/Users/anilkoyuncu/bugStudy/dataset";
pjName = "Bug13April";
}
gumInput = datasetPath +"/"+pjName+"/";
gumOutput = datasetPath + "/GumTreeOutput" + pjName;
dbDir = datasetPath + "/redis";
pairsPath = datasetPath + "/pairsImport"+pjName;
csvInputPath = datasetPath + "/pairsImport"+pjName+"-CSV";
// String parameters = String.format("\nJob %s \nInput path %s \nportInner %s \nserverWait %s \nchunkName %s \nnumOfWorks %s \ndbDir %s", jobType, inputPath, portInner, serverWait, chunkName, numOfWorkers, dbDir);
switch (jobType){
try {
switch (jobType) {
case "DUMPTREE":
TestHunkParser.main(gumInput,gumOutput,numOfWorkers);
TestHunkParser.main(gumInput, gumOutput, numOfWorkers);
break;
case "STORE":
StoreFile.main(gumOutput,portInner,serverWait,dbDir,dumpsName);
StoreFile.main(gumOutput, portInner, serverWait, dbDir, "INS"+dumpsName,"INS");
StoreFile.main(gumOutput, portInner, serverWait, dbDir, "DEL"+dumpsName,"DEL");
StoreFile.main(gumOutput, portInner, serverWait, dbDir, "UPD"+dumpsName,"UPD");
StoreFile.main(gumOutput, portInner, serverWait, dbDir, "MOV"+dumpsName,"MOV");
break;
case "CALCPAIRS":
CalculatePairs.main(serverWait,dbDir,dumpsName,port,pairsPath,"DEFECT4J");
CalculatePairs.main(serverWait, dbDir, "INS"+dumpsName, portInner, pairsPath+"INS", pjName+"INS");
CalculatePairs.main(serverWait, dbDir, "DEL"+dumpsName, portInner, pairsPath+"DEL", pjName+"DEL");
CalculatePairs.main(serverWait, dbDir, "UPD"+dumpsName, portInner, pairsPath+"UPD", pjName+"UPD");
CalculatePairs.main(serverWait, dbDir, "MOV"+dumpsName, portInner, pairsPath+"MOV", pjName+"MOV");
break;
case "IMPORTPAIRS":
ImportPairs2DB.main(csvInputPath,portInner,serverWait,dbDir,numOfWorkers);
ImportPairs2DB.main(csvInputPath+"INS", portInner, serverWait, dbDir);
ImportPairs2DB.main(csvInputPath+"DEL", portInner, serverWait, dbDir);
ImportPairs2DB.main(csvInputPath+"MOV", portInner, serverWait, dbDir);
ImportPairs2DB.main(csvInputPath+"UPD", portInner, serverWait, dbDir);
break;
case "AKKA":
AkkaTreeLoader.main(portInner,serverWait,dbDir,chunkName,port,dumpsName);
String chunk = pjName;
AkkaTreeLoader.main(portInner, serverWait, dbDir, chunk +"INS"+".txt.csv.rdb" , port, "INS"+dumpsName);
AkkaTreeLoader.main(portInner, serverWait, dbDir, chunk +"DEL"+".txt.csv.rdb", port, "DEL"+dumpsName);
AkkaTreeLoader.main(portInner, serverWait, dbDir, chunk +"UPD"+".txt.csv.rdb", port, "UPD"+dumpsName);
AkkaTreeLoader.main(portInner, serverWait, dbDir, chunk +"MOV"+".txt.csv.rdb", port, "MOV"+dumpsName);
break;
case "LEVEL1DB":
TreeLoaderClusterL1.main(portInner,serverWait,port,dbDir,"level1-defect4j.rdb",dbDir+"/level1-defect4j/");
TreeLoaderClusterL1.main(portInner, serverWait, port, dbDir, "level1-BugsDotJar.rdb", dbDir + "/level1-BugsDotJar/");
break;
//CALC python abstractPatch.py to from cluster folder
case "L2CALCPAIRS":
MultiThreadTreeLoaderCluster.calculatePairsOfClusters("/Users/anilkoyuncu/bugStudy/code/python/clusterDefect4J","/Users/anilkoyuncu/bugStudy/dataset/");
// MultiThreadTreeLoaderCluster.calculatePairsOfClusters("/Users/anilkoyuncu/bugStudy/code/python/clusterDefect4J","/Users/anilkoyuncu/bugStudy/dataset/");
MultiThreadTreeLoaderCluster.calculatePairsOfClusters("/Users/anilkoyuncu/bugStudy/code/python/cluster", datasetPath);
break;
case "L2PAIRDB":
MultiThreadTreeLoaderCluster.mainCompare("6300","/Users/anilkoyuncu/bugStudy/dataset/pairs-csv","/Users/anilkoyuncu/bugStudy/dataset/redisSingleImport.sh",dbDir,"clusterl1-d4j.rdb",dumpsName,"6301");
// MultiThreadTreeLoaderCluster.mainCompare("6300","/Users/anilkoyuncu/bugStudy/dataset/pairs-csv","/Users/anilkoyuncu/bugStudy/dataset/redisSingleImport.sh",dbDir,"clusterl1-d4j.rdb",dumpsName,"6301");
MultiThreadTreeLoaderCluster.mainCompare("6300", datasetPath + "/pairs-csv", datasetPath + "/redisSingleImport.sh", dbDir, "clusterl1-13april.rdb", dumpsName, "6301");
break;
case "L3CALCPAIRS":
MultiThreadTreeLoaderCluster3.calculatePairsOfClusters("/Users/anilkoyuncu/bugStudy/code/python/clusterDefect4J-2l","/Users/anilkoyuncu/bugStudy/dataset/");
// MultiThreadTreeLoaderCluster3.calculatePairsOfClusters("/Users/anilkoyuncu/bugStudy/code/python/clusterDefect4J-2l",datasetPath);
MultiThreadTreeLoaderCluster3.calculatePairsOfClusters("/Users/anilkoyuncu/bugStudy/code/python/cluster-2l", datasetPath);
break;
case "L3PAIRDB":
MultiThreadTreeLoaderCluster3.mainCompare("6300","/Users/anilkoyuncu/bugStudy/dataset/pairs-2l-csv","/Users/anilkoyuncu/bugStudy/dataset/redisSingleImport.sh",dbDir,"clusterl2-d4j.rdb",dumpsName,"6301");
MultiThreadTreeLoaderCluster3.mainCompare("6300", datasetPath + "/pairs-2l-csv", datasetPath + "/redisSingleImport.sh", dbDir, "clusterl2-13april.rdb", dumpsName, "6301");
break;
}
} catch (Exception e) {
e.printStackTrace();
}
}
}
// System.exit(1);
}