
Commit

Initial fix for #792
tsantalis committed Oct 27, 2024
1 parent 97e6313 commit e3226f8
Showing 7 changed files with 598 additions and 4 deletions.
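
For orientation: the first hunk below adds a matching branch for Optional-guard migrations, where a statement whose condition calls isPresent() on one side is matched against an ifPresent() or ifPresentOrElse() call on the other. A minimal sketch of the kind of change this is meant to cover (hypothetical code with invented names, not taken from this commit):

    import java.util.Optional;

    class Example {                                   // invented class, for illustration only
        // before the migration: explicit isPresent() guard
        void fetchBefore(Optional<String> doi) {
            if (doi.isPresent()) {
                fetchByDoi(doi.get());
            }
        }

        // after the migration: the guard is folded into an ifPresent(...) call with a lambda
        void fetchAfter(Optional<String> doi) {
            doi.ifPresent(value -> fetchByDoi(value));
        }

        void fetchByDoi(String doi) { /* invented helper */ }
    }

Here both sides share the same receiver (doi) and the right side passes a lambda argument, which is what the two new conditions check for.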
@@ -2398,6 +2398,23 @@ else if(invocationCoveringTheEntireStatement1 != null && (r = invocationCovering
}
}
}
else if(statement1 instanceof AbstractExpression && invocation1.getName().equals("isPresent") && invocation1.getExpression() != null &&
(invocationCoveringTheEntireStatement2.getName().equals("ifPresent") || invocationCoveringTheEntireStatement2.getName().equals("ifPresentOrElse")) &&
invocationCoveringTheEntireStatement2.arguments().size() >= 1) {
if(invocationCoveringTheEntireStatement2.arguments().get(0).startsWith(invocation1.getExpression() + JAVA.LAMBDA_ARROW)) {
Replacement replacement = new MethodInvocationReplacement(invocation1.actualString(),
invocationCoveringTheEntireStatement2.actualString(), invocation1, invocationCoveringTheEntireStatement2, ReplacementType.METHOD_INVOCATION);
replacementInfo.addReplacement(replacement);
return replacementInfo.getReplacements();
}
else if(invocationCoveringTheEntireStatement2.getExpression() != null && invocationCoveringTheEntireStatement2.getExpression().equals(invocation1.getExpression()) &&
invocationCoveringTheEntireStatement2.arguments().get(0).contains(JAVA.LAMBDA_ARROW)) {
Replacement replacement = new MethodInvocationReplacement(invocation1.actualString(),
invocationCoveringTheEntireStatement2.actualString(), invocation1, invocationCoveringTheEntireStatement2, ReplacementType.METHOD_INVOCATION);
replacementInfo.addReplacement(replacement);
return replacementInfo.getReplacements();
}
}
}
}
}
@@ -4767,7 +4784,7 @@ else if(invocations1.size() == 0 && invocations2.size() > 0) {
}

protected static boolean streamAPIName(String name) {
return name.equals("stream") || name.equals("filter") || name.equals("forEach") || name.equals("collect") || name.equals("map") || name.equals("removeIf");
return name.equals("stream") || name.equals("filter") || name.equals("forEach") || name.equals("collect") || name.equals("map") || name.equals("flatMap") || name.equals("removeIf") || name.equals("ifPresent") || name.equals("ifPresentOrElse");
}

protected static List<AbstractCall> streamAPICalls(AbstractCodeFragment leaf) {
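
The remaining hunks extend processStreamAPIStatements() so that a loop on the left side can be matched against a forEach pipeline that was moved into a newly added (extracted) method on the right side, and so that a ReplaceLoopWithPipelineRefactoring is only reported when an actual loop (for, enhanced-for, while, do) takes part in the match. Roughly, the shape of change being targeted looks like this (hypothetical sketch with invented names, not taken from this commit):

    import java.util.List;

    class Caller {                                    // invented class, for illustration only
        // before: the caller iterates with an enhanced-for loop
        void mergeAllBefore(List<String> fields) {
            for (String field : fields) {
                merge(field);
            }
        }

        // after: the caller delegates to an extracted method whose body is a forEach pipeline
        void mergeAllAfter(List<String> fields) {
            mergeEach(fields);
        }

        private void mergeEach(List<String> fields) { // newly added (extracted) method
            fields.forEach(field -> merge(field));
        }

        void merge(String field) { /* invented helper */ }
    }

The new streamAPICallsInExtractedMethods() helper collects such pipelines from added operations reachable from the caller, so the loop and the pipeline can still be paired even though they no longer live in the same method.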
@@ -1765,6 +1765,24 @@ else if(composite.getLocationInfo().subsumes(m.getFragment2().getLocationInfo()))
private void processStreamAPIStatements(List<AbstractCodeFragment> leaves1, List<AbstractCodeFragment> leaves2,
List<CompositeStatementObject> innerNodes1, Set<AbstractCodeFragment> streamAPIStatements2)
throws RefactoringMinerTimedOutException {
Map<VariableDeclarationContainer, List<AbstractCodeFragment>> map = new LinkedHashMap<VariableDeclarationContainer, List<AbstractCodeFragment>>();
int mapSize = 0;
streamAPICallsInExtractedMethods(container2, leaves2, streamAPIStatements2, map);
while(mapSize < map.size()) {
int i=0;
int tmpMapSize = map.size();
for(VariableDeclarationContainer key : new LinkedHashSet<>(map.keySet())) {
if(i >= mapSize) {
streamAPICallsInExtractedMethods(key, map.get(key), streamAPIStatements2, map);
}
i++;
}
mapSize = tmpMapSize;
}
List<AbstractCodeFragment> newLeaves2 = new ArrayList<AbstractCodeFragment>();
for(VariableDeclarationContainer key : map.keySet()) {
newLeaves2.addAll(map.get(key));
}
//match expressions in inner nodes from T1 with leaves from T2
List<AbstractExpression> expressionsT1 = new ArrayList<AbstractExpression>();
for(CompositeStatementObject composite : innerNodes1) {
@@ -1776,6 +1794,8 @@ private void processStreamAPIStatements(List<AbstractCodeFragment> leaves1, List
}
int numberOfMappings = mappings.size();
processLeaves(expressionsT1, leaves2, new LinkedHashMap<String, String>(), false);
boolean onlyNestedMappings = this.mappings.size() == numberOfMappings;
processLeaves(expressionsT1, newLeaves2, new LinkedHashMap<String, String>(), false);

List<AbstractCodeMapping> mappings = new ArrayList<>(this.mappings);
if(numberOfMappings == mappings.size()) {
@@ -1921,7 +1941,63 @@ else if(streamAPICall.getName().equals("stream")) {
AbstractCodeFragment fragment2 = mapping.getFragment2();
for(ListIterator<CompositeStatementObject> innerNodeIterator1 = innerNodes1.listIterator(); innerNodeIterator1.hasNext();) {
CompositeStatementObject composite = innerNodeIterator1.next();
if(composite.getExpressions().contains(fragment1)) {
if((composite.getLocationInfo().getCodeElementType().equals(CodeElementType.FOR_STATEMENT) ||
composite.getLocationInfo().getCodeElementType().equals(CodeElementType.ENHANCED_FOR_STATEMENT) ||
composite.getLocationInfo().getCodeElementType().equals(CodeElementType.WHILE_STATEMENT) ||
composite.getLocationInfo().getCodeElementType().equals(CodeElementType.DO_STATEMENT)) &&
composite.getLocationInfo().subsumes(fragment1.getLocationInfo()) &&
onlyNestedMappings) {
AbstractCodeFragment streamAPICallStatement = null;
List<AbstractCall> streamAPICalls = null;
for(AbstractCodeFragment leaf2 : streamAPIStatements2) {
if(leaves2.contains(leaf2)) {
streamAPICallStatement = leaf2;
streamAPICalls = streamAPICalls(leaf2);
break;
}
}
if(streamAPICallStatement != null && streamAPICalls != null) {
List<VariableDeclaration> lambdaParameters = nestedLambdaParameters(streamAPICallStatement.getLambdas());
Set<AbstractCodeFragment> additionallyMatchedStatements1 = new LinkedHashSet<>();
additionallyMatchedStatements1.add(composite);
Set<AbstractCodeFragment> additionallyMatchedStatements2 = new LinkedHashSet<>();
additionallyMatchedStatements2.add(streamAPICallStatement);
for(AbstractCall streamAPICall : streamAPICalls) {
if(streamAPICall.getName().equals("forEach")) {
CompositeReplacement replacement = new CompositeReplacement(composite.getString(), streamAPICallStatement.getString(), additionallyMatchedStatements1, additionallyMatchedStatements2);
Set<Replacement> replacements = new LinkedHashSet<>();
replacements.add(replacement);
LeafMapping newMapping = createLeafMapping(composite, streamAPICallStatement, new LinkedHashMap<String, String>(), false);
newMapping.addReplacements(replacements);
TreeSet<LeafMapping> mappingSet = new TreeSet<>();
mappingSet.add(newMapping);
for(VariableDeclaration lambdaParameter : lambdaParameters) {
for(VariableDeclaration compositeParameter : composite.getVariableDeclarations()) {
if(lambdaParameter.getVariableName().equals(compositeParameter.getVariableName())) {
Pair<VariableDeclaration, VariableDeclaration> pair = Pair.of(compositeParameter, lambdaParameter);
matchedVariables.add(pair);
}
else {
for(Replacement r : mapping.getReplacements()) {
if(r.getBefore().equals(compositeParameter.getVariableName()) && r.getAfter().equals(lambdaParameter.getVariableName())) {
Pair<VariableDeclaration, VariableDeclaration> pair = Pair.of(compositeParameter, lambdaParameter);
matchedVariables.add(pair);
break;
}
}
}
}
}
ReplaceLoopWithPipelineRefactoring ref = new ReplaceLoopWithPipelineRefactoring(additionallyMatchedStatements1, additionallyMatchedStatements2, container1, container2);
newMapping.addRefactoring(ref);
addToMappings(newMapping, mappingSet);
leaves2.remove(newMapping.getFragment2());
innerNodeIterator1.remove();
}
}
}
}
else if(composite.getExpressions().contains(fragment1)) {
AbstractCodeFragment streamAPICallStatement = null;
List<AbstractCall> streamAPICalls = null;
for(AbstractCodeFragment leaf2 : streamAPIStatements2) {
@@ -1933,6 +2009,11 @@ else if(streamAPICall.getName().equals("stream")) {
break;
}
}
else if(fragment2.equals(leaf2)) {
streamAPICallStatement = leaf2;
streamAPICalls = streamAPICalls(leaf2);
break;
}
}
if(streamAPICallStatement != null && streamAPICalls != null) {
List<VariableDeclaration> lambdaParameters = nestedLambdaParameters(streamAPICallStatement.getLambdas());
@@ -2060,8 +2141,20 @@ else if(composite.getLocationInfo().subsumes(m.getFragment1().getLocationInfo())
}
}
}
ReplaceLoopWithPipelineRefactoring ref = new ReplaceLoopWithPipelineRefactoring(additionallyMatchedStatements1, additionallyMatchedStatements2, container1, container2);
newMapping.addRefactoring(ref);
boolean loopFound = false;
for(AbstractCodeFragment fragment : additionallyMatchedStatements1) {
if(fragment.getLocationInfo().getCodeElementType().equals(CodeElementType.FOR_STATEMENT) ||
fragment.getLocationInfo().getCodeElementType().equals(CodeElementType.ENHANCED_FOR_STATEMENT) ||
fragment.getLocationInfo().getCodeElementType().equals(CodeElementType.WHILE_STATEMENT) ||
fragment.getLocationInfo().getCodeElementType().equals(CodeElementType.DO_STATEMENT)) {
loopFound = true;
break;
}
}
if(loopFound) {
ReplaceLoopWithPipelineRefactoring ref = new ReplaceLoopWithPipelineRefactoring(additionallyMatchedStatements1, additionallyMatchedStatements2, container1, container2);
newMapping.addRefactoring(ref);
}
addToMappings(newMapping, mappingSet);
leaves2.remove(newMapping.getFragment2());
innerNodeIterator1.remove();
@@ -2071,6 +2164,30 @@ else if(composite.getLocationInfo().subsumes(m.getFragment1().getLocationInfo())
}
}

private void streamAPICallsInExtractedMethods(VariableDeclarationContainer callerOperation, List<AbstractCodeFragment> leaves2, Set<AbstractCodeFragment> streamAPIStatements2, Map<VariableDeclarationContainer, List<AbstractCodeFragment>> map) {
if(classDiff != null) {
for(AbstractCodeFragment leaf2 : leaves2) {
List<AbstractCall> calls = leaf2.getMethodInvocations();
for(AbstractCall call : calls) {
UMLOperation addedOperation = classDiff.matchesOperation(call, classDiff.getAddedOperations(), callerOperation);
if(addedOperation != null && !map.keySet().contains(addedOperation)) {
List<AbstractCodeFragment> newLeaves2 = new ArrayList<AbstractCodeFragment>();
if(!addedOperation.hasEmptyBody()) {
Set<AbstractCodeFragment> newStreamAPIStatements2 = statementsWithStreamAPICalls(addedOperation.getBody().getCompositeStatement().getLeaves());
for(AbstractCodeFragment streamAPICall : newStreamAPIStatements2) {
if(streamAPICall.getLambdas().size() > 0) {
streamAPIStatements2.add(streamAPICall);
expandAnonymousAndLambdas(streamAPICall, newLeaves2, new ArrayList<CompositeStatementObject>(), new LinkedHashSet<>(), new LinkedHashSet<>(), anonymousClassList2(), codeFragmentOperationMap2, container2, false);
}
}
}
map.put(addedOperation, newLeaves2);
}
}
}
}
}

protected UMLOperationBodyMapper(UMLOperation anonymousClassOperation, LambdaExpressionObject lambda2, UMLOperationBodyMapper parentMapper) throws RefactoringMinerTimedOutException {
this.classDiff = parentMapper.classDiff;
this.modelDiff = classDiff != null ? classDiff.getModelDiff() : null;
34 changes: 34 additions & 0 deletions src/test/java/org/refactoringminer/test/TestStatementMappings.java
@@ -240,6 +240,40 @@ public void handle(String commitId, List<Refactoring> refactorings) {
Assertions.assertTrue(expected.size() == actual.size() && expected.containsAll(actual) && actual.containsAll(expected));
}

@Test
public void testNestedExtractMethodStatementMappingsWithStreamsMigration() throws Exception {
final List<String> actual = new ArrayList<>();
Map<String, String> fileContentsBefore = new LinkedHashMap<String, String>();
Map<String, String> fileContentsCurrent = new LinkedHashMap<String, String>();
String contentsV1 = FileUtils.readFileToString(new File(EXPECTED_PATH + "FetchAndMergeEntry-v1.txt"));
String contentsV2 = FileUtils.readFileToString(new File(EXPECTED_PATH + "FetchAndMergeEntry-v2.txt"));
fileContentsBefore.put("src/main/java/org/jabref/gui/mergeentries/FetchAndMergeEntry.java", contentsV1);
fileContentsCurrent.put("src/main/java/org/jabref/gui/mergeentries/FetchAndMergeEntry.java", contentsV2);
UMLModel parentUMLModel = GitHistoryRefactoringMinerImpl.createModel(fileContentsBefore, new LinkedHashSet<String>());
UMLModel currentUMLModel = GitHistoryRefactoringMinerImpl.createModel(fileContentsCurrent, new LinkedHashSet<String>());

UMLModelDiff modelDiff = parentUMLModel.diff(currentUMLModel);
List<Refactoring> refactorings = modelDiff.getRefactorings();
List<UMLOperationBodyMapper> parentMappers = new ArrayList<>();
for (Refactoring ref : refactorings) {
if(ref instanceof ExtractOperationRefactoring) {
ExtractOperationRefactoring ex = (ExtractOperationRefactoring)ref;
UMLOperationBodyMapper bodyMapper = ex.getBodyMapper();
if(!bodyMapper.isNested()) {
if(!parentMappers.contains(bodyMapper.getParentMapper())) {
parentMappers.add(bodyMapper.getParentMapper());
}
}
mapperInfo(bodyMapper, actual);
}
}
for(UMLOperationBodyMapper parentMapper : parentMappers) {
mapperInfo(parentMapper, actual);
}
List<String> expected = IOUtils.readLines(new FileReader(EXPECTED_PATH + "jabref-12025.txt"));
Assertions.assertTrue(expected.size() == actual.size() && expected.containsAll(actual) && actual.containsAll(expected));
}

@Test
public void testDuplicatedExtractMethodStatementMappings() throws Exception {
GitHistoryRefactoringMinerImpl miner = new GitHistoryRefactoringMinerImpl();
@@ -382,6 +382,40 @@ public void handle(String commitId, List<Refactoring> refactorings) {
Assert.assertTrue(expected.size() == actual.size() && expected.containsAll(actual) && actual.containsAll(expected));
}

@Test
public void testNestedExtractMethodStatementMappingsWithStreamsMigration() throws Exception {
final List<String> actual = new ArrayList<>();
Map<String, String> fileContentsBefore = new LinkedHashMap<String, String>();
Map<String, String> fileContentsCurrent = new LinkedHashMap<String, String>();
String contentsV1 = FileUtils.readFileToString(new File(EXPECTED_PATH + "FetchAndMergeEntry-v1.txt"));
String contentsV2 = FileUtils.readFileToString(new File(EXPECTED_PATH + "FetchAndMergeEntry-v2.txt"));
fileContentsBefore.put("src/main/java/org/jabref/gui/mergeentries/FetchAndMergeEntry.java", contentsV1);
fileContentsCurrent.put("src/main/java/org/jabref/gui/mergeentries/FetchAndMergeEntry.java", contentsV2);
UMLModel parentUMLModel = GitHistoryRefactoringMinerImpl.createModel(fileContentsBefore, new LinkedHashSet<String>());
UMLModel currentUMLModel = GitHistoryRefactoringMinerImpl.createModel(fileContentsCurrent, new LinkedHashSet<String>());

UMLModelDiff modelDiff = parentUMLModel.diff(currentUMLModel);
List<Refactoring> refactorings = modelDiff.getRefactorings();
List<UMLOperationBodyMapper> parentMappers = new ArrayList<>();
for (Refactoring ref : refactorings) {
if(ref instanceof ExtractOperationRefactoring) {
ExtractOperationRefactoring ex = (ExtractOperationRefactoring)ref;
UMLOperationBodyMapper bodyMapper = ex.getBodyMapper();
if(!bodyMapper.isNested()) {
if(!parentMappers.contains(bodyMapper.getParentMapper())) {
parentMappers.add(bodyMapper.getParentMapper());
}
}
mapperInfo(bodyMapper, actual);
}
}
for(UMLOperationBodyMapper parentMapper : parentMappers) {
mapperInfo(parentMapper, actual);
}
List<String> expected = IOUtils.readLines(new FileReader(EXPECTED_PATH + "jabref-12025.txt"));
Assert.assertTrue(expected.size() == actual.size() && expected.containsAll(actual) && actual.containsAll(expected));
}

@Test
public void testNestedExtractMethodStatementMappingsWithIntermediateDelegate() throws Exception {
GitHistoryRefactoringMinerImpl miner = new GitHistoryRefactoringMinerImpl();
