Skip to content

Commit

Permalink
Add support for ParentIterative Edge
Browse files Browse the repository at this point in the history
  • Loading branch information
ketkarameya committed Sep 20, 2023
1 parent b688980 commit 08da159
Show file tree
Hide file tree
Showing 10 changed files with 195 additions and 35 deletions.
33 changes: 24 additions & 9 deletions src/models/edit.rs
Original file line number Diff line number Diff line change
Expand Up @@ -106,17 +106,19 @@ impl SourceCodeUnit {
// Apply all the `rules` to the node, parent, grand parent and great grand parent.
// Short-circuit on the first match.
pub(crate) fn get_edit_for_context(
&self, previous_edit_start: usize, previous_edit_end: usize, rules_store: &mut RuleStore,
rules: &Vec<InstantiatedRule>,
&self, previous_edit_range: &Range, rules_store: &mut RuleStore, rules: &Vec<InstantiatedRule>,
iterative: bool,
) -> Option<Edit> {
let number_of_ancestors_in_parent_scope = *self
.piranha_arguments()
.number_of_ancestors_in_parent_scope();
let changed_node = get_node_for_range(self.root_node(), previous_edit_start, previous_edit_end);
debug!(
"\n{}",
format!("Changed node kind {}", changed_node.kind()).blue()
let changed_node = get_node_for_range(
self.root_node(),
*previous_edit_range.start_byte(),
*previous_edit_range.end_byte(),
);
debug!("\nChanged node kind {}", changed_node.kind().blue());

// Context contains - the changed node in the previous edit, its parent, grand parent and great grand parent
let context = || {
get_context(
Expand All @@ -125,13 +127,26 @@ impl SourceCodeUnit {
number_of_ancestors_in_parent_scope,
)
};
for rule in rules {
// If iterative is true, for each ancestor in the context (innermost first), we try every rule in the order provided.
// If iterative is false, for each rule in the order provided, we try every ancestor in the context.
if iterative {
for ancestor in &context() {
if let Some(edit) = self.get_edit(rule, rules_store, *ancestor, false) {
return Some(edit);
for rule in rules {
if let Some(edit) = self.get_edit(rule, rules_store, *ancestor, false) {
return Some(edit);
}
}
}
} else {
for rule in rules {
for ancestor in &context() {
if let Some(edit) = self.get_edit(rule, rules_store, *ancestor, false) {
return Some(edit);
}
}
}
}

None
}

Expand Down
3 changes: 2 additions & 1 deletion src/models/rule_graph.rs
Original file line number Diff line number Diff line change
Expand Up @@ -35,6 +35,7 @@ use pyo3::prelude::{pyclass, pymethods};

pub(crate) static GLOBAL: &str = "Global";
pub(crate) static PARENT: &str = "Parent";
pub(crate) static PARENT_ITERATIVE: &str = "ParentIterative";

#[derive(Debug, Default, Getters, MutGetters, Builder, Clone, PartialEq)]
#[builder(build_fn(name = "create"))]
Expand Down Expand Up @@ -194,7 +195,7 @@ impl RuleGraph {
}
}
// Add empty entry, in case no next rule was found for a particular scope
for scope in [PARENT, GLOBAL] {
for scope in [PARENT, PARENT_ITERATIVE, GLOBAL] {
next_rules.entry(scope.to_string()).or_default();
}
next_rules
Expand Down
42 changes: 23 additions & 19 deletions src/models/source_code_unit.rs
Original file line number Diff line number Diff line change
Expand Up @@ -23,7 +23,7 @@ use tree_sitter::{InputEdit, Node, Parser, Tree};

use crate::{
models::capture_group_patterns::CGPattern,
models::rule_graph::{GLOBAL, PARENT},
models::rule_graph::{GLOBAL, PARENT, PARENT_ITERATIVE},
utilities::tree_sitter_utilities::{
get_node_for_range, get_replace_range, get_tree_sitter_edit, number_of_errors,
},
Expand Down Expand Up @@ -240,48 +240,52 @@ impl SourceCodeUnit {

// Process the parent
// Find the rules to be applied in the "Parent" scope that match any parent (context) of the changed node in the previous edit
if let Some(edit) = self.get_edit_for_context(
*current_replace_range.start_byte(),
*current_replace_range.end_byte(),
rules_store,
&next_rules_by_scope[PARENT],
) {
if let Some(edit) =
self._get_parent_edits(&current_replace_range, rules_store, &next_rules_by_scope)
{
self.rewrites_mut().push(edit.clone());
debug!(
"\n{}",
format!(
"Cleaning up the context, by applying the rule - {}",
edit.matched_rule()
)
.green()
);
// Apply the matched rule to the parent
let applied_edit = self.apply_edit(&edit, parser);
current_replace_range = get_replace_range(applied_edit);
current_rule = edit.matched_rule().to_string();
// Add the (tag, code_snippet) mapping to substitution table.
self.substitutions.extend(edit.p_match().matches().clone());
} else {
// No more parents found for cleanup
break;
}
}

// Apply the next rules from the stack
for (sq, rle) in &next_rules_stack {
self.apply_rule(rle.clone(), rules_store, parser, &Some(sq.clone()));
}
}

/// Finds the next parent-scoped edit for the most recently changed node.
///
/// Tries the `Parent`-scoped rules first (non-iterative: each rule is matched
/// against every ancestor in the context before moving to the next rule).
/// If none of those yields an edit, falls back to the `ParentIterative`-scoped
/// rules (iterative: each ancestor is matched against every rule before moving
/// to the next ancestor). Returns `None` when neither scope produces an edit.
fn _get_parent_edits(
  &self, replace_range: &Range, rules_store: &mut RuleStore,
  next_rules: &HashMap<String, Vec<InstantiatedRule>>,
) -> Option<Edit> {
  // `Parent` scope takes precedence over `ParentIterative`.
  if let Some(edit) =
    self.get_edit_for_context(replace_range, rules_store, &next_rules[PARENT], false)
  {
    return Some(edit);
  }
  self.get_edit_for_context(
    replace_range,
    rules_store,
    &next_rules[PARENT_ITERATIVE],
    true,
  )
}

/// Adds the "Method" and "Class" scoped next rules to the queue.
fn add_rules_to_stack(
&mut self, next_rules_by_scope: &HashMap<String, Vec<InstantiatedRule>>,
current_match_range: Range, rules_store: &mut RuleStore,
stack: &mut VecDeque<(CGPattern, InstantiatedRule)>,
) {
for (scope_level, rules) in next_rules_by_scope {
// Scope level is not "PArent" or "Global"
if ![PARENT, GLOBAL].contains(&scope_level.as_str()) {
// Scope level is not "Parent" or "Global"
if ![PARENT, PARENT_ITERATIVE, GLOBAL].contains(&scope_level.as_str()) {
for rule in rules {
let scope_query = self.get_scope_query(
scope_level,
Expand Down
24 changes: 19 additions & 5 deletions src/models/unit_tests/rule_test.rs
Original file line number Diff line number Diff line change
Expand Up @@ -16,7 +16,7 @@ use crate::{
default_configs::{JAVA, UNUSED_CODE_PATH},
filter::Filter,
language::PiranhaLanguage,
matches::Point,
matches::{Point, Range},
piranha_arguments::PiranhaArgumentsBuilder,
},
utilities::eq_without_whitespace,
Expand Down Expand Up @@ -225,9 +225,16 @@ fn test_get_edit_for_context_positive() {
PathBuf::new().as_path(),
&args,
);
let range = Range {
start_byte: 41_usize,
end_byte: 44_usize,
..Default::default()
};
let edit =
source_code_unit.get_edit_for_context(41_usize, 44_usize, &mut rule_store, &vec![rule]);
// let edit = rule.get_edit(&source_code_unit, &mut rule_store, node, true);
source_code_unit.get_edit_for_context(&range, &mut rule_store, &vec![rule.clone()], false);
assert!(edit.is_some());

let edit = source_code_unit.get_edit_for_context(&range, &mut rule_store, &vec![rule], true);
assert!(edit.is_some());
}

Expand Down Expand Up @@ -266,9 +273,16 @@ fn test_get_edit_for_context_negative() {
PathBuf::new().as_path(),
&args,
);
let range = Range {
start_byte: 29_usize,
end_byte: 33_usize,
..Default::default()
};
let edit =
source_code_unit.get_edit_for_context(29_usize, 33_usize, &mut rule_store, &vec![rule]);
// let edit = rule.get_edit(&source_code_unit, &mut rule_store, node, true);
source_code_unit.get_edit_for_context(&range, &mut rule_store, &vec![rule.clone()], false);
assert!(edit.is_none());

let edit = source_code_unit.get_edit_for_context(&range, &mut rule_store, &vec![rule], true);
assert!(edit.is_none());
}

Expand Down
1 change: 1 addition & 0 deletions src/tests/test_piranha_java.rs
Original file line number Diff line number Diff line change
Expand Up @@ -67,6 +67,7 @@ create_rewrite_tests! {
test_java_delete_method_invocation_argument: "delete_method_invocation_argument", 1;
test_java_delete_method_invocation_argument_no_op: "delete_method_invocation_argument_no_op", 0;
test_regex_based_matcher: "regex_based_matcher", 1, cleanup_comments = true;
test_spark_conf: "spark_conf", 1;
}

create_match_tests! {
Expand Down
1 change: 0 additions & 1 deletion src/tests/test_piranha_scala.rs
Original file line number Diff line number Diff line change
Expand Up @@ -17,5 +17,4 @@ use crate::models::default_configs::SCALA;
create_rewrite_tests! {
SCALA,
test_simple_match_replace: "simple_match_replace", 1;

}
21 changes: 21 additions & 0 deletions test-resources/java/spark_conf/configurations/edges.toml
Original file line number Diff line number Diff line change
@@ -0,0 +1,21 @@
# Copyright (c) 2023 Uber Technologies, Inc.
#
# <p>Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file
# except in compliance with the License. You may obtain a copy of the License at
# <p>http://www.apache.org/licenses/LICENSE-2.0
#
# <p>Unless required by applicable law or agreed to in writing, software distributed under the
# License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
# express or implied. See the License for the specific language governing permissions and
# limitations under the License.


[[edges]]
scope = "ParentIterative"
from = "spark_conf_change_java_scala"
to = ["BuilderPattern"]

[[edges]]
scope = "ParentIterative"
from = "BuilderPattern"
to = ["BuilderPattern"]
50 changes: 50 additions & 0 deletions test-resources/java/spark_conf/configurations/rules.toml
Original file line number Diff line number Diff line change
@@ -0,0 +1,50 @@
# Copyright (c) 2023 Uber Technologies, Inc.
#
# <p>Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file
# except in compliance with the License. You may obtain a copy of the License at
# <p>http://www.apache.org/licenses/LICENSE-2.0
#
# <p>Unless required by applicable law or agreed to in writing, software distributed under the
# License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
# express or implied. See the License for the specific language governing permissions and
# limitations under the License.

[[rules]]
name = "spark_conf_change_java_scala"
query = "cs new SparkConf()"
replace_node = "*"
replace = 'new SparkSession.builder()'


[[rules]]
name = "set_spark_home_change_java_scala"
query = "cs :[r].setSparkHome(:[a1])"
replace_node = "*"
replace = "@r.sparkHome(@a1)"
is_seed_rule = false
groups = ["BuilderPattern"]

[[rules]]
name = "set_executor_env_change_2_java_scala"
query = "cs :[r].setExecutorEnv(:[a1], :[a2])"
replace_node = "*"
replace = "@r.executorEnv(@a1, @a2)"
is_seed_rule = false
groups = ["BuilderPattern"]


[[rules]]
name = "app_name_change_java_scala"
query = "cs :[r].setAppName(:[app_name])"
replace_node = "*"
replace = "@r.appName(@app_name)"
is_seed_rule = false
groups = ["BuilderPattern"]

[[rules]]
name = "master_name_change_java_scala"
query = "cs :[r].setMaster(:[master])"
replace_node = "*"
replace = "@r.master(@master)"
is_seed_rule = false
groups = ["BuilderPattern"]
26 changes: 26 additions & 0 deletions test-resources/java/spark_conf/expected/sample.java
Original file line number Diff line number Diff line change
@@ -0,0 +1,26 @@
package com.piranha;

import org.apache.spark.SparkConf;
import org.apache.spark.sql.SparkSession;

class Sample {
void main() {

val conf1 = new SparkSession.builder()
.master(master1)
.appName(appName1)
.executorEnv("spark.executor.extraClassPath1", "test1");

val conf2 = new SparkSession.builder()
.appName(appName2)
.master(master2)
.sparkHome(sparkHome2)
.executorEnv("spark.executor.extraClassPath2", "test2");

val conf3 = new SparkSession.builder()
.executorEnv("spark.executor.extraClassPath3", "test3")
.appName(appName3)
.master(master3)
.sparkHome(sparkHome3);
}
}
29 changes: 29 additions & 0 deletions test-resources/java/spark_conf/input/sample.java
Original file line number Diff line number Diff line change
@@ -0,0 +1,29 @@
package com.piranha;

import org.apache.spark.SparkConf;
import org.apache.spark.sql.SparkSession;

class Sample {
void main() {

val conf1 = new SparkConf()
.setMaster(master1)
.setAppName(appName1)
.setExecutorEnv("spark.executor.extraClassPath1", "test1");

val conf2 = new SparkConf()
.setAppName(appName2)
.setMaster(master2)
.setSparkHome(sparkHome2)
.setExecutorEnv("spark.executor.extraClassPath2", "test2");

val conf3 = new SparkConf()
.setExecutorEnv("spark.executor.extraClassPath3", "test3")
.setAppName(appName3)
.setMaster(master3)
.setSparkHome(sparkHome3);


}

}

0 comments on commit 08da159

Please sign in to comment.