Commit
Merge branch 'main' into dependabot/maven/org.codehaus.mojo-versions-maven-plugin-2.17.1
khatchad authored Nov 1, 2024
2 parents 9b53772 + 1a4f3da commit 291338e
Showing 54 changed files with 427 additions and 96 deletions.
58 changes: 58 additions & 0 deletions .github/workflows/maven.yml
@@ -0,0 +1,58 @@
# This workflow will build a Java project with Maven, and cache/restore any dependencies to improve the workflow execution time
# For more information see: https://docs.github.com/en/actions/automating-builds-and-tests/building-and-testing-java-with-maven

name: Java CI with Maven
on:
  push:
    branches: [ "main" ]
  pull_request:
    branches: [ "main" ]
  schedule:
    - cron: "0 2 * * 1-5"
concurrency:
  group: ${{ github.ref }}
  cancel-in-progress: true
jobs:
  build:
    runs-on: ubuntu-latest
    steps:
    - uses: actions/checkout@v4
      with:
        submodules: recursive
    - name: Set up JDK 17
      uses: actions/setup-java@v4
      with:
        java-version: '17'
        distribution: 'temurin'
        cache: maven
    - name: Fail on whitespace errors
      run: git show HEAD --check
    - name: Run Spotless
      run: mvn -s .travis.settings.xml -Dgithub.username=${{ github.actor }} -Dgithub.password=${{ secrets.GITHUB_TOKEN }} spotless:check -B
    - name: Cache Pip dependencies
      uses: actions/cache@v4
      with:
        path: ~/.cache/pip
        key: ${{ runner.os }}-pip-${{ hashFiles('**/requirements.txt') }}
        restore-keys: |
          ${{ runner.os }}-pip-
    - name: Install global requirements
      run: pip3.10 install -r requirements.txt
    - name: Run Black
      run: black --fast --check --extend-exclude \/out .
    - name: Install test requirements
      run: pip3.10 install -r edu.cuny.hunter.hybridize.tests/requirements.txt
    - name: Install with Maven
      run: mvn -U -s .travis.settings.xml -Dgithub.username=${{ github.actor }} -Dgithub.password=${{ secrets.GITHUB_TOKEN }} -Dlogging.config.file=\${maven.multiModuleProjectDirectory}/logging.ci.properties -DtrimStackTrace=true -Dtycho.showEclipseLog=false install -B -q -DskipTests=true
    - name: Print Python 3 version.
      run: python3 --version
    - name: Print Python 3.10 version.
      run: python3.10 --version
    - name: Clone our fork of PyDev
      run: |
        mkdir "$HOME/git"
        pushd "$HOME/git"
        git clone --depth=50 --branch=pydev_9_3 https://github.com/ponder-lab/Pydev.git
        popd
    - name: Test with Maven
      run: mvn -U -s .travis.settings.xml -Dgithub.username=${{ github.actor }} -Dgithub.password=${{ secrets.GITHUB_TOKEN }} -Dlogging.config.file=\${maven.multiModuleProjectDirectory}/logging.ci.properties -DtrimStackTrace=true -Dtycho.showEclipseLog=false -B verify -Pjacoco coveralls:report
3 changes: 2 additions & 1 deletion .gitignore
@@ -11,7 +11,7 @@
.mtj.tmp/

# Package Files #
*.jar
# *.jar
*.war
*.nar
*.ear
@@ -89,3 +89,4 @@ local.properties
target/
.tycho-consumer-pom.xml
results
*.versionsBackup
9 changes: 8 additions & 1 deletion CONTRIBUTING.md
@@ -4,7 +4,7 @@ Please see our [wiki] for more information regarding development.

## Eclipse Environment

The plug-ins are being developed on the following Eclipse versions. Currently, newer versions of Eclipse will not resolve M2E dependencies:
The plug-ins are being developed on the following Eclipse versions. Currently, newer versions of Eclipse will not resolve M2E dependencies. The [Eclipse Installer](https://www.eclipse.org/downloads/packages/installer) can be used to install specific versions:

Eclipse IDE for RCP and RAP Developers (includes Incubating components)

@@ -39,7 +39,14 @@ Dependency | Update Site
[PyDev] | https://raw.githubusercontent.com/ponder-lab/Pydev/pydev_9_3/org.python.pydev.updatesite
[WALA] | https://raw.githubusercontent.com/ponder-lab/WALA/v1.6/com.ibm.wala-repository

### Running the Evaluator

Use the `edu.cuny.hunter.hybridize.evaluator` plug-in project to run the evaluation. The evaluation process will produce several CSVs, as well as perform the transformation if desired (see below for details). For convenience, there is an [Eclipse launch configuration](https://wiki.eclipse.org/FAQ_What_is_a_launch_configuration%3F) that can be used to run the evaluation. The run configuration is named [`edu.cuny.hunter.hybridize.eval/Evaluate Hybridize Functions.launch`](https://github.com/ponder-lab/Hybridize-Functions-Refactoring/blob/691cbeb87be805b8bfc336e799d938a9064a5e0e/edu.cuny.hunter.hybridize.eval/Evaluate%20Hybridize%20Functions.launch). In the run configuration dialog, you can specify several arguments to the evaluator as system properties.

You can run the evaluator in several ways, either as a command or as a menu item in the menu bar. Either way, you must evaluate *entire* projects, as the evaluator collects project-level data. Information on configuring the evaluator can be found on [this wiki page][evaluator wiki].

[wiki]: https://github.com/ponder-lab/Hybridize-Functions-Refactoring/wiki
[evaluator wiki]: https://github.com/ponder-lab/Hybridize-Functions-Refactoring/wiki/Running-the-Evaluator
[PyDev]: https://github.com/ponder-lab/Pydev/tree/pydev_9_3
[Common Eclipse Refactoring Framework]: https://github.com/ponder-lab/Common-Eclipse-Refactoring-Framework
[Ariadne]: https://github.com/ponder-lab/ML
34 changes: 30 additions & 4 deletions README.md
@@ -1,27 +1,53 @@
# Hybridize-Functions-Refactoring

[![Build Status](https://app.travis-ci.com/ponder-lab/Hybridize-Functions-Refactoring.svg?token=ysqq4ZuxzD688KNytWSA&branch=main)](https://app.travis-ci.com/ponder-lab/Hybridize-Functions-Refactoring) [![Coverage Status](https://coveralls.io/repos/github/ponder-lab/Hybridize-Functions-Refactoring/badge.svg?branch=main&t=PffqbW)](https://coveralls.io/github/ponder-lab/Hybridize-Functions-Refactoring?branch=main) [![GitHub license](https://img.shields.io/badge/license-Eclipse-blue.svg)](https://github.com/khatchadourian-lab/Java-8-Stream-Refactoring/raw/master/LICENSE.txt) [![Java profiler](https://www.ej-technologies.com/images/product_banners/jprofiler_small.png)](https://www.ej-technologies.com/products/jprofiler/overview.html)
[![Build Status](https://github.com/ponder-lab/Hybridize-Functions-Refactoring/actions/workflows/maven.yml/badge.svg)](https://github.com/ponder-lab/Hybridize-Functions-Refactoring/actions/workflows/maven.yml) [![Coverage Status](https://coveralls.io/repos/github/ponder-lab/Hybridize-Functions-Refactoring/badge.svg?branch=main&t=PffqbW)](https://coveralls.io/github/ponder-lab/Hybridize-Functions-Refactoring?branch=main) [![GitHub license](https://img.shields.io/badge/license-Eclipse-blue.svg)](https://github.com/ponder-lab/Hybridize-Functions-Refactoring/raw/master/LICENSE) [![Java profiler](https://www.ej-technologies.com/images/product_banners/jprofiler_small.png)](https://www.ej-technologies.com/products/jprofiler/overview.html)

## Introduction

Refactorings for optimizing imperative TensorFlow clients for greater efficiency.
<img src="https://raw.githubusercontent.com/ponder-lab/Hybridize-Functions-Refactoring/master/edu.cuny.hunter.hybridize.ui/icons/icon.drawio.png" alt="Icon" align="left" height=150px /> Imperative Deep Learning programming is a promising paradigm for creating reliable and efficient Deep Learning programs. However, it is [challenging to write correct and efficient imperative Deep Learning programs](https://dl.acm.org/doi/10.1145/3524842.3528455) in TensorFlow (v2), a popular Deep Learning framework. TensorFlow provides a high-level API (`@tf.function`) that allows users to execute computational graphs using natural, imperative programming. Even with this API, writing efficient imperative TensorFlow programs requires careful consideration.
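
As a minimal sketch of the two execution modes (assuming a standard TensorFlow 2.x installation; the function names are illustrative only):

```python
import tensorflow as tf

def eager_add(a, b):
    return a + b  # executed eagerly, operation by operation

@tf.function  # hybridized: traced into a computational graph on first call
def graph_add(a, b):
    return a + b

x = tf.constant([1.0, 2.0])
y = tf.constant([3.0, 4.0])
print(eager_add(x, y))  # same result either way...
print(graph_add(x, y))  # ...but this call runs as a graph
```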

This tool consists of automated refactoring research prototype plug-ins for [Eclipse][eclipse] [PyDev][pydev] that assist developers in writing optimal imperative Deep Learning code in a semantics-preserving fashion. It includes refactoring preconditions and transformations that automatically determine when it is safe and potentially advantageous to migrate an eager function to a hybrid one, as well as to improve already hybrid Python functions. The approach utilizes the [WALA][wala] [Ariadne][ariadne] static analysis framework, which has been modernized for TensorFlow 2 and extended to work with modern Python constructs and whole projects. The tool also features a side-effect analysis that is used to determine whether a Python function is safe to hybridize.
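
The sketch below illustrates the kind of edit the tool aims to produce, based on the import it inserts (`from tensorflow import function`, visible in this commit's changes to `convertToHybrid()`); the function name and body are hypothetical, and the exact output depends on the preconditions and on how TensorFlow is already imported in the module:

```python
# Before: an eager function that the preconditions deem safe and profitable to hybridize.
def run_model(model, inputs):
    return model(inputs)

# After (sketch): the refactoring adds the import and the decorator.
from tensorflow import function

@function
def run_model(model, inputs):
    return model(inputs)
```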

## Screenshot

## Demonstration
![Screenshot](https://khatchad.commons.gc.cuny.edu/wp-content/blogs.dir/2880/files/2024/10/Screenshot-from-2024-10-01-13-07-03.png)

## Usage

The refactoring can be run in two different ways:

1. As a command.
   1. Select a Python code entity.
   1. Select "Hybridize function..." from the "Quick Access" dialog (CTRL-3).
1. As a menu item.
   1. Right-click on a Python code entity.
   1. Under "Refactor," choose "Hybridize function..."

Currently, the refactoring works only via the Package Explorer and Outline views. You can select a single code entity to optimize or multiple entities at once. In each case, the tool will find functions in the enclosing entity to refactor.

### Update

Due to https://github.com/ponder-lab/Hybridize-Functions-Refactoring/issues/370, only the "command" invocation currently works.

## Installation

Coming soon!

### Update Site

https://raw.githubusercontent.com/ponder-lab/Hybridize-Functions-Refactoring/main/edu.cuny.hunter.hybridize.updatesite

### Eclipse Marketplace

Coming soon!

## Contributing

For information on contributing, see [CONTRIBUTING.md][contrib].

[wiki]: https://github.com/ponder-lab/Java-8-Stream-Refactoring/wiki
[wiki]: https://github.com/ponder-lab/Hybridize-Functions-Refactoring/wiki
[eclipse]: http://eclipse.org
[contrib]: https://github.com/ponder-lab/Hybridize-Functions-Refactoring/blob/main/CONTRIBUTING.md
[pydev]: http://www.pydev.org/
[wala]: https://github.com/wala/WALA
[ariadne]: https://github.com/wala/ML
3 changes: 2 additions & 1 deletion edu.cuny.hunter.hybridize.core/.gitignore
@@ -1,2 +1,3 @@
bin
bin/
/target/
lib/
2 changes: 1 addition & 1 deletion edu.cuny.hunter.hybridize.core/META-INF/MANIFEST.MF
@@ -2,7 +2,7 @@ Manifest-Version: 1.0
Bundle-ManifestVersion: 2
Bundle-Name: %Bundle-Name
Bundle-SymbolicName: edu.cuny.hunter.hybridize.core;singleton:=true
Bundle-Version: 1.0.0.qualifier
Bundle-Version: 1.1.0.qualifier
Bundle-Vendor: %Bundle-Vendor
Bundle-ClassPath: .
Automatic-Module-Name: edu.cuny.hunter.hybridize.core
24 changes: 24 additions & 0 deletions edu.cuny.hunter.hybridize.core/abseil.xml
@@ -11,6 +11,13 @@
<putfield class="LRoot" field="app" fieldType="LRoot" ref="x" value="app" />
<new def="run" class="Labsl/run" />
<putfield class="LRoot" field="run" fieldType="LRoot" ref="app" value="run" />
<!-- https://abseil.io/docs/python/guides/testing -->
<new def="testing" class="Lobject" />
<putfield class="LRoot" field="testing" fieldType="LRoot" ref="x" value="testing" />
<new def="parameterized" class="Lobject" />
<putfield class="LRoot" field="parameterized" fieldType="LRoot" ref="testing" value="parameterized" />
<new def="named_parameters" class="Labsl/class/named_parameters" />
<putfield class="LRoot" field="named_parameters" fieldType="LRoot" ref="parameterized" value="named_parameters" />
<return value="x" />
</method>
</class>
@@ -23,5 +30,22 @@
</method>
</class>
</package>
<package name="absl/class">
<class name="NamedParameters" allocatable="true">
<method name="do" descriptor="()LRoot;" numArgs="2" paramNames="self test">
<putfield class="LRoot" field="params" fieldType="LRoot" ref="test" value="self" />
<return value="test" />
</method>
</class>
<class name="named_parameters" allocatable="true">
<method name="do" descriptor="()LRoot;" numArgs="4" paramNames="self params values extra">
<new def="closure" class="Labsl/class/NamedParameters" />
<putfield class="LRoot" field="test" fieldType="LRoot" ref="closure" value="self" />
<putfield class="LRoot" field="params" fieldType="LRoot" ref="closure" value="params" />
<putfield class="LRoot" field="values" fieldType="LRoot" ref="closure" value="values" />
<return value="closure" />
</method>
</class>
</package>
</classloader>
</summary-spec>
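
The summary above appears intended to model client code along the lines of the following standard `absl.testing.parameterized` usage (see the Abseil testing guide linked in the XML comment); this snippet is illustrative and not taken from this repository:

```python
from absl.testing import parameterized

class SquareTest(parameterized.TestCase):
    @parameterized.named_parameters(
        ("zero", 0, 0),
        ("two", 2, 4),
    )
    def test_square(self, value, expected):
        # Each named parameter tuple produces a separately named test case.
        self.assertEqual(value ** 2, expected)
```
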
2 changes: 1 addition & 1 deletion edu.cuny.hunter.hybridize.core/pom.xml
@@ -4,7 +4,7 @@
<parent>
<groupId>edu.cuny.hunter.hybridize</groupId>
<artifactId>edu.cuny.hunter.hybridize</artifactId>
<version>1.0.0-SNAPSHOT</version>
<version>1.1.0-SNAPSHOT</version>
<relativePath>..</relativePath>
</parent>
<artifactId>edu.cuny.hunter.hybridize.core</artifactId>
@@ -17,7 +17,6 @@
import static edu.cuny.hunter.hybridize.core.wala.ml.PythonModRefWithBuiltinFunctions.PythonModVisitorWithBuiltinFunctions.GLOBAL_OUTPUT_STREAM_POINTER_KEY;
import static java.lang.Boolean.FALSE;
import static java.lang.Boolean.TRUE;
import static java.util.Collections.emptySet;
import static org.eclipse.core.runtime.Platform.getLog;
import static org.python.pydev.parser.visitors.NodeUtils.getFullRepresentationString;
import static org.python.pydev.parser.visitors.NodeUtils.getOffset;
@@ -129,7 +128,7 @@ public class Function {
/**
* Used for speculative analysis of the function name.
*/
private static final String FUNCTION_NAME_CONTEXT_REGEX = ".*(train|test).*_step|call|__call__|run_model";
private static final String FUNCTION_NAME_CONTEXT_REGEX = ".*(train|test).*_step|call|__call__|run_model|.*inference";

private final class FunctionStatusContext extends RefactoringStatusContext {
@Override
@@ -356,12 +355,12 @@ public boolean isReduceRetracingParamExists() {

private static final String SELF_PARAMETER_NAME = "self";

private static Map<TensorTypeAnalysis, Set<InstanceKey>> tensorContainersCache = Maps.newHashMap();
private static Map<TensorTypeAnalysis, Set<InstanceKey>> tensorContainersCache = Maps.newConcurrentMap();

/**
* Containing {@link IDocument}s that have had import statements added to them during transformation.
* Containing {@link File}s that have had import statements added to them during transformation.
*/
private static Set<IDocument> documentsWithAddedImport = new HashSet<>();
private static Set<File> filesWithAddedImport = new HashSet<>();

private static final String TF_FUNCTION_FQN = "tensorflow.python.eager.def_function.function";

@@ -436,8 +435,7 @@ private static boolean allCreationsWithinClosureInteral(MethodReference methodRe
cache2.put(instanceKey, cache3);
}

Boolean previous = cache3.put(callGraph, result);
assert previous == null : "Should be a new key.";
cache3.put(callGraph, result);

return result;
}
@@ -474,7 +472,7 @@ private static boolean allCreationsWithinClosureInteral2(MethodReference methodR
public static void clearCaches() {
creationsCache.clear();
tensorContainersCache.clear();
documentsWithAddedImport.clear();
filesWithAddedImport.clear();
}

/**
@@ -1007,8 +1005,8 @@ public void computeHybridization(IProgressMonitor monitor) throws BadLocationExc
try {
selection = Util.getSelection(decorator, document);
hybrid = isHybrid(decorator, containingModuleName, containingFile, selection, nature, monitor);
} catch (AmbiguousDeclaringModuleException | BadLocationException | NoDeclaringModuleException | NoTextSelectionException
| RuntimeException e) {
} catch (AmbiguousDeclaringModuleException | BadLocationException | NoDeclaringModuleException
| NoTextSelectionException e) {
String selectedText = null;
try {
selectedText = selection == null ? "(can't compute)" : selection.getSelectedText();
@@ -1641,7 +1639,7 @@ public void inferTensorTensorParameters(TensorTypeAnalysis tensorAnalysis, CallG
monitor.done();
}

private boolean hasTensorContext() throws NoTextSelectionException {
private boolean hasTensorContext() {
String functionName = this.getSimpleName();
boolean matches = functionName.matches(FUNCTION_NAME_CONTEXT_REGEX);

@@ -1663,22 +1661,34 @@ private boolean hasTensorContext() throws NoTextSelectionException {
return matches;
}

private Set<String> getAllClassParentNames(boolean onlyLastSegment) throws NoTextSelectionException {
private Set<String> getAllClassParentNames(boolean onlyLastSegment) {
Set<String> ret = new HashSet<>();
SimpleNode node = this.getFunctionDefinition().getFunctionDef().parent;

if (node instanceof ClassDef) {
ClassDef def = (ClassDef) node;

PySelection selection = Util.getSelection(def.name, getContainingDocument());
RefactoringRequest request = new RefactoringRequest(getContainingFile(), selection, getNature());
IPyRefactoring2 refactoring = (Refactorer) AbstractPyRefactoring.getPyRefactoring();
HierarchyNodeModel hierarchyNode = refactoring.findClassHierarchy(request, true);
assert def.equals(hierarchyNode.ast) : "The first node in the class hierarchy should be this class.";
PySelection selection = null;
try {
selection = Util.getSelection(def.name, getContainingDocument());
} catch (NoTextSelectionException e) {
LOG.info("Can't get class parent names for: " + this + " with enclosing class: " + def + " with name:" + def.name, e);
}

if (selection != null) {
RefactoringRequest request = new RefactoringRequest(getContainingFile(), selection, getNature());
IPyRefactoring2 refactoring = (Refactorer) AbstractPyRefactoring.getPyRefactoring();
HierarchyNodeModel hierarchyNode = refactoring.findClassHierarchy(request, true);

if (hierarchyNode != null)
return getAllParentNames(hierarchyNode, onlyLastSegment);
}

return getAllParentNames(hierarchyNode, onlyLastSegment);
// otherwise, just traverse the base in this AST node.
ret.addAll(NodeUtils.getParentNames(def, onlyLastSegment));
}

return emptySet();
return ret;
}

public boolean isHybridizationAvailable() {
@@ -1734,7 +1744,6 @@ private boolean matches(exprType lhsParamExpr, String lhsParamName, LocalPointer

int paramIndex = rhsPointerKey.getValueNumber() - 1;
Position parameterPosition = astMethod.getParameterPosition(paramIndex);
LOG.info(rhsPointerKey + " position is: " + parameterPosition + ".");

if (parameterPosition != null) {
int rhsBeginColumn = parameterPosition.getFirstCol() + 1; // workaround https://github.com/jython/jython3/issues/48.
@@ -1746,10 +1755,7 @@
return lhsBeginColumn == rhsBeginColumn && lhsBeginLine == rhsBeginLine;
}
}

LOG.info(containingFile.getName() + " does not match: " + sourceFileName + ".");
} else
LOG.warn("Encountered non-AST method: " + nodeMethod + ".");
}

return false;
}
@@ -1838,7 +1844,11 @@ private boolean tensorAnalysisIncludesParameterContainer(TensorTypeAnalysis tens
IR ir = node.getIR();
int i = paramInx + 1;

assert i < ir.getNumberOfParameters() : "Parameter index (" + i + ") must be inbounds (" + ir.getNumberOfParameters() + ").";
if (i >= ir.getNumberOfParameters()) {
LOG.warn("Parameter index (" + i + ") must be inbounds (" + ir.getNumberOfParameters() + "). Skipping: "
+ ir.getMethod().getSignature());
continue;
}

int param = ir.getParameter(i); // the first argument is the function being invoked.

@@ -1949,15 +1959,17 @@ private List<TextEdit> convertToHybrid() throws BadLocationException {

if (prefix == null) {
// need to add an import if it doesn't already exist.
if (!documentsWithAddedImport.contains(doc)) {
File file = this.getContainingFile();

if (!filesWithAddedImport.contains(file)) {
int line = getLineToInsertImport(doc);
int lineOffset = doc.getLineOffset(line);

TextEdit edit = new InsertEdit(lineOffset, "from tensorflow import function");
TextEdit edit = new InsertEdit(lineOffset, "from tensorflow import function\n");
MultiTextEdit mte = new MultiTextEdit();
mte.addChild(edit);
ret.add(mte);
documentsWithAddedImport.add(doc);
filesWithAddedImport.add(file);
}

prefix = ""; // no prefix needed.
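
The `FUNCTION_NAME_CONTEXT_REGEX` change earlier in this file broadens the speculative name-based heuristic to also cover `.*inference`. A quick way to see which function names the pattern accepts (a sketch using Python's `re`; `re.fullmatch` mirrors Java's `String.matches`, which anchors the whole string):

```python
import re

# Pattern from Function.java after this commit.
FUNCTION_NAME_CONTEXT_REGEX = r".*(train|test).*_step|call|__call__|run_model|.*inference"

for name in ["train_step", "distributed_test_step", "call", "__call__",
             "run_model", "run_inference", "forward"]:
    # Java's String.matches() requires a full match, so use re.fullmatch here.
    print(name, bool(re.fullmatch(FUNCTION_NAME_CONTEXT_REGEX, name)))
# Everything except "forward" matches under the broadened pattern.
```
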
@@ -2,6 +2,4 @@

public enum PreconditionSuccess {
P1, P2, P3,
// P4,
// P5
}