Snapshot idea/138.1980 from git://git.jetbrains.org/idea/community.git

Change-Id: Ib567c9c152d770212a7a3db20fbf591c210920bd
diff --git a/python/build/pycharm_community_build.gant b/python/build/pycharm_community_build.gant
index cfaaf33..0bb18ff 100644
--- a/python/build/pycharm_community_build.gant
+++ b/python/build/pycharm_community_build.gant
@@ -15,7 +15,7 @@
 // load ApplicationInfo.xml properties
 ant.xmlproperty(file: "$pythonCommunityHome/resources/idea/PyCharmCoreApplicationInfo.xml", collapseAttributes: "true")
 
-setProperty("system_selector", "PyCharm${p("component.version.major")}0")
+setProperty("system_selector", "PyCharmCE${p("component.version.major")}0")
 setProperty("dryRun", false)
 setProperty("jdk16", guessJdk())
 
diff --git a/python/edu/build/desktop.ini b/python/edu/build/desktop.ini
index f56d43c..0ea32de 100644
--- a/python/edu/build/desktop.ini
+++ b/python/edu/build/desktop.ini
@@ -28,7 +28,7 @@
 [Field 4]
 Type=RadioButton
 Left=5
-Right=45
+Right=100
 Top=50
 Bottom=60
 State=1
@@ -36,8 +36,8 @@
 
 [Field 5]
 Type=RadioButton
-Left=95
-Right=135
+Left=120
+Right=-1
 Top=50
 Bottom=60
 State=0
diff --git a/python/edu/build/idea.nsi b/python/edu/build/idea.nsi
index d9903c9..05c3757 100644
--- a/python/edu/build/idea.nsi
+++ b/python/edu/build/idea.nsi
@@ -23,6 +23,7 @@
 ;------------------------------------------------------------------------------
 !include "MUI2.nsh"
 !include "FileFunc.nsh"
+!include "TextFunc.nsh"
 !include UAC.nsh
 !include "InstallOptions.nsh"
 !include StrFunc.nsh
@@ -32,6 +33,7 @@
 ${UnStrLoc}
 ${UnStrRep}
 ${StrRep}
+${StrTok}
 
 ReserveFile "desktop.ini"
 ReserveFile "DeleteSettings.ini"
@@ -700,49 +702,69 @@
     '$INSTDIR\bin\${PRODUCT_EXE_FILE} "%1"'
 FunctionEnd
 
+Function getPythonInfo
+  ClearErrors
+  FileOpen $3 $INSTDIR\python\python.txt r
+  IfErrors cantOpenFile ;file cannot be opened
+  ;get python2 info
+  FileRead $3 $4
+  ${StrTok} $0 $4 " " "1" "1"
+  ${StrTok} $1 $4 " " "2" "1"
+  ;get python3 info
+  FileRead $3 $4
+  ${StrTok} $R0 $4 " " "1" "1"
+  ${StrTok} $R1 $4 " " "2" "1"
+  goto Done
+cantOpenFile:
+  MessageBox MB_OK|MB_ICONEXCLAMATION "python.txt does not exist. Python will not be downloaded."
+  StrCpy $0 "Error"
+Done:
+FunctionEnd
+
+
 ;------------------------------------------------------------------------------
 ; Installer sections
 ;------------------------------------------------------------------------------
 Section "IDEA Files" CopyIdeaFiles
-;  StrCpy $baseRegKey "HKCU"
-;  !insertmacro INSTALLOPTIONS_READ $R2 "Desktop.ini" "Field 3" "State"
-;  StrCmp $R2 1 continue_for_current_user
-;  SetShellVarContext all
-;  StrCpy $baseRegKey "HKLM"
-;  continue_for_current_user:
-
-; create shortcuts
-
+  ${LineSum} "$INSTDIR\python\python.txt" $R0
+  IfErrors cantOpenFile
+  StrCmp $R0 "2" getPythonInfo ;expects info for both Python 2 and Python 3
+cantOpenFile:  
+  MessageBox MB_OK|MB_ICONEXCLAMATION "python.txt is invalid. Python will not be downloaded."
+  goto skip_python_download
+getPythonInfo:  
+  Call getPythonInfo
+  StrCmp $0 "Error" skip_python_download
   !insertmacro INSTALLOPTIONS_READ $R2 "Desktop.ini" "Field 4" "State"
   StrCmp $R2 1 "" python3
-  StrCpy $R2 "2.7"
+  StrCpy $R2 $0
+  StrCpy $R3 $1
   goto check_python
 python3:  
-  StrCpy $R2 "3.4"
+  StrCpy $R2 $R0
+  StrCpy $R3 $R1
 check_python:  
-  ReadRegStr $1 "HKCU" "Software\Python\PythonCore\$R2\InstallPath" $0
+  ReadRegStr $1 "HKCU" "Software\Python\PythonCore\$R2\InstallPath" ""
   StrCmp $1 "" installation_for_all_users
   goto verefy_python_launcher
 installation_for_all_users:
-  ReadRegStr $1 "HKLM" "Software\Python\PythonCore\$R2\InstallPath" $0
+  ReadRegStr $1 "HKLM" "Software\Python\PythonCore\$R2\InstallPath" ""
   StrCmp $1 "" get_python
 verefy_python_launcher:
   IfFileExists $1\python.exe python_exists get_python
-
-get_python:
-  CreateDirectory "$INSTDIR\python"
-  StrCmp $R2 "2.7" get_python2
-  inetc::get "https://www.python.org/ftp/python/3.4.1/python-3.4.1.amd64.msi" "$INSTDIR\python\python_$R2.msi"
+get_python:  
+  inetc::get "$R3" "$INSTDIR\python\python_$R2.msi"
   goto validate_download
-get_python2:  
-  inetc::get "http://www.python.org/ftp/python/2.7.8/python-2.7.8.msi" "$INSTDIR\python\python_$R2.msi"
-validate_download:  
+validate_download:
   Pop $0
   ${If} $0 == "OK" 
-    ExecCmd::exec 'msiexec /i "$INSTDIR\python\python_$R2.msi" /quiet /qn /norestart /log "$INSTDIR\python\python_$R2_silent.log"'
+    ExecCmd::exec 'msiexec /i "$INSTDIR\python\python_$R2.msi" /quiet /qn /norestart'
+  ${Else}
+    MessageBox MB_OK|MB_ICONEXCLAMATION "The download failed"
   ${EndIf}
-
 python_exists:  
+skip_python_download:  
+; create shortcuts
   !insertmacro INSTALLOPTIONS_READ $R2 "Desktop.ini" "Field 1" "State"
   StrCmp $R2 1 "" skip_desktop_shortcut
   CreateShortCut "$DESKTOP\${PRODUCT_FULL_NAME_WITH_VER}.lnk" \
@@ -882,6 +904,21 @@
   ${If} $0 == "1"
     !insertmacro INSTALLOPTIONS_WRITE "Desktop.ini" "Field 2" "Flags" "DISABLED"
   ${EndIf}
+  CreateDirectory "$INSTDIR\python"
+  inetc::get "http://www.jetbrains.com/updates/python.txt" "$INSTDIR\python\python.txt"
+  ${LineSum} "$INSTDIR\python\python.txt" $R0
+  IfErrors cantOpenFile
+  StrCmp $R0 "2" getPythonInfo
+cantOpenFile:  
+  MessageBox MB_OK|MB_ICONEXCLAMATION "python.txt does not exist. Python will not be downloaded."
+  goto association
+getPythonInfo:  
+  Call getPythonInfo
+  StrCmp $0 "Error" association
+  !insertmacro INSTALLOPTIONS_WRITE "Desktop.ini" "Field 4" "Text" "Python $0"
+  !insertmacro INSTALLOPTIONS_WRITE "Desktop.ini" "Field 5" "Text" "Python $R0"
+
+association:
   StrCmp "${ASSOCIATION}" "NoAssociation" skip_association
   StrCpy $R0 6
   push "${ASSOCIATION}"
diff --git a/python/edu/build/paths.nsi b/python/edu/build/paths.nsi
index 910f2b3..d854929 100644
--- a/python/edu/build/paths.nsi
+++ b/python/edu/build/paths.nsi
@@ -1,5 +1,5 @@
 ; Installer images
-!define IMAGES_LOCATION ${COMMUNITY_DIR}\python\build\resources
+!define IMAGES_LOCATION ${COMMUNITY_DIR}\python\edu\build\resources
 ;!define LICENSE_FILE ${BASE_DIR}\python\license\PyCharm_Preview_License
 !define PRODUCT_PROPERTIES_FILE ${BASE_DIR}\out\pycharmEDU\layout\bin\idea.properties
 !define PRODUCT_VM_OPTIONS_NAME pycharm.exe.vmoptions
diff --git a/python/edu/build/plugin-list.txt b/python/edu/build/plugin-list.txt
index bc4b94c..60ecf3f 100644
--- a/python/edu/build/plugin-list.txt
+++ b/python/edu/build/plugin-list.txt
@@ -4,4 +4,5 @@
 github
 IntelliLang
 IntelliLang-python
-learn-python
\ No newline at end of file
+learn-python
+course-creator
\ No newline at end of file
diff --git a/python/edu/build/pycharm_edu_build.gant b/python/edu/build/pycharm_edu_build.gant
index 2ef0bed..28d829e 100644
--- a/python/edu/build/pycharm_edu_build.gant
+++ b/python/edu/build/pycharm_edu_build.gant
@@ -22,14 +22,17 @@
 includeTargets << new File("$home/build/scripts/ultimate_utils.gant")
 
 requireProperty("buildNumber", requireProperty("build.number", snapshot))
-setProperty("buildName", "PE-$buildNumber")
+setProperty("buildName", "EDU-$buildNumber")
 setProperty("ch", "$home/community")
 setProperty("pythonCommunityHome", "$ch/python")
 setProperty("pythonEduHome", "$ch/python/edu")
+requireProperty("jdk_bundled_mac", "1.7")
+def jdk_bundled_version = p("jdk_bundled_mac") == "1.8" ? "jdk8_mac_redist.tar" : "jdk_mac_redist.tar"
+ant.copy(file: "${home}/build/jdk/${jdk_bundled_version}", tofile: "${home}/build/jdk/jdk_mac_redist_for_${buildNumber}.tar")
 
 // load ApplicationInfo.xml properties
 ant.xmlproperty(file: "$pythonEduHome/resources/idea/PyCharmEduApplicationInfo.xml", collapseAttributes: "true")
-setProperty("system_selector", "PyCharm${p("component.version.major")}0")
+setProperty("system_selector", "PyCharmEdu${p("component.version.major")}0")
 setProperty("dryRun", false)
 setProperty("jdk16", guessJdk())
 
@@ -89,7 +92,6 @@
 }
 
 setProperty("paths", new Paths(home))
-setProperty("buildName", "PE-$buildNumber")
 
 target('default': "Build artifacts") {
 
@@ -196,12 +198,12 @@
 
   buildNSIS([paths.distAll, paths.distWin],
             "$pythonEduHome/build/strings.nsi", "$pythonEduHome/build/paths.nsi",
-            "pycharmPE-", false, true, ".py", system_selector)
+            "pycharmEDU-", false, true, ".py", system_selector)
 
-  String tarRoot = isEap() ? "pycharm-pe-$buildNumber" : "pycharm-pe-${p("component.version.major")}.${p("component.version.minor")}"
+  String tarRoot = isEap() ? "pycharm-edu-$buildNumber" : "pycharm-edu-${p("component.version.major")}.${p("component.version.minor")}"
   buildTarGz(tarRoot, "$paths.artifacts/pycharm${buildName}.tar", [paths.distAll, paths.distUnix])
 
-  String macAppRoot = isEap() ? "PyCharm PE ${p("component.version.major")}.${p("component.version.minor")} EAP.app/Contents" : "PyCharm PE.app/Contents"
+  String macAppRoot = isEap() ? "PyCharm Educational ${p("component.version.major")}.${p("component.version.minor")} EAP.app/Contents" : "PyCharm Educational.app/Contents"
   buildMacZip(macAppRoot, "${paths.artifacts}/pycharm${buildName}.sit", [paths.distAll], paths.distMac)
   ant.copy(file: "${paths.artifacts}/pycharm${buildName}.sit", tofile: "${paths.artifacts}/pycharm${buildName}-jdk-bundled.sit")
   ant.delete(file: "${paths.artifacts}/pycharm${buildName}.sit")
@@ -214,6 +216,7 @@
         fileset(dir: "$pythonEduHome/learn-python/resources/courses")
       }
     }
+    layouts.layoutPlugin("course-creator")
   }
 
   layouts.layoutCommunityPlugins(ch)
@@ -363,7 +366,7 @@
   winScripts(target, ch, "pycharm.bat", args)
   winVMOptions(target, null, "pycharm.exe")
 
-  ant.copy(file: "$home/python/help/pycharmhelp.jar", todir: "$target/help", failonerror: false)
+  ant.copy(file: "$home/python/help/pycharm-eduhelp.jar", todir: "$target/help", failonerror: false)
 }
 
 private layoutUnix(Map args, String target) {
@@ -380,7 +383,7 @@
   unixScripts(target, ch, "pycharm.sh", args)
   unixVMOptions(target, "pycharm")
 
-  ant.copy(file: "$home/python/help/pycharmhelp.jar", todir: "$target/help", failonerror: false)
+  ant.copy(file: "$home/python/help/pycharm-eduhelp.jar", todir: "$target/help", failonerror: false)
 }
 
 private layoutMac(Map _args, String target) {
diff --git a/python/edu/build/python.txt b/python/edu/build/python.txt
new file mode 100644
index 0000000..d6f247d
--- /dev/null
+++ b/python/edu/build/python.txt
@@ -0,0 +1,2 @@
+python2 2.7 https://www.python.org/ftp/python/2.7.8/python-2.7.8.amd64.msi
+python3 3.4 https://www.python.org/ftp/python/3.4.1/python-3.4.1.amd64.msi
\ No newline at end of file
diff --git a/python/edu/build/resources/logo.bmp b/python/edu/build/resources/logo.bmp
new file mode 100644
index 0000000..9ff6698
--- /dev/null
+++ b/python/edu/build/resources/logo.bmp
Binary files differ
diff --git a/python/edu/build/resources/logo.png b/python/edu/build/resources/logo.png
deleted file mode 100644
index 1ccbc07..0000000
--- a/python/edu/build/resources/logo.png
+++ /dev/null
Binary files differ
diff --git a/python/edu/build/upload_pythonInfo.xml b/python/edu/build/upload_pythonInfo.xml
new file mode 100644
index 0000000..f8d9477
--- /dev/null
+++ b/python/edu/build/upload_pythonInfo.xml
@@ -0,0 +1,32 @@
+<project name="Upload python.txt to jetbrains.com. Effective in half an hour" default="bootstrap">
+  <property name="home" value="${basedir}/../../../.."/>
+  <target name="upload">
+    <xmlvalidate file="${home}/build/eap/updates.xml"/>
+
+    <property name="host" value="ftp.labs.intellij.net"/>
+    <property name="user" value="idea"/>
+    <property name="password" value="4pawoMauoJjjlxpIl3XG"/>
+
+    <ftp server="${host}" action="send" binary="false" remotedir="updates" userid="${user}" password="${password}">
+      <fileset file="${home}/community/python/edu/build/python.txt"/>
+    </ftp>
+  </target>
+
+  <target name="bootstrap">
+    <java failonerror="true" classname="org.apache.tools.ant.Main" fork="true">
+      <classpath>
+        <fileset dir="${home}/community/lib/ant/lib">
+          <include name="*.jar"/>
+        </fileset>
+        <fileset dir="${home}/community/lib">
+          <include name="commons-net-3.1.jar"/>
+          <include name="jsch-0.1.50.jar"/>
+        </fileset>
+      </classpath>
+
+      <arg value="-f"/>
+      <arg value="${ant.file}"/>
+      <arg value="upload"/>
+    </java>
+  </target>
+</project>
diff --git a/python/edu/course-creator/course-creator.iml b/python/edu/course-creator/course-creator.iml
new file mode 100644
index 0000000..fc8c6a1
--- /dev/null
+++ b/python/edu/course-creator/course-creator.iml
@@ -0,0 +1,16 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<module type="JAVA_MODULE" version="4">
+  <component name="NewModuleRootManager" inherit-compiler-output="true">
+    <exclude-output />
+    <content url="file://$MODULE_DIR$">
+      <sourceFolder url="file://$MODULE_DIR$/src" isTestSource="false" />
+      <sourceFolder url="file://$MODULE_DIR$/resources" type="java-resource" />
+    </content>
+    <orderEntry type="inheritedJdk" />
+    <orderEntry type="sourceFolder" forTests="false" />
+    <orderEntry type="module" module-name="python-community" />
+    <orderEntry type="module" module-name="lang-impl" />
+    <orderEntry type="library" name="gson" level="project" />
+  </component>
+</module>
+
diff --git a/python/edu/course-creator/resources/META-INF/plugin.xml b/python/edu/course-creator/resources/META-INF/plugin.xml
new file mode 100644
index 0000000..6a9a9ea
--- /dev/null
+++ b/python/edu/course-creator/resources/META-INF/plugin.xml
@@ -0,0 +1,60 @@
+<idea-plugin version="2">
+  <id>org.jetbrains.plugins.coursecreator</id>
+  <name>Course Creator for PyCharm Educational</name>
+  <version>1.0</version>
+
+  <description><![CDATA[
+      This plugin allows you to create a new course for PyCharm Educational Edition.
+    ]]></description>
+
+  <change-notes><![CDATA[
+    ]]>
+  </change-notes>
+  <!-- please see http://confluence.jetbrains.com/display/IDEADEV/Build+Number+Ranges for description -->
+
+  <!-- please see http://confluence.jetbrains.com/display/IDEADEV/Plugin+Compatibility+with+IntelliJ+Platform+Products
+       on how to target different products -->
+  <!-- uncomment to enable plugin in all products
+  <depends>com.intellij.modules.lang</depends>
+  -->
+
+  <depends>com.intellij.modules.python</depends>
+
+  <extensions defaultExtensionNs="com.intellij">
+    <directoryProjectGenerator implementation="org.jetbrains.plugins.coursecreator.CCProjectGenerator"/>
+    <projectService serviceImplementation="org.jetbrains.plugins.coursecreator.CCProjectService"/>
+    <codeInsight.lineMarkerProvider language="Python"
+                                    implementationClass="org.jetbrains.plugins.coursecreator.highlighting.CCTaskLineMarkerProvider"/>
+    <treeStructureProvider implementation="org.jetbrains.plugins.coursecreator.projectView.CCTreeStructureProvider"/>
+  </extensions>
+
+  <application-components>
+    <!-- Add your application components here -->
+  </application-components>
+
+  <project-components>
+    <!-- Add your project components here -->
+    <component>
+      <implementation-class>org.jetbrains.plugins.coursecreator.CCProjectComponent</implementation-class>
+    </component>
+  </project-components>
+
+  <actions>
+    <action id="CreateLesson" class="org.jetbrains.plugins.coursecreator.actions.CreateLesson">
+      <add-to-group group-id="NewGroup" anchor="before" relative-to-action="NewFile"/>
+    </action>
+    <action id="CreateTaskFile" class="org.jetbrains.plugins.coursecreator.actions.CreateTaskFile">
+      <add-to-group group-id="NewGroup" anchor="before" relative-to-action="NewFile"/>
+    </action>
+    <action id="CreateTask" class="org.jetbrains.plugins.coursecreator.actions.CreateTask">
+      <add-to-group group-id="NewGroup" anchor="before" relative-to-action="NewFile"/>
+    </action>
+    <action id="AddTaskWindow" class="org.jetbrains.plugins.coursecreator.actions.AddTaskWindow">
+      <add-to-group group-id="EditorPopupMenu" anchor="before" relative-to-action="CopyReference"/>
+    </action>
+    <action id="PackCourse" class="org.jetbrains.plugins.coursecreator.actions.CreateCourseArchive">
+      <add-to-group group-id="MainToolBar" anchor="last" />
+    </action>
+  </actions>
+
+</idea-plugin>
\ No newline at end of file
diff --git a/python/edu/course-creator/resources/fileTemplates/internal/task.html.ft b/python/edu/course-creator/resources/fileTemplates/internal/task.html.ft
new file mode 100644
index 0000000..f683349
--- /dev/null
+++ b/python/edu/course-creator/resources/fileTemplates/internal/task.html.ft
@@ -0,0 +1,4 @@
+<html>
+Write your task text here.
+<br>
+</html>
\ No newline at end of file
diff --git a/python/edu/course-creator/resources/fileTemplates/internal/task.py.ft b/python/edu/course-creator/resources/fileTemplates/internal/task.py.ft
new file mode 100644
index 0000000..0256e10
--- /dev/null
+++ b/python/edu/course-creator/resources/fileTemplates/internal/task.py.ft
@@ -0,0 +1 @@
+# TODO: type solution here
\ No newline at end of file
diff --git a/python/edu/course-creator/resources/fileTemplates/internal/test_helper.py.ft b/python/edu/course-creator/resources/fileTemplates/internal/test_helper.py.ft
new file mode 100644
index 0000000..1182b78
--- /dev/null
+++ b/python/edu/course-creator/resources/fileTemplates/internal/test_helper.py.ft
@@ -0,0 +1,127 @@
+import sys
+
+def get_file_text(path):
+    """ get file text by path"""
+    file_io = open(path, "r")
+    text = file_io.read()
+    file_io.close()
+    return text
+
+def get_file_output(path):
+    # TODO: get file output by path
+    return ""
+
+def test_file_importable():
+    """ tests there is no obvious syntax errors"""
+    path = sys.argv[-1]
+    try:
+        import_file(path)
+    except ImportError:
+        failed("File contains syntax errors")
+        return
+    except SyntaxError:
+        failed("File contains syntax errors")
+        return
+    except NameError:
+        failed("File contains syntax errors")
+        return
+
+    passed()
+
+def import_file(path):
+    """ returns imported file """
+    import imp
+    tmp = imp.load_source('tmp', path)
+    return tmp
+
+def import_task_file():
+    """ returns imported file """
+    path = sys.argv[-1]
+    return import_file(path)
+
+def test_is_not_empty():
+    path = sys.argv[-1]
+    file_text = get_file_text(path)
+
+    if len(file_text) > 0:
+        passed()
+    else:
+        failed("The file is empty. Please, reload the task and try again.")
+
+def test_is_initial_text(error_text="You should modify the file"):
+    path = sys.argv[-1]
+    text = get_initial_text(path)
+    file_text = get_file_text(path)
+
+    if file_text.strip() == text:
+        failed(error_text)
+    else:
+        passed()
+
+def get_initial_text(path):
+    course_lib = sys.argv[-2]
+
+    import os
+    # path format is "project_root/lessonX/taskY/file.py"
+    task_index = path.rfind(os.sep, 0, path.rfind(os.sep))
+    index = path.rfind(os.sep, 0, task_index)
+    relative_path = path[index+1:]
+    initial_file_path = os.path.join(course_lib, relative_path)
+    return get_file_text(initial_file_path)
+
+
+def test_text_equals(text, error_text):
+    path = sys.argv[-1]
+    file_text = get_file_text(path)
+
+    if file_text.strip() == text:
+        passed()
+    else:
+        failed(error_text)
+
+def test_window_text_deleted(error_text="Don't just delete task text"):
+    windows = get_task_windows()
+
+    for window in windows:
+        if len(window) == 0:
+            failed(error_text)
+            return
+    passed()
+
+
+def failed(message="Please, reload the task and try again."):
+    print("#study_plugin FAILED + " + message)
+
+def passed():
+    print("#study_plugin test OK")
+
+def get_task_windows():
+    prefix = "#study_plugin_window = "
+    path = sys.argv[-1]
+    import os
+    windows_path = os.path.splitext(path)[0] + "_windows"
+    windows = []
+    f = open(windows_path, "r")
+    window_text = ""
+    first = True
+    for line in f.readlines():
+        if line.startswith(prefix):
+            if not first:
+                windows.append(window_text.strip())
+            else:
+                first = False
+            window_text = line[len(prefix):]
+        else:
+            window_text += line
+
+    if window_text:
+        windows.append(window_text.strip())
+
+    f.close()
+    return windows
+
+def run_common_tests(error_text="Please, reload file and try again"):
+    test_file_importable()
+    test_is_not_empty()
+    test_is_initial_text(error_text)
+    test_window_text_deleted(error_text)
\ No newline at end of file
diff --git a/python/edu/course-creator/resources/fileTemplates/internal/tests.py.ft b/python/edu/course-creator/resources/fileTemplates/internal/tests.py.ft
new file mode 100644
index 0000000..2e6fd4c
--- /dev/null
+++ b/python/edu/course-creator/resources/fileTemplates/internal/tests.py.ft
@@ -0,0 +1,17 @@
+from test_helper import run_common_tests, failed, passed, get_task_windows
+
+
+def test_task_windows():
+    windows = get_task_windows()
+    window = windows[0]
+    if window != "":       # TODO: your condition here
+        passed()
+    else:
+        failed()
+
+
+if __name__ == '__main__':
+    run_common_tests()
+    test_task_windows()
+
+
diff --git a/python/edu/course-creator/resources/icons/gutter.png b/python/edu/course-creator/resources/icons/gutter.png
new file mode 100644
index 0000000..244e6ca
--- /dev/null
+++ b/python/edu/course-creator/resources/icons/gutter.png
Binary files differ
diff --git a/python/edu/course-creator/src/org/jetbrains/plugins/coursecreator/CCEditorFactoryListener.java b/python/edu/course-creator/src/org/jetbrains/plugins/coursecreator/CCEditorFactoryListener.java
new file mode 100644
index 0000000..1eb8690
--- /dev/null
+++ b/python/edu/course-creator/src/org/jetbrains/plugins/coursecreator/CCEditorFactoryListener.java
@@ -0,0 +1,80 @@
+package org.jetbrains.plugins.coursecreator;
+
+import com.intellij.openapi.editor.Document;
+import com.intellij.openapi.editor.Editor;
+import com.intellij.openapi.editor.event.EditorFactoryEvent;
+import com.intellij.openapi.editor.event.EditorFactoryListener;
+import com.intellij.openapi.fileEditor.FileDocumentManager;
+import com.intellij.openapi.project.Project;
+import com.intellij.openapi.vfs.VirtualFile;
+import org.jetbrains.annotations.NotNull;
+import org.jetbrains.plugins.coursecreator.format.*;
+
+public class CCEditorFactoryListener implements EditorFactoryListener {
+  @Override
+  public void editorCreated(@NotNull EditorFactoryEvent event) {
+    Editor editor = event.getEditor();
+    Project project = editor.getProject();
+    if (project == null) {
+      return;
+    }
+    VirtualFile virtualFile = FileDocumentManager.getInstance().getFile(editor.getDocument());
+    if (virtualFile == null) {
+      return;
+    }
+    Course course = CCProjectService.getInstance(project).getCourse();
+    if (course == null) {
+      return;
+    }
+    final VirtualFile taskDir = virtualFile.getParent();
+    if (taskDir == null || !taskDir.getName().contains("task")) {
+      return;
+    }
+    final VirtualFile lessonDir = taskDir.getParent();
+    if (lessonDir == null) return;
+    final Lesson lesson = course.getLesson(lessonDir.getName());
+    final Task task = lesson.getTask(taskDir.getName());
+    final TaskFile taskFile = task.getTaskFile(virtualFile.getName());
+    TaskFileModificationListener listener = new TaskFileModificationListener(taskFile);
+    CCProjectService.addDocumentListener(editor.getDocument(), listener);
+    editor.getDocument().addDocumentListener(listener);
+    CCProjectService.drawTaskWindows(virtualFile, editor, course);
+  }
+
+  @Override
+  public void editorReleased(@NotNull EditorFactoryEvent event) {
+    Editor editor = event.getEditor();
+    Document document = editor.getDocument();
+    StudyDocumentListener listener = CCProjectService.getListener(document);
+    if (listener != null) {
+      document.removeDocumentListener(listener);
+      CCProjectService.removeListener(document);
+    }
+    editor.getMarkupModel().removeAllHighlighters();
+    editor.getSelectionModel().removeSelection();
+  }
+
+  private class TaskFileModificationListener extends StudyDocumentListener {
+
+    private final TaskFile myTaskFile;
+
+    public TaskFileModificationListener(TaskFile taskFile) {
+      super(taskFile);
+      myTaskFile = taskFile;
+    }
+
+    @Override
+    protected void updateTaskWindowLength(CharSequence fragment, TaskWindow taskWindow, int change) {
+        int newLength = taskWindow.getReplacementLength() + change;
+        taskWindow.setReplacementLength(newLength <= 0 ? 0 : newLength);
+        if (fragment.equals("\n")) {
+          taskWindow.setReplacementLength(taskWindow.getLength() + 1);
+        }
+    }
+
+    @Override
+    protected boolean needModify() {
+      return myTaskFile.isTrackChanges();
+    }
+  }
+}
diff --git a/python/edu/course-creator/src/org/jetbrains/plugins/coursecreator/CCProjectComponent.java b/python/edu/course-creator/src/org/jetbrains/plugins/coursecreator/CCProjectComponent.java
new file mode 100644
index 0000000..34de943
--- /dev/null
+++ b/python/edu/course-creator/src/org/jetbrains/plugins/coursecreator/CCProjectComponent.java
@@ -0,0 +1,58 @@
+package org.jetbrains.plugins.coursecreator;
+
+import com.intellij.openapi.components.ProjectComponent;
+import com.intellij.openapi.editor.Editor;
+import com.intellij.openapi.editor.EditorFactory;
+import com.intellij.openapi.editor.event.EditorFactoryEvent;
+import com.intellij.openapi.editor.impl.EditorFactoryImpl;
+import com.intellij.openapi.fileEditor.FileEditor;
+import com.intellij.openapi.fileEditor.FileEditorManager;
+import com.intellij.openapi.fileEditor.impl.text.PsiAwareTextEditorImpl;
+import com.intellij.openapi.project.Project;
+import com.intellij.openapi.project.ProjectManager;
+import com.intellij.openapi.startup.StartupManager;
+import com.intellij.openapi.vfs.VirtualFile;
+import org.jetbrains.annotations.NotNull;
+import org.jetbrains.plugins.coursecreator.format.Course;
+
+public class CCProjectComponent implements ProjectComponent {
+  private final Project myProject;
+
+  public CCProjectComponent(Project project) {
+    myProject = project;
+  }
+
+  public void initComponent() {
+  }
+
+  public void disposeComponent() {
+  }
+
+  @NotNull
+  public String getComponentName() {
+    return "CCProjectComponent";
+  }
+
+  public void projectOpened() {
+    StartupManager.getInstance(myProject).runWhenProjectIsInitialized(new Runnable() {
+      @Override
+      public void run() {
+        Course course = CCProjectService.getInstance(myProject).getCourse();
+        if (course != null) {
+          EditorFactory.getInstance().addEditorFactoryListener(new CCEditorFactoryListener(), myProject);
+          VirtualFile[] files = FileEditorManager.getInstance(myProject).getOpenFiles();
+          for (VirtualFile file : files) {
+            FileEditor fileEditor = FileEditorManager.getInstance(myProject).getSelectedEditor(file);
+            if (fileEditor instanceof PsiAwareTextEditorImpl) {
+              Editor editor = ((PsiAwareTextEditorImpl)fileEditor).getEditor();
+              new CCEditorFactoryListener().editorCreated(new EditorFactoryEvent(new EditorFactoryImpl(ProjectManager.getInstance()), editor ));
+            }
+          }
+        }
+      }
+    });
+  }
+
+  public void projectClosed() {
+  }
+}
diff --git a/python/edu/course-creator/src/org/jetbrains/plugins/coursecreator/CCProjectGenerator.java b/python/edu/course-creator/src/org/jetbrains/plugins/coursecreator/CCProjectGenerator.java
new file mode 100644
index 0000000..dbaa726
--- /dev/null
+++ b/python/edu/course-creator/src/org/jetbrains/plugins/coursecreator/CCProjectGenerator.java
@@ -0,0 +1,101 @@
+package org.jetbrains.plugins.coursecreator;
+
+import com.intellij.facet.ui.FacetEditorValidator;
+import com.intellij.facet.ui.FacetValidatorsManager;
+import com.intellij.facet.ui.ValidationResult;
+import com.intellij.ide.fileTemplates.FileTemplate;
+import com.intellij.ide.fileTemplates.FileTemplateManager;
+import com.intellij.ide.fileTemplates.FileTemplateUtil;
+import com.intellij.ide.util.DirectoryUtil;
+import com.intellij.openapi.command.WriteCommandAction;
+import com.intellij.openapi.module.Module;
+import com.intellij.openapi.progress.ProcessCanceledException;
+import com.intellij.openapi.project.Project;
+import com.intellij.openapi.vfs.VirtualFile;
+import com.intellij.platform.DirectoryProjectGenerator;
+import com.intellij.psi.PsiDirectory;
+import com.intellij.psi.PsiManager;
+import com.jetbrains.python.newProject.PythonProjectGenerator;
+import org.jetbrains.annotations.Nls;
+import org.jetbrains.annotations.NotNull;
+import org.jetbrains.annotations.Nullable;
+import org.jetbrains.plugins.coursecreator.format.Course;
+import org.jetbrains.plugins.coursecreator.ui.CCNewProjectPanel;
+
+import javax.swing.*;
+
+
+public class CCProjectGenerator extends PythonProjectGenerator implements DirectoryProjectGenerator {
+  private CCNewProjectPanel mySettingsPanel;
+
+  @Nls
+  @NotNull
+  @Override
+  public String getName() {
+    return "Course creation";
+  }
+
+  @Nullable
+  @Override
+  public Object showGenerationSettings(VirtualFile baseDir) throws ProcessCanceledException {
+    return null;
+  }
+
+  @Nullable
+  @Override
+  public Icon getLogo() {
+    return null;
+  }
+
+
+  @Override
+  public void generateProject(@NotNull final Project project, @NotNull final VirtualFile baseDir,
+                              @Nullable Object settings, @NotNull Module module) {
+
+    final CCProjectService service = CCProjectService.getInstance(project);
+    final Course course = new Course(mySettingsPanel.getName(), mySettingsPanel.getAuthor(), mySettingsPanel.getDescription());
+    service.setCourse(course);
+
+    final PsiDirectory projectDir = PsiManager.getInstance(project).findDirectory(baseDir);
+    if (projectDir == null) return;
+    new WriteCommandAction.Simple(project) {
+      @Override
+      protected void run() throws Throwable {
+        final FileTemplate template = FileTemplateManager.getInstance().getInternalTemplate("test_helper");
+        try {
+          FileTemplateUtil.createFromTemplate(template, "test_helper.py", null, projectDir);
+        }
+        catch (Exception ignored) {
+        }
+        DirectoryUtil.createSubdirectories("hints", projectDir, "\\/");
+      }
+    }.execute();
+
+  }
+
+  @NotNull
+  @Override
+  public ValidationResult validate(@NotNull String s) {
+    String message = "";
+    message = mySettingsPanel.getDescription().equals("") ? "Enter description" : message;
+    message = mySettingsPanel.getAuthor().equals("") ? "Enter author name" : message;
+    message = mySettingsPanel.getName().equals("") ? "Enter course name" : message;
+    return message.equals("") ? ValidationResult.OK : new ValidationResult(message);
+  }
+
+  @Nullable
+  @Override
+  public JPanel extendBasePanel() throws ProcessCanceledException {
+    mySettingsPanel = new CCNewProjectPanel();
+    mySettingsPanel.registerValidators(new FacetValidatorsManager() {
+      public void registerValidator(FacetEditorValidator validator, JComponent... componentsToWatch) {
+        throw new UnsupportedOperationException();
+      }
+
+      public void validate() {
+        fireStateChanged();
+      }
+    });
+    return mySettingsPanel.getMainPanel();
+  }
+}
diff --git a/python/edu/course-creator/src/org/jetbrains/plugins/coursecreator/CCProjectService.java b/python/edu/course-creator/src/org/jetbrains/plugins/coursecreator/CCProjectService.java
new file mode 100644
index 0000000..1e38bab
--- /dev/null
+++ b/python/edu/course-creator/src/org/jetbrains/plugins/coursecreator/CCProjectService.java
@@ -0,0 +1,138 @@
+/*
+ * Copyright 2000-2013 JetBrains s.r.o.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.jetbrains.plugins.coursecreator;
+
+import com.intellij.ide.projectView.ProjectView;
+import com.intellij.openapi.components.PersistentStateComponent;
+import com.intellij.openapi.components.ServiceManager;
+import com.intellij.openapi.components.State;
+import com.intellij.openapi.components.Storage;
+import com.intellij.openapi.diagnostic.Logger;
+import com.intellij.openapi.editor.Document;
+import com.intellij.openapi.editor.Editor;
+import com.intellij.openapi.project.Project;
+import com.intellij.openapi.vfs.VirtualFile;
+import com.intellij.openapi.vfs.VirtualFileManager;
+import com.intellij.util.xmlb.XmlSerializer;
+import org.jdom.Element;
+import org.jetbrains.annotations.NotNull;
+import org.jetbrains.plugins.coursecreator.format.*;
+
+import java.io.File;
+import java.util.HashMap;
+import java.util.List;
+import java.util.Map;
+
+@State(name = "CCProjectService",
+       storages = {
+         @Storage(file = "$PROJECT_CONFIG_DIR$/course_service.xml")
+       }
+)
+public class CCProjectService implements PersistentStateComponent<Element> {
+
+  private static final Logger LOG = Logger.getInstance(CCProjectService.class.getName());
+  public Course myCourse;
+  public static final String COURSE_ELEMENT = "course";
+  private static final Map<Document, StudyDocumentListener> myDocumentListeners = new HashMap<Document, StudyDocumentListener>();
+
+  public void setCourse(@NotNull final Course course) {
+    myCourse = course;
+  }
+
+  public Course getCourse() {
+    return myCourse;
+  }
+
+  @Override
+  public Element getState() {
+    final Element el = new Element("CCProjectService");
+    if (myCourse != null) {
+      Element courseElement = new Element(COURSE_ELEMENT);
+      XmlSerializer.serializeInto(myCourse, courseElement);
+      el.addContent(courseElement);
+    }
+    return el;
+  }
+
+  @Override
+  public void loadState(Element el) {
+    myCourse = XmlSerializer.deserialize(el.getChild(COURSE_ELEMENT), Course.class);
+  }
+
+  public static CCProjectService getInstance(@NotNull Project project) {
+    return ServiceManager.getService(project, CCProjectService.class);
+  }
+
+  public static void deleteProjectFile(File file, @NotNull final Project project) {
+    if (!file.delete()) {
+      LOG.info("Failed to delete file " + file.getPath());
+    }
+    VirtualFileManager.getInstance().refreshWithoutFileWatcher(true);
+    ProjectView.getInstance(project).refresh();
+  }
+
+  public static void drawTaskWindows(@NotNull final VirtualFile virtualFile, @NotNull final Editor editor, @NotNull final Course course) {
+    VirtualFile taskDir = virtualFile.getParent();
+    if (taskDir == null) {
+      return;
+    }
+    String taskDirName = taskDir.getName();
+    if (!taskDirName.contains("task")) {
+      return;
+    }
+    VirtualFile lessonDir = taskDir.getParent();
+    if (lessonDir == null) {
+      return;
+    }
+    String lessonDirName = lessonDir.getName();
+    if (!lessonDirName.contains("lesson")) {
+      return;
+    }
+    Lesson lesson = course.getLessonsMap().get(lessonDirName);
+    if (lesson == null) {
+      return;
+    }
+    Task task = lesson.getTask(taskDirName);
+    if (task == null) {
+      return;
+    }
+    TaskFile taskFile = task.getTaskFile(virtualFile.getName());
+    if (taskFile == null) {
+      return;
+    }
+    List<TaskWindow> taskWindows = taskFile.getTaskWindows();
+    for (TaskWindow taskWindow : taskWindows) {
+      taskWindow.drawHighlighter(editor);
+    }
+  }
+
+  public static void addDocumentListener(Document document, StudyDocumentListener listener) {
+    myDocumentListeners.put(document, listener);
+  }
+
+  public static StudyDocumentListener getListener(Document document) {
+    return myDocumentListeners.get(document);
+  }
+
+  public static void removeListener(Document document) {
+    myDocumentListeners.remove(document);
+  }
+
+  public static boolean indexIsValid(int index, List<TaskWindow> collection) {
+    int size = collection.size();
+    return index >= 0 && index < size;
+  }
+}
diff --git a/python/edu/course-creator/src/org/jetbrains/plugins/coursecreator/StudyDocumentListener.java b/python/edu/course-creator/src/org/jetbrains/plugins/coursecreator/StudyDocumentListener.java
new file mode 100644
index 0000000..d803e0e
--- /dev/null
+++ b/python/edu/course-creator/src/org/jetbrains/plugins/coursecreator/StudyDocumentListener.java
@@ -0,0 +1,71 @@
+package org.jetbrains.plugins.coursecreator;
+
+import com.intellij.openapi.editor.Document;
+import com.intellij.openapi.editor.LogicalPosition;
+import com.intellij.openapi.editor.event.DocumentAdapter;
+import com.intellij.openapi.editor.event.DocumentEvent;
+import com.intellij.openapi.editor.impl.event.DocumentEventImpl;
+import org.jetbrains.plugins.coursecreator.format.TaskFile;
+import org.jetbrains.plugins.coursecreator.format.TaskWindow;
+
+/**
+ * author: liana
+ * date: 7/16/14.
+ * Listens for changes in study files and updates
+ * coordinates of all the task windows in the current task file
+ */
+public abstract class StudyDocumentListener extends DocumentAdapter {
+  private final TaskFile myTaskFile;
+  private int oldLine;
+  private int oldLineStartOffset;
+  private TaskWindow myTaskWindow;
+
+  public StudyDocumentListener(TaskFile taskFile) {
+    myTaskFile = taskFile;
+  }
+
+
+  //remembering old end before document change because of problems
+  // with fragments containing "\n"
+  @Override
+  public void beforeDocumentChange(DocumentEvent e) {
+    int offset = e.getOffset();
+    int oldEnd = offset + e.getOldLength();
+    Document document = e.getDocument();
+    oldLine = document.getLineNumber(oldEnd);
+    oldLineStartOffset = document.getLineStartOffset(oldLine);
+    int line = document.getLineNumber(offset);
+    int offsetInLine = offset - document.getLineStartOffset(line);
+    LogicalPosition pos = new LogicalPosition(line, offsetInLine);
+    myTaskWindow = myTaskFile.getTaskWindow(document, pos);
+
+  }
+
+  @Override
+  public void documentChanged(DocumentEvent e) {
+    if (e instanceof DocumentEventImpl) {
+      if (!needModify()) {
+        return;
+      }
+      DocumentEventImpl event = (DocumentEventImpl)e;
+      Document document = e.getDocument();
+      int offset = e.getOffset();
+      int change = event.getNewLength() - event.getOldLength();
+      if (myTaskWindow != null) {
+        updateTaskWindowLength(e.getNewFragment(), myTaskWindow, change);
+      }
+      int newEnd = offset + event.getNewLength();
+      int newLine = document.getLineNumber(newEnd);
+      int lineChange = newLine - oldLine;
+      myTaskFile.incrementLines(oldLine + 1, lineChange);
+      int newEndOffsetInLine = offset + e.getNewLength() - document.getLineStartOffset(newLine);
+      int oldEndOffsetInLine = offset + e.getOldLength() - oldLineStartOffset;
+      myTaskFile.updateLine(lineChange, oldLine, newEndOffsetInLine, oldEndOffsetInLine);
+    }
+  }
+
+  protected abstract void updateTaskWindowLength(CharSequence fragment, TaskWindow taskWindow, int change);
+
+  protected abstract boolean needModify();
+}
+
diff --git a/python/edu/course-creator/src/org/jetbrains/plugins/coursecreator/actions/AddTaskWindow.java b/python/edu/course-creator/src/org/jetbrains/plugins/coursecreator/actions/AddTaskWindow.java
new file mode 100644
index 0000000..ff88cea
--- /dev/null
+++ b/python/edu/course-creator/src/org/jetbrains/plugins/coursecreator/actions/AddTaskWindow.java
@@ -0,0 +1,105 @@
+package org.jetbrains.plugins.coursecreator.actions;
+
+import com.intellij.codeInsight.daemon.impl.DaemonCodeAnalyzerImpl;
+import com.intellij.openapi.actionSystem.AnActionEvent;
+import com.intellij.openapi.actionSystem.CommonDataKeys;
+import com.intellij.openapi.actionSystem.Presentation;
+import com.intellij.openapi.editor.Document;
+import com.intellij.openapi.editor.Editor;
+import com.intellij.openapi.editor.SelectionModel;
+import com.intellij.openapi.project.DumbAwareAction;
+import com.intellij.openapi.project.Project;
+import com.intellij.openapi.ui.DialogWrapper;
+import com.intellij.psi.PsiDirectory;
+import com.intellij.psi.PsiDocumentManager;
+import com.intellij.psi.PsiFile;
+import org.jetbrains.plugins.coursecreator.CCProjectService;
+import org.jetbrains.plugins.coursecreator.format.*;
+import org.jetbrains.plugins.coursecreator.ui.CreateTaskWindowDialog;
+
+public class AddTaskWindow extends DumbAwareAction {
+  public AddTaskWindow() {
+    super("Add task window","Add task window", null);
+  }
+
+  @Override
+  public void actionPerformed(AnActionEvent e) {
+    final Project project = e.getData(CommonDataKeys.PROJECT);
+    if (project == null) {
+      return;
+    }
+    final PsiFile file = CommonDataKeys.PSI_FILE.getData(e.getDataContext());
+    if (file == null) return;
+    final Editor editor = CommonDataKeys.EDITOR.getData(e.getDataContext());
+    if (editor == null) return;
+
+    final SelectionModel model = editor.getSelectionModel();
+    final Document document = PsiDocumentManager.getInstance(project).getDocument(file);
+    if (document == null) return;
+    final int start = model.getSelectionStart();
+    final int end = model.getSelectionEnd();
+    final int lineNumber = document.getLineNumber(start);
+    final int length = end - start;
+    int realStart = start - document.getLineStartOffset(lineNumber);
+
+    final CCProjectService service = CCProjectService.getInstance(project);
+    final Course course = service.getCourse();
+    final PsiDirectory taskDir = file.getContainingDirectory();
+    final PsiDirectory lessonDir = taskDir.getParent();
+    if (lessonDir == null) return;
+
+    final Lesson lesson = course.getLesson(lessonDir.getName());
+    final Task task = lesson.getTask(taskDir.getName());
+    final TaskFile taskFile = task.getTaskFile(file.getName());
+    final TaskWindow taskWindow = new TaskWindow(lineNumber, realStart, length, model.getSelectedText());
+    CreateTaskWindowDialog dlg = new CreateTaskWindowDialog(project, taskWindow, lesson.getIndex(), task.getIndex(), file.getVirtualFile().getNameWithoutExtension(), taskFile.getTaskWindows().size() + 1);
+    dlg.show();
+    if (dlg.getExitCode() != DialogWrapper.OK_EXIT_CODE) {
+      return;
+    }
+    int index = taskFile.getTaskWindows().size() + 1;
+    taskFile.addTaskWindow(taskWindow, index);
+    taskWindow.drawHighlighter(editor);
+    DaemonCodeAnalyzerImpl.getInstance(project).restart(file);
+  }
+
+  @Override
+  public void update(AnActionEvent event) {
+    final Presentation presentation = event.getPresentation();
+    final Project project = event.getData(CommonDataKeys.PROJECT);
+    if (project == null) {
+      presentation.setVisible(false);
+      presentation.setEnabled(false);
+      return;
+    }
+    final Editor editor = CommonDataKeys.EDITOR.getData(event.getDataContext());
+    final PsiFile file = CommonDataKeys.PSI_FILE.getData(event.getDataContext());
+    if (editor == null || file == null) {
+      presentation.setVisible(false);
+      presentation.setEnabled(false);
+      return;
+    }
+    if (!editor.getSelectionModel().hasSelection()) {
+      presentation.setVisible(false);
+      presentation.setEnabled(false);
+      return;
+    }
+
+    final CCProjectService service = CCProjectService.getInstance(project);
+    final Course course = service.getCourse();
+    final PsiDirectory taskDir = file.getContainingDirectory();
+    final PsiDirectory lessonDir = taskDir.getParent();
+    if (lessonDir == null) return;
+
+    final Lesson lesson = course.getLesson(lessonDir.getName());
+    final Task task = lesson.getTask(taskDir.getName());
+    if (task == null) {
+      presentation.setVisible(false);
+      presentation.setEnabled(false);
+      return;
+    }
+    presentation.setVisible(true);
+    presentation.setEnabled(true);
+
+  }
+}
\ No newline at end of file
diff --git a/python/edu/course-creator/src/org/jetbrains/plugins/coursecreator/actions/CreateCourseArchive.java b/python/edu/course-creator/src/org/jetbrains/plugins/coursecreator/actions/CreateCourseArchive.java
new file mode 100644
index 0000000..05428f4
--- /dev/null
+++ b/python/edu/course-creator/src/org/jetbrains/plugins/coursecreator/actions/CreateCourseArchive.java
@@ -0,0 +1,198 @@
+package org.jetbrains.plugins.coursecreator.actions;
+
+import com.google.gson.Gson;
+import com.google.gson.GsonBuilder;
+import com.intellij.icons.AllIcons;
+import com.intellij.ide.projectView.ProjectView;
+import com.intellij.openapi.actionSystem.AnActionEvent;
+import com.intellij.openapi.actionSystem.CommonDataKeys;
+import com.intellij.openapi.application.ApplicationManager;
+import com.intellij.openapi.command.CommandProcessor;
+import com.intellij.openapi.diagnostic.Logger;
+import com.intellij.openapi.editor.Document;
+import com.intellij.openapi.fileEditor.FileDocumentManager;
+import com.intellij.openapi.project.DumbAwareAction;
+import com.intellij.openapi.project.Project;
+import com.intellij.openapi.ui.DialogWrapper;
+import com.intellij.openapi.ui.Messages;
+import com.intellij.openapi.vfs.VirtualFile;
+import com.intellij.openapi.vfs.VirtualFileManager;
+import com.intellij.util.io.ZipUtil;
+import org.jetbrains.plugins.coursecreator.CCProjectService;
+import org.jetbrains.plugins.coursecreator.StudyDocumentListener;
+import org.jetbrains.plugins.coursecreator.format.*;
+import org.jetbrains.plugins.coursecreator.ui.CreateCourseArchiveDialog;
+
+import java.io.*;
+import java.util.*;
+import java.util.zip.ZipOutputStream;
+
+public class CreateCourseArchive extends DumbAwareAction {
+  private static final Logger LOG = Logger.getInstance(CreateCourseArchive.class.getName());
+  String myZipName;
+  String myLocationDir;
+
+  public void setZipName(String zipName) {
+    myZipName = zipName;
+  }
+
+  public void setLocationDir(String locationDir) {
+    myLocationDir = locationDir;
+  }
+
+  public CreateCourseArchive() {
+    super("Generate course archive", "Generate course archive", AllIcons.FileTypes.Archive);
+  }
+
+  @Override
+  public void actionPerformed(AnActionEvent e) {
+    final Project project = e.getData(CommonDataKeys.PROJECT);
+    if (project == null) {
+      return;
+    }
+    final CCProjectService service = CCProjectService.getInstance(project);
+    final Course course = service.getCourse();
+    if (course == null) return;
+    CreateCourseArchiveDialog dlg = new CreateCourseArchiveDialog(project, this);
+    dlg.show();
+    if (dlg.getExitCode() != DialogWrapper.OK_EXIT_CODE) {
+      return;
+    }
+    final VirtualFile baseDir = project.getBaseDir();
+    final Map<String, Lesson> lessons = course.getLessonsMap();
+    //List<FileEditor> editorList = new ArrayList<FileEditor>();
+    Map<VirtualFile, TaskFile> taskFiles = new HashMap<VirtualFile, TaskFile>();
+    for (Map.Entry<String, Lesson> lesson : lessons.entrySet()) {
+      final VirtualFile lessonDir = baseDir.findChild(lesson.getKey());
+      if (lessonDir == null) continue;
+      for (Map.Entry<String, Task> task : lesson.getValue().myTasksMap.entrySet()) {
+        final VirtualFile taskDir = lessonDir.findChild(task.getKey());
+        if (taskDir == null) continue;
+        for (Map.Entry<String, TaskFile> entry : task.getValue().task_files.entrySet()) {
+          final VirtualFile file = taskDir.findChild(entry.getKey());
+          if (file == null) continue;
+          final Document document = FileDocumentManager.getInstance().getDocument(file);
+          if (document == null) continue;
+          final TaskFile taskFile = entry.getValue();
+          document.addDocumentListener(new InsertionListener(taskFile));
+          taskFiles.put(file, taskFile);
+          taskFile.setTrackChanges(false);
+          Collections.sort(taskFile.getTaskWindows());
+          for (int i = taskFile.getTaskWindows().size() - 1; i >=0 ; i--) {
+            final TaskWindow taskWindow = taskFile.getTaskWindows().get(i);
+            final String taskText = taskWindow.getTaskText();
+            final int lineStartOffset = document.getLineStartOffset(taskWindow.line);
+            final int offset = lineStartOffset + taskWindow.start;
+            CommandProcessor.getInstance().executeCommand(project, new Runnable() {
+              @Override
+              public void run() {
+                ApplicationManager.getApplication().runWriteAction(new Runnable() {
+                  @Override
+                  public void run() {
+                    document.replaceString(offset, offset + taskWindow.getReplacementLength(), taskText);
+                    FileDocumentManager.getInstance().saveDocument(document);
+                  }
+                });
+              }
+            }, "x", "qwe");
+          }
+        }
+      }
+    }
+    generateJson(project);
+    try {
+      File zipFile = new File(myLocationDir, myZipName + ".zip");
+      ZipOutputStream zos = new ZipOutputStream(new BufferedOutputStream(new FileOutputStream(zipFile)));
+
+      for (Map.Entry<String, Lesson> entry : lessons.entrySet()) {
+        final VirtualFile lessonDir = baseDir.findChild(entry.getKey());
+        if (lessonDir == null) continue;
+
+        ZipUtil.addFileOrDirRecursively(zos, null, new File(lessonDir.getPath()), lessonDir.getName(), null, null);
+      }
+      ZipUtil.addFileOrDirRecursively(zos, null, new File(baseDir.getPath(), "hints"), "hints", null, null);
+      ZipUtil.addFileOrDirRecursively(zos, null, new File(baseDir.getPath(), "course.json"), "course.json", null, null);
+      ZipUtil.addFileOrDirRecursively(zos, null, new File(baseDir.getPath(), "test_helper.py"), "test_helper.py", null, null);
+      zos.close();
+      Messages.showInfoMessage("Course archive was saved to " + zipFile.getPath(), "Course Archive Was Created Successfully");
+    }
+    catch (IOException e1) {
+      LOG.error(e1);
+    }
+
+    for (Map.Entry<VirtualFile, TaskFile> entry: taskFiles.entrySet()) {
+      TaskFile value = entry.getValue();
+      final Document document = FileDocumentManager.getInstance().getDocument(entry.getKey());
+      if (document == null) {
+        continue;
+      }
+      for (final TaskWindow taskWindow : value.getTaskWindows()){
+        final int lineStartOffset = document.getLineStartOffset(taskWindow.line);
+        final int offset = lineStartOffset + taskWindow.start;
+        CommandProcessor.getInstance().executeCommand(project, new Runnable() {
+          @Override
+          public void run() {
+            ApplicationManager.getApplication().runWriteAction(new Runnable() {
+              @Override
+              public void run() {
+                document.replaceString(offset, offset + taskWindow.length, taskWindow.getPossibleAnswer());
+                FileDocumentManager.getInstance().saveDocument(document);
+              }
+            });
+          }
+        }, "x", "qwe");
+      }
+      value.setTrackChanges(true);
+    }
+    VirtualFileManager.getInstance().refreshWithoutFileWatcher(true);
+    ProjectView.getInstance(project).refresh();
+  }
+
+  private void generateJson(Project project) {
+    final CCProjectService service = CCProjectService.getInstance(project);
+    final Course course = service.getCourse();
+    final Gson gson = new GsonBuilder().setPrettyPrinting().excludeFieldsWithoutExposeAnnotation().create();
+    final String json = gson.toJson(course);
+    final File courseJson = new File(project.getBasePath(), "course.json");
+    FileWriter writer = null;
+    try {
+      writer = new FileWriter(courseJson);
+      writer.write(json);
+    }
+    catch (IOException e) {
+      Messages.showErrorDialog(e.getMessage(), "Failed to Generate Json");
+      LOG.info(e);
+    }
+    catch (Exception e) {
+      Messages.showErrorDialog(e.getMessage(), "Failed to Generate Json");
+      LOG.info(e);
+    }
+    finally {
+      try {
+        if (writer != null) {
+          writer.close();
+        }
+      }
+      catch (IOException e1) {
+        //close silently
+      }
+    }
+  }
+
+  private class InsertionListener extends StudyDocumentListener {
+
+    public InsertionListener(TaskFile taskFile) {
+      super(taskFile);
+    }
+
+    @Override
+    protected void updateTaskWindowLength(CharSequence fragment, TaskWindow taskWindow, int change) {
+    //we don't need to update task window length
+    }
+
+    @Override
+    protected boolean needModify() {
+      return true;
+    }
+  }
+}
\ No newline at end of file
diff --git a/python/edu/course-creator/src/org/jetbrains/plugins/coursecreator/actions/CreateLesson.java b/python/edu/course-creator/src/org/jetbrains/plugins/coursecreator/actions/CreateLesson.java
new file mode 100644
index 0000000..15d9f83
--- /dev/null
+++ b/python/edu/course-creator/src/org/jetbrains/plugins/coursecreator/actions/CreateLesson.java
@@ -0,0 +1,89 @@
+package org.jetbrains.plugins.coursecreator.actions;
+
+import com.intellij.ide.IdeView;
+import com.intellij.ide.util.DirectoryChooserUtil;
+import com.intellij.ide.util.DirectoryUtil;
+import com.intellij.openapi.actionSystem.AnActionEvent;
+import com.intellij.openapi.actionSystem.CommonDataKeys;
+import com.intellij.openapi.actionSystem.LangDataKeys;
+import com.intellij.openapi.actionSystem.Presentation;
+import com.intellij.openapi.application.ApplicationManager;
+import com.intellij.openapi.project.DumbAwareAction;
+import com.intellij.openapi.project.Project;
+import com.intellij.openapi.ui.Messages;
+import com.intellij.psi.PsiDirectory;
+import com.intellij.util.PlatformIcons;
+import org.jetbrains.plugins.coursecreator.CCProjectService;
+import org.jetbrains.plugins.coursecreator.format.Course;
+import org.jetbrains.plugins.coursecreator.format.Lesson;
+
+public class CreateLesson extends DumbAwareAction {
+  public CreateLesson() {
+    super("Lesson", "Create new Lesson", PlatformIcons.DIRECTORY_CLOSED_ICON);
+  }
+
+  @Override
+  public void actionPerformed(AnActionEvent e) {
+    final IdeView view = e.getData(LangDataKeys.IDE_VIEW);
+    final Project project = e.getData(CommonDataKeys.PROJECT);
+
+    if (view == null || project == null) {
+      return;
+    }
+    final PsiDirectory directory = DirectoryChooserUtil.getOrChooseDirectory(view);
+    if (directory == null) return;
+
+    final CCProjectService service = CCProjectService.getInstance(project);
+    final Course course = service.getCourse();
+    final int size = course.getLessons().size();
+    final String lessonName = Messages.showInputDialog("Name:", "Lesson Name", null, "lesson" + (size+1), null);
+    if (lessonName == null) return;
+
+    ApplicationManager.getApplication().runWriteAction(new Runnable() {
+      @Override
+      public void run() {
+        final PsiDirectory lessonDirectory = DirectoryUtil.createSubdirectories("lesson" + (size+1), directory, "\\/");
+        if (lessonDirectory != null) {
+          view.selectElement(lessonDirectory);
+          final Lesson lesson = new Lesson(lessonName);
+          lesson.setIndex(size + 1);
+          course.addLesson(lesson, lessonDirectory);
+        }
+      }
+    });
+  }
+
+  @Override
+  public void update(AnActionEvent event) {
+    final Presentation presentation = event.getPresentation();
+    final Project project = event.getData(CommonDataKeys.PROJECT);
+    if (project == null) {
+      presentation.setVisible(false);
+      presentation.setEnabled(false);
+      return;
+    }
+
+    final IdeView view = event.getData(LangDataKeys.IDE_VIEW);
+    if (view == null) {
+      presentation.setVisible(false);
+      presentation.setEnabled(false);
+      return;
+    }
+
+    final PsiDirectory[] directories = view.getDirectories();
+    if (directories.length == 0) {
+      presentation.setVisible(false);
+      presentation.setEnabled(false);
+      return;
+    }
+    final PsiDirectory directory = DirectoryChooserUtil.getOrChooseDirectory(view);
+    if (directory != null && !project.getBaseDir().equals(directory.getVirtualFile())) {
+      presentation.setVisible(false);
+      presentation.setEnabled(false);
+      return;
+    }
+    presentation.setVisible(true);
+    presentation.setEnabled(true);
+
+  }
+}
\ No newline at end of file
diff --git a/python/edu/course-creator/src/org/jetbrains/plugins/coursecreator/actions/CreateTask.java b/python/edu/course-creator/src/org/jetbrains/plugins/coursecreator/actions/CreateTask.java
new file mode 100644
index 0000000..0940135
--- /dev/null
+++ b/python/edu/course-creator/src/org/jetbrains/plugins/coursecreator/actions/CreateTask.java
@@ -0,0 +1,122 @@
+package org.jetbrains.plugins.coursecreator.actions;
+
+import com.intellij.ide.IdeView;
+import com.intellij.ide.fileTemplates.FileTemplate;
+import com.intellij.ide.fileTemplates.FileTemplateManager;
+import com.intellij.ide.fileTemplates.FileTemplateUtil;
+import com.intellij.ide.util.DirectoryChooserUtil;
+import com.intellij.ide.util.DirectoryUtil;
+import com.intellij.ide.util.EditorHelper;
+import com.intellij.openapi.actionSystem.AnActionEvent;
+import com.intellij.openapi.actionSystem.CommonDataKeys;
+import com.intellij.openapi.actionSystem.LangDataKeys;
+import com.intellij.openapi.actionSystem.Presentation;
+import com.intellij.openapi.application.ApplicationManager;
+import com.intellij.openapi.project.DumbAwareAction;
+import com.intellij.openapi.project.Project;
+import com.intellij.openapi.ui.Messages;
+import com.intellij.psi.PsiDirectory;
+import com.intellij.psi.PsiElement;
+import com.intellij.util.PlatformIcons;
+import org.jetbrains.plugins.coursecreator.CCProjectService;
+import org.jetbrains.plugins.coursecreator.format.Course;
+import org.jetbrains.plugins.coursecreator.format.Lesson;
+import org.jetbrains.plugins.coursecreator.format.Task;
+
+public class CreateTask extends DumbAwareAction {
+  public CreateTask() {
+    super("Task", "Create new Task", PlatformIcons.DIRECTORY_CLOSED_ICON);
+  }
+
+  @Override
+  public void actionPerformed(AnActionEvent e) {
+    final IdeView view = e.getData(LangDataKeys.IDE_VIEW);
+    final Project project = e.getData(CommonDataKeys.PROJECT);
+
+    if (view == null || project == null) {
+      return;
+    }
+    final PsiDirectory directory = DirectoryChooserUtil.getOrChooseDirectory(view);
+
+    if (directory == null) return;
+    final CCProjectService service = CCProjectService.getInstance(project);
+    final Course course = service.getCourse();
+    final Lesson lesson = course.getLesson(directory.getName());
+    final int size = lesson.getTasklist().size();
+
+    final String taskName = Messages.showInputDialog("Name:", "Task Name", null, "task" + (size + 1), null);
+    if (taskName == null) return;
+
+    ApplicationManager.getApplication().runWriteAction(new Runnable() {
+      @Override
+      public void run() {
+        final PsiDirectory taskDirectory = DirectoryUtil.createSubdirectories("task" + (size + 1), directory, "\\/");
+        if (taskDirectory != null) {
+          final FileTemplate template = FileTemplateManager.getInstance().getInternalTemplate("task.html");
+          final FileTemplate testsTemplate = FileTemplateManager.getInstance().getInternalTemplate("tests");
+          final FileTemplate taskTemplate = FileTemplateManager.getInstance().getInternalTemplate("task.py");
+          try {
+            final PsiElement taskFile = FileTemplateUtil.createFromTemplate(template, "task.html", null, taskDirectory);
+            final PsiElement testsFile = FileTemplateUtil.createFromTemplate(testsTemplate, "tests.py", null, taskDirectory);
+            final PsiElement taskPyFile = FileTemplateUtil.createFromTemplate(taskTemplate, "file1.py", null, taskDirectory);
+
+            final Task task = new Task(taskName);
+            task.addTaskFile(taskPyFile.getContainingFile().getName(), size + 1);
+            task.setIndex(size + 1);
+            lesson.addTask(task, taskDirectory);
+
+            ApplicationManager.getApplication().invokeLater(new Runnable() {
+              @Override
+              public void run() {
+                EditorHelper.openInEditor(testsFile, false);
+                EditorHelper.openInEditor(taskPyFile, false);
+                view.selectElement(taskFile);
+              }
+            });
+          }
+          catch (Exception ignored) {
+          }
+        }
+      }
+    });
+  }
+
+  @Override
+  public void update(AnActionEvent event) {
+    final Presentation presentation = event.getPresentation();
+    final Project project = event.getData(CommonDataKeys.PROJECT);
+    if (project == null) {
+      presentation.setVisible(false);
+      presentation.setEnabled(false);
+      return;
+    }
+
+    final IdeView view = event.getData(LangDataKeys.IDE_VIEW);
+    if (view == null) {
+      presentation.setVisible(false);
+      presentation.setEnabled(false);
+      return;
+    }
+
+    final PsiDirectory[] directories = view.getDirectories();
+    if (directories.length == 0) {
+      presentation.setVisible(false);
+      presentation.setEnabled(false);
+      return;
+    }
+    final PsiDirectory directory = DirectoryChooserUtil.getOrChooseDirectory(view);
+    final CCProjectService service = CCProjectService.getInstance(project);
+    final Course course = service.getCourse();
+    if (course != null && directory != null && course.getLesson(directory.getName()) == null) {
+      presentation.setVisible(false);
+      presentation.setEnabled(false);
+      return;
+    }
+
+    presentation.setVisible(true);
+    presentation.setEnabled(true);
+
+  }
+}
\ No newline at end of file
diff --git a/python/edu/course-creator/src/org/jetbrains/plugins/coursecreator/actions/CreateTaskFile.java b/python/edu/course-creator/src/org/jetbrains/plugins/coursecreator/actions/CreateTaskFile.java
new file mode 100644
index 0000000..5aafceb
--- /dev/null
+++ b/python/edu/course-creator/src/org/jetbrains/plugins/coursecreator/actions/CreateTaskFile.java
@@ -0,0 +1,110 @@
+package org.jetbrains.plugins.coursecreator.actions;
+
+import com.intellij.ide.IdeView;
+import com.intellij.ide.fileTemplates.FileTemplate;
+import com.intellij.ide.fileTemplates.FileTemplateManager;
+import com.intellij.ide.fileTemplates.FileTemplateUtil;
+import com.intellij.ide.util.DirectoryChooserUtil;
+import com.intellij.ide.util.EditorHelper;
+import com.intellij.openapi.actionSystem.AnActionEvent;
+import com.intellij.openapi.actionSystem.CommonDataKeys;
+import com.intellij.openapi.actionSystem.LangDataKeys;
+import com.intellij.openapi.actionSystem.Presentation;
+import com.intellij.openapi.application.ApplicationManager;
+import com.intellij.openapi.project.DumbAwareAction;
+import com.intellij.openapi.project.Project;
+import com.intellij.openapi.ui.Messages;
+import com.intellij.psi.PsiDirectory;
+import com.intellij.psi.PsiElement;
+import icons.PythonPsiApiIcons;
+import org.jetbrains.plugins.coursecreator.CCProjectService;
+import org.jetbrains.plugins.coursecreator.format.Course;
+import org.jetbrains.plugins.coursecreator.format.Lesson;
+import org.jetbrains.plugins.coursecreator.format.Task;
+
+public class CreateTaskFile extends DumbAwareAction {
+
+  public CreateTaskFile() {
+    super("Task File", "Create new Task File", PythonPsiApiIcons.PythonFile);
+  }
+
+  @Override
+  public void actionPerformed(AnActionEvent e) {
+    final IdeView view = e.getData(LangDataKeys.IDE_VIEW);
+    final Project project = e.getData(CommonDataKeys.PROJECT);
+
+    if (view == null || project == null) {
+      return;
+    }
+    final PsiDirectory taskDir = DirectoryChooserUtil.getOrChooseDirectory(view);
+    if (taskDir == null) return;
+    PsiDirectory lessonDir = taskDir.getParent();
+    if (lessonDir == null) {
+      return;
+    }
+    final CCProjectService service = CCProjectService.getInstance(project);
+    final Course course = service.getCourse();
+    final Lesson lesson = course.getLesson(lessonDir.getName());
+    final Task task = lesson.getTask(taskDir.getName());
+
+    final int index = task.getTaskFiles().size() + 1;
+    String generatedName = "file" + index;
+    final String taskFileName = Messages.showInputDialog("Name:", "Task File Name", null, generatedName, null);
+    if (taskFileName == null) return;
+
+    ApplicationManager.getApplication().runWriteAction(new Runnable() {
+      @Override
+      public void run() {
+          final FileTemplate taskTemplate = FileTemplateManager.getInstance().getInternalTemplate("task.py");
+          try {
+            final PsiElement taskPyFile = FileTemplateUtil.createFromTemplate(taskTemplate, taskFileName + ".py", null, taskDir);
+            task.addTaskFile(taskPyFile.getContainingFile().getName(), index);
+            ApplicationManager.getApplication().invokeLater(new Runnable() {
+              @Override
+              public void run() {
+                EditorHelper.openInEditor(taskPyFile, false);
+                view.selectElement(taskPyFile);
+              }
+            });
+          }
+          catch (Exception ignored) {
+          }
+      }
+    });
+  }
+
+  @Override
+  public void update(AnActionEvent event) {
+    final Presentation presentation = event.getPresentation();
+    final Project project = event.getData(CommonDataKeys.PROJECT);
+    if (project == null) {
+      presentation.setVisible(false);
+      presentation.setEnabled(false);
+      return;
+    }
+
+    final IdeView view = event.getData(LangDataKeys.IDE_VIEW);
+    if (view == null) {
+      presentation.setVisible(false);
+      presentation.setEnabled(false);
+      return;
+    }
+
+    final PsiDirectory[] directories = view.getDirectories();
+    if (directories.length == 0) {
+      presentation.setVisible(false);
+      presentation.setEnabled(false);
+      return;
+    }
+    final PsiDirectory directory = DirectoryChooserUtil.getOrChooseDirectory(view);
+    final CCProjectService service = CCProjectService.getInstance(project);
+    final Course course = service.getCourse();
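+    // Task files can only be created inside a task directory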
+    if (course != null && directory != null && !directory.getName().contains("task")) {
+      presentation.setVisible(false);
+      presentation.setEnabled(false);
+      return;
+    }
+    presentation.setVisible(true);
+    presentation.setEnabled(true);
+  }
+}
diff --git a/python/edu/course-creator/src/org/jetbrains/plugins/coursecreator/actions/DeleteTaskWindow.java b/python/edu/course-creator/src/org/jetbrains/plugins/coursecreator/actions/DeleteTaskWindow.java
new file mode 100644
index 0000000..2724759
--- /dev/null
+++ b/python/edu/course-creator/src/org/jetbrains/plugins/coursecreator/actions/DeleteTaskWindow.java
@@ -0,0 +1,62 @@
+package org.jetbrains.plugins.coursecreator.actions;
+
+import com.intellij.codeInsight.daemon.impl.DaemonCodeAnalyzerImpl;
+import com.intellij.openapi.actionSystem.AnActionEvent;
+import com.intellij.openapi.actionSystem.CommonDataKeys;
+import com.intellij.openapi.actionSystem.PlatformDataKeys;
+import com.intellij.openapi.editor.Document;
+import com.intellij.openapi.editor.Editor;
+import com.intellij.openapi.project.DumbAwareAction;
+import com.intellij.openapi.project.Project;
+import com.intellij.psi.PsiDirectory;
+import com.intellij.psi.PsiDocumentManager;
+import com.intellij.psi.PsiFile;
+import org.jetbrains.annotations.NotNull;
+import org.jetbrains.plugins.coursecreator.CCProjectService;
+import org.jetbrains.plugins.coursecreator.format.*;
+
+import java.util.List;
+
+@SuppressWarnings("ComponentNotRegistered")
+public class DeleteTaskWindow extends DumbAwareAction {
+  @NotNull
+  private final TaskWindow myTaskWindow;
+
+  public DeleteTaskWindow(@NotNull final TaskWindow taskWindow) {
+    super("Delete task window","Delete task window", null);
+    myTaskWindow = taskWindow;
+  }
+
+  @Override
+  public void actionPerformed(AnActionEvent e) {
+    final Project project = e.getData(PlatformDataKeys.PROJECT);
+    if (project == null) return;
+    final PsiFile file = CommonDataKeys.PSI_FILE.getData(e.getDataContext());
+    if (file == null) return;
+    final Editor editor = CommonDataKeys.EDITOR.getData(e.getDataContext());
+    if (editor == null) {
+      return;
+    }
+    final Document document = PsiDocumentManager.getInstance(project).getDocument(file);
+    if (document == null) return;
+
+    final CCProjectService service = CCProjectService.getInstance(project);
+    final Course course = service.getCourse();
+    final PsiDirectory taskDir = file.getContainingDirectory();
+    final PsiDirectory lessonDir = taskDir.getParent();
+    if (lessonDir == null) return;
+
+    final Lesson lesson = course.getLesson(lessonDir.getName());
+    final Task task = lesson.getTask(taskDir.getName());
+    final TaskFile taskFile = task.getTaskFile(file.getName());
+    final List<TaskWindow> taskWindows = taskFile.getTaskWindows();
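+    // Remove the task window together with its hint file, then redraw highlighters for the remaining windows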
+    if (taskWindows.contains(myTaskWindow)) {
+      myTaskWindow.removeResources(project);
+      taskWindows.remove(myTaskWindow);
+      editor.getMarkupModel().removeAllHighlighters();
+      CCProjectService.drawTaskWindows(file.getVirtualFile(), editor, course);
+      DaemonCodeAnalyzerImpl.getInstance(project).restart(file);
+    }
+  }
+
+}
\ No newline at end of file
diff --git a/python/edu/course-creator/src/org/jetbrains/plugins/coursecreator/actions/ShowTaskWindowText.java b/python/edu/course-creator/src/org/jetbrains/plugins/coursecreator/actions/ShowTaskWindowText.java
new file mode 100644
index 0000000..7c7e7fa
--- /dev/null
+++ b/python/edu/course-creator/src/org/jetbrains/plugins/coursecreator/actions/ShowTaskWindowText.java
@@ -0,0 +1,44 @@
+package org.jetbrains.plugins.coursecreator.actions;
+
+import com.intellij.openapi.actionSystem.AnActionEvent;
+import com.intellij.openapi.actionSystem.CommonDataKeys;
+import com.intellij.openapi.actionSystem.PlatformDataKeys;
+import com.intellij.openapi.project.DumbAwareAction;
+import com.intellij.openapi.project.Project;
+import com.intellij.psi.PsiDirectory;
+import com.intellij.psi.PsiFile;
+import org.jetbrains.annotations.NotNull;
+import org.jetbrains.plugins.coursecreator.CCProjectService;
+import org.jetbrains.plugins.coursecreator.format.*;
+import org.jetbrains.plugins.coursecreator.ui.CreateTaskWindowDialog;
+
+@SuppressWarnings("ComponentNotRegistered")
+public class ShowTaskWindowText extends DumbAwareAction {
+  @NotNull
+  private final TaskWindow myTaskWindow;
+
+  public ShowTaskWindowText(@NotNull final TaskWindow taskWindow) {
+    super("Add task window","Add task window", null);
+    myTaskWindow = taskWindow;
+  }
+
+  @Override
+  public void actionPerformed(AnActionEvent e) {
+    final Project project = e.getData(PlatformDataKeys.PROJECT);
+    if (project == null) return;
+    final PsiFile file = CommonDataKeys.PSI_FILE.getData(e.getDataContext());
+    if (file == null) return;
+    final CCProjectService service = CCProjectService.getInstance(project);
+    final Course course = service.getCourse();
+    final PsiDirectory taskDir = file.getContainingDirectory();
+    final PsiDirectory lessonDir = taskDir.getParent();
+    if (lessonDir == null) return;
+
+    final Lesson lesson = course.getLesson(lessonDir.getName());
+    final Task task = lesson.getTask(taskDir.getName());
+    final TaskFile taskFile = task.getTaskFile(file.getName());
+    //TODO: copy task window and return if modification canceled
+    CreateTaskWindowDialog dlg = new CreateTaskWindowDialog(project, myTaskWindow, lesson.getIndex(), task.getIndex(), file.getVirtualFile().getNameWithoutExtension(), taskFile.getTaskWindows().size() + 1);
+    dlg.show();
+  }
+}
\ No newline at end of file
diff --git a/python/edu/course-creator/src/org/jetbrains/plugins/coursecreator/format/Course.java b/python/edu/course-creator/src/org/jetbrains/plugins/coursecreator/format/Course.java
new file mode 100644
index 0000000..eb62d59
--- /dev/null
+++ b/python/edu/course-creator/src/org/jetbrains/plugins/coursecreator/format/Course.java
@@ -0,0 +1,55 @@
+package org.jetbrains.plugins.coursecreator.format;
+
+import com.google.gson.annotations.Expose;
+import com.intellij.psi.PsiDirectory;
+import org.jetbrains.annotations.NotNull;
+
+import java.util.ArrayList;
+import java.util.HashMap;
+import java.util.List;
+import java.util.Map;
+
+public class Course {
+  @Expose public List<Lesson> lessons = new ArrayList<Lesson>();
+  @Expose public String description;
+
+  @Expose public String name;
+  @Expose public String author;
+
+  public Map<String, Lesson> myLessonsMap = new HashMap<String, Lesson>();
+
+  public Map<String, Lesson> getLessonsMap() {
+    return myLessonsMap;
+  }
+
+  public Lesson getLesson(@NotNull final String name) {
+    return myLessonsMap.get(name);
+  }
+
+  public Course() {
+  }
+
+  public Course(@NotNull final String name, @NotNull final String author, @NotNull final String description) {
+    this.description = description;
+    this.name = name;
+    this.author = author;
+  }
+
+  public List<Lesson> getLessons() {
+    return lessons;
+  }
+
+  public void addLesson(@NotNull final Lesson lesson, @NotNull final PsiDirectory directory) {
+    lessons.add(lesson);
+    myLessonsMap.put(directory.getName(), lesson);
+  }
+
+  public String getName() {
+    return name;
+  }
+
+  public String getDescription() {
+    return description;
+  }
+}
diff --git a/python/edu/course-creator/src/org/jetbrains/plugins/coursecreator/format/Lesson.java b/python/edu/course-creator/src/org/jetbrains/plugins/coursecreator/format/Lesson.java
new file mode 100644
index 0000000..3872014
--- /dev/null
+++ b/python/edu/course-creator/src/org/jetbrains/plugins/coursecreator/format/Lesson.java
@@ -0,0 +1,45 @@
+package org.jetbrains.plugins.coursecreator.format;
+
+import com.google.gson.annotations.Expose;
+import com.intellij.psi.PsiDirectory;
+import org.jetbrains.annotations.NotNull;
+
+import java.util.ArrayList;
+import java.util.HashMap;
+import java.util.List;
+import java.util.Map;
+
+public class Lesson {
+  @Expose public String name;
+  @Expose public List<Task> task_list = new ArrayList<Task>();
+
+  public int myIndex;
+  public Map<String, Task> myTasksMap = new HashMap<String, Task>();
+
+  public Lesson() {}
+
+  public Lesson(@NotNull final String name) {
+    this.name = name;
+  }
+
+  public void addTask(@NotNull final Task task, PsiDirectory taskDirectory) {
+    myTasksMap.put(taskDirectory.getName(), task);
+    task_list.add(task);
+  }
+
+  public Task getTask(@NotNull final String name) {
+    return myTasksMap.get(name);
+  }
+
+  public List<Task> getTasklist() {
+    return task_list;
+  }
+
+  public void setIndex(int index) {
+    myIndex = index;
+  }
+
+  public int getIndex() {
+    return myIndex;
+  }
+}
diff --git a/python/edu/course-creator/src/org/jetbrains/plugins/coursecreator/format/Task.java b/python/edu/course-creator/src/org/jetbrains/plugins/coursecreator/format/Task.java
new file mode 100644
index 0000000..e6c085b
--- /dev/null
+++ b/python/edu/course-creator/src/org/jetbrains/plugins/coursecreator/format/Task.java
@@ -0,0 +1,41 @@
+package org.jetbrains.plugins.coursecreator.format;
+
+import com.google.gson.annotations.Expose;
+import org.jetbrains.annotations.NotNull;
+
+import java.util.HashMap;
+import java.util.Map;
+
+public class Task {
+  @Expose public String name;
+  @Expose public Map<String, TaskFile> task_files = new HashMap<String, TaskFile>();
+  public int myIndex;
+
+  public Task() {}
+
+  public Task(@NotNull final String name) {
+    this.name = name;
+  }
+
+  public int getIndex() {
+    return myIndex;
+  }
+
+  public void addTaskFile(@NotNull final String name, int index) {
+    TaskFile taskFile = new TaskFile();
+    taskFile.setIndex(index);
+    task_files.put(name, taskFile);
+  }
+
+  public TaskFile getTaskFile(@NotNull final String name) {
+    return task_files.get(name);
+  }
+
+  public void setIndex(int index) {
+    myIndex = index;
+  }
+
+  public Map<String, TaskFile> getTaskFiles() {
+    return task_files;
+  }
+}
diff --git a/python/edu/course-creator/src/org/jetbrains/plugins/coursecreator/format/TaskFile.java b/python/edu/course-creator/src/org/jetbrains/plugins/coursecreator/format/TaskFile.java
new file mode 100644
index 0000000..85f0d91
--- /dev/null
+++ b/python/edu/course-creator/src/org/jetbrains/plugins/coursecreator/format/TaskFile.java
@@ -0,0 +1,111 @@
+package org.jetbrains.plugins.coursecreator.format;
+
+import com.google.gson.annotations.Expose;
+import com.intellij.openapi.editor.Document;
+import com.intellij.openapi.editor.LogicalPosition;
+import org.jetbrains.annotations.NotNull;
+import org.jetbrains.annotations.Nullable;
+import org.jetbrains.plugins.coursecreator.CCProjectService;
+
+import java.util.ArrayList;
+import java.util.List;
+
+public class TaskFile {
+  @Expose public List<TaskWindow> task_windows = new ArrayList<TaskWindow>();
+  public int myIndex;
+  public boolean myTrackChanges = true;
+
+  public boolean isTrackChanges() {
+    return myTrackChanges;
+  }
+
+  public void setTrackChanges(boolean trackChanges) {
+    myTrackChanges = trackChanges;
+  }
+
+  public TaskFile() {}
+
+  public void addTaskWindow(@NotNull final TaskWindow taskWindow, int index) {
+    taskWindow.setIndex(index);
+    task_windows.add(taskWindow);
+  }
+
+  public List<TaskWindow> getTaskWindows() {
+    return task_windows;
+  }
+
+  public void setIndex(int index) {
+    myIndex = index;
+  }
+
+  /**
+   * @param pos position in editor
+   * @return task window located in specified position or null if there is no task window in this position
+   */
+  @Nullable
+  public TaskWindow getTaskWindow(@NotNull final Document document, @NotNull final LogicalPosition pos) {
+    int line = pos.line;
+    if (line >= document.getLineCount()) {
+      return null;
+    }
+    int column = pos.column;
+    int offset = document.getLineStartOffset(line) + column;
+    for (TaskWindow tw : task_windows) {
+      if (tw.getLine() <= line) {
+        int twStartOffset = tw.getRealStartOffset(document);
+        final int length = tw.getReplacementLength() > 0 ? tw.getReplacementLength() : 0;
+        int twEndOffset = twStartOffset + length;
+        if (twStartOffset <= offset && offset <= twEndOffset) {
+          return tw;
+        }
+      }
+    }
+    return null;
+  }
+
+  /**
+   * Updates task window lines
+   *
+   * @param startLine lines greater than this line and including this line will be updated
+   * @param change    change to be added to line numbers
+   */
+  public void incrementLines(int startLine, int change) {
+    for (TaskWindow taskTaskWindow : task_windows) {
+      if (taskTaskWindow.getLine() >= startLine) {
+        taskTaskWindow.setLine(taskTaskWindow.getLine() + change);
+      }
+    }
+  }
+
+  /**
+   * Updates windows in specific line
+   *
+   * @param lineChange         change in line number
+   * @param line               line to be updated
+   * @param newEndOffsetInLine distance from line start to end of inserted fragment
+   * @param oldEndOffsetInLine distance from line start to end of changed fragment
+   */
+  public void updateLine(int lineChange, int line, int newEndOffsetInLine, int oldEndOffsetInLine) {
+    for (TaskWindow w : task_windows) {
+      if ((w.getLine() == line) && (w.getStart() >= oldEndOffsetInLine)) {
+        int distance = w.getStart() - oldEndOffsetInLine;
+        boolean coveredByPrevTW = false;
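+        // A preceding task window on the same line may already cover the changed fragment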
+        int prevIndex = w.getIndex() - 1;
+        if (CCProjectService.indexIsValid(prevIndex, task_windows)) {
+          TaskWindow prevTW = task_windows.get(prevIndex);
+          if (prevTW.getLine() == line) {
+            int endOffset = prevTW.getStart() + prevTW.getLength();
+            if (endOffset >= newEndOffsetInLine) {
+              coveredByPrevTW = true;
+            }
+          }
+        }
+        if (lineChange != 0 || newEndOffsetInLine <= w.getStart() || coveredByPrevTW) {
+          w.setStart(distance + newEndOffsetInLine);
+          w.setLine(line + lineChange);
+        }
+      }
+    }
+  }
+}
diff --git a/python/edu/course-creator/src/org/jetbrains/plugins/coursecreator/format/TaskWindow.java b/python/edu/course-creator/src/org/jetbrains/plugins/coursecreator/format/TaskWindow.java
new file mode 100644
index 0000000..cb6418e
--- /dev/null
+++ b/python/edu/course-creator/src/org/jetbrains/plugins/coursecreator/format/TaskWindow.java
@@ -0,0 +1,133 @@
+package org.jetbrains.plugins.coursecreator.format;
+
+import com.google.gson.annotations.Expose;
+import com.intellij.openapi.editor.Document;
+import com.intellij.openapi.editor.Editor;
+import com.intellij.openapi.editor.colors.EditorColors;
+import com.intellij.openapi.editor.colors.EditorColorsManager;
+import com.intellij.openapi.editor.markup.HighlighterLayer;
+import com.intellij.openapi.editor.markup.HighlighterTargetArea;
+import com.intellij.openapi.editor.markup.RangeHighlighter;
+import com.intellij.openapi.editor.markup.TextAttributes;
+import com.intellij.openapi.project.Project;
+import com.intellij.openapi.vfs.VirtualFile;
+import org.jetbrains.annotations.NotNull;
+import org.jetbrains.plugins.coursecreator.CCProjectService;
+
+import java.io.File;
+
+public class TaskWindow implements Comparable<TaskWindow> {
+
+  @Expose public int line;
+  @Expose public int start;
+  @Expose public String hint;
+  @Expose public String possible_answer;
+  @Expose public int length;
+  public String myTaskText;
+  public int myReplacementLength;
+  public int myIndex;
+
+  public TaskWindow() {}
+
+  public TaskWindow(int line, int start, int length, String selectedText) {
+    this.line = line;
+    this.start = start;
+    myReplacementLength = length;
+    this.possible_answer = selectedText;
+  }
+
+  public void setTaskText(@NotNull final String taskText) {
+    myTaskText = taskText;
+    length = myTaskText.length();
+  }
+
+  public String getTaskText() {
+    return myTaskText;
+  }
+
+  public int getReplacementLength() {
+    return myReplacementLength;
+  }
+
+  public void setHint(String hint) {
+    this.hint = hint;
+  }
+
+  public String getHintName() {
+    return hint;
+  }
+
+  public void removeResources(@NotNull final Project project) {
+    if (hint != null) {
+      VirtualFile hints = project.getBaseDir().findChild("hints");
+      if (hints == null) {
+        return;
+      }
+      File hintFile = new File(hints.getPath(), hint);
+      CCProjectService.deleteProjectFile(hintFile, project);
+    }
+  }
+
+  public void drawHighlighter(@NotNull final Editor editor) {
+    int startOffset = editor.getDocument().getLineStartOffset(line) + start;
+    int endOffset = startOffset + myReplacementLength;
+    TextAttributes defaultTestAttributes =
+      EditorColorsManager.getInstance().getGlobalScheme().getAttributes(EditorColors.LIVE_TEMPLATE_ATTRIBUTES);
+    RangeHighlighter highlighter =
+      editor.getMarkupModel().addRangeHighlighter(startOffset, endOffset, HighlighterLayer.LAST + 1, defaultTestAttributes,
+                                                  HighlighterTargetArea.EXACT_RANGE);
+    highlighter.setGreedyToLeft(true);
+    highlighter.setGreedyToRight(true);
+  }
+
+  public int getIndex() {
+    return myIndex;
+  }
+
+  public void setIndex(int index) {
+    myIndex = index;
+  }
+
+  public void setReplacementLength(int replacementLength) {
+    myReplacementLength = replacementLength;
+  }
+
+  public int getLine() {
+    return line;
+  }
+
+  public int getRealStartOffset(Document document) {
+    return document.getLineStartOffset(line) + start;
+  }
+
+  public void setLine(int line) {
+    this.line = line;
+  }
+
+  public int getStart() {
+    return start;
+  }
+
+  public void setStart(int start) {
+    this.start = start;
+  }
+
+  @Override
+  public int compareTo(@NotNull TaskWindow taskWindow) {
+    int lineDiff = line - taskWindow.line;
+    if (lineDiff == 0) {
+      return start - taskWindow.start;
+    }
+    return lineDiff;
+  }
+
+  public String getPossibleAnswer() {
+    return possible_answer;
+  }
+
+  public int getLength() {
+    return length;
+  }
+}
\ No newline at end of file
diff --git a/python/edu/course-creator/src/org/jetbrains/plugins/coursecreator/highlighting/CCTaskLineMarkerProvider.java b/python/edu/course-creator/src/org/jetbrains/plugins/coursecreator/highlighting/CCTaskLineMarkerProvider.java
new file mode 100644
index 0000000..1818b65
--- /dev/null
+++ b/python/edu/course-creator/src/org/jetbrains/plugins/coursecreator/highlighting/CCTaskLineMarkerProvider.java
@@ -0,0 +1,75 @@
+package org.jetbrains.plugins.coursecreator.highlighting;
+
+import com.intellij.codeHighlighting.Pass;
+import com.intellij.codeInsight.daemon.LineMarkerInfo;
+import com.intellij.codeInsight.daemon.LineMarkerProvider;
+import com.intellij.openapi.diagnostic.Logger;
+import com.intellij.openapi.editor.Document;
+import com.intellij.openapi.editor.markup.GutterIconRenderer;
+import com.intellij.openapi.project.Project;
+import com.intellij.openapi.util.IconLoader;
+import com.intellij.openapi.util.TextRange;
+import com.intellij.psi.PsiDirectory;
+import com.intellij.psi.PsiDocumentManager;
+import com.intellij.psi.PsiElement;
+import com.intellij.psi.PsiFile;
+import org.jetbrains.annotations.NotNull;
+import org.jetbrains.annotations.Nullable;
+import org.jetbrains.plugins.coursecreator.CCProjectService;
+import org.jetbrains.plugins.coursecreator.format.*;
+
+import java.util.Collection;
+import java.util.List;
+
+public class CCTaskLineMarkerProvider implements LineMarkerProvider {
+  private static final Logger LOG = Logger.getInstance(CCTaskLineMarkerProvider.class.getName());
+
+  @Nullable
+  @Override
+  public LineMarkerInfo getLineMarkerInfo(@NotNull PsiElement element) {
+    return null;
+  }
+
+  @Override
+  public void collectSlowLineMarkers(@NotNull List<PsiElement> elements, @NotNull final Collection<LineMarkerInfo> result) {
+    for (PsiElement element : elements) {
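+      // Resolve course -> lesson -> task -> task file from the file's parent directories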
+      if (element instanceof PsiFile) {
+        final Project project = element.getProject();
+        final Course course = CCProjectService.getInstance(project).getCourse();
+        if (course == null) return;
+        final String taskFileName = ((PsiFile) element).getName();
+        final PsiDirectory taskDir = ((PsiFile) element).getParent();
+        if (taskDir == null) continue;
+        final String taskDirName = taskDir.getName();
+        final PsiDirectory lessonDir = taskDir.getParentDirectory();
+        if (lessonDir == null) continue;
+        final String lessonDirName = lessonDir.getName();
+        final Lesson lesson = course.getLesson(lessonDirName);
+        if (lesson == null) continue;
+        final Task task = lesson.getTask(taskDirName);
+        if (task == null) continue;
+        final TaskFile taskFile = task.getTaskFile(taskFileName);
+        if (taskFile == null) continue;
+        final Document document = PsiDocumentManager.getInstance(project).getDocument((PsiFile) element);
+        if (document == null) continue;
+        for (final TaskWindow taskWindow : taskFile.getTaskWindows()) {
+          if (taskWindow.line > document.getLineCount()) continue;
+          final int lineStartOffset = document.getLineStartOffset(taskWindow.line);
+          final int offset = lineStartOffset + taskWindow.start;
+          if (offset > document.getTextLength()) continue;
+          final TextRange textRange = TextRange.create(offset, offset + taskWindow.getReplacementLength());
+          @SuppressWarnings("unchecked")
+          final LineMarkerInfo info = new LineMarkerInfo(element, textRange,
+              IconLoader.getIcon("/icons/gutter.png"), Pass.UPDATE_OVERRIDEN_MARKERS,
+              null, null, GutterIconRenderer.Alignment.CENTER) {
+            @Nullable
+            @Override
+            public GutterIconRenderer createGutterRenderer() {
+              return new TaskTextGutter(taskWindow, this);
+            }
+          };
+          result.add(info);
+        }
+      }
+    }
+  }
+}
diff --git a/python/edu/course-creator/src/org/jetbrains/plugins/coursecreator/highlighting/TaskTextGutter.java b/python/edu/course-creator/src/org/jetbrains/plugins/coursecreator/highlighting/TaskTextGutter.java
new file mode 100644
index 0000000..80b2674
--- /dev/null
+++ b/python/edu/course-creator/src/org/jetbrains/plugins/coursecreator/highlighting/TaskTextGutter.java
@@ -0,0 +1,60 @@
+package org.jetbrains.plugins.coursecreator.highlighting;
+
+import com.intellij.codeInsight.daemon.LineMarkerInfo;
+import com.intellij.openapi.actionSystem.ActionGroup;
+import com.intellij.openapi.actionSystem.AnAction;
+import com.intellij.openapi.actionSystem.DefaultActionGroup;
+import com.intellij.openapi.util.IconLoader;
+import org.jetbrains.annotations.NotNull;
+import org.jetbrains.annotations.Nullable;
+import org.jetbrains.plugins.coursecreator.actions.DeleteTaskWindow;
+import org.jetbrains.plugins.coursecreator.actions.ShowTaskWindowText;
+import org.jetbrains.plugins.coursecreator.format.TaskWindow;
+
+import javax.swing.*;
+
+public class TaskTextGutter extends LineMarkerInfo.LineMarkerGutterIconRenderer {
+  @NotNull
+  private final TaskWindow myTaskWindow;
+
+  public TaskTextGutter(@NotNull final TaskWindow taskWindow, LineMarkerInfo lineMarkerInfo) {
+    super(lineMarkerInfo);
+    myTaskWindow = taskWindow;
+  }
+
+  @NotNull
+  @Override
+  public Icon getIcon() {
+    return IconLoader.getIcon("/icons/gutter.png");
+  }
+
+  @Override
+  public boolean equals(Object o) {
+    return this == o || o instanceof TaskTextGutter
+        && myTaskWindow.getTaskText().equals(((TaskTextGutter) o).getTaskWindow().getTaskText());
+  }
+
+  @NotNull
+  public TaskWindow getTaskWindow() {
+    return myTaskWindow;
+  }
+
+  @Override
+  public int hashCode() {
+    return myTaskWindow.hashCode();
+  }
+
+  @Nullable
+  @Override
+  public AnAction getClickAction() {
+    return new ShowTaskWindowText(myTaskWindow);
+  }
+
+  @Nullable
+  @Override
+  public ActionGroup getPopupMenuActions() {
+    DefaultActionGroup group = new DefaultActionGroup();
+    group.add(new DeleteTaskWindow(myTaskWindow));
+    return group;
+  }
+}
diff --git a/python/edu/course-creator/src/org/jetbrains/plugins/coursecreator/projectView/CCDirectoryNode.java b/python/edu/course-creator/src/org/jetbrains/plugins/coursecreator/projectView/CCDirectoryNode.java
new file mode 100644
index 0000000..1a73041
--- /dev/null
+++ b/python/edu/course-creator/src/org/jetbrains/plugins/coursecreator/projectView/CCDirectoryNode.java
@@ -0,0 +1,63 @@
+package org.jetbrains.plugins.coursecreator.projectView;
+
+import com.intellij.ide.projectView.PresentationData;
+import com.intellij.ide.projectView.ViewSettings;
+import com.intellij.ide.projectView.impl.nodes.PsiDirectoryNode;
+import com.intellij.openapi.project.Project;
+import com.intellij.psi.PsiDirectory;
+import com.intellij.ui.SimpleTextAttributes;
+import org.jetbrains.annotations.NotNull;
+import org.jetbrains.plugins.coursecreator.CCProjectService;
+import org.jetbrains.plugins.coursecreator.format.Course;
+import org.jetbrains.plugins.coursecreator.format.Lesson;
+import org.jetbrains.plugins.coursecreator.format.Task;
+
+public class CCDirectoryNode extends PsiDirectoryNode {
+  private final PsiDirectory myValue;
+  private final Project myProject;
+
+  public CCDirectoryNode(@NotNull final Project project,
+                         PsiDirectory value,
+                         ViewSettings viewSettings) {
+    super(project, value, viewSettings);
+    myValue = value;
+    myProject = project;
+  }
+
+  @Override
+  protected void updateImpl(PresentationData data) {
+    String valueName = myValue.getName();
+    final Course course = CCProjectService.getInstance(myProject).getCourse();
+    if (course == null) return;
+    if (myProject.getBaseDir().equals(myValue.getVirtualFile())) {
+      data.clearText();
+      data.addText(valueName, SimpleTextAttributes.REGULAR_ATTRIBUTES);
+      data.addText(" (" + course.getName() + ")", SimpleTextAttributes.GRAYED_ATTRIBUTES);
+      return;
+    }
+    final Lesson lesson = course.getLesson(valueName);
+    if (lesson != null) {
+      data.clearText();
+      data.addText(valueName, SimpleTextAttributes.REGULAR_ATTRIBUTES);
+      data.addText(" (" + lesson.name + ")", SimpleTextAttributes.GRAYED_ATTRIBUTES);
+      return;
+    }
+    else {
+      final PsiDirectory parentDir = myValue.getParentDirectory();
+      if (parentDir != null) {
+        final Lesson parentLesson = course.getLesson(parentDir.getName());
+        if (parentLesson != null) {
+          final Task task = parentLesson.getTask(valueName);
+          if (task != null) {
+            data.clearText();
+            data.addText(valueName, SimpleTextAttributes.REGULAR_ATTRIBUTES);
+            data.addText(" (" + task.name + ")", SimpleTextAttributes.GRAYED_ATTRIBUTES);
+            return;
+          }
+        }
+      }
+    }
+    data.setPresentableText(valueName);
+  }
+
+}
diff --git a/python/edu/course-creator/src/org/jetbrains/plugins/coursecreator/projectView/CCTreeStructureProvider.java b/python/edu/course-creator/src/org/jetbrains/plugins/coursecreator/projectView/CCTreeStructureProvider.java
new file mode 100644
index 0000000..69b78ec
--- /dev/null
+++ b/python/edu/course-creator/src/org/jetbrains/plugins/coursecreator/projectView/CCTreeStructureProvider.java
@@ -0,0 +1,55 @@
+package org.jetbrains.plugins.coursecreator.projectView;
+
+import com.intellij.ide.projectView.TreeStructureProvider;
+import com.intellij.ide.projectView.ViewSettings;
+import com.intellij.ide.util.treeView.AbstractTreeNode;
+import com.intellij.openapi.project.DumbAware;
+import com.intellij.openapi.project.Project;
+import com.intellij.psi.PsiDirectory;
+import org.jetbrains.annotations.NotNull;
+import org.jetbrains.annotations.Nullable;
+import org.jetbrains.plugins.coursecreator.CCProjectService;
+
+import java.util.ArrayList;
+import java.util.Collection;
+
+public class CCTreeStructureProvider implements TreeStructureProvider, DumbAware {
+  @NotNull
+  @Override
+  public Collection<AbstractTreeNode> modify(@NotNull AbstractTreeNode parent,
+                                             @NotNull Collection<AbstractTreeNode> children,
+                                             ViewSettings settings) {
+    if (!needModify(parent)) {
+      return children;
+    }
+    Collection<AbstractTreeNode> nodes = new ArrayList<AbstractTreeNode>();
+    for (AbstractTreeNode node : children) {
+      Project project = node.getProject();
+      if (project != null) {
+        if (node.getValue() instanceof PsiDirectory) {
+          PsiDirectory directory = (PsiDirectory) node.getValue();
+          nodes.add(new CCDirectoryNode(project, directory, settings));
+        } else {
+          nodes.add(node);
+        }
+      }
+    }
+    return nodes;
+  }
+
+  private static boolean needModify(@NotNull final AbstractTreeNode parent) {
+    Project project = parent.getProject();
+    if (project != null) {
+      if (CCProjectService.getInstance(project).getCourse() == null) {
+        return false;
+      }
+    }
+    return true;
+  }
+
+  @Nullable
+  @Override
+  public Object getData(Collection<AbstractTreeNode> selected, String dataName) {
+    return null;
+  }
+}
diff --git a/python/edu/course-creator/src/org/jetbrains/plugins/coursecreator/ui/CCNewProjectPanel.form b/python/edu/course-creator/src/org/jetbrains/plugins/coursecreator/ui/CCNewProjectPanel.form
new file mode 100644
index 0000000..71e2785
--- /dev/null
+++ b/python/edu/course-creator/src/org/jetbrains/plugins/coursecreator/ui/CCNewProjectPanel.form
@@ -0,0 +1,77 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<form xmlns="http://www.intellij.com/uidesigner/form/" version="1" bind-to-class="org.jetbrains.plugins.coursecreator.ui.CCNewProjectPanel">
+  <grid id="27dc6" binding="myPanel" layout-manager="GridLayoutManager" row-count="3" column-count="3" same-size-horizontally="false" same-size-vertically="false" hgap="-1" vgap="-1">
+    <margin top="0" left="0" bottom="0" right="0"/>
+    <constraints>
+      <xy x="20" y="20" width="500" height="400"/>
+    </constraints>
+    <properties/>
+    <border type="none"/>
+    <children>
+      <component id="fd520" class="javax.swing.JLabel">
+        <constraints>
+          <grid row="0" column="0" row-span="1" col-span="1" vsize-policy="0" hsize-policy="0" anchor="8" fill="0" indent="0" use-parent-layout="false">
+            <preferred-size width="95" height="-1"/>
+          </grid>
+        </constraints>
+        <properties>
+          <text value="Name:"/>
+        </properties>
+      </component>
+      <component id="7e88" class="javax.swing.JTextField" binding="myName">
+        <constraints>
+          <grid row="0" column="1" row-span="1" col-span="2" vsize-policy="0" hsize-policy="6" anchor="8" fill="1" indent="0" use-parent-layout="false">
+            <preferred-size width="150" height="-1"/>
+          </grid>
+        </constraints>
+        <properties/>
+      </component>
+      <component id="ec56c" class="javax.swing.JLabel">
+        <constraints>
+          <grid row="2" column="0" row-span="1" col-span="1" vsize-policy="0" hsize-policy="0" anchor="9" fill="0" indent="0" use-parent-layout="false"/>
+        </constraints>
+        <properties>
+          <text value="Description"/>
+        </properties>
+      </component>
+      <component id="2e2d7" class="javax.swing.JLabel">
+        <constraints>
+          <grid row="1" column="0" row-span="1" col-span="1" vsize-policy="0" hsize-policy="0" anchor="8" fill="0" indent="0" use-parent-layout="false">
+            <preferred-size width="95" height="-1"/>
+          </grid>
+        </constraints>
+        <properties>
+          <text value="Author:"/>
+        </properties>
+      </component>
+      <component id="41fe6" class="javax.swing.JTextField" binding="myAuthorField">
+        <constraints>
+          <grid row="1" column="1" row-span="1" col-span="2" vsize-policy="0" hsize-policy="6" anchor="8" fill="1" indent="0" use-parent-layout="false">
+            <preferred-size width="150" height="-1"/>
+          </grid>
+        </constraints>
+        <properties/>
+      </component>
+      <grid id="12f06" layout-manager="GridLayoutManager" row-count="1" column-count="1" same-size-horizontally="false" same-size-vertically="false" hgap="-1" vgap="-1">
+        <margin top="0" left="0" bottom="0" right="0"/>
+        <constraints>
+          <grid row="2" column="1" row-span="1" col-span="2" vsize-policy="3" hsize-policy="3" anchor="0" fill="3" indent="0" use-parent-layout="false"/>
+        </constraints>
+        <properties/>
+        <border type="line">
+          <color color="-6709600"/>
+        </border>
+        <children>
+          <component id="389a7" class="javax.swing.JTextArea" binding="myDescription">
+            <constraints>
+              <grid row="0" column="0" row-span="1" col-span="1" vsize-policy="6" hsize-policy="6" anchor="0" fill="3" indent="0" use-parent-layout="false">
+                <preferred-size width="150" height="50"/>
+              </grid>
+            </constraints>
+            <properties/>
+          </component>
+        </children>
+      </grid>
+    </children>
+  </grid>
+</form>
diff --git a/python/edu/course-creator/src/org/jetbrains/plugins/coursecreator/ui/CCNewProjectPanel.java b/python/edu/course-creator/src/org/jetbrains/plugins/coursecreator/ui/CCNewProjectPanel.java
new file mode 100644
index 0000000..83a3458
--- /dev/null
+++ b/python/edu/course-creator/src/org/jetbrains/plugins/coursecreator/ui/CCNewProjectPanel.java
@@ -0,0 +1,59 @@
+package org.jetbrains.plugins.coursecreator.ui;
+
+import com.intellij.facet.ui.FacetValidatorsManager;
+import com.intellij.openapi.util.text.StringUtil;
+import com.intellij.ui.DocumentAdapter;
+import org.jetbrains.annotations.NotNull;
+
+import javax.swing.*;
+import javax.swing.event.DocumentEvent;
+
+public class CCNewProjectPanel {
+  private JPanel myPanel;
+  private JTextArea myDescription;
+  private JTextField myName;
+  private JTextField myAuthorField;
+  private FacetValidatorsManager myValidationManager;
+
+  public CCNewProjectPanel() {
+    final String userName = System.getProperty("user.name");
+    if (userName != null) {
+      myAuthorField.setText(userName);
+    }
+    myName.getDocument().addDocumentListener(new MyValidator());
+    myDescription.getDocument().addDocumentListener(new MyValidator());
+    myAuthorField.getDocument().addDocumentListener(new MyValidator());
+  }
+
+  public JPanel getMainPanel() {
+    return myPanel;
+  }
+
+  @NotNull
+  public String getName() {
+    return StringUtil.notNullize(myName.getText());
+  }
+
+  @NotNull
+  public String getDescription() {
+    return StringUtil.notNullize(myDescription.getText());
+  }
+
+  @NotNull
+  public String getAuthor() {
+    return StringUtil.notNullize(myAuthorField.getText());
+  }
+
+  public void registerValidators(FacetValidatorsManager manager) {
+    myValidationManager = manager;
+  }
+
+  private class MyValidator extends DocumentAdapter {
+
+    @Override
+    protected void textChanged(DocumentEvent e) {
+      myValidationManager.validate();
+    }
+  }
+}
diff --git a/python/edu/course-creator/src/org/jetbrains/plugins/coursecreator/ui/CreateCourseArchiveDialog.java b/python/edu/course-creator/src/org/jetbrains/plugins/coursecreator/ui/CreateCourseArchiveDialog.java
new file mode 100644
index 0000000..d6c3bdb
--- /dev/null
+++ b/python/edu/course-creator/src/org/jetbrains/plugins/coursecreator/ui/CreateCourseArchiveDialog.java
@@ -0,0 +1,40 @@
+package org.jetbrains.plugins.coursecreator.ui;
+
+import com.intellij.openapi.project.Project;
+import com.intellij.openapi.ui.DialogWrapper;
+import org.jetbrains.annotations.NotNull;
+import org.jetbrains.annotations.Nullable;
+import org.jetbrains.plugins.coursecreator.actions.CreateCourseArchive;
+
+import javax.swing.*;
+
+public class CreateCourseArchiveDialog extends DialogWrapper {
+
+  private CreateCourseArchivePanel myPanel;
+  private CreateCourseArchive myAction;
+
+  public CreateCourseArchiveDialog(@NotNull final Project project, CreateCourseArchive action) {
+    super(project);
+    setTitle("Create Course Archive");
+    myPanel = new CreateCourseArchivePanel(project, this);
+    myAction = action;
+    init();
+  }
+
+  @Nullable
+  @Override
+  protected JComponent createCenterPanel() {
+    return myPanel;
+  }
+
+  public void enableOKAction(boolean isEnabled) {
+    myOKAction.setEnabled(isEnabled);
+  }
+
+  @Override
+  protected void doOKAction() {
+    myAction.setZipName(myPanel.getZipName());
+    myAction.setLocationDir(myPanel.getLocationPath());
+    super.doOKAction();
+  }
+}
diff --git a/python/edu/course-creator/src/org/jetbrains/plugins/coursecreator/ui/CreateCourseArchivePanel.form b/python/edu/course-creator/src/org/jetbrains/plugins/coursecreator/ui/CreateCourseArchivePanel.form
new file mode 100644
index 0000000..920dcb9
--- /dev/null
+++ b/python/edu/course-creator/src/org/jetbrains/plugins/coursecreator/ui/CreateCourseArchivePanel.form
@@ -0,0 +1,79 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<form xmlns="http://www.intellij.com/uidesigner/form/" version="1" bind-to-class="org.jetbrains.plugins.coursecreator.ui.CreateCourseArchivePanel">
+  <grid id="27dc6" binding="myPanel" layout-manager="GridLayoutManager" row-count="1" column-count="2" same-size-horizontally="false" same-size-vertically="false" hgap="-1" vgap="-1">
+    <margin top="0" left="0" bottom="0" right="0"/>
+    <constraints>
+      <xy x="20" y="20" width="500" height="400"/>
+    </constraints>
+    <properties/>
+    <border type="none"/>
+    <children>
+      <grid id="a3b77" layout-manager="GridLayoutManager" row-count="4" column-count="2" same-size-horizontally="false" same-size-vertically="false" hgap="-1" vgap="-1">
+        <margin top="0" left="0" bottom="0" right="0"/>
+        <constraints>
+          <grid row="0" column="0" row-span="1" col-span="2" vsize-policy="3" hsize-policy="3" anchor="0" fill="3" indent="0" use-parent-layout="false"/>
+        </constraints>
+        <properties/>
+        <border type="none"/>
+        <children>
+          <component id="786af" class="javax.swing.JLabel">
+            <constraints>
+              <grid row="0" column="0" row-span="1" col-span="1" vsize-policy="0" hsize-policy="0" anchor="8" fill="0" indent="0" use-parent-layout="false"/>
+            </constraints>
+            <properties>
+              <text value="Name:"/>
+            </properties>
+          </component>
+          <component id="160bb" class="javax.swing.JTextField" binding="myNameField" default-binding="true">
+            <constraints>
+              <grid row="0" column="1" row-span="2" col-span="1" vsize-policy="0" hsize-policy="6" anchor="8" fill="1" indent="0" use-parent-layout="false">
+                <preferred-size width="150" height="-1"/>
+              </grid>
+            </constraints>
+            <properties/>
+          </component>
+          <component id="628ab" class="javax.swing.JLabel">
+            <constraints>
+              <grid row="1" column="0" row-span="2" col-span="1" vsize-policy="0" hsize-policy="0" anchor="8" fill="0" indent="0" use-parent-layout="false"/>
+            </constraints>
+            <properties>
+              <text value="Location:"/>
+            </properties>
+          </component>
+          <component id="aab8" class="com.intellij.openapi.ui.TextFieldWithBrowseButton" binding="myLocationField">
+            <constraints>
+              <grid row="2" column="1" row-span="1" col-span="1" vsize-policy="3" hsize-policy="3" anchor="0" fill="1" indent="0" use-parent-layout="false"/>
+            </constraints>
+            <properties/>
+          </component>
+          <grid id="29be7" layout-manager="GridLayoutManager" row-count="1" column-count="2" same-size-horizontally="false" same-size-vertically="false" hgap="-1" vgap="-1">
+            <margin top="0" left="0" bottom="0" right="0"/>
+            <constraints>
+              <grid row="3" column="0" row-span="1" col-span="2" vsize-policy="3" hsize-policy="3" anchor="0" fill="3" indent="0" use-parent-layout="false"/>
+            </constraints>
+            <properties/>
+            <border type="none"/>
+            <children>
+              <component id="4fa15" class="javax.swing.JLabel" binding="myErrorIcon">
+                <constraints>
+                  <grid row="0" column="0" row-span="1" col-span="1" vsize-policy="0" hsize-policy="0" anchor="8" fill="0" indent="0" use-parent-layout="false"/>
+                </constraints>
+                <properties>
+                  <text value=""/>
+                </properties>
+              </component>
+              <component id="4bdcf" class="javax.swing.JLabel" binding="myErrorLabel">
+                <constraints>
+                  <grid row="0" column="1" row-span="1" col-span="1" vsize-policy="0" hsize-policy="0" anchor="8" fill="0" indent="0" use-parent-layout="false"/>
+                </constraints>
+                <properties>
+                  <text value=""/>
+                </properties>
+              </component>
+            </children>
+          </grid>
+        </children>
+      </grid>
+    </children>
+  </grid>
+</form>
diff --git a/python/edu/course-creator/src/org/jetbrains/plugins/coursecreator/ui/CreateCourseArchivePanel.java b/python/edu/course-creator/src/org/jetbrains/plugins/coursecreator/ui/CreateCourseArchivePanel.java
new file mode 100644
index 0000000..4e7deed
--- /dev/null
+++ b/python/edu/course-creator/src/org/jetbrains/plugins/coursecreator/ui/CreateCourseArchivePanel.java
@@ -0,0 +1,65 @@
+package org.jetbrains.plugins.coursecreator.ui;
+
+import com.intellij.icons.AllIcons;
+import com.intellij.openapi.fileChooser.FileChooserDescriptor;
+import com.intellij.openapi.fileChooser.FileChooserDescriptorFactory;
+import com.intellij.openapi.project.Project;
+import com.intellij.openapi.ui.TextFieldWithBrowseButton;
+import org.jetbrains.annotations.NotNull;
+
+import javax.swing.*;
+import java.awt.*;
+import java.awt.event.ActionEvent;
+import java.awt.event.ActionListener;
+import java.io.File;
+
+public class CreateCourseArchivePanel extends JPanel {
+  private JPanel myPanel;
+  private JTextField myNameField;
+  private TextFieldWithBrowseButton myLocationField;
+  private JLabel myErrorIcon;
+  private JLabel myErrorLabel;
+  private CreateCourseArchiveDialog myDlg;
+
+  public CreateCourseArchivePanel(@NotNull final Project project, CreateCourseArchiveDialog dlg) {
+    setLayout(new BorderLayout());
+    add(myPanel, BorderLayout.CENTER);
+    myErrorIcon.setIcon(AllIcons.Actions.Lightning);
+    setState(false);
+    myDlg = dlg;
+    myNameField.setText("course");
+    myLocationField.setText(project.getBasePath());
+    FileChooserDescriptor descriptor = FileChooserDescriptorFactory.createSingleFolderDescriptor();
+    myLocationField.addBrowseFolderListener("Choose location folder", null, project, descriptor);
+    myLocationField.addActionListener(new ActionListener() {
+      @Override
+      public void actionPerformed(ActionEvent e) {
+        String location = myLocationField.getText();
+        File file = new File(location);
+        if (!file.exists() || !file.isDirectory()) {
+          myDlg.enableOKAction(false);
+          setError("Invalid location");
+          return;
+        }
+        myDlg.enableOKAction(true);
+      }
+    });
+  }
+
+  private void setState(boolean isVisible) {
+    myErrorIcon.setVisible(isVisible);
+    myErrorLabel.setVisible(isVisible);
+  }
+
+  private void setError(String message) {
+    myErrorLabel.setText(message);
+    setState(true);
+  }
+
+  public String getZipName() {
+    return myNameField.getText();
+  }
+
+  public String getLocationPath() {
+    return myLocationField.getText();
+  }
+}
diff --git a/python/edu/course-creator/src/org/jetbrains/plugins/coursecreator/ui/CreateTaskWindowDialog.java b/python/edu/course-creator/src/org/jetbrains/plugins/coursecreator/ui/CreateTaskWindowDialog.java
new file mode 100644
index 0000000..c7e8f71
--- /dev/null
+++ b/python/edu/course-creator/src/org/jetbrains/plugins/coursecreator/ui/CreateTaskWindowDialog.java
@@ -0,0 +1,153 @@
+package org.jetbrains.plugins.coursecreator.ui;
+
+import com.intellij.ide.projectView.ProjectView;
+import com.intellij.openapi.diagnostic.Logger;
+import com.intellij.openapi.project.Project;
+import com.intellij.openapi.ui.DialogWrapper;
+import com.intellij.openapi.ui.ValidationInfo;
+import com.intellij.openapi.util.text.StringUtil;
+import com.intellij.openapi.vfs.VirtualFile;
+import com.intellij.openapi.vfs.VirtualFileManager;
+import org.jetbrains.annotations.NotNull;
+import org.jetbrains.annotations.Nullable;
+import org.jetbrains.plugins.coursecreator.CCProjectService;
+import org.jetbrains.plugins.coursecreator.format.TaskWindow;
+
+import javax.swing.*;
+import java.io.*;
+
+public class CreateTaskWindowDialog extends DialogWrapper {
+
+  public static final String TITLE = "New Task Window";
+  private static final Logger LOG = Logger.getInstance(CreateTaskWindowDialog.class.getName());
+  private final TaskWindow myTaskWindow;
+  private final CreateTaskWindowPanel myPanel;
+  private final Project myProject;
+
+  public Project getProject() {
+    return myProject;
+  }
+
+  public CreateTaskWindowDialog(@NotNull final Project project, @NotNull final TaskWindow taskWindow, int lessonIndex,
+                                int taskIndex, String taskFileName, int taskWindowIndex) {
+    super(project, true);
+    setTitle(TITLE);
+    myTaskWindow = taskWindow;
+    myPanel = new CreateTaskWindowPanel(this);
+    String generatedHintName = "lesson" + lessonIndex + "task" + taskIndex + taskFileName + "_" + taskWindowIndex;
+    myPanel.setGeneratedHintName(generatedHintName);
+    if (taskWindow.getHintName() != null) {
+      setHintText(project, taskWindow);
+    }
+    myProject = project;
+    String taskWindowTaskText = taskWindow.getTaskText();
+    myPanel.setTaskWindowText(taskWindowTaskText != null ? taskWindowTaskText : "");
+    String hintName = taskWindow.getHintName();
+    myPanel.setHintName(hintName != null ? hintName : "");
+    init();
+    initValidation();
+  }
+
+  private void setHintText(Project project, TaskWindow taskWindow) {
+    VirtualFile hints = project.getBaseDir().findChild("hints");
+    if (hints != null) {
+      File file = new File(hints.getPath(), taskWindow.getHintName());
+      StringBuilder hintText = new StringBuilder();
+      if (file.exists()) {
+        BufferedReader bufferedReader = null;
+        try {
+          bufferedReader = new BufferedReader(new InputStreamReader(new FileInputStream(file)));
+          String line;
+          while ((line = bufferedReader.readLine()) != null) {
+            hintText.append(line).append("\n");
+          }
+          myPanel.doClick();
+          //myPanel.enableHint(true);
+          myPanel.setHintText(hintText.toString());
+        }
+        catch (FileNotFoundException e) {
+          LOG.error("created hint was not found", e);
+        }
+        catch (IOException e) {
+          LOG.error(e);
+        }
+        finally {
+          if (bufferedReader != null) {
+            try {
+              bufferedReader.close();
+            }
+            catch (IOException ignored) {
+            }
+          }
+        }
+      }
+    }
+  }
+
+  @Override
+  protected void doOKAction() {
+    String taskWindowText = myPanel.getTaskWindowText();
+    myTaskWindow.setTaskText(StringUtil.notNullize(taskWindowText));
+    if (myPanel.createHint()) {
+      String hintName = myPanel.getHintName();
+      myTaskWindow.setHint(hintName);
+      String hintText = myPanel.getHintText();
+      createHint(hintName, hintText);
+    }
+    super.doOKAction();
+  }
+
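+  // Writes the hint text to hints/<hintName> and refreshes the VFS and project view
+  // so that the new hint file shows up.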
+  private void createHint(String hintName, String hintText) {
+    VirtualFile hintsDir = myProject.getBaseDir().findChild("hints");
+    if (hintsDir != null) {
+      File hintFile = new File(hintsDir.getPath(), hintName);
+      PrintWriter printWriter = null;
+      try {
+        printWriter = new PrintWriter(hintFile);
+        printWriter.print(hintText);
+      }
+      catch (FileNotFoundException e) {
+        //TODO:show error in UI
+        return;
+      }
+      finally {
+        if (printWriter != null) {
+          printWriter.close();
+        }
+      }
+    }
+    VirtualFileManager.getInstance().refreshWithoutFileWatcher(true);
+    ProjectView.getInstance(myProject).refresh();
+  }
+
+  public void deleteHint() {
+    VirtualFile hintsDir = myProject.getBaseDir().findChild("hints");
+    if (hintsDir != null) {
+      String hintName = myTaskWindow.getHintName();
+      if (hintName == null) {
+        return;
+      }
+      File hintFile = new File(hintsDir.getPath(), hintName);
+      if (hintFile.exists()) {
+        CCProjectService.deleteProjectFile(hintFile, myProject);
+        myTaskWindow.setHint(null);
+        myPanel.resetHint();
+      }
+    }
+  }
+
+  @Nullable
+  @Override
+  protected JComponent createCenterPanel() {
+    return myPanel;
+  }
+
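+  // Rejects a hint name that already exists in the "hints" directory, unless this
+  // task window already owns a hint (i.e. an existing hint is being edited).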
+  @Nullable
+  @Override
+  public ValidationInfo doValidate() {
+    String name = myPanel.getHintName();
+    VirtualFile hintsDir = myProject.getBaseDir().findChild("hints");
+    if (hintsDir == null) {
+      return null;
+    }
+    VirtualFile child = hintsDir.findChild(name);
+    if (child == null) {
+      return null;
+    }
+    return myTaskWindow.getHintName() != null ? null : new ValidationInfo("A hint file with this name already exists");
+  }
+
+  public void validateInput() {
+    super.initValidation();
+  }
+}
diff --git a/python/edu/course-creator/src/org/jetbrains/plugins/coursecreator/ui/CreateTaskWindowPanel.form b/python/edu/course-creator/src/org/jetbrains/plugins/coursecreator/ui/CreateTaskWindowPanel.form
new file mode 100644
index 0000000..ccd91e4
--- /dev/null
+++ b/python/edu/course-creator/src/org/jetbrains/plugins/coursecreator/ui/CreateTaskWindowPanel.form
@@ -0,0 +1,102 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<form xmlns="http://www.intellij.com/uidesigner/form/" version="1" bind-to-class="org.jetbrains.plugins.coursecreator.ui.CreateTaskWindowPanel">
+  <grid id="27dc6" binding="myPanel" layout-manager="GridLayoutManager" row-count="4" column-count="4" same-size-horizontally="false" same-size-vertically="false" hgap="-1" vgap="-1">
+    <margin top="0" left="0" bottom="0" right="0"/>
+    <constraints>
+      <xy x="20" y="20" width="500" height="400"/>
+    </constraints>
+    <properties/>
+    <border type="none"/>
+    <children>
+      <component id="aaa28" class="javax.swing.JLabel">
+        <constraints>
+          <grid row="0" column="0" row-span="1" col-span="1" vsize-policy="0" hsize-policy="0" anchor="8" fill="0" indent="0" use-parent-layout="false"/>
+        </constraints>
+        <properties>
+          <labelFor value="b712"/>
+          <text value="Text:"/>
+        </properties>
+      </component>
+      <component id="d2e2f" class="javax.swing.JLabel" binding="myHintNameLabel">
+        <constraints>
+          <grid row="2" column="0" row-span="1" col-span="1" vsize-policy="0" hsize-policy="0" anchor="8" fill="0" indent="0" use-parent-layout="false"/>
+        </constraints>
+        <properties>
+          <enabled value="true"/>
+          <labelFor value="ddeb1"/>
+          <text value="Hint name:"/>
+        </properties>
+      </component>
+      <component id="ddeb1" class="javax.swing.JTextField" binding="myHintName">
+        <constraints>
+          <grid row="2" column="1" row-span="1" col-span="3" vsize-policy="0" hsize-policy="6" anchor="8" fill="1" indent="0" use-parent-layout="false">
+            <preferred-size width="150" height="-1"/>
+          </grid>
+        </constraints>
+        <properties>
+          <text value=""/>
+        </properties>
+      </component>
+      <component id="d322a" class="javax.swing.JLabel" binding="myHintTextLabel">
+        <constraints>
+          <grid row="3" column="0" row-span="1" col-span="1" vsize-policy="0" hsize-policy="0" anchor="8" fill="0" indent="0" use-parent-layout="false"/>
+        </constraints>
+        <properties>
+          <labelFor value="d0efc"/>
+          <text value="Hint text:"/>
+        </properties>
+      </component>
+      <grid id="51a63" layout-manager="GridLayoutManager" row-count="1" column-count="1" same-size-horizontally="false" same-size-vertically="false" hgap="-1" vgap="-1">
+        <margin top="0" left="0" bottom="0" right="0"/>
+        <constraints>
+          <grid row="3" column="1" row-span="1" col-span="3" vsize-policy="3" hsize-policy="3" anchor="0" fill="3" indent="0" use-parent-layout="false"/>
+        </constraints>
+        <properties/>
+        <border type="line">
+          <color color="-6709600"/>
+        </border>
+        <children>
+          <component id="d0efc" class="javax.swing.JTextArea" binding="myHintText">
+            <constraints>
+              <grid row="0" column="0" row-span="1" col-span="1" vsize-policy="6" hsize-policy="6" anchor="0" fill="3" indent="0" use-parent-layout="false">
+                <preferred-size width="300" height="100"/>
+              </grid>
+            </constraints>
+            <properties/>
+          </component>
+        </children>
+      </grid>
+      <grid id="cbc70" layout-manager="GridLayoutManager" row-count="1" column-count="1" same-size-horizontally="false" same-size-vertically="false" hgap="-1" vgap="-1">
+        <margin top="0" left="0" bottom="0" right="0"/>
+        <constraints>
+          <grid row="0" column="1" row-span="1" col-span="3" vsize-policy="3" hsize-policy="3" anchor="0" fill="3" indent="0" use-parent-layout="false">
+            <minimum-size width="150" height="-1"/>
+            <preferred-size width="150" height="-1"/>
+          </grid>
+        </constraints>
+        <properties/>
+        <border type="line">
+          <color color="-6709600"/>
+        </border>
+        <children>
+          <component id="b712" class="javax.swing.JTextArea" binding="myTaskWindowText">
+            <constraints>
+              <grid row="0" column="0" row-span="1" col-span="1" vsize-policy="6" hsize-policy="6" anchor="0" fill="3" indent="0" use-parent-layout="false">
+                <preferred-size width="300" height="100"/>
+              </grid>
+            </constraints>
+            <properties/>
+          </component>
+        </children>
+      </grid>
+      <component id="f86b4" class="javax.swing.JCheckBox" binding="myCreateHintCheckBox" default-binding="true">
+        <constraints>
+          <grid row="1" column="0" row-span="1" col-span="1" vsize-policy="0" hsize-policy="3" anchor="8" fill="0" indent="0" use-parent-layout="false"/>
+        </constraints>
+        <properties>
+          <text value="Create hint"/>
+        </properties>
+      </component>
+    </children>
+  </grid>
+</form>
diff --git a/python/edu/course-creator/src/org/jetbrains/plugins/coursecreator/ui/CreateTaskWindowPanel.java b/python/edu/course-creator/src/org/jetbrains/plugins/coursecreator/ui/CreateTaskWindowPanel.java
new file mode 100644
index 0000000..21a7eb0
--- /dev/null
+++ b/python/edu/course-creator/src/org/jetbrains/plugins/coursecreator/ui/CreateTaskWindowPanel.java
@@ -0,0 +1,96 @@
+package org.jetbrains.plugins.coursecreator.ui;
+
+import com.intellij.ui.DocumentAdapter;
+
+import javax.swing.*;
+import javax.swing.event.DocumentEvent;
+import java.awt.*;
+import java.awt.event.ItemEvent;
+import java.awt.event.ItemListener;
+
+public class CreateTaskWindowPanel extends JPanel {
+
+  private final CreateTaskWindowDialog myDialog;
+  private JPanel myPanel;
+  private JTextArea myTaskWindowText;
+  private JTextField myHintName;
+  private JTextArea myHintText;
+  private JCheckBox myCreateHintCheckBox;
+  private JLabel myHintNameLabel;
+  private JLabel myHintTextLabel;
+  private String myGeneratedHintName = "";
+
+  public CreateTaskWindowPanel(CreateTaskWindowDialog dialog) {
+    super(new BorderLayout());
+    add(myPanel, BorderLayout.CENTER);
+    myDialog = dialog;
+    enableHint(false);
+    myCreateHintCheckBox.addItemListener(new ItemListener() {
+      @Override
+      public void itemStateChanged(ItemEvent e) {
+        int state = e.getStateChange();
+        enableHint(state == ItemEvent.SELECTED);
+        if (state == ItemEvent.DESELECTED) {
+          myDialog.deleteHint();
+        }
+      }
+    });
+
+    myHintName.getDocument().addDocumentListener(new DocumentAdapter() {
+      @Override
+      protected void textChanged(DocumentEvent e) {
+        myDialog.validateInput();
+      }
+    });
+  }
+
+  public void enableHint(boolean isEnable) {
+    myHintName.setEnabled(isEnable);
+    myHintText.setEnabled(isEnable);
+    myHintNameLabel.setEnabled(isEnable);
+    myHintTextLabel.setEnabled(isEnable);
+    myHintName.setText(myGeneratedHintName);
+  }
+
+  public void setTaskWindowText(String taskWindowText) {
+    myTaskWindowText.setText(taskWindowText);
+  }
+
+  public void setHintName(String hintName) {
+    myHintName.setText(hintName);
+  }
+
+  public void setHintText(String hintText) {
+    myHintText.setText(hintText);
+  }
+
+  public String getTaskWindowText() {
+    return myTaskWindowText.getText();
+  }
+
+  public String getHintName() {
+    return myHintName.getText();
+  }
+
+  public String getHintText() {
+    return myHintText.getText();
+  }
+
+  public boolean createHint() {
+    return myHintName.isEnabled();
+  }
+
+  public void doClick() {
+    myCreateHintCheckBox.doClick();
+  }
+
+  public void resetHint() {
+    myHintName.setText("");
+    myHintText.setText("");
+  }
+
+  public void setGeneratedHintName(String generatedHintName) {
+    myGeneratedHintName = generatedHintName;
+  }
+}
diff --git a/python/edu/learn-python/gen/icons/StudyIcons.java b/python/edu/learn-python/gen/icons/StudyIcons.java
index 2840910..04ae98d 100644
--- a/python/edu/learn-python/gen/icons/StudyIcons.java
+++ b/python/edu/learn-python/gen/icons/StudyIcons.java
@@ -13,17 +13,15 @@
     return IconLoader.getIcon(path, StudyIcons.class);
   }
 
-  public static final Icon Add = load("/icons/com/jetbrains/python/edu/add.png"); // 16x16
-  public static final Icon Checked = load("/icons/com/jetbrains/python/edu/checked.png"); // 32x32
-  public static final Icon Failed = load("/icons/com/jetbrains/python/edu/failed.png"); // 32x32
-  public static final Icon Next = load("/icons/com/jetbrains/python/edu/next.png"); // 24x24
-  public static final Icon Playground = load("/icons/com/jetbrains/python/edu/playground.png"); // 32x28
-  public static final Icon Prev = load("/icons/com/jetbrains/python/edu/prev.png"); // 24x24
-  public static final Icon Refresh = load("/icons/com/jetbrains/python/edu/refresh.png"); // 16x16
-  public static final Icon Refresh24 = load("/icons/com/jetbrains/python/edu/refresh24.png"); // 24x24
-  public static final Icon Resolve = load("/icons/com/jetbrains/python/edu/resolve.png"); // 24x24
-  public static final Icon Run = load("/icons/com/jetbrains/python/edu/Run.png"); // 24x24
+  public static final Icon EducationalProjectType = load("/icons/com/jetbrains/python/edu/EducationalProjectType.png"); // 32x32
+  public static final Icon Lesson = load("/icons/com/jetbrains/python/edu/Lesson.png"); // 16x16
+  public static final Icon LessonCompl = load("/icons/com/jetbrains/python/edu/LessonCompl.png"); // 16x16
+  public static final Icon Playground = load("/icons/com/jetbrains/python/edu/Playground.png"); // 16x16
+  public static final Icon Prev = load("/icons/com/jetbrains/python/edu/prev.png"); // 16x16
+  public static final Icon Resolve = load("/icons/com/jetbrains/python/edu/resolve.png"); // 16x16
   public static final Icon ShowHint = load("/icons/com/jetbrains/python/edu/showHint.png"); // 24x24
-  public static final Icon Unchecked = load("/icons/com/jetbrains/python/edu/unchecked.png"); // 32x32
+  public static final Icon Task = load("/icons/com/jetbrains/python/edu/Task.png"); // 16x16
+  public static final Icon TaskCompl = load("/icons/com/jetbrains/python/edu/TaskCompl.png"); // 16x16
+  public static final Icon TaskProbl = load("/icons/com/jetbrains/python/edu/TaskProbl.png"); // 16x16
   public static final Icon WatchInput = load("/icons/com/jetbrains/python/edu/WatchInput.png"); // 24x24
 }
diff --git a/python/edu/learn-python/learn-python.iml b/python/edu/learn-python/learn-python.iml
index bd539d1..613d675 100644
--- a/python/edu/learn-python/learn-python.iml
+++ b/python/edu/learn-python/learn-python.iml
@@ -15,6 +15,7 @@
     <orderEntry type="module" module-name="lang-impl" />
     <orderEntry type="library" name="gson" level="project" />
     <orderEntry type="library" name="JUnit4" level="project" />
+    <orderEntry type="module" module-name="python-ide-community" />
   </component>
 </module>
 
diff --git a/python/edu/learn-python/resources/META-INF/plugin.xml b/python/edu/learn-python/resources/META-INF/plugin.xml
index ec828eb..8e8bddc 100644
--- a/python/edu/learn-python/resources/META-INF/plugin.xml
+++ b/python/edu/learn-python/resources/META-INF/plugin.xml
@@ -50,7 +50,7 @@
     <action id="NextTaskAction" class="com.jetbrains.python.edu.actions.StudyNextStudyTaskAction" text="NextTaskAction" description="Next Task"/>
     <action id="PreviousTaskAction" class="com.jetbrains.python.edu.actions.StudyPreviousStudyTaskAction" text="PreviousTaskAction"
             description="Previous Task"/>
-    <action id="RefreshTaskAction" class="com.jetbrains.python.edu.actions.StudyRefreshTaskAction" text="RefreshTaskAction"
+    <action id="RefreshTaskAction" class="com.jetbrains.python.edu.actions.StudyRefreshTaskFileAction" text="RefreshTaskAction"
             description="Refresh current task"/>
     <action id="WatchInputAction" class="com.jetbrains.python.edu.actions.StudyEditInputAction" text="WatchInputAction"
             description="watch input"/>
@@ -59,6 +59,11 @@
             description="show hint">
       <add-to-group group-id="MainToolBar" anchor="last"/>
     </action>
+
+    <action id="WelcomeScreen.LearnPython" class="com.jetbrains.python.edu.actions.StudyNewProject" icon="StudyIcons.EducationalProjectType">
+      <add-to-group group-id="WelcomeScreen.QuickStart" anchor="first"/>
+    </action>
+
   </actions>
 
   <extensions defaultExtensionNs="com.intellij">
@@ -70,4 +75,7 @@
     <applicationService serviceInterface="com.intellij.openapi.fileEditor.impl.EditorEmptyTextPainter"
         serviceImplementation="com.jetbrains.python.edu.StudyInstructionPainter" overrides="true"/>
   </extensions>
+  <extensions defaultExtensionNs="Pythonid">
+    <visitorFilter language="Python" implementationClass="com.jetbrains.python.edu.highlighting.StudyVisitorFilter"/>
+  </extensions>
 </idea-plugin>
\ No newline at end of file
diff --git a/python/edu/learn-python/resources/courses/introduction_course.zip b/python/edu/learn-python/resources/courses/introduction_course.zip
index f3b24f2..c39695e 100644
--- a/python/edu/learn-python/resources/courses/introduction_course.zip
+++ b/python/edu/learn-python/resources/courses/introduction_course.zip
Binary files differ
diff --git a/python/edu/learn-python/resources/icons/com/jetbrains/python/edu/EducationalProjectType.png b/python/edu/learn-python/resources/icons/com/jetbrains/python/edu/EducationalProjectType.png
new file mode 100644
index 0000000..6340e89
--- /dev/null
+++ b/python/edu/learn-python/resources/icons/com/jetbrains/python/edu/EducationalProjectType.png
Binary files differ
diff --git a/python/edu/learn-python/resources/icons/com/jetbrains/python/edu/EducationalProjectType_dark.png b/python/edu/learn-python/resources/icons/com/jetbrains/python/edu/EducationalProjectType_dark.png
new file mode 100644
index 0000000..d9ec6dc
--- /dev/null
+++ b/python/edu/learn-python/resources/icons/com/jetbrains/python/edu/EducationalProjectType_dark.png
Binary files differ
diff --git a/python/edu/learn-python/resources/icons/com/jetbrains/python/edu/Lesson.png b/python/edu/learn-python/resources/icons/com/jetbrains/python/edu/Lesson.png
new file mode 100644
index 0000000..9cc8a4f
--- /dev/null
+++ b/python/edu/learn-python/resources/icons/com/jetbrains/python/edu/Lesson.png
Binary files differ
diff --git a/python/edu/learn-python/resources/icons/com/jetbrains/python/edu/Lesson@2x.png b/python/edu/learn-python/resources/icons/com/jetbrains/python/edu/Lesson@2x.png
new file mode 100644
index 0000000..2b0c0b8
--- /dev/null
+++ b/python/edu/learn-python/resources/icons/com/jetbrains/python/edu/Lesson@2x.png
Binary files differ
diff --git a/python/edu/learn-python/resources/icons/com/jetbrains/python/edu/LessonComp@2x.png b/python/edu/learn-python/resources/icons/com/jetbrains/python/edu/LessonComp@2x.png
new file mode 100644
index 0000000..e18c639
--- /dev/null
+++ b/python/edu/learn-python/resources/icons/com/jetbrains/python/edu/LessonComp@2x.png
Binary files differ
diff --git a/python/edu/learn-python/resources/icons/com/jetbrains/python/edu/LessonCompl.png b/python/edu/learn-python/resources/icons/com/jetbrains/python/edu/LessonCompl.png
new file mode 100644
index 0000000..bacc7de
--- /dev/null
+++ b/python/edu/learn-python/resources/icons/com/jetbrains/python/edu/LessonCompl.png
Binary files differ
diff --git a/python/edu/learn-python/resources/icons/com/jetbrains/python/edu/Playground.png b/python/edu/learn-python/resources/icons/com/jetbrains/python/edu/Playground.png
new file mode 100644
index 0000000..1086710
--- /dev/null
+++ b/python/edu/learn-python/resources/icons/com/jetbrains/python/edu/Playground.png
Binary files differ
diff --git a/python/edu/learn-python/resources/icons/com/jetbrains/python/edu/Playground@2x.png b/python/edu/learn-python/resources/icons/com/jetbrains/python/edu/Playground@2x.png
new file mode 100644
index 0000000..58665fa
--- /dev/null
+++ b/python/edu/learn-python/resources/icons/com/jetbrains/python/edu/Playground@2x.png
Binary files differ
diff --git a/python/edu/learn-python/resources/icons/com/jetbrains/python/edu/Run.png b/python/edu/learn-python/resources/icons/com/jetbrains/python/edu/Run.png
deleted file mode 100644
index 27a6e36..0000000
--- a/python/edu/learn-python/resources/icons/com/jetbrains/python/edu/Run.png
+++ /dev/null
Binary files differ
diff --git a/python/edu/learn-python/resources/icons/com/jetbrains/python/edu/Task.png b/python/edu/learn-python/resources/icons/com/jetbrains/python/edu/Task.png
new file mode 100644
index 0000000..b678c64
--- /dev/null
+++ b/python/edu/learn-python/resources/icons/com/jetbrains/python/edu/Task.png
Binary files differ
diff --git a/python/edu/learn-python/resources/icons/com/jetbrains/python/edu/Task@2x.png b/python/edu/learn-python/resources/icons/com/jetbrains/python/edu/Task@2x.png
new file mode 100644
index 0000000..4abb95c
--- /dev/null
+++ b/python/edu/learn-python/resources/icons/com/jetbrains/python/edu/Task@2x.png
Binary files differ
diff --git a/python/edu/learn-python/resources/icons/com/jetbrains/python/edu/Task@2x_dark.png b/python/edu/learn-python/resources/icons/com/jetbrains/python/edu/Task@2x_dark.png
new file mode 100644
index 0000000..78af691
--- /dev/null
+++ b/python/edu/learn-python/resources/icons/com/jetbrains/python/edu/Task@2x_dark.png
Binary files differ
diff --git a/python/edu/learn-python/resources/icons/com/jetbrains/python/edu/TaskCompl.png b/python/edu/learn-python/resources/icons/com/jetbrains/python/edu/TaskCompl.png
new file mode 100644
index 0000000..a7f8f77
--- /dev/null
+++ b/python/edu/learn-python/resources/icons/com/jetbrains/python/edu/TaskCompl.png
Binary files differ
diff --git a/python/edu/learn-python/resources/icons/com/jetbrains/python/edu/TaskCompl@2x.png b/python/edu/learn-python/resources/icons/com/jetbrains/python/edu/TaskCompl@2x.png
new file mode 100644
index 0000000..d657aa6
--- /dev/null
+++ b/python/edu/learn-python/resources/icons/com/jetbrains/python/edu/TaskCompl@2x.png
Binary files differ
diff --git a/python/edu/learn-python/resources/icons/com/jetbrains/python/edu/TaskCompl@2x_dark.png b/python/edu/learn-python/resources/icons/com/jetbrains/python/edu/TaskCompl@2x_dark.png
new file mode 100644
index 0000000..f5f29ee
--- /dev/null
+++ b/python/edu/learn-python/resources/icons/com/jetbrains/python/edu/TaskCompl@2x_dark.png
Binary files differ
diff --git a/python/edu/learn-python/resources/icons/com/jetbrains/python/edu/TaskCompl_dark.png b/python/edu/learn-python/resources/icons/com/jetbrains/python/edu/TaskCompl_dark.png
new file mode 100644
index 0000000..481e9cd
--- /dev/null
+++ b/python/edu/learn-python/resources/icons/com/jetbrains/python/edu/TaskCompl_dark.png
Binary files differ
diff --git a/python/edu/learn-python/resources/icons/com/jetbrains/python/edu/TaskProbl.png b/python/edu/learn-python/resources/icons/com/jetbrains/python/edu/TaskProbl.png
new file mode 100644
index 0000000..6173d64
--- /dev/null
+++ b/python/edu/learn-python/resources/icons/com/jetbrains/python/edu/TaskProbl.png
Binary files differ
diff --git a/python/edu/learn-python/resources/icons/com/jetbrains/python/edu/TaskProbl@2x.png b/python/edu/learn-python/resources/icons/com/jetbrains/python/edu/TaskProbl@2x.png
new file mode 100644
index 0000000..44aba9d
--- /dev/null
+++ b/python/edu/learn-python/resources/icons/com/jetbrains/python/edu/TaskProbl@2x.png
Binary files differ
diff --git a/python/edu/learn-python/resources/icons/com/jetbrains/python/edu/TaskProbl@2x_dark.png b/python/edu/learn-python/resources/icons/com/jetbrains/python/edu/TaskProbl@2x_dark.png
new file mode 100644
index 0000000..f74c9de
--- /dev/null
+++ b/python/edu/learn-python/resources/icons/com/jetbrains/python/edu/TaskProbl@2x_dark.png
Binary files differ
diff --git a/python/edu/learn-python/resources/icons/com/jetbrains/python/edu/TaskProbl_dark.png b/python/edu/learn-python/resources/icons/com/jetbrains/python/edu/TaskProbl_dark.png
new file mode 100644
index 0000000..0133a7f
--- /dev/null
+++ b/python/edu/learn-python/resources/icons/com/jetbrains/python/edu/TaskProbl_dark.png
Binary files differ
diff --git a/python/edu/learn-python/resources/icons/com/jetbrains/python/edu/Task_dark.png b/python/edu/learn-python/resources/icons/com/jetbrains/python/edu/Task_dark.png
new file mode 100644
index 0000000..2ff286d
--- /dev/null
+++ b/python/edu/learn-python/resources/icons/com/jetbrains/python/edu/Task_dark.png
Binary files differ
diff --git a/python/edu/learn-python/resources/icons/com/jetbrains/python/edu/add.png b/python/edu/learn-python/resources/icons/com/jetbrains/python/edu/add.png
deleted file mode 100644
index 9494f2d..0000000
--- a/python/edu/learn-python/resources/icons/com/jetbrains/python/edu/add.png
+++ /dev/null
Binary files differ
diff --git a/python/edu/learn-python/resources/icons/com/jetbrains/python/edu/checked.png b/python/edu/learn-python/resources/icons/com/jetbrains/python/edu/checked.png
deleted file mode 100644
index 4105a01..0000000
--- a/python/edu/learn-python/resources/icons/com/jetbrains/python/edu/checked.png
+++ /dev/null
Binary files differ
diff --git a/python/edu/learn-python/resources/icons/com/jetbrains/python/edu/failed.png b/python/edu/learn-python/resources/icons/com/jetbrains/python/edu/failed.png
deleted file mode 100644
index e2aaa55..0000000
--- a/python/edu/learn-python/resources/icons/com/jetbrains/python/edu/failed.png
+++ /dev/null
Binary files differ
diff --git a/python/edu/learn-python/resources/icons/com/jetbrains/python/edu/icon.jpg b/python/edu/learn-python/resources/icons/com/jetbrains/python/edu/icon.jpg
deleted file mode 100644
index 3a9716e..0000000
--- a/python/edu/learn-python/resources/icons/com/jetbrains/python/edu/icon.jpg
+++ /dev/null
Binary files differ
diff --git a/python/edu/learn-python/resources/icons/com/jetbrains/python/edu/next.png b/python/edu/learn-python/resources/icons/com/jetbrains/python/edu/next.png
deleted file mode 100644
index dd1a5d9..0000000
--- a/python/edu/learn-python/resources/icons/com/jetbrains/python/edu/next.png
+++ /dev/null
Binary files differ
diff --git a/python/edu/learn-python/resources/icons/com/jetbrains/python/edu/playground.png b/python/edu/learn-python/resources/icons/com/jetbrains/python/edu/playground.png
deleted file mode 100644
index d12a751..0000000
--- a/python/edu/learn-python/resources/icons/com/jetbrains/python/edu/playground.png
+++ /dev/null
Binary files differ
diff --git a/python/edu/learn-python/resources/icons/com/jetbrains/python/edu/prev.png b/python/edu/learn-python/resources/icons/com/jetbrains/python/edu/prev.png
index 0656f81..fc51cb5 100644
--- a/python/edu/learn-python/resources/icons/com/jetbrains/python/edu/prev.png
+++ b/python/edu/learn-python/resources/icons/com/jetbrains/python/edu/prev.png
Binary files differ
diff --git a/python/edu/learn-python/resources/icons/com/jetbrains/python/edu/refresh.png b/python/edu/learn-python/resources/icons/com/jetbrains/python/edu/refresh.png
deleted file mode 100644
index d595f6b..0000000
--- a/python/edu/learn-python/resources/icons/com/jetbrains/python/edu/refresh.png
+++ /dev/null
Binary files differ
diff --git a/python/edu/learn-python/resources/icons/com/jetbrains/python/edu/refresh24.png b/python/edu/learn-python/resources/icons/com/jetbrains/python/edu/refresh24.png
deleted file mode 100644
index 218f075..0000000
--- a/python/edu/learn-python/resources/icons/com/jetbrains/python/edu/refresh24.png
+++ /dev/null
Binary files differ
diff --git a/python/edu/learn-python/resources/icons/com/jetbrains/python/edu/resolve.png b/python/edu/learn-python/resources/icons/com/jetbrains/python/edu/resolve.png
index 7ef960b..78290f9 100644
--- a/python/edu/learn-python/resources/icons/com/jetbrains/python/edu/resolve.png
+++ b/python/edu/learn-python/resources/icons/com/jetbrains/python/edu/resolve.png
Binary files differ
diff --git a/python/edu/learn-python/resources/icons/com/jetbrains/python/edu/resolve_dark.png b/python/edu/learn-python/resources/icons/com/jetbrains/python/edu/resolve_dark.png
index 99aaa1d..b988adc 100644
--- a/python/edu/learn-python/resources/icons/com/jetbrains/python/edu/resolve_dark.png
+++ b/python/edu/learn-python/resources/icons/com/jetbrains/python/edu/resolve_dark.png
Binary files differ
diff --git a/python/edu/learn-python/resources/icons/com/jetbrains/python/edu/unchecked.png b/python/edu/learn-python/resources/icons/com/jetbrains/python/edu/unchecked.png
deleted file mode 100644
index 2145982..0000000
--- a/python/edu/learn-python/resources/icons/com/jetbrains/python/edu/unchecked.png
+++ /dev/null
Binary files differ
diff --git a/python/edu/learn-python/src/com/jetbrains/python/edu/StudyDirectoryProjectGenerator.java b/python/edu/learn-python/src/com/jetbrains/python/edu/StudyDirectoryProjectGenerator.java
index d4831d9..59bd8bc 100644
--- a/python/edu/learn-python/src/com/jetbrains/python/edu/StudyDirectoryProjectGenerator.java
+++ b/python/edu/learn-python/src/com/jetbrains/python/edu/StudyDirectoryProjectGenerator.java
@@ -53,7 +53,7 @@
   @NotNull
   @Override
   public String getName() {
-    return "Study project";
+    return "Learn Python";
   }
 
 
@@ -137,7 +137,7 @@
   @Nullable
   @Override
   public Icon getLogo() {
-    return StudyIcons.Playground;
+    return StudyIcons.EducationalProjectType;
   }
 
 
diff --git a/python/edu/learn-python/src/com/jetbrains/python/edu/StudyDocumentListener.java b/python/edu/learn-python/src/com/jetbrains/python/edu/StudyDocumentListener.java
index 9fdcf70..6ce1d09 100644
--- a/python/edu/learn-python/src/com/jetbrains/python/edu/StudyDocumentListener.java
+++ b/python/edu/learn-python/src/com/jetbrains/python/edu/StudyDocumentListener.java
@@ -52,6 +52,9 @@
       if (myTaskWindow != null) {
         int newLength = myTaskWindow.getLength() + change;
         myTaskWindow.setLength(newLength <= 0 ? 0 : newLength);
+        if (e.getNewFragment().equals("\n")) {
+          myTaskWindow.setLength(myTaskWindow.getLength() + 1);
+        }
       }
       int newEnd = offset + event.getNewLength();
       int newLine = document.getLineNumber(newEnd);
diff --git a/python/edu/learn-python/src/com/jetbrains/python/edu/StudyInstructionPainter.java b/python/edu/learn-python/src/com/jetbrains/python/edu/StudyInstructionPainter.java
index 4f34bfb..96a44b2 100644
--- a/python/edu/learn-python/src/com/jetbrains/python/edu/StudyInstructionPainter.java
+++ b/python/edu/learn-python/src/com/jetbrains/python/edu/StudyInstructionPainter.java
@@ -8,7 +8,6 @@
 import com.intellij.util.PairFunction;
 import com.intellij.util.ui.GraphicsUtil;
 import com.intellij.util.ui.UIUtil;
-import com.jetbrains.python.edu.ui.StudyCondition;
 
 import java.awt.*;
 
@@ -19,10 +18,6 @@
 public class StudyInstructionPainter extends EditorEmptyTextPainter {
   @Override
   public void paintEmptyText(final EditorsSplitters splitters, Graphics g) {
-    if (!StudyCondition.VALUE) {
-      super.paintEmptyText(splitters, g);
-      return;
-    }
     boolean isDarkBackground = UIUtil.isUnderDarcula();
     UIUtil.applyRenderingHints(g);
     GraphicsUtil.setupAntialiasing(g, true, false);
diff --git a/python/edu/learn-python/src/com/jetbrains/python/edu/StudyState.java b/python/edu/learn-python/src/com/jetbrains/python/edu/StudyState.java
new file mode 100644
index 0000000..96dc3d9
--- /dev/null
+++ b/python/edu/learn-python/src/com/jetbrains/python/edu/StudyState.java
@@ -0,0 +1,52 @@
+package com.jetbrains.python.edu;
+
+import com.intellij.openapi.editor.Editor;
+import com.intellij.openapi.fileEditor.FileDocumentManager;
+import com.intellij.openapi.vfs.VirtualFile;
+import com.jetbrains.python.edu.course.Task;
+import com.jetbrains.python.edu.course.TaskFile;
+import com.jetbrains.python.edu.editor.StudyEditor;
+
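+/**
+ * Snapshot of the state behind a study editor: the editor, its task file, virtual file,
+ * task and task directory. Any field may be null, so call {@link #isValid()} before
+ * using the getters.
+ */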
+public class StudyState {
+  private final StudyEditor myStudyEditor;
+  private final Editor myEditor;
+  private final TaskFile myTaskFile;
+  private final VirtualFile myVirtualFile;
+  private final Task myTask;
+  private final VirtualFile myTaskDir;
+
+  public StudyState(final StudyEditor studyEditor) {
+    myStudyEditor = studyEditor;
+    myEditor = studyEditor != null ? studyEditor.getEditor() : null;
+    myTaskFile = studyEditor != null ? studyEditor.getTaskFile() : null;
+    myVirtualFile = myEditor != null ? FileDocumentManager.getInstance().getFile(myEditor.getDocument()) : null;
+    myTaskDir = myVirtualFile != null ? myVirtualFile.getParent() : null;
+    myTask = myTaskFile != null ? myTaskFile.getTask() : null;
+  }
+
+  public Editor getEditor() {
+    return myEditor;
+  }
+
+  public TaskFile getTaskFile() {
+    return myTaskFile;
+  }
+
+  public VirtualFile getVirtualFile() {
+    return myVirtualFile;
+  }
+
+  public Task getTask() {
+    return myTask;
+  }
+
+  public VirtualFile getTaskDir() {
+    return myTaskDir;
+  }
+
+  public boolean isValid() {
+    return myStudyEditor != null && myEditor != null &&
+           myTaskFile != null && myVirtualFile != null &&
+           myTask != null && myTaskDir != null;
+  }
+}
diff --git a/python/edu/learn-python/src/com/jetbrains/python/edu/StudyTaskManager.java b/python/edu/learn-python/src/com/jetbrains/python/edu/StudyTaskManager.java
index 213c1f7..3013fbc 100644
--- a/python/edu/learn-python/src/com/jetbrains/python/edu/StudyTaskManager.java
+++ b/python/edu/learn-python/src/com/jetbrains/python/edu/StudyTaskManager.java
@@ -109,21 +109,29 @@
               StartupManager.getInstance(myProject).runWhenProjectIsInitialized(new Runnable() {
                 @Override
                 public void run() {
-                  ToolWindowManager.getInstance(myProject).getToolWindow(ToolWindowId.PROJECT_VIEW).show(null);
-                  FileEditor[] editors = FileEditorManager.getInstance(myProject).getSelectedEditors();
-                  if (editors.length > 0) {
-                    JComponent focusedComponent = editors[0].getPreferredFocusedComponent();
-                    if (focusedComponent != null) {
-                      IdeFocusManager.getInstance(myProject).requestFocus(focusedComponent, true);
+                  ToolWindowManager.getInstance(myProject).getToolWindow(ToolWindowId.PROJECT_VIEW).show(new Runnable() {
+                    @Override
+                    public void run() {
+                      FileEditor[] editors = FileEditorManager.getInstance(myProject).getSelectedEditors();
+                      if (editors.length > 0) {
+                        final JComponent focusedComponent = editors[0].getPreferredFocusedComponent();
+                        if (focusedComponent != null) {
+                          ApplicationManager.getApplication().invokeLater(new Runnable() {
+                            @Override
+                            public void run() {
+                              IdeFocusManager.getInstance(myProject).requestFocus(focusedComponent, true);
+                            }
+                          });
+                        }
+                      }
                     }
-                  }
+                  });
                 }
               });
               UISettings.getInstance().HIDE_TOOL_STRIPES = false;
               UISettings.getInstance().fireUISettingsChanged();
               ToolWindowManager toolWindowManager = ToolWindowManager.getInstance(myProject);
               String toolWindowId = StudyToolWindowFactory.STUDY_TOOL_WINDOW;
-              //TODO:decide smth with tool window position
               try {
                 Method method = toolWindowManager.getClass().getDeclaredMethod("registerToolWindow", String.class,
                                                                                JComponent.class,
diff --git a/python/edu/learn-python/src/com/jetbrains/python/edu/StudyTestRunner.java b/python/edu/learn-python/src/com/jetbrains/python/edu/StudyTestRunner.java
new file mode 100644
index 0000000..b0cd5ba
--- /dev/null
+++ b/python/edu/learn-python/src/com/jetbrains/python/edu/StudyTestRunner.java
@@ -0,0 +1,76 @@
+package com.jetbrains.python.edu;
+
+import com.intellij.execution.ExecutionException;
+import com.intellij.execution.configurations.GeneralCommandLine;
+import com.intellij.openapi.diagnostic.Logger;
+import com.intellij.openapi.module.ModuleManager;
+import com.intellij.openapi.project.Project;
+import com.intellij.openapi.projectRoots.Sdk;
+import com.intellij.openapi.util.io.FileUtil;
+import com.intellij.openapi.vfs.VirtualFile;
+import com.jetbrains.python.edu.course.Course;
+import com.jetbrains.python.edu.course.Task;
+import com.jetbrains.python.sdk.PythonSdkType;
+
+import java.io.*;
+import java.util.Map;
+
+public class StudyTestRunner {
+  public static final String TEST_OK = "#study_plugin test OK";
+  private static final String TEST_FAILED = "#study_plugin FAILED + ";
+  private static final String PYTHONPATH = "PYTHONPATH";
+  private static final Logger LOG = Logger.getInstance(StudyTestRunner.class);
+  private final Task myTask;
+  private final VirtualFile myTaskDir;
+
+  public StudyTestRunner(Task task, VirtualFile taskDir) {
+    myTask = task;
+    myTaskDir = taskDir;
+  }
+
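+  // Runs the task's test script with the module's Python SDK, passing the course
+  // resource root and the path of the checked file as arguments. Returns null if
+  // no Python SDK or interpreter path is available.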
+  public Process launchTests(Project project, String executablePath) throws ExecutionException {
+    Sdk sdk = PythonSdkType.findPythonSdk(ModuleManager.getInstance(project).getModules()[0]);
+    File testRunner = new File(myTaskDir.getPath(), myTask.getTestFile());
+    GeneralCommandLine commandLine = new GeneralCommandLine();
+    commandLine.setWorkDirectory(myTaskDir.getPath());
+    final Map<String, String> env = commandLine.getEnvironment();
+    final VirtualFile courseDir = project.getBaseDir();
+    if (courseDir != null) {
+      env.put(PYTHONPATH, courseDir.getPath());
+    }
+    if (sdk != null) {
+      String pythonPath = sdk.getHomePath();
+      if (pythonPath != null) {
+        commandLine.setExePath(pythonPath);
+        commandLine.addParameter(testRunner.getPath());
+        final Course course = StudyTaskManager.getInstance(project).getCourse();
+        assert course != null;
+        commandLine.addParameter(new File(course.getResourcePath()).getParent());
+        commandLine.addParameter(FileUtil.toSystemDependentName(executablePath));
+        return commandLine.createProcess();
+      }
+    }
+    return null;
+  }
+
+
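+  // Reads the test process output and returns the message of the first failed test,
+  // or TEST_OK if no failure marker was found.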
+  public String getPassedTests(Process p) {
+    InputStream testOutput = p.getInputStream();
+    BufferedReader testOutputReader = new BufferedReader(new InputStreamReader(testOutput));
+    String line;
+    try {
+      while ((line = testOutputReader.readLine()) != null) {
+        if (line.contains(TEST_FAILED)) {
+          return line.substring(TEST_FAILED.length(), line.length());
+        }
+      }
+    }
+    catch (IOException e) {
+      LOG.error(e);
+    }
+    finally {
+      StudyUtils.closeSilently(testOutputReader);
+    }
+    return TEST_OK;
+  }
+}
diff --git a/python/edu/learn-python/src/com/jetbrains/python/edu/StudyUtils.java b/python/edu/learn-python/src/com/jetbrains/python/edu/StudyUtils.java
index d3ac1da..5d9bb13 100644
--- a/python/edu/learn-python/src/com/jetbrains/python/edu/StudyUtils.java
+++ b/python/edu/learn-python/src/com/jetbrains/python/edu/StudyUtils.java
@@ -3,6 +3,7 @@
 import com.intellij.ide.SaveAndSyncHandlerImpl;
 import com.intellij.openapi.actionSystem.AnActionEvent;
 import com.intellij.openapi.actionSystem.Presentation;
+import com.intellij.openapi.application.ApplicationManager;
 import com.intellij.openapi.diagnostic.Logger;
 import com.intellij.openapi.editor.Document;
 import com.intellij.openapi.fileEditor.FileDocumentManager;
@@ -10,12 +11,12 @@
 import com.intellij.openapi.fileEditor.FileEditorManager;
 import com.intellij.openapi.project.Project;
 import com.intellij.openapi.util.TextRange;
+import com.intellij.openapi.util.io.FileUtil;
 import com.intellij.openapi.vfs.VirtualFile;
 import com.intellij.openapi.vfs.VirtualFileManager;
 import com.intellij.openapi.wm.ToolWindowManager;
 import com.intellij.util.ui.UIUtil;
-import com.jetbrains.python.edu.course.TaskFile;
-import com.jetbrains.python.edu.course.TaskWindow;
+import com.jetbrains.python.edu.course.*;
 import com.jetbrains.python.edu.editor.StudyEditor;
 import com.jetbrains.python.edu.ui.StudyToolWindowFactory;
 import org.jetbrains.annotations.NotNull;
@@ -70,7 +71,7 @@
       return wrapHTML ? UIUtil.toHtml(taskText.toString()) : taskText.toString();
     }
     catch (IOException e) {
-      LOG.error("Failed to get file text from file " + fileName, e);
+      LOG.info("Failed to get file text from file " + fileName, e);
     }
     finally {
       closeSilently(reader);
@@ -119,14 +120,18 @@
   }
 
   @SuppressWarnings("IOResourceOpenedButNotSafelyClosed")
-  public static VirtualFile flushWindows(Document document, TaskFile taskFile, VirtualFile file) {
+  public static VirtualFile flushWindows(TaskFile taskFile, VirtualFile file) {
     VirtualFile taskDir = file.getParent();
     VirtualFile fileWindows = null;
+    final Document document = FileDocumentManager.getInstance().getDocument(file);
+    if (document == null) {
+      LOG.debug("Couldn't flush windows");
+      return null;
+    }
     if (taskDir != null) {
       String name = file.getNameWithoutExtension() + "_windows";
       PrintWriter printWriter = null;
       try {
-
         fileWindows = taskDir.createChildData(taskFile, name);
         printWriter = new PrintWriter(new FileOutputStream(fileWindows.getPath()));
         for (TaskWindow taskWindow : taskFile.getTaskWindows()) {
@@ -137,6 +142,12 @@
           String windowDescription = document.getText(new TextRange(start, start + taskWindow.getLength()));
           printWriter.println("#study_plugin_window = " + windowDescription);
         }
+        ApplicationManager.getApplication().runWriteAction(new Runnable() {
+          @Override
+          public void run() {
+            FileDocumentManager.getInstance().saveDocument(document);
+          }
+        });
       }
       catch (IOException e) {
        LOG.error(e);
@@ -148,4 +159,27 @@
     }
     return fileWindows;
   }
+
+  public static void deleteFile(VirtualFile file) {
+    try {
+      file.delete(StudyUtils.class);
+    }
+    catch (IOException e) {
+      LOG.error(e);
+    }
+  }
+
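+  // Copies a resource file of the given task (taken from the course resource directory
+  // of its lesson/task) to a file named copyName in the same directory and returns the copy.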
+  public static File copyResourceFile(String sourceName, String copyName, Project project, Task task)
+    throws IOException {
+    StudyTaskManager taskManager = StudyTaskManager.getInstance(project);
+    Course course = taskManager.getCourse();
+    int taskNum = task.getIndex() + 1;
+    int lessonNum = task.getLesson().getIndex() + 1;
+    assert course != null;
+    String pathToResource =
+      FileUtil.join(new File(course.getResourcePath()).getParent(), Lesson.LESSON_DIR + lessonNum, Task.TASK_DIR + taskNum);
+    File resourceFile = new File(pathToResource, copyName);
+    FileUtil.copy(new File(pathToResource, sourceName), resourceFile);
+    return resourceFile;
+  }
 }
diff --git a/python/edu/learn-python/src/com/jetbrains/python/edu/actions/StudyCheckAction.java b/python/edu/learn-python/src/com/jetbrains/python/edu/actions/StudyCheckAction.java
index f8e10c9..5d02f71 100644
--- a/python/edu/learn-python/src/com/jetbrains/python/edu/actions/StudyCheckAction.java
+++ b/python/edu/learn-python/src/com/jetbrains/python/edu/actions/StudyCheckAction.java
@@ -1,7 +1,6 @@
 package com.jetbrains.python.edu.actions;
 
 import com.intellij.execution.ExecutionException;
-import com.intellij.execution.configurations.GeneralCommandLine;
 import com.intellij.ide.projectView.ProjectView;
 import com.intellij.openapi.actionSystem.ActionManager;
 import com.intellij.openapi.actionSystem.AnActionEvent;
@@ -13,93 +12,81 @@
 import com.intellij.openapi.fileEditor.FileDocumentManager;
 import com.intellij.openapi.fileEditor.FileEditor;
 import com.intellij.openapi.fileEditor.FileEditorManager;
-import com.intellij.openapi.module.ModuleManager;
 import com.intellij.openapi.project.DumbAwareAction;
 import com.intellij.openapi.project.Project;
-import com.intellij.openapi.projectRoots.Sdk;
+import com.intellij.openapi.ui.MessageType;
 import com.intellij.openapi.ui.popup.Balloon;
 import com.intellij.openapi.ui.popup.BalloonBuilder;
 import com.intellij.openapi.ui.popup.JBPopupFactory;
-import com.intellij.openapi.util.TextRange;
-import com.intellij.openapi.util.io.FileUtil;
+import com.intellij.openapi.util.Disposer;
 import com.intellij.openapi.vfs.VirtualFile;
-import com.intellij.ui.JBColor;
+import com.intellij.openapi.wm.IdeFocusManager;
 import com.jetbrains.python.edu.StudyDocumentListener;
-import com.jetbrains.python.edu.StudyTaskManager;
+import com.jetbrains.python.edu.StudyState;
+import com.jetbrains.python.edu.StudyTestRunner;
 import com.jetbrains.python.edu.StudyUtils;
-import com.jetbrains.python.edu.course.*;
+import com.jetbrains.python.edu.course.StudyStatus;
+import com.jetbrains.python.edu.course.Task;
+import com.jetbrains.python.edu.course.TaskFile;
+import com.jetbrains.python.edu.course.TaskWindow;
 import com.jetbrains.python.edu.editor.StudyEditor;
-import com.jetbrains.python.sdk.PythonSdkType;
 import org.jetbrains.annotations.NotNull;
 
 import javax.swing.*;
 import java.awt.*;
-import java.io.*;
-import java.util.*;
-import java.util.List;
+import java.io.IOException;
+import java.util.Map;
 
 public class StudyCheckAction extends DumbAwareAction {
 
   private static final Logger LOG = Logger.getInstance(StudyCheckAction.class.getName());
-  public static final String PYTHONPATH = "PYTHONPATH";
+  private static final String ANSWERS_POSTFIX = "_answers.py";
 
-  static class StudyTestRunner {
-    public static final String TEST_OK = "#study_plugin test OK";
-    private static final String TEST_FAILED = "#study_plugin FAILED + ";
-    private final Task myTask;
-    private final VirtualFile myTaskDir;
 
-    StudyTestRunner(Task task, VirtualFile taskDir) {
-      myTask = task;
-      myTaskDir = taskDir;
-    }
-
-    Process launchTests(Project project, String executablePath) throws ExecutionException {
-      Sdk sdk = PythonSdkType.findPythonSdk(ModuleManager.getInstance(project).getModules()[0]);
-      File testRunner = new File(myTaskDir.getPath(), myTask.getTestFile());
-      GeneralCommandLine commandLine = new GeneralCommandLine();
-      commandLine.setWorkDirectory(myTaskDir.getPath());
-      final Map<String, String> env = commandLine.getEnvironment();
-      final VirtualFile courseDir = project.getBaseDir();
-      if (courseDir != null)
-        env.put(PYTHONPATH, courseDir.getPath());
-      if (sdk != null) {
-        String pythonPath = sdk.getHomePath();
-        if (pythonPath != null) {
-          commandLine.setExePath(pythonPath);
-          commandLine.addParameter(testRunner.getPath());
-          final Course course = StudyTaskManager.getInstance(project).getCourse();
-          assert course != null;
-          commandLine.addParameter(new File(course.getResourcePath()).getParent());
-          commandLine.addParameter(FileUtil.toSystemDependentName(executablePath));
-          return commandLine.createProcess();
-        }
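+  // Writes a "<name>_windows" helper file for every task file of the task, recording
+  // the current task window contents before the tests are run.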
+  private static void flushWindows(@NotNull final Task task, @NotNull final VirtualFile taskDir) {
+    for (Map.Entry<String, TaskFile> entry : task.getTaskFiles().entrySet()) {
+      String name = entry.getKey();
+      TaskFile taskFile = entry.getValue();
+      VirtualFile virtualFile = taskDir.findChild(name);
+      if (virtualFile == null) {
+        continue;
       }
-      return null;
-    }
-
-
-    String getPassedTests(Process p) {
-      InputStream testOutput = p.getInputStream();
-      BufferedReader testOutputReader = new BufferedReader(new InputStreamReader(testOutput));
-      String line;
-      try {
-        while ((line = testOutputReader.readLine()) != null) {
-          if (line.contains(TEST_FAILED)) {
-             return line.substring(TEST_FAILED.length(), line.length());
-          }
-        }
-      }
-      catch (IOException e) {
-        LOG.error(e);
-      }
-      finally {
-        StudyUtils.closeSilently(testOutputReader);
-      }
-      return TEST_OK;
+      StudyUtils.flushWindows(taskFile, virtualFile);
     }
   }
 
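+  // Removes the temporary "<name>_windows" files created by flushWindows().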
+  private static void deleteWindowDescriptions(@NotNull final Task task, @NotNull final VirtualFile taskDir) {
+    for (Map.Entry<String, TaskFile> entry : task.getTaskFiles().entrySet()) {
+      String name = entry.getKey();
+      VirtualFile virtualFile = taskDir.findChild(name);
+      if (virtualFile == null) {
+        continue;
+      }
+      String windowsFileName = virtualFile.getNameWithoutExtension() + "_windows";
+      VirtualFile windowsFile = taskDir.findChild(windowsFileName);
+      if (windowsFile != null) {
+        StudyUtils.deleteFile(windowsFile);
+      }
+    }
+  }
+
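+  // Redraws the task window highlighters in every open study editor of the task's files.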
+  private static void drawAllTaskWindows(@NotNull final Project project, @NotNull final Task task, @NotNull final VirtualFile taskDir) {
+    for (Map.Entry<String, TaskFile> entry : task.getTaskFiles().entrySet()) {
+      String name = entry.getKey();
+      TaskFile taskFile = entry.getValue();
+      VirtualFile virtualFile = taskDir.findChild(name);
+      if (virtualFile == null) {
+        continue;
+      }
+      FileEditor fileEditor = FileEditorManager.getInstance(project).getSelectedEditor(virtualFile);
+      if (fileEditor instanceof StudyEditor) {
+        StudyEditor studyEditor = (StudyEditor)fileEditor;
+        taskFile.drawAllWindows(studyEditor.getEditor());
+      }
+    }
+  }
+
+
   public void check(@NotNull final Project project) {
     ApplicationManager.getApplication().runWriteAction(new Runnable() {
       @Override
@@ -107,188 +94,138 @@
         CommandProcessor.getInstance().runUndoTransparentAction(new Runnable() {
           @Override
           public void run() {
-        final Editor selectedEditor = StudyEditor.getSelectedEditor(project);
-        if (selectedEditor != null) {
-          final FileDocumentManager fileDocumentManager = FileDocumentManager.getInstance();
-          final VirtualFile openedFile = fileDocumentManager.getFile(selectedEditor.getDocument());
-          if (openedFile != null) {
-            StudyTaskManager taskManager = StudyTaskManager.getInstance(project);
-            final TaskFile selectedTaskFile = taskManager.getTaskFile(openedFile);
-            List<VirtualFile> filesToDelete = new ArrayList<VirtualFile>();
-            if (selectedTaskFile != null) {
-              final VirtualFile taskDir = openedFile.getParent();
-              Task currentTask = selectedTaskFile.getTask();
-              StudyStatus oldStatus = currentTask.getStatus();
-              Map<String, TaskFile> taskFiles = selectedTaskFile.getTask().getTaskFiles();
+            final StudyEditor selectedEditor = StudyEditor.getSelectedStudyEditor(project);
+            final StudyState studyState = new StudyState(selectedEditor);
+            if (!studyState.isValid()) {
+              LOG.error("StudyCheckAction was invoked outside the study editor");
+              return;
+            }
+            Task task = studyState.getTask();
+            StudyStatus oldStatus = task.getStatus();
+            Map<String, TaskFile> taskFiles = task.getTaskFiles();
+            VirtualFile taskDir = studyState.getTaskDir();
+            flushWindows(task, taskDir);
+            StudyRunAction runAction = (StudyRunAction)ActionManager.getInstance().getAction(StudyRunAction.ACTION_ID);
+            if (runAction != null && taskFiles.size() == 1) {
+              runAction.run(project);
+            }
+            ApplicationManager.getApplication().invokeLater(new Runnable() {
+              @Override
+              public void run() {
+                IdeFocusManager.getInstance(project).requestFocus(studyState.getEditor().getComponent(), true);
+              }
+            });
+            final StudyTestRunner testRunner = new StudyTestRunner(task, taskDir);
+            Process testProcess = null;
+            try {
+              testProcess = testRunner.launchTests(project, studyState.getVirtualFile().getPath());
+            }
+            catch (ExecutionException e) {
+              LOG.error(e);
+            }
+            if (testProcess == null) {
+              return;
+            }
+            String failedMessage = testRunner.getPassedTests(testProcess);
+            if (failedMessage.equals(StudyTestRunner.TEST_OK)) {
+              task.setStatus(StudyStatus.Solved, oldStatus);
+              createTestResultPopUp("Congratulations!", MessageType.INFO.getPopupBackground(), project);
+            }
+            else {
+              task.setStatus(StudyStatus.Failed, oldStatus);
               for (Map.Entry<String, TaskFile> entry : taskFiles.entrySet()) {
                 String name = entry.getKey();
                 TaskFile taskFile = entry.getValue();
-                VirtualFile virtualFile = taskDir.findChild(name);
-                if (virtualFile == null) {
+                if (taskFile.getTaskWindows().size() < 2) {
+                  taskFile.setStatus(StudyStatus.Failed, StudyStatus.Unchecked);
                   continue;
                 }
-                VirtualFile windowFile = StudyUtils.flushWindows(FileDocumentManager.getInstance().getDocument(virtualFile), taskFile, virtualFile);
-                filesToDelete.add(windowFile);
-                FileDocumentManager.getInstance().saveAllDocuments();
+                runSmartTestProcess(taskDir, testRunner, name, taskFile, project);
               }
-
-              StudyRunAction runAction = (StudyRunAction)ActionManager.getInstance().getAction(StudyRunAction.ACTION_ID);
-              if (runAction != null && currentTask.getTaskFiles().size() == 1) {
-                runAction.run(project);
-              }
-              final StudyTestRunner testRunner = new StudyTestRunner(currentTask, taskDir);
-              Process testProcess = null;
-              try {
-                testProcess = testRunner.launchTests(project, openedFile.getPath());
-              }
-              catch (ExecutionException e) {
-                LOG.error(e);
-              }
-              if (testProcess != null) {
-                String failedMessage = testRunner.getPassedTests(testProcess);
-                if (failedMessage.equals(StudyTestRunner.TEST_OK)) {
-                  currentTask.setStatus(StudyStatus.Solved, oldStatus);
-                  StudyUtils.updateStudyToolWindow(project);
-                  selectedTaskFile.drawAllWindows(selectedEditor);
-                  ProjectView.getInstance(project).refresh();
-                  for (VirtualFile file:filesToDelete) {
-                    try {
-                      file.delete(this);
-                    }
-                    catch (IOException e) {
-                      LOG.error(e);
-                    }
-                  }
-                  createTestResultPopUp("Congratulations!", JBColor.GREEN, project);
-                  return;
-                }
-                for (Map.Entry<String, TaskFile> entry : taskFiles.entrySet()) {
-                  String name = entry.getKey();
-                  TaskFile taskFile = entry.getValue();
-                  TaskFile answerTaskFile = new TaskFile();
-                  VirtualFile virtualFile = taskDir.findChild(name);
-                  if (virtualFile == null) {
-                    continue;
-                  }
-                  VirtualFile answerFile = getCopyWithAnswers(taskDir, virtualFile, taskFile, answerTaskFile);
-                  for (TaskWindow taskWindow : answerTaskFile.getTaskWindows()) {
-                    Document document = FileDocumentManager.getInstance().getDocument(virtualFile);
-                    if (document == null) {
-                      continue;
-                    }
-                    if (!taskWindow.isValid(document)) {
-                      continue;
-                    }
-                    check(project, taskWindow, answerFile, answerTaskFile, taskFile, document, testRunner, virtualFile);
-                  }
-                  FileEditor fileEditor = FileEditorManager.getInstance(project).getSelectedEditor(virtualFile);
-                  Editor editor = null;
-                  if (fileEditor instanceof StudyEditor) {
-                    StudyEditor studyEditor = (StudyEditor) fileEditor;
-                    editor = studyEditor.getEditor();
-                  }
-
-                  if (editor != null) {
-                    taskFile.drawAllWindows(editor);
-                    StudyUtils.synchronize();
-                  }
-                  try {
-                    answerFile.delete(this);
-                  }
-                  catch (IOException e) {
-                    LOG.error(e);
-                  }
-                }
-                for (VirtualFile file:filesToDelete) {
-                  try {
-                    file.delete(this);
-                  }
-                  catch (IOException e) {
-                    LOG.error(e);
-                  }
-                }
-                currentTask.setStatus(StudyStatus.Failed, oldStatus);
-                StudyUtils.updateStudyToolWindow(project);
-                createTestResultPopUp(failedMessage, JBColor.RED, project);
-              }
+              createTestResultPopUp(failedMessage, MessageType.ERROR.getPopupBackground(), project);
+              navigateToFailedTaskWindow(studyState, task, taskDir, project);
             }
+            StudyUtils.updateStudyToolWindow(project);
+            drawAllTaskWindows(project, task, taskDir);
+            ProjectView.getInstance(project).refresh();
+            deleteWindowDescriptions(task, taskDir);
           }
-        }
-
-         }
-      });
+        });
       }
     });
   }
 
-  private void check(Project project,
-                     TaskWindow taskWindow,
-                     VirtualFile answerFile,
-                     TaskFile answerTaskFile,
-                     TaskFile usersTaskFile,
-                     Document usersDocument,
-                     StudyTestRunner testRunner,
-                     VirtualFile openedFile) {
-
-    try {
-       VirtualFile windowCopy = answerFile.copy(this, answerFile.getParent(), answerFile.getNameWithoutExtension() + "_window" + taskWindow.getIndex() + ".py");
-      final FileDocumentManager documentManager = FileDocumentManager.getInstance();
-      final Document windowDocument = documentManager.getDocument(windowCopy);
-      if (windowDocument != null) {
-        StudyTaskManager taskManager = StudyTaskManager.getInstance(project);
-        Course course = taskManager.getCourse();
-        Task task = usersTaskFile.getTask();
-        int taskNum = task.getIndex() + 1;
-        int lessonNum = task.getLesson().getIndex() + 1;
-        assert course != null;
-        String pathToResource = FileUtil.join(new File(course.getResourcePath()).getParent(), Lesson.LESSON_DIR + lessonNum,  Task.TASK_DIR + taskNum);
-        File resourceFile = new File(pathToResource, windowCopy.getName());
-        FileUtil.copy(new File(pathToResource, openedFile.getName()), resourceFile);
-        TaskFile windowTaskFile = new TaskFile();
-        TaskFile.copy(answerTaskFile, windowTaskFile);
-        StudyDocumentListener listener = new StudyDocumentListener(windowTaskFile);
-        windowDocument.addDocumentListener(listener);
-        int start = taskWindow.getRealStartOffset(windowDocument);
-        int end = start + taskWindow.getLength();
-        TaskWindow userTaskWindow = usersTaskFile.getTaskWindows().get(taskWindow.getIndex());
-        int userStart = userTaskWindow.getRealStartOffset(usersDocument);
-        int userEnd = userStart + userTaskWindow.getLength();
-        String text = usersDocument.getText(new TextRange(userStart, userEnd));
-        windowDocument.replaceString(start, end, text);
-        ApplicationManager.getApplication().runWriteAction(new Runnable() {
-          @Override
-          public void run() {
-            documentManager.saveDocument(windowDocument);
+  private static void navigateToFailedTaskWindow(@NotNull final StudyState studyState,
+                                                 @NotNull final Task task,
+                                                 @NotNull final VirtualFile taskDir,
+                                                 @NotNull final Project project) {
+    TaskFile selectedTaskFile = studyState.getTaskFile();
+    Editor editor = studyState.getEditor();
+    TaskFile taskFileToNavigate = selectedTaskFile;
+    VirtualFile fileToNavigate = studyState.getVirtualFile();
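+    // If the selected task file has no failed task windows, fall back to another task file of this task that does.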
+    if (!selectedTaskFile.hasFailedTaskWindows()) {
+      for (Map.Entry<String, TaskFile> entry : task.getTaskFiles().entrySet()) {
+        String name = entry.getKey();
+        TaskFile taskFile = entry.getValue();
+        if (taskFile.hasFailedTaskWindows()) {
+          taskFileToNavigate = taskFile;
+          VirtualFile virtualFile = taskDir.findChild(name);
+          if (virtualFile == null) {
+            continue;
           }
-        });
-        VirtualFile fileWindows = StudyUtils.flushWindows(windowDocument, windowTaskFile, windowCopy);
-        Process smartTestProcess = testRunner.launchTests(project, windowCopy.getPath());
-        boolean res = testRunner.getPassedTests(smartTestProcess).equals(StudyTestRunner.TEST_OK);
-        userTaskWindow.setStatus(res ? StudyStatus.Solved : StudyStatus.Failed, StudyStatus.Unchecked);
-        windowCopy.delete(this);
-        fileWindows.delete(this);
-        if (!resourceFile.delete()) {
-          LOG.error("failed to delete", resourceFile.getPath());
+          FileEditor fileEditor = FileEditorManager.getInstance(project).getSelectedEditor(virtualFile);
+          if (fileEditor instanceof StudyEditor) {
+            StudyEditor studyEditor = (StudyEditor)fileEditor;
+            editor = studyEditor.getEditor();
+          }
+          fileToNavigate = virtualFile;
+          break;
         }
       }
     }
-    catch (IOException e) {
-      LOG.error(e);
-    }
-    catch (ExecutionException e) {
-      LOG.error(e);
-    }
+    FileEditorManager.getInstance(project).openFile(fileToNavigate, true);
+    final Editor editorToNavigate = editor;
+    ApplicationManager.getApplication().invokeLater(new Runnable() {
+      @Override
+      public void run() {
+        IdeFocusManager.getInstance(project).requestFocus(editorToNavigate.getContentComponent(), true);
+      }
+    });
+    taskFileToNavigate.navigateToFirstFailedTaskWindow(editor);
   }
 
+  private void runSmartTestProcess(@NotNull final VirtualFile taskDir,
+                                   @NotNull final StudyTestRunner testRunner,
+                                   final String taskFileName,
+                                   @NotNull final TaskFile taskFile,
+                                   @NotNull final Project project) {
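+    // Build an answer copy of the task file and re-check each valid task window individually (see TaskWindow#smartCheck).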
+    TaskFile answerTaskFile = new TaskFile();
+    VirtualFile virtualFile = taskDir.findChild(taskFileName);
+    if (virtualFile == null) {
+      return;
+    }
+    VirtualFile answerFile = getCopyWithAnswers(taskDir, virtualFile, taskFile, answerTaskFile);
+    for (TaskWindow taskWindow : answerTaskFile.getTaskWindows()) {
+      Document document = FileDocumentManager.getInstance().getDocument(virtualFile);
+      if (document == null) {
+        continue;
+      }
+      if (!taskWindow.isValid(document)) {
+        continue;
+      }
+      taskWindow.smartCheck(project, answerFile, answerTaskFile, taskFile, testRunner, virtualFile, document);
+    }
+    StudyUtils.deleteFile(answerFile);
+  }
 
-  private VirtualFile getCopyWithAnswers(final VirtualFile taskDir,
-                                         final VirtualFile file,
-                                         final TaskFile source,
-                                         TaskFile target) {
+  private VirtualFile getCopyWithAnswers(@NotNull final VirtualFile taskDir,
+                                         @NotNull final VirtualFile file,
+                                         @NotNull final TaskFile source,
+                                         @NotNull final TaskFile target) {
     VirtualFile copy = null;
     try {
 
-      copy = file.copy(this, taskDir, file.getNameWithoutExtension() +"_answers.py");
+      copy = file.copy(this, taskDir, file.getNameWithoutExtension() + ANSWERS_POSTFIX);
       final FileDocumentManager documentManager = FileDocumentManager.getInstance();
       final Document document = documentManager.getDocument(copy);
       if (document != null) {
@@ -315,19 +252,18 @@
     catch (IOException e) {
       LOG.error(e);
     }
-
-
     return copy;
   }
 
   private static void createTestResultPopUp(final String text, Color color, @NotNull final Project project) {
     BalloonBuilder balloonBuilder =
       JBPopupFactory.getInstance().createHtmlTextBalloonBuilder(text, null, color, null);
-    Balloon balloon = balloonBuilder.createBalloon();
+    final Balloon balloon = balloonBuilder.createBalloon();
     StudyEditor studyEditor = StudyEditor.getSelectedStudyEditor(project);
     assert studyEditor != null;
     JButton checkButton = studyEditor.getCheckButton();
     balloon.showInCenterOf(checkButton);
+    Disposer.register(project, balloon);
   }
 
   @Override
diff --git a/python/edu/learn-python/src/com/jetbrains/python/edu/actions/StudyEditInputAction.java b/python/edu/learn-python/src/com/jetbrains/python/edu/actions/StudyEditInputAction.java
index 5b9a6fe..72660fc 100644
--- a/python/edu/learn-python/src/com/jetbrains/python/edu/actions/StudyEditInputAction.java
+++ b/python/edu/learn-python/src/com/jetbrains/python/edu/actions/StudyEditInputAction.java
@@ -19,6 +19,7 @@
 import com.intellij.ui.tabs.TabInfo;
 import com.intellij.ui.tabs.TabsListener;
 import com.intellij.ui.tabs.impl.JBEditorTabs;
+import com.intellij.util.PlatformIcons;
 import com.jetbrains.python.edu.StudyTaskManager;
 import com.jetbrains.python.edu.StudyUtils;
 import com.jetbrains.python.edu.course.Task;
@@ -26,7 +27,6 @@
 import com.jetbrains.python.edu.course.UserTest;
 import com.jetbrains.python.edu.editor.StudyEditor;
 import com.jetbrains.python.edu.ui.StudyTestContentPanel;
-import icons.StudyIcons;
 import org.jetbrains.annotations.NotNull;
 
 import javax.swing.*;
@@ -92,7 +92,7 @@
         i++;
       }
       TabInfo plusTab = new TabInfo(new JPanel());
-      plusTab.setIcon(StudyIcons.Add);
+      plusTab.setIcon(PlatformIcons.ADD_ICON);
       tabbedPane.addTabSilently(plusTab, tabbedPane.getTabCount());
       final JBPopup hint =
         JBPopupFactory.getInstance().createComponentPopupBuilder(tabbedPane.getComponent(), tabbedPane.getComponent())
diff --git a/python/edu/learn-python/src/com/jetbrains/python/edu/actions/StudyNewProject.java b/python/edu/learn-python/src/com/jetbrains/python/edu/actions/StudyNewProject.java
new file mode 100644
index 0000000..0b75c4b
--- /dev/null
+++ b/python/edu/learn-python/src/com/jetbrains/python/edu/actions/StudyNewProject.java
@@ -0,0 +1,34 @@
+/*
+ * Copyright 2000-2013 JetBrains s.r.o.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package com.jetbrains.python.edu.actions;
+
+import com.jetbrains.python.edu.StudyDirectoryProjectGenerator;
+import com.jetbrains.python.newProject.actions.GenerateProjectCallback;
+import com.jetbrains.python.newProject.actions.ProjectSpecificAction;
+import org.jetbrains.annotations.NotNull;
+import org.jetbrains.annotations.Nullable;
+
+public class StudyNewProject extends ProjectSpecificAction {
+
+  public StudyNewProject(@NotNull final String name, @Nullable final Runnable runnable) {
+    super(new GenerateProjectCallback(runnable), new StudyDirectoryProjectGenerator(), name, true);
+  }
+
+  public StudyNewProject() {
+    this("Learn Python", null);
+  }
+
+}
diff --git a/python/edu/learn-python/src/com/jetbrains/python/edu/actions/StudyNextStudyTaskAction.java b/python/edu/learn-python/src/com/jetbrains/python/edu/actions/StudyNextStudyTaskAction.java
index 81818a9..3c971c3 100644
--- a/python/edu/learn-python/src/com/jetbrains/python/edu/actions/StudyNextStudyTaskAction.java
+++ b/python/edu/learn-python/src/com/jetbrains/python/edu/actions/StudyNextStudyTaskAction.java
@@ -2,13 +2,14 @@
 
 import com.jetbrains.python.edu.editor.StudyEditor;
 import com.jetbrains.python.edu.course.Task;
+import org.jetbrains.annotations.NotNull;
 
 import javax.swing.*;
 
 public class StudyNextStudyTaskAction extends StudyTaskNavigationAction {
 
   @Override
-  protected JButton getButton(StudyEditor selectedStudyEditor) {
+  protected JButton getButton(@NotNull final StudyEditor selectedStudyEditor) {
     return selectedStudyEditor.getNextTaskButton();
   }
 
@@ -18,7 +19,7 @@
   }
 
   @Override
-  protected Task getTargetTask(Task sourceTask) {
+  protected Task getTargetTask(@NotNull final Task sourceTask) {
     return sourceTask.next();
   }
 }
\ No newline at end of file
diff --git a/python/edu/learn-python/src/com/jetbrains/python/edu/actions/StudyNextWindowAction.java b/python/edu/learn-python/src/com/jetbrains/python/edu/actions/StudyNextWindowAction.java
index 595aeef..fcf9ef4 100644
--- a/python/edu/learn-python/src/com/jetbrains/python/edu/actions/StudyNextWindowAction.java
+++ b/python/edu/learn-python/src/com/jetbrains/python/edu/actions/StudyNextWindowAction.java
@@ -1,8 +1,8 @@
 package com.jetbrains.python.edu.actions;
 
+import com.intellij.icons.AllIcons;
 import com.jetbrains.python.edu.StudyUtils;
 import com.jetbrains.python.edu.course.TaskWindow;
-import icons.StudyIcons;
 import org.jetbrains.annotations.NotNull;
 
 import java.util.List;
@@ -16,7 +16,7 @@
   public static final String SHORTCUT2 = "ctrl pressed ENTER";
 
   public StudyNextWindowAction() {
-    super("NextWindowAction", "Select next window", StudyIcons.Next);
+    super("NextWindowAction", "Select next window", AllIcons.Actions.Forward);
   }
 
   @Override
diff --git a/python/edu/learn-python/src/com/jetbrains/python/edu/actions/StudyPreviousStudyTaskAction.java b/python/edu/learn-python/src/com/jetbrains/python/edu/actions/StudyPreviousStudyTaskAction.java
index bc26c28..f6da6a0 100644
--- a/python/edu/learn-python/src/com/jetbrains/python/edu/actions/StudyPreviousStudyTaskAction.java
+++ b/python/edu/learn-python/src/com/jetbrains/python/edu/actions/StudyPreviousStudyTaskAction.java
@@ -3,13 +3,14 @@
 
 import com.jetbrains.python.edu.editor.StudyEditor;
 import com.jetbrains.python.edu.course.Task;
+import org.jetbrains.annotations.NotNull;
 
 import javax.swing.*;
 
 public class StudyPreviousStudyTaskAction extends StudyTaskNavigationAction {
 
   @Override
-  protected JButton getButton(StudyEditor selectedStudyEditor) {
+  protected JButton getButton(@NotNull final StudyEditor selectedStudyEditor) {
     return selectedStudyEditor.getPrevTaskButton();
   }
 
@@ -19,7 +20,7 @@
   }
 
   @Override
-  protected Task getTargetTask(Task sourceTask) {
+  protected Task getTargetTask(@NotNull final Task sourceTask) {
     return sourceTask.prev();
   }
 }
\ No newline at end of file
diff --git a/python/edu/learn-python/src/com/jetbrains/python/edu/actions/StudyRefreshTaskAction.java b/python/edu/learn-python/src/com/jetbrains/python/edu/actions/StudyRefreshTaskFileAction.java
similarity index 90%
rename from python/edu/learn-python/src/com/jetbrains/python/edu/actions/StudyRefreshTaskAction.java
rename to python/edu/learn-python/src/com/jetbrains/python/edu/actions/StudyRefreshTaskFileAction.java
index f8abb0b..a9448dd 100644
--- a/python/edu/learn-python/src/com/jetbrains/python/edu/actions/StudyRefreshTaskAction.java
+++ b/python/edu/learn-python/src/com/jetbrains/python/edu/actions/StudyRefreshTaskFileAction.java
@@ -14,6 +14,7 @@
 import com.intellij.openapi.ui.popup.Balloon;
 import com.intellij.openapi.ui.popup.BalloonBuilder;
 import com.intellij.openapi.ui.popup.JBPopupFactory;
+import com.intellij.openapi.util.Disposer;
 import com.intellij.openapi.vfs.VirtualFile;
 import com.intellij.openapi.wm.IdeFocusManager;
 import com.jetbrains.python.edu.StudyDocumentListener;
@@ -24,8 +25,8 @@
 
 import java.io.*;
 
-public class StudyRefreshTaskAction extends DumbAwareAction {
-  private static final Logger LOG = Logger.getInstance(StudyRefreshTaskAction.class.getName());
+public class StudyRefreshTaskFileAction extends DumbAwareAction {
+  private static final Logger LOG = Logger.getInstance(StudyRefreshTaskFileAction.class.getName());
 
   public void refresh(final Project project) {
         ApplicationManager.getApplication().invokeLater(new Runnable() {
@@ -92,14 +93,20 @@
                     document.addDocumentListener(listener);
                   }
                   selectedTaskFile.drawAllWindows(editor);
-                  IdeFocusManager.getInstance(project).requestFocus(editor.getContentComponent(), true);
+                  ApplicationManager.getApplication().invokeLater(new Runnable() {
+                    @Override
+                    public void run() {
+                      IdeFocusManager.getInstance(project).requestFocus(editor.getContentComponent(), true);
+                    }
+                  });
                   selectedTaskFile.navigateToFirstTaskWindow(editor);
                   BalloonBuilder balloonBuilder =
                     JBPopupFactory.getInstance().createHtmlTextBalloonBuilder("You can now start again", MessageType.INFO, null);
-                  Balloon balloon = balloonBuilder.createBalloon();
+                  final Balloon balloon = balloonBuilder.createBalloon();
                   StudyEditor selectedStudyEditor = StudyEditor.getSelectedStudyEditor(project);
                   assert selectedStudyEditor != null;
                   balloon.showInCenterOf(selectedStudyEditor.getRefreshButton());
+                  Disposer.register(project, balloon);
                 }
                 catch (FileNotFoundException e1) {
                   LOG.error(e1);
diff --git a/python/edu/learn-python/src/com/jetbrains/python/edu/actions/StudyShowHintAction.java b/python/edu/learn-python/src/com/jetbrains/python/edu/actions/StudyShowHintAction.java
index 1efa908..2952486 100644
--- a/python/edu/learn-python/src/com/jetbrains/python/edu/actions/StudyShowHintAction.java
+++ b/python/edu/learn-python/src/com/jetbrains/python/edu/actions/StudyShowHintAction.java
@@ -5,19 +5,18 @@
 import com.intellij.openapi.actionSystem.AnActionEvent;
 import com.intellij.openapi.editor.Editor;
 import com.intellij.openapi.editor.LogicalPosition;
-import com.intellij.openapi.fileEditor.FileDocumentManager;
 import com.intellij.openapi.project.DumbAwareAction;
 import com.intellij.openapi.project.Project;
 import com.intellij.openapi.ui.popup.JBPopup;
 import com.intellij.openapi.ui.popup.JBPopupFactory;
-import com.intellij.openapi.vfs.VirtualFile;
+import com.intellij.openapi.util.Disposer;
 import com.intellij.psi.PsiElement;
 import com.intellij.psi.PsiFile;
 import com.intellij.psi.PsiManager;
+import com.jetbrains.python.edu.StudyState;
 import com.jetbrains.python.edu.StudyTaskManager;
 import com.jetbrains.python.edu.StudyUtils;
 import com.jetbrains.python.edu.course.Course;
-import com.jetbrains.python.edu.course.TaskFile;
 import com.jetbrains.python.edu.course.TaskWindow;
 import com.jetbrains.python.edu.editor.StudyEditor;
 import icons.StudyIcons;
@@ -33,58 +32,56 @@
   }
 
   public void actionPerformed(AnActionEvent e) {
-    Project project = e.getProject();
-    if (project != null) {
+    final Project project = e.getProject();
+    if (project == null) {
+      return;
+    }
+    Course course = StudyTaskManager.getInstance(project).getCourse();
+    if (course == null) {
+      return;
+    }
+    StudyState studyState = new StudyState(StudyEditor.getSelectedStudyEditor(project));
+    if (!studyState.isValid()) {
+      return;
+    }
+    PsiFile file = PsiManager.getInstance(project).findFile(studyState.getVirtualFile());
+    final Editor editor = studyState.getEditor();
+    LogicalPosition pos = editor.getCaretModel().getLogicalPosition();
+    TaskWindow taskWindow = studyState.getTaskFile().getTaskWindow(editor.getDocument(), pos);
+    if (file == null || taskWindow == null) {
+      return;
+    }
+    String hint = taskWindow.getHint();
+    if (hint == null) {
+      return;
+    }
+    File resourceFile = new File(course.getResourcePath());
+    File resourceRoot = resourceFile.getParentFile();
+    if (resourceRoot == null || !resourceRoot.exists()) {
+      return;
+    }
+    File hintsDir = new File(resourceRoot, Course.HINTS_DIR);
+    if (hintsDir.exists()) {
+      String hintText = StudyUtils.getFileText(hintsDir.getAbsolutePath(), hint, true);
+      int offset = editor.getDocument().getLineStartOffset(pos.line) + pos.column;
+      PsiElement element = file.findElementAt(offset);
+      if (hintText == null || element == null) {
+        return;
+      }
+
       DocumentationManager documentationManager = DocumentationManager.getInstance(project);
       DocumentationComponent component = new DocumentationComponent(documentationManager);
-      Editor selectedEditor = StudyEditor.getSelectedEditor(project);
-      FileDocumentManager fileDocumentManager = FileDocumentManager.getInstance();
-      assert selectedEditor != null;
-      VirtualFile openedFile = fileDocumentManager.getFile(selectedEditor.getDocument());
-      if (openedFile != null) {
-        StudyTaskManager taskManager = StudyTaskManager.getInstance(e.getProject());
-        TaskFile taskFile = taskManager.getTaskFile(openedFile);
-        if (taskFile != null) {
-          PsiFile file = PsiManager.getInstance(project).findFile(openedFile);
-          if (file != null) {
-            LogicalPosition pos = selectedEditor.getCaretModel().getLogicalPosition();
-            TaskWindow taskWindow = taskFile.getTaskWindow(selectedEditor.getDocument(), pos);
-            if (taskWindow != null) {
-              String hint = taskWindow.getHint();
-              if (hint == null) {
-                return;
-              }
-              Course course = taskManager.getCourse();
-              if (course != null) {
-                File resourceFile = new File(course.getResourcePath());
-                File resourceRoot = resourceFile.getParentFile();
-                if (resourceRoot != null && resourceRoot.exists()) {
-                  File hintsDir = new File(resourceRoot, Course.HINTS_DIR);
-                  if (hintsDir.exists()) {
-                    String hintText = StudyUtils.getFileText(hintsDir.getAbsolutePath(), hint, true);
-                    if (hintText != null) {
-                      int offset = selectedEditor.getDocument().getLineStartOffset(pos.line) + pos.column;
-                      PsiElement element = file.findElementAt(offset);
-                      if (element != null) {
-                        component.setData(element, hintText, true, null);
-                        final JBPopup popup =
-                          JBPopupFactory.getInstance().createComponentPopupBuilder(component, component)
-                            .setDimensionServiceKey(project, DocumentationManager.JAVADOC_LOCATION_AND_SIZE, false)
-                            .setResizable(true)
-                            .setMovable(true)
-                            .setRequestFocus(true)
-                            .createPopup();
-                        component.setHint(popup);
-                        popup.showInBestPositionFor(selectedEditor);
-                      }
-                    }
-                  }
-                }
-              }
-            }
-          }
-        }
-      }
+      component.setData(element, hintText, true, null);
+      final JBPopup popup =
+        JBPopupFactory.getInstance().createComponentPopupBuilder(component, component)
+          .setDimensionServiceKey(project, DocumentationManager.JAVADOC_LOCATION_AND_SIZE, false)
+          .setResizable(true)
+          .setMovable(true)
+          .setRequestFocus(true)
+          .createPopup();
+      component.setHint(popup);
+      popup.showInBestPositionFor(editor);
+      Disposer.dispose(component);
     }
   }
 
diff --git a/python/edu/learn-python/src/com/jetbrains/python/edu/actions/StudyTaskNavigationAction.java b/python/edu/learn-python/src/com/jetbrains/python/edu/actions/StudyTaskNavigationAction.java
index b781e7d..46c0981 100644
--- a/python/edu/learn-python/src/com/jetbrains/python/edu/actions/StudyTaskNavigationAction.java
+++ b/python/edu/learn-python/src/com/jetbrains/python/edu/actions/StudyTaskNavigationAction.java
@@ -1,8 +1,6 @@
 package com.jetbrains.python.edu.actions;
 
 import com.intellij.openapi.actionSystem.AnActionEvent;
-import com.intellij.openapi.editor.Editor;
-import com.intellij.openapi.fileEditor.FileDocumentManager;
 import com.intellij.openapi.fileEditor.FileEditorManager;
 import com.intellij.openapi.project.DumbAwareAction;
 import com.intellij.openapi.project.Project;
@@ -11,37 +9,34 @@
 import com.intellij.openapi.ui.popup.BalloonBuilder;
 import com.intellij.openapi.ui.popup.JBPopupFactory;
 import com.intellij.openapi.vfs.VirtualFile;
-import com.jetbrains.python.edu.StudyTaskManager;
+import com.intellij.openapi.wm.ToolWindow;
+import com.intellij.openapi.wm.ToolWindowId;
+import com.intellij.openapi.wm.ToolWindowManager;
+import com.jetbrains.python.edu.StudyState;
 import com.jetbrains.python.edu.course.Lesson;
 import com.jetbrains.python.edu.course.Task;
 import com.jetbrains.python.edu.course.TaskFile;
 import com.jetbrains.python.edu.editor.StudyEditor;
+import org.jetbrains.annotations.NotNull;
 
 import javax.swing.*;
 import java.util.Map;
 
-/**
- * author: liana
- * data: 7/21/14.
- */
+
 abstract public class StudyTaskNavigationAction extends DumbAwareAction {
-  public void navigateTask(Project project) {
-    Editor selectedEditor = StudyEditor.getSelectedEditor(project);
-    FileDocumentManager fileDocumentManager = FileDocumentManager.getInstance();
-    assert selectedEditor != null;
-    VirtualFile openedFile = fileDocumentManager.getFile(selectedEditor.getDocument());
-    StudyTaskManager taskManager = StudyTaskManager.getInstance(project);
-    assert openedFile != null;
-    TaskFile selectedTaskFile = taskManager.getTaskFile(openedFile);
-    assert selectedTaskFile != null;
-    Task currentTask = selectedTaskFile.getTask();
-    Task nextTask = getTargetTask(currentTask);
+  public void navigateTask(@NotNull final Project project) {
+    StudyEditor studyEditor = StudyEditor.getSelectedStudyEditor(project);
+    StudyState studyState = new StudyState(studyEditor);
+    if (!studyState.isValid()) {
+      return;
+    }
+    Task nextTask = getTargetTask(studyState.getTask());
     if (nextTask == null) {
       BalloonBuilder balloonBuilder =
         JBPopupFactory.getInstance().createHtmlTextBalloonBuilder(getNavigationFinishedMessage(), MessageType.INFO, null);
       Balloon balloon = balloonBuilder.createBalloon();
-      StudyEditor selectedStudyEditor = StudyEditor.getSelectedStudyEditor(project);
-      balloon.showInCenterOf(getButton(selectedStudyEditor));
+      assert studyEditor != null;
+      balloon.showInCenterOf(getButton(studyEditor));
       return;
     }
     for (VirtualFile file : FileEditorManager.getInstance(project).getOpenFiles()) {
@@ -82,16 +77,24 @@
     if (shouldBeActive != null) {
       FileEditorManager.getInstance(project).openFile(shouldBeActive, true);
     }
+    ToolWindow runToolWindow = ToolWindowManager.getInstance(project).getToolWindow(ToolWindowId.RUN);
+    if (runToolWindow != null) {
+      runToolWindow.hide(null);
+    }
   }
 
-  protected abstract JButton getButton(StudyEditor selectedStudyEditor);
+  protected abstract JButton getButton(@NotNull final StudyEditor selectedStudyEditor);
 
   @Override
   public void actionPerformed(AnActionEvent e) {
-    navigateTask(e.getProject());
+    Project project = e.getProject();
+    if (project == null) {
+      return;
+    }
+    navigateTask(project);
   }
 
   protected abstract String getNavigationFinishedMessage();
 
-  protected abstract Task getTargetTask(Task sourceTask);
+  protected abstract Task getTargetTask(@NotNull final Task sourceTask);
 }
diff --git a/python/edu/learn-python/src/com/jetbrains/python/edu/course/TaskFile.java b/python/edu/learn-python/src/com/jetbrains/python/edu/course/TaskFile.java
index 4f17fc0..c46c4f5 100644
--- a/python/edu/learn-python/src/com/jetbrains/python/edu/course/TaskFile.java
+++ b/python/edu/learn-python/src/com/jetbrains/python/edu/course/TaskFile.java
@@ -21,7 +21,7 @@
  * which is visible to student in project view
  */
 
-public class TaskFile implements Stateful{
+public class TaskFile implements Stateful {
   public List<TaskWindow> taskWindows = new ArrayList<TaskWindow>();
   private Task myTask;
   @Transient
@@ -173,7 +173,18 @@
     for (TaskWindow w : taskWindows) {
       if ((w.getLine() == line) && (w.getStart() >= oldEndOffsetInLine)) {
         int distance = w.getStart() - oldEndOffsetInLine;
-        if (lineChange != 0 || newEndOffsetInLine <= w.getStart()) {
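+        // Also shift this window when the edit's new end offset is still covered by the previous task window on the same line.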
+        boolean coveredByPrevTW = false;
+        int prevIndex = w.getIndex() - 1;
+        if (StudyUtils.indexIsValid(prevIndex, taskWindows)) {
+          TaskWindow prevTW = taskWindows.get(prevIndex);
+          if (prevTW.getLine() == line) {
+            int endOffset = prevTW.getStart() + prevTW.getLength();
+            if (endOffset >= newEndOffsetInLine) {
+              coveredByPrevTW = true;
+            }
+          }
+        }
+        if (lineChange != 0 || newEndOffsetInLine <= w.getStart() || coveredByPrevTW) {
           w.setStart(distance + newEndOffsetInLine);
           w.setLine(line + lineChange);
         }
@@ -217,12 +228,33 @@
   public void navigateToFirstTaskWindow(@NotNull final Editor editor) {
     if (!taskWindows.isEmpty()) {
       TaskWindow firstTaskWindow = StudyUtils.getFirst(taskWindows);
-      mySelectedTaskWindow = firstTaskWindow;
-      LogicalPosition taskWindowStart = new LogicalPosition(firstTaskWindow.getLine(), firstTaskWindow.getStart());
-      editor.getCaretModel().moveToLogicalPosition(taskWindowStart);
-      int startOffset = firstTaskWindow.getRealStartOffset(editor.getDocument());
-      int endOffset = startOffset + firstTaskWindow.getLength();
-      editor.getSelectionModel().setSelection(startOffset, endOffset);
+      navigateToTaskWindow(editor, firstTaskWindow);
     }
   }
+
+  private void navigateToTaskWindow(@NotNull final Editor editor, @NotNull final TaskWindow firstTaskWindow) {
+    if (!firstTaskWindow.isValid(editor.getDocument())) {
+      return;
+    }
+    mySelectedTaskWindow = firstTaskWindow;
+    LogicalPosition taskWindowStart = new LogicalPosition(firstTaskWindow.getLine(), firstTaskWindow.getStart());
+    editor.getCaretModel().moveToLogicalPosition(taskWindowStart);
+    int startOffset = firstTaskWindow.getRealStartOffset(editor.getDocument());
+    int endOffset = startOffset + firstTaskWindow.getLength();
+    editor.getSelectionModel().setSelection(startOffset, endOffset);
+  }
+
+  public void navigateToFirstFailedTaskWindow(@NotNull final Editor editor) {
+    for (TaskWindow taskWindow : taskWindows) {
+      if (taskWindow.getStatus() != StudyStatus.Failed) {
+        continue;
+      }
+      navigateToTaskWindow(editor, taskWindow);
+      break;
+    }
+  }
+
+  public boolean hasFailedTaskWindows() {
+    return taskWindows.size() > 0 && getStatus() == StudyStatus.Failed;
+  }
 }
diff --git a/python/edu/learn-python/src/com/jetbrains/python/edu/course/TaskWindow.java b/python/edu/learn-python/src/com/jetbrains/python/edu/course/TaskWindow.java
index 4fb112c..dc4a75a 100644
--- a/python/edu/learn-python/src/com/jetbrains/python/edu/course/TaskWindow.java
+++ b/python/edu/learn-python/src/com/jetbrains/python/edu/course/TaskWindow.java
@@ -1,5 +1,8 @@
 package com.jetbrains.python.edu.course;
 
+import com.intellij.execution.ExecutionException;
+import com.intellij.openapi.application.ApplicationManager;
+import com.intellij.openapi.diagnostic.Logger;
 import com.intellij.openapi.editor.Document;
 import com.intellij.openapi.editor.Editor;
 import com.intellij.openapi.editor.colors.EditorColors;
@@ -8,16 +11,27 @@
 import com.intellij.openapi.editor.markup.HighlighterTargetArea;
 import com.intellij.openapi.editor.markup.RangeHighlighter;
 import com.intellij.openapi.editor.markup.TextAttributes;
+import com.intellij.openapi.fileEditor.FileDocumentManager;
+import com.intellij.openapi.project.Project;
+import com.intellij.openapi.util.TextRange;
+import com.intellij.openapi.vfs.VirtualFile;
 import com.intellij.ui.JBColor;
+import com.jetbrains.python.edu.StudyDocumentListener;
+import com.jetbrains.python.edu.StudyTestRunner;
+import com.jetbrains.python.edu.StudyUtils;
 import org.jetbrains.annotations.NotNull;
 
+import java.io.File;
+import java.io.IOException;
+
 /**
  * Implementation of windows which user should type in
  */
 
 
 public class TaskWindow implements Comparable, Stateful {
-
+  private static final String WINDOW_POSTFIX = "_window.py";
+  private static final Logger LOG = Logger.getInstance(TaskWindow.class);
   public int line = 0;
   public int start = 0;
   public String hint = "";
@@ -174,4 +188,55 @@
   public int getIndex() {
     return myIndex;
   }
+
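+  // Checks this task window in isolation: copies the answer file, replaces this window's content with the user's text,
+  // runs the tests on the copy and marks the window Solved or Failed; temporary files are deleted afterwards.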
+  public void smartCheck(@NotNull final Project project,
+                         @NotNull final VirtualFile answerFile,
+                         @NotNull final TaskFile answerTaskFile,
+                         @NotNull final TaskFile usersTaskFile,
+                         @NotNull final StudyTestRunner testRunner,
+                         @NotNull final VirtualFile virtualFile,
+                         @NotNull final Document usersDocument) {
+
+    try {
+      VirtualFile windowCopy =
+        answerFile.copy(this, answerFile.getParent(), answerFile.getNameWithoutExtension() + WINDOW_POSTFIX);
+      final FileDocumentManager documentManager = FileDocumentManager.getInstance();
+      final Document windowDocument = documentManager.getDocument(windowCopy);
+      if (windowDocument != null) {
+        File resourceFile = StudyUtils.copyResourceFile(virtualFile.getName(), windowCopy.getName(), project, usersTaskFile.getTask());
+        TaskFile windowTaskFile = new TaskFile();
+        TaskFile.copy(answerTaskFile, windowTaskFile);
+        StudyDocumentListener listener = new StudyDocumentListener(windowTaskFile);
+        windowDocument.addDocumentListener(listener);
+        int start = getRealStartOffset(windowDocument);
+        int end = start + getLength();
+        TaskWindow userTaskWindow = usersTaskFile.getTaskWindows().get(getIndex());
+        int userStart = userTaskWindow.getRealStartOffset(usersDocument);
+        int userEnd = userStart + userTaskWindow.getLength();
+        String text = usersDocument.getText(new TextRange(userStart, userEnd));
+        windowDocument.replaceString(start, end, text);
+        ApplicationManager.getApplication().runWriteAction(new Runnable() {
+          @Override
+          public void run() {
+            documentManager.saveDocument(windowDocument);
+          }
+        });
+        VirtualFile fileWindows = StudyUtils.flushWindows(windowTaskFile, windowCopy);
+        Process smartTestProcess = testRunner.launchTests(project, windowCopy.getPath());
+        boolean res = testRunner.getPassedTests(smartTestProcess).equals(StudyTestRunner.TEST_OK);
+        userTaskWindow.setStatus(res ? StudyStatus.Solved : StudyStatus.Failed, StudyStatus.Unchecked);
+        StudyUtils.deleteFile(windowCopy);
+        StudyUtils.deleteFile(fileWindows);
+        if (!resourceFile.delete()) {
+          LOG.error("failed to delete", resourceFile.getPath());
+        }
+      }
+    }
+    catch (ExecutionException e) {
+      LOG.error(e);
+    }
+    catch (IOException e) {
+      LOG.error(e);
+    }
+  }
 }
\ No newline at end of file
diff --git a/python/edu/learn-python/src/com/jetbrains/python/edu/editor/StudyEditor.java b/python/edu/learn-python/src/com/jetbrains/python/edu/editor/StudyEditor.java
index 69c5acc..6b27c4a 100644
--- a/python/edu/learn-python/src/com/jetbrains/python/edu/editor/StudyEditor.java
+++ b/python/edu/learn-python/src/com/jetbrains/python/edu/editor/StudyEditor.java
@@ -1,8 +1,10 @@
 package com.jetbrains.python.edu.editor;
 
 import com.intellij.codeHighlighting.BackgroundEditorHighlighter;
+import com.intellij.icons.AllIcons;
 import com.intellij.ide.structureView.StructureViewBuilder;
 import com.intellij.openapi.actionSystem.ActionManager;
+import com.intellij.openapi.application.ApplicationManager;
 import com.intellij.openapi.editor.Document;
 import com.intellij.openapi.editor.Editor;
 import com.intellij.openapi.editor.EditorFactory;
@@ -17,9 +19,15 @@
 import com.intellij.openapi.util.Disposer;
 import com.intellij.openapi.util.Key;
 import com.intellij.openapi.vfs.VirtualFile;
+import com.intellij.openapi.wm.IdeFocusManager;
+import com.intellij.openapi.wm.ToolWindow;
+import com.intellij.openapi.wm.ToolWindowId;
+import com.intellij.openapi.wm.ToolWindowManager;
 import com.intellij.pom.Navigatable;
+import com.intellij.ui.BrowserHyperlinkListener;
 import com.intellij.ui.HideableTitledPanel;
 import com.intellij.ui.JBColor;
+import com.intellij.util.ui.EmptyClipboardOwner;
 import com.intellij.util.ui.UIUtil;
 import com.jetbrains.python.edu.StudyDocumentListener;
 import com.jetbrains.python.edu.StudyTaskManager;
@@ -36,8 +44,8 @@
 import javax.swing.text.StyleConstants;
 import javax.swing.text.StyledDocument;
 import java.awt.*;
-import java.awt.event.ActionEvent;
-import java.awt.event.ActionListener;
+import java.awt.datatransfer.StringSelection;
+import java.awt.event.*;
 import java.beans.PropertyChangeListener;
 import java.util.HashMap;
 import java.util.Map;
@@ -50,12 +58,13 @@
   private static final String TASK_TEXT_HEADER = "Task Text";
   private final FileEditor myDefaultEditor;
   private final JComponent myComponent;
+  private final TaskFile myTaskFile;
   private JButton myCheckButton;
   private JButton myNextTaskButton;
   private JButton myPrevTaskButton;
   private JButton myRefreshButton;
   private static final Map<Document, StudyDocumentListener> myDocumentListeners = new HashMap<Document, StudyDocumentListener>();
-  private Project myProject;
+  private final Project myProject;
 
   public JButton getCheckButton() {
     return myCheckButton;
@@ -65,6 +74,10 @@
     return myPrevTaskButton;
   }
 
+  public TaskFile getTaskFile() {
+    return myTaskFile;
+  }
+
   private static JButton addButton(@NotNull final JComponent parentComponent, String toolTipText, Icon icon) {
     JButton newButton = new JButton();
     newButton.setToolTipText(toolTipText);
@@ -89,26 +102,72 @@
     myComponent = myDefaultEditor.getComponent();
     JPanel studyPanel = new JPanel();
     studyPanel.setLayout(new BoxLayout(studyPanel, BoxLayout.Y_AXIS));
-    TaskFile taskFile = StudyTaskManager.getInstance(myProject).getTaskFile(file);
-    if (taskFile != null) {
-      Task currentTask = taskFile.getTask();
+    myTaskFile = StudyTaskManager.getInstance(myProject).getTaskFile(file);
+    if (myTaskFile != null) {
+      Task currentTask = myTaskFile.getTask();
       String taskText = currentTask.getResourceText(project, currentTask.getText(), false);
       initializeTaskText(studyPanel, taskText);
       JPanel studyButtonPanel = new JPanel(new GridLayout(1, 2));
       JPanel taskActionsPanel = new JPanel(new FlowLayout(FlowLayout.LEFT));
       studyButtonPanel.add(taskActionsPanel);
       studyButtonPanel.add(new JPanel());
-      initializeButtons(taskActionsPanel, taskFile);
+      initializeButtons(taskActionsPanel, myTaskFile);
       studyPanel.add(studyButtonPanel);
       myComponent.add(studyPanel, BorderLayout.NORTH);
     }
   }
 
-  private static void initializeTaskText(JPanel studyPanel, @Nullable String taskText) {
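+  // Lets the user copy text selected in the read-only task description pane: on mouse release the Project View is focused,
+  // a key listener puts the selection on the clipboard when Ctrl+C is pressed, and focus then returns to the editor.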
+  class CopyListener extends MouseAdapter {
+    final JTextPane myTextPane;
+
+    public CopyListener(JTextPane textPane) {
+      myTextPane = textPane;
+    }
+
+    @Override
+    public void mouseReleased(MouseEvent e) {
+      ApplicationManager.getApplication().invokeLater(new Runnable() {
+        @Override
+        public void run() {
+          ToolWindow projectView = ToolWindowManager.getInstance(myProject).getToolWindow(ToolWindowId.PROJECT_VIEW);
+          if (projectView == null) {
+            return;
+          }
+          final Component focusComponent = projectView.getComponent();
+          IdeFocusManager.getInstance(myProject).requestFocus(focusComponent, true);
+          final String text = myTextPane.getSelectedText();
+          if (text == null) {
+            return;
+          }
+          KeyAdapter keyAdapter = new KeyAdapter() {
+            @Override
+            public void keyPressed(KeyEvent ev) {
+              if (ev.getKeyCode() == KeyEvent.VK_C
+                  && ev.getModifiers() == InputEvent.CTRL_MASK) {
+                StringSelection selection = new StringSelection(text);
+                Toolkit.getDefaultToolkit().getSystemClipboard().setContents(selection, EmptyClipboardOwner.INSTANCE);
+                ApplicationManager.getApplication().invokeLater(new Runnable() {
+                  @Override
+                  public void run() {
+                    IdeFocusManager.getInstance(myProject).requestFocus(myDefaultEditor.getComponent(), true);
+                  }
+                });
+              }
+            }
+          };
+          focusComponent.addKeyListener(keyAdapter);
+        }
+      });
+    }
+  }
+
+  private void initializeTaskText(JPanel studyPanel, @Nullable String taskText) {
     JTextPane taskTextPane = new JTextPane();
+    taskTextPane.addMouseListener(new CopyListener(taskTextPane));
     taskTextPane.setContentType("text/html");
     taskTextPane.setEditable(false);
     taskTextPane.setText(taskText);
+    taskTextPane.addHyperlinkListener(new BrowserHyperlinkListener());
     EditorColorsScheme editorColorsScheme = EditorColorsManager.getInstance().getGlobalScheme();
     int fontSize = editorColorsScheme.getEditorFontSize();
     String fontName = editorColorsScheme.getEditorFontName();
@@ -134,10 +193,10 @@
   private void initializeButtons(@NotNull final JPanel taskActionsPanel, @NotNull final TaskFile taskFile) {
     myCheckButton = addButton(taskActionsPanel, "Check task", StudyIcons.Resolve);
     myPrevTaskButton = addButton(taskActionsPanel, "Prev Task", StudyIcons.Prev);
-    myNextTaskButton = addButton(taskActionsPanel, "Next Task", StudyIcons.Next);
-    myRefreshButton = addButton(taskActionsPanel, "Start task again", StudyIcons.Refresh24);
+    myNextTaskButton = addButton(taskActionsPanel, "Next Task", AllIcons.Actions.Forward);
+    myRefreshButton = addButton(taskActionsPanel, "Start task again", AllIcons.Actions.Refresh);
     if (!taskFile.getTask().getUserTests().isEmpty()) {
-      JButton runButton = addButton(taskActionsPanel, "Run", StudyIcons.Run);
+      JButton runButton = addButton(taskActionsPanel, "Run", AllIcons.General.Run);
       runButton.addActionListener(new ActionListener() {
         @Override
         public void actionPerformed(ActionEvent e) {
@@ -149,7 +208,8 @@
       watchInputButton.addActionListener(new ActionListener() {
         @Override
         public void actionPerformed(ActionEvent e) {
-          StudyEditInputAction studyEditInputAction = (StudyEditInputAction)ActionManager.getInstance().getAction("WatchInputAction");
+          StudyEditInputAction studyEditInputAction =
+            (StudyEditInputAction)ActionManager.getInstance().getAction("WatchInputAction");
           studyEditInputAction.showInput(myProject);
         }
       });
@@ -165,7 +225,8 @@
     myNextTaskButton.addActionListener(new ActionListener() {
       @Override
       public void actionPerformed(ActionEvent e) {
-        StudyNextStudyTaskAction studyNextTaskAction = (StudyNextStudyTaskAction)ActionManager.getInstance().getAction("NextTaskAction");
+        StudyNextStudyTaskAction studyNextTaskAction =
+          (StudyNextStudyTaskAction)ActionManager.getInstance().getAction("NextTaskAction");
         studyNextTaskAction.navigateTask(myProject);
       }
     });
@@ -180,7 +241,8 @@
     myRefreshButton.addActionListener(new ActionListener() {
       @Override
       public void actionPerformed(ActionEvent e) {
-        StudyRefreshTaskAction studyRefreshTaskAction = (StudyRefreshTaskAction)ActionManager.getInstance().getAction("RefreshTaskAction");
+        StudyRefreshTaskFileAction studyRefreshTaskAction =
+          (StudyRefreshTaskFileAction)ActionManager.getInstance().getAction("RefreshTaskAction");
         studyRefreshTaskAction.refresh(myProject);
       }
     });
@@ -300,7 +362,8 @@
       if (fileEditor instanceof StudyEditor) {
         return (StudyEditor)fileEditor;
       }
-    } catch (Exception e) {
+    }
+    catch (Exception e) {
       return null;
     }
     return null;
@@ -325,8 +388,9 @@
   @NotNull
   @Override
   public Editor getEditor() {
-    if (myDefaultEditor instanceof TextEditor)
+    if (myDefaultEditor instanceof TextEditor) {
       return ((TextEditor)myDefaultEditor).getEditor();
+    }
     return EditorFactory.getInstance().createViewer(new DocumentImpl(""), myProject);
   }
 
diff --git a/python/edu/learn-python/src/com/jetbrains/python/edu/highlighting/StudyVisitorFilter.java b/python/edu/learn-python/src/com/jetbrains/python/edu/highlighting/StudyVisitorFilter.java
new file mode 100644
index 0000000..dc7495a
--- /dev/null
+++ b/python/edu/learn-python/src/com/jetbrains/python/edu/highlighting/StudyVisitorFilter.java
@@ -0,0 +1,18 @@
+package com.jetbrains.python.edu.highlighting;
+
+import com.intellij.psi.PsiFile;
+import com.jetbrains.python.edu.StudyTaskManager;
+import com.jetbrains.python.inspections.PythonVisitorFilter;
+import com.jetbrains.python.inspections.unresolvedReference.PyUnresolvedReferencesInspection;
+import org.jetbrains.annotations.NotNull;
+
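+// Suppresses the "unresolved references" inspection for files that belong to a study course project.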
+public class StudyVisitorFilter implements PythonVisitorFilter {
+  @Override
+  public boolean isSupported(@NotNull final Class visitorClass, @NotNull final PsiFile file) {
+    if (StudyTaskManager.getInstance(file.getProject()).getCourse() == null) return true;
+    if (visitorClass == PyUnresolvedReferencesInspection.class) {
+      return false;
+    }
+    return true;
+  }
+}
diff --git a/python/edu/learn-python/src/com/jetbrains/python/edu/projectView/StudyDirectoryNode.java b/python/edu/learn-python/src/com/jetbrains/python/edu/projectView/StudyDirectoryNode.java
index abf648c..2f80dba 100644
--- a/python/edu/learn-python/src/com/jetbrains/python/edu/projectView/StudyDirectoryNode.java
+++ b/python/edu/learn-python/src/com/jetbrains/python/edu/projectView/StudyDirectoryNode.java
@@ -32,7 +32,7 @@
 
   @Override
   protected void updateImpl(PresentationData data) {
-    data.setIcon(StudyIcons.Unchecked);
+    data.setIcon(StudyIcons.Task);
     String valueName = myValue.getName();
     StudyTaskManager studyTaskManager = StudyTaskManager.getInstance(myProject);
     Course course = studyTaskManager.getCourse();
@@ -41,7 +41,7 @@
     }
     if (valueName.equals(myProject.getName())) {
       data.clearText();
-      data.addText(course.getName(), new SimpleTextAttributes(SimpleTextAttributes.STYLE_BOLD, JBColor.BLUE));
+      data.addText(course.getName(), new SimpleTextAttributes(SimpleTextAttributes.STYLE_PLAIN, JBColor.BLACK));
       data.addText(" (" + valueName + ")", SimpleTextAttributes.GRAYED_ATTRIBUTES);
       return;
     }
@@ -91,15 +91,16 @@
     StudyStatus taskStatus = stateful.getStatus();
     switch (taskStatus) {
       case Unchecked: {
-        updatePresentation(data, additionalName, JBColor.blue, StudyIcons.Unchecked);
+        updatePresentation(data, additionalName, JBColor.BLACK, stateful instanceof Lesson ? StudyIcons.Lesson : StudyIcons.Task);
         break;
       }
       case Solved: {
-        updatePresentation(data, additionalName, new JBColor(new Color(0, 134, 0), new Color(98, 150, 85)), StudyIcons.Checked);
+        updatePresentation(data, additionalName, new JBColor(new Color(0, 134, 0), new Color(98, 150, 85)),
+                           stateful instanceof Lesson ? StudyIcons.LessonCompl : StudyIcons.TaskCompl);
         break;
       }
       case Failed: {
-        updatePresentation(data, additionalName, JBColor.RED, StudyIcons.Failed);
+        updatePresentation(data, additionalName, JBColor.RED, stateful instanceof Lesson ? StudyIcons.Lesson : StudyIcons.TaskProbl);
       }
     }
   }
diff --git a/python/edu/learn-python/src/com/jetbrains/python/edu/ui/StudyNewProjectPanel.form b/python/edu/learn-python/src/com/jetbrains/python/edu/ui/StudyNewProjectPanel.form
index 133c38d..8dd6506 100644
--- a/python/edu/learn-python/src/com/jetbrains/python/edu/ui/StudyNewProjectPanel.form
+++ b/python/edu/learn-python/src/com/jetbrains/python/edu/ui/StudyNewProjectPanel.form
@@ -39,11 +39,9 @@
           </component>
         </children>
       </grid>
-      <component id="6c40c" class="javax.swing.JLabel">
+      <component id="6c40c" class="javax.swing.JLabel" binding="myLabel">
         <constraints>
-          <grid row="0" column="0" row-span="1" col-span="1" vsize-policy="0" hsize-policy="0" anchor="0" fill="1" indent="0" use-parent-layout="false">
-            <preferred-size width="81" height="-1"/>
-          </grid>
+          <grid row="0" column="0" row-span="1" col-span="1" vsize-policy="0" hsize-policy="0" anchor="0" fill="1" indent="0" use-parent-layout="false"/>
         </constraints>
         <properties>
           <font/>
@@ -67,9 +65,7 @@
       </component>
       <component id="f1e10" class="javax.swing.JButton" binding="myRefreshButton">
         <constraints>
-          <grid row="0" column="3" row-span="1" col-span="1" vsize-policy="0" hsize-policy="0" anchor="0" fill="1" indent="0" use-parent-layout="false">
-            <minimum-size width="30" height="23"/>
-          </grid>
+          <grid row="0" column="3" row-span="1" col-span="1" vsize-policy="0" hsize-policy="0" anchor="0" fill="0" indent="0" use-parent-layout="false"/>
         </constraints>
         <properties>
           <hideActionText value="false"/>
diff --git a/python/edu/learn-python/src/com/jetbrains/python/edu/ui/StudyNewProjectPanel.java b/python/edu/learn-python/src/com/jetbrains/python/edu/ui/StudyNewProjectPanel.java
index 0f1ec08..6edad63 100644
--- a/python/edu/learn-python/src/com/jetbrains/python/edu/ui/StudyNewProjectPanel.java
+++ b/python/edu/learn-python/src/com/jetbrains/python/edu/ui/StudyNewProjectPanel.java
@@ -2,6 +2,7 @@
 
 import com.intellij.facet.ui.FacetValidatorsManager;
 import com.intellij.facet.ui.ValidationResult;
+import com.intellij.icons.AllIcons;
 import com.intellij.openapi.fileChooser.FileChooser;
 import com.intellij.openapi.fileChooser.FileChooserDescriptor;
 import com.intellij.openapi.vfs.VirtualFile;
@@ -9,7 +10,6 @@
 import com.jetbrains.python.edu.StudyDirectoryProjectGenerator;
 import com.jetbrains.python.edu.StudyUtils;
 import com.jetbrains.python.edu.course.CourseInfo;
-import icons.StudyIcons;
 
 import javax.swing.*;
 import java.awt.event.ActionEvent;
@@ -32,6 +32,7 @@
   private JPanel myContentPanel;
   private JLabel myAuthorLabel;
   private JLabel myDescriptionLabel;
+  private JLabel myLabel;
   private final StudyDirectoryProjectGenerator myGenerator;
   private static final String CONNECTION_ERROR = "<html>Failed to download courses.<br>Check your Internet connection.</html>";
   private static final String INVALID_COURSE = "Selected course is invalid";
@@ -56,7 +57,9 @@
     }
     initListeners();
     myRefreshButton.setVisible(true);
-    myRefreshButton.setIcon(StudyIcons.Refresh);
+    myRefreshButton.setIcon(AllIcons.Actions.Refresh);
+
+    myLabel.setPreferredSize(new JLabel("Project name").getPreferredSize());
   }
 
   private void initListeners() {
diff --git a/python/edu/main_pycharm_edu.iml b/python/edu/main_pycharm_edu.iml
index 12efe9b..17f1d67 100644
--- a/python/edu/main_pycharm_edu.iml
+++ b/python/edu/main_pycharm_edu.iml
@@ -16,6 +16,7 @@
     <orderEntry type="module" module-name="ShortcutPromoter" />
     <orderEntry type="module" module-name="python-educational" />
     <orderEntry type="module" module-name="learn-python" />
+    <orderEntry type="module" module-name="course-creator" />
   </component>
 </module>
 
diff --git a/python/edu/resources/idea/PyCharmEduApplicationInfo.xml b/python/edu/resources/idea/PyCharmEduApplicationInfo.xml
index 0fdf0d4..eed232d 100644
--- a/python/edu/resources/idea/PyCharmEduApplicationInfo.xml
+++ b/python/edu/resources/idea/PyCharmEduApplicationInfo.xml
@@ -19,5 +19,5 @@
 
   <feedback eap-url="http://www.jetbrains.com/feedback/feedback.jsp?product=PyCharm&amp;build=$BUILD&amp;timezone=$TIMEZONE&amp;eval=$EVAL"
             release-url="http://www.jetbrains.com/feedback/feedback.jsp?product=PyCharm&amp;build=$BUILD&amp;timezone=$TIMEZONE&amp;eval=$EVAL"/>
-  <help file="pycharmhelp.jar" root="pycharm"/>
+  <help file="pycharm-eduhelp.jar" root="pycharm"/>
 </component>
diff --git a/python/edu/src/META-INF/PyCharmEduPlugin.xml b/python/edu/src/META-INF/PyCharmEduPlugin.xml
index d5b2fdf..87739a9 100644
--- a/python/edu/src/META-INF/PyCharmEduPlugin.xml
+++ b/python/edu/src/META-INF/PyCharmEduPlugin.xml
@@ -19,6 +19,10 @@
     </component>
   </application-components>
 
+  <extensions defaultExtensionNs="com.intellij">
+      <codeInsight.lineMarkerProvider language="Python" implementationClass="com.jetbrains.python.edu.PyExecuteFileLineMarkerProvider"/>
+  </extensions>
+
   <actions>
     <group overrides="true" class="com.intellij.openapi.actionSystem.EmptyActionGroup" id="ToolsMenu"/>
 
@@ -38,5 +42,11 @@
     <action overrides="true" class="com.intellij.openapi.actionSystem.EmptyAction" id="NewHtmlFile"/>
 
 
+    <group id="PyRunMenu">
+      <action id="runCurrentFile" class="com.jetbrains.python.edu.PyRunCurrentFileAction"/>
+      <add-to-group group-id="RunMenu" anchor="first"/>
+    </group>
+
+
   </actions>
 </idea-plugin>
diff --git a/python/edu/src/com/intellij/openapi/application/PyCharmEduConfigImportSettings.java b/python/edu/src/com/intellij/openapi/application/PyCharmEduConfigImportSettings.java
new file mode 100644
index 0000000..989c750
--- /dev/null
+++ b/python/edu/src/com/intellij/openapi/application/PyCharmEduConfigImportSettings.java
@@ -0,0 +1,12 @@
+package com.intellij.openapi.application;
+
+import com.intellij.ide.plugins.PluginManagerCore;
+
+// see com.intellij.openapi.application.ConfigImportHelper.getConfigImportSettings
+@SuppressWarnings("UnusedDeclaration")
+public class PyCharmEduConfigImportSettings extends ConfigImportSettings {
+  public PyCharmEduConfigImportSettings() {
+    PluginManagerCore.disablePlugin("org.jetbrains.plugins.coursecreator");
+  }
+
+}
diff --git a/python/edu/src/com/jetbrains/python/edu/PyCharmEduInitialConfigurator.java b/python/edu/src/com/jetbrains/python/edu/PyCharmEduInitialConfigurator.java
index ecf3d79..2954f7c 100644
--- a/python/edu/src/com/jetbrains/python/edu/PyCharmEduInitialConfigurator.java
+++ b/python/edu/src/com/jetbrains/python/edu/PyCharmEduInitialConfigurator.java
@@ -35,14 +35,15 @@
 import com.intellij.openapi.keymap.Keymap;
 import com.intellij.openapi.keymap.ex.KeymapManagerEx;
 import com.intellij.openapi.keymap.impl.KeymapImpl;
+import com.intellij.openapi.project.DumbAwareRunnable;
 import com.intellij.openapi.project.Project;
 import com.intellij.openapi.project.ProjectManager;
 import com.intellij.openapi.project.ProjectManagerAdapter;
 import com.intellij.openapi.project.ex.ProjectManagerEx;
+import com.intellij.openapi.startup.StartupManager;
+import com.intellij.openapi.util.registry.Registry;
 import com.intellij.openapi.vfs.VfsUtil;
-import com.intellij.openapi.wm.ToolWindowEP;
-import com.intellij.openapi.wm.ToolWindowId;
-import com.intellij.openapi.wm.WindowManager;
+import com.intellij.openapi.wm.*;
 import com.intellij.platform.DirectoryProjectConfigurator;
 import com.intellij.platform.PlatformProjectViewOpener;
 import com.intellij.psi.codeStyle.CodeStyleSettings;
@@ -62,9 +63,9 @@
  */
 @SuppressWarnings({"UtilityClassWithoutPrivateConstructor", "UtilityClassWithPublicConstructor"})
 public class PyCharmEduInitialConfigurator {
-  @NonNls private static final String DISPLAYED_PROPERTY = "PyCharm.initialConfigurationShown";
+  @NonNls private static final String DISPLAYED_PROPERTY = "PyCharmEDU.initialConfigurationShown";
 
-  @NonNls private static final String CONFIGURED = "PyCharm.InitialConfiguration";
+  @NonNls private static final String CONFIGURED = "PyCharmEDU.InitialConfiguration";
 
 
   public static class First {
@@ -93,6 +94,8 @@
       uiSettings.SHOW_MAIN_TOOLBAR = false;
       codeInsightSettings.REFORMAT_ON_PASTE = CodeInsightSettings.NO_REFORMAT;
 
+      Registry.get("ide.new.settings.dialog").setValue(true);
+
       GeneralSettings.getInstance().setShowTipsOnStartup(false);
 
       EditorSettingsExternalizable.getInstance().setVirtualSpace(false);
@@ -113,7 +116,7 @@
           });
         }
       });
-      PyCodeInsightSettings.getInstance().SHOW_IMPORT_POPUP = true;
+      PyCodeInsightSettings.getInstance().SHOW_IMPORT_POPUP = false;
     }
 
     if (!propertiesComponent.isValueSet(DISPLAYED_PROPERTY)) {
@@ -147,6 +150,29 @@
         }
 
         patchProjectAreaExtensions(project);
+
+        StartupManager.getInstance(project).runWhenProjectIsInitialized(new DumbAwareRunnable() {
+          @Override
+          public void run() {
+            if (project.isDisposed()) return;
+
+            ToolWindowManager.getInstance(project).invokeLater(new Runnable() {
+              int count = 0;
+
+              public void run() {
+                if (project.isDisposed()) return;
+                if (count++ < 3) { // we need to call this after ToolWindowManagerImpl.registerToolWindowsFromBeans
+                  ToolWindowManager.getInstance(project).invokeLater(this);
+                  return;
+                }
+                ToolWindow toolWindow = ToolWindowManager.getInstance(project).getToolWindow("Project");
+                if (toolWindow.getType() != ToolWindowType.SLIDING) {
+                  toolWindow.activate(null);
+                }
+              }
+            });
+          }
+        });
       }
     });
   }
@@ -205,6 +231,11 @@
 
   private static void showInitialConfigurationDialog() {
     final JFrame frame = WindowManager.getInstance().findVisibleFrame();
-    new InitialConfigurationDialog(frame, "Python").show();
+    new InitialConfigurationDialog(frame, "Python") {
+      @Override
+      protected boolean canCreateLauncherScript() {
+        return false;
+      }
+    }.show();
   }
 }
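
The startup hook added above re-posts its runnable through ToolWindowManager.invokeLater until an internal counter reaches 3, so it runs only after ToolWindowManagerImpl.registerToolWindowsFromBeans has had a chance to execute. A toy sketch of the same re-posting pattern, with a plain FIFO queue standing in for the IDE event queue (all names below are illustrative and not part of the platform API):

    from collections import deque

    queue = deque()                        # stands in for the IDE event queue

    def invoke_later(runnable):
        queue.append(runnable)

    class ActivateProjectToolWindow(object):
        def __init__(self):
            self.count = 0
        def __call__(self):
            if self.count < 3:             # let already-queued work run first
                self.count += 1
                invoke_later(self)
                return
            print("activate Project tool window")

    invoke_later(ActivateProjectToolWindow())
    invoke_later(lambda: print("register tool windows from beans"))

    while queue:                           # drain the queue in FIFO order
        queue.popleft()()
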
diff --git a/python/edu/src/com/jetbrains/python/edu/PyExecuteFileLineMarkerProvider.java b/python/edu/src/com/jetbrains/python/edu/PyExecuteFileLineMarkerProvider.java
new file mode 100644
index 0000000..03522bb
--- /dev/null
+++ b/python/edu/src/com/jetbrains/python/edu/PyExecuteFileLineMarkerProvider.java
@@ -0,0 +1,91 @@
+package com.jetbrains.python.edu;
+
+import com.intellij.codeHighlighting.Pass;
+import com.intellij.codeInsight.daemon.GutterIconNavigationHandler;
+import com.intellij.codeInsight.daemon.LineMarkerInfo;
+import com.intellij.codeInsight.daemon.LineMarkerProvider;
+import com.intellij.execution.actions.ConfigurationContext;
+import com.intellij.icons.AllIcons;
+import com.intellij.ide.DataManager;
+import com.intellij.openapi.editor.Editor;
+import com.intellij.openapi.editor.markup.GutterIconRenderer;
+import com.intellij.psi.PsiComment;
+import com.intellij.psi.PsiElement;
+import com.intellij.psi.PsiWhiteSpace;
+import com.intellij.psi.util.PsiUtilBase;
+import com.intellij.util.Function;
+import com.jetbrains.python.psi.PyFile;
+import com.jetbrains.python.psi.PyImportStatement;
+import com.jetbrains.python.psi.PyStatement;
+import org.jetbrains.annotations.NotNull;
+import org.jetbrains.annotations.Nullable;
+
+import java.awt.event.MouseEvent;
+import java.util.Collection;
+import java.util.List;
+
+/**
+ * @author traff
+ */
+public class PyExecuteFileLineMarkerProvider implements LineMarkerProvider {
+  @Nullable
+  @Override
+  public LineMarkerInfo getLineMarkerInfo(@NotNull PsiElement element) {
+    return null;
+  }
+
+  @Override
+  public void collectSlowLineMarkers(@NotNull List<PsiElement> elements, @NotNull Collection<LineMarkerInfo> result) {
+    for (PsiElement element : elements) {
+      if (isFirstCodeLine(element)) {
+        result.add(new LineMarkerInfo<PsiElement>(
+          element, element.getTextRange(), AllIcons.Actions.Execute, Pass.UPDATE_OVERRIDEN_MARKERS,
+          new Function<PsiElement, String>() {
+            @Override
+            public String fun(PsiElement e) {
+              return "Execute '" + e.getContainingFile().getName() + "'";
+            }
+          },
+          new GutterIconNavigationHandler<PsiElement>() {
+            @Override
+            public void navigate(MouseEvent e, PsiElement elt) {
+              executeCurrentScript(elt);
+            }
+          },
+          GutterIconRenderer.Alignment.RIGHT));
+      }
+    }
+  }
+
+  private static void executeCurrentScript(PsiElement elt) {
+    Editor editor = PsiUtilBase.findEditor(elt);
+    assert editor != null;
+
+    final ConfigurationContext context =
+      ConfigurationContext.getFromContext(DataManager.getInstance().getDataContext(editor.getComponent()));
+    PyRunCurrentFileAction.run(context);
+  }
+
+  private static boolean isFirstCodeLine(PsiElement element) {
+    return element instanceof PyStatement &&
+           element.getParent() instanceof PyFile &&
+           !isNothing(element) &&
+           nothingBefore(element);
+  }
+
+  private static boolean nothingBefore(PsiElement element) {
+    element = element.getPrevSibling();
+    while (element != null) {
+      if (!isNothing(element)) {
+        return false;
+      }
+      element = element.getPrevSibling();
+    }
+
+    return true;
+  }
+
+  private static boolean isNothing(PsiElement element) {
+    return (element instanceof PsiComment) || (element instanceof PyImportStatement) || (element instanceof PsiWhiteSpace);
+  }
+}
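
The provider above puts a run icon on the first top-level PyStatement whose parent is the PyFile and which is preceded only by comments, import statements and whitespace. A rough Python-side sketch of the same "first code line" heuristic over raw source text, using the standard ast module (a simplified stand-in, not the PSI-based check the plugin actually performs):

    import ast

    def first_code_lineno(source):
        """Line number of the first top-level statement that is not an import."""
        for node in ast.parse(source).body:
            if isinstance(node, (ast.Import, ast.ImportFrom)):
                continue                   # imports don't count as "code", as in the provider
            return node.lineno
        return None                        # nothing but comments/imports/whitespace

    if __name__ == "__main__":
        sample = "# header comment\nimport sys\n\nprint(sys.version)\n"
        print(first_code_lineno(sample))   # -> 4, the line that would get the run icon
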
diff --git a/python/edu/src/com/jetbrains/python/edu/PyRunCurrentFileAction.java b/python/edu/src/com/jetbrains/python/edu/PyRunCurrentFileAction.java
new file mode 100644
index 0000000..4d30fa2
--- /dev/null
+++ b/python/edu/src/com/jetbrains/python/edu/PyRunCurrentFileAction.java
@@ -0,0 +1,56 @@
+package com.jetbrains.python.edu;
+
+import com.intellij.execution.Location;
+import com.intellij.execution.RunManagerEx;
+import com.intellij.execution.RunnerAndConfigurationSettings;
+import com.intellij.execution.actions.ConfigurationContext;
+import com.intellij.execution.executors.DefaultRunExecutor;
+import com.intellij.execution.runners.ExecutionUtil;
+import com.intellij.icons.AllIcons;
+import com.intellij.openapi.actionSystem.AnAction;
+import com.intellij.openapi.actionSystem.AnActionEvent;
+import com.intellij.openapi.actionSystem.Presentation;
+import com.jetbrains.python.PythonFileType;
+import org.jetbrains.annotations.NotNull;
+
+/**
+ * @author traff
+ */
+public class PyRunCurrentFileAction extends AnAction {
+  public PyRunCurrentFileAction() {
+    getTemplatePresentation().setIcon(AllIcons.Actions.Execute);
+  }
+
+  @Override
+  public void update(AnActionEvent e) {
+    Presentation presentation = e.getPresentation();
+    final ConfigurationContext context = ConfigurationContext.getFromContext(e.getDataContext());
+    Location location = context.getLocation();
+    if (location != null && location.getPsiElement().getContainingFile() != null && location.getPsiElement().getContainingFile().getFileType() == PythonFileType.INSTANCE) {
+      presentation.setEnabled(true);
+      presentation.setText("Run '" + location.getPsiElement().getContainingFile().getName() + "'");
+    }
+  }
+
+  @Override
+  public void actionPerformed(AnActionEvent e) {
+    final ConfigurationContext context = ConfigurationContext.getFromContext(e.getDataContext());
+
+    run(context);
+  }
+
+  public static void run(@NotNull ConfigurationContext context) {
+    RunnerAndConfigurationSettings configuration = context.findExisting();
+    final RunManagerEx runManager = (RunManagerEx)context.getRunManager();
+    if (configuration == null) {
+      configuration = context.getConfiguration();
+      if (configuration == null) {
+        return;
+      }
+      runManager.setTemporaryConfiguration(configuration);
+    }
+    runManager.setSelectedConfiguration(configuration);
+
+    ExecutionUtil.runConfiguration(configuration, DefaultRunExecutor.getRunExecutorInstance());
+  }
+}
diff --git a/python/helpers/pycharm/_bdd_utils.py b/python/helpers/pycharm/_bdd_utils.py
index 300feb2..eea1beb 100644
--- a/python/helpers/pycharm/_bdd_utils.py
+++ b/python/helpers/pycharm/_bdd_utils.py
@@ -15,6 +15,20 @@
 __author__ = 'Ilya.Kazakevich'
 
 
+def fix_win_drive(feature_path):
+    """
+    Workaround for issues like http://bugs.python.org/issue7195 on Windows.
+    Pass a feature dir or file path as the argument.
+    This function does nothing on non-Windows platforms, so it is safe to call unconditionally.
+
+    :param feature_path: path to feature (c:/fe.feature or /my/features)
+    """
+    current_disk = (os.path.splitdrive(os.getcwd()))[0]
+    feature_disk = (os.path.splitdrive(feature_path))[0]
+    if current_disk and feature_disk and current_disk != feature_disk:
+        os.chdir(feature_disk)
+
+
 def get_path_by_args(arguments):
     """
     :type arguments list
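
fix_win_drive works around the Windows behaviour described in http://bugs.python.org/issue7195: relative path handling goes wrong when the current drive differs from the drive holding the feature files, so the helper simply changes to the feature's drive first. A small self-contained sketch of the same idea, with a usage demo (illustrative only; the runners patched below call the real helper in _bdd_utils):

    import os

    def _fix_win_drive(feature_path):
        """chdir to the drive of feature_path if it differs from the current drive."""
        current_drive = os.path.splitdrive(os.getcwd())[0]   # e.g. 'c:' on Windows, '' on POSIX
        feature_drive = os.path.splitdrive(feature_path)[0]  # e.g. 'd:' for 'd:/my/features'
        if current_drive and feature_drive and current_drive != feature_drive:
            os.chdir(feature_drive)        # relative paths now resolve on the feature's drive

    if __name__ == "__main__":
        # On POSIX both drive strings are empty, so this is a no-op; on Windows,
        # running from c:\ against d:/my/features switches the process to d: first.
        _fix_win_drive("d:/my/features")
        print(os.getcwd())
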
diff --git a/python/helpers/pycharm/behave_runner.py b/python/helpers/pycharm/behave_runner.py
index 0ad8313..2ec649e 100644
--- a/python/helpers/pycharm/behave_runner.py
+++ b/python/helpers/pycharm/behave_runner.py
@@ -228,6 +228,8 @@
         pass
 
     command_args = list(filter(None, sys.argv[1:]))
+    if command_args:
+        _bdd_utils.fix_win_drive(command_args[0])
     my_config = configuration.Configuration(command_args=command_args)
     formatters.register_as(_Null, "com.intellij.python.null")
     my_config.format = ["com.intellij.python.null"]  # To prevent output to stdout
diff --git a/python/helpers/pycharm/lettuce_runner.py b/python/helpers/pycharm/lettuce_runner.py
index 3cd1125..f0a4b5d 100644
--- a/python/helpers/pycharm/lettuce_runner.py
+++ b/python/helpers/pycharm/lettuce_runner.py
@@ -109,4 +109,5 @@
 
 if __name__ == "__main__":
     (base_dir, what_to_run) = _bdd_utils.get_path_by_args(sys.argv)
+    _bdd_utils.fix_win_drive(what_to_run)
     _LettuceRunner(base_dir, what_to_run).run()
\ No newline at end of file
diff --git a/python/helpers/pydev/README.md b/python/helpers/pydev/README.md
index 7b22116..17df01c 100644
--- a/python/helpers/pydev/README.md
+++ b/python/helpers/pydev/README.md
@@ -1,2 +1,9 @@
 PyDev.Debugger
 ==============
+
+[![Build Status](https://travis-ci.org/fabioz/PyDev.Debugger.png)](https://travis-ci.org/fabioz/PyDev.Debugger)
+
+This repository contains the sources for the Debugger used in PyDev & PyCharm.
+
+It should be compatible with Python 2.4 onwards (as well as Jython 2.2.1, IronPython and PyPy -- and any
+other variant which properly supports the Python debugging hooks, i.e. sys.settrace/threading.settrace).
\ No newline at end of file
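
The compatibility claim above comes down to the interpreter honouring the standard tracing hooks. A minimal sketch of what a sys.settrace/threading.settrace hook looks like (not part of pydevd; just an illustration of the mechanism the debugger relies on):

    import sys
    import threading

    def trace(frame, event, arg):
        if event == "line":
            print("line %d in %s" % (frame.f_lineno, frame.f_code.co_name))
        return trace                       # keep receiving events for this frame

    def demo():
        x = 1
        x += 1
        return x

    if __name__ == "__main__":
        threading.settrace(trace)          # threads started later inherit the hook
        sys.settrace(trace)                # current thread
        demo()
        sys.settrace(None)
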
diff --git a/python/helpers/pydev/__init__.py b/python/helpers/pydev/__init__.py
deleted file mode 100644
index e69de29..0000000
--- a/python/helpers/pydev/__init__.py
+++ /dev/null
diff --git a/python/helpers/pydev/_pydev_BaseHTTPServer.py b/python/helpers/pydev/_pydev_BaseHTTPServer.py
deleted file mode 100644
index 1dcef2e..0000000
--- a/python/helpers/pydev/_pydev_BaseHTTPServer.py
+++ /dev/null
@@ -1,604 +0,0 @@
-"""HTTP server base class.
-
-Note: the class in this module doesn't implement any HTTP request; see
-SimpleHTTPServer for simple implementations of GET, HEAD and POST
-(including CGI scripts).  It does, however, optionally implement HTTP/1.1
-persistent connections, as of version 0.3.
-
-Contents:
-
-- BaseHTTPRequestHandler: HTTP request handler base class
-- test: test function
-
-XXX To do:
-
-- log requests even later (to capture byte count)
-- log user-agent header and other interesting goodies
-- send error log to separate file
-"""
-
-
-# See also:
-#
-# HTTP Working Group                                        T. Berners-Lee
-# INTERNET-DRAFT                                            R. T. Fielding
-# <draft-ietf-http-v10-spec-00.txt>                     H. Frystyk Nielsen
-# Expires September 8, 1995                                  March 8, 1995
-#
-# URL: http://www.ics.uci.edu/pub/ietf/http/draft-ietf-http-v10-spec-00.txt
-#
-# and
-#
-# Network Working Group                                      R. Fielding
-# Request for Comments: 2616                                       et al
-# Obsoletes: 2068                                              June 1999
-# Category: Standards Track
-#
-# URL: http://www.faqs.org/rfcs/rfc2616.html
-
-# Log files
-# ---------
-#
-# Here's a quote from the NCSA httpd docs about log file format.
-#
-# | The logfile format is as follows. Each line consists of:
-# |
-# | host rfc931 authuser [DD/Mon/YYYY:hh:mm:ss] "request" ddd bbbb
-# |
-# |        host: Either the DNS name or the IP number of the remote client
-# |        rfc931: Any information returned by identd for this person,
-# |                - otherwise.
-# |        authuser: If user sent a userid for authentication, the user name,
-# |                  - otherwise.
-# |        DD: Day
-# |        Mon: Month (calendar name)
-# |        YYYY: Year
-# |        hh: hour (24-hour format, the machine's timezone)
-# |        mm: minutes
-# |        ss: seconds
-# |        request: The first line of the HTTP request as sent by the client.
-# |        ddd: the status code returned by the server, - if not available.
-# |        bbbb: the total number of bytes sent,
-# |              *not including the HTTP/1.0 header*, - if not available
-# |
-# | You can determine the name of the file accessed through request.
-#
-# (Actually, the latter is only true if you know the server configuration
-# at the time the request was made!)
-
-__version__ = "0.3"
-
-__all__ = ["HTTPServer", "BaseHTTPRequestHandler"]
-
-import sys
-import _pydev_time as time
-import _pydev_socket as socket # For gethostbyaddr()
-from warnings import filterwarnings, catch_warnings
-with catch_warnings():
-    if sys.py3kwarning:
-        filterwarnings("ignore", ".*mimetools has been removed",
-                        DeprecationWarning)
-    import mimetools
-
-import _pydev_SocketServer as SocketServer
-
-# Default error message template
-DEFAULT_ERROR_MESSAGE = """\
-<head>
-<title>Error response</title>
-</head>
-<body>
-<h1>Error response</h1>
-<p>Error code %(code)d.
-<p>Message: %(message)s.
-<p>Error code explanation: %(code)s = %(explain)s.
-</body>
-"""
-
-DEFAULT_ERROR_CONTENT_TYPE = "text/html"
-
-def _quote_html(html):
-    return html.replace("&", "&amp;").replace("<", "&lt;").replace(">", "&gt;")
-
-class HTTPServer(SocketServer.TCPServer):
-
-    allow_reuse_address = 1    # Seems to make sense in testing environment
-
-    def server_bind(self):
-        """Override server_bind to store the server name."""
-        SocketServer.TCPServer.server_bind(self)
-        host, port = self.socket.getsockname()[:2]
-        self.server_name = socket.getfqdn(host)
-        self.server_port = port
-
-
-class BaseHTTPRequestHandler(SocketServer.StreamRequestHandler):
-
-    """HTTP request handler base class.
-
-    The following explanation of HTTP serves to guide you through the
-    code as well as to expose any misunderstandings I may have about
-    HTTP (so you don't need to read the code to figure out I'm wrong
-    :-).
-
-    HTTP (HyperText Transfer Protocol) is an extensible protocol on
-    top of a reliable stream transport (e.g. TCP/IP).  The protocol
-    recognizes three parts to a request:
-
-    1. One line identifying the request type and path
-    2. An optional set of RFC-822-style headers
-    3. An optional data part
-
-    The headers and data are separated by a blank line.
-
-    The first line of the request has the form
-
-    <command> <path> <version>
-
-    where <command> is a (case-sensitive) keyword such as GET or POST,
-    <path> is a string containing path information for the request,
-    and <version> should be the string "HTTP/1.0" or "HTTP/1.1".
-    <path> is encoded using the URL encoding scheme (using %xx to signify
-    the ASCII character with hex code xx).
-
-    The specification specifies that lines are separated by CRLF but
-    for compatibility with the widest range of clients recommends
-    servers also handle LF.  Similarly, whitespace in the request line
-    is treated sensibly (allowing multiple spaces between components
-    and allowing trailing whitespace).
-
-    Similarly, for output, lines ought to be separated by CRLF pairs
-    but most clients grok LF characters just fine.
-
-    If the first line of the request has the form
-
-    <command> <path>
-
-    (i.e. <version> is left out) then this is assumed to be an HTTP
-    0.9 request; this form has no optional headers and data part and
-    the reply consists of just the data.
-
-    The reply form of the HTTP 1.x protocol again has three parts:
-
-    1. One line giving the response code
-    2. An optional set of RFC-822-style headers
-    3. The data
-
-    Again, the headers and data are separated by a blank line.
-
-    The response code line has the form
-
-    <version> <responsecode> <responsestring>
-
-    where <version> is the protocol version ("HTTP/1.0" or "HTTP/1.1"),
-    <responsecode> is a 3-digit response code indicating success or
-    failure of the request, and <responsestring> is an optional
-    human-readable string explaining what the response code means.
-
-    This server parses the request and the headers, and then calls a
-    function specific to the request type (<command>).  Specifically,
-    a request SPAM will be handled by a method do_SPAM().  If no
-    such method exists the server sends an error response to the
-    client.  If it exists, it is called with no arguments:
-
-    do_SPAM()
-
-    Note that the request name is case sensitive (i.e. SPAM and spam
-    are different requests).
-
-    The various request details are stored in instance variables:
-
-    - client_address is the client IP address in the form (host,
-    port);
-
-    - command, path and version are the broken-down request line;
-
-    - headers is an instance of mimetools.Message (or a derived
-    class) containing the header information;
-
-    - rfile is a file object open for reading positioned at the
-    start of the optional input data part;
-
-    - wfile is a file object open for writing.
-
-    IT IS IMPORTANT TO ADHERE TO THE PROTOCOL FOR WRITING!
-
-    The first thing to be written must be the response line.  Then
-    follow 0 or more header lines, then a blank line, and then the
-    actual data (if any).  The meaning of the header lines depends on
-    the command executed by the server; in most cases, when data is
-    returned, there should be at least one header line of the form
-
-    Content-type: <type>/<subtype>
-
-    where <type> and <subtype> should be registered MIME types,
-    e.g. "text/html" or "text/plain".
-
-    """
-
-    # The Python system version, truncated to its first component.
-    sys_version = "Python/" + sys.version.split()[0]
-
-    # The server software version.  You may want to override this.
-    # The format is multiple whitespace-separated strings,
-    # where each string is of the form name[/version].
-    server_version = "BaseHTTP/" + __version__
-
-    # The default request version.  This only affects responses up until
-    # the point where the request line is parsed, so it mainly decides what
-    # the client gets back when sending a malformed request line.
-    # Most web servers default to HTTP 0.9, i.e. don't send a status line.
-    default_request_version = "HTTP/0.9"
-
-    def parse_request(self):
-        """Parse a request (internal).
-
-        The request should be stored in self.raw_requestline; the results
-        are in self.command, self.path, self.request_version and
-        self.headers.
-
-        Return True for success, False for failure; on failure, an
-        error is sent back.
-
-        """
-        self.command = None  # set in case of error on the first line
-        self.request_version = version = self.default_request_version
-        self.close_connection = 1
-        requestline = self.raw_requestline
-        requestline = requestline.rstrip('\r\n')
-        self.requestline = requestline
-        words = requestline.split()
-        if len(words) == 3:
-            command, path, version = words
-            if version[:5] != 'HTTP/':
-                self.send_error(400, "Bad request version (%r)" % version)
-                return False
-            try:
-                base_version_number = version.split('/', 1)[1]
-                version_number = base_version_number.split(".")
-                # RFC 2145 section 3.1 says there can be only one "." and
-                #   - major and minor numbers MUST be treated as
-                #      separate integers;
-                #   - HTTP/2.4 is a lower version than HTTP/2.13, which in
-                #      turn is lower than HTTP/12.3;
-                #   - Leading zeros MUST be ignored by recipients.
-                if len(version_number) != 2:
-                    raise ValueError
-                version_number = int(version_number[0]), int(version_number[1])
-            except (ValueError, IndexError):
-                self.send_error(400, "Bad request version (%r)" % version)
-                return False
-            if version_number >= (1, 1) and self.protocol_version >= "HTTP/1.1":
-                self.close_connection = 0
-            if version_number >= (2, 0):
-                self.send_error(505,
-                          "Invalid HTTP Version (%s)" % base_version_number)
-                return False
-        elif len(words) == 2:
-            command, path = words
-            self.close_connection = 1
-            if command != 'GET':
-                self.send_error(400,
-                                "Bad HTTP/0.9 request type (%r)" % command)
-                return False
-        elif not words:
-            return False
-        else:
-            self.send_error(400, "Bad request syntax (%r)" % requestline)
-            return False
-        self.command, self.path, self.request_version = command, path, version
-
-        # Examine the headers and look for a Connection directive
-        self.headers = self.MessageClass(self.rfile, 0)
-
-        conntype = self.headers.get('Connection', "")
-        if conntype.lower() == 'close':
-            self.close_connection = 1
-        elif (conntype.lower() == 'keep-alive' and
-              self.protocol_version >= "HTTP/1.1"):
-            self.close_connection = 0
-        return True
-
-    def handle_one_request(self):
-        """Handle a single HTTP request.
-
-        You normally don't need to override this method; see the class
-        __doc__ string for information on how to handle specific HTTP
-        commands such as GET and POST.
-
-        """
-        try:
-            self.raw_requestline = self.rfile.readline(65537)
-            if len(self.raw_requestline) > 65536:
-                self.requestline = ''
-                self.request_version = ''
-                self.command = ''
-                self.send_error(414)
-                return
-            if not self.raw_requestline:
-                self.close_connection = 1
-                return
-            if not self.parse_request():
-                # An error code has been sent, just exit
-                return
-            mname = 'do_' + self.command
-            if not hasattr(self, mname):
-                self.send_error(501, "Unsupported method (%r)" % self.command)
-                return
-            method = getattr(self, mname)
-            method()
-            self.wfile.flush() #actually send the response if not already done.
-        except socket.timeout:
-            #a read or a write timed out.  Discard this connection
-            self.log_error("Request timed out: %r", sys.exc_info()[1])
-            self.close_connection = 1
-            return
-
-    def handle(self):
-        """Handle multiple requests if necessary."""
-        self.close_connection = 1
-
-        self.handle_one_request()
-        while not self.close_connection:
-            self.handle_one_request()
-
-    def send_error(self, code, message=None):
-        """Send and log an error reply.
-
-        Arguments are the error code, and a detailed message.
-        The detailed message defaults to the short entry matching the
-        response code.
-
-        This sends an error response (so it must be called before any
-        output has been generated), logs the error, and finally sends
-        a piece of HTML explaining the error to the user.
-
-        """
-
-        try:
-            short, long = self.responses[code]
-        except KeyError:
-            short, long = '???', '???'
-        if message is None:
-            message = short
-        explain = long
-        self.log_error("code %d, message %s", code, message)
-        # using _quote_html to prevent Cross Site Scripting attacks (see bug #1100201)
-        content = (self.error_message_format %
-                   {'code': code, 'message': _quote_html(message), 'explain': explain})
-        self.send_response(code, message)
-        self.send_header("Content-Type", self.error_content_type)
-        self.send_header('Connection', 'close')
-        self.end_headers()
-        if self.command != 'HEAD' and code >= 200 and code not in (204, 304):
-            self.wfile.write(content)
-
-    error_message_format = DEFAULT_ERROR_MESSAGE
-    error_content_type = DEFAULT_ERROR_CONTENT_TYPE
-
-    def send_response(self, code, message=None):
-        """Send the response header and log the response code.
-
-        Also send two standard headers with the server software
-        version and the current date.
-
-        """
-        self.log_request(code)
-        if message is None:
-            if code in self.responses:
-                message = self.responses[code][0]
-            else:
-                message = ''
-        if self.request_version != 'HTTP/0.9':
-            self.wfile.write("%s %d %s\r\n" %
-                             (self.protocol_version, code, message))
-            # print (self.protocol_version, code, message)
-        self.send_header('Server', self.version_string())
-        self.send_header('Date', self.date_time_string())
-
-    def send_header(self, keyword, value):
-        """Send a MIME header."""
-        if self.request_version != 'HTTP/0.9':
-            self.wfile.write("%s: %s\r\n" % (keyword, value))
-
-        if keyword.lower() == 'connection':
-            if value.lower() == 'close':
-                self.close_connection = 1
-            elif value.lower() == 'keep-alive':
-                self.close_connection = 0
-
-    def end_headers(self):
-        """Send the blank line ending the MIME headers."""
-        if self.request_version != 'HTTP/0.9':
-            self.wfile.write("\r\n")
-
-    def log_request(self, code='-', size='-'):
-        """Log an accepted request.
-
-        This is called by send_response().
-
-        """
-
-        self.log_message('"%s" %s %s',
-                         self.requestline, str(code), str(size))
-
-    def log_error(self, format, *args):
-        """Log an error.
-
-        This is called when a request cannot be fulfilled.  By
-        default it passes the message on to log_message().
-
-        Arguments are the same as for log_message().
-
-        XXX This should go to the separate error log.
-
-        """
-
-        self.log_message(format, *args)
-
-    def log_message(self, format, *args):
-        """Log an arbitrary message.
-
-        This is used by all other logging functions.  Override
-        it if you have specific logging wishes.
-
-        The first argument, FORMAT, is a format string for the
-        message to be logged.  If the format string contains
-        any % escapes requiring parameters, they should be
-        specified as subsequent arguments (it's just like
-        printf!).
-
-        The client host and current date/time are prefixed to
-        every message.
-
-        """
-
-        sys.stderr.write("%s - - [%s] %s\n" %
-                         (self.address_string(),
-                          self.log_date_time_string(),
-                          format%args))
-
-    def version_string(self):
-        """Return the server software version string."""
-        return self.server_version + ' ' + self.sys_version
-
-    def date_time_string(self, timestamp=None):
-        """Return the current date and time formatted for a message header."""
-        if timestamp is None:
-            timestamp = time.time()
-        year, month, day, hh, mm, ss, wd, y, z = time.gmtime(timestamp)
-        s = "%s, %02d %3s %4d %02d:%02d:%02d GMT" % (
-                self.weekdayname[wd],
-                day, self.monthname[month], year,
-                hh, mm, ss)
-        return s
-
-    def log_date_time_string(self):
-        """Return the current time formatted for logging."""
-        now = time.time()
-        year, month, day, hh, mm, ss, x, y, z = time.localtime(now)
-        s = "%02d/%3s/%04d %02d:%02d:%02d" % (
-                day, self.monthname[month], year, hh, mm, ss)
-        return s
-
-    weekdayname = ['Mon', 'Tue', 'Wed', 'Thu', 'Fri', 'Sat', 'Sun']
-
-    monthname = [None,
-                 'Jan', 'Feb', 'Mar', 'Apr', 'May', 'Jun',
-                 'Jul', 'Aug', 'Sep', 'Oct', 'Nov', 'Dec']
-
-    def address_string(self):
-        """Return the client address formatted for logging.
-
-        This version looks up the full hostname using gethostbyaddr(),
-        and tries to find a name that contains at least one dot.
-
-        """
-
-        host, port = self.client_address[:2]
-        return socket.getfqdn(host)
-
-    # Essentially static class variables
-
-    # The version of the HTTP protocol we support.
-    # Set this to HTTP/1.1 to enable automatic keepalive
-    protocol_version = "HTTP/1.0"
-
-    # The Message-like class used to parse headers
-    MessageClass = mimetools.Message
-
-    # Table mapping response codes to messages; entries have the
-    # form {code: (shortmessage, longmessage)}.
-    # See RFC 2616.
-    responses = {
-        100: ('Continue', 'Request received, please continue'),
-        101: ('Switching Protocols',
-              'Switching to new protocol; obey Upgrade header'),
-
-        200: ('OK', 'Request fulfilled, document follows'),
-        201: ('Created', 'Document created, URL follows'),
-        202: ('Accepted',
-              'Request accepted, processing continues off-line'),
-        203: ('Non-Authoritative Information', 'Request fulfilled from cache'),
-        204: ('No Content', 'Request fulfilled, nothing follows'),
-        205: ('Reset Content', 'Clear input form for further input.'),
-        206: ('Partial Content', 'Partial content follows.'),
-
-        300: ('Multiple Choices',
-              'Object has several resources -- see URI list'),
-        301: ('Moved Permanently', 'Object moved permanently -- see URI list'),
-        302: ('Found', 'Object moved temporarily -- see URI list'),
-        303: ('See Other', 'Object moved -- see Method and URL list'),
-        304: ('Not Modified',
-              'Document has not changed since given time'),
-        305: ('Use Proxy',
-              'You must use proxy specified in Location to access this '
-              'resource.'),
-        307: ('Temporary Redirect',
-              'Object moved temporarily -- see URI list'),
-
-        400: ('Bad Request',
-              'Bad request syntax or unsupported method'),
-        401: ('Unauthorized',
-              'No permission -- see authorization schemes'),
-        402: ('Payment Required',
-              'No payment -- see charging schemes'),
-        403: ('Forbidden',
-              'Request forbidden -- authorization will not help'),
-        404: ('Not Found', 'Nothing matches the given URI'),
-        405: ('Method Not Allowed',
-              'Specified method is invalid for this resource.'),
-        406: ('Not Acceptable', 'URI not available in preferred format.'),
-        407: ('Proxy Authentication Required', 'You must authenticate with '
-              'this proxy before proceeding.'),
-        408: ('Request Timeout', 'Request timed out; try again later.'),
-        409: ('Conflict', 'Request conflict.'),
-        410: ('Gone',
-              'URI no longer exists and has been permanently removed.'),
-        411: ('Length Required', 'Client must specify Content-Length.'),
-        412: ('Precondition Failed', 'Precondition in headers is false.'),
-        413: ('Request Entity Too Large', 'Entity is too large.'),
-        414: ('Request-URI Too Long', 'URI is too long.'),
-        415: ('Unsupported Media Type', 'Entity body in unsupported format.'),
-        416: ('Requested Range Not Satisfiable',
-              'Cannot satisfy request range.'),
-        417: ('Expectation Failed',
-              'Expect condition could not be satisfied.'),
-
-        500: ('Internal Server Error', 'Server got itself in trouble'),
-        501: ('Not Implemented',
-              'Server does not support this operation'),
-        502: ('Bad Gateway', 'Invalid responses from another server/proxy.'),
-        503: ('Service Unavailable',
-              'The server cannot process the request due to a high load'),
-        504: ('Gateway Timeout',
-              'The gateway server did not receive a timely response'),
-        505: ('HTTP Version Not Supported', 'Cannot fulfill request.'),
-        }
-
-
-def test(HandlerClass = BaseHTTPRequestHandler,
-         ServerClass = HTTPServer, protocol="HTTP/1.0"):
-    """Test the HTTP request handler class.
-
-    This runs an HTTP server on port 8000 (or the first command line
-    argument).
-
-    """
-
-    if sys.argv[1:]:
-        port = int(sys.argv[1])
-    else:
-        port = 8000
-    server_address = ('', port)
-
-    HandlerClass.protocol_version = protocol
-    httpd = ServerClass(server_address, HandlerClass)
-
-    sa = httpd.socket.getsockname()
-    print ("Serving HTTP on", sa[0], "port", sa[1], "...")
-    httpd.serve_forever()
-
-
-if __name__ == '__main__':
-    test()
diff --git a/python/helpers/pydev/_pydev_Queue.py b/python/helpers/pydev/_pydev_Queue.py
deleted file mode 100644
index cc32ea6..0000000
--- a/python/helpers/pydev/_pydev_Queue.py
+++ /dev/null
@@ -1,244 +0,0 @@
-"""A multi-producer, multi-consumer queue."""
-
-from _pydev_time import time as _time
-try:
-    import _pydev_threading as _threading
-except ImportError:
-    import dummy_threading as _threading
-from collections import deque
-import heapq
-
-__all__ = ['Empty', 'Full', 'Queue', 'PriorityQueue', 'LifoQueue']
-
-class Empty(Exception):
-    "Exception raised by Queue.get(block=0)/get_nowait()."
-    pass
-
-class Full(Exception):
-    "Exception raised by Queue.put(block=0)/put_nowait()."
-    pass
-
-class Queue:
-    """Create a queue object with a given maximum size.
-
-    If maxsize is <= 0, the queue size is infinite.
-    """
-    def __init__(self, maxsize=0):
-        self.maxsize = maxsize
-        self._init(maxsize)
-        # mutex must be held whenever the queue is mutating.  All methods
-        # that acquire mutex must release it before returning.  mutex
-        # is shared between the three conditions, so acquiring and
-        # releasing the conditions also acquires and releases mutex.
-        self.mutex = _threading.Lock()
-        # Notify not_empty whenever an item is added to the queue; a
-        # thread waiting to get is notified then.
-        self.not_empty = _threading.Condition(self.mutex)
-        # Notify not_full whenever an item is removed from the queue;
-        # a thread waiting to put is notified then.
-        self.not_full = _threading.Condition(self.mutex)
-        # Notify all_tasks_done whenever the number of unfinished tasks
-        # drops to zero; thread waiting to join() is notified to resume
-        self.all_tasks_done = _threading.Condition(self.mutex)
-        self.unfinished_tasks = 0
-
-    def task_done(self):
-        """Indicate that a formerly enqueued task is complete.
-
-        Used by Queue consumer threads.  For each get() used to fetch a task,
-        a subsequent call to task_done() tells the queue that the processing
-        on the task is complete.
-
-        If a join() is currently blocking, it will resume when all items
-        have been processed (meaning that a task_done() call was received
-        for every item that had been put() into the queue).
-
-        Raises a ValueError if called more times than there were items
-        placed in the queue.
-        """
-        self.all_tasks_done.acquire()
-        try:
-            unfinished = self.unfinished_tasks - 1
-            if unfinished <= 0:
-                if unfinished < 0:
-                    raise ValueError('task_done() called too many times')
-                self.all_tasks_done.notify_all()
-            self.unfinished_tasks = unfinished
-        finally:
-            self.all_tasks_done.release()
-
-    def join(self):
-        """Blocks until all items in the Queue have been gotten and processed.
-
-        The count of unfinished tasks goes up whenever an item is added to the
-        queue. The count goes down whenever a consumer thread calls task_done()
-        to indicate the item was retrieved and all work on it is complete.
-
-        When the count of unfinished tasks drops to zero, join() unblocks.
-        """
-        self.all_tasks_done.acquire()
-        try:
-            while self.unfinished_tasks:
-                self.all_tasks_done.wait()
-        finally:
-            self.all_tasks_done.release()
-
-    def qsize(self):
-        """Return the approximate size of the queue (not reliable!)."""
-        self.mutex.acquire()
-        n = self._qsize()
-        self.mutex.release()
-        return n
-
-    def empty(self):
-        """Return True if the queue is empty, False otherwise (not reliable!)."""
-        self.mutex.acquire()
-        n = not self._qsize()
-        self.mutex.release()
-        return n
-
-    def full(self):
-        """Return True if the queue is full, False otherwise (not reliable!)."""
-        self.mutex.acquire()
-        n = 0 < self.maxsize == self._qsize()
-        self.mutex.release()
-        return n
-
-    def put(self, item, block=True, timeout=None):
-        """Put an item into the queue.
-
-        If optional args 'block' is true and 'timeout' is None (the default),
-        block if necessary until a free slot is available. If 'timeout' is
-        a positive number, it blocks at most 'timeout' seconds and raises
-        the Full exception if no free slot was available within that time.
-        Otherwise ('block' is false), put an item on the queue if a free slot
-        is immediately available, else raise the Full exception ('timeout'
-        is ignored in that case).
-        """
-        self.not_full.acquire()
-        try:
-            if self.maxsize > 0:
-                if not block:
-                    if self._qsize() == self.maxsize:
-                        raise Full
-                elif timeout is None:
-                    while self._qsize() == self.maxsize:
-                        self.not_full.wait()
-                elif timeout < 0:
-                    raise ValueError("'timeout' must be a positive number")
-                else:
-                    endtime = _time() + timeout
-                    while self._qsize() == self.maxsize:
-                        remaining = endtime - _time()
-                        if remaining <= 0.0:
-                            raise Full
-                        self.not_full.wait(remaining)
-            self._put(item)
-            self.unfinished_tasks += 1
-            self.not_empty.notify()
-        finally:
-            self.not_full.release()
-
-    def put_nowait(self, item):
-        """Put an item into the queue without blocking.
-
-        Only enqueue the item if a free slot is immediately available.
-        Otherwise raise the Full exception.
-        """
-        return self.put(item, False)
-
-    def get(self, block=True, timeout=None):
-        """Remove and return an item from the queue.
-
-        If optional args 'block' is true and 'timeout' is None (the default),
-        block if necessary until an item is available. If 'timeout' is
-        a positive number, it blocks at most 'timeout' seconds and raises
-        the Empty exception if no item was available within that time.
-        Otherwise ('block' is false), return an item if one is immediately
-        available, else raise the Empty exception ('timeout' is ignored
-        in that case).
-        """
-        self.not_empty.acquire()
-        try:
-            if not block:
-                if not self._qsize():
-                    raise Empty
-            elif timeout is None:
-                while not self._qsize():
-                    self.not_empty.wait()
-            elif timeout < 0:
-                raise ValueError("'timeout' must be a positive number")
-            else:
-                endtime = _time() + timeout
-                while not self._qsize():
-                    remaining = endtime - _time()
-                    if remaining <= 0.0:
-                        raise Empty
-                    self.not_empty.wait(remaining)
-            item = self._get()
-            self.not_full.notify()
-            return item
-        finally:
-            self.not_empty.release()
-
-    def get_nowait(self):
-        """Remove and return an item from the queue without blocking.
-
-        Only get an item if one is immediately available. Otherwise
-        raise the Empty exception.
-        """
-        return self.get(False)
-
-    # Override these methods to implement other queue organizations
-    # (e.g. stack or priority queue).
-    # These will only be called with appropriate locks held
-
-    # Initialize the queue representation
-    def _init(self, maxsize):
-        self.queue = deque()
-
-    def _qsize(self, len=len):
-        return len(self.queue)
-
-    # Put a new item in the queue
-    def _put(self, item):
-        self.queue.append(item)
-
-    # Get an item from the queue
-    def _get(self):
-        return self.queue.popleft()
-
-
-class PriorityQueue(Queue):
-    '''Variant of Queue that retrieves open entries in priority order (lowest first).
-
-    Entries are typically tuples of the form:  (priority number, data).
-    '''
-
-    def _init(self, maxsize):
-        self.queue = []
-
-    def _qsize(self, len=len):
-        return len(self.queue)
-
-    def _put(self, item, heappush=heapq.heappush):
-        heappush(self.queue, item)
-
-    def _get(self, heappop=heapq.heappop):
-        return heappop(self.queue)
-
-
-class LifoQueue(Queue):
-    '''Variant of Queue that retrieves most recently added entries first.'''
-
-    def _init(self, maxsize):
-        self.queue = []
-
-    def _qsize(self, len=len):
-        return len(self.queue)
-
-    def _put(self, item):
-        self.queue.append(item)
-
-    def _get(self):
-        return self.queue.pop()
diff --git a/python/helpers/pydev/_pydev_SimpleXMLRPCServer.py b/python/helpers/pydev/_pydev_SimpleXMLRPCServer.py
deleted file mode 100644
index c7da5b7..0000000
--- a/python/helpers/pydev/_pydev_SimpleXMLRPCServer.py
+++ /dev/null
@@ -1,610 +0,0 @@
-#Just a copy of the version in python 2.5 to be used if it's not available in jython 2.1
-
-"""Simple XML-RPC Server.
-
-This module can be used to create simple XML-RPC servers
-by creating a server and either installing functions, a
-class instance, or by extending the SimpleXMLRPCServer
-class.
-
-It can also be used to handle XML-RPC requests in a CGI
-environment using CGIXMLRPCRequestHandler.
-
-A list of possible usage patterns follows:
-
-1. Install functions:
-
-server = SimpleXMLRPCServer(("localhost", 8000))
-server.register_function(pow)
-server.register_function(lambda x,y: x+y, 'add')
-server.serve_forever()
-
-2. Install an instance:
-
-class MyFuncs:
-    def __init__(self):
-        # make all of the string functions available through
-        # string.func_name
-        import string
-        self.string = string
-    def _listMethods(self):
-        # implement this method so that system.listMethods
-        # knows to advertise the strings methods
-        return list_public_methods(self) + \
-                ['string.' + method for method in list_public_methods(self.string)]
-    def pow(self, x, y): return pow(x, y)
-    def add(self, x, y) : return x + y
-
-server = SimpleXMLRPCServer(("localhost", 8000))
-server.register_introspection_functions()
-server.register_instance(MyFuncs())
-server.serve_forever()
-
-3. Install an instance with custom dispatch method:
-
-class Math:
-    def _listMethods(self):
-        # this method must be present for system.listMethods
-        # to work
-        return ['add', 'pow']
-    def _methodHelp(self, method):
-        # this method must be present for system.methodHelp
-        # to work
-        if method == 'add':
-            return "add(2,3) => 5"
-        elif method == 'pow':
-            return "pow(x, y[, z]) => number"
-        else:
-            # By convention, return empty
-            # string if no help is available
-            return ""
-    def _dispatch(self, method, params):
-        if method == 'pow':
-            return pow(*params)
-        elif method == 'add':
-            return params[0] + params[1]
-        else:
-            raise 'bad method'
-
-server = SimpleXMLRPCServer(("localhost", 8000))
-server.register_introspection_functions()
-server.register_instance(Math())
-server.serve_forever()
-
-4. Subclass SimpleXMLRPCServer:
-
-class MathServer(SimpleXMLRPCServer):
-    def _dispatch(self, method, params):
-        try:
-            # We are forcing the 'export_' prefix on methods that are
-            # callable through XML-RPC to prevent potential security
-            # problems
-            func = getattr(self, 'export_' + method)
-        except AttributeError:
-            raise Exception('method "%s" is not supported' % method)
-        else:
-            return func(*params)
-
-    def export_add(self, x, y):
-        return x + y
-
-server = MathServer(("localhost", 8000))
-server.serve_forever()
-
-5. CGI script:
-
-server = CGIXMLRPCRequestHandler()
-server.register_function(pow)
-server.handle_request()
-"""
-
-# Written by Brian Quinlan (brian@sweetapp.com).
-# Based on code written by Fredrik Lundh.
-
-try:
-    True
-    False
-except:
-    import __builtin__
-    setattr(__builtin__, 'True', 1) #Python 3.0 does not accept __builtin__.True = 1 in its syntax
-    setattr(__builtin__, 'False', 0)
-    
-
-import _pydev_xmlrpclib as xmlrpclib
-from _pydev_xmlrpclib import Fault
-import _pydev_SocketServer as SocketServer
-import _pydev_BaseHTTPServer as BaseHTTPServer
-import sys
-import os
-try:
-    import fcntl
-except ImportError:
-    fcntl = None
-
-def resolve_dotted_attribute(obj, attr, allow_dotted_names=True):
-    """resolve_dotted_attribute(a, 'b.c.d') => a.b.c.d
-
-    Resolves a dotted attribute name to an object.  Raises
-    an AttributeError if any attribute in the chain starts with a '_'.
-
-    If the optional allow_dotted_names argument is false, dots are not
-    supported and this function operates similar to getattr(obj, attr).
-    """
-
-    if allow_dotted_names:
-        attrs = attr.split('.')
-    else:
-        attrs = [attr]
-
-    for i in attrs:
-        if i.startswith('_'):
-            raise AttributeError(
-                'attempt to access private attribute "%s"' % i
-                )
-        else:
-            obj = getattr(obj, i)
-    return obj
-
-def list_public_methods(obj):
-    """Returns a list of attribute strings, found in the specified
-    object, which represent callable attributes"""
-
-    return [member for member in dir(obj)
-                if not member.startswith('_') and
-                    callable(getattr(obj, member))]
-
-def remove_duplicates(lst):
-    """remove_duplicates([2,2,2,1,3,3]) => [3,1,2]
-
-    Returns a copy of a list without duplicates. Every list
-    item must be hashable and the order of the items in the
-    resulting list is not defined.
-    """
-    u = {}
-    for x in lst:
-        u[x] = 1
-
-    return u.keys()
-
-class SimpleXMLRPCDispatcher:
-    """Mix-in class that dispatches XML-RPC requests.
-
-    This class is used to register XML-RPC method handlers
-    and then to dispatch them. There should never be any
-    reason to instantiate this class directly.
-    """
-
-    def __init__(self, allow_none, encoding):
-        self.funcs = {}
-        self.instance = None
-        self.allow_none = allow_none
-        self.encoding = encoding
-
-    def register_instance(self, instance, allow_dotted_names=False):
-        """Registers an instance to respond to XML-RPC requests.
-
-        Only one instance can be installed at a time.
-
-        If the registered instance has a _dispatch method then that
-        method will be called with the name of the XML-RPC method and
-        its parameters as a tuple
-        e.g. instance._dispatch('add',(2,3))
-
-        If the registered instance does not have a _dispatch method
-        then the instance will be searched to find a matching method
-        and, if found, will be called. Methods beginning with an '_'
-        are considered private and will not be called by
-        SimpleXMLRPCServer.
-
-        If a registered function matches a XML-RPC request, then it
-        will be called instead of the registered instance.
-
-        If the optional allow_dotted_names argument is true and the
-        instance does not have a _dispatch method, method names
-        containing dots are supported and resolved, as long as none of
-        the name segments start with an '_'.
-
-            *** SECURITY WARNING: ***
-
-            Enabling the allow_dotted_names options allows intruders
-            to access your module's global variables and may allow
-            intruders to execute arbitrary code on your machine.  Only
-            use this option on a secure, closed network.
-
-        """
-
-        self.instance = instance
-        self.allow_dotted_names = allow_dotted_names
-
-    def register_function(self, function, name=None):
-        """Registers a function to respond to XML-RPC requests.
-
-        The optional name argument can be used to set a Unicode name
-        for the function.
-        """
-
-        if name is None:
-            name = function.__name__
-        self.funcs[name] = function
-
-    def register_introspection_functions(self):
-        """Registers the XML-RPC introspection methods in the system
-        namespace.
-
-        see http://xmlrpc.usefulinc.com/doc/reserved.html
-        """
-
-        self.funcs.update({'system.listMethods' : self.system_listMethods,
-                      'system.methodSignature' : self.system_methodSignature,
-                      'system.methodHelp' : self.system_methodHelp})
-
-    def register_multicall_functions(self):
-        """Registers the XML-RPC multicall method in the system
-        namespace.
-
-        see http://www.xmlrpc.com/discuss/msgReader$1208"""
-
-        self.funcs.update({'system.multicall' : self.system_multicall})
-
-    def _marshaled_dispatch(self, data, dispatch_method=None):
-        """Dispatches an XML-RPC method from marshalled (XML) data.
-
-        XML-RPC methods are dispatched from the marshalled (XML) data
-        using the _dispatch method and the result is returned as
-        marshalled data. For backwards compatibility, a dispatch
-        function can be provided as an argument (see comment in
-        SimpleXMLRPCRequestHandler.do_POST) but overriding the
-        existing method through subclassing is the prefered means
-        of changing method dispatch behavior.
-        """
-        try:
-            params, method = xmlrpclib.loads(data)
-
-            # generate response
-            if dispatch_method is not None:
-                response = dispatch_method(method, params)
-            else:
-                response = self._dispatch(method, params)
-            # wrap response in a singleton tuple
-            response = (response,)
-            response = xmlrpclib.dumps(response, methodresponse=1,
-                                       allow_none=self.allow_none, encoding=self.encoding)
-        except Fault, fault:
-            response = xmlrpclib.dumps(fault, allow_none=self.allow_none,
-                                       encoding=self.encoding)
-        except:
-            # report exception back to server
-            response = xmlrpclib.dumps(
-                xmlrpclib.Fault(1, "%s:%s" % (sys.exc_type, sys.exc_value)), #@UndefinedVariable exc_value only available when we actually have an exception
-                encoding=self.encoding, allow_none=self.allow_none,
-                )
-
-        return response
-
-    def system_listMethods(self):
-        """system.listMethods() => ['add', 'subtract', 'multiple']
-
-        Returns a list of the methods supported by the server."""
-
-        methods = self.funcs.keys()
-        if self.instance is not None:
-            # Instance can implement _listMethod to return a list of
-            # methods
-            if hasattr(self.instance, '_listMethods'):
-                methods = remove_duplicates(
-                        methods + self.instance._listMethods()
-                    )
-            # if the instance has a _dispatch method then we
-            # don't have enough information to provide a list
-            # of methods
-            elif not hasattr(self.instance, '_dispatch'):
-                methods = remove_duplicates(
-                        methods + list_public_methods(self.instance)
-                    )
-        methods.sort()
-        return methods
-
-    def system_methodSignature(self, method_name):
-        """system.methodSignature('add') => [double, int, int]
-
-        Returns a list describing the signature of the method. In the
-        above example, the add method takes two integers as arguments
-        and returns a double result.
-
-        This server does NOT support system.methodSignature."""
-
-        # See http://xmlrpc.usefulinc.com/doc/sysmethodsig.html
-
-        return 'signatures not supported'
-
-    def system_methodHelp(self, method_name):
-        """system.methodHelp('add') => "Adds two integers together"
-
-        Returns a string containing documentation for the specified method."""
-
-        method = None
-        if self.funcs.has_key(method_name):
-            method = self.funcs[method_name]
-        elif self.instance is not None:
-            # Instance can implement _methodHelp to return help for a method
-            if hasattr(self.instance, '_methodHelp'):
-                return self.instance._methodHelp(method_name)
-            # if the instance has a _dispatch method then we
-            # don't have enough information to provide help
-            elif not hasattr(self.instance, '_dispatch'):
-                try:
-                    method = resolve_dotted_attribute(
-                                self.instance,
-                                method_name,
-                                self.allow_dotted_names
-                                )
-                except AttributeError:
-                    pass
-
-        # Note that we aren't checking that the method actually
-        # be a callable object of some kind
-        if method is None:
-            return ""
-        else:
-            try:
-                import pydoc
-            except ImportError:
-                return "" #not there for jython
-            else:
-                return pydoc.getdoc(method)
-
-    def system_multicall(self, call_list):
-        """system.multicall([{'methodName': 'add', 'params': [2, 2]}, ...]) => \
-[[4], ...]
-
-        Allows the caller to package multiple XML-RPC calls into a single
-        request.
-
-        See http://www.xmlrpc.com/discuss/msgReader$1208
-        """
-
-        results = []
-        for call in call_list:
-            method_name = call['methodName']
-            params = call['params']
-
-            try:
-                # XXX A marshalling error in any response will fail the entire
-                # multicall. If someone cares they should fix this.
-                results.append([self._dispatch(method_name, params)])
-            except Fault, fault:
-                results.append(
-                    {'faultCode' : fault.faultCode,
-                     'faultString' : fault.faultString}
-                    )
-            except:
-                results.append(
-                    {'faultCode' : 1,
-                     'faultString' : "%s:%s" % (sys.exc_type, sys.exc_value)} #@UndefinedVariable exc_value only available when we actually have an exception
-                    )
-        return results
-
-    def _dispatch(self, method, params):
-        """Dispatches the XML-RPC method.
-
-        XML-RPC calls are forwarded to a registered function that
-        matches the called XML-RPC method name. If no such function
-        exists then the call is forwarded to the registered instance,
-        if available.
-
-        If the registered instance has a _dispatch method then that
-        method will be called with the name of the XML-RPC method and
-        its parameters as a tuple
-        e.g. instance._dispatch('add',(2,3))
-
-        If the registered instance does not have a _dispatch method
-        then the instance will be searched to find a matching method
-        and, if found, will be called.
-
-        Methods beginning with an '_' are considered private and will
-        not be called.
-        """
-
-        func = None
-        try:
-            # check to see if a matching function has been registered
-            func = self.funcs[method]
-        except KeyError:
-            if self.instance is not None:
-                # check for a _dispatch method
-                if hasattr(self.instance, '_dispatch'):
-                    return self.instance._dispatch(method, params)
-                else:
-                    # call instance method directly
-                    try:
-                        func = resolve_dotted_attribute(
-                            self.instance,
-                            method,
-                            self.allow_dotted_names
-                            )
-                    except AttributeError:
-                        pass
-
-        if func is not None:
-            return func(*params)
-        else:
-            raise Exception('method "%s" is not supported' % method)
-
-class SimpleXMLRPCRequestHandler(BaseHTTPServer.BaseHTTPRequestHandler):
-    """Simple XML-RPC request handler class.
-
-    Handles all HTTP POST requests and attempts to decode them as
-    XML-RPC requests.
-    """
-
-    # Class attribute listing the accessible path components;
-    # paths not on this list will result in a 404 error.
-    rpc_paths = ('/', '/RPC2')
-
-    def is_rpc_path_valid(self):
-        if self.rpc_paths:
-            return self.path in self.rpc_paths
-        else:
-            # If .rpc_paths is empty, just assume all paths are legal
-            return True
-
-    def do_POST(self):
-        """Handles the HTTP POST request.
-
-        Attempts to interpret all HTTP POST requests as XML-RPC calls,
-        which are forwarded to the server's _dispatch method for handling.
-        """
-
-        # Check that the path is legal
-        if not self.is_rpc_path_valid():
-            self.report_404()
-            return
-
-        try:
-            # Get arguments by reading body of request.
-            # We read this in chunks to avoid straining
-            # socket.read(); around the 10 or 15Mb mark, some platforms
-            # begin to have problems (bug #792570).
-            max_chunk_size = 10 * 1024 * 1024
-            size_remaining = int(self.headers["content-length"])
-            L = []
-            while size_remaining:
-                chunk_size = min(size_remaining, max_chunk_size)
-                L.append(self.rfile.read(chunk_size))
-                size_remaining -= len(L[-1])
-            data = ''.join(L)
-
-            # In previous versions of SimpleXMLRPCServer, _dispatch
-            # could be overridden in this class, instead of in
-            # SimpleXMLRPCDispatcher. To maintain backwards compatibility,
-            # check to see if a subclass implements _dispatch and dispatch
-            # using that method if present.
-            response = self.server._marshaled_dispatch(
-                    data, getattr(self, '_dispatch', None)
-                )
-        except: # This should only happen if the module is buggy
-            # internal error, report as HTTP server error
-            self.send_response(500)
-            self.end_headers()
-        else:
-            # got a valid XML RPC response
-            self.send_response(200)
-            self.send_header("Content-type", "text/xml")
-            self.send_header("Content-length", str(len(response)))
-            self.end_headers()
-            self.wfile.write(response)
-
-            # shut down the connection
-            self.wfile.flush()
-            self.connection.shutdown(1)
-
-    def report_404(self):
-        # Report a 404 error
-        self.send_response(404)
-        response = 'No such page'
-        self.send_header("Content-type", "text/plain")
-        self.send_header("Content-length", str(len(response)))
-        self.end_headers()
-        self.wfile.write(response)
-        # shut down the connection
-        self.wfile.flush()
-        self.connection.shutdown(1)
-
-    def log_request(self, code='-', size='-'):
-        """Selectively log an accepted request."""
-
-        if self.server.logRequests:
-            BaseHTTPServer.BaseHTTPRequestHandler.log_request(self, code, size)
-
-class SimpleXMLRPCServer(SocketServer.TCPServer,
-                         SimpleXMLRPCDispatcher):
-    """Simple XML-RPC server.
-
-    Simple XML-RPC server that allows functions and a single instance
-    to be installed to handle requests. The default implementation
-    attempts to dispatch XML-RPC calls to the functions or instance
-    installed in the server. Override the _dispatch method inherited
-    from SimpleXMLRPCDispatcher to change this behavior.
-    """
-
-    allow_reuse_address = True
-
-    def __init__(self, addr, requestHandler=SimpleXMLRPCRequestHandler,
-                 logRequests=True, allow_none=False, encoding=None):
-        self.logRequests = logRequests
-
-        SimpleXMLRPCDispatcher.__init__(self, allow_none, encoding)
-        SocketServer.TCPServer.__init__(self, addr, requestHandler)
-
-        # [Bug #1222790] If possible, set close-on-exec flag; if a
-        # method spawns a subprocess, the subprocess shouldn't have
-        # the listening socket open.
-        if fcntl is not None and hasattr(fcntl, 'FD_CLOEXEC'):
-            flags = fcntl.fcntl(self.fileno(), fcntl.F_GETFD)
-            flags |= fcntl.FD_CLOEXEC
-            fcntl.fcntl(self.fileno(), fcntl.F_SETFD, flags)
-
-class CGIXMLRPCRequestHandler(SimpleXMLRPCDispatcher):
-    """Simple handler for XML-RPC data passed through CGI."""
-
-    def __init__(self, allow_none=False, encoding=None):
-        SimpleXMLRPCDispatcher.__init__(self, allow_none, encoding)
-
-    def handle_xmlrpc(self, request_text):
-        """Handle a single XML-RPC request"""
-
-        response = self._marshaled_dispatch(request_text)
-
-        sys.stdout.write('Content-Type: text/xml\n')
-        sys.stdout.write('Content-Length: %d\n' % len(response))
-        sys.stdout.write('\n')
-        
-        sys.stdout.write(response)
-
-    def handle_get(self):
-        """Handle a single HTTP GET request.
-
-        Default implementation indicates an error because
-        XML-RPC uses the POST method.
-        """
-
-        code = 400
-        message, explain = \
-                 BaseHTTPServer.BaseHTTPRequestHandler.responses[code]
-
-        response = BaseHTTPServer.DEFAULT_ERROR_MESSAGE % { #@UndefinedVariable
-             'code' : code,
-             'message' : message,
-             'explain' : explain
-            }
-        sys.stdout.write('Status: %d %s\n' % (code, message))
-        sys.stdout.write('Content-Type: text/html\n')
-        sys.stdout.write('Content-Length: %d\n' % len(response))
-        sys.stdout.write('\n')
-        
-        sys.stdout.write(response)
-
-    def handle_request(self, request_text=None):
-        """Handle a single XML-RPC request passed through a CGI post method.
-
-        If no XML data is given then it is read from stdin. The resulting
-        XML-RPC response is printed to stdout along with the correct HTTP
-        headers.
-        """
-
-        if request_text is None and \
-            os.environ.get('REQUEST_METHOD', None) == 'GET':
-            self.handle_get()
-        else:
-            # POST data is normally available through stdin
-            if request_text is None:
-                request_text = sys.stdin.read()
-
-            self.handle_xmlrpc(request_text)
-
-if __name__ == '__main__':
-    sys.stdout.write('Running XML-RPC server on port 8000\n')
-    server = SimpleXMLRPCServer(("localhost", 8000))
-    server.register_function(pow)
-    server.register_function(lambda x, y: x + y, 'add')
-    server.serve_forever()
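For reference, the demo block at the end of the deleted file can be exercised from a second interpreter with Python 2's standard xmlrpclib client. This is a minimal sketch, assuming the server above is already listening on localhost:8000; it only calls the two functions the demo registers (pow and the 'add' lambda), both of which are routed through SimpleXMLRPCDispatcher._dispatch shown earlier.

import xmlrpclib

proxy = xmlrpclib.ServerProxy('http://localhost:8000')

# Each call becomes an HTTP POST handled by SimpleXMLRPCRequestHandler.do_POST
# and dispatched by method name via _dispatch().
print proxy.add(2, 3)    # -> 5
print proxy.pow(2, 10)   # -> 1024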
diff --git a/python/helpers/pydev/_pydev_SocketServer.py b/python/helpers/pydev/_pydev_SocketServer.py
deleted file mode 100644
index c611126..0000000
--- a/python/helpers/pydev/_pydev_SocketServer.py
+++ /dev/null
@@ -1,715 +0,0 @@
-"""Generic socket server classes.
-
-This module tries to capture the various aspects of defining a server:
-
-For socket-based servers:
-
-- address family:
-        - AF_INET{,6}: IP (Internet Protocol) sockets (default)
-        - AF_UNIX: Unix domain sockets
-        - others, e.g. AF_DECNET are conceivable (see <socket.h>)
-- socket type:
-        - SOCK_STREAM (reliable stream, e.g. TCP)
-        - SOCK_DGRAM (datagrams, e.g. UDP)
-
-For request-based servers (including socket-based):
-
-- client address verification before further looking at the request
-        (This is actually a hook for any processing that needs to look
-         at the request before anything else, e.g. logging)
-- how to handle multiple requests:
-        - synchronous (one request is handled at a time)
-        - forking (each request is handled by a new process)
-        - threading (each request is handled by a new thread)
-
-The classes in this module favor the server type that is simplest to
-write: a synchronous TCP/IP server.  This is bad class design, but
-saves some typing.  (There's also the issue that a deep class hierarchy
-slows down method lookups.)
-
-There are five classes in an inheritance diagram, four of which represent
-synchronous servers of four types:
-
-        +------------+
-        | BaseServer |
-        +------------+
-              |
-              v
-        +-----------+        +------------------+
-        | TCPServer |------->| UnixStreamServer |
-        +-----------+        +------------------+
-              |
-              v
-        +-----------+        +--------------------+
-        | UDPServer |------->| UnixDatagramServer |
-        +-----------+        +--------------------+
-
-Note that UnixDatagramServer derives from UDPServer, not from
-UnixStreamServer -- the only difference between an IP and a Unix
-stream server is the address family, which is simply repeated in both
-unix server classes.
-
-Forking and threading versions of each type of server can be created
-using the ForkingMixIn and ThreadingMixIn mix-in classes.  For
-instance, a threading UDP server class is created as follows:
-
-        class ThreadingUDPServer(ThreadingMixIn, UDPServer): pass
-
-The Mix-in class must come first, since it overrides a method defined
-in UDPServer! Setting the various member variables also changes
-the behavior of the underlying server mechanism.
-
-To implement a service, you must derive a class from
-BaseRequestHandler and redefine its handle() method.  You can then run
-various versions of the service by combining one of the server classes
-with your request handler class.
-
-The request handler class must be different for datagram or stream
-services.  This can be hidden by using the request handler
-subclasses StreamRequestHandler or DatagramRequestHandler.
-
-Of course, you still have to use your head!
-
-For instance, it makes no sense to use a forking server if the service
-contains state in memory that can be modified by requests (since the
-modifications in the child process would never reach the initial state
-kept in the parent process and passed to each child).  In this case,
-you can use a threading server, but you will probably have to use
-locks to prevent two requests that come in nearly simultaneously from applying
-conflicting changes to the server state.
-
-On the other hand, if you are building e.g. an HTTP server, where all
-data is stored externally (e.g. in the file system), a synchronous
-class will essentially render the service "deaf" while one request is
-being handled -- which may be for a very long time if a client is slow
-to read all the data it has requested.  Here a threading or forking
-server is appropriate.
-
-In some cases, it may be appropriate to process part of a request
-synchronously, but to finish processing in a forked child depending on
-the request data.  This can be implemented by using a synchronous
-server and doing an explicit fork in the request handler class
-handle() method.
-
-Another approach to handling multiple simultaneous requests in an
-environment that supports neither threads nor fork (or where these are
-too expensive or inappropriate for the service) is to maintain an
-explicit table of partially finished requests and to use select() to
-decide which request to work on next (or whether to handle a new
-incoming request).  This is particularly important for stream services
-where each client can potentially be connected for a long time (if
-threads or subprocesses cannot be used).
-
-Future work:
-- Standard classes for Sun RPC (which uses either UDP or TCP)
-- Standard mix-in classes to implement various authentication
-  and encryption schemes
-- Standard framework for select-based multiplexing
-
-XXX Open problems:
-- What to do with out-of-band data?
-
-BaseServer:
-- split generic "request" functionality out into BaseServer class.
-  Copyright (C) 2000  Luke Kenneth Casson Leighton <lkcl@samba.org>
-
-  example: read entries from a SQL database (requires overriding
-  get_request() to return a table entry from the database).
-  entry is processed by a RequestHandlerClass.
-
-"""
-
-# Author of the BaseServer patch: Luke Kenneth Casson Leighton
-
-# XXX Warning!
-# There is a test suite for this module, but it cannot be run by the
-# standard regression test.
-# To run it manually, run Lib/test/test_socketserver.py.
-
-__version__ = "0.4"
-
-
-import _pydev_socket as socket
-import _pydev_select as select
-import sys
-import os
-try:
-    import _pydev_threading as threading
-except ImportError:
-    import dummy_threading as threading
-
-__all__ = ["TCPServer","UDPServer","ForkingUDPServer","ForkingTCPServer",
-           "ThreadingUDPServer","ThreadingTCPServer","BaseRequestHandler",
-           "StreamRequestHandler","DatagramRequestHandler",
-           "ThreadingMixIn", "ForkingMixIn"]
-if hasattr(socket, "AF_UNIX"):
-    __all__.extend(["UnixStreamServer","UnixDatagramServer",
-                    "ThreadingUnixStreamServer",
-                    "ThreadingUnixDatagramServer"])
-
-class BaseServer:
-
-    """Base class for server classes.
-
-    Methods for the caller:
-
-    - __init__(server_address, RequestHandlerClass)
-    - serve_forever(poll_interval=0.5)
-    - shutdown()
-    - handle_request()  # if you do not use serve_forever()
-    - fileno() -> int   # for select()
-
-    Methods that may be overridden:
-
-    - server_bind()
-    - server_activate()
-    - get_request() -> request, client_address
-    - handle_timeout()
-    - verify_request(request, client_address)
-    - server_close()
-    - process_request(request, client_address)
-    - shutdown_request(request)
-    - close_request(request)
-    - handle_error()
-
-    Methods for derived classes:
-
-    - finish_request(request, client_address)
-
-    Class variables that may be overridden by derived classes or
-    instances:
-
-    - timeout
-    - address_family
-    - socket_type
-    - allow_reuse_address
-
-    Instance variables:
-
-    - RequestHandlerClass
-    - socket
-
-    """
-
-    timeout = None
-
-    def __init__(self, server_address, RequestHandlerClass):
-        """Constructor.  May be extended, do not override."""
-        self.server_address = server_address
-        self.RequestHandlerClass = RequestHandlerClass
-        self.__is_shut_down = threading.Event()
-        self.__shutdown_request = False
-
-    def server_activate(self):
-        """Called by constructor to activate the server.
-
-        May be overridden.
-
-        """
-        pass
-
-    def serve_forever(self, poll_interval=0.5):
-        """Handle one request at a time until shutdown.
-
-        Polls for shutdown every poll_interval seconds. Ignores
-        self.timeout. If you need to do periodic tasks, do them in
-        another thread.
-        """
-        self.__is_shut_down.clear()
-        try:
-            while not self.__shutdown_request:
-                # XXX: Consider using another file descriptor or
-                # connecting to the socket to wake this up instead of
-                # polling. Polling reduces our responsiveness to a
-                # shutdown request and wastes cpu at all other times.
-                r, w, e = select.select([self], [], [], poll_interval)
-                if self in r:
-                    self._handle_request_noblock()
-        finally:
-            self.__shutdown_request = False
-            self.__is_shut_down.set()
-
-    def shutdown(self):
-        """Stops the serve_forever loop.
-
-        Blocks until the loop has finished. This must be called while
-        serve_forever() is running in another thread, or it will
-        deadlock.
-        """
-        self.__shutdown_request = True
-        self.__is_shut_down.wait()
-
-    # The distinction between handling, getting, processing and
-    # finishing a request is fairly arbitrary.  Remember:
-    #
-    # - handle_request() is the top-level call.  It calls
-    #   select, get_request(), verify_request() and process_request()
-    # - get_request() is different for stream or datagram sockets
-    # - process_request() is the place that may fork a new process
-    #   or create a new thread to finish the request
-    # - finish_request() instantiates the request handler class;
-    #   this constructor will handle the request all by itself
-
-    def handle_request(self):
-        """Handle one request, possibly blocking.
-
-        Respects self.timeout.
-        """
-        # Support people who used socket.settimeout() to escape
-        # handle_request before self.timeout was available.
-        timeout = self.socket.gettimeout()
-        if timeout is None:
-            timeout = self.timeout
-        elif self.timeout is not None:
-            timeout = min(timeout, self.timeout)
-        fd_sets = select.select([self], [], [], timeout)
-        if not fd_sets[0]:
-            self.handle_timeout()
-            return
-        self._handle_request_noblock()
-
-    def _handle_request_noblock(self):
-        """Handle one request, without blocking.
-
-        I assume that select.select has returned that the socket is
-        readable before this function was called, so there should be
-        no risk of blocking in get_request().
-        """
-        try:
-            request, client_address = self.get_request()
-        except socket.error:
-            return
-        if self.verify_request(request, client_address):
-            try:
-                self.process_request(request, client_address)
-            except:
-                self.handle_error(request, client_address)
-                self.shutdown_request(request)
-
-    def handle_timeout(self):
-        """Called if no new request arrives within self.timeout.
-
-        Overridden by ForkingMixIn.
-        """
-        pass
-
-    def verify_request(self, request, client_address):
-        """Verify the request.  May be overridden.
-
-        Return True if we should proceed with this request.
-
-        """
-        return True
-
-    def process_request(self, request, client_address):
-        """Call finish_request.
-
-        Overridden by ForkingMixIn and ThreadingMixIn.
-
-        """
-        self.finish_request(request, client_address)
-        self.shutdown_request(request)
-
-    def server_close(self):
-        """Called to clean-up the server.
-
-        May be overridden.
-
-        """
-        pass
-
-    def finish_request(self, request, client_address):
-        """Finish one request by instantiating RequestHandlerClass."""
-        self.RequestHandlerClass(request, client_address, self)
-
-    def shutdown_request(self, request):
-        """Called to shutdown and close an individual request."""
-        self.close_request(request)
-
-    def close_request(self, request):
-        """Called to clean up an individual request."""
-        pass
-
-    def handle_error(self, request, client_address):
-        """Handle an error gracefully.  May be overridden.
-
-        The default is to print a traceback and continue.
-
-        """
-        print '-'*40
-        print 'Exception happened during processing of request from',
-        print client_address
-        import traceback
-        traceback.print_exc() # XXX But this goes to stderr!
-        print '-'*40
-
-
-class TCPServer(BaseServer):
-
-    """Base class for various socket-based server classes.
-
-    Defaults to synchronous IP stream (i.e., TCP).
-
-    Methods for the caller:
-
-    - __init__(server_address, RequestHandlerClass, bind_and_activate=True)
-    - serve_forever(poll_interval=0.5)
-    - shutdown()
-    - handle_request()  # if you don't use serve_forever()
-    - fileno() -> int   # for select()
-
-    Methods that may be overridden:
-
-    - server_bind()
-    - server_activate()
-    - get_request() -> request, client_address
-    - handle_timeout()
-    - verify_request(request, client_address)
-    - process_request(request, client_address)
-    - shutdown_request(request)
-    - close_request(request)
-    - handle_error()
-
-    Methods for derived classes:
-
-    - finish_request(request, client_address)
-
-    Class variables that may be overridden by derived classes or
-    instances:
-
-    - timeout
-    - address_family
-    - socket_type
-    - request_queue_size (only for stream sockets)
-    - allow_reuse_address
-
-    Instance variables:
-
-    - server_address
-    - RequestHandlerClass
-    - socket
-
-    """
-
-    address_family = socket.AF_INET
-
-    socket_type = socket.SOCK_STREAM
-
-    request_queue_size = 5
-
-    allow_reuse_address = False
-
-    def __init__(self, server_address, RequestHandlerClass, bind_and_activate=True):
-        """Constructor.  May be extended, do not override."""
-        BaseServer.__init__(self, server_address, RequestHandlerClass)
-        self.socket = socket.socket(self.address_family,
-                                    self.socket_type)
-        if bind_and_activate:
-            self.server_bind()
-            self.server_activate()
-
-    def server_bind(self):
-        """Called by constructor to bind the socket.
-
-        May be overridden.
-
-        """
-        if self.allow_reuse_address:
-            self.socket.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)
-        self.socket.bind(self.server_address)
-        self.server_address = self.socket.getsockname()
-
-    def server_activate(self):
-        """Called by constructor to activate the server.
-
-        May be overridden.
-
-        """
-        self.socket.listen(self.request_queue_size)
-
-    def server_close(self):
-        """Called to clean-up the server.
-
-        May be overridden.
-
-        """
-        self.socket.close()
-
-    def fileno(self):
-        """Return socket file number.
-
-        Interface required by select().
-
-        """
-        return self.socket.fileno()
-
-    def get_request(self):
-        """Get the request and client address from the socket.
-
-        May be overridden.
-
-        """
-        return self.socket.accept()
-
-    def shutdown_request(self, request):
-        """Called to shutdown and close an individual request."""
-        try:
-            #explicitly shutdown.  socket.close() merely releases
-            #the socket and waits for GC to perform the actual close.
-            request.shutdown(socket.SHUT_WR)
-        except socket.error:
-            pass #some platforms may raise ENOTCONN here
-        self.close_request(request)
-
-    def close_request(self, request):
-        """Called to clean up an individual request."""
-        request.close()
-
-
-class UDPServer(TCPServer):
-
-    """UDP server class."""
-
-    allow_reuse_address = False
-
-    socket_type = socket.SOCK_DGRAM
-
-    max_packet_size = 8192
-
-    def get_request(self):
-        data, client_addr = self.socket.recvfrom(self.max_packet_size)
-        return (data, self.socket), client_addr
-
-    def server_activate(self):
-        # No need to call listen() for UDP.
-        pass
-
-    def shutdown_request(self, request):
-        # No need to shutdown anything.
-        self.close_request(request)
-
-    def close_request(self, request):
-        # No need to close anything.
-        pass
-
-class ForkingMixIn:
-
-    """Mix-in class to handle each request in a new process."""
-
-    timeout = 300
-    active_children = None
-    max_children = 40
-
-    def collect_children(self):
-        """Internal routine to wait for children that have exited."""
-        if self.active_children is None: return
-        while len(self.active_children) >= self.max_children:
-            # XXX: This will wait for any child process, not just ones
-            # spawned by this library. This could confuse other
-            # libraries that expect to be able to wait for their own
-            # children.
-            try:
-                pid, status = os.waitpid(0, 0)
-            except os.error:
-                pid = None
-            if pid not in self.active_children: continue
-            self.active_children.remove(pid)
-
-        # XXX: This loop runs more system calls than it ought
-        # to. There should be a way to put the active_children into a
-        # process group and then use os.waitpid(-pgid) to wait for any
-        # of that set, but I couldn't find a way to allocate pgids
-        # that couldn't collide.
-        for child in self.active_children:
-            try:
-                pid, status = os.waitpid(child, os.WNOHANG)
-            except os.error:
-                pid = None
-            if not pid: continue
-            try:
-                self.active_children.remove(pid)
-            except ValueError, e:
-                raise ValueError('%s. x=%d and list=%r' % (e.message, pid,
-                                                           self.active_children))
-
-    def handle_timeout(self):
-        """Wait for zombies after self.timeout seconds of inactivity.
-
-        May be extended, do not override.
-        """
-        self.collect_children()
-
-    def process_request(self, request, client_address):
-        """Fork a new subprocess to process the request."""
-        self.collect_children()
-        pid = os.fork()
-        if pid:
-            # Parent process
-            if self.active_children is None:
-                self.active_children = []
-            self.active_children.append(pid)
-            self.close_request(request) #close handle in parent process
-            return
-        else:
-            # Child process.
-            # This must never return, hence os._exit()!
-            try:
-                self.finish_request(request, client_address)
-                self.shutdown_request(request)
-                os._exit(0)
-            except:
-                try:
-                    self.handle_error(request, client_address)
-                    self.shutdown_request(request)
-                finally:
-                    os._exit(1)
-
-
-class ThreadingMixIn:
-    """Mix-in class to handle each request in a new thread."""
-
-    # Decides how threads will act upon termination of the
-    # main process
-    daemon_threads = False
-
-    def process_request_thread(self, request, client_address):
-        """Same as in BaseServer but as a thread.
-
-        In addition, exception handling is done here.
-
-        """
-        try:
-            self.finish_request(request, client_address)
-            self.shutdown_request(request)
-        except:
-            self.handle_error(request, client_address)
-            self.shutdown_request(request)
-
-    def process_request(self, request, client_address):
-        """Start a new thread to process the request."""
-        t = threading.Thread(target = self.process_request_thread,
-                             args = (request, client_address))
-        t.daemon = self.daemon_threads
-        t.start()
-
-
-class ForkingUDPServer(ForkingMixIn, UDPServer): pass
-class ForkingTCPServer(ForkingMixIn, TCPServer): pass
-
-class ThreadingUDPServer(ThreadingMixIn, UDPServer): pass
-class ThreadingTCPServer(ThreadingMixIn, TCPServer): pass
-
-if hasattr(socket, 'AF_UNIX'):
-
-    class UnixStreamServer(TCPServer):
-        address_family = socket.AF_UNIX
-
-    class UnixDatagramServer(UDPServer):
-        address_family = socket.AF_UNIX
-
-    class ThreadingUnixStreamServer(ThreadingMixIn, UnixStreamServer): pass
-
-    class ThreadingUnixDatagramServer(ThreadingMixIn, UnixDatagramServer): pass
-
-class BaseRequestHandler:
-
-    """Base class for request handler classes.
-
-    This class is instantiated for each request to be handled.  The
-    constructor sets the instance variables request, client_address
-    and server, and then calls the handle() method.  To implement a
-    specific service, all you need to do is to derive a class which
-    defines a handle() method.
-
-    The handle() method can find the request as self.request, the
-    client address as self.client_address, and the server (in case it
-    needs access to per-server information) as self.server.  Since a
-    separate instance is created for each request, the handle() method
-    can define arbitrary other instance variables.
-
-    """
-
-    def __init__(self, request, client_address, server):
-        self.request = request
-        self.client_address = client_address
-        self.server = server
-        self.setup()
-        try:
-            self.handle()
-        finally:
-            self.finish()
-
-    def setup(self):
-        pass
-
-    def handle(self):
-        pass
-
-    def finish(self):
-        pass
-
-
-# The following two classes make it possible to use the same service
-# class for stream or datagram servers.
-# Each class sets up these instance variables:
-# - rfile: a file object from which the request is read
-# - wfile: a file object to which the reply is written
-# When the handle() method returns, wfile is flushed properly
-
-
-class StreamRequestHandler(BaseRequestHandler):
-
-    """Define self.rfile and self.wfile for stream sockets."""
-
-    # Default buffer sizes for rfile, wfile.
-    # We default rfile to buffered because otherwise it could be
-    # really slow for large data (a getc() call per byte); we make
-    # wfile unbuffered because (a) often after a write() we want to
-    # read and we need to flush the line; (b) big writes to unbuffered
-    # files are typically optimized by stdio even when big reads
-    # aren't.
-    rbufsize = -1
-    wbufsize = 0
-
-    # A timeout to apply to the request socket, if not None.
-    timeout = None
-
-    # Disable nagle algorithm for this socket, if True.
-    # Use only when wbufsize != 0, to avoid small packets.
-    disable_nagle_algorithm = False
-
-    def setup(self):
-        self.connection = self.request
-        if self.timeout is not None:
-            self.connection.settimeout(self.timeout)
-        if self.disable_nagle_algorithm:
-            self.connection.setsockopt(socket.IPPROTO_TCP,
-                                       socket.TCP_NODELAY, True)
-        self.rfile = self.connection.makefile('rb', self.rbufsize)
-        self.wfile = self.connection.makefile('wb', self.wbufsize)
-
-    def finish(self):
-        if not self.wfile.closed:
-            self.wfile.flush()
-        self.wfile.close()
-        self.rfile.close()
-
-
-class DatagramRequestHandler(BaseRequestHandler):
-
-    # XXX Regrettably, I cannot get this working on Linux;
-    # s.recvfrom() doesn't return a meaningful client address.
-
-    """Define self.rfile and self.wfile for datagram sockets."""
-
-    def setup(self):
-        try:
-            from cStringIO import StringIO
-        except ImportError:
-            from StringIO import StringIO
-        self.packet, self.socket = self.request
-        self.rfile = StringIO(self.packet)
-        self.wfile = StringIO()
-
-    def finish(self):
-        self.socket.sendto(self.wfile.getvalue(), self.client_address)
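The mix-in composition that the module docstring above describes (a request handler class combined with ThreadingMixIn and TCPServer) looks like the following minimal sketch. It deliberately uses the stock Python 2 SocketServer module rather than the deleted _pydev_SocketServer copy, and the EchoHandler name is purely illustrative.

import SocketServer

class EchoHandler(SocketServer.StreamRequestHandler):
    def handle(self):
        # rfile/wfile are created by StreamRequestHandler.setup().
        line = self.rfile.readline()
        self.wfile.write(line)

if __name__ == '__main__':
    # ThreadingTCPServer is ThreadingMixIn + TCPServer; the mix-in must come
    # first so its process_request() override is used.
    server = SocketServer.ThreadingTCPServer(('localhost', 9999), EchoHandler)
    server.serve_forever()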
diff --git a/python/helpers/pydev/_pydev_imps/_pydev_Queue.py b/python/helpers/pydev/_pydev_imps/_pydev_Queue.py
index d351b50..52e83b0 100644
--- a/python/helpers/pydev/_pydev_imps/_pydev_Queue.py
+++ b/python/helpers/pydev/_pydev_imps/_pydev_Queue.py
@@ -1,6 +1,7 @@
 """A multi-producer, multi-consumer queue."""
 
 from _pydev_imps._pydev_time import time as _time
+from _pydev_imps import _pydev_thread
 try:
     import _pydev_threading as _threading
 except ImportError:
@@ -30,7 +31,7 @@
         # that acquire mutex must release it before returning.  mutex
         # is shared between the three conditions, so acquiring and
         # releasing the conditions also acquires and releases mutex.
-        self.mutex = _threading.Lock()
+        self.mutex = _pydev_thread.allocate_lock()
         # Notify not_empty whenever an item is added to the queue; a
         # thread waiting to get is notified then.
         self.not_empty = _threading.Condition(self.mutex)
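In CPython 2, threading.Lock is just an alias for thread.allocate_lock, so the hunk above swaps the queue's mutex factory for the one exposed by pydev's own thread shim, which (per the hunks further down) hands back the original, un-monkey-patched primitive when gevent is active; that reading of the intent is an inference from the surrounding changes. A minimal illustration with the stock Python 2 modules:

import thread
import threading

lock_a = thread.allocate_lock()   # raw interpreter lock, as the patch now uses
lock_b = threading.Lock()         # same factory as thread.allocate_lock in CPython 2

with lock_a:
    pass                          # both objects support acquire/release and 'with'
with lock_b:
    pass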
diff --git a/python/helpers/pydev/_pydev_imps/_pydev_inspect.py b/python/helpers/pydev/_pydev_imps/_pydev_inspect.py
index 5714764..5fd33d8 100644
--- a/python/helpers/pydev/_pydev_imps/_pydev_inspect.py
+++ b/python/helpers/pydev/_pydev_imps/_pydev_inspect.py
@@ -27,13 +27,7 @@
 __author__ = 'Ka-Ping Yee <ping@lfw.org>'
 __date__ = '1 Jan 2001'
 
-import sys
-import os
-import types
-import string
-import re
-import imp
-import tokenize
+import sys, os, types, string, re, imp, tokenize
 
 # ----------------------------------------------------------- type-checking
 def ismodule(object):
diff --git a/python/helpers/pydev/_pydev_imps/_pydev_select.py b/python/helpers/pydev/_pydev_imps/_pydev_select.py
index b8dad03..c031f3d 100644
--- a/python/helpers/pydev/_pydev_imps/_pydev_select.py
+++ b/python/helpers/pydev/_pydev_imps/_pydev_select.py
@@ -1 +1,9 @@
-from select import *
\ No newline at end of file
+from select import *
+
+try:
+    from gevent import monkey
+    saved = monkey.saved['select']
+    for key, val in saved.items():
+        globals()[key] = val
+except:
+    pass
\ No newline at end of file
diff --git a/python/helpers/pydev/_pydev_imps/_pydev_socket.py b/python/helpers/pydev/_pydev_imps/_pydev_socket.py
index 9e96e80..3d74e3b 100644
--- a/python/helpers/pydev/_pydev_imps/_pydev_socket.py
+++ b/python/helpers/pydev/_pydev_imps/_pydev_socket.py
@@ -1 +1,9 @@
-from socket import *
\ No newline at end of file
+from socket import *
+
+try:
+    from gevent import monkey
+    saved = monkey.saved['socket']
+    for key, val in saved.items():
+        globals()[key] = val
+except:
+    pass
\ No newline at end of file
diff --git a/python/helpers/pydev/_pydev_imps/_pydev_thread.py b/python/helpers/pydev/_pydev_imps/_pydev_thread.py
index 4d2fd5d..7b46c8e 100644
--- a/python/helpers/pydev/_pydev_imps/_pydev_thread.py
+++ b/python/helpers/pydev/_pydev_imps/_pydev_thread.py
@@ -2,3 +2,11 @@
     from thread import *
 except:
     from _thread import * #Py3k
+
+try:
+    from gevent import monkey
+    saved = monkey.saved['thread']
+    for key, val in saved.items():
+        globals()[key] = val
+except:
+    pass
diff --git a/python/helpers/pydev/_pydev_imps/_pydev_time.py b/python/helpers/pydev/_pydev_imps/_pydev_time.py
index 72705db..f53b94c 100644
--- a/python/helpers/pydev/_pydev_imps/_pydev_time.py
+++ b/python/helpers/pydev/_pydev_imps/_pydev_time.py
@@ -1 +1,9 @@
 from time import *
+
+try:
+    from gevent import monkey
+    saved = monkey.saved['time']
+    for key, val in saved.items():
+        globals()[key] = val
+except:
+    pass
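The four shim hunks above (_pydev_select, _pydev_socket, _pydev_thread, _pydev_time) all apply the same pattern: gevent.monkey records every attribute it replaces in the module-level dict monkey.saved, keyed by module name, so copying those entries back into the shim's globals gives pydev the original blocking implementations even inside a monkey-patched process. A standalone sketch of the same lookup, assuming gevent is installed and has already patched the interpreter:

from gevent import monkey
monkey.patch_all()

import socket

# monkey.saved['socket'] maps attribute names to the objects gevent replaced.
originals = monkey.saved.get('socket', {})
real_socket = originals.get('socket', socket.socket)

# real_socket is the blocking class; socket.socket is gevent's cooperative one.
print real_socket is socket.socket   # -> False once 'socket' has been patched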
diff --git a/python/helpers/pydev/_pydev_inspect.py b/python/helpers/pydev/_pydev_inspect.py
deleted file mode 100644
index 5fd33d8..0000000
--- a/python/helpers/pydev/_pydev_inspect.py
+++ /dev/null
@@ -1,788 +0,0 @@
-"""Get useful information from live Python objects.
-
-This module encapsulates the interface provided by the internal special
-attributes (func_*, co_*, im_*, tb_*, etc.) in a friendlier fashion.
-It also provides some help for examining source code and class layout.
-
-Here are some of the useful functions provided by this module:
-
-    ismodule(), isclass(), ismethod(), isfunction(), istraceback(),
-        isframe(), iscode(), isbuiltin(), isroutine() - check object types
-    getmembers() - get members of an object that satisfy a given condition
-
-    getfile(), getsourcefile(), getsource() - find an object's source code
-    getdoc(), getcomments() - get documentation on an object
-    getmodule() - determine the module that an object came from
-    getclasstree() - arrange classes so as to represent their hierarchy
-
-    getargspec(), getargvalues() - get info about function arguments
-    formatargspec(), formatargvalues() - format an argument spec
-    getouterframes(), getinnerframes() - get info about frames
-    currentframe() - get the current stack frame
-    stack(), trace() - get info about frames on the stack or in a traceback
-"""
-
-# This module is in the public domain.  No warranties.
-
-__author__ = 'Ka-Ping Yee <ping@lfw.org>'
-__date__ = '1 Jan 2001'
-
-import sys, os, types, string, re, imp, tokenize
-
-# ----------------------------------------------------------- type-checking
-def ismodule(object):
-    """Return true if the object is a module.
-
-    Module objects provide these attributes:
-        __doc__         documentation string
-        __file__        filename (missing for built-in modules)"""
-    return isinstance(object, types.ModuleType)
-
-def isclass(object):
-    """Return true if the object is a class.
-
-    Class objects provide these attributes:
-        __doc__         documentation string
-        __module__      name of module in which this class was defined"""
-    return isinstance(object, types.ClassType) or hasattr(object, '__bases__')
-
-def ismethod(object):
-    """Return true if the object is an instance method.
-
-    Instance method objects provide these attributes:
-        __doc__         documentation string
-        __name__        name with which this method was defined
-        im_class        class object in which this method belongs
-        im_func         function object containing implementation of method
-        im_self         instance to which this method is bound, or None"""
-    return isinstance(object, types.MethodType)
-
-def ismethoddescriptor(object):
-    """Return true if the object is a method descriptor.
-
-    But not if ismethod() or isclass() or isfunction() are true.
-
-    This is new in Python 2.2, and, for example, is true of int.__add__.
-    An object passing this test has a __get__ attribute but not a __set__
-    attribute, but beyond that the set of attributes varies.  __name__ is
-    usually sensible, and __doc__ often is.
-
-    Methods implemented via descriptors that also pass one of the other
-    tests return false from the ismethoddescriptor() test, simply because
-    the other tests promise more -- you can, e.g., count on having the
-    im_func attribute (etc) when an object passes ismethod()."""
-    return (hasattr(object, "__get__")
-            and not hasattr(object, "__set__") # else it's a data descriptor
-            and not ismethod(object)           # mutual exclusion
-            and not isfunction(object)
-            and not isclass(object))
-
-def isfunction(object):
-    """Return true if the object is a user-defined function.
-
-    Function objects provide these attributes:
-        __doc__         documentation string
-        __name__        name with which this function was defined
-        func_code       code object containing compiled function bytecode
-        func_defaults   tuple of any default values for arguments
-        func_doc        (same as __doc__)
-        func_globals    global namespace in which this function was defined
-        func_name       (same as __name__)"""
-    return isinstance(object, types.FunctionType)
-
-def istraceback(object):
-    """Return true if the object is a traceback.
-
-    Traceback objects provide these attributes:
-        tb_frame        frame object at this level
-        tb_lasti        index of last attempted instruction in bytecode
-        tb_lineno       current line number in Python source code
-        tb_next         next inner traceback object (called by this level)"""
-    return isinstance(object, types.TracebackType)
-
-def isframe(object):
-    """Return true if the object is a frame object.
-
-    Frame objects provide these attributes:
-        f_back          next outer frame object (this frame's caller)
-        f_builtins      built-in namespace seen by this frame
-        f_code          code object being executed in this frame
-        f_exc_traceback traceback if raised in this frame, or None
-        f_exc_type      exception type if raised in this frame, or None
-        f_exc_value     exception value if raised in this frame, or None
-        f_globals       global namespace seen by this frame
-        f_lasti         index of last attempted instruction in bytecode
-        f_lineno        current line number in Python source code
-        f_locals        local namespace seen by this frame
-        f_restricted    0 or 1 if frame is in restricted execution mode
-        f_trace         tracing function for this frame, or None"""
-    return isinstance(object, types.FrameType)
-
-def iscode(object):
-    """Return true if the object is a code object.
-
-    Code objects provide these attributes:
-        co_argcount     number of arguments (not including * or ** args)
-        co_code         string of raw compiled bytecode
-        co_consts       tuple of constants used in the bytecode
-        co_filename     name of file in which this code object was created
-        co_firstlineno  number of first line in Python source code
-        co_flags        bitmap: 1=optimized | 2=newlocals | 4=*arg | 8=**arg
-        co_lnotab       encoded mapping of line numbers to bytecode indices
-        co_name         name with which this code object was defined
-        co_names        tuple of names of local variables
-        co_nlocals      number of local variables
-        co_stacksize    virtual machine stack space required
-        co_varnames     tuple of names of arguments and local variables"""
-    return isinstance(object, types.CodeType)
-
-def isbuiltin(object):
-    """Return true if the object is a built-in function or method.
-
-    Built-in functions and methods provide these attributes:
-        __doc__         documentation string
-        __name__        original name of this function or method
-        __self__        instance to which a method is bound, or None"""
-    return isinstance(object, types.BuiltinFunctionType)
-
-def isroutine(object):
-    """Return true if the object is any kind of function or method."""
-    return (isbuiltin(object)
-            or isfunction(object)
-            or ismethod(object)
-            or ismethoddescriptor(object))
-
-def getmembers(object, predicate=None):
-    """Return all members of an object as (name, value) pairs sorted by name.
-    Optionally, only return members that satisfy a given predicate."""
-    results = []
-    for key in dir(object):
-        value = getattr(object, key)
-        if not predicate or predicate(value):
-            results.append((key, value))
-    results.sort()
-    return results
-
-def classify_class_attrs(cls):
-    """Return list of attribute-descriptor tuples.
-
-    For each name in dir(cls), the return list contains a 4-tuple
-    with these elements:
-
-        0. The name (a string).
-
-        1. The kind of attribute this is, one of these strings:
-               'class method'    created via classmethod()
-               'static method'   created via staticmethod()
-               'property'        created via property()
-               'method'          any other flavor of method
-               'data'            not a method
-
-        2. The class which defined this attribute (a class).
-
-        3. The object as obtained directly from the defining class's
-           __dict__, not via getattr.  This is especially important for
-           data attributes:  C.data is just a data object, but
-           C.__dict__['data'] may be a data descriptor with additional
-           info, like a __doc__ string.
-    """
-
-    mro = getmro(cls)
-    names = dir(cls)
-    result = []
-    for name in names:
-        # Get the object associated with the name.
-        # Getting an obj from the __dict__ sometimes reveals more than
-        # using getattr.  Static and class methods are dramatic examples.
-        if name in cls.__dict__:
-            obj = cls.__dict__[name]
-        else:
-            obj = getattr(cls, name)
-
-        # Figure out where it was defined.
-        homecls = getattr(obj, "__objclass__", None)
-        if homecls is None:
-            # search the dicts.
-            for base in mro:
-                if name in base.__dict__:
-                    homecls = base
-                    break
-
-        # Get the object again, in order to get it from the defining
-        # __dict__ instead of via getattr (if possible).
-        if homecls is not None and name in homecls.__dict__:
-            obj = homecls.__dict__[name]
-
-        # Also get the object via getattr.
-        obj_via_getattr = getattr(cls, name)
-
-        # Classify the object.
-        if isinstance(obj, staticmethod):
-            kind = "static method"
-        elif isinstance(obj, classmethod):
-            kind = "class method"
-        elif isinstance(obj, property):
-            kind = "property"
-        elif (ismethod(obj_via_getattr) or
-              ismethoddescriptor(obj_via_getattr)):
-            kind = "method"
-        else:
-            kind = "data"
-
-        result.append((name, kind, homecls, obj))
-
-    return result
-
-# ----------------------------------------------------------- class helpers
-def _searchbases(cls, accum):
-    # Simulate the "classic class" search order.
-    if cls in accum:
-        return
-    accum.append(cls)
-    for base in cls.__bases__:
-        _searchbases(base, accum)
-
-def getmro(cls):
-    "Return tuple of base classes (including cls) in method resolution order."
-    if hasattr(cls, "__mro__"):
-        return cls.__mro__
-    else:
-        result = []
-        _searchbases(cls, result)
-        return tuple(result)
-
-# -------------------------------------------------- source code extraction
-def indentsize(line):
-    """Return the indent size, in spaces, at the start of a line of text."""
-    expline = string.expandtabs(line)
-    return len(expline) - len(string.lstrip(expline))
-
-def getdoc(object):
-    """Get the documentation string for an object.
-
-    All tabs are expanded to spaces.  To clean up docstrings that are
-    indented to line up with blocks of code, any whitespace that can be
-    uniformly removed from the second line onwards is removed."""
-    try:
-        doc = object.__doc__
-    except AttributeError:
-        return None
-    if not isinstance(doc, (str, unicode)):
-        return None
-    try:
-        lines = string.split(string.expandtabs(doc), '\n')
-    except UnicodeError:
-        return None
-    else:
-        margin = None
-        for line in lines[1:]:
-            content = len(string.lstrip(line))
-            if not content: continue
-            indent = len(line) - content
-            if margin is None: margin = indent
-            else: margin = min(margin, indent)
-        if margin is not None:
-            for i in range(1, len(lines)): lines[i] = lines[i][margin:]
-        return string.join(lines, '\n')
-
-def getfile(object):
-    """Work out which source or compiled file an object was defined in."""
-    if ismodule(object):
-        if hasattr(object, '__file__'):
-            return object.__file__
-        raise TypeError, 'arg is a built-in module'
-    if isclass(object):
-        object = sys.modules.get(object.__module__)
-        if hasattr(object, '__file__'):
-            return object.__file__
-        raise TypeError, 'arg is a built-in class'
-    if ismethod(object):
-        object = object.im_func
-    if isfunction(object):
-        object = object.func_code
-    if istraceback(object):
-        object = object.tb_frame
-    if isframe(object):
-        object = object.f_code
-    if iscode(object):
-        return object.co_filename
-    raise TypeError, 'arg is not a module, class, method, ' \
-                     'function, traceback, frame, or code object'
-
-def getmoduleinfo(path):
-    """Get the module name, suffix, mode, and module type for a given file."""
-    filename = os.path.basename(path)
-    suffixes = map(lambda (suffix, mode, mtype):
-                   (-len(suffix), suffix, mode, mtype), imp.get_suffixes())
-    suffixes.sort() # try longest suffixes first, in case they overlap
-    for neglen, suffix, mode, mtype in suffixes:
-        if filename[neglen:] == suffix:
-            return filename[:neglen], suffix, mode, mtype
-
-def getmodulename(path):
-    """Return the module name for a given file, or None."""
-    info = getmoduleinfo(path)
-    if info: return info[0]
-
-def getsourcefile(object):
-    """Return the Python source file an object was defined in, if it exists."""
-    filename = getfile(object)
-    if string.lower(filename[-4:]) in ['.pyc', '.pyo']:
-        filename = filename[:-4] + '.py'
-    for suffix, mode, kind in imp.get_suffixes():
-        if 'b' in mode and string.lower(filename[-len(suffix):]) == suffix:
-            # Looks like a binary file.  We want to only return a text file.
-            return None
-    if os.path.exists(filename):
-        return filename
-
-def getabsfile(object):
-    """Return an absolute path to the source or compiled file for an object.
-
-    The idea is for each object to have a unique origin, so this routine
-    normalizes the result as much as possible."""
-    return os.path.normcase(
-        os.path.abspath(getsourcefile(object) or getfile(object)))
-
-modulesbyfile = {}
-
-def getmodule(object):
-    """Return the module an object was defined in, or None if not found."""
-    if ismodule(object):
-        return object
-    if isclass(object):
-        return sys.modules.get(object.__module__)
-    try:
-        file = getabsfile(object)
-    except TypeError:
-        return None
-    if modulesbyfile.has_key(file):
-        return sys.modules[modulesbyfile[file]]
-    for module in sys.modules.values():
-        if hasattr(module, '__file__'):
-            modulesbyfile[getabsfile(module)] = module.__name__
-    if modulesbyfile.has_key(file):
-        return sys.modules[modulesbyfile[file]]
-    main = sys.modules['__main__']
-    if hasattr(main, object.__name__):
-        mainobject = getattr(main, object.__name__)
-        if mainobject is object:
-            return main
-    builtin = sys.modules['__builtin__']
-    if hasattr(builtin, object.__name__):
-        builtinobject = getattr(builtin, object.__name__)
-        if builtinobject is object:
-            return builtin
-
-def findsource(object):
-    """Return the entire source file and starting line number for an object.
-
-    The argument may be a module, class, method, function, traceback, frame,
-    or code object.  The source code is returned as a list of all the lines
-    in the file and the line number indexes a line in that list.  An IOError
-    is raised if the source code cannot be retrieved."""
-    try:
-        file = open(getsourcefile(object))
-    except (TypeError, IOError):
-        raise IOError, 'could not get source code'
-    lines = file.readlines()
-    file.close()
-
-    if ismodule(object):
-        return lines, 0
-
-    if isclass(object):
-        name = object.__name__
-        pat = re.compile(r'^\s*class\s*' + name + r'\b')
-        for i in range(len(lines)):
-            if pat.match(lines[i]): return lines, i
-        else: raise IOError, 'could not find class definition'
-
-    if ismethod(object):
-        object = object.im_func
-    if isfunction(object):
-        object = object.func_code
-    if istraceback(object):
-        object = object.tb_frame
-    if isframe(object):
-        object = object.f_code
-    if iscode(object):
-        if not hasattr(object, 'co_firstlineno'):
-            raise IOError, 'could not find function definition'
-        lnum = object.co_firstlineno - 1
-        pat = re.compile(r'^(\s*def\s)|(.*\slambda(:|\s))')
-        while lnum > 0:
-            if pat.match(lines[lnum]): break
-            lnum = lnum - 1
-        return lines, lnum
-    raise IOError, 'could not find code object'
-
-def getcomments(object):
-    """Get lines of comments immediately preceding an object's source code."""
-    try: lines, lnum = findsource(object)
-    except IOError: return None
-
-    if ismodule(object):
-        # Look for a comment block at the top of the file.
-        start = 0
-        if lines and lines[0][:2] == '#!': start = 1
-        while start < len(lines) and string.strip(lines[start]) in ['', '#']:
-            start = start + 1
-        if start < len(lines) and lines[start][:1] == '#':
-            comments = []
-            end = start
-            while end < len(lines) and lines[end][:1] == '#':
-                comments.append(string.expandtabs(lines[end]))
-                end = end + 1
-            return string.join(comments, '')
-
-    # Look for a preceding block of comments at the same indentation.
-    elif lnum > 0:
-        indent = indentsize(lines[lnum])
-        end = lnum - 1
-        if end >= 0 and string.lstrip(lines[end])[:1] == '#' and \
-            indentsize(lines[end]) == indent:
-            comments = [string.lstrip(string.expandtabs(lines[end]))]
-            if end > 0:
-                end = end - 1
-                comment = string.lstrip(string.expandtabs(lines[end]))
-                while comment[:1] == '#' and indentsize(lines[end]) == indent:
-                    comments[:0] = [comment]
-                    end = end - 1
-                    if end < 0: break
-                    comment = string.lstrip(string.expandtabs(lines[end]))
-            while comments and string.strip(comments[0]) == '#':
-                comments[:1] = []
-            while comments and string.strip(comments[-1]) == '#':
-                comments[-1:] = []
-            return string.join(comments, '')
-
-class ListReader:
-    """Provide a readline() method to return lines from a list of strings."""
-    def __init__(self, lines):
-        self.lines = lines
-        self.index = 0
-
-    def readline(self):
-        i = self.index
-        if i < len(self.lines):
-            self.index = i + 1
-            return self.lines[i]
-        else: return ''
-
-class EndOfBlock(Exception): pass
-
-class BlockFinder:
-    """Provide a tokeneater() method to detect the end of a code block."""
-    def __init__(self):
-        self.indent = 0
-        self.started = 0
-        self.last = 0
-
-    def tokeneater(self, type, token, (srow, scol), (erow, ecol), line):
-        if not self.started:
-            if type == tokenize.NAME: self.started = 1
-        elif type == tokenize.NEWLINE:
-            self.last = srow
-        elif type == tokenize.INDENT:
-            self.indent = self.indent + 1
-        elif type == tokenize.DEDENT:
-            self.indent = self.indent - 1
-            if self.indent == 0: raise EndOfBlock, self.last
-        elif type == tokenize.NAME and scol == 0:
-            raise EndOfBlock, self.last
-
-def getblock(lines):
-    """Extract the block of code at the top of the given list of lines."""
-    try:
-        tokenize.tokenize(ListReader(lines).readline, BlockFinder().tokeneater)
-    except EndOfBlock, eob:
-        return lines[:eob.args[0]]
-    # Fooling the indent/dedent logic implies a one-line definition
-    return lines[:1]
-
-def getsourcelines(object):
-    """Return a list of source lines and starting line number for an object.
-
-    The argument may be a module, class, method, function, traceback, frame,
-    or code object.  The source code is returned as a list of the lines
-    corresponding to the object and the line number indicates where in the
-    original source file the first line of code was found.  An IOError is
-    raised if the source code cannot be retrieved."""
-    lines, lnum = findsource(object)
-
-    if ismodule(object): return lines, 0
-    else: return getblock(lines[lnum:]), lnum + 1
-
-def getsource(object):
-    """Return the text of the source code for an object.
-
-    The argument may be a module, class, method, function, traceback, frame,
-    or code object.  The source code is returned as a single string.  An
-    IOError is raised if the source code cannot be retrieved."""
-    lines, lnum = getsourcelines(object)
-    return string.join(lines, '')
-
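The source-retrieval helpers above mirror the standard inspect module, which keeps the same API; a minimal sketch of how they are typically used (shown with stdlib inspect, since this bundled copy is being removed):

    import inspect

    def demo(x):
        return x * 2

    lines, start = inspect.getsourcelines(demo)   # source lines + 1-based starting line
    print inspect.getsource(demo)                 # the same block joined into one string
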
-# --------------------------------------------------- class tree extraction
-def walktree(classes, children, parent):
-    """Recursive helper function for getclasstree()."""
-    results = []
-    classes.sort(lambda a, b: cmp(a.__name__, b.__name__))
-    for c in classes:
-        results.append((c, c.__bases__))
-        if children.has_key(c):
-            results.append(walktree(children[c], children, c))
-    return results
-
-def getclasstree(classes, unique=0):
-    """Arrange the given list of classes into a hierarchy of nested lists.
-
-    Where a nested list appears, it contains classes derived from the class
-    whose entry immediately precedes the list.  Each entry is a 2-tuple
-    containing a class and a tuple of its base classes.  If the 'unique'
-    argument is true, exactly one entry appears in the returned structure
-    for each class in the given list.  Otherwise, classes using multiple
-    inheritance and their descendants will appear multiple times."""
-    children = {}
-    roots = []
-    for c in classes:
-        if c.__bases__:
-            for parent in c.__bases__:
-                if not children.has_key(parent):
-                    children[parent] = []
-                children[parent].append(c)
-                if unique and parent in classes: break
-        elif c not in roots:
-            roots.append(c)
-    for parent in children.keys():
-        if parent not in classes:
-            roots.append(parent)
-    return walktree(roots, children, None)
-
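The nested-list shape that getclasstree() describes is easiest to see on a small hierarchy; a sketch using the identically behaving stdlib inspect.getclasstree, with old-style classes to match the era of this copy:

    import inspect

    class A: pass
    class B(A): pass
    class C(A): pass

    print inspect.getclasstree([A, B, C])
    # structure: [(A, ()), [(B, (A,)), (C, (A,))]]
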
-# ------------------------------------------------ argument list extraction
-# These constants are from Python's compile.h.
-CO_OPTIMIZED, CO_NEWLOCALS, CO_VARARGS, CO_VARKEYWORDS = 1, 2, 4, 8
-
-def getargs(co):
-    """Get information about the arguments accepted by a code object.
-
-    Three things are returned: (args, varargs, varkw), where 'args' is
-    a list of argument names (possibly containing nested lists), and
-    'varargs' and 'varkw' are the names of the * and ** arguments or None."""
-    if not iscode(co): raise TypeError, 'arg is not a code object'
-
-    nargs = co.co_argcount
-    names = co.co_varnames
-    args = list(names[:nargs])
-    step = 0
-
-    # The following acrobatics are for anonymous (tuple) arguments.
-    if not sys.platform.startswith('java'):#Jython doesn't have co_code
-        code = co.co_code
-        import dis
-        for i in range(nargs):
-            if args[i][:1] in ['', '.']:
-                stack, remain, count = [], [], []
-                while step < len(code):
-                    op = ord(code[step])
-                    step = step + 1
-                    if op >= dis.HAVE_ARGUMENT:
-                        opname = dis.opname[op]
-                        value = ord(code[step]) + ord(code[step + 1]) * 256
-                        step = step + 2
-                        if opname in ['UNPACK_TUPLE', 'UNPACK_SEQUENCE']:
-                            remain.append(value)
-                            count.append(value)
-                        elif opname == 'STORE_FAST':
-                            stack.append(names[value])
-                            remain[-1] = remain[-1] - 1
-                            while remain[-1] == 0:
-                                remain.pop()
-                                size = count.pop()
-                                stack[-size:] = [stack[-size:]]
-                                if not remain: break
-                                remain[-1] = remain[-1] - 1
-                            if not remain: break
-                args[i] = stack[0]
-
-    varargs = None
-    if co.co_flags & CO_VARARGS:
-        varargs = co.co_varnames[nargs]
-        nargs = nargs + 1
-    varkw = None
-    if co.co_flags & CO_VARKEYWORDS:
-        varkw = co.co_varnames[nargs]
-    return args, varargs, varkw
-
-def getargspec(func):
-    """Get the names and default values of a function's arguments.
-
-    A tuple of four things is returned: (args, varargs, varkw, defaults).
-    'args' is a list of the argument names (it may contain nested lists).
-    'varargs' and 'varkw' are the names of the * and ** arguments or None.
-    'defaults' is an n-tuple of the default values of the last n arguments."""
-    if ismethod(func):
-        func = func.im_func
-    if not isfunction(func): raise TypeError, 'arg is not a Python function'
-    args, varargs, varkw = getargs(func.func_code)
-    return args, varargs, varkw, func.func_defaults
-
-def getargvalues(frame):
-    """Get information about arguments passed into a particular frame.
-
-    A tuple of four things is returned: (args, varargs, varkw, locals).
-    'args' is a list of the argument names (it may contain nested lists).
-    'varargs' and 'varkw' are the names of the * and ** arguments or None.
-    'locals' is the locals dictionary of the given frame."""
-    args, varargs, varkw = getargs(frame.f_code)
-    return args, varargs, varkw, frame.f_locals
-
-def joinseq(seq):
-    if len(seq) == 1:
-        return '(' + seq[0] + ',)'
-    else:
-        return '(' + string.join(seq, ', ') + ')'
-
-def strseq(object, convert, join=joinseq):
-    """Recursively walk a sequence, stringifying each element."""
-    if type(object) in [types.ListType, types.TupleType]:
-        return join(map(lambda o, c=convert, j=join: strseq(o, c, j), object))
-    else:
-        return convert(object)
-
-def formatargspec(args, varargs=None, varkw=None, defaults=None,
-                  formatarg=str,
-                  formatvarargs=lambda name: '*' + name,
-                  formatvarkw=lambda name: '**' + name,
-                  formatvalue=lambda value: '=' + repr(value),
-                  join=joinseq):
-    """Format an argument spec from the 4 values returned by getargspec.
-
-    The first four arguments are (args, varargs, varkw, defaults).  The
-    other four arguments are the corresponding optional formatting functions
-    that are called to turn names and values into strings.  The ninth
-    argument is an optional function to format the sequence of arguments."""
-    specs = []
-    if defaults:
-        firstdefault = len(args) - len(defaults)
-    for i in range(len(args)):
-        spec = strseq(args[i], formatarg, join)
-        if defaults and i >= firstdefault:
-            spec = spec + formatvalue(defaults[i - firstdefault])
-        specs.append(spec)
-    if varargs:
-        specs.append(formatvarargs(varargs))
-    if varkw:
-        specs.append(formatvarkw(varkw))
-    return '(' + string.join(specs, ', ') + ')'
-
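A short sketch of the getargspec()/formatargspec() round trip, using the stdlib inspect equivalents of the functions above:

    import inspect

    def f(a, b=1, *args, **kw):
        pass

    args, varargs, varkw, defaults = inspect.getargspec(f)
    # args=['a', 'b'], varargs='args', varkw='kw', defaults=(1,)
    print inspect.formatargspec(args, varargs, varkw, defaults)
    # (a, b=1, *args, **kw)
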
-def formatargvalues(args, varargs, varkw, locals,
-                    formatarg=str,
-                    formatvarargs=lambda name: '*' + name,
-                    formatvarkw=lambda name: '**' + name,
-                    formatvalue=lambda value: '=' + repr(value),
-                    join=joinseq):
-    """Format an argument spec from the 4 values returned by getargvalues.
-
-    The first four arguments are (args, varargs, varkw, locals).  The
-    next four arguments are the corresponding optional formatting functions
-    that are called to turn names and values into strings.  The ninth
-    argument is an optional function to format the sequence of arguments."""
-    def convert(name, locals=locals,
-                formatarg=formatarg, formatvalue=formatvalue):
-        return formatarg(name) + formatvalue(locals[name])
-    specs = []
-    for i in range(len(args)):
-        specs.append(strseq(args[i], convert, join))
-    if varargs:
-        specs.append(formatvarargs(varargs) + formatvalue(locals[varargs]))
-    if varkw:
-        specs.append(formatvarkw(varkw) + formatvalue(locals[varkw]))
-    return '(' + string.join(specs, ', ') + ')'
-
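getargvalues()/formatargvalues() do the same for a live frame; a sketch with the stdlib equivalents:

    import inspect
    import sys

    def g(a, b=2):
        frame = sys._getframe()
        return inspect.formatargvalues(*inspect.getargvalues(frame))

    print g(1)   # (a=1, b=2)
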
-# -------------------------------------------------- stack frame extraction
-def getframeinfo(frame, context=1):
-    """Get information about a frame or traceback object.
-
-    A tuple of five things is returned: the filename, the line number of
-    the current line, the function name, a list of lines of context from
-    the source code, and the index of the current line within that list.
-    The optional second argument specifies the number of lines of context
-    to return, which are centered around the current line."""
-    raise NotImplementedError
-#    if istraceback(frame):
-#        frame = frame.tb_frame
-#    if not isframe(frame):
-#        raise TypeError, 'arg is not a frame or traceback object'
-#
-#    filename = getsourcefile(frame)
-#    lineno = getlineno(frame)
-#    if context > 0:
-#        start = lineno - 1 - context//2
-#        try:
-#            lines, lnum = findsource(frame)
-#        except IOError:
-#            lines = index = None
-#        else:
-#            start = max(start, 1)
-#            start = min(start, len(lines) - context)
-#            lines = lines[start:start+context]
-#            index = lineno - 1 - start
-#    else:
-#        lines = index = None
-#
-#    return (filename, lineno, frame.f_code.co_name, lines, index)
-
-def getlineno(frame):
-    """Get the line number from a frame object, allowing for optimization."""
-    # Written by Marc-André Lemburg; revised by Jim Hugunin and Fredrik Lundh.
-    lineno = frame.f_lineno
-    code = frame.f_code
-    if hasattr(code, 'co_lnotab'):
-        table = code.co_lnotab
-        lineno = code.co_firstlineno
-        addr = 0
-        for i in range(0, len(table), 2):
-            addr = addr + ord(table[i])
-            if addr > frame.f_lasti: break
-            lineno = lineno + ord(table[i + 1])
-    return lineno
-
-def getouterframes(frame, context=1):
-    """Get a list of records for a frame and all higher (calling) frames.
-
-    Each record contains a frame object, filename, line number, function
-    name, a list of lines of context, and index within the context."""
-    framelist = []
-    while frame:
-        framelist.append((frame,) + getframeinfo(frame, context))
-        frame = frame.f_back
-    return framelist
-
-def getinnerframes(tb, context=1):
-    """Get a list of records for a traceback's frame and all lower frames.
-
-    Each record contains a frame object, filename, line number, function
-    name, a list of lines of context, and index within the context."""
-    framelist = []
-    while tb:
-        framelist.append((tb.tb_frame,) + getframeinfo(tb, context))
-        tb = tb.tb_next
-    return framelist
-
-def currentframe():
-    """Return the frame object for the caller's stack frame."""
-    try:
-        raise 'catch me'
-    except:
-        return sys.exc_traceback.tb_frame.f_back #@UndefinedVariable
-
-if hasattr(sys, '_getframe'): currentframe = sys._getframe
-
-def stack(context=1):
-    """Return a list of records for the stack above the caller's frame."""
-    return getouterframes(currentframe().f_back, context)
-
-def trace(context=1):
-    """Return a list of records for the stack below the current exception."""
-    return getinnerframes(sys.exc_traceback, context) #@UndefinedVariable
diff --git a/python/helpers/pydev/_pydev_jy_imports_tipper.py b/python/helpers/pydev/_pydev_jy_imports_tipper.py
index 1691e3e..db1d7f8 100644
--- a/python/helpers/pydev/_pydev_jy_imports_tipper.py
+++ b/python/helpers/pydev/_pydev_jy_imports_tipper.py
@@ -1,5 +1,7 @@
 import StringIO
 import traceback
+from java.lang import StringBuffer #@UnresolvedImport
+from java.lang import String #@UnresolvedImport
 import java.lang #@UnresolvedImport
 import sys
 from _pydev_tipper_common import DoFind
@@ -17,6 +19,7 @@
 from org.python.core import PyReflectedFunction #@UnresolvedImport
 
 from org.python import core #@UnresolvedImport
+from org.python.core import PyClass #@UnresolvedImport
 
 try:
     xrange
diff --git a/python/helpers/pydev/_pydev_select.py b/python/helpers/pydev/_pydev_select.py
deleted file mode 100644
index b8dad03..0000000
--- a/python/helpers/pydev/_pydev_select.py
+++ /dev/null
@@ -1 +0,0 @@
-from select import *
\ No newline at end of file
diff --git a/python/helpers/pydev/_pydev_socket.py b/python/helpers/pydev/_pydev_socket.py
deleted file mode 100644
index 9e96e80..0000000
--- a/python/helpers/pydev/_pydev_socket.py
+++ /dev/null
@@ -1 +0,0 @@
-from socket import *
\ No newline at end of file
diff --git a/python/helpers/pydev/_pydev_threading.py b/python/helpers/pydev/_pydev_threading.py
index d7bfadf..62b300b 100644
--- a/python/helpers/pydev/_pydev_threading.py
+++ b/python/helpers/pydev/_pydev_threading.py
@@ -1,978 +1,14 @@
-"""Thread module emulating a subset of Java's threading model."""
-
-import sys as _sys
-
-from _pydev_imps import _pydev_thread as thread
-import warnings
-
-from _pydev_imps._pydev_time import time as _time, sleep as _sleep
-from traceback import format_exc as _format_exc
-
-# Note regarding PEP 8 compliant aliases
-#  This threading model was originally inspired by Java, and inherited
-# the convention of camelCase function and method names from that
-# language. While those names are not in any imminent danger of being
-# deprecated, starting with Python 2.6, the module now provides a
-# PEP 8 compliant alias for any such method name.
-# Using the new PEP 8 compliant names also facilitates substitution
-# with the multiprocessing module, which doesn't provide the old
-# Java inspired names.
-
-
-# Rename some stuff so "from threading import *" is safe
-__all__ = ['activeCount', 'active_count', 'Condition', 'currentThread',
-           'current_thread', 'enumerate', 'Event',
-           'Lock', 'RLock', 'Semaphore', 'BoundedSemaphore', 'Thread',
-           'Timer', 'setprofile', 'settrace', 'local', 'stack_size']
-
-_start_new_thread = thread.start_new_thread
-_allocate_lock = thread.allocate_lock
-_get_ident = thread.get_ident
-ThreadError = thread.error
-del thread
-
-
-# sys.exc_clear is used to work around the fact that except blocks
-# don't fully clear the exception until 3.0.
-warnings.filterwarnings('ignore', category=DeprecationWarning,
-                        module='threading', message='sys.exc_clear')
-
-# Debug support (adapted from ihooks.py).
-# All the major classes here derive from _Verbose.  We force that to
-# be a new-style class so that all the major classes here are new-style.
-# This helps debugging (type(instance) is more revealing for instances
-# of new-style classes).
-
-_VERBOSE = False
-
-if __debug__:
-
-    class _Verbose(object):
-
-        def __init__(self, verbose=None):
-            if verbose is None:
-                verbose = _VERBOSE
-            self.__verbose = verbose
-
-        def _note(self, format, *args):
-            if self.__verbose:
-                format = format % args
-                # Issue #4188: calling current_thread() can incur an infinite
-                # recursion if it has to create a DummyThread on the fly.
-                ident = _get_ident()
-                try:
-                    name = _active[ident].name
-                except KeyError:
-                    name = "<OS thread %d>" % ident
-                format = "%s: %s\n" % (name, format)
-                _sys.stderr.write(format)
-
-else:
-    # Disable this when using "python -O"
-    class _Verbose(object):
-        def __init__(self, verbose=None):
-            pass
-        def _note(self, *args):
-            pass
-
-# Support for profile and trace hooks
-
-_profile_hook = None
-_trace_hook = None
-
-def setprofile(func):
-    global _profile_hook
-    _profile_hook = func
-
-def settrace(func):
-    global _trace_hook
-    _trace_hook = func
-
-# Synchronization classes
-
-Lock = _allocate_lock
-
-def RLock(*args, **kwargs):
-    return _RLock(*args, **kwargs)
-
-class _RLock(_Verbose):
-
-    def __init__(self, verbose=None):
-        _Verbose.__init__(self, verbose)
-        self.__block = _allocate_lock()
-        self.__owner = None
-        self.__count = 0
-
-    def __repr__(self):
-        owner = self.__owner
-        try:
-            owner = _active[owner].name
-        except KeyError:
-            pass
-        return "<%s owner=%r count=%d>" % (
-                self.__class__.__name__, owner, self.__count)
-
-    def acquire(self, blocking=1):
-        me = _get_ident()
-        if self.__owner == me:
-            self.__count = self.__count + 1
-            if __debug__:
-                self._note("%s.acquire(%s): recursive success", self, blocking)
-            return 1
-        rc = self.__block.acquire(blocking)
-        if rc:
-            self.__owner = me
-            self.__count = 1
-            if __debug__:
-                self._note("%s.acquire(%s): initial success", self, blocking)
-        else:
-            if __debug__:
-                self._note("%s.acquire(%s): failure", self, blocking)
-        return rc
-
-    __enter__ = acquire
-
-    def release(self):
-        if self.__owner != _get_ident():
-            raise RuntimeError("cannot release un-acquired lock")
-        self.__count = count = self.__count - 1
-        if not count:
-            self.__owner = None
-            self.__block.release()
-            if __debug__:
-                self._note("%s.release(): final release", self)
-        else:
-            if __debug__:
-                self._note("%s.release(): non-final release", self)
-
-    def __exit__(self, t, v, tb):
-        self.release()
-
-    # Internal methods used by condition variables
-
-    def _acquire_restore(self, count_owner):
-        count, owner = count_owner
-        self.__block.acquire()
-        self.__count = count
-        self.__owner = owner
-        if __debug__:
-            self._note("%s._acquire_restore()", self)
-
-    def _release_save(self):
-        if __debug__:
-            self._note("%s._release_save()", self)
-        count = self.__count
-        self.__count = 0
-        owner = self.__owner
-        self.__owner = None
-        self.__block.release()
-        return (count, owner)
-
-    def _is_owned(self):
-        return self.__owner == _get_ident()
-
-
-def Condition(*args, **kwargs):
-    return _Condition(*args, **kwargs)
-
-class _Condition(_Verbose):
-
-    def __init__(self, lock=None, verbose=None):
-        _Verbose.__init__(self, verbose)
-        if lock is None:
-            lock = RLock()
-        self.__lock = lock
-        # Export the lock's acquire() and release() methods
-        self.acquire = lock.acquire
-        self.release = lock.release
-        # If the lock defines _release_save() and/or _acquire_restore(),
-        # these override the default implementations (which just call
-        # release() and acquire() on the lock).  Ditto for _is_owned().
-        try:
-            self._release_save = lock._release_save
-        except AttributeError:
-            pass
-        try:
-            self._acquire_restore = lock._acquire_restore
-        except AttributeError:
-            pass
-        try:
-            self._is_owned = lock._is_owned
-        except AttributeError:
-            pass
-        self.__waiters = []
-
-    def __enter__(self):
-        return self.__lock.__enter__()
-
-    def __exit__(self, *args):
-        return self.__lock.__exit__(*args)
-
-    def __repr__(self):
-        return "<Condition(%s, %d)>" % (self.__lock, len(self.__waiters))
-
-    def _release_save(self):
-        self.__lock.release()           # No state to save
-
-    def _acquire_restore(self, x):
-        self.__lock.acquire()           # Ignore saved state
-
-    def _is_owned(self):
-        # Return True if lock is owned by current_thread.
-        # This method is called only if __lock doesn't have _is_owned().
-        if self.__lock.acquire(0):
-            self.__lock.release()
-            return False
-        else:
-            return True
-
-    def wait(self, timeout=None):
-        if not self._is_owned():
-            raise RuntimeError("cannot wait on un-acquired lock")
-        waiter = _allocate_lock()
-        waiter.acquire()
-        self.__waiters.append(waiter)
-        saved_state = self._release_save()
-        try:    # restore state no matter what (e.g., KeyboardInterrupt)
-            if timeout is None:
-                waiter.acquire()
-                if __debug__:
-                    self._note("%s.wait(): got it", self)
-            else:
-                # Balancing act:  We can't afford a pure busy loop, so we
-                # have to sleep; but if we sleep the whole timeout time,
-                # we'll be unresponsive.  The scheme here sleeps very
-                # little at first, longer as time goes on, but never longer
-                # than 20 times per second (or the timeout time remaining).
-                endtime = _time() + timeout
-                delay = 0.0005 # 500 us -> initial delay of 1 ms
-                while True:
-                    gotit = waiter.acquire(0)
-                    if gotit:
-                        break
-                    remaining = endtime - _time()
-                    if remaining <= 0:
-                        break
-                    delay = min(delay * 2, remaining, .05)
-                    _sleep(delay)
-                if not gotit:
-                    if __debug__:
-                        self._note("%s.wait(%s): timed out", self, timeout)
-                    try:
-                        self.__waiters.remove(waiter)
-                    except ValueError:
-                        pass
-                else:
-                    if __debug__:
-                        self._note("%s.wait(%s): got it", self, timeout)
-        finally:
-            self._acquire_restore(saved_state)
-
-    def notify(self, n=1):
-        if not self._is_owned():
-            raise RuntimeError("cannot notify on un-acquired lock")
-        __waiters = self.__waiters
-        waiters = __waiters[:n]
-        if not waiters:
-            if __debug__:
-                self._note("%s.notify(): no waiters", self)
-            return
-        self._note("%s.notify(): notifying %d waiter%s", self, n,
-                   n!=1 and "s" or "")
-        for waiter in waiters:
-            waiter.release()
-            try:
-                __waiters.remove(waiter)
-            except ValueError:
-                pass
-
-    def notifyAll(self):
-        self.notify(len(self.__waiters))
-
-    notify_all = notifyAll
-
-
-def Semaphore(*args, **kwargs):
-    return _Semaphore(*args, **kwargs)
-
-class _Semaphore(_Verbose):
-
-    # After Tim Peters' semaphore class, but not quite the same (no maximum)
-
-    def __init__(self, value=1, verbose=None):
-        if value < 0:
-            raise ValueError("semaphore initial value must be >= 0")
-        _Verbose.__init__(self, verbose)
-        self.__cond = Condition(Lock())
-        self.__value = value
-
-    def acquire(self, blocking=1):
-        rc = False
-        self.__cond.acquire()
-        while self.__value == 0:
-            if not blocking:
-                break
-            if __debug__:
-                self._note("%s.acquire(%s): blocked waiting, value=%s",
-                           self, blocking, self.__value)
-            self.__cond.wait()
-        else:
-            self.__value = self.__value - 1
-            if __debug__:
-                self._note("%s.acquire: success, value=%s",
-                           self, self.__value)
-            rc = True
-        self.__cond.release()
-        return rc
-
-    __enter__ = acquire
-
-    def release(self):
-        self.__cond.acquire()
-        self.__value = self.__value + 1
-        if __debug__:
-            self._note("%s.release: success, value=%s",
-                       self, self.__value)
-        self.__cond.notify()
-        self.__cond.release()
-
-    def __exit__(self, t, v, tb):
-        self.release()
-
-
-def BoundedSemaphore(*args, **kwargs):
-    return _BoundedSemaphore(*args, **kwargs)
-
-class _BoundedSemaphore(_Semaphore):
-    """Semaphore that checks that # releases is <= # acquires"""
-    def __init__(self, value=1, verbose=None):
-        _Semaphore.__init__(self, value, verbose)
-        self._initial_value = value
-
-    def release(self):
-        if self._Semaphore__value >= self._initial_value:
-            raise ValueError, "Semaphore released too many times"
-        return _Semaphore.release(self)
-
-
-def Event(*args, **kwargs):
-    return _Event(*args, **kwargs)
-
-class _Event(_Verbose):
-
-    # After Tim Peters' event class (without is_posted())
-
-    def __init__(self, verbose=None):
-        _Verbose.__init__(self, verbose)
-        self.__cond = Condition(Lock())
-        self.__flag = False
-
-    def _reset_internal_locks(self):
-        # private!  called by Thread._reset_internal_locks by _after_fork()
-        self.__cond.__init__()
-
-    def isSet(self):
-        return self.__flag
-
-    is_set = isSet
-
-    def set(self):
-        self.__cond.acquire()
-        try:
-            self.__flag = True
-            self.__cond.notify_all()
-        finally:
-            self.__cond.release()
-
-    def clear(self):
-        self.__cond.acquire()
-        try:
-            self.__flag = False
-        finally:
-            self.__cond.release()
-
-    def wait(self, timeout=None):
-        self.__cond.acquire()
-        try:
-            if not self.__flag:
-                self.__cond.wait(timeout)
-            return self.__flag
-        finally:
-            self.__cond.release()
-
-# Helper to generate new thread names
-_counter = 0
-def _newname(template="Thread-%d"):
-    global _counter
-    _counter = _counter + 1
-    return template % _counter
-
-# Active thread administration
-_active_limbo_lock = _allocate_lock()
-_active = {}    # maps thread id to Thread object
-_limbo = {}
-
-
-# Main class for threads
-
-class Thread(_Verbose):
-
-    __initialized = False
-    # Need to store a reference to sys.exc_info for printing
-    # out exceptions when a thread tries to use a global var. during interp.
-    # shutdown and thus raises an exception about trying to perform some
-    # operation on/with a NoneType
-    __exc_info = _sys.exc_info
-    # Keep sys.exc_clear too to clear the exception just before
-    # allowing .join() to return.
-    __exc_clear = _sys.exc_clear
-
-    def __init__(self, group=None, target=None, name=None,
-                 args=(), kwargs=None, verbose=None):
-        assert group is None, "group argument must be None for now"
-        _Verbose.__init__(self, verbose)
-        if kwargs is None:
-            kwargs = {}
-        self.__target = target
-        self.__name = str(name or _newname())
-        self.__args = args
-        self.__kwargs = kwargs
-        self.__daemonic = self._set_daemon()
-        self.__ident = None
-        self.__started = Event()
-        self.__stopped = False
-        self.__block = Condition(Lock())
-        self.__initialized = True
-        # sys.stderr is not stored in the class like
-        # sys.exc_info since it can be changed between instances
-        self.__stderr = _sys.stderr
-
-    def _reset_internal_locks(self):
-        # private!  Called by _after_fork() to reset our internal locks as
-        # they may be in an invalid state leading to a deadlock or crash.
-        if hasattr(self, '_Thread__block'):  # DummyThread deletes self.__block
-            self.__block.__init__()
-        self.__started._reset_internal_locks()
-
-    @property
-    def _block(self):
-        # used by a unittest
-        return self.__block
-
-    def _set_daemon(self):
-        # Overridden in _MainThread and _DummyThread
-        return current_thread().daemon
-
-    def __repr__(self):
-        assert self.__initialized, "Thread.__init__() was not called"
-        status = "initial"
-        if self.__started.is_set():
-            status = "started"
-        if self.__stopped:
-            status = "stopped"
-        if self.__daemonic:
-            status += " daemon"
-        if self.__ident is not None:
-            status += " %s" % self.__ident
-        return "<%s(%s, %s)>" % (self.__class__.__name__, self.__name, status)
-
-    def start(self):
-        if not self.__initialized:
-            raise RuntimeError("thread.__init__() not called")
-        if self.__started.is_set():
-            raise RuntimeError("threads can only be started once")
-        if __debug__:
-            self._note("%s.start(): starting thread", self)
-        with _active_limbo_lock:
-            _limbo[self] = self
-        try:
-            _start_new_thread(self.__bootstrap, ())
-        except Exception:
-            with _active_limbo_lock:
-                del _limbo[self]
-            raise
-        self.__started.wait()
-
-    def run(self):
-        try:
-            if self.__target:
-                self.__target(*self.__args, **self.__kwargs)
-        finally:
-            # Avoid a refcycle if the thread is running a function with
-            # an argument that has a member that points to the thread.
-            del self.__target, self.__args, self.__kwargs
-
-    def __bootstrap(self):
-        # Wrapper around the real bootstrap code that ignores
-        # exceptions during interpreter cleanup.  Those typically
-        # happen when a daemon thread wakes up at an unfortunate
-        # moment, finds the world around it destroyed, and raises some
-        # random exception *** while trying to report the exception in
-        # __bootstrap_inner() below ***.  Those random exceptions
-        # don't help anybody, and they confuse users, so we suppress
-        # them.  We suppress them only when it appears that the world
-        # indeed has already been destroyed, so that exceptions in
-        # __bootstrap_inner() during normal business hours are properly
-        # reported.  Also, we only suppress them for daemonic threads;
-        # if a non-daemonic encounters this, something else is wrong.
-        try:
-            self.__bootstrap_inner()
-        except:
-            if self.__daemonic and _sys is None:
-                return
-            raise
-
-    def _set_ident(self):
-        self.__ident = _get_ident()
-
-    def __bootstrap_inner(self):
-        try:
-            self._set_ident()
-            self.__started.set()
-            with _active_limbo_lock:
-                _active[self.__ident] = self
-                del _limbo[self]
-            if __debug__:
-                self._note("%s.__bootstrap(): thread started", self)
-
-            if _trace_hook:
-                self._note("%s.__bootstrap(): registering trace hook", self)
-                _sys.settrace(_trace_hook)
-            if _profile_hook:
-                self._note("%s.__bootstrap(): registering profile hook", self)
-                _sys.setprofile(_profile_hook)
-
-            try:
-                self.run()
-            except SystemExit:
-                if __debug__:
-                    self._note("%s.__bootstrap(): raised SystemExit", self)
-            except:
-                if __debug__:
-                    self._note("%s.__bootstrap(): unhandled exception", self)
-                # If sys.stderr is no more (most likely from interpreter
-                # shutdown) use self.__stderr.  Otherwise still use sys (as in
-                # _sys) in case sys.stderr was redefined since the creation of
-                # self.
-                if _sys:
-                    _sys.stderr.write("Exception in thread %s:\n%s\n" %
-                                      (self.name, _format_exc()))
-                else:
-                    # Do the best job possible w/o a huge amt. of code to
-                    # approximate a traceback (code ideas from
-                    # Lib/traceback.py)
-                    exc_type, exc_value, exc_tb = self.__exc_info()
-                    try:
-                        print>>self.__stderr, (
-                            "Exception in thread " + self.name +
-                            " (most likely raised during interpreter shutdown):")
-                        print>>self.__stderr, (
-                            "Traceback (most recent call last):")
-                        while exc_tb:
-                            print>>self.__stderr, (
-                                '  File "%s", line %s, in %s' %
-                                (exc_tb.tb_frame.f_code.co_filename,
-                                    exc_tb.tb_lineno,
-                                    exc_tb.tb_frame.f_code.co_name))
-                            exc_tb = exc_tb.tb_next
-                        print>>self.__stderr, ("%s: %s" % (exc_type, exc_value))
-                    # Make sure that exc_tb gets deleted since it is a memory
-                    # hog; deleting everything else is just for thoroughness
-                    finally:
-                        del exc_type, exc_value, exc_tb
-            else:
-                if __debug__:
-                    self._note("%s.__bootstrap(): normal return", self)
-            finally:
-                # Prevent a race in
-                # test_threading.test_no_refcycle_through_target when
-                # the exception keeps the target alive past when we
-                # assert that it's dead.
-                self.__exc_clear()
-        finally:
-            with _active_limbo_lock:
-                self.__stop()
-                try:
-                    # We don't call self.__delete() because it also
-                    # grabs _active_limbo_lock.
-                    del _active[_get_ident()]
-                except:
-                    pass
-
-    def __stop(self):
-        self.__block.acquire()
-        self.__stopped = True
-        self.__block.notify_all()
-        self.__block.release()
-
-    def __delete(self):
-        "Remove current thread from the dict of currently running threads."
-
-        # Notes about running with dummy_thread:
-        #
-        # Must take care to not raise an exception if dummy_thread is being
-        # used (and thus this module is being used as an instance of
-        # dummy_threading).  dummy_thread.get_ident() always returns -1 since
-        # there is only one thread if dummy_thread is being used.  Thus
-        # len(_active) is always <= 1 here, and any Thread instance created
-        # overwrites the (if any) thread currently registered in _active.
-        #
-        # An instance of _MainThread is always created by 'threading'.  This
-        # gets overwritten the instant an instance of Thread is created; both
-        # threads return -1 from dummy_thread.get_ident() and thus have the
-        # same key in the dict.  So when the _MainThread instance created by
-        # 'threading' tries to clean itself up when atexit calls this method
-        # it gets a KeyError if another Thread instance was created.
-        #
-        # This all means that KeyError from trying to delete something from
-        # _active if dummy_threading is being used is a red herring.  But
-        # since it isn't if dummy_threading is *not* being used then don't
-        # hide the exception.
-
-        try:
-            with _active_limbo_lock:
-                del _active[_get_ident()]
-                # There must not be any python code between the previous line
-                # and after the lock is released.  Otherwise a tracing function
-                # could try to acquire the lock again in the same thread, (in
-                # current_thread()), and would block.
-        except KeyError:
-            if 'dummy_threading' not in _sys.modules:
-                raise
-
-    def join(self, timeout=None):
-        if not self.__initialized:
-            raise RuntimeError("Thread.__init__() not called")
-        if not self.__started.is_set():
-            raise RuntimeError("cannot join thread before it is started")
-        if self is current_thread():
-            raise RuntimeError("cannot join current thread")
-
-        if __debug__:
-            if not self.__stopped:
-                self._note("%s.join(): waiting until thread stops", self)
-        self.__block.acquire()
-        try:
-            if timeout is None:
-                while not self.__stopped:
-                    self.__block.wait()
-                if __debug__:
-                    self._note("%s.join(): thread stopped", self)
-            else:
-                deadline = _time() + timeout
-                while not self.__stopped:
-                    delay = deadline - _time()
-                    if delay <= 0:
-                        if __debug__:
-                            self._note("%s.join(): timed out", self)
-                        break
-                    self.__block.wait(delay)
-                else:
-                    if __debug__:
-                        self._note("%s.join(): thread stopped", self)
-        finally:
-            self.__block.release()
-
-    @property
-    def name(self):
-        assert self.__initialized, "Thread.__init__() not called"
-        return self.__name
-
-    @name.setter
-    def name(self, name):
-        assert self.__initialized, "Thread.__init__() not called"
-        self.__name = str(name)
-
-    @property
-    def ident(self):
-        assert self.__initialized, "Thread.__init__() not called"
-        return self.__ident
-
-    def isAlive(self):
-        assert self.__initialized, "Thread.__init__() not called"
-        return self.__started.is_set() and not self.__stopped
-
-    is_alive = isAlive
-
-    @property
-    def daemon(self):
-        assert self.__initialized, "Thread.__init__() not called"
-        return self.__daemonic
-
-    @daemon.setter
-    def daemon(self, daemonic):
-        if not self.__initialized:
-            raise RuntimeError("Thread.__init__() not called")
-        if self.__started.is_set():
-            raise RuntimeError("cannot set daemon status of active thread");
-        self.__daemonic = daemonic
-
-    def isDaemon(self):
-        return self.daemon
-
-    def setDaemon(self, daemonic):
-        self.daemon = daemonic
-
-    def getName(self):
-        return self.name
-
-    def setName(self, name):
-        self.name = name
-
-# The timer class was contributed by Itamar Shtull-Trauring
-
-def Timer(*args, **kwargs):
-    return _Timer(*args, **kwargs)
-
-class _Timer(Thread):
-    """Call a function after a specified number of seconds:
-
-    t = Timer(30.0, f, args=[], kwargs={})
-    t.start()
-    t.cancel() # stop the timer's action if it's still waiting
-    """
-
-    def __init__(self, interval, function, args=[], kwargs={}):
-        Thread.__init__(self)
-        self.interval = interval
-        self.function = function
-        self.args = args
-        self.kwargs = kwargs
-        self.finished = Event()
-
-    def cancel(self):
-        """Stop the timer if it hasn't finished yet"""
-        self.finished.set()
-
-    def run(self):
-        self.finished.wait(self.interval)
-        if not self.finished.is_set():
-            self.function(*self.args, **self.kwargs)
-        self.finished.set()
-
-# Special thread class to represent the main thread
-# This is garbage collected through an exit handler
-
-class _MainThread(Thread):
-
-    def __init__(self):
-        Thread.__init__(self, name="MainThread")
-        self._Thread__started.set()
-        self._set_ident()
-        with _active_limbo_lock:
-            _active[_get_ident()] = self
-
-    def _set_daemon(self):
-        return False
-
-    def _exitfunc(self):
-        self._Thread__stop()
-        t = _pickSomeNonDaemonThread()
-        if t:
-            if __debug__:
-                self._note("%s: waiting for other threads", self)
-        while t:
-            t.join()
-            t = _pickSomeNonDaemonThread()
-        if __debug__:
-            self._note("%s: exiting", self)
-        self._Thread__delete()
-
-def _pickSomeNonDaemonThread():
-    for t in enumerate():
-        if not t.daemon and t.is_alive():
-            return t
-    return None
-
-
-# Dummy thread class to represent threads not started here.
-# These aren't garbage collected when they die, nor can they be waited for.
-# If they invoke anything in threading.py that calls current_thread(), they
-# leave an entry in the _active dict forever after.
-# Their purpose is to return *something* from current_thread().
-# They are marked as daemon threads so we won't wait for them
-# when we exit (conform previous semantics).
-
-class _DummyThread(Thread):
-
-    def __init__(self):
-        Thread.__init__(self, name=_newname("Dummy-%d"))
-
-        # Thread.__block consumes an OS-level locking primitive, which
-        # can never be used by a _DummyThread.  Since a _DummyThread
-        # instance is immortal, that's bad, so release this resource.
-        del self._Thread__block
-
-        self._Thread__started.set()
-        self._set_ident()
-        with _active_limbo_lock:
-            _active[_get_ident()] = self
-
-    def _set_daemon(self):
-        return True
-
-    def join(self, timeout=None):
-        assert False, "cannot join a dummy thread"
-
-
-# Global API functions
-
-def currentThread():
-    try:
-        return _active[_get_ident()]
-    except KeyError:
-        ##print "current_thread(): no current thread for", _get_ident()
-        return _DummyThread()
-
-current_thread = currentThread
-
-def activeCount():
-    with _active_limbo_lock:
-        return len(_active) + len(_limbo)
-
-active_count = activeCount
-
-def _enumerate():
-    # Same as enumerate(), but without the lock. Internal use only.
-    return _active.values() + _limbo.values()
-
-def enumerate():
-    with _active_limbo_lock:
-        return _active.values() + _limbo.values()
-
-# Create the main thread object,
-# and make it available for the interpreter
-# (Py_Main) as threading._shutdown.
-
-_shutdown = _MainThread()._exitfunc
+from threading import enumerate, currentThread, Condition, Event, Timer, Thread
+try:
+    from threading import settrace
+except:
+    pass
 
-# get thread-local implementation, either from the thread
-# module, or from the python fallback
 
 try:
-    from _pydev_imps._pydev_thread import _local as local
-except ImportError:
-    from _threading_local import local
-
-
-def _after_fork():
-    # This function is called by Python/ceval.c:PyEval_ReInitThreads which
-    # is called from PyOS_AfterFork.  Here we cleanup threading module state
-    # that should not exist after a fork.
-
-    # Reset _active_limbo_lock, in case we forked while the lock was held
-    # by another (non-forked) thread.  http://bugs.python.org/issue874900
-    global _active_limbo_lock
-    _active_limbo_lock = _allocate_lock()
-
-    # fork() only copied the current thread; clear references to others.
-    new_active = {}
-    current = current_thread()
-    with _active_limbo_lock:
-        for thread in _active.itervalues():
-            # Any lock/condition variable may be currently locked or in an
-            # invalid state, so we reinitialize them.
-            if hasattr(thread, '_reset_internal_locks'):
-                thread._reset_internal_locks()
-            if thread is current:
-                # There is only one active thread. We reset the ident to
-                # its new value since it can have changed.
-                ident = _get_ident()
-                thread._Thread__ident = ident
-                new_active[ident] = thread
-            else:
-                # All the others are already stopped.
-                thread._Thread__stop()
-
-        _limbo.clear()
-        _active.clear()
-        _active.update(new_active)
-        assert len(_active) == 1
-
-
-# Self-test code
-
-def _test():
-
-    class BoundedQueue(_Verbose):
-
-        def __init__(self, limit):
-            _Verbose.__init__(self)
-            self.mon = RLock()
-            self.rc = Condition(self.mon)
-            self.wc = Condition(self.mon)
-            self.limit = limit
-            self.queue = deque()
-
-        def put(self, item):
-            self.mon.acquire()
-            while len(self.queue) >= self.limit:
-                self._note("put(%s): queue full", item)
-                self.wc.wait()
-            self.queue.append(item)
-            self._note("put(%s): appended, length now %d",
-                       item, len(self.queue))
-            self.rc.notify()
-            self.mon.release()
-
-        def get(self):
-            self.mon.acquire()
-            while not self.queue:
-                self._note("get(): queue empty")
-                self.rc.wait()
-            item = self.queue.popleft()
-            self._note("get(): got %s, %d left", item, len(self.queue))
-            self.wc.notify()
-            self.mon.release()
-            return item
-
-    class ProducerThread(Thread):
-
-        def __init__(self, queue, quota):
-            Thread.__init__(self, name="Producer")
-            self.queue = queue
-            self.quota = quota
-
-        def run(self):
-            from random import random
-            counter = 0
-            while counter < self.quota:
-                counter = counter + 1
-                self.queue.put("%s.%d" % (self.name, counter))
-                _sleep(random() * 0.00001)
-
-
-    class ConsumerThread(Thread):
-
-        def __init__(self, queue, count):
-            Thread.__init__(self, name="Consumer")
-            self.queue = queue
-            self.count = count
-
-        def run(self):
-            while self.count > 0:
-                item = self.queue.get()
-                print item
-                self.count = self.count - 1
-
-    NP = 3
-    QL = 4
-    NI = 5
-
-    Q = BoundedQueue(QL)
-    P = []
-    for i in range(NP):
-        t = ProducerThread(Q, NI)
-        t.name = ("Producer-%d" % (i+1))
-        P.append(t)
-    C = ConsumerThread(Q, NI*NP)
-    for t in P:
-        t.start()
-        _sleep(0.000001)
-    C.start()
-    for t in P:
-        t.join()
-    C.join()
-
-if __name__ == '__main__':
-    _test()
+    from gevent import monkey
+    saved = monkey.saved['threading']
+    for key, val in saved.items():
+        globals()[key] = val
+except:
+    pass
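The replacement module above falls back to whatever gevent stashed in monkey.saved when it monkey-patched threading, so the debugger keeps the original OS-level primitives. A sketch of that mechanism (assuming a gevent release that populates monkey.saved, as the code above expects; the exact names saved depend on what was patched):

    from gevent import monkey

    monkey.patch_all()    # swaps threading internals for greenlet-based ones
    originals = monkey.saved.get('threading', {})
    for name, obj in originals.items():
        print name, obj   # e.g. the original _allocate_lock, _get_ident, ...
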
diff --git a/python/helpers/pydev/_pydev_time.py b/python/helpers/pydev/_pydev_time.py
deleted file mode 100644
index 72705db..0000000
--- a/python/helpers/pydev/_pydev_time.py
+++ /dev/null
@@ -1 +0,0 @@
-from time import *
diff --git a/python/helpers/pydev/_pydev_xmlrpclib.py b/python/helpers/pydev/_pydev_xmlrpclib.py
deleted file mode 100644
index 5f6e2b7..0000000
--- a/python/helpers/pydev/_pydev_xmlrpclib.py
+++ /dev/null
@@ -1,1493 +0,0 @@
-#Just a copy of the version in python 2.5 to be used if it's not available in jython 2.1
-import sys
-
-#
-# XML-RPC CLIENT LIBRARY
-#
-# an XML-RPC client interface for Python.
-#
-# the marshalling and response parser code can also be used to
-# implement XML-RPC servers.
-#
-# Notes:
-# this version is designed to work with Python 2.1 or newer.
-#
-# History:
-# 1999-01-14 fl  Created
-# 1999-01-15 fl  Changed dateTime to use localtime
-# 1999-01-16 fl  Added Binary/base64 element, default to RPC2 service
-# 1999-01-19 fl  Fixed array data element (from Skip Montanaro)
-# 1999-01-21 fl  Fixed dateTime constructor, etc.
-# 1999-02-02 fl  Added fault handling, handle empty sequences, etc.
-# 1999-02-10 fl  Fixed problem with empty responses (from Skip Montanaro)
-# 1999-06-20 fl  Speed improvements, pluggable parsers/transports (0.9.8)
-# 2000-11-28 fl  Changed boolean to check the truth value of its argument
-# 2001-02-24 fl  Added encoding/Unicode/SafeTransport patches
-# 2001-02-26 fl  Added compare support to wrappers (0.9.9/1.0b1)
-# 2001-03-28 fl  Make sure response tuple is a singleton
-# 2001-03-29 fl  Don't require empty params element (from Nicholas Riley)
-# 2001-06-10 fl  Folded in _xmlrpclib accelerator support (1.0b2)
-# 2001-08-20 fl  Base xmlrpclib.Error on built-in Exception (from Paul Prescod)
-# 2001-09-03 fl  Allow Transport subclass to override getparser
-# 2001-09-10 fl  Lazy import of urllib, cgi, xmllib (20x import speedup)
-# 2001-10-01 fl  Remove containers from memo cache when done with them
-# 2001-10-01 fl  Use faster escape method (80% dumps speedup)
-# 2001-10-02 fl  More dumps microtuning
-# 2001-10-04 fl  Make sure import expat gets a parser (from Guido van Rossum)
-# 2001-10-10 sm  Allow long ints to be passed as ints if they don't overflow
-# 2001-10-17 sm  Test for int and long overflow (allows use on 64-bit systems)
-# 2001-11-12 fl  Use repr() to marshal doubles (from Paul Felix)
-# 2002-03-17 fl  Avoid buffered read when possible (from James Rucker)
-# 2002-04-07 fl  Added pythondoc comments
-# 2002-04-16 fl  Added __str__ methods to datetime/binary wrappers
-# 2002-05-15 fl  Added error constants (from Andrew Kuchling)
-# 2002-06-27 fl  Merged with Python CVS version
-# 2002-10-22 fl  Added basic authentication (based on code from Phillip Eby)
-# 2003-01-22 sm  Add support for the bool type
-# 2003-02-27 gvr Remove apply calls
-# 2003-04-24 sm  Use cStringIO if available
-# 2003-04-25 ak  Add support for nil
-# 2003-06-15 gn  Add support for time.struct_time
-# 2003-07-12 gp  Correct marshalling of Faults
-# 2003-10-31 mvl Add multicall support
-# 2004-08-20 mvl Bump minimum supported Python version to 2.1
-#
-# Copyright (c) 1999-2002 by Secret Labs AB.
-# Copyright (c) 1999-2002 by Fredrik Lundh.
-#
-# info@pythonware.com
-# http://www.pythonware.com
-#
-# --------------------------------------------------------------------
-# The XML-RPC client interface is
-#
-# Copyright (c) 1999-2002 by Secret Labs AB
-# Copyright (c) 1999-2002 by Fredrik Lundh
-#
-# By obtaining, using, and/or copying this software and/or its
-# associated documentation, you agree that you have read, understood,
-# and will comply with the following terms and conditions:
-#
-# Permission to use, copy, modify, and distribute this software and
-# its associated documentation for any purpose and without fee is
-# hereby granted, provided that the above copyright notice appears in
-# all copies, and that both that copyright notice and this permission
-# notice appear in supporting documentation, and that the name of
-# Secret Labs AB or the author not be used in advertising or publicity
-# pertaining to distribution of the software without specific, written
-# prior permission.
-#
-# SECRET LABS AB AND THE AUTHOR DISCLAIMS ALL WARRANTIES WITH REGARD
-# TO THIS SOFTWARE, INCLUDING ALL IMPLIED WARRANTIES OF MERCHANT-
-# ABILITY AND FITNESS.  IN NO EVENT SHALL SECRET LABS AB OR THE AUTHOR
-# BE LIABLE FOR ANY SPECIAL, INDIRECT OR CONSEQUENTIAL DAMAGES OR ANY
-# DAMAGES WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS,
-# WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS
-# ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR PERFORMANCE
-# OF THIS SOFTWARE.
-# --------------------------------------------------------------------
-
-#
-# things to look into some day:
-
-# TODO: sort out True/False/boolean issues for Python 2.3
-
-"""
-An XML-RPC client interface for Python.
-
-The marshalling and response parser code can also be used to
-implement XML-RPC servers.
-
-Exported exceptions:
-
-  Error          Base class for client errors
-  ProtocolError  Indicates an HTTP protocol error
-  ResponseError  Indicates a broken response package
-  Fault          Indicates an XML-RPC fault package
-
-Exported classes:
-
-  ServerProxy    Represents a logical connection to an XML-RPC server
-
-  MultiCall      Executor of boxcared xmlrpc requests
-  Boolean        boolean wrapper to generate a "boolean" XML-RPC value
-  DateTime       dateTime wrapper for an ISO 8601 string or time tuple or
-                 localtime integer value to generate a "dateTime.iso8601"
-                 XML-RPC value
-  Binary         binary data wrapper
-
-  SlowParser     Slow but safe standard parser (based on xmllib)
-  Marshaller     Generate an XML-RPC params chunk from a Python data structure
-  Unmarshaller   Unmarshal an XML-RPC response from incoming XML event message
-  Transport      Handles an HTTP transaction to an XML-RPC server
-  SafeTransport  Handles an HTTPS transaction to an XML-RPC server
-
-Exported constants:
-
-  True
-  False
-
-Exported functions:
-
-  boolean        Convert any Python value to an XML-RPC boolean
-  getparser      Create instance of the fastest available parser & attach
-                 to an unmarshalling object
-  dumps          Convert an argument tuple or a Fault instance to an XML-RPC
-                 request (or response, if the methodresponse option is used).
-  loads          Convert an XML-RPC packet to unmarshalled data plus a method
-                 name (None if not present).
-"""
-
-import re, string, time, operator
-
-from types import *
-
-# --------------------------------------------------------------------
-# Internal stuff
-
-try:
-    unicode
-except NameError:
-    unicode = None # unicode support not available
-
-try:
-    import datetime
-except ImportError:
-    datetime = None
-
-try:
-    _bool_is_builtin = False.__class__.__name__ == "bool"
-except (NameError, AttributeError):
-    _bool_is_builtin = 0
-
-def _decode(data, encoding, is8bit=re.compile("[\x80-\xff]").search):
-    # decode non-ascii string (if possible)
-    if unicode and encoding and is8bit(data):
-        data = unicode(data, encoding)
-    return data
-
-def escape(s, replace=string.replace):
-    s = replace(s, "&", "&amp;")
-    s = replace(s, "<", "&lt;")
-    return replace(s, ">", "&gt;",)
-
-if unicode:
-    def _stringify(string):
-        # convert to 7-bit ascii if possible
-        try:
-            return string.encode("ascii")
-        except UnicodeError:
-            return string
-else:
-    def _stringify(string):
-        return string
-
-__version__ = "1.0.1"
-
-# xmlrpc integer limits
-try:
-    long 
-except NameError:
-    long = int
-MAXINT = long(2) ** 31 - 1
-MININT = long(-2) ** 31
-
-# --------------------------------------------------------------------
-# Error constants (from Dan Libby's specification at
-# http://xmlrpc-epi.sourceforge.net/specs/rfc.fault_codes.php)
-
-# Ranges of errors
-PARSE_ERROR = -32700
-SERVER_ERROR = -32600
-APPLICATION_ERROR = -32500
-SYSTEM_ERROR = -32400
-TRANSPORT_ERROR = -32300
-
-# Specific errors
-NOT_WELLFORMED_ERROR = -32700
-UNSUPPORTED_ENCODING = -32701
-INVALID_ENCODING_CHAR = -32702
-INVALID_XMLRPC = -32600
-METHOD_NOT_FOUND = -32601
-INVALID_METHOD_PARAMS = -32602
-INTERNAL_ERROR = -32603
-
-# --------------------------------------------------------------------
-# Exceptions
-
-##
-# Base class for all kinds of client-side errors.
-
-class Error(Exception):
-    """Base class for client errors."""
-    def __str__(self):
-        return repr(self)
-
-##
-# Indicates an HTTP-level protocol error.  This is raised by the HTTP
-# transport layer, if the server returns an error code other than 200
-# (OK).
-#
-# @param url The target URL.
-# @param errcode The HTTP error code.
-# @param errmsg The HTTP error message.
-# @param headers The HTTP header dictionary.
-
-class ProtocolError(Error):
-    """Indicates an HTTP protocol error."""
-    def __init__(self, url, errcode, errmsg, headers):
-        Error.__init__(self)
-        self.url = url
-        self.errcode = errcode
-        self.errmsg = errmsg
-        self.headers = headers
-    def __repr__(self):
-        return (
-            "<ProtocolError for %s: %s %s>" % 
-            (self.url, self.errcode, self.errmsg)
-            )
-
-##
-# Indicates a broken XML-RPC response package.  This exception is
-# raised by the unmarshalling layer, if the XML-RPC response is
-# malformed.
-
-class ResponseError(Error):
-    """Indicates a broken response package."""
-    pass
-
-##
-# Indicates an XML-RPC fault response package.  This exception is
-# raised by the unmarshalling layer, if the XML-RPC response contains
-# a fault string.  This exception can also used as a class, to
-# generate a fault XML-RPC message.
-#
-# @param faultCode The XML-RPC fault code.
-# @param faultString The XML-RPC fault string.
-
-class Fault(Error):
-    """Indicates an XML-RPC fault package."""
-    def __init__(self, faultCode, faultString, **extra):
-        Error.__init__(self)
-        self.faultCode = faultCode
-        self.faultString = faultString
-    def __repr__(self):
-        return (
-            "<Fault %s: %s>" % 
-            (self.faultCode, repr(self.faultString))
-            )
-
-# --------------------------------------------------------------------
-# Special values
-
-##
-# Wrapper for XML-RPC boolean values.  Use the xmlrpclib.True and
-# xmlrpclib.False constants, or the xmlrpclib.boolean() function, to
-# generate boolean XML-RPC values.
-#
-# @param value A boolean value.  Any true value is interpreted as True,
-#              all other values are interpreted as False.
-
-if _bool_is_builtin:
-    boolean = Boolean = bool #@UndefinedVariable
-    # to avoid breaking code which references xmlrpclib.{True,False}
-    True, False = True, False
-else:
-    class Boolean:
-        """Boolean-value wrapper.
-
-        Use True or False to generate a "boolean" XML-RPC value.
-        """
-
-        def __init__(self, value=0):
-            self.value = operator.truth(value)
-
-        def encode(self, out):
-            out.write("<value><boolean>%d</boolean></value>\n" % self.value)
-
-        def __cmp__(self, other):
-            if isinstance(other, Boolean):
-                other = other.value
-            return cmp(self.value, other)
-
-        def __repr__(self):
-            if self.value:
-                return "<Boolean True at %x>" % id(self)
-            else:
-                return "<Boolean False at %x>" % id(self)
-
-        def __int__(self):
-            return self.value
-
-        def __nonzero__(self):
-            return self.value
-
-    True, False = Boolean(1), Boolean(0)
-
-    ##
-    # Map true or false value to XML-RPC boolean values.
-    #
-    # @def boolean(value)
-    # @param value A boolean value.  Any true value is mapped to True,
-    #              all other values are mapped to False.
-    # @return xmlrpclib.True or xmlrpclib.False.
-    # @see Boolean
-    # @see True
-    # @see False
-
-    def boolean(value, _truefalse=(False, True)):
-        """Convert any Python value to XML-RPC 'boolean'."""
-        return _truefalse[operator.truth(value)]
-
-##
-# Wrapper for XML-RPC DateTime values.  This converts a time value to
-# the format used by XML-RPC.
-# <p>
-# The value can be given as a string in the format
-# "yyyymmddThh:mm:ss", as a 9-item time tuple (as returned by
-# time.localtime()), or an integer value (as returned by time.time()).
-# The wrapper uses time.localtime() to convert an integer to a time
-# tuple.
-#
-# @param value The time, given as an ISO 8601 string, a time
-#              tuple, or a integer time value.
-
-class DateTime:
-    """DateTime wrapper for an ISO 8601 string or time tuple or
-    localtime integer value to generate 'dateTime.iso8601' XML-RPC
-    value.
-    """
-
-    def __init__(self, value=0):
-        if not isinstance(value, StringType):
-            if datetime and isinstance(value, datetime.datetime):
-                self.value = value.strftime("%Y%m%dT%H:%M:%S")
-                return
-            if datetime and isinstance(value, datetime.date):
-                self.value = value.strftime("%Y%m%dT%H:%M:%S")
-                return
-            if datetime and isinstance(value, datetime.time):
-                today = datetime.datetime.now().strftime("%Y%m%d")
-                self.value = value.strftime(today + "T%H:%M:%S")
-                return
-            if not isinstance(value, (TupleType, time.struct_time)): #@UndefinedVariable
-                if value == 0:
-                    value = time.time()
-                value = time.localtime(value)
-            value = time.strftime("%Y%m%dT%H:%M:%S", value)
-        self.value = value
-
-    def __cmp__(self, other):
-        if isinstance(other, DateTime):
-            other = other.value
-        return cmp(self.value, other)
-
-    ##
-    # Get date/time value.
-    #
-    # @return Date/time value, as an ISO 8601 string.
-
-    def __str__(self):
-        return self.value
-
-    def __repr__(self):
-        return "<DateTime %s at %x>" % (repr(self.value), id(self))
-
-    def decode(self, data):
-        data = str(data)
-        self.value = string.strip(data)
-
-    def encode(self, out):
-        out.write("<value><dateTime.iso8601>")
-        out.write(self.value)
-        out.write("</dateTime.iso8601></value>\n")
-
-def _datetime(data):
-    # decode xml element contents into a DateTime structure.
-    value = DateTime()
-    value.decode(data)
-    return value
-
-def _datetime_type(data):
-    t = time.strptime(data, "%Y%m%dT%H:%M:%S") #@UndefinedVariable
-    return datetime.datetime(*tuple(t)[:6])
-
-##
-# Wrapper for binary data.  This can be used to transport any kind
-# of binary data over XML-RPC, using BASE64 encoding.
-#
-# @param data An 8-bit string containing arbitrary data.
-
-import base64
-try:
-    import cStringIO as StringIO
-except ImportError:
-    import StringIO
-
-class Binary:
-    """Wrapper for binary data."""
-
-    def __init__(self, data=None):
-        self.data = data
-
-    ##
-    # Get buffer contents.
-    #
-    # @return Buffer contents, as an 8-bit string.
-
-    def __str__(self):
-        return self.data or ""
-
-    def __cmp__(self, other):
-        if isinstance(other, Binary):
-            other = other.data
-        return cmp(self.data, other)
-
-    def decode(self, data):
-        self.data = base64.decodestring(data)
-
-    def encode(self, out):
-        out.write("<value><base64>\n")
-        base64.encode(StringIO.StringIO(self.data), out)
-        out.write("</base64></value>\n")
-
-def _binary(data):
-    # decode xml element contents into a Binary structure
-    value = Binary()
-    value.decode(data)
-    return value
-
-WRAPPERS = (DateTime, Binary)
-if not _bool_is_builtin:
-    WRAPPERS = WRAPPERS + (Boolean,)
-
-# --------------------------------------------------------------------
-# XML parsers
-
-try:
-    # optional xmlrpclib accelerator
-    import _xmlrpclib #@UnresolvedImport
-    FastParser = _xmlrpclib.Parser
-    FastUnmarshaller = _xmlrpclib.Unmarshaller
-except (AttributeError, ImportError):
-    FastParser = FastUnmarshaller = None
-
-try:
-    import _xmlrpclib #@UnresolvedImport
-    FastMarshaller = _xmlrpclib.Marshaller
-except (AttributeError, ImportError):
-    FastMarshaller = None
-
-#
-# the SGMLOP parser is about 15x faster than Python's builtin
-# XML parser.  SGMLOP sources can be downloaded from:
-#
-#     http://www.pythonware.com/products/xml/sgmlop.htm
-#
-
-try:
-    import sgmlop
-    if not hasattr(sgmlop, "XMLParser"):
-        raise ImportError()
-except ImportError:
-    SgmlopParser = None # sgmlop accelerator not available
-else:
-    class SgmlopParser:
-        def __init__(self, target):
-
-            # setup callbacks
-            self.finish_starttag = target.start
-            self.finish_endtag = target.end
-            self.handle_data = target.data
-            self.handle_xml = target.xml
-
-            # activate parser
-            self.parser = sgmlop.XMLParser()
-            self.parser.register(self)
-            self.feed = self.parser.feed
-            self.entity = {
-                "amp": "&", "gt": ">", "lt": "<",
-                "apos": "'", "quot": '"'
-                }
-
-        def close(self):
-            try:
-                self.parser.close()
-            finally:
-                self.parser = self.feed = None # nuke circular reference
-
-        def handle_proc(self, tag, attr):
-            m = re.search("encoding\s*=\s*['\"]([^\"']+)[\"']", attr) #@UndefinedVariable
-            if m:
-                self.handle_xml(m.group(1), 1)
-
-        def handle_entityref(self, entity):
-            # <string> entity
-            try:
-                self.handle_data(self.entity[entity])
-            except KeyError:
-                self.handle_data("&%s;" % entity)
-
-try:
-    from xml.parsers import expat
-    if not hasattr(expat, "ParserCreate"):
-        raise ImportError()
-except ImportError:
-    ExpatParser = None # expat not available
-else:
-    class ExpatParser:
-        # fast expat parser for Python 2.0 and later.  this is about
-        # 50% slower than sgmlop, on roundtrip testing
-        def __init__(self, target):
-            self._parser = parser = expat.ParserCreate(None, None)
-            self._target = target
-            parser.StartElementHandler = target.start
-            parser.EndElementHandler = target.end
-            parser.CharacterDataHandler = target.data
-            encoding = None
-            if not parser.returns_unicode:
-                encoding = "utf-8"
-            target.xml(encoding, None)
-
-        def feed(self, data):
-            self._parser.Parse(data, 0)
-
-        def close(self):
-            self._parser.Parse("", 1) # end of data
-            del self._target, self._parser # get rid of circular references
-
-class SlowParser:
-    """Default XML parser (based on xmllib.XMLParser)."""
-    # this is about 10 times slower than sgmlop, on roundtrip
-    # testing.
-    def __init__(self, target):
-        import xmllib # lazy subclassing (!)
-        if xmllib.XMLParser not in SlowParser.__bases__:
-            SlowParser.__bases__ = (xmllib.XMLParser,)
-        self.handle_xml = target.xml
-        self.unknown_starttag = target.start
-        self.handle_data = target.data
-        self.handle_cdata = target.data
-        self.unknown_endtag = target.end
-        try:
-            xmllib.XMLParser.__init__(self, accept_utf8=1)
-        except TypeError:
-            xmllib.XMLParser.__init__(self) # pre-2.0
-
-# --------------------------------------------------------------------
-# XML-RPC marshalling and unmarshalling code
-
-##
-# XML-RPC marshaller.
-#
-# @param encoding Default encoding for 8-bit strings.  The default
-#     value is None (interpreted as UTF-8).
-# @see dumps
-
-class Marshaller:
-    """Generate an XML-RPC params chunk from a Python data structure.
-
-    Create a Marshaller instance for each set of parameters, and use
-    the "dumps" method to convert your data (represented as a tuple)
-    to an XML-RPC params chunk.  To write a fault response, pass a
-    Fault instance instead.  You may prefer to use the "dumps" module
-    function for this purpose.
-    """
-
-    # by the way, if you don't understand what's going on in here,
-    # that's perfectly ok.
-
-    def __init__(self, encoding=None, allow_none=0):
-        self.memo = {}
-        self.data = None
-        self.encoding = encoding
-        self.allow_none = allow_none
-
-    dispatch = {}
-
-    def dumps(self, values):
-        out = []
-        write = out.append
-        dump = self.__dump
-        if isinstance(values, Fault):
-            # fault instance
-            write("<fault>\n")
-            dump({'faultCode': values.faultCode,
-                  'faultString': values.faultString},
-                 write)
-            write("</fault>\n")
-        else:
-            # parameter block
-            # FIXME: the xml-rpc specification allows us to leave out
-            # the entire <params> block if there are no parameters.
-            # however, changing this may break older code (including
-            # old versions of xmlrpclib.py), so this is better left as
-            # is for now.  See @XMLRPC3 for more information. /F
-            write("<params>\n")
-            for v in values:
-                write("<param>\n")
-                dump(v, write)
-                write("</param>\n")
-            write("</params>\n")
-        result = string.join(out, "")
-        return result
-
-    def __dump(self, value, write):
-        try:
-            f = self.dispatch[type(value)]
-        except KeyError:
-            raise TypeError("cannot marshal %s objects" % type(value))
-        else:
-            f(self, value, write)
-
-    def dump_nil (self, value, write):
-        if not self.allow_none:
-            raise TypeError("cannot marshal None unless allow_none is enabled")
-        write("<value><nil/></value>")
-    dispatch[NoneType] = dump_nil
-
-    def dump_int(self, value, write):
-        # in case ints are > 32 bits
-        if value > MAXINT or value < MININT:
-            raise OverflowError("int exceeds XML-RPC limits")
-        write("<value><int>")
-        write(str(value))
-        write("</int></value>\n")
-    dispatch[IntType] = dump_int
-
-    if _bool_is_builtin:
-        def dump_bool(self, value, write):
-            write("<value><boolean>")
-            write(value and "1" or "0")
-            write("</boolean></value>\n")
-        dispatch[bool] = dump_bool #@UndefinedVariable
-
-    def dump_long(self, value, write):
-        if value > MAXINT or value < MININT:
-            raise OverflowError("long int exceeds XML-RPC limits")
-        write("<value><int>")
-        write(str(int(value)))
-        write("</int></value>\n")
-    dispatch[LongType] = dump_long
-
-    def dump_double(self, value, write):
-        write("<value><double>")
-        write(repr(value))
-        write("</double></value>\n")
-    dispatch[FloatType] = dump_double
-
-    def dump_string(self, value, write, escape=escape):
-        write("<value><string>")
-        write(escape(value))
-        write("</string></value>\n")
-    dispatch[StringType] = dump_string
-
-    if unicode:
-        def dump_unicode(self, value, write, escape=escape):
-            value = value.encode(self.encoding)
-            write("<value><string>")
-            write(escape(value))
-            write("</string></value>\n")
-        dispatch[UnicodeType] = dump_unicode
-
-    def dump_array(self, value, write):
-        i = id(value)
-        if self.memo.has_key(i):
-            raise TypeError("cannot marshal recursive sequences")
-        self.memo[i] = None
-        dump = self.__dump
-        write("<value><array><data>\n")
-        for v in value:
-            dump(v, write)
-        write("</data></array></value>\n")
-        del self.memo[i]
-    dispatch[TupleType] = dump_array
-    dispatch[ListType] = dump_array
-
-    def dump_struct(self, value, write, escape=escape):
-        i = id(value)
-        if self.memo.has_key(i):
-            raise TypeError("cannot marshal recursive dictionaries")
-        self.memo[i] = None
-        dump = self.__dump
-        write("<value><struct>\n")
-        for k, v in value.items():
-            write("<member>\n")
-            if type(k) is not StringType:
-                if unicode and type(k) is UnicodeType:
-                    k = k.encode(self.encoding)
-                else:
-                    raise TypeError("dictionary key must be string")
-            write("<name>%s</name>\n" % escape(k))
-            dump(v, write)
-            write("</member>\n")
-        write("</struct></value>\n")
-        del self.memo[i]
-    dispatch[DictType] = dump_struct
-
-    if datetime:
-        def dump_datetime(self, value, write):
-            write("<value><dateTime.iso8601>")
-            write(value.strftime("%Y%m%dT%H:%M:%S"))
-            write("</dateTime.iso8601></value>\n")
-        dispatch[datetime.datetime] = dump_datetime
-
-        def dump_date(self, value, write):
-            write("<value><dateTime.iso8601>")
-            write(value.strftime("%Y%m%dT00:00:00"))
-            write("</dateTime.iso8601></value>\n")
-        dispatch[datetime.date] = dump_date
-
-        def dump_time(self, value, write):
-            write("<value><dateTime.iso8601>")
-            write(datetime.datetime.now().date().strftime("%Y%m%dT"))
-            write(value.strftime("%H:%M:%S"))
-            write("</dateTime.iso8601></value>\n")
-        dispatch[datetime.time] = dump_time
-
-    def dump_instance(self, value, write):
-        # check for special wrappers
-        if value.__class__ in WRAPPERS:
-            self.write = write
-            value.encode(self)
-            del self.write
-        else:
-            # store instance attributes as a struct (really?)
-            self.dump_struct(value.__dict__, write)
-    dispatch[InstanceType] = dump_instance
-
-##
-# XML-RPC unmarshaller.
-#
-# @see loads
-
-class Unmarshaller:
-    """Unmarshal an XML-RPC response, based on incoming XML event
-    messages (start, data, end).  Call close() to get the resulting
-    data structure.
-
-    Note that this reader is fairly tolerant, and gladly accepts bogus
-    XML-RPC data without complaining (but not bogus XML).
-    """
-
-    # and again, if you don't understand what's going on in here,
-    # that's perfectly ok.
-
-    def __init__(self, use_datetime=0):
-        self._type = None
-        self._stack = []
-        self._marks = []
-        self._data = []
-        self._methodname = None
-        self._encoding = "utf-8"
-        self.append = self._stack.append
-        self._use_datetime = use_datetime
-        if use_datetime and not datetime:
-            raise ValueError("the datetime module is not available")
-
-    def close(self):
-        # return response tuple and target method
-        if self._type is None or self._marks:
-            raise ResponseError()
-        if self._type == "fault":
-            raise Fault(**self._stack[0])
-        return tuple(self._stack)
-
-    def getmethodname(self):
-        return self._methodname
-
-    #
-    # event handlers
-
-    def xml(self, encoding, standalone):
-        self._encoding = encoding
-        # FIXME: assert standalone == 1 ???
-
-    def start(self, tag, attrs):
-        # prepare to handle this element
-        if tag == "array" or tag == "struct":
-            self._marks.append(len(self._stack))
-        self._data = []
-        self._value = (tag == "value")
-
-    def data(self, text):
-        self._data.append(text)
-
-    def end(self, tag, join=string.join):
-        # call the appropriate end tag handler
-        try:
-            f = self.dispatch[tag]
-        except KeyError:
-            pass # unknown tag ?
-        else:
-            return f(self, join(self._data, ""))
-
-    #
-    # accelerator support
-
-    def end_dispatch(self, tag, data):
-        # dispatch data
-        try:
-            f = self.dispatch[tag]
-        except KeyError:
-            pass # unknown tag ?
-        else:
-            return f(self, data)
-
-    #
-    # element decoders
-
-    dispatch = {}
-
-    def end_nil (self, data):
-        self.append(None)
-        self._value = 0
-    dispatch["nil"] = end_nil
-
-    def end_boolean(self, data):
-        if data == "0":
-            self.append(False)
-        elif data == "1":
-            self.append(True)
-        else:
-            raise TypeError("bad boolean value")
-        self._value = 0
-    dispatch["boolean"] = end_boolean
-
-    def end_int(self, data):
-        self.append(int(data))
-        self._value = 0
-    dispatch["i4"] = end_int
-    dispatch["int"] = end_int
-
-    def end_double(self, data):
-        self.append(float(data))
-        self._value = 0
-    dispatch["double"] = end_double
-
-    def end_string(self, data):
-        if self._encoding:
-            data = _decode(data, self._encoding)
-        self.append(_stringify(data))
-        self._value = 0
-    dispatch["string"] = end_string
-    dispatch["name"] = end_string # struct keys are always strings
-
-    def end_array(self, data):
-        mark = self._marks.pop()
-        # map arrays to Python lists
-        self._stack[mark:] = [self._stack[mark:]]
-        self._value = 0
-    dispatch["array"] = end_array
-
-    def end_struct(self, data):
-        mark = self._marks.pop()
-        # map structs to Python dictionaries
-        dict = {}
-        items = self._stack[mark:]
-        for i in range(0, len(items), 2):
-            dict[_stringify(items[i])] = items[i + 1]
-        self._stack[mark:] = [dict]
-        self._value = 0
-    dispatch["struct"] = end_struct
-
-    def end_base64(self, data):
-        value = Binary()
-        value.decode(data)
-        self.append(value)
-        self._value = 0
-    dispatch["base64"] = end_base64
-
-    def end_dateTime(self, data):
-        value = DateTime()
-        value.decode(data)
-        if self._use_datetime:
-            value = _datetime_type(data)
-        self.append(value)
-    dispatch["dateTime.iso8601"] = end_dateTime
-
-    def end_value(self, data):
-        # if we stumble upon a value element with no internal
-        # elements, treat it as a string element
-        if self._value:
-            self.end_string(data)
-    dispatch["value"] = end_value
-
-    def end_params(self, data):
-        self._type = "params"
-    dispatch["params"] = end_params
-
-    def end_fault(self, data):
-        self._type = "fault"
-    dispatch["fault"] = end_fault
-
-    def end_methodName(self, data):
-        if self._encoding:
-            data = _decode(data, self._encoding)
-        self._methodname = data
-        self._type = "methodName" # no params
-    dispatch["methodName"] = end_methodName
-
-## Multicall support
-#
-
-class _MultiCallMethod:
-    # some lesser magic to store calls made to a MultiCall object
-    # for batch execution
-    def __init__(self, call_list, name):
-        self.__call_list = call_list
-        self.__name = name
-    def __getattr__(self, name):
-        return _MultiCallMethod(self.__call_list, "%s.%s" % (self.__name, name))
-    def __call__(self, *args):
-        self.__call_list.append((self.__name, args))
-
-class MultiCallIterator:
-    """Iterates over the results of a multicall. Exceptions are
-    thrown in response to xmlrpc faults."""
-
-    def __init__(self, results):
-        self.results = results
-
-    def __getitem__(self, i):
-        item = self.results[i]
-        if type(item) == type({}):
-            raise Fault(item['faultCode'], item['faultString'])
-        elif type(item) == type([]):
-            return item[0]
-        else:
-            raise ValueError("unexpected type in multicall result")
-
-class MultiCall:
-    """server -> a object used to boxcar method calls
-
-    server should be a ServerProxy object.
-
-    Methods can be added to the MultiCall using normal
-    method call syntax e.g.:
-
-    multicall = MultiCall(server_proxy)
-    multicall.add(2,3)
-    multicall.get_address("Guido")
-
-    To execute the multicall, call the MultiCall object e.g.:
-
-    add_result, address = multicall()
-    """
-
-    def __init__(self, server):
-        self.__server = server
-        self.__call_list = []
-
-    def __repr__(self):
-        return "<MultiCall at %x>" % id(self)
-
-    __str__ = __repr__
-
-    def __getattr__(self, name):
-        return _MultiCallMethod(self.__call_list, name)
-
-    def __call__(self):
-        marshalled_list = []
-        for name, args in self.__call_list:
-            marshalled_list.append({'methodName' : name, 'params' : args})
-
-        return MultiCallIterator(self.__server.system.multicall(marshalled_list))
-
-# --------------------------------------------------------------------
-# convenience functions
-
-##
-# Create a parser object, and connect it to an unmarshalling instance.
-# This function picks the fastest available XML parser.
-#
-# return A (parser, unmarshaller) tuple.
-
-def getparser(use_datetime=0):
-    """getparser() -> parser, unmarshaller
-
-    Create an instance of the fastest available parser, and attach it
-    to an unmarshalling object.  Return both objects.
-    """
-    if use_datetime and not datetime:
-        raise ValueError("the datetime module is not available")
-    if FastParser and FastUnmarshaller:
-        if use_datetime:
-            mkdatetime = _datetime_type
-        else:
-            mkdatetime = _datetime
-        target = FastUnmarshaller(True, False, _binary, mkdatetime, Fault)
-        parser = FastParser(target)
-    else:
-        target = Unmarshaller(use_datetime=use_datetime)
-        if FastParser:
-            parser = FastParser(target)
-        elif SgmlopParser:
-            parser = SgmlopParser(target)
-        elif ExpatParser:
-            parser = ExpatParser(target)
-        else:
-            parser = SlowParser(target)
-    return parser, target
-
-##
-# Convert a Python tuple or a Fault instance to an XML-RPC packet.
-#
-# @def dumps(params, **options)
-# @param params A tuple or Fault instance.
-# @keyparam methodname If given, create a methodCall request for
-#     this method name.
-# @keyparam methodresponse If given, create a methodResponse packet.
-#     If used with a tuple, the tuple must be a singleton (that is,
-#     it must contain exactly one element).
-# @keyparam encoding The packet encoding.
-# @return A string containing marshalled data.
-
-def dumps(params, methodname=None, methodresponse=None, encoding=None,
-          allow_none=0):
-    """data [,options] -> marshalled data
-
-    Convert an argument tuple or a Fault instance to an XML-RPC
-    request (or response, if the methodresponse option is used).
-
-    In addition to the data object, the following options can be given
-    as keyword arguments:
-
-        methodname: the method name for a methodCall packet
-
-        methodresponse: true to create a methodResponse packet.
-        If this option is used with a tuple, the tuple must be
-        a singleton (i.e. it can contain only one element).
-
-        encoding: the packet encoding (default is UTF-8)
-
-    All 8-bit strings in the data structure are assumed to use the
-    packet encoding.  Unicode strings are automatically converted,
-    where necessary.
-    """
-
-    assert isinstance(params, TupleType) or isinstance(params, Fault), \
-           "argument must be tuple or Fault instance"
-
-    if isinstance(params, Fault):
-        methodresponse = 1
-    elif methodresponse and isinstance(params, TupleType):
-        assert len(params) == 1, "response tuple must be a singleton"
-
-    if not encoding:
-        encoding = "utf-8"
-
-    if FastMarshaller:
-        m = FastMarshaller(encoding)
-    else:
-        m = Marshaller(encoding, allow_none)
-
-    data = m.dumps(params)
-
-    if encoding != "utf-8":
-        xmlheader = "<?xml version='1.0' encoding='%s'?>\n" % str(encoding)
-    else:
-        xmlheader = "<?xml version='1.0'?>\n" # utf-8 is default
-
-    # standard XML-RPC wrappings
-    if methodname:
-        # a method call
-        if not isinstance(methodname, StringType):
-            methodname = methodname.encode(encoding)
-        data = (
-            xmlheader,
-            "<methodCall>\n"
-            "<methodName>", methodname, "</methodName>\n",
-            data,
-            "</methodCall>\n"
-            )
-    elif methodresponse:
-        # a method response, or a fault structure
-        data = (
-            xmlheader,
-            "<methodResponse>\n",
-            data,
-            "</methodResponse>\n"
-            )
-    else:
-        return data # return as is
-    return string.join(data, "")
-
-##
-# Convert an XML-RPC packet to a Python object.  If the XML-RPC packet
-# represents a fault condition, this function raises a Fault exception.
-#
-# @param data An XML-RPC packet, given as an 8-bit string.
-# @return A tuple containing the unpacked data, and the method name
-#     (None if not present).
-# @see Fault
-
-def loads(data, use_datetime=0):
-    """data -> unmarshalled data, method name
-
-    Convert an XML-RPC packet to unmarshalled data plus a method
-    name (None if not present).
-
-    If the XML-RPC packet represents a fault condition, this function
-    raises a Fault exception.
-    """
-    p, u = getparser(use_datetime=use_datetime)
-    p.feed(data)
-    p.close()
-    return u.close(), u.getmethodname()
-
-
-# --------------------------------------------------------------------
-# request dispatcher
-
-class _Method:
-    # some magic to bind an XML-RPC method to an RPC server.
-    # supports "nested" methods (e.g. examples.getStateName)
-    def __init__(self, send, name):
-        self.__send = send
-        self.__name = name
-    def __getattr__(self, name):
-        return _Method(self.__send, "%s.%s" % (self.__name, name))
-    def __call__(self, *args):
-        return self.__send(self.__name, args)
-
-##
-# Standard transport class for XML-RPC over HTTP.
-# <p>
-# You can create custom transports by subclassing this method, and
-# overriding selected methods.
-
-class Transport:
-    """Handles an HTTP transaction to an XML-RPC server."""
-
-    # client identifier (may be overridden)
-    user_agent = "xmlrpclib.py/%s (by www.pythonware.com)" % __version__
-
-    def __init__(self, use_datetime=0):
-        self._use_datetime = use_datetime
-
-    ##
-    # Send a complete request, and parse the response.
-    #
-    # @param host Target host.
-    # @param handler Target PRC handler.
-    # @param request_body XML-RPC request body.
-    # @param verbose Debugging flag.
-    # @return Parsed response.
-
-    def request(self, host, handler, request_body, verbose=0):
-        # issue XML-RPC request
-
-        h = self.make_connection(host)
-        if verbose:
-            h.set_debuglevel(1)
-
-        self.send_request(h, handler, request_body)
-        self.send_host(h, host)
-        self.send_user_agent(h)
-        self.send_content(h, request_body)
-
-        errcode, errmsg, headers = h.getreply()
-
-        if errcode != 200:
-            raise ProtocolError(
-                host + handler,
-                errcode, errmsg,
-                headers
-                )
-
-        self.verbose = verbose
-
-        try:
-            sock = h._conn.sock
-        except AttributeError:
-            sock = None
-
-        return self._parse_response(h.getfile(), sock)
-
-    ##
-    # Create parser.
-    #
-    # @return A 2-tuple containing a parser and a unmarshaller.
-
-    def getparser(self):
-        # get parser and unmarshaller
-        return getparser(use_datetime=self._use_datetime)
-
-    ##
-    # Get authorization info from host parameter
-    # Host may be a string, or a (host, x509-dict) tuple; if a string,
-    # it is checked for a "user:pw@host" format, and a "Basic
-    # Authentication" header is added if appropriate.
-    #
-    # @param host Host descriptor (URL or (URL, x509 info) tuple).
-    # @return A 3-tuple containing (actual host, extra headers,
-    #     x509 info).  The header and x509 fields may be None.
-
-    def get_host_info(self, host):
-
-        x509 = {}
-        if isinstance(host, TupleType):
-            host, x509 = host
-
-        import urllib
-        auth, host = urllib.splituser(host)
-
-        if auth:
-            import base64
-            auth = base64.encodestring(urllib.unquote(auth))
-            auth = string.join(string.split(auth), "") # get rid of whitespace
-            extra_headers = [
-                ("Authorization", "Basic " + auth)
-                ]
-        else:
-            extra_headers = None
-
-        return host, extra_headers, x509
-
-    ##
-    # Connect to server.
-    #
-    # @param host Target host.
-    # @return A connection handle.
-
-    def make_connection(self, host):
-        # create a HTTP connection object from a host descriptor
-        import httplib
-        host, extra_headers, x509 = self.get_host_info(host)
-        return httplib.HTTP(host)
-
-    ##
-    # Send request header.
-    #
-    # @param connection Connection handle.
-    # @param handler Target RPC handler.
-    # @param request_body XML-RPC body.
-
-    def send_request(self, connection, handler, request_body):
-        connection.putrequest("POST", handler)
-
-    ##
-    # Send host name.
-    #
-    # @param connection Connection handle.
-    # @param host Host name.
-
-    def send_host(self, connection, host):
-        host, extra_headers, x509 = self.get_host_info(host)
-        connection.putheader("Host", host)
-        if extra_headers:
-            if isinstance(extra_headers, DictType):
-                extra_headers = extra_headers.items()
-            for key, value in extra_headers:
-                connection.putheader(key, value)
-
-    ##
-    # Send user-agent identifier.
-    #
-    # @param connection Connection handle.
-
-    def send_user_agent(self, connection):
-        connection.putheader("User-Agent", self.user_agent)
-
-    ##
-    # Send request body.
-    #
-    # @param connection Connection handle.
-    # @param request_body XML-RPC request body.
-
-    def send_content(self, connection, request_body):
-        connection.putheader("Content-Type", "text/xml")
-        connection.putheader("Content-Length", str(len(request_body)))
-        connection.endheaders()
-        if request_body:
-            connection.send(request_body)
-
-    ##
-    # Parse response.
-    #
-    # @param file Stream.
-    # @return Response tuple and target method.
-
-    def parse_response(self, file):
-        # compatibility interface
-        return self._parse_response(file, None)
-
-    ##
-    # Parse response (alternate interface).  This is similar to the
-    # parse_response method, but also provides direct access to the
-    # underlying socket object (where available).
-    #
-    # @param file Stream.
-    # @param sock Socket handle (or None, if the socket object
-    #    could not be accessed).
-    # @return Response tuple and target method.
-
-    def _parse_response(self, file, sock):
-        # read response from input file/socket, and parse it
-
-        p, u = self.getparser()
-
-        while 1:
-            if sock:
-                response = sock.recv(1024)
-            else:
-                response = file.read(1024)
-            if not response:
-                break
-            if self.verbose:
-                sys.stdout.write("body: %s\n" % repr(response))
-            p.feed(response)
-
-        file.close()
-        p.close()
-
-        return u.close()
-
-##
-# Standard transport class for XML-RPC over HTTPS.
-
-class SafeTransport(Transport):
-    """Handles an HTTPS transaction to an XML-RPC server."""
-
-    # FIXME: mostly untested
-
-    def make_connection(self, host):
-        # create a HTTPS connection object from a host descriptor
-        # host may be a string, or a (host, x509-dict) tuple
-        import httplib
-        host, extra_headers, x509 = self.get_host_info(host)
-        try:
-            HTTPS = httplib.HTTPS
-        except AttributeError:
-            raise NotImplementedError(
-                "your version of httplib doesn't support HTTPS"
-                )
-        else:
-            return HTTPS(host, None, **(x509 or {}))
-
-##
-# Standard server proxy.  This class establishes a virtual connection
-# to an XML-RPC server.
-# <p>
-# This class is available as ServerProxy and Server.  New code should
-# use ServerProxy, to avoid confusion.
-#
-# @def ServerProxy(uri, **options)
-# @param uri The connection point on the server.
-# @keyparam transport A transport factory, compatible with the
-#    standard transport class.
-# @keyparam encoding The default encoding used for 8-bit strings
-#    (default is UTF-8).
-# @keyparam verbose Use a true value to enable debugging output.
-#    (printed to standard output).
-# @see Transport
-
-class ServerProxy:
-    """uri [,options] -> a logical connection to an XML-RPC server
-
-    uri is the connection point on the server, given as
-    scheme://host/target.
-
-    The standard implementation always supports the "http" scheme.  If
-    SSL socket support is available (Python 2.0), it also supports
-    "https".
-
-    If the target part and the slash preceding it are both omitted,
-    "/RPC2" is assumed.
-
-    The following options can be given as keyword arguments:
-
-        transport: a transport factory
-        encoding: the request encoding (default is UTF-8)
-
-    All 8-bit strings passed to the server proxy are assumed to use
-    the given encoding.
-    """
-
-    def __init__(self, uri, transport=None, encoding=None, verbose=0,
-                 allow_none=0, use_datetime=0):
-        # establish a "logical" server connection
-
-        # get the url
-        import urllib
-        type, uri = urllib.splittype(uri)
-        if type not in ("http", "https"):
-            raise IOError("unsupported XML-RPC protocol")
-        self.__host, self.__handler = urllib.splithost(uri)
-        if not self.__handler:
-            self.__handler = "/RPC2"
-
-        if transport is None:
-            if type == "https":
-                transport = SafeTransport(use_datetime=use_datetime)
-            else:
-                transport = Transport(use_datetime=use_datetime)
-        self.__transport = transport
-
-        self.__encoding = encoding
-        self.__verbose = verbose
-        self.__allow_none = allow_none
-
-    def __request(self, methodname, params):
-        # call a method on the remote server
-
-        request = dumps(params, methodname, encoding=self.__encoding,
-                        allow_none=self.__allow_none)
-
-        response = self.__transport.request(
-            self.__host,
-            self.__handler,
-            request,
-            verbose=self.__verbose
-            )
-
-        if len(response) == 1:
-            response = response[0]
-
-        return response
-
-    def __repr__(self):
-        return (
-            "<ServerProxy for %s%s>" % 
-            (self.__host, self.__handler)
-            )
-
-    __str__ = __repr__
-
-    def __getattr__(self, name):
-        # magic method dispatcher
-        return _Method(self.__request, name)
-
-    # note: to call a remote object with an non-standard name, use
-    # result getattr(server, "strange-python-name")(args)
-
-# compatibility
-
-Server = ServerProxy
-
-# --------------------------------------------------------------------
-# test code
-
-if __name__ == "__main__":
-
-    # simple test program (from the XML-RPC specification)
-
-    # server = ServerProxy("http://localhost:8000") # local server
-    server = ServerProxy("http://time.xmlrpc.com/RPC2")
-
-    sys.stdout.write('%s\n' % server)
-
-    try:
-        sys.stdout.write('%s\n' % (server.currentTime.getCurrentTime(),))
-    except Error:
-        import traceback;traceback.print_exc()
-
-    multi = MultiCall(server)
-    multi.currentTime.getCurrentTime()
-    multi.currentTime.getCurrentTime()
-    try:
-        for response in multi():
-            sys.stdout.write('%s\n' % (response,))
-    except Error:
-        import traceback;traceback.print_exc()
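
The hunk above drops the bundled copy of xmlrpclib.py from the pydev helpers. For reference, a minimal client-side sketch of the API that file defines (ServerProxy for single calls, MultiCall for boxcarred ones), modelled on the test block at the end of the removed file; the endpoint URL is the one used there, and the standard-library xmlrpclib is assumed to be available:

import xmlrpclib   # standard-library module; the bundled copy above is being removed

server = xmlrpclib.ServerProxy("http://time.xmlrpc.com/RPC2")   # endpoint from the removed test block

# single call: attribute access builds the dotted method name, calling it sends the request
try:
    print(server.currentTime.getCurrentTime())
except xmlrpclib.Error:
    import traceback; traceback.print_exc()

# boxcar several calls into one system.multicall round trip
multi = xmlrpclib.MultiCall(server)
multi.currentTime.getCurrentTime()
multi.currentTime.getCurrentTime()
try:
    for response in multi():
        print(response)
except xmlrpclib.Error:
    import traceback; traceback.print_exc()
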
diff --git a/python/helpers/pydev/django_debug.py b/python/helpers/pydev/django_debug.py
index 417ff01..2b17864 100644
--- a/python/helpers/pydev/django_debug.py
+++ b/python/helpers/pydev/django_debug.py
@@ -2,6 +2,7 @@
 from django_frame import DjangoTemplateFrame
 from pydevd_comm import CMD_SET_BREAK
 from pydevd_constants import DJANGO_SUSPEND, GetThreadId, DictContains
+from pydevd_file_utils import NormFileToServer
 from pydevd_breakpoints import LineBreakpoint
 import pydevd_vars
 import traceback
diff --git a/python/helpers/pydev/fix_getpass.py b/python/helpers/pydev/fix_getpass.py
index c81d935..160acc8 100644
--- a/python/helpers/pydev/fix_getpass.py
+++ b/python/helpers/pydev/fix_getpass.py
@@ -1,10 +1,13 @@
 def fixGetpass():
-  import getpass
-  import warnings
-  fallback = getattr(getpass, 'fallback_getpass', None) # >= 2.6
-  if not fallback:
-      fallback = getpass.default_getpass # <= 2.5
-  getpass.getpass = fallback
-  if hasattr(getpass, 'GetPassWarning'):
-      warnings.simplefilter("ignore", category=getpass.GetPassWarning)
+    try:
+        import getpass
+    except ImportError:
+        return #If we can't import it, we can't fix it
+    import warnings
+    fallback = getattr(getpass, 'fallback_getpass', None) # >= 2.6
+    if not fallback:
+        fallback = getpass.default_getpass # <= 2.5
+    getpass.getpass = fallback
+    if hasattr(getpass, 'GetPassWarning'):
+        warnings.simplefilter("ignore", category=getpass.GetPassWarning)
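
The reworked fixGetpass above swaps getpass.getpass for its non-interactive fallback and mutes GetPassWarning, which matters when the IDE owns stdin. A minimal usage sketch, assuming fixGetpass is called before the first prompt (the fallback may echo the input):

from fix_getpass import fixGetpass
import getpass

fixGetpass()                                # install fallback_getpass / default_getpass, silence GetPassWarning
password = getpass.getpass("Password: ")    # now uses the fallback prompt, which may echo the input
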
 
diff --git a/python/helpers/pydev/merge_pydev_pycharm.txt b/python/helpers/pydev/merge_pydev_pycharm.txt
index 1cbd356..e5e10f7 100644
--- a/python/helpers/pydev/merge_pydev_pycharm.txt
+++ b/python/helpers/pydev/merge_pydev_pycharm.txt
@@ -35,13 +35,25 @@
 
 - When the code is interrupted, the buffer in the python side is cleared.
 
-- GEvent debugging: for remote debugging, one has to import pydevd before doing the gevent patching -- even if
-    pydevd.settrace will only be done later.
-
+- GEvent debugging: PyDev no longer uses the threading module (it uses the thread
+    primitives directly), so gevent debugging can work even if pydevd is used for
+    remote debugging.
+    
     Also, the gevent debugging should probably be closer to the stackless debugging,
     where we actually show the live stackless threads -- so, we should show the live
-    gevent greenlets -- which the current version doesn't do.
+    gevent greenlets -- which the current version doesn't do (future work).
+    
+- Supporting Jython 2.2 onwards (note: CPython only tested with 2.7/3.3)
 
+- When there are big sets/tuples/lists/dicts, not all of the items will be shown, so that the
+    debugger speed doesn't suffer (the user should use the console to inspect the remaining
+    items in that case). The limit was set to show up to 300 items (pydevd_resolver.MAX_ITEMS_TO_HANDLE)
+
+- Monkey-patching qt (QThread/QRunnable) to enable the debugger to work. 
+    Notes:
+        - It must be imported before the user actually runs their code (as the definitions of QThread/QRunnable
+        are monkey-patched), so, for the remote debugger to work, pydevd must be imported at the start of
+        the program, even if pydevd.settrace will only be used later on. 
 
 Things to be fixed in PyCharm:
 --------------------------------
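
The note above on Qt monkey-patching implies a specific ordering for remote debugging: pydevd has to be imported before user code touches QThread/QRunnable, while settrace can come much later. A hedged sketch of that ordering, assuming the pydevd import applies the Qt patch as the note describes; 'localhost' and port 5678 are placeholders for whatever the IDE listens on:

import pydevd                      # first, so QThread/QRunnable are already patched when Qt is used
from PyQt4 import QtCore           # user code imports Qt only afterwards

class Worker(QtCore.QThread):      # subclasses the patched QThread
    def run(self):
        pass                       # tracing is set up before this body executes

# ... much later, when the remote debugger should actually attach:
pydevd.settrace('localhost', port=5678, suspend=False)
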
diff --git a/python/helpers/pydev/pycompletionserver.py b/python/helpers/pydev/pycompletionserver.py
index 0b11cb6..154ad63 100644
--- a/python/helpers/pydev/pycompletionserver.py
+++ b/python/helpers/pydev/pycompletionserver.py
@@ -17,23 +17,17 @@
     setattr(__builtin__, 'True', 1)  # Python 3.0 does not accept __builtin__.True = 1 in its syntax
     setattr(__builtin__, 'False', 0)
 
-import pydevd_constants
+from pydevd_constants import IS_JYTHON
 
-try:
-    from java.lang import Thread
-    IS_JYTHON = True
+if IS_JYTHON:
+    import java.lang
     SERVER_NAME = 'jycompletionserver'
     import _pydev_jy_imports_tipper  # as _pydev_imports_tipper #changed to be backward compatible with 1.5
     _pydev_imports_tipper = _pydev_jy_imports_tipper
 
-except ImportError:
+else:
     # it is python
-    IS_JYTHON = False
     SERVER_NAME = 'pycompletionserver'
-    if pydevd_constants.USE_LIB_COPY:
-        from _pydev_threading import Thread
-    else:
-        from threading import Thread
     import _pydev_imports_tipper
 
 
@@ -185,14 +179,16 @@
 
         return '%s(%s)%s' % (MSG_COMPLETIONS, ''.join(compMsg), MSG_END)
 
+class Exit(Exception):
+    pass
 
-class T(Thread):
+class CompletionServer:
 
     def __init__(self, port):
-        Thread.__init__(self)
         self.ended = False
         self.port = port
         self.socket = None  # socket to send messages.
+        self.exit_process_on_kill = True
         self.processor = Processor()
 
 
@@ -266,7 +262,7 @@
                 while data.find(MSG_END) == -1:
                     received = self.socket.recv(BUFFER_SIZE)
                     if len(received) == 0:
-                        sys.exit(0)  # ok, connection ended
+                        raise Exit()  # ok, connection ended
                     if IS_PYTHON3K:
                         data = data + received.decode('utf-8')
                     else:
@@ -278,7 +274,7 @@
                             dbg(SERVER_NAME + ' kill message received', INFO1)
                             # break if we received kill message.
                             self.ended = True
-                            sys.exit(0)
+                            raise Exit()
 
                         dbg(SERVER_NAME + ' starting keep alive thread', INFO2)
 
@@ -359,7 +355,7 @@
 
                             else:
                                 self.send(MSG_INVALID_REQUEST)
-                    except SystemExit:
+                    except Exit:
                         self.send(self.getCompletionsMessage(None, [('Exit:', 'SystemExit', '')]))
                         raise
 
@@ -378,11 +374,12 @@
 
             self.socket.close()
             self.ended = True
-            sys.exit(0)  # connection broken
+            raise Exit()  # connection broken
 
 
-        except SystemExit:
-            raise
+        except Exit:
+            if self.exit_process_on_kill:
+                sys.exit(0)
             # No need to log SystemExit error
         except:
             s = StringIO.StringIO()
@@ -399,8 +396,6 @@
 
     port = int(sys.argv[1])  # this is from where we want to receive messages.
 
-    t = T(port)
+    t = CompletionServer(port)
     dbg(SERVER_NAME + ' will start', INFO1)
-    t.start()
-    time.sleep(5)
-    t.join()
+    t.run()
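
The hunk above replaces sys.exit(0) calls deep inside the completion loop with a dedicated Exit exception, so the loop unwinds cleanly and only the top-level handler decides (via exit_process_on_kill) whether to terminate the process. A stripped-down, self-contained sketch of that control-flow pattern; Server and receive are hypothetical stand-ins, not the real pycompletionserver code:

import sys

class Exit(Exception):
    """Signals that the serving loop should stop (instead of calling sys.exit deep inside it)."""

class Server(object):                      # hypothetical stand-in for CompletionServer
    def __init__(self):
        self.exit_process_on_kill = True   # embedders can turn this off to keep the process alive

    def receive(self):
        return 'KILL'                      # placeholder for the real socket read

    def run(self):
        try:
            while True:
                if self.receive() == 'KILL':
                    raise Exit()           # unwind the loop cleanly
        except Exit:
            if self.exit_process_on_kill:
                sys.exit(0)                # only the top level decides to terminate the process
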
diff --git a/python/helpers/pydev/pydev_console_utils.py b/python/helpers/pydev/pydev_console_utils.py
index bd7b7de..6e53218 100644
--- a/python/helpers/pydev/pydev_console_utils.py
+++ b/python/helpers/pydev/pydev_console_utils.py
@@ -1,6 +1,5 @@
 from pydev_imports import xmlrpclib, _queue, Exec
 import sys
-from pydevd_constants import USE_LIB_COPY
 from pydevd_constants import IS_JYTHON
 from _pydev_imps import _pydev_thread as thread
 import pydevd_xml
@@ -418,10 +417,7 @@
             try:
                 # Try to import the packages needed to attach the debugger
                 import pydevd
-                if USE_LIB_COPY:
-                    import _pydev_threading as threading
-                else:
-                    import threading
+                import _pydev_threading as threading
 
             except:
                 # This happens on Jython embedded in host eclipse
diff --git a/python/helpers/pydev/pydev_imports.py b/python/helpers/pydev/pydev_imports.py
index 69804a8..c5132cf 100644
--- a/python/helpers/pydev/pydev_imports.py
+++ b/python/helpers/pydev/pydev_imports.py
@@ -53,9 +53,9 @@
     from pydevd_exec2 import Exec
 
 try:
-    from urllib import quote
+    from urllib import quote, quote_plus, unquote_plus
 except:
-    from urllib.parse import quote #@UnresolvedImport
+    from urllib.parse import quote, quote_plus, unquote_plus #@UnresolvedImport
 
 
 import os
diff --git a/python/helpers/pydev/pydev_ipython/inputhookglut.py b/python/helpers/pydev/pydev_ipython/inputhookglut.py
index e1e67e9..477e48d 100644
--- a/python/helpers/pydev/pydev_ipython/inputhookglut.py
+++ b/python/helpers/pydev/pydev_ipython/inputhookglut.py
@@ -29,6 +29,7 @@
 #-----------------------------------------------------------------------------
 # Imports
 #-----------------------------------------------------------------------------
+import os
 import sys
 from _pydev_imps import _pydev_time as time
 import signal
diff --git a/python/helpers/pydev/pydev_ipython/inputhookpyglet.py b/python/helpers/pydev/pydev_ipython/inputhookpyglet.py
index 64dd2e5..94a8ac7 100644
--- a/python/helpers/pydev/pydev_ipython/inputhookpyglet.py
+++ b/python/helpers/pydev/pydev_ipython/inputhookpyglet.py
@@ -20,6 +20,7 @@
 # Imports
 #-----------------------------------------------------------------------------
 
+import os
 import sys
 from _pydev_imps import _pydev_time as time
 from timeit import default_timer as clock
diff --git a/python/helpers/pydev/pydev_ipython/inputhookqt4.py b/python/helpers/pydev/pydev_ipython/inputhookqt4.py
index 27598fa..b7e1cf0 100644
--- a/python/helpers/pydev/pydev_ipython/inputhookqt4.py
+++ b/python/helpers/pydev/pydev_ipython/inputhookqt4.py
@@ -19,11 +19,7 @@
 import os
 import signal
 
-from pydevd_constants import USE_LIB_COPY
-if USE_LIB_COPY:
-    import _pydev_threading as threading
-else:
-    import threading
+import threading
 
 
 from pydev_ipython.qt_for_kernel import QtCore, QtGui
diff --git a/python/helpers/pydev/pydev_ipython_console.py b/python/helpers/pydev/pydev_ipython_console.py
index 0c51dfe..68c2674 100644
--- a/python/helpers/pydev/pydev_ipython_console.py
+++ b/python/helpers/pydev/pydev_ipython_console.py
@@ -9,7 +9,7 @@
 # Uncomment to force PyDev standard shell.
 # raise ImportError()
 
-from pydev_ipython_console_011 import PyDevFrontEnd
+from pydev_ipython_console_011 import get_pydev_frontend
 
 #=======================================================================================================================
 # InterpreterInterface
@@ -23,7 +23,7 @@
         BaseInterpreterInterface.__init__(self, mainThread)
         self.client_port = client_port
         self.host = host
-        self.interpreter = PyDevFrontEnd(host, client_port)
+        self.interpreter = get_pydev_frontend(host, client_port)
         self._input_error_printed = False
         self.notification_succeeded = False
         self.notification_tries = 0
diff --git a/python/helpers/pydev/pydev_ipython_console_011.py b/python/helpers/pydev/pydev_ipython_console_011.py
index 54458e7..717aacc 100644
--- a/python/helpers/pydev/pydev_ipython_console_011.py
+++ b/python/helpers/pydev/pydev_ipython_console_011.py
@@ -19,7 +19,6 @@
 import codeop
 
 from IPython.core.error import UsageError
-from IPython.core.inputsplitter import IPythonInputSplitter
 from IPython.core.completer import IPCompleter
 from IPython.core.interactiveshell import InteractiveShell, InteractiveShellABC
 from IPython.core.usage import default_banner_parts
@@ -53,7 +52,8 @@
     print(strng)
 
 def create_editor_hook(pydev_host, pydev_client_port):
-    def call_editor(self, filename, line=0, wait=True):
+    
+    def call_editor(filename, line=0, wait=True):
         """ Open an editor in PyDev """
         if line is None:
             line = 0
@@ -62,6 +62,9 @@
         # we don't launch a process. This is more like what happens in the zmqshell
         filename = os.path.abspath(filename)
 
+        # import sys
+        # sys.__stderr__.write('Calling editor at: %s:%s\n' % (pydev_host, pydev_client_port))
+        
         # Tell PyDev to open the editor
         server = xmlrpclib.Server('http://%s:%s' % (pydev_host, pydev_client_port))
         server.IPythonEditor(filename, str(line))
@@ -291,22 +294,17 @@
 InteractiveShellABC.register(PyDevTerminalInteractiveShell)  # @UndefinedVariable
 
 #=======================================================================================================================
-# PyDevFrontEnd
+# _PyDevFrontEnd
 #=======================================================================================================================
-class PyDevFrontEnd:
+class _PyDevFrontEnd:
 
     version = release.__version__
 
-    def __init__(self, pydev_host, pydev_client_port, *args, **kwarg):
+    def __init__(self, *args, **kwarg):
 
         # Create and initialize our IPython instance.
         self.ipython = PyDevTerminalInteractiveShell.instance()
 
-        # Back channel to PyDev to open editors (in the future other
-        # info may go back this way. This is the same channel that is
-        # used to get stdin, see StdIn in pydev_console_utils)
-        self.ipython.set_hook('editor', create_editor_hook(pydev_host, pydev_client_port))
-
         # Display the IPython banner, this has version info and
         # help info
         self.ipython.show_banner()
@@ -412,6 +410,9 @@
 
     def getNamespace(self):
         return self.ipython.user_ns
+    
+    def clearBuffer(self):
+        del self._curr_exec_lines[:] 
 
     def addExec(self, line):
         if self._curr_exec_lines:
@@ -463,3 +464,28 @@
 # rely on using the inputhooks directly.
 for name in pydev_ipython.inputhook.__all__:
     setattr(IPython.lib.inputhook, name, getattr(pydev_ipython.inputhook, name))
+
+
+class _PyDevFrontEndContainer:
+    _instance = None
+    _last_host_port = None
+    
+def get_pydev_frontend(pydev_host, pydev_client_port):
+    if _PyDevFrontEndContainer._instance is None:
+        _PyDevFrontEndContainer._instance = _PyDevFrontEnd()
+        
+    if _PyDevFrontEndContainer._last_host_port != (pydev_host, pydev_client_port):
+        _PyDevFrontEndContainer._last_host_port = pydev_host, pydev_client_port
+        
+        # Back channel to PyDev to open editors (in the future other
+        # info may go back this way. This is the same channel that is
+        # used to get stdin, see StdIn in pydev_console_utils)
+        _PyDevFrontEndContainer._instance.ipython.hooks['editor'] = create_editor_hook(pydev_host, pydev_client_port)
+
+        # Note: setting the callback directly because setting it with set_hook would actually create a chain instead
+        # of overwriting it at each new call.
+        # _PyDevFrontEndContainer._instance.ipython.set_hook('editor', create_editor_hook(pydev_host, pydev_client_port))
+        
+    return _PyDevFrontEndContainer._instance
+        
+    
\ No newline at end of file
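
get_pydev_frontend above caches a single _PyDevFrontEnd and re-installs the editor hook only when the (host, port) pair changes, assigning ipython.hooks['editor'] directly so repeated calls do not chain hooks. A self-contained sketch of that cache-and-reconfigure pattern; get_frontend, factory and install_editor_hook are hypothetical stand-ins:

class _FrontEndContainer(object):
    _instance = None
    _last_host_port = None

def get_frontend(host, port, factory=dict, install_editor_hook=lambda fe, h, p: None):
    # factory and install_editor_hook stand in for _PyDevFrontEnd and the editor hook
    if _FrontEndContainer._instance is None:
        _FrontEndContainer._instance = factory()                         # built once
    if _FrontEndContainer._last_host_port != (host, port):
        _FrontEndContainer._last_host_port = (host, port)
        install_editor_hook(_FrontEndContainer._instance, host, port)    # overwrite, do not chain
    return _FrontEndContainer._instance

assert get_frontend('localhost', 8001) is get_frontend('localhost', 8002)   # same instance, hook re-pointed
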
diff --git a/python/helpers/pydev/pydev_localhost.py b/python/helpers/pydev/pydev_localhost.py
index 13c4d02..eacabb5 100644
--- a/python/helpers/pydev/pydev_localhost.py
+++ b/python/helpers/pydev/pydev_localhost.py
@@ -1,4 +1,4 @@
-
+import pydevd_constants
 from _pydev_imps import _pydev_socket as socket
 
 _cache = None
diff --git a/python/helpers/pydev/pydev_monkey.py b/python/helpers/pydev/pydev_monkey.py
index 2b12ed2..d92378e 100644
--- a/python/helpers/pydev/pydev_monkey.py
+++ b/python/helpers/pydev/pydev_monkey.py
@@ -392,40 +392,42 @@
 
 class _NewThreadStartupWithTrace:
 
-    def __init__(self, original_func):
+    def __init__(self, original_func, args, kwargs):
         self.original_func = original_func
+        self.args = args
+        self.kwargs = kwargs
 
-    def __call__(self, *args, **kwargs):
+    def __call__(self):
         from pydevd_comm import GetGlobalDebugger
         global_debugger = GetGlobalDebugger()
         if global_debugger is not None:
             global_debugger.SetTrace(global_debugger.trace_dispatch)
 
-        return self.original_func(*args, **kwargs)
+        return self.original_func(*self.args, **self.kwargs)
 
 class _NewThreadStartupWithoutTrace:
 
-    def __init__(self, original_func):
+    def __init__(self, original_func, args, kwargs):
         self.original_func = original_func
+        self.args = args
+        self.kwargs = kwargs
 
-    def __call__(self, *args, **kwargs):
-        return self.original_func(*args, **kwargs)
+    def __call__(self):
+        return self.original_func(*self.args, **self.kwargs)
 
 _UseNewThreadStartup = _NewThreadStartupWithTrace
 
-def _get_threading_modules():
-    threading_modules = []
-    from _pydev_imps import _pydev_thread
-    threading_modules.append(_pydev_thread)
+def _get_threading_modules_to_patch():
+    threading_modules_to_patch = []
     try:
         import thread as _thread
-        threading_modules.append(_thread)
+        threading_modules_to_patch.append(_thread)
     except:
         import _thread
-        threading_modules.append(_thread)
-    return threading_modules
+        threading_modules_to_patch.append(_thread)
+    return threading_modules_to_patch
 
-threading_modules = _get_threading_modules()
+threading_modules_to_patch = _get_threading_modules_to_patch()
 
 
 
@@ -439,12 +441,12 @@
 
     class ClassWithPydevStartNewThread:
 
-        def pydev_start_new_thread(self, function, args, kwargs={}):
+        def pydev_start_new_thread(self, function, args=(), kwargs={}):
             '''
             We need to replace the original thread.start_new_thread with this function so that threads started
             through it and not through the threading module are properly traced.
             '''
-            return _original_start_new_thread(_UseNewThreadStartup(function), args, kwargs)
+            return _original_start_new_thread(_UseNewThreadStartup(function, args, kwargs), ())
 
     # This is a hack for the situation where the thread.start_new_thread is declared inside a class, such as the one below
     # class F(object):
@@ -465,11 +467,11 @@
         pass
 
 def patch_thread_modules():
-    for t in threading_modules:
+    for t in threading_modules_to_patch:
         patch_thread_module(t)
 
 def undo_patch_thread_modules():
-    for t in threading_modules:
+    for t in threading_modules_to_patch:
         try:
             t.start_new_thread = t._original_start_new_thread
         except:
@@ -494,3 +496,9 @@
     '''
     global _UseNewThreadStartup
     _UseNewThreadStartup = _NewThreadStartupWithTrace
+
+def get_original_start_new_thread(threading_module):
+    try:
+        return threading_module._original_start_new_thread
+    except:
+        return threading_module.start_new_thread
diff --git a/python/helpers/pydev/pydev_monkey_qt.py b/python/helpers/pydev/pydev_monkey_qt.py
new file mode 100644
index 0000000..9c62686
--- /dev/null
+++ b/python/helpers/pydev/pydev_monkey_qt.py
@@ -0,0 +1,89 @@
+from __future__ import nested_scopes
+
+def set_trace_in_qt():
+    import pydevd_tracing
+    from pydevd_comm import GetGlobalDebugger
+    debugger = GetGlobalDebugger()
+    if debugger is not None:
+        pydevd_tracing.SetTrace(debugger.trace_dispatch)
+        
+        
+_patched_qt = False
+def patch_qt():
+    '''
+    This method patches qt (PySide or PyQt4) so that we have hooks to set the tracing for QThread.
+    '''
+    
+    # Avoid patching more than once
+    global _patched_qt
+    if _patched_qt:
+        return
+    
+    _patched_qt = True
+    
+    try:
+        from PySide import QtCore
+    except:
+        try:
+            from PyQt4 import QtCore
+        except:
+            return
+    
+    _original_thread_init = QtCore.QThread.__init__
+    _original_runnable_init = QtCore.QRunnable.__init__
+    
+    
+    class FuncWrapper:
+        
+        def __init__(self, original):
+            self._original = original
+        
+        def __call__(self, *args, **kwargs):
+            set_trace_in_qt()
+            return self._original(*args, **kwargs)
+    
+    class StartedSignalWrapper:  # Wrapper for the QThread.started signal
+        
+        def __init__(self, thread, original_started):
+            self.thread = thread
+            self.original_started = original_started
+            
+        def connect(self, func, *args, **kwargs):
+            return self.original_started.connect(FuncWrapper(func), *args, **kwargs)
+        
+        def disconnect(self, *args, **kwargs):
+            return self.original_started.disconnect(*args, **kwargs)
+        
+        def emit(self, *args, **kwargs):
+            return self.original_started.emit(*args, **kwargs)
+            
+    
+    class ThreadWrapper(QtCore.QThread):  # Wrapper for QThread
+        
+        def __init__(self, *args, **kwargs):
+            _original_thread_init(self)
+    
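+            # Replace run() and the started signal so set_trace_in_qt() runs in the thread/slot that actually executes them.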
+            self._original_run = self.run
+            self.run = self._new_run
+            self._original_started = self.started
+            self.started = StartedSignalWrapper(self, self.started)
+            
+        def _new_run(self):
+            set_trace_in_qt()
+            return self._original_run()
+    
+    class RunnableWrapper(QtCore.QRunnable):  # Wrapper for QRunnable
+        
+        def __init__(self, *args, **kwargs):
+            _original_runnable_init(self)
+    
+            self._original_run = self.run
+            self.run = self._new_run
+            
+            
+        def _new_run(self):
+            set_trace_in_qt()
+            return self._original_run()
+            
+    QtCore.QThread = ThreadWrapper
+    QtCore.QRunnable = RunnableWrapper
diff --git a/python/helpers/pydev/pydev_run_in_console.py b/python/helpers/pydev/pydev_run_in_console.py
new file mode 100644
index 0000000..1b8e1d2
--- /dev/null
+++ b/python/helpers/pydev/pydev_run_in_console.py
@@ -0,0 +1,83 @@
+
+from pydevconsole import *
+
+import pydev_imports
+
+
+def run_file(file, globals=None, locals=None):
+    if os.path.isdir(file):
+        new_target = os.path.join(file, '__main__.py')
+        if os.path.isfile(new_target):
+            file = new_target
+
+    if globals is None:
+        # patch provided by: Scott Schlesier - when script is run, it does not
+        # use globals from pydevd:
+        # This will prevent the pydevd script from contaminating the namespace for the script to be debugged
+
+        # pretend pydevd is not the main module, and
+        # convince the file to be debugged that it was loaded as main
+        sys.modules['pydevd'] = sys.modules['__main__']
+        sys.modules['pydevd'].__name__ = 'pydevd'
+
+        from imp import new_module
+        m = new_module('__main__')
+        sys.modules['__main__'] = m
+        if hasattr(sys.modules['pydevd'], '__loader__'):
+            setattr(m, '__loader__', getattr(sys.modules['pydevd'], '__loader__'))
+
+        m.__file__ = file
+        globals = m.__dict__
+        try:
+            globals['__builtins__'] = __builtins__
+        except NameError:
+            pass  # Not there on Jython...
+
+    if locals is None:
+        locals = globals
+
+
+    print('Running %s' % file)
+    pydev_imports.execfile(file, globals, locals)  # execute the script
+
+    return globals
+
+#=======================================================================================================================
+# main
+#=======================================================================================================================
+if __name__ == '__main__':
+    sys.stdin = BaseStdIn()
+    port, client_port = sys.argv[1:3]
+
+    del sys.argv[1]
+    del sys.argv[1]
+
+    file = sys.argv[1]
+
+    import pydev_localhost
+
+    if int(port) == 0 and int(client_port) == 0:
+        (h, p) = pydev_localhost.get_socket_name()
+
+        client_port = p
+
+    host = pydev_localhost.get_localhost()
+
+
+    #replace exit (see comments on method)
+    #note that this does not work in jython!!! (sys method can't be replaced).
+    sys.exit = DoExit
+
+    interpreter = InterpreterInterface(host, int(client_port), threading.currentThread())
+
+    server_thread = threading.Thread(target=start_server,
+                                     name='ServerThread',
+                                     args=(host, int(port), interpreter))
+    server_thread.setDaemon(True)
+    server_thread.start()
+
+    globals = run_file(file, None, None)
+
+    interpreter.getNamespace().update(globals)
+
+    process_exec_queue(interpreter)
\ No newline at end of file
diff --git a/python/helpers/pydev/pydev_runfiles_parallel.py b/python/helpers/pydev/pydev_runfiles_parallel.py
index e14f36d..91f5528 100644
--- a/python/helpers/pydev/pydev_runfiles_parallel.py
+++ b/python/helpers/pydev/pydev_runfiles_parallel.py
@@ -1,4 +1,5 @@
 import unittest
+from _pydev_imps import _pydev_thread
 try:
     import Queue
 except:
@@ -282,13 +283,9 @@
             if False:
                 proc = subprocess.Popen(args, env=os.environ, shell=False, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
                 
-                stdout_thread = threading.Thread(target=self._reader_thread,args=(proc.stdout, sys.stdout))
-                stdout_thread.setDaemon(True)
-                stdout_thread.start()
+                _pydev_thread.start_new_thread(self._reader_thread,(proc.stdout, sys.stdout))
     
-                stderr_thread = threading.Thread(target=self._reader_thread,args=(proc.stderr, sys.stderr))
-                stderr_thread.setDaemon(True)
-                stderr_thread.start()
+                _pydev_thread.start_new_thread(self._reader_thread,(proc.stderr, sys.stderr))
             else:
                 proc = subprocess.Popen(args, env=os.environ, shell=False)
                 proc.wait()
diff --git a/python/helpers/pydev/pydev_runfiles_pytest2.py b/python/helpers/pydev/pydev_runfiles_pytest2.py
index e40d60f..6a4d94f 100644
--- a/python/helpers/pydev/pydev_runfiles_pytest2.py
+++ b/python/helpers/pydev/pydev_runfiles_pytest2.py
@@ -1,7 +1,4 @@
-import pickle
-import zlib
-import base64
-import os
+import pickle, zlib, base64, os
 import py
 from py._code import code  # @UnresolvedImport
 import pydev_runfiles_xml_rpc
@@ -11,7 +8,6 @@
 import time
 
 
-
 #===================================================================================================
 # Load filters with tests we should skip
 #===================================================================================================
diff --git a/python/helpers/pydev/pydev_runfiles_xml_rpc.py b/python/helpers/pydev/pydev_runfiles_xml_rpc.py
index bcaa38a..062f778 100644
--- a/python/helpers/pydev/pydev_runfiles_xml_rpc.py
+++ b/python/helpers/pydev/pydev_runfiles_xml_rpc.py
@@ -1,3 +1,4 @@
+import threading
 import traceback
 import warnings
 
diff --git a/python/helpers/pydev/pydevconsole.py b/python/helpers/pydev/pydevconsole.py
index 2f07a82..8d4375f 100644
--- a/python/helpers/pydev/pydevconsole.py
+++ b/python/helpers/pydev/pydevconsole.py
@@ -1,3 +1,5 @@
+from _pydev_imps._pydev_thread import start_new_thread
+
 try:
     from code import InteractiveConsole
 except ImportError:
@@ -9,12 +11,7 @@
 import os
 import sys
 
-from pydevd_constants import USE_LIB_COPY
-
-if USE_LIB_COPY:
-    import _pydev_threading as threading
-else:
-    import threading
+import _pydev_threading as threading
 
 import traceback
 import fix_getpass
@@ -61,13 +58,14 @@
         self.code_fragment = code_fragment
         self.more = None
 
-    @staticmethod
+    
     def symbol_for_fragment(code_fragment):
         if code_fragment.is_single_line:
             symbol = 'single'
         else:
             symbol = 'exec' # Jython doesn't support this
         return symbol
+    symbol_for_fragment = staticmethod(symbol_for_fragment) 
 
     def run(self):
         text = self.code_fragment.text
@@ -228,7 +226,7 @@
 #=======================================================================================================================
 # _DoExit
 #=======================================================================================================================
-def _DoExit(*args):
+def DoExit(*args):
     '''
         We have to override the exit because calling sys.exit will only actually exit the main thread,
         and as we're in a Xml-rpc server, that won't work.
@@ -300,15 +298,11 @@
 def StartServer(host, port, client_port):
     #replace exit (see comments on method)
     #note that this does not work in jython!!! (sys method can't be replaced).
-    sys.exit = _DoExit
+    sys.exit = DoExit
 
     interpreter = InterpreterInterface(host, client_port, threading.currentThread())
 
-    server_thread = threading.Thread(target=start_server,
-                                     name='ServerThread',
-                                     args=(host, port, interpreter))
-    server_thread.setDaemon(True)
-    server_thread.start()
+    start_new_thread(start_server,(host, port, interpreter))
 
     process_exec_queue(interpreter)
 
diff --git a/python/helpers/pydev/pydevd.py b/python/helpers/pydev/pydevd.py
index 1733c26..9d0da09 100644
--- a/python/helpers/pydev/pydevd.py
+++ b/python/helpers/pydev/pydevd.py
@@ -1,10 +1,13 @@
 #IMPORTANT: pydevd_constants must be the 1st thing defined because it'll keep a reference to the original sys._getframe
 from __future__ import nested_scopes # Jython 2.1 support
+from pydevd_constants import * # @UnusedWildImport
+
+import pydev_monkey_qt
+pydev_monkey_qt.patch_qt()
 
 import traceback
 
 from django_debug import DjangoLineBreakpoint
-from pydevd_signature import SignatureFactory
 from pydevd_frame import add_exception_to_frame
 import pydev_imports
 from pydevd_breakpoints import * #@UnusedWildImport
@@ -59,7 +62,8 @@
                          StartServer, \
                          InternalSetNextStatementThread, \
                          ReloadCodeCommand, \
-    CMD_SET_PY_EXCEPTION, \
+                         ID_TO_MEANING,\
+                         CMD_SET_PY_EXCEPTION, \
                          CMD_IGNORE_THROWN_EXCEPTION_AT,\
                          InternalGetBreakpointException, \
                          InternalSendCurrExceptionTrace,\
@@ -81,14 +85,12 @@
 import pydevd_dont_trace
 import pydevd_traceproperty
 
-from _pydev_imps import _pydev_time as time
+from _pydev_imps import _pydev_time as time, _pydev_thread
 
-if USE_LIB_COPY:
-    import _pydev_threading as threading
-else:
-    import threading
+import _pydev_threading as threading
 
 import os
+import atexit
 
 
 threadingEnumerate = threading.enumerate
@@ -112,9 +114,11 @@
               '_pydev_execfile.py':1,
               '_pydev_jython_execfile.py':1,
               '_pydev_threading':1,
+              '_pydev_Queue':1,
               'django_debug.py':1,
               'django_frame.py':1,
               'pydev_log.py':1,
+              'pydev_monkey.py':1 ,
               'pydevd.py':1 ,
               'pydevd_additional_thread_info.py':1,
               'pydevd_comm.py':1,
@@ -219,7 +223,7 @@
 
 
 def killAllPydevThreads():
-    threads = threadingEnumerate()
+    threads = DictKeys(PyDBDaemonThread.created_pydb_daemon_threads)
     for t in threads:
         if hasattr(t, 'doKillPydevThread'):
             t.doKillPydevThread()
@@ -233,12 +237,23 @@
     def __init__(self, pyDb):
         PyDBDaemonThread.__init__(self)
         self.pyDb = pyDb
-        self.setDaemon(False)
         self.setName('pydevd.CheckAliveThread')
 
     def OnRun(self):
             if self.dontTraceMe:
-                self.pyDb.SetTrace(None) # no debugging on this thread
+
+                disable_tracing = True
+        
+                if pydevd_vm_type.GetVmType() == pydevd_vm_type.PydevdVmType.JYTHON and sys.hexversion <= 0x020201f0:
+                    # don't run untraced threads if we're in jython 2.2.1 or lower
+                    # jython bug: if we start a thread and another thread changes the tracing facility
+                    # it affects other threads (it's not set only for the thread but globally)
+                    # Bug: http://sourceforge.net/tracker/index.php?func=detail&aid=1870039&group_id=12867&atid=112867
+                    disable_tracing = False
+        
+                if disable_tracing:
+                    pydevd_tracing.SetTrace(None)  # no debugging on this thread
+                    
             while not self.killReceived:
                 if not self.pyDb.haveAliveThreads():
                     try:
@@ -298,8 +313,8 @@
 
         self.django_exception_break = {}
         self.readyToRun = False
-        self._main_lock = threading.Lock()
-        self._lock_running_thread_ids = threading.Lock()
+        self._main_lock = _pydev_thread.allocate_lock()
+        self._lock_running_thread_ids = _pydev_thread.allocate_lock()
         self._py_db_command_thread_event = threading.Event()
         CustomFramesContainer._py_db_command_thread_event = self._py_db_command_thread_event
         self._finishDebuggingSession = False
@@ -332,7 +347,17 @@
 
     def haveAliveThreads(self):
         for t in threadingEnumerate():
-            if not isinstance(t, PyDBDaemonThread) and isThreadAlive(t) and not t.isDaemon():
+            if isinstance(t, PyDBDaemonThread):
+                pydev_log.error_once(
+                    'Error in debugger: Found PyDBDaemonThread through threading.enumerate().\n')
+                
+            if getattr(t, 'is_pydev_daemon_thread', False):
+                #Important: Jython 2.5rc4 has a bug where a thread created with thread.start_new_thread won't be
+                #set as a daemon thread, so, we also have to check for the 'is_pydev_daemon_thread' flag.
+                #See: https://github.com/fabioz/PyDev.Debugger/issues/11
+                continue
+            
+            if isThreadAlive(t) and not t.isDaemon():
                 return True
 
         return False
@@ -387,11 +412,9 @@
         if thread_id == "*":
             threads = threadingEnumerate()
             for t in threads:
-                thread_name = t.getName()
-                if not thread_name.startswith('pydevd.') or thread_name == 'pydevd.CommandThread':
-                    thread_id = GetThreadId(t)
-                    queue = self.getInternalQueue(thread_id)
-                    queue.put(int_cmd)
+                thread_id = GetThreadId(t)
+                queue = self.getInternalQueue(thread_id)
+                queue.put(int_cmd)
 
         else:
             queue = self.getInternalQueue(thread_id)
@@ -442,7 +465,13 @@
                 for t in all_threads:
                     thread_id = GetThreadId(t)
 
-                    if not isinstance(t, PyDBDaemonThread) and isThreadAlive(t):
+                    if isinstance(t, PyDBDaemonThread):
+                        pydev_log.error_once('Found PyDBDaemonThread in threading.enumerate.')
+                        
+                    elif getattr(t, 'is_pydev_daemon_thread', False):
+                        pass # I.e.: skip the DummyThreads created from pydev daemon threads
+                        
+                    elif isThreadAlive(t):
                         program_threads_alive[thread_id] = t
 
                         if not DictContains(self._running_thread_ids, thread_id):
@@ -505,19 +534,18 @@
         threads = threadingEnumerate()
         try:
             for t in threads:
-                if not t.getName().startswith('pydevd.'):
-                    # TODO: optimize so that we only actually add that tracing if it's in
-                    # the new breakpoint context.
-                    additionalInfo = None
-                    try:
-                        additionalInfo = t.additionalInfo
-                    except AttributeError:
-                        pass  # that's ok, no info currently set
+                # TODO: optimize so that we only actually add that tracing if it's in
+                # the new breakpoint context.
+                additionalInfo = None
+                try:
+                    additionalInfo = t.additionalInfo
+                except AttributeError:
+                    pass  # that's ok, no info currently set
 
-                    if additionalInfo is not None:
-                        for frame in additionalInfo.IterFrames():
-                            if frame is not ignore_frame:
-                                self.SetTraceForFrameAndParents(frame, overwrite_prev_trace=overwrite_prev_trace)
+                if additionalInfo is not None:
+                    for frame in additionalInfo.IterFrames():
+                        if frame is not ignore_frame:
+                            self.SetTraceForFrameAndParents(frame, overwrite_prev_trace=overwrite_prev_trace)
         finally:
             frame = None
             t = None
@@ -592,7 +620,7 @@
         it may be worth refactoring it (actually, reordering the ifs so that the ones used mostly come before
         probably will give better performance).
         '''
-        #print ID_TO_MEANING[str(cmd_id)], repr(text)
+        #print(ID_TO_MEANING[str(cmd_id)], repr(text))
 
         self._main_lock.acquire()
         try:
@@ -834,7 +862,7 @@
                     id_to_pybreakpoint[breakpoint_id] = breakpoint
                     self.consolidate_breakpoints(file, id_to_pybreakpoint, breakpoints)
 
-                    self.setTracingForUntracedContexts()
+                    self.setTracingForUntracedContexts(overwrite_prev_trace=True)
 
                 elif cmd_id == CMD_REMOVE_BREAK:
                     #command to remove some breakpoint
@@ -862,7 +890,7 @@
                             raise NameError(breakpoint_type)
 
                         try:
-                            id_to_pybreakpoint = file_to_id_to_breakpoint[file]
+                            id_to_pybreakpoint = file_to_id_to_breakpoint.get(file, {})
                             if DebugInfoHolder.DEBUG_TRACE_BREAKPOINTS > 0:
                                 existing = id_to_pybreakpoint[breakpoint_id]
                                 sys.stderr.write('Removed breakpoint:%s - line:%s - func_name:%s (id: %s)\n' % (
@@ -1319,7 +1347,7 @@
             if self._finishDebuggingSession and not self._terminationEventSent:
                 #that was not working very well because jython gave some socket errors
                 try:
-                    threads = threadingEnumerate()
+                    threads = DictKeys(PyDBDaemonThread.created_pydb_daemon_threads)
                     for t in threads:
                         if hasattr(t, 'doKillPydevThread'):
                             t.doKillPydevThread()
@@ -1332,10 +1360,10 @@
 
             is_file_to_ignore = DictContains(DONT_TRACE, base) #we don't want to debug threading or anything related to pydevd
 
+            #print('trace_dispatch', base, frame.f_lineno, event, frame.f_code.co_name, is_file_to_ignore)
             if is_file_to_ignore:
                 return None
 
-            #print('trace_dispatch', base, frame.f_lineno, event, frame.f_code.co_name)
             try:
                 #this shouldn't give an exception, but it could happen... (python bug)
                 #see http://mail.python.org/pipermail/python-bugs-list/2007-June/038796.html
@@ -1378,9 +1406,14 @@
 
         except Exception:
             # Log it
-            if traceback is not None:
-                # This can actually happen during the interpreter shutdown in Python 2.7
-                traceback.print_exc()
+            try:
+                if traceback is not None:
+                    # This can actually happen during the interpreter shutdown in Python 2.7
+                    traceback.print_exc()
+            except:
+                # Error logging? We're really in the interpreter shutdown...
+                # (https://github.com/fabioz/PyDev.Debugger/issues/8) 
+                pass
             return None
 
     if USE_PSYCO_OPTIMIZATION:
@@ -1401,8 +1434,9 @@
 
 
 
-    def SetTraceForFrameAndParents(self, frame, also_add_to_passed_frame=True, overwrite_prev_trace=False):
-        dispatch_func = self.trace_dispatch
+    def SetTraceForFrameAndParents(self, frame, also_add_to_passed_frame=True, overwrite_prev_trace=False, dispatch_func=None):
+        if dispatch_func is None:
+            dispatch_func = self.trace_dispatch
 
         if also_add_to_passed_frame:
             self.update_trace(frame, dispatch_func, overwrite_prev_trace)
@@ -1432,15 +1466,8 @@
 
     def prepareToRun(self):
         ''' Shared code to prepare debugging by installing traces and registering threads '''
-
-        # for completeness, we'll register the pydevd.reader & pydevd.writer threads
-        net = NetCommand(str(CMD_THREAD_CREATE), 0, '<xml><thread name="pydevd.reader" id="-1"/></xml>')
-        self.writer.addCommand(net)
-        net = NetCommand(str(CMD_THREAD_CREATE), 0, '<xml><thread name="pydevd.writer" id="-1"/></xml>')
-        self.writer.addCommand(net)
-
-        pydevd_tracing.SetTrace(self.trace_dispatch)
         self.patch_threads()
+        pydevd_tracing.SetTrace(self.trace_dispatch)
 
 
         PyDBCommandThread(self).start()
@@ -1519,6 +1546,8 @@
 
         pydev_imports.execfile(file, globals, locals)  # execute the script
 
+        return globals
+
     def exiting(self):
         sys.stdout.flush()
         sys.stderr.flush()
@@ -1526,6 +1555,22 @@
         cmd = self.cmdFactory.makeExitMessage()
         self.writer.addCommand(cmd)
 
+    def wait_for_commands(self, globals):
+        thread = threading.currentThread()
+        import pydevd_frame_utils
+        frame = pydevd_frame_utils.Frame(None, -1, pydevd_frame_utils.FCode("Console",
+                                                                            os.path.abspath(os.path.dirname(__file__))), globals, globals)
+        thread_id = GetThreadId(thread)
+        import pydevd_vars
+        pydevd_vars.addAdditionalFrameById(thread_id, {id(frame): frame})
+
+        cmd = self.cmdFactory.makeShowConsoleMessage(thread_id, frame)
+        self.writer.addCommand(cmd)
+
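+        # Keep processing internal debugger commands so the console frame registered above stays responsive.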
+        while True:
+            self.processInternalCommands()
+            time.sleep(0.01)
+
 def set_debug(setup):
     setup['DEBUG_RECORD_SOCKET_READS'] = True
     setup['DEBUG_TRACE_BREAKPOINTS'] = 1
@@ -1543,43 +1588,51 @@
     setup['multiproc'] = False #Used by PyCharm (reuses connection: ssh tunneling)
     setup['multiprocess'] = False # Used by PyDev (creates new connection to ide)
     setup['save-signatures'] = False
+    setup['print-in-debugger-startup'] = False
+    setup['cmd-line'] = False
     i = 0
     del argv[0]
     while (i < len(argv)):
-        if (argv[i] == '--port'):
+        if argv[i] == '--port':
             del argv[i]
             setup['port'] = int(argv[i])
             del argv[i]
-        elif (argv[i] == '--vm_type'):
+        elif argv[i] == '--vm_type':
             del argv[i]
             setup['vm_type'] = argv[i]
             del argv[i]
-        elif (argv[i] == '--client'):
+        elif argv[i] == '--client':
             del argv[i]
             setup['client'] = argv[i]
             del argv[i]
-        elif (argv[i] == '--server'):
+        elif argv[i] == '--server':
             del argv[i]
             setup['server'] = True
-        elif (argv[i] == '--file'):
+        elif argv[i] == '--file':
             del argv[i]
             setup['file'] = argv[i]
             i = len(argv) # pop out, file is our last argument
-        elif (argv[i] == '--DEBUG_RECORD_SOCKET_READS'):
+        elif argv[i] == '--DEBUG_RECORD_SOCKET_READS':
             del argv[i]
             setup['DEBUG_RECORD_SOCKET_READS'] = True
-        elif (argv[i] == '--DEBUG'):
+        elif argv[i] == '--DEBUG':
             del argv[i]
             set_debug(setup)
-        elif (argv[i] == '--multiproc'):
+        elif argv[i] == '--multiproc':
             del argv[i]
             setup['multiproc'] = True
-        elif (argv[i] == '--multiprocess'):
+        elif argv[i] == '--multiprocess':
             del argv[i]
             setup['multiprocess'] = True
-        elif (argv[i] == '--save-signatures'):
+        elif argv[i] == '--save-signatures':
             del argv[i]
             setup['save-signatures'] = True
+        elif argv[i] == '--print-in-debugger-startup':
+            del argv[i]
+            setup['print-in-debugger-startup'] = True
+        elif argv[i] == '--cmd-line':
+            del argv[i]
+            setup['cmd-line'] = True
         else:
             raise ValueError("unexpected option " + argv[i])
     return setup
@@ -1590,18 +1643,6 @@
     if doExit:
         sys.exit(0)
 
-def SetTraceForParents(frame, dispatch_func):
-    frame = frame.f_back
-    while frame:
-        if frame.f_trace is None:
-            frame.f_trace = dispatch_func
-
-        frame = frame.f_back
-    del frame
-
-def exit_hook():
-    debugger = GetGlobalDebugger()
-    debugger.exiting()
 
 def initStdoutRedirect():
     if not getattr(sys, 'stdoutBuf', None):
@@ -1666,7 +1707,7 @@
 
 
 
-_set_trace_lock = threading.Lock()
+_set_trace_lock = _pydev_thread.allocate_lock()
 
 def _locked_settrace(
     host,
@@ -1705,11 +1746,6 @@
         bufferStdOutToServer = stdoutToServer
         bufferStdErrToServer = stderrToServer
 
-        net = NetCommand(str(CMD_THREAD_CREATE), 0, '<xml><thread name="pydevd.reader" id="-1"/></xml>')
-        debugger.writer.addCommand(net)
-        net = NetCommand(str(CMD_THREAD_CREATE), 0, '<xml><thread name="pydevd.writer" id="-1"/></xml>')
-        debugger.writer.addCommand(net)
-
         if bufferStdOutToServer:
             initStdoutRedirect()
 
@@ -1748,7 +1784,9 @@
             # As this is the first connection, also set tracing for any untraced threads
             debugger.setTracingForUntracedContexts(ignore_frame=GetFrame(), overwrite_prev_trace=overwrite_prev_trace)
 
-        sys.exitfunc = exit_hook
+        # Stop the tracing as the last thing before the actual shutdown for a clean exit.
+        atexit.register(stoptrace)
+        
         #Suspend as the last thing after all tracing is in place.
         if suspend:
             debugger.setSuspend(t, CMD_SET_BREAK)
@@ -1793,16 +1831,15 @@
 
         from pydev_monkey import undo_patch_thread_modules
         undo_patch_thread_modules()
-
+ 
         debugger = GetGlobalDebugger()
-
+ 
         if debugger:
-            debugger.trace_dispatch = None
-
-            debugger.SetTraceForFrameAndParents(GetFrame(), False)
-
+  
+            debugger.SetTraceForFrameAndParents(
+                GetFrame(), also_add_to_passed_frame=True, overwrite_prev_trace=True, dispatch_func=lambda *args:None)
             debugger.exiting()
-
+  
             killAllPydevThreads()
 
         connected = False
@@ -1830,6 +1867,11 @@
         self.dispatcher = dispatcher
         ReaderThread.__init__(self, self.dispatcher.client)
 
+    def OnRun(self):
+        dummy_thread = threading.currentThread()
+        dummy_thread.is_pydev_daemon_thread = False
+        return ReaderThread.OnRun(self)
+        
     def handleExcept(self):
         ReaderThread.handleExcept(self)
 
@@ -1893,6 +1935,7 @@
 # main
 #=======================================================================================================================
 if __name__ == '__main__':
+    
     # parse the command line. --file is our last argument that is required
     try:
         sys.original_argv = sys.argv[:]
@@ -1902,6 +1945,12 @@
         traceback.print_exc()
         usage(1)
 
+    if setup['print-in-debugger-startup']:
+        try:
+            pid = ' (pid: %s)' % os.getpid()
+        except:
+            pid = ''
+        sys.stderr.write("pydev debugger: starting%s\n" % pid)
 
     fix_getpass.fixGetpass()
 
@@ -2010,6 +2059,12 @@
     except:
         pass  # It's ok not having stackless there...
 
+    debugger = PyDB()
+
+    if setup['cmd-line']:
+        debugger.cmd_line = True
+
+
     if fix_app_engine_debug:
         sys.stderr.write("pydev debugger: google app engine integration enabled\n")
         curr_dir = os.path.dirname(__file__)
@@ -2022,10 +2077,8 @@
         sys.argv.insert(3, '--automatic_restart=no')
         sys.argv.insert(4, '--max_module_instances=1')
 
-        debugger = PyDB()
         # Run the dev_appserver
         debugger.run(setup['file'], None, None, set_trace=False)
-
     else:
         # as to get here all our imports are already resolved, the psyco module can be
         # changed and we'll still get the speedups in the debugger, as those functions
@@ -2041,12 +2094,12 @@
             import pydevd_psyco_stub
             sys.modules['psyco'] = pydevd_psyco_stub
 
-        debugger = PyDB()
-
         if setup['save-signatures']:
             if pydevd_vm_type.GetVmType() == pydevd_vm_type.PydevdVmType.JYTHON:
                 sys.stderr.write("Collecting run-time type information is not supported for Jython\n")
             else:
+                # Only import it if we're going to use it!
+                from pydevd_signature import SignatureFactory
                 debugger.signature_factory = SignatureFactory()
 
         try:
@@ -2058,4 +2111,9 @@
 
         connected = True  # Mark that we're connected when started from inside ide.
 
-        debugger.run(setup['file'], None, None)
+        globals = debugger.run(setup['file'], None, None)
+
+        if setup['cmd-line']:
+            debugger.wait_for_commands(globals)
+
+
diff --git a/python/helpers/pydev/pydevd_additional_thread_info.py b/python/helpers/pydev/pydevd_additional_thread_info.py
index fa906ad..76fb49e 100644
--- a/python/helpers/pydev/pydevd_additional_thread_info.py
+++ b/python/helpers/pydev/pydevd_additional_thread_info.py
@@ -1,9 +1,6 @@
 import sys
 from pydevd_constants import * #@UnusedWildImport
-if USE_LIB_COPY:
-    import _pydev_threading as threading
-else:
-    import threading
+from _pydev_imps import _pydev_thread
 from pydevd_frame import PyDBFrame
 import weakref
 
@@ -62,7 +59,7 @@
         #Or if the user compiled threadframe (from http://www.majid.info/mylos/stories/2004/06/10/threadframe.html)
 
         #NOT RLock!! (could deadlock if it was)
-        self.lock = threading.Lock()
+        self.lock = _pydev_thread.allocate_lock()
         self._acquire_lock = self.lock.acquire
         self._release_lock = self.lock.release
 
diff --git a/python/helpers/pydev/pydevd_breakpoints.py b/python/helpers/pydev/pydevd_breakpoints.py
index 82a230d..1171157 100644
--- a/python/helpers/pydev/pydevd_breakpoints.py
+++ b/python/helpers/pydev/pydevd_breakpoints.py
@@ -8,10 +8,7 @@
 _handle_exceptions = None
 
 
-if USE_LIB_COPY:
-    import _pydev_threading as threading
-else:
-    import threading
+import _pydev_threading as threading
 
 threadingCurrentThread = threading.currentThread
 
diff --git a/python/helpers/pydev/pydevd_comm.py b/python/helpers/pydev/pydevd_comm.py
index c7f39a1..92a588c 100644
--- a/python/helpers/pydev/pydevd_comm.py
+++ b/python/helpers/pydev/pydevd_comm.py
@@ -61,12 +61,9 @@
 
 import sys
 
-from _pydev_imps import _pydev_time as time
-
-if USE_LIB_COPY:
-    import _pydev_threading as threading
-else:
-    import threading
+from _pydev_imps import _pydev_time as time, _pydev_thread
+from _pydev_imps import _pydev_thread as thread
+import _pydev_threading as threading
 from _pydev_imps._pydev_socket import socket, AF_INET, SOCK_STREAM, SHUT_RD, SHUT_WR
 from pydev_imports import _queue
 
@@ -137,6 +134,7 @@
 CMD_SEND_CURR_EXCEPTION_TRACE_PROCEEDED = 139
 CMD_IGNORE_THROWN_EXCEPTION_AT = 140
 CMD_ENABLE_DONT_TRACE = 141
+CMD_SHOW_CONSOLE = 142
 
 
 
@@ -246,22 +244,42 @@
 #=======================================================================================================================
 # PyDBDaemonThread
 #=======================================================================================================================
-class PyDBDaemonThread(threading.Thread):
+class PyDBDaemonThread:
+    
+    created_pydb_daemon_threads = {}
 
     def __init__(self):
-        threading.Thread.__init__(self)
-        self.setDaemon(True)
+        # Note: subclasses are always daemon threads.
         self.killReceived = False
         self.dontTraceMe = True
 
-    def run(self):
-        if sys.platform.startswith("java"):
-            import org.python.core as PyCore #@UnresolvedImport
-            ss = PyCore.PySystemState()
-            # Note: Py.setSystemState() affects only the current thread.
-            PyCore.Py.setSystemState(ss)
+    def setName(self, name):
+        self.name = name
 
-        self.OnRun()
+    def start(self):
+        import pydev_monkey
+        start_new_thread = pydev_monkey.get_original_start_new_thread(_pydev_thread)
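+        # Use the original (unpatched) start_new_thread so debugger threads bypass the tracing wrapper installed by pydev_monkey.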
+        start_new_thread(self.run, ())
+
+    def run(self):
+        created_pydb_daemon = self.created_pydb_daemon_threads
+        created_pydb_daemon[self] = 1
+        dummy_thread = threading.currentThread()
+        dummy_thread.is_pydev_daemon_thread = True
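+        # Flag the dummy thread so the debugger can skip pydev daemon threads when enumerating user threads (see haveAliveThreads).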
+        try:
+            try:
+                if IS_JYTHON:
+                    import org.python.core as PyCore #@UnresolvedImport
+                    ss = PyCore.PySystemState()
+                    # Note: Py.setSystemState() affects only the current thread.
+                    PyCore.Py.setSystemState(ss)
+        
+                self.OnRun()
+            except:
+                if sys is not None and traceback is not None:
+                    traceback.print_exc()
+        finally:
+            del created_pydb_daemon[self]
 
     def OnRun(self):
         raise NotImplementedError('Should be reimplemented by: %s' % self.__class__)
@@ -272,7 +290,18 @@
 
     def stopTrace(self):
         if self.dontTraceMe:
-            pydevd_tracing.SetTrace(None) # no debugging on this thread
+            
+            disable_tracing = True
+    
+            if pydevd_vm_type.GetVmType() == pydevd_vm_type.PydevdVmType.JYTHON and sys.hexversion <= 0x020201f0:
+                # don't run untraced threads if we're in jython 2.2.1 or lower
+                # jython bug: if we start a thread and another thread changes the tracing facility
+                # it affects other threads (it's not set only for the thread but globally)
+                # Bug: http://sourceforge.net/tracker/index.php?func=detail&aid=1870039&group_id=12867&atid=112867
+                disable_tracing = False
+    
+            if disable_tracing:
+                pydevd_tracing.SetTrace(None)  # no debugging on this thread
 
 
 #=======================================================================================================================
@@ -355,7 +384,6 @@
     """ writer thread writes out the commands in an infinite loop """
     def __init__(self, sock):
         PyDBDaemonThread.__init__(self)
-        self.setDaemon(False)  #writer isn't daemon to be able to deliver all messages after main thread terminated
         self.sock = sock
         self.setName("pydevd.Writer")
         self.cmdQueue = _queue.Queue()
@@ -373,11 +401,16 @@
         """ just loop and write responses """
 
         self.stopTrace()
+        get_has_timeout = sys.hexversion >= 0x02030000 # 2.3 onwards have it.
         try:
             while True:
                 try:
                     try:
-                        cmd = self.cmdQueue.get(1, 0.1)
+                        if get_has_timeout:
+                            cmd = self.cmdQueue.get(1, 0.1)
+                        else:
+                            time.sleep(.01)
+                            cmd = self.cmdQueue.get(0)
                     except _queue.Empty:
                         if self.killReceived:
                             try:
@@ -734,6 +767,12 @@
             dbg.writer.addCommand(net)
         return net
 
+    def makeShowConsoleMessage(self, thread_id, frame):
+        try:
+            return NetCommand(CMD_SHOW_CONSOLE, 0, self.makeThreadSuspendStr(thread_id, frame, CMD_SHOW_CONSOLE, ''))
+        except:
+            return self.makeErrorMessage(0, GetExceptionTracebackStr())
+
     def makeExitMessage(self):
         try:
             net = NetCommand(CMD_EXIT, 0, '')
@@ -774,7 +813,7 @@
         self.thread_id = thread_id
         self.module_name = module_name
         self.executed = False
-        self.lock = threading.Lock()
+        self.lock = _pydev_thread.allocate_lock()
 
 
     def canBeExecutedBy(self, thread_id):
@@ -1155,8 +1194,8 @@
                 from pydevd_console import ConsoleMessage
                 console_message = ConsoleMessage()
                 console_message.add_console_message(
-                    pydevd_console.CONSOLE_ERROR, 
-                    "Select the valid frame in the debug view (thread: %s, frame: %s invalid)" % (self.thread_id, self.frame_id), 
+                    pydevd_console.CONSOLE_ERROR,
+                    "Select the valid frame in the debug view (thread: %s, frame: %s invalid)" % (self.thread_id, self.frame_id),
                 )
                 cmd = dbg.cmdFactory.makeErrorMessage(self.sequence, console_message.toXML())
         except:
diff --git a/python/helpers/pydev/pydevd_constants.py b/python/helpers/pydev/pydevd_constants.py
index 74e8974..e878d3b 100644
--- a/python/helpers/pydev/pydevd_constants.py
+++ b/python/helpers/pydev/pydevd_constants.py
@@ -72,13 +72,10 @@
 SUPPORT_GEVENT = os.getenv('GEVENT_SUPPORT', 'False') == 'True'
 
 USE_LIB_COPY = SUPPORT_GEVENT and not IS_PY3K and sys.version_info[1] >= 6
+import _pydev_threading as threading
 
-if USE_LIB_COPY:
-    import _pydev_threading as threading
-else:
-    import threading
-
-_nextThreadIdLock = threading.Lock()
+from _pydev_imps import _pydev_thread
+_nextThreadIdLock = _pydev_thread.allocate_lock()
 
 #=======================================================================================================================
 # Jython?
diff --git a/python/helpers/pydev/pydevd_custom_frames.py b/python/helpers/pydev/pydevd_custom_frames.py
index e259356..8709e6a 100644
--- a/python/helpers/pydev/pydevd_custom_frames.py
+++ b/python/helpers/pydev/pydevd_custom_frames.py
@@ -1,5 +1,6 @@
 from pydevd_constants import *  #@UnusedWildImport
 from pydevd_file_utils import GetFilenameAndBase
+from _pydev_imps import _pydev_thread
 threadingCurrentThread = threading.currentThread
 
 DEBUG = False
@@ -13,7 +14,7 @@
     
 def CustomFramesContainerInit(): #Note: no staticmethod on jython 2.1 (so, use free-function)
     
-    CustomFramesContainer.custom_frames_lock = threading.Lock()
+    CustomFramesContainer.custom_frames_lock = _pydev_thread.allocate_lock()
     
     # custom_frames can only be accessed if properly locked with custom_frames_lock! 
     # Key is a string identifying the frame (as well as the thread it belongs to). 
diff --git a/python/helpers/pydev/pydevd_file_utils.py b/python/helpers/pydev/pydevd_file_utils.py
index c135c4b..147aa66 100644
--- a/python/helpers/pydev/pydevd_file_utils.py
+++ b/python/helpers/pydev/pydevd_file_utils.py
@@ -38,6 +38,10 @@
         machine for the paths that'll actually have breakpoints).
 '''
 
+
+
+
+from pydevd_constants import *  #@UnusedWildImport
 import os.path
 import sys
 import traceback
diff --git a/python/helpers/pydev/pydevd_frame.py b/python/helpers/pydev/pydevd_frame.py
index 374d281..5d1e784 100644
--- a/python/helpers/pydev/pydevd_frame.py
+++ b/python/helpers/pydev/pydevd_frame.py
@@ -15,7 +15,11 @@
     CMD_STEP_INTO, CMD_SMART_STEP_INTO, CMD_RUN_TO_LINE, CMD_SET_NEXT_STATEMENT
 from pydevd_constants import *  # @UnusedWildImport
 from pydevd_file_utils import GetFilenameAndBase
-from pydevd_signature import sendSignatureCallTrace
+try:
+    from pydevd_signature import sendSignatureCallTrace
+except ImportError:
+    def sendSignatureCallTrace(*args, **kwargs):
+        pass
 import pydevd_vars
 import pydevd_dont_trace
 
diff --git a/python/helpers/pydev/pydevd_frame_utils.py b/python/helpers/pydev/pydevd_frame_utils.py
new file mode 100644
index 0000000..23becca
--- /dev/null
+++ b/python/helpers/pydev/pydevd_frame_utils.py
@@ -0,0 +1,21 @@
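+# Lightweight stand-ins mimicking the attributes of real Python frame and code objects (used by pydevd to build a fake console frame).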
+class Frame:
+    def __init__(
+            self,
+            f_back,
+            f_fileno,
+            f_code,
+            f_locals,
+            f_globals={},
+            f_trace=None):
+        self.f_back = f_back
+        self.f_lineno = f_fileno
+        self.f_code = f_code
+        self.f_locals = f_locals
+        self.f_globals = f_globals
+        self.f_trace = f_trace
+
+
+class FCode:
+    def __init__(self, name, filename):
+        self.co_name = name
+        self.co_filename = filename
\ No newline at end of file
diff --git a/python/helpers/pydev/pydevd_resolver.py b/python/helpers/pydev/pydevd_resolver.py
index 3fe895c..ad49bd8 100644
--- a/python/helpers/pydev/pydevd_resolver.py
+++ b/python/helpers/pydev/pydevd_resolver.py
@@ -13,10 +13,13 @@
     setattr(__builtin__, 'False', 0)
 
 import pydevd_constants
-from pydevd_constants import DictIterItems, xrange, izip
+from pydevd_constants import DictIterItems, xrange
 
 
-MAX_ITEMS_TO_HANDLE = 500
+# Note: 300 is already a lot to see in the outline (after that the user should really use the shell to get things)
+# and this also means we'll pass less information to the client side (which makes debugging faster).
+MAX_ITEMS_TO_HANDLE = 300 
+
 TOO_LARGE_MSG = 'Too large to show contents. Max items to show: ' + str(MAX_ITEMS_TO_HANDLE)
 TOO_LARGE_ATTR = 'Unable to handle:'
 
@@ -272,19 +275,20 @@
         return var[int(attribute)]
 
     def getDictionary(self, var):
-        #return dict( [ (i, x) for i, x in enumerate(var) ] )
-        # modified 'cause jython does not have enumerate support
         l = len(var)
         d = {}
 
-        if l < MAX_ITEMS_TO_HANDLE:
-            format = '%0' + str(int(len(str(l)))) + 'd'
+        format_str = '%0' + str(int(len(str(l)))) + 'd'
 
-
-            for i, item in izip(xrange(l), var):
-                d[ format % i ] = item
-        else:
-            d[TOO_LARGE_ATTR] = TOO_LARGE_MSG
+        i = 0
+        for item in var:
+            d[format_str % i] = item
+            i += 1
+            
+            if i > MAX_ITEMS_TO_HANDLE:
+                d[TOO_LARGE_ATTR] = TOO_LARGE_MSG
+                break
+                
         d['__len__'] = len(var)
         return d
 
@@ -381,13 +385,24 @@
         This resolves a numpy ndarray returning some metadata about the NDArray
     '''
 
+    def is_numeric(self, obj):
+        if not hasattr(obj, 'dtype'):
+            return False
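+        # numpy dtype kind codes: b=bool, i=signed int, u=unsigned int, f=float, c=complex.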
+        return obj.dtype.kind in 'biufc'
+
     def resolve(self, obj, attribute):
         if attribute == '__internals__':
             return defaultResolver.getDictionary(obj)
         if attribute == 'min':
-            return obj.min()
+            if self.is_numeric(obj):
+                return obj.min()
+            else:
+                return None
         if attribute == 'max':
-            return obj.max()
+            if self.is_numeric(obj):
+                return obj.max()
+            else:
+                return None
         if attribute == 'shape':
             return obj.shape
         if attribute == 'dtype':
@@ -403,8 +418,12 @@
             ret['min'] = 'ndarray too big, calculating min would slow down debugging'
             ret['max'] = 'ndarray too big, calculating max would slow down debugging'
         else:
-            ret['min'] = obj.min()
-            ret['max'] = obj.max()
+            if self.is_numeric(obj):
+                ret['min'] = obj.min()
+                ret['max'] = obj.max()
+            else:
+                ret['min'] = 'not a numeric object'
+                ret['max'] = 'not a numeric object'
         ret['shape'] = obj.shape
         ret['dtype'] = obj.dtype
         ret['size'] = obj.size
diff --git a/python/helpers/pydev/pydevd_signature.py b/python/helpers/pydev/pydevd_signature.py
index 03dc0eb..d7b37c8 100644
--- a/python/helpers/pydev/pydevd_signature.py
+++ b/python/helpers/pydev/pydevd_signature.py
@@ -1,8 +1,13 @@
 import inspect
-import trace
 import os
 
-trace._warn = lambda *args: None   # workaround for http://bugs.python.org/issue17143 (PY-8706)
+try:
+    import trace
+except ImportError:
+    pass
+else:
+    trace._warn = lambda *args: None   # workaround for http://bugs.python.org/issue17143 (PY-8706)
+
 import gc
 from pydevd_comm import CMD_SIGNATURE_CALL_TRACE, NetCommand
 import pydevd_vars
diff --git a/python/helpers/pydev/pydevd_tracing.py b/python/helpers/pydev/pydevd_tracing.py
index 7bc1ba5..d362462 100644
--- a/python/helpers/pydev/pydevd_tracing.py
+++ b/python/helpers/pydev/pydevd_tracing.py
@@ -1,4 +1,5 @@
 from pydevd_constants import * #@UnusedWildImport
+from _pydev_imps import _pydev_thread
 
 try:
     import cStringIO as StringIO #may not always be available @UnusedImport
@@ -8,10 +9,6 @@
     except:
         import io as StringIO
 
-if USE_LIB_COPY:
-    import _pydev_threading as threading
-else:
-    import threading
 
 import sys #@Reimport
 import traceback
@@ -21,7 +18,7 @@
     '''
     _original_tracing = None
     _warn = True
-    _lock = threading.Lock()
+    _lock = _pydev_thread.allocate_lock()
     _traceback_limit = 1
     _warnings_shown = {}
  
diff --git a/python/helpers/pydev/pydevd_vars.py b/python/helpers/pydev/pydevd_vars.py
index 0cc45f7..3baea5b 100644
--- a/python/helpers/pydev/pydevd_vars.py
+++ b/python/helpers/pydev/pydevd_vars.py
@@ -3,10 +3,12 @@
 """
 import pickle
 from django_frame import DjangoTemplateFrame
+from pydevd_constants import * #@UnusedWildImport
 from types import * #@UnusedWildImport
 
 from pydevd_custom_frames import getCustomFrame
 from pydevd_xml import *
+from _pydev_imps import _pydev_thread
 
 try:
     from StringIO import StringIO
@@ -14,13 +16,10 @@
     from io import StringIO
 import sys #@Reimport
 
-if USE_LIB_COPY:
-    import _pydev_threading as threading
-else:
-    import threading
+import _pydev_threading as threading
 import traceback
 import pydevd_save_locals
-from pydev_imports import Exec, execfile
+from pydev_imports import Exec, quote, execfile
 
 try:
     import types
@@ -69,7 +68,7 @@
 # AdditionalFramesContainer
 #===============================================================================
 class AdditionalFramesContainer:
-    lock = threading.Lock()
+    lock = _pydev_thread.allocate_lock()
     additional_frames = {} #dict of dicts
 
 
diff --git a/python/helpers/pydev/requirements.txt b/python/helpers/pydev/requirements.txt
new file mode 100644
index 0000000..048a4c6
--- /dev/null
+++ b/python/helpers/pydev/requirements.txt
@@ -0,0 +1,4 @@
+Django==1.6.5
+nose==1.3.3
+ipython==2.1.0
+numpy==1.8.2
\ No newline at end of file
diff --git a/python/helpers/pydev/runfiles.py b/python/helpers/pydev/runfiles.py
index 67c88be..c2db611 100644
--- a/python/helpers/pydev/runfiles.py
+++ b/python/helpers/pydev/runfiles.py
@@ -75,7 +75,7 @@
 
     if test_framework == 0:
 
-        pydev_runfiles.main(configuration)
+        return pydev_runfiles.main(configuration) #Note: still doesn't return a proper value.
 
     else:
         #We'll convert the parameters to what nose or py.test expects.
@@ -144,7 +144,8 @@
             import pydev_runfiles_nose
             PYDEV_NOSE_PLUGIN_SINGLETON = pydev_runfiles_nose.StartPydevNosePluginSingleton(configuration)
             argv.append('--with-pydevplugin')
-            nose.run(argv=argv, addplugins=[PYDEV_NOSE_PLUGIN_SINGLETON])
+            # Return 'not' because it will return 'success' (so, exit == 0 if success)
+            return not nose.run(argv=argv, addplugins=[PYDEV_NOSE_PLUGIN_SINGLETON])
 
         elif test_framework == PY_TEST_FRAMEWORK:
             if DEBUG:
@@ -189,7 +190,7 @@
 
             argv.append('-p')
             argv.append('pydev_runfiles_pytest2')
-            pytest.main(argv)
+            return pytest.main(argv)
 
         else:
             raise AssertionError('Cannot handle test framework: %s at this point.' % (test_framework,))
diff --git a/python/helpers/pydev/tests/check_pydevconsole.py b/python/helpers/pydev/tests/check_pydevconsole.py
deleted file mode 100644
index 7d1b7ee..0000000
--- a/python/helpers/pydev/tests/check_pydevconsole.py
+++ /dev/null
@@ -1,105 +0,0 @@
-import sys
-import os
-
-#Put pydevconsole in the path.
-sys.argv[0] = os.path.dirname(sys.argv[0]) 
-sys.path.insert(1, os.path.join(os.path.dirname(sys.argv[0])))
-
-print('Running tests with:', sys.executable)
-print('PYTHONPATH:')
-print('\n'.join(sorted(sys.path)))
-
-import threading
-import unittest
-
-import pydevconsole
-from pydev_imports import xmlrpclib, SimpleXMLRPCServer
-
-try:
-    raw_input
-    raw_input_name = 'raw_input'
-except NameError:
-    raw_input_name = 'input'
-
-#=======================================================================================================================
-# Test
-#=======================================================================================================================
-class Test(unittest.TestCase):
-
-    
-    def startClientThread(self, client_port):
-        class ClientThread(threading.Thread):
-            def __init__(self, client_port):
-                threading.Thread.__init__(self)
-                self.client_port = client_port
-                
-            def run(self):
-                class HandleRequestInput:
-                    def RequestInput(self):
-                        return 'RequestInput: OK'
-                
-                handle_request_input = HandleRequestInput()
-                
-                import pydev_localhost
-                print('Starting client with:', pydev_localhost.get_localhost(), self.client_port)
-                client_server = SimpleXMLRPCServer((pydev_localhost.get_localhost(), self.client_port), logRequests=False)
-                client_server.register_function(handle_request_input.RequestInput)
-                client_server.serve_forever()
-                
-        client_thread = ClientThread(client_port)
-        client_thread.setDaemon(True)
-        client_thread.start()
-        return client_thread
-
-        
-    def getFreeAddresses(self):
-        import socket
-        s = socket.socket()
-        s.bind(('', 0))
-        port0 = s.getsockname()[1]
-        
-        s1 = socket.socket()
-        s1.bind(('', 0))
-        port1 = s1.getsockname()[1]
-        s.close()
-        s1.close()
-        return port0, port1
-        
-        
-    def testServer(self):
-        client_port, server_port = self.getFreeAddresses()
-        class ServerThread(threading.Thread):
-            def __init__(self, client_port, server_port):
-                threading.Thread.__init__(self)
-                self.client_port = client_port
-                self.server_port = server_port
-                
-            def run(self):
-                import pydev_localhost
-                print('Starting server with:', pydev_localhost.get_localhost(), self.server_port, self.client_port)
-                pydevconsole.StartServer(pydev_localhost.get_localhost(), self.server_port, self.client_port)
-        server_thread = ServerThread(client_port, server_port)
-        server_thread.setDaemon(True)
-        server_thread.start()
-
-        client_thread = self.startClientThread(client_port) #@UnusedVariable
-        
-        import time
-        time.sleep(.3) #let's give it some time to start the threads
-        
-        import pydev_localhost
-        server = xmlrpclib.Server('http://%s:%s' % (pydev_localhost.get_localhost(), server_port))
-        server.addExec("import sys; print('Running with: %s %s' % (sys.executable or sys.platform, sys.version))")
-        server.addExec('class Foo:')
-        server.addExec('    pass')
-        server.addExec('')
-        server.addExec('foo = Foo()')
-        server.addExec('a = %s()' % raw_input_name)
-        server.addExec('print (a)')
-        
-#=======================================================================================================================
-# main        
-#=======================================================================================================================
-if __name__ == '__main__':
-    unittest.main()
-
diff --git a/python/helpers/pydev/tests/test_check_pydevconsole.py b/python/helpers/pydev/tests/test_check_pydevconsole.py
new file mode 100644
index 0000000..5d09968
--- /dev/null
+++ b/python/helpers/pydev/tests/test_check_pydevconsole.py
@@ -0,0 +1,120 @@
+import threading
+import unittest
+
+import pydevconsole
+from pydev_imports import xmlrpclib, SimpleXMLRPCServer
+import sys
+from pydev_localhost import get_localhost
+from pydev_ipython_console_011 import get_pydev_frontend
+
+try:
+    raw_input
+    raw_input_name = 'raw_input'
+except NameError:
+    raw_input_name = 'input'
+
+#=======================================================================================================================
+# Test
+#=======================================================================================================================
+class Test(unittest.TestCase):
+
+    
+    def startClientThread(self, client_port):
+        class ClientThread(threading.Thread):
+            def __init__(self, client_port):
+                threading.Thread.__init__(self)
+                self.client_port = client_port
+                
+            def run(self):
+                class HandleRequestInput:
+                    def RequestInput(self):
+                        client_thread.requested_input = True
+                        return 'RequestInput: OK'
+                    
+                    def NotifyFinished(self, *args, **kwargs):
+                        client_thread.notified_finished += 1
+                        return 1
+                
+                handle_request_input = HandleRequestInput()
+                
+                import pydev_localhost
+                self.client_server = client_server = SimpleXMLRPCServer((pydev_localhost.get_localhost(), self.client_port), logRequests=False)
+                client_server.register_function(handle_request_input.RequestInput)
+                client_server.register_function(handle_request_input.NotifyFinished)
+                client_server.serve_forever()
+                    
+            def shutdown(self):
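+                # The early return below skips the actual server shutdown; the daemon client thread just exits with the process.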
+                return
+                self.client_server.shutdown()
+
+        client_thread = ClientThread(client_port)
+        client_thread.requested_input = False
+        client_thread.notified_finished = 0
+        client_thread.setDaemon(True)
+        client_thread.start()
+        return client_thread
+
+        
+    def getFreeAddresses(self):
+        import socket
+        s = socket.socket()
+        s.bind(('', 0))
+        port0 = s.getsockname()[1]
+        
+        s1 = socket.socket()
+        s1.bind(('', 0))
+        port1 = s1.getsockname()[1]
+        s.close()
+        s1.close()
+        return port0, port1
+        
+        
+    def testServer(self):
+        # Just making sure that the singleton is created in this thread.
+        get_pydev_frontend(get_localhost(), 0)
+        
+        client_port, server_port = self.getFreeAddresses()
+        class ServerThread(threading.Thread):
+            def __init__(self, client_port, server_port):
+                threading.Thread.__init__(self)
+                self.client_port = client_port
+                self.server_port = server_port
+                
+            def run(self):
+                import pydev_localhost
+                print('Starting server with:', pydev_localhost.get_localhost(), self.server_port, self.client_port)
+                pydevconsole.StartServer(pydev_localhost.get_localhost(), self.server_port, self.client_port)
+        server_thread = ServerThread(client_port, server_port)
+        server_thread.setDaemon(True)
+        server_thread.start()
+
+        client_thread = self.startClientThread(client_port) #@UnusedVariable
+        
+        try:
+            import time
+            time.sleep(.3) #let's give it some time to start the threads
+            
+            import pydev_localhost
+            server = xmlrpclib.Server('http://%s:%s' % (pydev_localhost.get_localhost(), server_port))
+            server.execLine("import sys; print('Running with: %s %s' % (sys.executable or sys.platform, sys.version))")
+            server.execLine('class Foo:')
+            server.execLine('    pass')
+            server.execLine('')
+            server.execLine('foo = Foo()')
+            server.execLine('a = %s()' % raw_input_name)
+            initial = time.time()
+            while not client_thread.requested_input:
+                if time.time() - initial > 2:
+                    raise AssertionError('Did not receive the input request before the timeout.')
+                time.sleep(.1)
+            frame_xml = server.getFrame()
+            self.assert_('RequestInput' in frame_xml, 'Did not find RequestInput in:\n%s' % (frame_xml,))
+        finally:
+            client_thread.shutdown()
+        
+#=======================================================================================================================
+# main        
+#=======================================================================================================================
+if __name__ == '__main__':
+    unittest.main()
+
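The new test talks to pydevconsole in both directions over XML-RPC: the test side registers RequestInput and NotifyFinished on a SimpleXMLRPCServer, while execLine and getFrame are invoked on the console side through xmlrpclib. A minimal sketch of that round trip using only the standard library; the echo function and the free-port handling are illustrative, not part of pydev:

import threading

try:
    from SimpleXMLRPCServer import SimpleXMLRPCServer   # Python 2
    from xmlrpclib import ServerProxy
except ImportError:
    from xmlrpc.server import SimpleXMLRPCServer         # Python 3
    from xmlrpc.client import ServerProxy

def echo(text):
    # stand-in for RequestInput/NotifyFinished: any registered callable works
    return 'echo: %s' % text

server = SimpleXMLRPCServer(('127.0.0.1', 0), logRequests=False)
server.register_function(echo)
port = server.socket.getsockname()[1]   # port 0 means "pick a free port"

t = threading.Thread(target=server.serve_forever)
t.setDaemon(True)
t.start()

proxy = ServerProxy('http://127.0.0.1:%s' % port)
assert proxy.echo('hi') == 'echo: hi'
server.shutdown()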
diff --git a/python/helpers/pydev/tests/test_get_referrers.py b/python/helpers/pydev/tests/test_get_referrers.py
index 7fc8514..8284b27 100644
--- a/python/helpers/pydev/tests/test_get_referrers.py
+++ b/python/helpers/pydev/tests/test_get_referrers.py
@@ -1,21 +1,7 @@
-import os.path
 import sys
 import threading
 import time
 
-IS_JYTHON = sys.platform.find('java') != -1
-
-try:
-    this_file_name = __file__
-except NameError:
-    # stupid jython. plain old __file__ isnt working for some reason
-    import test_runfiles  #@UnresolvedImport - importing the module itself
-    this_file_name = test_runfiles.__file__
-
-
-desired_runfiles_path = os.path.normpath(os.path.dirname(this_file_name) + "/..")
-sys.path.insert(0, desired_runfiles_path)
-
 import unittest
 import pydevd_referrers
 from pydev_imports import StringIO
diff --git a/python/helpers/pydev/tests/test_jyserver.py b/python/helpers/pydev/tests/test_jyserver.py
index 8765400..12be8bc 100644
--- a/python/helpers/pydev/tests/test_jyserver.py
+++ b/python/helpers/pydev/tests/test_jyserver.py
@@ -36,10 +36,15 @@
         unittest.TestCase.tearDown(self)
     
     def testIt(self):
+        if not IS_JYTHON:
+            return
         dbg('ok')
         
     def testMessage(self):
+        if not IS_JYTHON:
+            return
         t = jycompletionserver.T(0)
+        t.exit_process_on_kill = False
         
         l = []
         l.append(('Def', 'description'  , 'args'))
@@ -65,6 +70,8 @@
 
 
     def testCompletionSocketsAndMessages(self):
+        if not IS_JYTHON:
+            return
         dbg('testCompletionSocketsAndMessages')
         t, socket = self.createConnections()
         self.socket = socket
@@ -121,6 +128,7 @@
         Creates the connections needed for testing.
         '''
         t = jycompletionserver.T(p1)
+        t.exit_process_on_kill = False
         
         t.start()
 
diff --git a/python/helpers/pydev/tests/test_jysimpleTipper.py b/python/helpers/pydev/tests/test_jysimpleTipper.py
index 4a75563..bf421b2 100644
--- a/python/helpers/pydev/tests/test_jysimpleTipper.py
+++ b/python/helpers/pydev/tests/test_jysimpleTipper.py
@@ -4,17 +4,14 @@
 import unittest
 import os
 import sys
-#make it as if we were executing from the directory above this one (so that we can use pycompletionserver
-#without the need for it being in the pythonpath)
-sys.argv[0] = os.path.dirname(sys.argv[0]) 
-#twice the dirname to get the previous level from this file.
-sys.path.insert(1, os.path.join(os.path.dirname(sys.argv[0])))
 
 #this does not work (they must be in the system pythonpath)
 #sys.path.insert(1, r"D:\bin\eclipse321\plugins\org.junit_3.8.1\junit.jar" ) #some late loading jar tests
 #sys.path.insert(1, r"D:\bin\eclipse331_1\plugins\org.apache.ant_1.7.0.v200706080842\lib\ant.jar" ) #some late loading jar tests
 
+IS_JYTHON = 0
 if sys.platform.find('java') != -1:
+    IS_JYTHON = 1
     from _pydev_jy_imports_tipper import ismethod
     from _pydev_jy_imports_tipper import isclass
     from _pydev_jy_imports_tipper import dirObj
@@ -25,6 +22,8 @@
     from java.lang.System import arraycopy #@UnresolvedImport
     from java.lang.System import out #@UnresolvedImport
     import java.lang.String #@UnresolvedImport
+    import org.python.core.PyDictionary #@UnresolvedImport
+
 
 __DBG = 0
 def dbg(s):
@@ -234,22 +233,24 @@
         assert isMet[1][0].basicAsStr() == "function:met2 args=['arg1', 'arg2'], varargs=vararg, kwargs=kwarg, docs:docmet2"
         assert not isclass(met2)
         
+        
+if not IS_JYTHON:
+    # Disable tests if not running under Jython
+    class TestMod(unittest.TestCase):
+        pass
+    class TestCompl(TestMod):
+        pass
+    class TestSearch(TestMod):
+        pass
 
 
 if __name__ == '__main__':
-    if sys.platform.find('java') != -1:
-        #Only run if jython
-        suite = unittest.makeSuite(TestCompl)
-        suite2 = unittest.makeSuite(TestMod)
-        suite3 = unittest.makeSuite(TestSearch)
+    #Only run if jython
+    suite = unittest.makeSuite(TestCompl)
+    suite2 = unittest.makeSuite(TestMod)
+    suite3 = unittest.makeSuite(TestSearch)
+    
+    unittest.TextTestRunner(verbosity=1).run(suite)
+    unittest.TextTestRunner(verbosity=1).run(suite2)
+    unittest.TextTestRunner(verbosity=1).run(suite3)
         
-        unittest.TextTestRunner(verbosity=1).run(suite)
-        unittest.TextTestRunner(verbosity=1).run(suite2)
-        unittest.TextTestRunner(verbosity=1).run(suite3)
-        
-#        suite.addTest(Test('testCase12'))
-#        suite = unittest.TestSuite()
-#        unittest.TextTestRunner(verbosity=1).run(suite)
-
-    else:
-        sys.stdout.write('Not running jython tests for non-java platform: %s' % sys.platform)
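When not running under Jython the patch replaces the test classes with empty stubs, so unittest.makeSuite() finds nothing to run. A hedged sketch of the more common alternative, shown only for comparison and not what this patch does, is to keep the classes and mark them with unittest.skipUnless:

import sys
import unittest

IS_JYTHON = sys.platform.find('java') != -1

@unittest.skipUnless(IS_JYTHON, 'requires Jython')
class TestCompl(unittest.TestCase):
    def testSomething(self):
        self.assertTrue(True)   # placeholder body; the real tests need the Jython tipper

if __name__ == '__main__':
    unittest.main()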
diff --git a/python/helpers/pydev/tests/test_pydev_ipython_010.py b/python/helpers/pydev/tests/test_pydev_ipython_010.py
deleted file mode 100644
index 5ce1dc3..0000000
--- a/python/helpers/pydev/tests/test_pydev_ipython_010.py
+++ /dev/null
@@ -1,80 +0,0 @@
-#TODO: This test no longer works (check if it should be fixed or removed altogether).
-
-#import unittest
-#import sys
-#import os
-##make it as if we were executing from the directory above this one
-#sys.argv[0] = os.path.dirname(sys.argv[0])
-##twice the dirname to get the previous level from this file.
-#sys.path.insert(1, os.path.join(os.path.dirname(sys.argv[0])))
-#
-#from pydev_localhost import get_localhost
-#
-#
-#IS_JYTHON = sys.platform.find('java') != -1
-#
-##=======================================================================================================================
-## TestCase
-##=======================================================================================================================
-#class TestCase(unittest.TestCase):
-#
-#    def setUp(self):
-#        unittest.TestCase.setUp(self)
-#
-#    def tearDown(self):
-#        unittest.TestCase.tearDown(self)
-#
-#    def testIPython(self):
-#        try:
-#            from pydev_ipython_console import PyDevFrontEnd
-#        except:
-#            if IS_JYTHON:
-#                return
-#        front_end = PyDevFrontEnd(get_localhost(), 0)
-#
-#        front_end.input_buffer = 'if True:'
-#        self.assert_(not front_end._on_enter())
-#
-#        front_end.input_buffer = 'if True:\n' + \
-#            front_end.continuation_prompt() + '    a = 10\n'
-#        self.assert_(not front_end._on_enter())
-#
-#
-#        front_end.input_buffer = 'if True:\n' + \
-#            front_end.continuation_prompt() + '    a = 10\n\n'
-#        self.assert_(front_end._on_enter())
-#
-#
-##        front_end.input_buffer = '  print a'
-##        self.assert_(not front_end._on_enter())
-##        front_end.input_buffer = ''
-##        self.assert_(front_end._on_enter())
-#
-#
-##        front_end.input_buffer = 'a.'
-##        front_end.complete_current_input()
-##        front_end.input_buffer = 'if True:'
-##        front_end._on_enter()
-#        front_end.input_buffer = 'a = 30'
-#        front_end._on_enter()
-#        front_end.input_buffer = 'print a'
-#        front_end._on_enter()
-#        front_end.input_buffer = 'a?'
-#        front_end._on_enter()
-#        print front_end.complete('%')
-#        print front_end.complete('%e')
-#        print front_end.complete('cd c:/t')
-#        print front_end.complete('cd c:/temp/')
-##        front_end.input_buffer = 'print raw_input("press enter\\n")'
-##        front_end._on_enter()
-##
-#
-##=======================================================================================================================
-## main
-##=======================================================================================================================
-#if __name__ == '__main__':
-#    if sys.platform.find('java') == -1:
-#        #IPython not available for Jython
-#        unittest.main()
-#    else:
-#        print('not supported on Jython')
diff --git a/python/helpers/pydev/tests/test_pydev_ipython_011.py b/python/helpers/pydev/tests/test_pydev_ipython_011.py
index 3cfa70f..dc4684f 100644
--- a/python/helpers/pydev/tests/test_pydev_ipython_011.py
+++ b/python/helpers/pydev/tests/test_pydev_ipython_011.py
@@ -7,81 +7,87 @@
 from pydev_localhost import get_localhost
 from pydev_console_utils import StdIn
 import socket
+from pydev_ipython_console_011 import get_pydev_frontend
+import time
 
-# make it as if we were executing from the directory above this one
-sys.argv[0] = os.path.dirname(sys.argv[0])
-# twice the dirname to get the previous level from this file.
-sys.path.insert(1, os.path.join(os.path.dirname(sys.argv[0])))
+try:
+    xrange
+except:
+    xrange = range
 
-# PyDevFrontEnd depends on singleton in IPython, so you
-# can't make multiple versions. So we reuse front_end for
-# all the tests
-
-orig_stdout = sys.stdout
-orig_stderr = sys.stderr
-
-stdout = sys.stdout = StringIO()
-stderr = sys.stderr = StringIO()
-
-from pydev_ipython_console_011 import PyDevFrontEnd
-s = socket.socket()
-s.bind(('', 0))
-client_port = s.getsockname()[1]
-s.close()
-front_end = PyDevFrontEnd(get_localhost(), client_port)
-
-
-def addExec(code, expected_more=False):
-    more = front_end.addExec(code)
-    eq_(expected_more, more)
 
 class TestBase(unittest.TestCase):
+    
+    
     def setUp(self):
-        front_end.input_splitter.reset()
-        stdout.truncate(0)
-        stdout.seek(0)
-        stderr.truncate(0)
-        stderr.seek(0)
+        # PyDevFrontEnd depends on singleton in IPython, so you
+        # can't make multiple versions. So we reuse self.front_end for
+        # all the tests
+        self.front_end = get_pydev_frontend(get_localhost(), 0)
+        
+        from pydev_ipython.inputhook import set_return_control_callback
+        set_return_control_callback(lambda:True)
+        self.front_end.clearBuffer()
+
     def tearDown(self):
         pass
+    
+    def addExec(self, code, expected_more=False):
+        more = self.front_end.addExec(code)
+        eq_(expected_more, more)
+    
+    def redirectStdout(self):
+        from IPython.utils import io
+        
+        self.original_stdout = sys.stdout
+        sys.stdout = io.stdout = StringIO()
+    
+    def restoreStdout(self):
+        from IPython.utils import io
+        io.stdout = sys.stdout = self.original_stdout
 
 
 class TestPyDevFrontEnd(TestBase):
+    
     def testAddExec_1(self):
-        addExec('if True:', True)
+        self.addExec('if True:', True)
+        
     def testAddExec_2(self):
-        addExec('if True:\n    testAddExec_a = 10\n', True)
+        #Change: 'more' must now be controlled on the client side after the initial 'True' is returned.
+        self.addExec('if True:\n    testAddExec_a = 10\n', False) 
+        assert 'testAddExec_a' in self.front_end.getNamespace()
+        
     def testAddExec_3(self):
-        assert 'testAddExec_a' not in front_end.getNamespace()
-        addExec('if True:\n    testAddExec_a = 10\n\n')
-        assert 'testAddExec_a' in front_end.getNamespace()
-        eq_(front_end.getNamespace()['testAddExec_a'], 10)
+        assert 'testAddExec_x' not in self.front_end.getNamespace()
+        self.addExec('if True:\n    testAddExec_x = 10\n\n')
+        assert 'testAddExec_x' in self.front_end.getNamespace()
+        eq_(self.front_end.getNamespace()['testAddExec_x'], 10)
 
     def testGetNamespace(self):
-        assert 'testGetNamespace_a' not in front_end.getNamespace()
-        addExec('testGetNamespace_a = 10')
-        assert 'testGetNamespace_a' in front_end.getNamespace()
-        eq_(front_end.getNamespace()['testGetNamespace_a'], 10)
+        assert 'testGetNamespace_a' not in self.front_end.getNamespace()
+        self.addExec('testGetNamespace_a = 10')
+        assert 'testGetNamespace_a' in self.front_end.getNamespace()
+        eq_(self.front_end.getNamespace()['testGetNamespace_a'], 10)
 
     def testComplete(self):
-        unused_text, matches = front_end.complete('%')
+        unused_text, matches = self.front_end.complete('%')
         assert len(matches) > 1, 'at least one magic should appear in completions'
 
     def testCompleteDoesNotDoPythonMatches(self):
         # Test that IPython's completions do not do the things that
         # PyDev's completions will handle
-        addExec('testComplete_a = 5')
-        addExec('testComplete_b = 10')
-        addExec('testComplete_c = 15')
-        unused_text, matches = front_end.complete('testComplete_')
+        self.addExec('testComplete_a = 5')
+        self.addExec('testComplete_b = 10')
+        self.addExec('testComplete_c = 15')
+        unused_text, matches = self.front_end.complete('testComplete_')
         assert len(matches) == 0
 
     def testGetCompletions_1(self):
         # Test the merged completions include the standard completions
-        addExec('testComplete_a = 5')
-        addExec('testComplete_b = 10')
-        addExec('testComplete_c = 15')
-        res = front_end.getCompletions('testComplete_', 'testComplete_')
+        self.addExec('testComplete_a = 5')
+        self.addExec('testComplete_b = 10')
+        self.addExec('testComplete_c = 15')
+        res = self.front_end.getCompletions('testComplete_', 'testComplete_')
         matches = [f[0] for f in res]
         assert len(matches) == 3
         eq_(set(['testComplete_a', 'testComplete_b', 'testComplete_c']), set(matches))
@@ -90,60 +96,80 @@
         # Test that we get IPython completions in results
         # we do this by checking kw completion which PyDev does
         # not do by default
-        addExec('def ccc(ABC=123): pass')
-        res = front_end.getCompletions('ccc(', '')
+        self.addExec('def ccc(ABC=123): pass')
+        res = self.front_end.getCompletions('ccc(', '')
         matches = [f[0] for f in res]
         assert 'ABC=' in matches
 
     def testGetCompletions_3(self):
         # Test that magics return IPYTHON magic as type
-        res = front_end.getCompletions('%cd', '%cd')
+        res = self.front_end.getCompletions('%cd', '%cd')
         assert len(res) == 1
         eq_(res[0][3], '12')  # '12' == IToken.TYPE_IPYTHON_MAGIC
         assert len(res[0][1]) > 100, 'docstring for %cd should be a reasonably long string'
 
 class TestRunningCode(TestBase):
     def testPrint(self):
-        addExec('print("output")')
-        eq_(stdout.getvalue(), 'output\n')
+        self.redirectStdout()
+        try:
+            self.addExec('print("output")')
+            eq_(sys.stdout.getvalue(), 'output\n')
+        finally:
+            self.restoreStdout()
 
     def testQuestionMark_1(self):
-        addExec('?')
-        assert len(stdout.getvalue()) > 1000, 'IPython help should be pretty big'
+        self.redirectStdout()
+        try:
+            self.addExec('?')
+            assert len(sys.stdout.getvalue()) > 1000, 'IPython help should be pretty big'
+        finally:
+            self.restoreStdout()
 
     def testQuestionMark_2(self):
-        addExec('int?')
-        assert stdout.getvalue().find('Convert') != -1
+        self.redirectStdout()
+        try:
+            self.addExec('int?')
+            assert sys.stdout.getvalue().find('Convert') != -1
+        finally:
+            self.restoreStdout()
 
 
     def testGui(self):
-        from pydev_ipython.inputhook import get_inputhook, set_stdin_file
-        set_stdin_file(sys.stdin)
-        assert get_inputhook() is None
-        addExec('%gui tk')
-        # we can't test the GUI works here because we aren't connected to XML-RPC so
-        # nowhere for hook to run
-        assert get_inputhook() is not None
-        addExec('%gui none')
-        assert get_inputhook() is None
+        try:
+            import Tkinter
+        except:
+            return
+        else:
+            from pydev_ipython.inputhook import get_inputhook
+            assert get_inputhook() is None
+            self.addExec('%gui tk')
+            # we can't test that the GUI actually works here because we aren't connected
+            # to XML-RPC, so there is nowhere for the hook to run
+            assert get_inputhook() is not None
+            self.addExec('%gui none')
+            assert get_inputhook() is None
 
     def testHistory(self):
         ''' Make sure commands are added to IPython's history '''
-        addExec('a=1')
-        addExec('b=2')
-        _ih = front_end.getNamespace()['_ih']
-        eq_(_ih[-1], 'b=2')
-        eq_(_ih[-2], 'a=1')
-
-        addExec('history')
-        hist = stdout.getvalue().split('\n')
-        eq_(hist[-1], '')
-        eq_(hist[-2], 'history')
-        eq_(hist[-3], 'b=2')
-        eq_(hist[-4], 'a=1')
+        self.redirectStdout()
+        try:
+            self.addExec('a=1')
+            self.addExec('b=2')
+            _ih = self.front_end.getNamespace()['_ih']
+            eq_(_ih[-1], 'b=2')
+            eq_(_ih[-2], 'a=1')
+    
+            self.addExec('history')
+            hist = sys.stdout.getvalue().split('\n')
+            eq_(hist[-1], '')
+            eq_(hist[-2], 'history')
+            eq_(hist[-3], 'b=2')
+            eq_(hist[-4], 'a=1')
+        finally:
+            self.restoreStdout()
 
     def testEdit(self):
-        ''' Make sure we can issue an edit command '''
+        ''' Make sure we can issue an edit command'''
         called_RequestInput = [False]
         called_IPythonEditor = [False]
         def startClientThread(client_port):
@@ -163,26 +189,47 @@
                     handle_request_input = HandleRequestInput()
 
                     import pydev_localhost
-                    client_server = SimpleXMLRPCServer((pydev_localhost.get_localhost(), self.client_port), logRequests=False)
+                    self.client_server = client_server = SimpleXMLRPCServer(
+                        (pydev_localhost.get_localhost(), self.client_port), logRequests=False)
                     client_server.register_function(handle_request_input.RequestInput)
                     client_server.register_function(handle_request_input.IPythonEditor)
                     client_server.serve_forever()
+                    
+                def shutdown(self):
+                    return
+                    self.client_server.shutdown()
 
             client_thread = ClientThread(client_port)
             client_thread.setDaemon(True)
             client_thread.start()
             return client_thread
 
-        startClientThread(client_port)
+        # PyDevFrontEnd depends on singleton in IPython, so you
+        # can't make multiple versions. So we reuse self.front_end for
+        # all the tests
+        s = socket.socket()
+        s.bind(('', 0))
+        self.client_port = client_port = s.getsockname()[1]
+        s.close()
+        self.front_end = get_pydev_frontend(get_localhost(), client_port)
+
+        client_thread = startClientThread(self.client_port)
         orig_stdin = sys.stdin
-        sys.stdin = StdIn(self, get_localhost(), client_port)
+        sys.stdin = StdIn(self, get_localhost(), self.client_port)
         try:
             filename = 'made_up_file.py'
-            addExec('%edit ' + filename)
-            eq_(called_IPythonEditor[0], (os.path.abspath(filename), 0))
+            self.addExec('%edit ' + filename)
+            
+            for i in xrange(10):
+                if called_IPythonEditor[0] == (os.path.abspath(filename), '0'):
+                    break
+                time.sleep(.1)
+                
+            eq_(called_IPythonEditor[0], (os.path.abspath(filename), '0'))
             assert called_RequestInput[0], "Make sure the 'wait' parameter has been respected"
         finally:
             sys.stdin = orig_stdin
+            client_thread.shutdown()
 
 if __name__ == '__main__':
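The rewritten TestBase captures output per test: redirectStdout points both sys.stdout and IPython.utils.io.stdout at a StringIO, and restoreStdout puts the originals back. A minimal sketch of the same capture/restore idea with plain sys.stdout only; the IPython.utils.io part is left out because it depends on the IPython version bundled with pydev:

import sys

try:
    from StringIO import StringIO   # Python 2
except ImportError:
    from io import StringIO         # Python 3

def capture_stdout(func):
    original = sys.stdout
    sys.stdout = buf = StringIO()
    try:
        func()
        return buf.getvalue()
    finally:
        sys.stdout = original       # always restore, even if func() raises

if __name__ == '__main__':
    out = capture_stdout(lambda: sys.stdout.write('output\n'))
    assert out == 'output\n'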
 
diff --git a/python/helpers/pydev/tests/test_pydevconsole.py b/python/helpers/pydev/tests/test_pydevconsole.py
index 9a9e3ed..1842198 100644
--- a/python/helpers/pydev/tests/test_pydevconsole.py
+++ b/python/helpers/pydev/tests/test_pydevconsole.py
@@ -1,10 +1,6 @@
 import threading
 import unittest
 import sys
-import os
-
-sys.argv[0] = os.path.dirname(sys.argv[0])
-sys.path.insert(1, os.path.join(os.path.dirname(sys.argv[0])))
 import pydevconsole
 from pydev_imports import xmlrpclib, SimpleXMLRPCServer, StringIO
 
@@ -19,104 +15,112 @@
 #=======================================================================================================================
 class Test(unittest.TestCase):
 
-    def setUp(self):
+    def testConsoleHello(self):
         self.original_stdout = sys.stdout
         sys.stdout = StringIO()
-
-
-    def tearDown(self):
-        ret = sys.stdout  #@UnusedVariable
-        sys.stdout = self.original_stdout
-        #print_ ret.getvalue() -- use to see test output
-
-    def testConsoleHello(self):
-        client_port, _server_port = self.getFreeAddresses()
-        client_thread = self.startClientThread(client_port)  #@UnusedVariable
-        import time
-        time.sleep(.3)  #let's give it some time to start the threads
-
-        import pydev_localhost
-        interpreter = pydevconsole.InterpreterInterface(pydev_localhost.get_localhost(), client_port, server=None)
-
-        (result,) = interpreter.hello("Hello pydevconsole")
-        self.assertEqual(result, "Hello eclipse")
+        
+        try:
+            client_port, _server_port = self.getFreeAddresses()
+            client_thread = self.startClientThread(client_port)  #@UnusedVariable
+            import time
+            time.sleep(.3)  #let's give it some time to start the threads
+    
+            import pydev_localhost
+            interpreter = pydevconsole.InterpreterInterface(pydev_localhost.get_localhost(), client_port, threading.currentThread())
+    
+            (result,) = interpreter.hello("Hello pydevconsole")
+            self.assertEqual(result, "Hello eclipse")
+        finally:
+            sys.stdout = self.original_stdout
 
 
     def testConsoleRequests(self):
-        client_port, _server_port = self.getFreeAddresses()
-        client_thread = self.startClientThread(client_port)  #@UnusedVariable
-        import time
-        time.sleep(.3)  #let's give it some time to start the threads
-
-        import pydev_localhost
-        interpreter = pydevconsole.InterpreterInterface(pydev_localhost.get_localhost(), client_port, server=None)
-        interpreter.addExec('class Foo:')
-        interpreter.addExec('   CONSTANT=1')
-        interpreter.addExec('')
-        interpreter.addExec('foo=Foo()')
-        interpreter.addExec('foo.__doc__=None')
-        interpreter.addExec('val = %s()' % (raw_input_name,))
-        interpreter.addExec('50')
-        interpreter.addExec('print (val)')
-        found = sys.stdout.getvalue().split()
+        self.original_stdout = sys.stdout
+        sys.stdout = StringIO()
+        
         try:
-            self.assertEqual(['50', 'input_request'], found)
-        except:
-            self.assertEqual(['input_request'], found)  #IPython
-
-        comps = interpreter.getCompletions('foo.', 'foo.')
-        self.assert_(
-            ('CONSTANT', '', '', '3') in comps or ('CONSTANT', '', '', '4') in comps, \
-            'Found: %s' % comps
-        )
-
-        comps = interpreter.getCompletions('"".', '"".')
-        self.assert_(
-            ('__add__', 'x.__add__(y) <==> x+y', '', '3') in comps or
-            ('__add__', '', '', '4') in comps or
-            ('__add__', 'x.__add__(y) <==> x+y\r\nx.__add__(y) <==> x+y', '()', '2') in comps or
-            ('__add__', 'x.\n__add__(y) <==> x+yx.\n__add__(y) <==> x+y', '()', '2'),
-            'Did not find __add__ in : %s' % (comps,)
-        )
-
-
-        completions = interpreter.getCompletions('', '')
-        for c in completions:
-            if c[0] == 'AssertionError':
-                break
-        else:
-            self.fail('Could not find AssertionError')
-
-        completions = interpreter.getCompletions('Assert', 'Assert')
-        for c in completions:
-            if c[0] == 'RuntimeError':
-                self.fail('Did not expect to find RuntimeError there')
-
-        self.assert_(('__doc__', None, '', '3') not in interpreter.getCompletions('foo.CO', 'foo.'))
-
-        comps = interpreter.getCompletions('va', 'va')
-        self.assert_(('val', '', '', '3') in comps or ('val', '', '', '4') in comps)
-
-        interpreter.addExec('s = "mystring"')
-
-        desc = interpreter.getDescription('val')
-        self.assert_(desc.find('str(object) -> string') >= 0 or
-                     desc == "'input_request'" or
-                     desc.find('str(string[, encoding[, errors]]) -> str') >= 0 or
-                     desc.find('str(Char* value)') >= 0 or
-                     desc.find('str(value: Char*)') >= 0,
-                     'Could not find what was needed in %s' % desc)
-
-        desc = interpreter.getDescription('val.join')
-        self.assert_(desc.find('S.join(sequence) -> string') >= 0 or
-                     desc.find('S.join(sequence) -> str') >= 0 or
-                     desc.find('S.join(iterable) -> string') >= 0 or
-                     desc == "<builtin method 'join'>"  or
-                     desc == "<built-in method join of str object>" or
-                     desc.find('str join(str self, list sequence)') >= 0 or
-                     desc.find('S.join(iterable) -> str') >= 0 or
-                     desc.find('join(self: str, sequence: list) -> str') >= 0,
-                     "Could not recognize: %s" % (desc,))
+            client_port, _server_port = self.getFreeAddresses()
+            client_thread = self.startClientThread(client_port)  #@UnusedVariable
+            import time
+            time.sleep(.3)  #let's give it some time to start the threads
+    
+            import pydev_localhost
+            from pydev_console_utils import CodeFragment
+            
+            interpreter = pydevconsole.InterpreterInterface(pydev_localhost.get_localhost(), client_port, threading.currentThread())
+            sys.stdout = StringIO()
+            interpreter.addExec(CodeFragment('class Foo:'))
+            interpreter.addExec(CodeFragment('   CONSTANT=1'))
+            interpreter.addExec(CodeFragment(''))
+            interpreter.addExec(CodeFragment('foo=Foo()'))
+            interpreter.addExec(CodeFragment('foo.__doc__=None'))
+            interpreter.addExec(CodeFragment('val = %s()' % (raw_input_name,)))
+            interpreter.addExec(CodeFragment('50'))
+            interpreter.addExec(CodeFragment('print (val)'))
+            found = sys.stdout.getvalue().split()
+            try:
+                self.assertEqual(['50', 'input_request'], found)
+            except:
+                self.assertEqual(['input_request'], found)  #IPython
+    
+            comps = interpreter.getCompletions('foo.', 'foo.')
+            self.assert_(
+                ('CONSTANT', '', '', '3') in comps or ('CONSTANT', '', '', '4') in comps, \
+                'Found: %s' % comps
+            )
+    
+            comps = interpreter.getCompletions('"".', '"".')
+            self.assert_(
+                ('__add__', 'x.__add__(y) <==> x+y', '', '3') in comps or
+                ('__add__', '', '', '4') in comps or
+                ('__add__', 'x.__add__(y) <==> x+y\r\nx.__add__(y) <==> x+y', '()', '2') in comps or
+                ('__add__', 'x.\n__add__(y) <==> x+yx.\n__add__(y) <==> x+y', '()', '2') in comps,
+                'Did not find __add__ in : %s' % (comps,)
+            )
+    
+    
+            completions = interpreter.getCompletions('', '')
+            for c in completions:
+                if c[0] == 'AssertionError':
+                    break
+            else:
+                self.fail('Could not find AssertionError')
+    
+            completions = interpreter.getCompletions('Assert', 'Assert')
+            for c in completions:
+                if c[0] == 'RuntimeError':
+                    self.fail('Did not expect to find RuntimeError there')
+    
+            self.assert_(('__doc__', None, '', '3') not in interpreter.getCompletions('foo.CO', 'foo.'))
+    
+            comps = interpreter.getCompletions('va', 'va')
+            self.assert_(('val', '', '', '3') in comps or ('val', '', '', '4') in comps)
+    
+            interpreter.addExec(CodeFragment('s = "mystring"'))
+    
+            desc = interpreter.getDescription('val')
+            self.assert_(desc.find('str(object) -> string') >= 0 or
+                         desc == "'input_request'" or
+                         desc.find('str(string[, encoding[, errors]]) -> str') >= 0 or
+                         desc.find('str(Char* value)') >= 0 or
+                         desc.find('str(object=\'\') -> string') >= 0 or
+                         desc.find('str(value: Char*)') >= 0 or
+                         desc.find('str(object=\'\') -> str') >= 0
+                         ,
+                         'Could not find what was needed in %s' % desc)
+    
+            desc = interpreter.getDescription('val.join')
+            self.assert_(desc.find('S.join(sequence) -> string') >= 0 or
+                         desc.find('S.join(sequence) -> str') >= 0 or
+                         desc.find('S.join(iterable) -> string') >= 0 or
+                         desc == "<builtin method 'join'>"  or
+                         desc == "<built-in method join of str object>" or
+                         desc.find('str join(str self, list sequence)') >= 0 or
+                         desc.find('S.join(iterable) -> str') >= 0 or
+                         desc.find('join(self: str, sequence: list) -> str') >= 0,
+                         "Could not recognize: %s" % (desc,))
+        finally:
+            sys.stdout = self.original_stdout
 
 
     def startClientThread(self, client_port):
@@ -124,19 +128,28 @@
             def __init__(self, client_port):
                 threading.Thread.__init__(self)
                 self.client_port = client_port
+                
             def run(self):
                 class HandleRequestInput:
                     def RequestInput(self):
+                        client_thread.requested_input = True
                         return 'input_request'
-
+                    
+                    def NotifyFinished(self, *args, **kwargs):
+                        client_thread.notified_finished += 1
+                        return 1
+                
                 handle_request_input = HandleRequestInput()
-
+                
                 import pydev_localhost
                 client_server = SimpleXMLRPCServer((pydev_localhost.get_localhost(), self.client_port), logRequests=False)
                 client_server.register_function(handle_request_input.RequestInput)
+                client_server.register_function(handle_request_input.NotifyFinished)
                 client_server.serve_forever()
-
+                
         client_thread = ClientThread(client_port)
+        client_thread.requested_input = False
+        client_thread.notified_finished = 0
         client_thread.setDaemon(True)
         client_thread.start()
         return client_thread
@@ -194,34 +207,50 @@
 
 
     def testServer(self):
-        client_port, server_port = self.getFreeAddresses()
-        class ServerThread(threading.Thread):
-            def __init__(self, client_port, server_port):
-                threading.Thread.__init__(self)
-                self.client_port = client_port
-                self.server_port = server_port
-
-            def run(self):
-                import pydev_localhost
-                pydevconsole.StartServer(pydev_localhost.get_localhost(), self.server_port, self.client_port)
-        server_thread = ServerThread(client_port, server_port)
-        server_thread.setDaemon(True)
-        server_thread.start()
-
-        client_thread = self.startClientThread(client_port)  #@UnusedVariable
-
-        import time
-        time.sleep(.3)  #let's give it some time to start the threads
-
-        import pydev_localhost
-        server = xmlrpclib.Server('http://%s:%s' % (pydev_localhost.get_localhost(), server_port))
-        server.addExec('class Foo:')
-        server.addExec('    pass')
-        server.addExec('')
-        server.addExec('foo = Foo()')
-        server.addExec('a = %s()' % (raw_input_name,))
-        server.addExec('print (a)')
-        self.assertEqual(['input_request'], sys.stdout.getvalue().split())
+        self.original_stdout = sys.stdout
+        sys.stdout = StringIO()
+        try:
+            client_port, server_port = self.getFreeAddresses()
+            class ServerThread(threading.Thread):
+                def __init__(self, client_port, server_port):
+                    threading.Thread.__init__(self)
+                    self.client_port = client_port
+                    self.server_port = server_port
+    
+                def run(self):
+                    import pydev_localhost
+                    pydevconsole.StartServer(pydev_localhost.get_localhost(), self.server_port, self.client_port)
+            server_thread = ServerThread(client_port, server_port)
+            server_thread.setDaemon(True)
+            server_thread.start()
+    
+            client_thread = self.startClientThread(client_port)  #@UnusedVariable
+    
+            import time
+            time.sleep(.3)  #let's give it some time to start the threads
+            sys.stdout = StringIO()
+    
+            import pydev_localhost
+            server = xmlrpclib.Server('http://%s:%s' % (pydev_localhost.get_localhost(), server_port))
+            server.execLine('class Foo:')
+            server.execLine('    pass')
+            server.execLine('')
+            server.execLine('foo = Foo()')
+            server.execLine('a = %s()' % (raw_input_name,))
+            server.execLine('print (a)')
+            initial = time.time()
+            while not client_thread.requested_input:
+                if time.time() - initial > 2:
+                    raise AssertionError('Did not receive the input request before the timeout.')
+                time.sleep(.1)
+                
+            while ['input_request'] != sys.stdout.getvalue().split():
+                if time.time() - initial > 2:
+                    break
+                time.sleep(.1)
+            self.assertEqual(['input_request'], sys.stdout.getvalue().split())
+        finally:
+            sys.stdout = self.original_stdout
 
 #=======================================================================================================================
 # main
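Both testServer variants now poll for the client thread's requested_input flag with a two-second cap instead of asserting right after a fixed sleep. The same idea as a small reusable helper; wait_until is a hypothetical name, not something pydev defines:

import time

def wait_until(condition, timeout=2.0, interval=0.1):
    # Poll `condition` until it returns True or `timeout` seconds elapse.
    initial = time.time()
    while not condition():
        if time.time() - initial > timeout:
            raise AssertionError('Condition not met before the timeout.')
        time.sleep(interval)

if __name__ == '__main__':
    start = time.time()
    wait_until(lambda: time.time() - start > 0.3)   # satisfied after roughly 0.3s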
diff --git a/python/helpers/pydev/tests/test_pyserver.py b/python/helpers/pydev/tests/test_pyserver.py
index a74876b..ea9daff 100644
--- a/python/helpers/pydev/tests/test_pyserver.py
+++ b/python/helpers/pydev/tests/test_pyserver.py
@@ -3,6 +3,7 @@
 '''
 import sys
 import os
+from _pydev_imps._pydev_thread import start_new_thread
 
 #make it as if we were executing from the directory above this one (so that we can use pycompletionserver
 #without the need for it being in the pythonpath)
@@ -13,6 +14,13 @@
 IS_PYTHON_3K = 0
 if sys.platform.find('java') == -1:
     
+    try:
+        import __builtin__ #@UnusedImport
+        BUILTIN_MOD = '__builtin__'
+    except ImportError:
+        BUILTIN_MOD = 'builtins'
+
+    
     
     try:
         import inspect
@@ -41,7 +49,7 @@
             unittest.TestCase.tearDown(self)
         
         def testMessage(self):
-            t = pycompletionserver.T(0)
+            t = pycompletionserver.CompletionServer(0)
             
             l = []
             l.append(('Def', 'description'  , 'args'))
@@ -62,14 +70,14 @@
             '''
             Creates the connections needed for testing.
             '''
-            t = pycompletionserver.T(p1)
-            
-            t.start()
-    
             server = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
             server.bind((pycompletionserver.HOST, p1))
             server.listen(1)  #socket to receive messages.
     
+            t = pycompletionserver.CompletionServer(p1)
+            t.exit_process_on_kill = False
+            start_new_thread(t.run, ())
+    
             s, addr = server.accept()
     
             return t, s
@@ -106,6 +114,8 @@
                 #math is a builtin and because of that, it starts with None as a file
                 start = '@@COMPLETIONS(None,(__doc__,'
                 start_2 = '@@COMPLETIONS(None,(__name__,'
+                if '/math.so,' in completions or '/math.cpython-33m.so,' in completions or '/math.cpython-34m.so,' in completions:
+                    return
                 self.assert_(completions.startswith(start) or completions.startswith(start_2), '%s DOESNT START WITH %s' % (completions, (start, start_2)))
         
                 self.assert_('@@COMPLETIONS' in completions)
@@ -113,7 +123,7 @@
     
     
                 #now, test i
-                msg = quote_plus('__builtin__.list')
+                msg = quote_plus('%s.list' % BUILTIN_MOD)
                 send(socket, "@@IMPORTS:%s\nEND@@" % msg)
                 found = self.readMsg()
                 self.assert_('sort' in found, 'Could not find sort in: %s' % (found,))
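The BUILTIN_MOD lookup makes the same completion request work on Python 2 ('__builtin__') and Python 3 ('builtins'). A self-contained sketch of how the request string above is built, assuming only the standard library:

try:
    import __builtin__               # Python 2
    BUILTIN_MOD = '__builtin__'
except ImportError:
    BUILTIN_MOD = 'builtins'         # Python 3

try:
    from urllib import quote_plus            # Python 2
except ImportError:
    from urllib.parse import quote_plus      # Python 3

msg = quote_plus('%s.list' % BUILTIN_MOD)
print("@@IMPORTS:%s\nEND@@" % msg)           # format of the request the test sends to the completion server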
diff --git a/python/helpers/pydev/tests/test_simpleTipper.py b/python/helpers/pydev/tests/test_simpleTipper.py
index f759ad6..255a521 100644
--- a/python/helpers/pydev/tests/test_simpleTipper.py
+++ b/python/helpers/pydev/tests/test_simpleTipper.py
@@ -1,12 +1,7 @@
 '''
 @author Fabio Zadrozny 
 '''
-import os
 import sys
-#make it as if we were executing from the directory above this one (so that we can use pycompletionserver
-#without the need for it being in the pythonpath)
-#twice the dirname to get the previous level from this file.
-sys.path.insert(1, os.path.split(os.path.split(__file__)[0])[0])
 
 try:
     import __builtin__ #@UnusedImport
@@ -50,13 +45,14 @@
                 pass
     
         def testImports5(self):
-            tip = _pydev_imports_tipper.GenerateTip('__builtin__.list')
+            tip = _pydev_imports_tipper.GenerateTip('%s.list' % BUILTIN_MOD)
             s = self.assertIn('sort', tip)
             self.CheckArgs(
                 s, 
                 '(cmp=None, key=None, reverse=False)', 
                 '(self, object cmp, object key, bool reverse)',
-                '(self, cmp: object, key: object, reverse: bool)'
+                '(self, cmp: object, key: object, reverse: bool)',
+                '(key=None, reverse=False)',
             )
             
         def testImports2a(self):
@@ -64,14 +60,24 @@
             self.assertIn('__doc__', tips)
             
         def testImports2b(self):
-            tips = _pydev_imports_tipper.GenerateTip('%s' % BUILTIN_MOD)
-            t = self.assertIn('file' , tips)
-            self.assert_('->' in t[1].strip() or 'file' in t[1])
+            try:
+                file
+            except:
+                pass
+            else:
+                tips = _pydev_imports_tipper.GenerateTip('%s' % BUILTIN_MOD)
+                t = self.assertIn('file' , tips)
+                self.assert_('->' in t[1].strip() or 'file' in t[1])
             
         def testImports2c(self):
-            tips = _pydev_imports_tipper.GenerateTip('%s.file' % BUILTIN_MOD)
-            t = self.assertIn('readlines' , tips)
-            self.assert_('->' in t[1] or 'sizehint' in t[1])
+            try:
+                file # file is not available on py 3
+            except:
+                pass
+            else:
+                tips = _pydev_imports_tipper.GenerateTip('%s.file' % BUILTIN_MOD)
+                t = self.assertIn('readlines' , tips)
+                self.assert_('->' in t[1] or 'sizehint' in t[1])
             
         def testImports(self):
             '''
@@ -110,9 +116,9 @@
             self.assertIn('RuntimeError'   , tip)
             self.assertIn('RuntimeWarning' , tip)
             
-            t = self.assertIn('cmp' , tip)
-            
-            self.CheckArgs(t, '(x, y)', '(object x, object y)', '(x: object, y: object)') #args
+            # Remove cmp as it's not available on py 3
+            #t = self.assertIn('cmp' , tip)
+            #self.CheckArgs(t, '(x, y)', '(object x, object y)', '(x: object, y: object)') #args
             
             t = self.assertIn('isinstance' , tip)
             self.CheckArgs(t, '(object, class_or_type_or_tuple)', '(object o, type typeinfo)', '(o: object, typeinfo: type)') #args
diff --git a/python/helpers/pydev/tests_mainloop/gui-glut.py b/python/helpers/pydev/tests_mainloop/gui-glut.py
index f05a4bc..34a16b4 100644
--- a/python/helpers/pydev/tests_mainloop/gui-glut.py
+++ b/python/helpers/pydev/tests_mainloop/gui-glut.py
@@ -9,42 +9,44 @@
 4) run: gl.glClearColor(1,1,1,1)
 """
 
-#!/usr/bin/env python
-import sys
-import OpenGL.GL as gl
-import OpenGL.GLUT as glut
+if __name__ == '__main__':
 
-def close():
-    glut.glutDestroyWindow(glut.glutGetWindow())
-
-def display():
-    gl.glClear (gl.GL_COLOR_BUFFER_BIT | gl.GL_DEPTH_BUFFER_BIT)
-    glut.glutSwapBuffers()
-
-def resize(width,height):
-    gl.glViewport(0, 0, width, height+4)
-    gl.glMatrixMode(gl.GL_PROJECTION)
-    gl.glLoadIdentity()
-    gl.glOrtho(0, width, 0, height+4, -1, 1)
-    gl.glMatrixMode(gl.GL_MODELVIEW)
-
-if glut.glutGetWindow() > 0:
-    interactive = True
-    glut.glutInit(sys.argv)
-    glut.glutInitDisplayMode(glut.GLUT_DOUBLE |
-                             glut.GLUT_RGBA   |
-                             glut.GLUT_DEPTH)
-else:
-    interactive = False
-
-glut.glutCreateWindow('gui-glut')
-glut.glutDisplayFunc(display)
-glut.glutReshapeFunc(resize)
-# This is necessary on osx to be able to close the window
-#  (else the close button is disabled)
-if sys.platform == 'darwin' and not bool(glut.HAVE_FREEGLUT):
-    glut.glutWMCloseFunc(close)
-gl.glClearColor(0,0,0,1)
-
-if not interactive:
-    glut.glutMainLoop()
+    #!/usr/bin/env python
+    import sys
+    import OpenGL.GL as gl
+    import OpenGL.GLUT as glut
+    
+    def close():
+        glut.glutDestroyWindow(glut.glutGetWindow())
+    
+    def display():
+        gl.glClear (gl.GL_COLOR_BUFFER_BIT | gl.GL_DEPTH_BUFFER_BIT)
+        glut.glutSwapBuffers()
+    
+    def resize(width,height):
+        gl.glViewport(0, 0, width, height+4)
+        gl.glMatrixMode(gl.GL_PROJECTION)
+        gl.glLoadIdentity()
+        gl.glOrtho(0, width, 0, height+4, -1, 1)
+        gl.glMatrixMode(gl.GL_MODELVIEW)
+    
+    if glut.glutGetWindow() > 0:
+        interactive = True
+        glut.glutInit(sys.argv)
+        glut.glutInitDisplayMode(glut.GLUT_DOUBLE |
+                                 glut.GLUT_RGBA   |
+                                 glut.GLUT_DEPTH)
+    else:
+        interactive = False
+    
+    glut.glutCreateWindow('gui-glut')
+    glut.glutDisplayFunc(display)
+    glut.glutReshapeFunc(resize)
+    # This is necessary on osx to be able to close the window
+    #  (else the close button is disabled)
+    if sys.platform == 'darwin' and not bool(glut.HAVE_FREEGLUT):
+        glut.glutWMCloseFunc(close)
+    gl.glClearColor(0,0,0,1)
+    
+    if not interactive:
+        glut.glutMainLoop()
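Each of the tests_mainloop samples below is wrapped in an if __name__ == '__main__': guard, presumably so that importing the module (for example while collecting tests or scanning for completions) no longer opens windows or starts an event loop; only running it as a script does. The pattern in isolation, with a placeholder instead of real GUI setup:

def build_ui():
    # placeholder for the GLUT/GTK/Qt/wx setup done in the real samples
    print('building the UI')

if __name__ == '__main__':
    build_ui()      # runs only when executed directly, never on import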
diff --git a/python/helpers/pydev/tests_mainloop/gui-gtk.py b/python/helpers/pydev/tests_mainloop/gui-gtk.py
index 978f8f9..6df5c78 100644
--- a/python/helpers/pydev/tests_mainloop/gui-gtk.py
+++ b/python/helpers/pydev/tests_mainloop/gui-gtk.py
@@ -8,27 +8,28 @@
    interactive console
 """
 
-import pygtk
-pygtk.require('2.0')
-import gtk
-
-
-def hello_world(wigdet, data=None):
-    print("Hello World")
-
-def delete_event(widget, event, data=None):
-    return False
-
-def destroy(widget, data=None):
-    gtk.main_quit()
-
-window = gtk.Window(gtk.WINDOW_TOPLEVEL)
-window.connect("delete_event", delete_event)
-window.connect("destroy", destroy)
-button = gtk.Button("Hello World")
-button.connect("clicked", hello_world, None)
-
-window.add(button)
-button.show()
-window.show()
-
+if __name__ == '__main__':
+    import pygtk
+    pygtk.require('2.0')
+    import gtk
+    
+    
+    def hello_world(widget, data=None):
+        print("Hello World")
+    
+    def delete_event(widget, event, data=None):
+        return False
+    
+    def destroy(widget, data=None):
+        gtk.main_quit()
+    
+    window = gtk.Window(gtk.WINDOW_TOPLEVEL)
+    window.connect("delete_event", delete_event)
+    window.connect("destroy", destroy)
+    button = gtk.Button("Hello World")
+    button.connect("clicked", hello_world, None)
+    
+    window.add(button)
+    button.show()
+    window.show()
+    
diff --git a/python/helpers/pydev/tests_mainloop/gui-gtk3.py b/python/helpers/pydev/tests_mainloop/gui-gtk3.py
index a787f7e..6351d52 100644
--- a/python/helpers/pydev/tests_mainloop/gui-gtk3.py
+++ b/python/helpers/pydev/tests_mainloop/gui-gtk3.py
@@ -8,25 +8,26 @@
    interactive console
 """
 
-from gi.repository import Gtk
-
-
-def hello_world(wigdet, data=None):
-    print("Hello World")
-
-def delete_event(widget, event, data=None):
-    return False
-
-def destroy(widget, data=None):
-    Gtk.main_quit()
-
-window = Gtk.Window(Gtk.WindowType.TOPLEVEL)
-window.connect("delete_event", delete_event)
-window.connect("destroy", destroy)
-button = Gtk.Button("Hello World")
-button.connect("clicked", hello_world, None)
-
-window.add(button)
-button.show()
-window.show()
-
+if __name__ == '__main__':
+    from gi.repository import Gtk
+    
+    
+    def hello_world(widget, data=None):
+        print("Hello World")
+    
+    def delete_event(widget, event, data=None):
+        return False
+    
+    def destroy(widget, data=None):
+        Gtk.main_quit()
+    
+    window = Gtk.Window(Gtk.WindowType.TOPLEVEL)
+    window.connect("delete_event", delete_event)
+    window.connect("destroy", destroy)
+    button = Gtk.Button("Hello World")
+    button.connect("clicked", hello_world, None)
+    
+    window.add(button)
+    button.show()
+    window.show()
+    
diff --git a/python/helpers/pydev/tests_mainloop/gui-pyglet.py b/python/helpers/pydev/tests_mainloop/gui-pyglet.py
index b646093..70f1a7f 100644
--- a/python/helpers/pydev/tests_mainloop/gui-pyglet.py
+++ b/python/helpers/pydev/tests_mainloop/gui-pyglet.py
@@ -8,20 +8,21 @@
    interactive console
 """
 
-import pyglet
-
-
-window = pyglet.window.Window()
-label = pyglet.text.Label('Hello, world',
-                          font_name='Times New Roman',
-                          font_size=36,
-                          x=window.width//2, y=window.height//2,
-                          anchor_x='center', anchor_y='center')
-@window.event
-def on_close():
-    window.close()
-
-@window.event
-def on_draw():
-    window.clear()
-    label.draw()
+if __name__ == '__main__':
+    import pyglet
+    
+    
+    window = pyglet.window.Window()
+    label = pyglet.text.Label('Hello, world',
+                              font_name='Times New Roman',
+                              font_size=36,
+                              x=window.width//2, y=window.height//2,
+                              anchor_x='center', anchor_y='center')
+    @window.event
+    def on_close():
+        window.close()
+    
+    @window.event
+    def on_draw():
+        window.clear()
+        label.draw()
diff --git a/python/helpers/pydev/tests_mainloop/gui-qt.py b/python/helpers/pydev/tests_mainloop/gui-qt.py
index c27cbd6..30fc48d 100644
--- a/python/helpers/pydev/tests_mainloop/gui-qt.py
+++ b/python/helpers/pydev/tests_mainloop/gui-qt.py
@@ -10,26 +10,27 @@
 Ref: Modified from http://zetcode.com/tutorials/pyqt4/firstprograms/
 """
 
-import sys
-from PyQt4 import QtGui, QtCore
-
-class SimpleWindow(QtGui.QWidget):
-    def __init__(self, parent=None):
-        QtGui.QWidget.__init__(self, parent)
-
-        self.setGeometry(300, 300, 200, 80)
-        self.setWindowTitle('Hello World')
-
-        quit = QtGui.QPushButton('Close', self)
-        quit.setGeometry(10, 10, 60, 35)
-
-        self.connect(quit, QtCore.SIGNAL('clicked()'),
-                     self, QtCore.SLOT('close()'))
-
 if __name__ == '__main__':
-    app = QtCore.QCoreApplication.instance()
-    if app is None:
-        app = QtGui.QApplication([])
-
-    sw = SimpleWindow()
-    sw.show()
+    import sys
+    from PyQt4 import QtGui, QtCore
+    
+    class SimpleWindow(QtGui.QWidget):
+        def __init__(self, parent=None):
+            QtGui.QWidget.__init__(self, parent)
+    
+            self.setGeometry(300, 300, 200, 80)
+            self.setWindowTitle('Hello World')
+    
+            quit = QtGui.QPushButton('Close', self)
+            quit.setGeometry(10, 10, 60, 35)
+    
+            self.connect(quit, QtCore.SIGNAL('clicked()'),
+                         self, QtCore.SLOT('close()'))
+    
+    if __name__ == '__main__':
+        app = QtCore.QCoreApplication.instance()
+        if app is None:
+            app = QtGui.QApplication([])
+    
+        sw = SimpleWindow()
+        sw.show()
diff --git a/python/helpers/pydev/tests_mainloop/gui-tk.py b/python/helpers/pydev/tests_mainloop/gui-tk.py
index 69ceb0b..4cef45f 100644
--- a/python/helpers/pydev/tests_mainloop/gui-tk.py
+++ b/python/helpers/pydev/tests_mainloop/gui-tk.py
@@ -8,24 +8,26 @@
    interactive console
 """
 
-try:
-    from Tkinter import *
-except:
-    # Python 3
-    from tkinter import *
-
-class MyApp:
-
-    def __init__(self, root):
-        frame = Frame(root)
-        frame.pack()
-
-        self.button = Button(frame, text="Hello", command=self.hello_world)
-        self.button.pack(side=LEFT)
-
-    def hello_world(self):
-        print("Hello World!")
-
-root = Tk()
-
-app = MyApp(root)
+if __name__ == '__main__':
+    
+    try:
+        from Tkinter import *
+    except:
+        # Python 3
+        from tkinter import *
+    
+    class MyApp:
+    
+        def __init__(self, root):
+            frame = Frame(root)
+            frame.pack()
+    
+            self.button = Button(frame, text="Hello", command=self.hello_world)
+            self.button.pack(side=LEFT)
+    
+        def hello_world(self):
+            print("Hello World!")
+    
+    root = Tk()
+    
+    app = MyApp(root)
diff --git a/python/helpers/pydev/tests_mainloop/gui-wx.py b/python/helpers/pydev/tests_mainloop/gui-wx.py
index 2101e7f..b9c28bf 100644
--- a/python/helpers/pydev/tests_mainloop/gui-wx.py
+++ b/python/helpers/pydev/tests_mainloop/gui-wx.py
@@ -11,91 +11,93 @@
 Ref: Modified from wxPython source code wxPython/samples/simple/simple.py
 """
 
-import wx
-
-
-class MyFrame(wx.Frame):
-    """
-    This is MyFrame.  It just shows a few controls on a wxPanel,
-    and has a simple menu.
-    """
-    def __init__(self, parent, title):
-        wx.Frame.__init__(self, parent, -1, title,
-                          pos=(150, 150), size=(350, 200))
-
-        # Create the menubar
-        menuBar = wx.MenuBar()
-
-        # and a menu
-        menu = wx.Menu()
-
-        # add an item to the menu, using \tKeyName automatically
-        # creates an accelerator, the third param is some help text
-        # that will show up in the statusbar
-        menu.Append(wx.ID_EXIT, "E&xit\tAlt-X", "Exit this simple sample")
-
-        # bind the menu event to an event handler
-        self.Bind(wx.EVT_MENU, self.OnTimeToClose, id=wx.ID_EXIT)
-
-        # and put the menu on the menubar
-        menuBar.Append(menu, "&File")
-        self.SetMenuBar(menuBar)
-
-        self.CreateStatusBar()
-
-        # Now create the Panel to put the other controls on.
-        panel = wx.Panel(self)
-
-        # and a few controls
-        text = wx.StaticText(panel, -1, "Hello World!")
-        text.SetFont(wx.Font(14, wx.SWISS, wx.NORMAL, wx.BOLD))
-        text.SetSize(text.GetBestSize())
-        btn = wx.Button(panel, -1, "Close")
-        funbtn = wx.Button(panel, -1, "Just for fun...")
-
-        # bind the button events to handlers
-        self.Bind(wx.EVT_BUTTON, self.OnTimeToClose, btn)
-        self.Bind(wx.EVT_BUTTON, self.OnFunButton, funbtn)
-
-        # Use a sizer to layout the controls, stacked vertically and with
-        # a 10 pixel border around each
-        sizer = wx.BoxSizer(wx.VERTICAL)
-        sizer.Add(text, 0, wx.ALL, 10)
-        sizer.Add(btn, 0, wx.ALL, 10)
-        sizer.Add(funbtn, 0, wx.ALL, 10)
-        panel.SetSizer(sizer)
-        panel.Layout()
-
-
-    def OnTimeToClose(self, evt):
-        """Event handler for the button click."""
-        print("See ya later!")
-        self.Close()
-
-    def OnFunButton(self, evt):
-        """Event handler for the button click."""
-        print("Having fun yet?")
-
-
-class MyApp(wx.App):
-    def OnInit(self):
-        frame = MyFrame(None, "Simple wxPython App")
-        self.SetTopWindow(frame)
-
-        print("Print statements go to this stdout window by default.")
-
-        frame.Show(True)
-        return True
-
-
 if __name__ == '__main__':
 
-    app = wx.GetApp()
-    if app is None:
-        app = MyApp(redirect=False, clearSigInt=False)
-    else:
-        frame = MyFrame(None, "Simple wxPython App")
-        app.SetTopWindow(frame)
-        print("Print statements go to this stdout window by default.")
-        frame.Show(True)
-
+    import wx
+    
+    
+    class MyFrame(wx.Frame):
+        """
+        This is MyFrame.  It just shows a few controls on a wxPanel,
+        and has a simple menu.
+        """
+        def __init__(self, parent, title):
+            wx.Frame.__init__(self, parent, -1, title,
+                              pos=(150, 150), size=(350, 200))
+    
+            # Create the menubar
+            menuBar = wx.MenuBar()
+    
+            # and a menu
+            menu = wx.Menu()
+    
+            # add an item to the menu, using \tKeyName automatically
+            # creates an accelerator, the third param is some help text
+            # that will show up in the statusbar
+            menu.Append(wx.ID_EXIT, "E&xit\tAlt-X", "Exit this simple sample")
+    
+            # bind the menu event to an event handler
+            self.Bind(wx.EVT_MENU, self.OnTimeToClose, id=wx.ID_EXIT)
+    
+            # and put the menu on the menubar
+            menuBar.Append(menu, "&File")
+            self.SetMenuBar(menuBar)
+    
+            self.CreateStatusBar()
+    
+            # Now create the Panel to put the other controls on.
+            panel = wx.Panel(self)
+    
+            # and a few controls
+            text = wx.StaticText(panel, -1, "Hello World!")
+            text.SetFont(wx.Font(14, wx.SWISS, wx.NORMAL, wx.BOLD))
+            text.SetSize(text.GetBestSize())
+            btn = wx.Button(panel, -1, "Close")
+            funbtn = wx.Button(panel, -1, "Just for fun...")
+    
+            # bind the button events to handlers
+            self.Bind(wx.EVT_BUTTON, self.OnTimeToClose, btn)
+            self.Bind(wx.EVT_BUTTON, self.OnFunButton, funbtn)
+    
+            # Use a sizer to layout the controls, stacked vertically and with
+            # a 10 pixel border around each
+            sizer = wx.BoxSizer(wx.VERTICAL)
+            sizer.Add(text, 0, wx.ALL, 10)
+            sizer.Add(btn, 0, wx.ALL, 10)
+            sizer.Add(funbtn, 0, wx.ALL, 10)
+            panel.SetSizer(sizer)
+            panel.Layout()
+    
+    
+        def OnTimeToClose(self, evt):
+            """Event handler for the button click."""
+            print("See ya later!")
+            self.Close()
+    
+        def OnFunButton(self, evt):
+            """Event handler for the button click."""
+            print("Having fun yet?")
+    
+    
+    class MyApp(wx.App):
+        def OnInit(self):
+            frame = MyFrame(None, "Simple wxPython App")
+            self.SetTopWindow(frame)
+    
+            print("Print statements go to this stdout window by default.")
+    
+            frame.Show(True)
+            return True
+    
+    
+    if __name__ == '__main__':
+    
+        app = wx.GetApp()
+        if app is None:
+            app = MyApp(redirect=False, clearSigInt=False)
+        else:
+            frame = MyFrame(None, "Simple wxPython App")
+            app.SetTopWindow(frame)
+            print("Print statements go to this stdout window by default.")
+            frame.Show(True)
+    
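
A side note on the restructuring above: by indenting the sample under the `if __name__ == '__main__':` guard and checking `wx.GetApp()`, the script can apparently run both standalone and inside a host that already owns a `wx.App` (for example an interactive console that drives the wx event loop). A minimal sketch of that reuse idiom, independent of the sample's classes (`wx.App`/`wx.GetApp` are the real wxPython calls; the frame below is just a placeholder):

import wx

app = wx.GetApp()                      # reuse the host's wx.App if one exists
created_here = app is None
if created_here:
    app = wx.App(redirect=False)       # otherwise create our own application object

frame = wx.Frame(None, -1, "Example")  # placeholder frame for illustration
frame.Show(True)

if created_here:
    app.MainLoop()                     # only run the event loop we created ourselves
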
diff --git a/python/helpers/pydev/tests_python/_debugger_case18.py b/python/helpers/pydev/tests_python/_debugger_case18.py
index 69717b2..c221039 100644
--- a/python/helpers/pydev/tests_python/_debugger_case18.py
+++ b/python/helpers/pydev/tests_python/_debugger_case18.py
@@ -1,4 +1,4 @@
-
+import sys
 
 def m2(a):
     a = 10
@@ -6,7 +6,7 @@
     c = 30
     
     def function2():
-        print a
+        print(a)
 
     return a
 
diff --git a/python/helpers/pydev/tests_python/_debugger_case19.py b/python/helpers/pydev/tests_python/_debugger_case19.py
index aaf380c..07ac951 100644
--- a/python/helpers/pydev/tests_python/_debugger_case19.py
+++ b/python/helpers/pydev/tests_python/_debugger_case19.py
@@ -5,6 +5,6 @@
 
 if __name__ == '__main__':
     a = A()
-    print a._A__var
+    print(a._A__var)
     # Evaluate 'a.__var' should give a._A__var_
     print('TEST SUCEEDED')
diff --git a/python/helpers/pydev/tests_python/_debugger_case7.py b/python/helpers/pydev/tests_python/_debugger_case7.py
index 263110b..499d8d7 100644
--- a/python/helpers/pydev/tests_python/_debugger_case7.py
+++ b/python/helpers/pydev/tests_python/_debugger_case7.py
@@ -5,4 +5,4 @@
     
 if __name__ == '__main__':
     Call()
-    print 'TEST SUCEEDED!'
+    print('TEST SUCEEDED!')
diff --git a/python/helpers/pydev/tests_python/_debugger_case89.py b/python/helpers/pydev/tests_python/_debugger_case89.py
index e6f32dd..e22361d 100644
--- a/python/helpers/pydev/tests_python/_debugger_case89.py
+++ b/python/helpers/pydev/tests_python/_debugger_case89.py
@@ -1,16 +1,16 @@
 def Method1():
-    print 'm1'
+    print('m1')
 
 def Method2():
-    print 'm2 before'
+    print('m2 before')
     Method1()
-    print 'm2 after'
+    print('m2 after')
 
 def Method3():
-    print 'm3 before'
+    print('m3 before')
     Method2()
-    print 'm3 after'
+    print('m3 after')
    
 if __name__ == '__main__': 
     Method3()
-    print 'TEST SUCEEDED!'
+    print('TEST SUCEEDED!')
diff --git a/python/helpers/pydev/tests_python/_debugger_case_qthread1.py b/python/helpers/pydev/tests_python/_debugger_case_qthread1.py
new file mode 100644
index 0000000..eb8729f
--- /dev/null
+++ b/python/helpers/pydev/tests_python/_debugger_case_qthread1.py
@@ -0,0 +1,25 @@
+import time
+import sys
+
+try:
+    from PySide import QtCore
+except:
+    from PyQt4 import QtCore
+
+# Subclassing QThread
+# http://doc.qt.nokia.com/latest/qthread.html
+class AThread(QtCore.QThread):
+
+    def run(self):
+        count = 0
+        while count < 5:
+            time.sleep(.5)
+            print("Increasing", count);sys.stdout.flush()
+            count += 1
+
+app = QtCore.QCoreApplication([])
+thread = AThread()
+thread.finished.connect(app.exit)
+thread.start()
+app.exec_()
+print('TEST SUCEEDED!')
\ No newline at end of file
diff --git a/python/helpers/pydev/tests_python/_debugger_case_qthread2.py b/python/helpers/pydev/tests_python/_debugger_case_qthread2.py
new file mode 100644
index 0000000..b2ce315
--- /dev/null
+++ b/python/helpers/pydev/tests_python/_debugger_case_qthread2.py
@@ -0,0 +1,32 @@
+import time
+import sys
+
+try:
+    from PySide import QtCore
+except:
+    from PyQt4 import QtCore
+    QtCore.Signal = QtCore.pyqtSignal  # PyQt4 exposes pyqtSignal rather than Signal
+
+# Subclassing QObject and using moveToThread
+# http://labs.qt.nokia.com/2007/07/05/qthreads-no-longer-abstract/
+class SomeObject(QtCore.QObject):
+
+    finished = QtCore.Signal()
+
+    def longRunning(self):
+        count = 0
+        while count < 5:
+            time.sleep(.5)
+            print "Increasing"
+            count += 1
+        self.finished.emit()
+
+app = QtCore.QCoreApplication([])
+objThread = QtCore.QThread()
+obj = SomeObject()
+obj.moveToThread(objThread)
+obj.finished.connect(objThread.quit)
+objThread.started.connect(obj.longRunning)
+objThread.finished.connect(app.exit)
+objThread.start()
+app.exec_()
+print('TEST SUCEEDED!')
\ No newline at end of file
diff --git a/python/helpers/pydev/tests_python/_debugger_case_qthread3.py b/python/helpers/pydev/tests_python/_debugger_case_qthread3.py
new file mode 100644
index 0000000..22b0c91
--- /dev/null
+++ b/python/helpers/pydev/tests_python/_debugger_case_qthread3.py
@@ -0,0 +1,29 @@
+import time
+import sys
+
+try:
+    from PySide import QtCore
+except:
+    from PyQt4 import QtCore
+
+# Using a QRunnable
+# http://doc.qt.nokia.com/latest/qthreadpool.html
+# Note that a QRunnable isn't a subclass of QObject and therefore does
+# not provide signals and slots.
+class Runnable(QtCore.QRunnable):
+
+    def run(self):
+        count = 0
+        app = QtCore.QCoreApplication.instance()
+        while count < 5:
+            print "Increasing"
+            time.sleep(.5)
+            count += 1
+        app.quit()
+
+
+app = QtCore.QCoreApplication([])
+runnable = Runnable()
+QtCore.QThreadPool.globalInstance().start(runnable)
+app.exec_()
+print('TEST SUCEEDED!')
\ No newline at end of file
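
The three `_debugger_case_qthread*.py` files above cover the common Qt threading styles: a `QThread` subclass, a worker `QObject` moved to a thread, and a `QRunnable` on the global pool. What they exercise is that such threads are started by Qt itself rather than by Python's `threading` module, so a trace function registered the usual way never reaches them. A minimal illustration of that gap using only the standard library (the trace function below is a toy, not pydevd's):

import sys
import threading

def trace_calls(frame, event, arg):
    # toy trace function: report each function call and keep tracing inside it
    if event == 'call':
        sys.stdout.write('call %s in %s\n' % (
            frame.f_code.co_name, threading.current_thread().name))
    return trace_calls

# Threads created through the threading module run Thread's bootstrap code,
# which installs this trace function for us...
threading.settrace(trace_calls)

def worker():
    sum(range(10))

t = threading.Thread(target=worker)
t.start()
t.join()

# ...but a thread started directly by Qt (QThread.start, QThreadPool) never goes
# through that bootstrap, so the debugger has to hook Qt's thread entry points
# separately -- which is what the breakpoints in the files above verify.
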
diff --git a/python/helpers/pydev/tests_python/test_additional_thread_info.py b/python/helpers/pydev/tests_python/test_additional_thread_info.py
index 6ae260d..71dc352 100644
--- a/python/helpers/pydev/tests_python/test_additional_thread_info.py
+++ b/python/helpers/pydev/tests_python/test_additional_thread_info.py
@@ -1,10 +1,21 @@
 import sys
 import os
+import pydev_monkey
 sys.path.insert(0, os.path.split(os.path.split(__file__)[0])[0])
 
 from pydevd_constants import Null
 import unittest
 
+try:
+    import thread
+except:
+    import _thread as thread
+
+try:
+    xrange
+except:
+    xrange = range
+    
 #=======================================================================================================================
 # TestCase
 #=======================================================================================================================
@@ -40,10 +51,7 @@
             
             
     def testStartNewThread(self):
-        import pydevd
-        import thread
-        original = thread.start_new_thread
-        thread.start_new_thread = pydevd.pydev_start_new_thread
+        pydev_monkey.patch_thread_modules()
         try:
             found = {}
             def function(a, b, *args, **kwargs):
@@ -62,15 +70,11 @@
             
             self.assertEqual({'a': 1, 'b': 2, 'args': (3, 4), 'kwargs': {'e': 2, 'd': 1}}, found)
         finally:
-            thread.start_new_thread = original
+            pydev_monkey.undo_patch_thread_modules()
             
             
     def testStartNewThread2(self):
-        import pydevd
-        import thread
-        
-        original = thread.start_new_thread
-        thread.start_new_thread = pydevd.pydev_start_new_thread
+        pydev_monkey.patch_thread_modules()
         try:
             found = {}
             
@@ -101,7 +105,7 @@
             
             self.assertEqual({'a': 1, 'b': 2, 'args': (3, 4), 'kwargs': {'e': 2, 'd': 1}}, found)
         finally:
-            thread.start_new_thread = original
+            pydev_monkey.undo_patch_thread_modules()
         
 
 #=======================================================================================================================
diff --git a/python/helpers/pydev/tests_python/test_debugger.py b/python/helpers/pydev/tests_python/test_debugger.py
index 3a216cb..ea569dd 100644
--- a/python/helpers/pydev/tests_python/test_debugger.py
+++ b/python/helpers/pydev/tests_python/test_debugger.py
@@ -5,6 +5,17 @@
 
     Note that it's a python script but it'll spawn a process to run as jython, ironpython and as python.
 '''
+SHOW_WRITES_AND_READS = False
+SHOW_OTHER_DEBUG_INFO = False
+SHOW_STDOUT = False
+
+
+
+from pydevd_constants import IS_PY3K
+try:
+    from thread import start_new_thread
+except:
+    from _thread import start_new_thread
 CMD_SET_PROPERTY_TRACE, CMD_EVALUATE_CONSOLE_EXPRESSION, CMD_RUN_CUSTOM_OPERATION, CMD_ENABLE_DONT_TRACE = 133, 134, 135, 141
 PYTHON_EXE = None
 IRONPYTHON_EXE = None
@@ -14,9 +25,13 @@
 
 import unittest
 import pydev_localhost
-
 port = None
 
+try:
+    xrange
+except:
+    xrange = range
+
 def UpdatePort():
     global port
     s = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
@@ -25,29 +40,26 @@
     s.close()
 
 import os
-def NormFile(filename):
+def _get_debugger_test_file(filename):
     try:
         rPath = os.path.realpath  # @UndefinedVariable
     except:
         # jython does not support os.path.realpath
         # realpath is a no-op on systems without islink support
         rPath = os.path.abspath
-    return os.path.normcase(rPath(filename))
+        
+    return os.path.normcase(rPath(os.path.join(os.path.dirname(__file__), filename)))
 
-PYDEVD_FILE = NormFile('../pydevd.py')
+import pydevd
+PYDEVD_FILE = pydevd.__file__
+
 import sys
-sys.path.append(os.path.dirname(PYDEVD_FILE))
-
-SHOW_WRITES_AND_READS = False
-SHOW_RESULT_STR = False
-SHOW_OTHER_DEBUG_INFO = False
-
 
 import subprocess
 import socket
 import threading
 import time
-from urllib import quote_plus, quote, unquote_plus
+from pydev_imports import quote_plus, quote, unquote_plus
 
 
 #=======================================================================================================================
@@ -59,13 +71,16 @@
         threading.Thread.__init__(self)
         self.setDaemon(True)
         self.sock = sock
-        self.lastReceived = None
+        self.lastReceived = ''
 
     def run(self):
+        last_printed = None
         try:
             buf = ''
             while True:
                 l = self.sock.recv(1024)
+                if IS_PY3K:
+                    l = l.decode('utf-8')
                 buf += l
 
                 if '\n' in buf:
@@ -73,7 +88,9 @@
                     buf = ''
 
                 if SHOW_WRITES_AND_READS:
-                    print 'Test Reader Thread Received %s' % self.lastReceived.strip()
+                    if last_printed != self.lastReceived.strip():
+                        last_printed = self.lastReceived.strip()
+                        print('Test Reader Thread Received %s' % last_printed)
         except:
             pass  # ok, finished it
 
@@ -90,6 +107,8 @@
         self.setDaemon(True)
         self.finishedOk = False
         self._next_breakpoint_id = 0
+        self.log = []
+
 
     def DoKill(self):
         if hasattr(self, 'readerThread'):
@@ -98,10 +117,14 @@
         self.sock.close()
 
     def Write(self, s):
+        
         last = self.readerThread.lastReceived
         if SHOW_WRITES_AND_READS:
-            print 'Test Writer Thread Written %s' % (s,)
-        self.sock.send(s + '\n')
+            print('Test Writer Thread Written %s' % (s,))
+        msg = s + '\n'
+        if IS_PY3K:
+            msg = msg.encode('utf-8')
+        self.sock.send(msg)
         time.sleep(0.2)
 
         i = 0
@@ -112,16 +135,16 @@
 
     def StartSocket(self):
         if SHOW_WRITES_AND_READS:
-            print 'StartSocket'
+            print('StartSocket')
 
         s = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
         s.bind(('', port))
         s.listen(1)
         if SHOW_WRITES_AND_READS:
-            print 'Waiting in socket.accept()'
+            print('Waiting in socket.accept()')
         newSock, addr = s.accept()
         if SHOW_WRITES_AND_READS:
-            print 'Test Writer Thread Socket:', newSock, addr
+            print('Test Writer Thread Socket:', newSock, addr)
 
         readerThread = self.readerThread = ReaderThread(newSock)
         readerThread.start()
@@ -130,6 +153,7 @@
         self._sequence = -1
         # initial command is always the version
         self.WriteVersion()
+        self.log.append('StartSocket')
 
     def NextBreakpointId(self):
         self._next_breakpoint_id += 1
@@ -160,22 +184,27 @@
             109 is return
             111 is breakpoint
         '''
+        self.log.append('Start: WaitForBreakpointHit')
         i = 0
         # wait for hit breakpoint
-        while not ('stop_reason="%s"' % reason) in self.readerThread.lastReceived:
+        last = self.readerThread.lastReceived
+        while not ('stop_reason="%s"' % reason) in last:
             i += 1
             time.sleep(1)
+            last = self.readerThread.lastReceived
             if i >= 10:
                 raise AssertionError('After %s seconds, a break with reason: %s was not hit. Found: %s' % \
-                    (i, reason, self.readerThread.lastReceived))
+                    (i, reason, last))
 
         # we have something like <xml><thread id="12152656" stop_reason="111"><frame id="12453120" ...
-        splitted = self.readerThread.lastReceived.split('"')
+        splitted = last.split('"')
         threadId = splitted[1]
         frameId = splitted[7]
         if get_line:
+            self.log.append('End(0): WaitForBreakpointHit')
             return threadId, frameId, int(splitted[13])
 
+        self.log.append('End(1): WaitForBreakpointHit')
         return threadId, frameId
 
     def WaitForCustomOperation(self, expected):
@@ -264,6 +293,7 @@
 
     def WriteMakeInitialRun(self):
         self.Write("101\t%s\t" % self.NextSeq())
+        self.log.append('WriteMakeInitialRun')
 
     def WriteVersion(self):
         self.Write("501\t%s\t1.0\tWINDOWS\tID" % self.NextSeq())
@@ -274,6 +304,7 @@
         '''
         breakpoint_id = self.NextBreakpointId()
         self.Write("111\t%s\t%s\t%s\t%s\t%s\t%s\tNone\tNone" % (self.NextSeq(), breakpoint_id, 'python-line', self.TEST_FILE, line, func))
+        self.log.append('WriteAddBreakpoint: %s line: %s func: %s' % (breakpoint_id, line, func))
         return breakpoint_id
 
     def WriteRemoveBreakpoint(self, breakpoint_id):
@@ -284,6 +315,7 @@
 
     def WriteGetFrame(self, threadId, frameId):
         self.Write("114\t%s\t%s\t%s\tFRAME" % (self.NextSeq(), threadId, frameId))
+        self.log.append('WriteGetFrame')
 
     def WriteGetVariable(self, threadId, frameId, var_attrs):
         self.Write("110\t%s\t%s\t%s\tFRAME\t%s" % (self.NextSeq(), threadId, frameId, var_attrs))
@@ -301,6 +333,7 @@
         self.Write("105\t%s\t%s" % (self.NextSeq(), threadId,))
 
     def WriteRunThread(self, threadId):
+        self.log.append('WriteRunThread')
         self.Write("106\t%s\t%s" % (self.NextSeq(), threadId,))
 
     def WriteKillThread(self, threadId):
@@ -328,7 +361,7 @@
 #======================================================================================================================
 class WriterThreadCase19(AbstractWriterThread):
 
-    TEST_FILE = NormFile('_debugger_case19.py')
+    TEST_FILE = _get_debugger_test_file('_debugger_case19.py')
 
     def run(self):
         self.StartSocket()
@@ -352,7 +385,7 @@
 #======================================================================================================================
 class WriterThreadCase18(AbstractWriterThread):
 
-    TEST_FILE = NormFile('_debugger_case18.py')
+    TEST_FILE = _get_debugger_test_file('_debugger_case18.py')
 
     def run(self):
         self.StartSocket()
@@ -372,7 +405,7 @@
 #======================================================================================================================
 class WriterThreadCase17(AbstractWriterThread):
 
-    TEST_FILE = NormFile('_debugger_case17.py')
+    TEST_FILE = _get_debugger_test_file('_debugger_case17.py')
 
     def run(self):
         self.StartSocket()
@@ -400,7 +433,7 @@
 #======================================================================================================================
 class WriterThreadCase16(AbstractWriterThread):
 
-    TEST_FILE = NormFile('_debugger_case16.py')
+    TEST_FILE = _get_debugger_test_file('_debugger_case16.py')
 
     def run(self):
         self.StartSocket()
@@ -430,8 +463,16 @@
         self.WaitForVar('<var name="%27size%27')
 
         self.WriteGetVariable(threadId, frameId, 'bigarray')
-        self.WaitForVar(['<var name="min" type="int64" value="int64%253A 0" />', '<var name="size" type="int" value="int%3A 100000" />'])  # TODO: When on a 32 bit python we get an int32 (which makes this test fail).
-        self.WaitForVar(['<var name="max" type="int64" value="int64%253A 99999" />', '<var name="max" type="int32" value="int32%253A 99999" />'])
+        self.WaitForVar([
+            '<var name="min" type="int64" value="int64%253A 0" />', 
+            '<var name="min" type="int64" value="int64%3A 0" />', 
+            '<var name="size" type="int" value="int%3A 100000" />',
+        ])
+        self.WaitForVar([
+            '<var name="max" type="int64" value="int64%253A 99999" />', 
+            '<var name="max" type="int32" value="int32%253A 99999" />',
+            '<var name="max" type="int64" value="int64%3A 99999"'
+        ])
         self.WaitForVar('<var name="shape" type="tuple"')
         self.WaitForVar('<var name="dtype" type="dtype"')
         self.WaitForVar('<var name="size" type="int"')
@@ -441,8 +482,14 @@
         # this one is different because it crosses the magic threshold where we don't calculate
         # the min/max
         self.WriteGetVariable(threadId, frameId, 'hugearray')
-        self.WaitForVar('<var name="min" type="str" value="str%253A ndarray too big%252C calculating min would slow down debugging" />')
-        self.WaitForVar('<var name="max" type="str" value="str%253A ndarray too big%252C calculating max would slow down debugging" />')
+        self.WaitForVar([
+            '<var name="min" type="str" value="str%253A ndarray too big%252C calculating min would slow down debugging" />',
+            '<var name="min" type="str" value="str%3A ndarray too big%252C calculating min would slow down debugging" />',
+        ])
+        self.WaitForVar([
+            '<var name="max" type="str" value="str%253A ndarray too big%252C calculating max would slow down debugging" />',
+            '<var name="max" type="str" value="str%3A ndarray too big%252C calculating max would slow down debugging" />',
+        ])
         self.WaitForVar('<var name="shape" type="tuple"')
         self.WaitForVar('<var name="dtype" type="dtype"')
         self.WaitForVar('<var name="size" type="int"')
@@ -458,7 +505,7 @@
 #======================================================================================================================
 class WriterThreadCase15(AbstractWriterThread):
 
-    TEST_FILE = NormFile('_debugger_case15.py')
+    TEST_FILE = _get_debugger_test_file('_debugger_case15.py')
 
     def run(self):
         self.StartSocket()
@@ -472,7 +519,7 @@
         self.WaitForCustomOperation('val=Black')
         assert 7 == self._sequence, 'Expected 7. Had: %s' % self._sequence
 
-        self.WriteCustomOperation("%s\t%s\tEXPRESSION\tcarObj.color" % (threadId, frameId), "EXECFILE", NormFile('_debugger_case15_execfile.py'), "f")
+        self.WriteCustomOperation("%s\t%s\tEXPRESSION\tcarObj.color" % (threadId, frameId), "EXECFILE", _get_debugger_test_file('_debugger_case15_execfile.py'), "f")
         self.WaitForCustomOperation('val=Black')
         assert 9 == self._sequence, 'Expected 9. Had: %s' % self._sequence
 
@@ -486,7 +533,7 @@
 #======================================================================================================================
 class WriterThreadCase14(AbstractWriterThread):
 
-    TEST_FILE = NormFile('_debugger_case14.py')
+    TEST_FILE = _get_debugger_test_file('_debugger_case14.py')
 
     def run(self):
         self.StartSocket()
@@ -499,21 +546,26 @@
 
         # Access some variable
         self.WriteDebugConsoleExpression("%s\t%s\tEVALUATE\tcarObj.color" % (threadId, frameId))
-        self.WaitForMultipleVars(['<more>False</more>', '%27Black%27'])
+        self.WaitForVar(['<more>False</more>', '%27Black%27'])
         assert 7 == self._sequence, 'Expected 9. Had: %s' % self._sequence
 
         # Change some variable
         self.WriteDebugConsoleExpression("%s\t%s\tEVALUATE\tcarObj.color='Red'" % (threadId, frameId))
         self.WriteDebugConsoleExpression("%s\t%s\tEVALUATE\tcarObj.color" % (threadId, frameId))
-        self.WaitForMultipleVars(['<more>False</more>', '%27Red%27'])
+        self.WaitForVar(['<more>False</more>', '%27Red%27'])
         assert 11 == self._sequence, 'Expected 13. Had: %s' % self._sequence
 
         # Iterate some loop
         self.WriteDebugConsoleExpression("%s\t%s\tEVALUATE\tfor i in range(3):" % (threadId, frameId))
-        self.WaitForVars('<xml><more>True</more></xml>')
-        self.WriteDebugConsoleExpression("%s\t%s\tEVALUATE\t    print i" % (threadId, frameId))
+        self.WaitForVar(['<xml><more>True</more></xml>', '<xml><more>1</more></xml>'])
+        self.WriteDebugConsoleExpression("%s\t%s\tEVALUATE\t    print(i)" % (threadId, frameId))
         self.WriteDebugConsoleExpression("%s\t%s\tEVALUATE\t" % (threadId, frameId))
-        self.WaitForVars('<xml><more>False</more><output message="0"></output><output message="1"></output><output message="2"></output></xml>')
+        self.WaitForVar(
+            [
+                '<xml><more>False</more><output message="0"></output><output message="1"></output><output message="2"></output></xml>',
+                '<xml><more>0</more><output message="0"></output><output message="1"></output><output message="2"></output></xml>'
+            ]
+            )
         assert 17 == self._sequence, 'Expected 19. Had: %s' % self._sequence
 
         self.WriteRunThread(threadId)
@@ -525,7 +577,7 @@
 #======================================================================================================================
 class WriterThreadCase13(AbstractWriterThread):
 
-    TEST_FILE = NormFile('_debugger_case13.py')
+    TEST_FILE = _get_debugger_test_file('_debugger_case13.py')
 
     def run(self):
         self.StartSocket()
@@ -575,7 +627,7 @@
 #======================================================================================================================
 class WriterThreadCase12(AbstractWriterThread):
 
-    TEST_FILE = NormFile('_debugger_case10.py')
+    TEST_FILE = _get_debugger_test_file('_debugger_case10.py')
 
     def run(self):
         self.StartSocket()
@@ -607,7 +659,7 @@
 #======================================================================================================================
 class WriterThreadCase11(AbstractWriterThread):
 
-    TEST_FILE = NormFile('_debugger_case10.py')
+    TEST_FILE = _get_debugger_test_file('_debugger_case10.py')
 
     def run(self):
         self.StartSocket()
@@ -648,7 +700,7 @@
 #======================================================================================================================
 class WriterThreadCase10(AbstractWriterThread):
 
-    TEST_FILE = NormFile('_debugger_case10.py')
+    TEST_FILE = _get_debugger_test_file('_debugger_case10.py')
 
     def run(self):
         self.StartSocket()
@@ -682,7 +734,7 @@
 #======================================================================================================================
 class WriterThreadCase9(AbstractWriterThread):
 
-    TEST_FILE = NormFile('_debugger_case89.py')
+    TEST_FILE = _get_debugger_test_file('_debugger_case89.py')
 
     def run(self):
         self.StartSocket()
@@ -715,7 +767,7 @@
 #======================================================================================================================
 class WriterThreadCase8(AbstractWriterThread):
 
-    TEST_FILE = NormFile('_debugger_case89.py')
+    TEST_FILE = _get_debugger_test_file('_debugger_case89.py')
 
     def run(self):
         self.StartSocket()
@@ -744,7 +796,7 @@
 #======================================================================================================================
 class WriterThreadCase7(AbstractWriterThread):
 
-    TEST_FILE = NormFile('_debugger_case7.py')
+    TEST_FILE = _get_debugger_test_file('_debugger_case7.py')
 
     def run(self):
         self.StartSocket()
@@ -782,7 +834,7 @@
 #=======================================================================================================================
 class WriterThreadCase6(AbstractWriterThread):
 
-    TEST_FILE = NormFile('_debugger_case56.py')
+    TEST_FILE = _get_debugger_test_file('_debugger_case56.py')
 
     def run(self):
         self.StartSocket()
@@ -817,7 +869,7 @@
 #=======================================================================================================================
 class WriterThreadCase5(AbstractWriterThread):
 
-    TEST_FILE = NormFile('_debugger_case56.py')
+    TEST_FILE = _get_debugger_test_file('_debugger_case56.py')
 
     def run(self):
         self.StartSocket()
@@ -855,7 +907,7 @@
 #=======================================================================================================================
 class WriterThreadCase4(AbstractWriterThread):
 
-    TEST_FILE = NormFile('_debugger_case4.py')
+    TEST_FILE = _get_debugger_test_file('_debugger_case4.py')
 
     def run(self):
         self.StartSocket()
@@ -877,12 +929,12 @@
 #=======================================================================================================================
 class WriterThreadCase3(AbstractWriterThread):
 
-    TEST_FILE = NormFile('_debugger_case3.py')
+    TEST_FILE = _get_debugger_test_file('_debugger_case3.py')
 
     def run(self):
         self.StartSocket()
         self.WriteMakeInitialRun()
-        time.sleep(1)
+        time.sleep(.5)
         breakpoint_id = self.WriteAddBreakpoint(4, '')
         self.WriteAddBreakpoint(5, 'FuncNotAvailable')  # Check that it doesn't get hit in the global when a function is available
 
@@ -909,7 +961,7 @@
 #=======================================================================================================================
 class WriterThreadCase2(AbstractWriterThread):
 
-    TEST_FILE = NormFile('_debugger_case2.py')
+    TEST_FILE = _get_debugger_test_file('_debugger_case2.py')
 
     def run(self):
         self.StartSocket()
@@ -930,8 +982,79 @@
 
         self.WriteRunThread(threadId)
 
+        self.log.append('Checking sequence. Found: %s' % (self._sequence))
         assert 15 == self._sequence, 'Expected 15. Had: %s' % self._sequence
 
+        self.log.append('Marking finished ok.')
+        self.finishedOk = True
+
+#=======================================================================================================================
+# WriterThreadCaseQThread1
+#=======================================================================================================================
+class WriterThreadCaseQThread1(AbstractWriterThread):
+
+    TEST_FILE = _get_debugger_test_file('_debugger_case_qthread1.py')
+
+    def run(self):
+        self.StartSocket()
+        breakpoint_id = self.WriteAddBreakpoint(16, 'run')
+        self.WriteMakeInitialRun()
+
+        threadId, frameId = self.WaitForBreakpointHit()
+
+        self.WriteRemoveBreakpoint(breakpoint_id)
+        self.WriteRunThread(threadId)
+
+        self.log.append('Checking sequence. Found: %s' % (self._sequence))
+        assert 9 == self._sequence, 'Expected 9. Had: %s' % self._sequence
+
+        self.log.append('Marking finished ok.')
+        self.finishedOk = True
+
+#=======================================================================================================================
+# WriterThreadCaseQThread2
+#=======================================================================================================================
+class WriterThreadCaseQThread2(AbstractWriterThread):
+
+    TEST_FILE = _get_debugger_test_file('_debugger_case_qthread2.py')
+
+    def run(self):
+        self.StartSocket()
+        breakpoint_id = self.WriteAddBreakpoint(18, 'longRunning')
+        self.WriteMakeInitialRun()
+
+        threadId, frameId = self.WaitForBreakpointHit()
+
+        self.WriteRemoveBreakpoint(breakpoint_id)
+        self.WriteRunThread(threadId)
+
+        self.log.append('Checking sequence. Found: %s' % (self._sequence))
+        assert 9 == self._sequence, 'Expected 9. Had: %s' % self._sequence
+
+        self.log.append('Marking finished ok.')
+        self.finishedOk = True
+
+#=======================================================================================================================
+# WriterThreadCaseQThread3
+#=======================================================================================================================
+class WriterThreadCaseQThread3(AbstractWriterThread):
+
+    TEST_FILE = _get_debugger_test_file('_debugger_case_qthread3.py')
+
+    def run(self):
+        self.StartSocket()
+        breakpoint_id = self.WriteAddBreakpoint(19, 'run')
+        self.WriteMakeInitialRun()
+
+        threadId, frameId = self.WaitForBreakpointHit()
+
+        self.WriteRemoveBreakpoint(breakpoint_id)
+        self.WriteRunThread(threadId)
+
+        self.log.append('Checking sequence. Found: %s' % (self._sequence))
+        assert 9 == self._sequence, 'Expected 9. Had: %s' % self._sequence
+
+        self.log.append('Marking finished ok.')
         self.finishedOk = True
 
 #=======================================================================================================================
@@ -939,24 +1062,39 @@
 #=======================================================================================================================
 class WriterThreadCase1(AbstractWriterThread):
 
-    TEST_FILE = NormFile('_debugger_case1.py')
+    TEST_FILE = _get_debugger_test_file('_debugger_case1.py')
 
     def run(self):
         self.StartSocket()
+        
+        self.log.append('writing add breakpoint')
         self.WriteAddBreakpoint(6, 'SetUp')
+        
+        self.log.append('making initial run')
         self.WriteMakeInitialRun()
 
+        self.log.append('waiting for breakpoint hit')
         threadId, frameId = self.WaitForBreakpointHit()
 
+        self.log.append('get frame')
         self.WriteGetFrame(threadId, frameId)
 
+        self.log.append('step over')
         self.WriteStepOver(threadId)
 
+        self.log.append('get frame')
         self.WriteGetFrame(threadId, frameId)
 
+        self.log.append('run thread')
         self.WriteRunThread(threadId)
 
-        assert 13 == self._sequence, 'Expected 13. Had: %s' % self._sequence
+        self.log.append('asserting')
+        try:
+            assert 13 == self._sequence, 'Expected 13. Had: %s' % self._sequence
+        except:
+            self.log.append('assert failed!')
+            raise
+        self.log.append('asserted')
 
         self.finishedOk = True
 
@@ -972,6 +1110,7 @@
         UpdatePort()
         writerThread = writerThreadClass()
         writerThread.start()
+        time.sleep(1)
 
         localhost = pydev_localhost.get_localhost()
         args = self.getCommandLine()
@@ -987,60 +1126,74 @@
         ]
 
         if SHOW_OTHER_DEBUG_INFO:
-            print 'executing', ' '.join(args)
+            print('executing', ' '.join(args))
 
-#         process = subprocess.Popen(args, stdout=subprocess.PIPE, stderr=subprocess.STDOUT, cwd=os.path.dirname(PYDEVD_FILE))
-        process = subprocess.Popen(args, stdout=subprocess.PIPE, cwd=os.path.dirname(PYDEVD_FILE))
-        class ProcessReadThread(threading.Thread):
-            def run(self):
-                self.resultStr = None
-                self.resultStr = process.stdout.read()
-                process.stdout.close()
+        process = subprocess.Popen(args, stdout=subprocess.PIPE, stderr=subprocess.STDOUT, cwd=os.path.dirname(PYDEVD_FILE))
 
-            def DoKill(self):
-                process.stdout.close()
+        stdout = []
+        stderr = []
 
-        processReadThread = ProcessReadThread()
-        processReadThread.setDaemon(True)
-        processReadThread.start()
+        def read(stream, buffer):
+            for line in stream.readlines():
+                if IS_PY3K:
+                    line = line.decode('utf-8')
+
+                if SHOW_STDOUT:
+                    print(line)
+                buffer.append(line)
+            
+        start_new_thread(read, (process.stdout, stdout))
+        
+        
         if SHOW_OTHER_DEBUG_INFO:
-            print 'Both processes started'
+            print('Both processes started')
 
         # polls can fail (because the process may finish and the thread still not -- so, we give it some more chances to
         # finish successfully).
-        pools_failed = 0
-        while writerThread.isAlive():
+        check = 0
+        while True:
             if process.poll() is not None:
-                pools_failed += 1
-            time.sleep(.2)
-            if pools_failed == 10:
                 break
-
-        if process.poll() is None:
-            for i in range(10):
-                if processReadThread.resultStr is None:
-                    time.sleep(.5)
-                else:
-                    break
             else:
-                writerThread.DoKill()
-
-        else:
-            if process.poll() < 0:
-                self.fail("The other process exited with error code: " + str(process.poll()) + " result:" + processReadThread.resultStr)
+                if not writerThread.isAlive():
+                    check += 1
+                    if check == 20:
+                        print('Warning: writer thread exited and process still did not.')
+                    if check == 100:
+                        self.fail_with_message(
+                            "The other process should've exited but still didn't (timeout for process to exit).", 
+                            stdout, stderr, writerThread
+                        )
+            time.sleep(.2)
+            
+            
+        poll = process.poll()
+        if poll < 0:
+            self.fail_with_message(
+                "The other process exited with error code: " + str(poll), stdout, stderr, writerThread)
 
 
-        if SHOW_RESULT_STR:
-            print processReadThread.resultStr
+        if stdout is None:
+            self.fail_with_message(
+                "The other process may still be running -- and didn't give any output.", stdout, stderr, writerThread)
 
-        if processReadThread.resultStr is None:
-            self.fail("The other process may still be running -- and didn't give any output")
+        if 'TEST SUCEEDED' not in ''.join(stdout):
+            self.fail_with_message("TEST SUCEEDED not found in stdout.", stdout, stderr, writerThread)
 
-        if 'TEST SUCEEDED' not in processReadThread.resultStr:
-            self.fail(processReadThread.resultStr)
-
+        for i in xrange(100):
+            if writerThread.finishedOk:
+                break
+            time.sleep(.1)
+            
         if not writerThread.finishedOk:
-            self.fail("The thread that was doing the tests didn't finish successfully. Output: %s" % processReadThread.resultStr)
+            self.fail_with_message(
+                "The thread that was doing the tests didn't finish successfully.", stdout, stderr, writerThread)
+            
+    def fail_with_message(self, msg, stdout, stderr, writerThread):
+        self.fail(msg+
+            "\nStdout: \n"+'\n'.join(stdout)+
+            "\nStderr:"+'\n'.join(stderr)+
+            "\nLog:\n"+'\n'.join(getattr(writerThread, 'log', [])))
+        
 
     def testCase1(self):
         self.CheckCase(WriterThreadCase1)
@@ -1098,6 +1251,30 @@
         
     def testCase19(self):
         self.CheckCase(WriterThreadCase19)
+        
+    def _has_qt(self):
+        try:
+            from PySide import QtCore
+            return True
+        except:
+            try:
+                from PyQt4 import QtCore
+                return True
+            except:
+                pass
+        return False
+
+    def testCaseQthread1(self):
+        if self._has_qt():
+            self.CheckCase(WriterThreadCaseQThread1)
+
+    def testCaseQthread2(self):
+        if self._has_qt():
+            self.CheckCase(WriterThreadCaseQThread2)
+
+    def testCaseQthread3(self):
+        if self._has_qt():
+            self.CheckCase(WriterThreadCaseQThread3)
 
 
 class TestPython(unittest.TestCase, DebuggerBase):
@@ -1117,9 +1294,6 @@
     def testCase13(self):
         self.skipTest("Unsupported Decorators")
 
-    def testCase16(self):
-        self.skipTest("Unsupported numpy")
-
     # This case requires decorators to work (which are not present on Jython 2.1), so, this test is just removed from the jython run.
     def testCase17(self):
         self.skipTest("Unsupported Decorators")
@@ -1127,6 +1301,9 @@
     def testCase18(self):
         self.skipTest("Unsupported assign to local")
 
+    def testCase16(self):
+        self.skipTest("Unsupported numpy")
+
 class TestIronPython(unittest.TestCase, DebuggerBase):
     def getCommandLine(self):
         return [
@@ -1134,8 +1311,22 @@
                 '-X:Frames'
             ]
 
+    def testCase3(self):
+        self.skipTest("Timing issues") # This test fails once in a while due to timing issues on IronPython, so, skipping it. 
+        
+    def testCase7(self):
+        # This test checks that we start without variables and that a new var is created at each step, but on IronPython
+        # the variables exist all at once (with None values), so we can't test it properly.
+        self.skipTest("Different behavior on IronPython") 
+        
+    def testCase13(self):
+        self.skipTest("Unsupported Decorators") # Not sure why it doesn't work on IronPython, but it's not so common, so, leave it be.
+        
     def testCase16(self):
         self.skipTest("Unsupported numpy")
+        
+    def testCase18(self):
+        self.skipTest("Unsupported assign to local")
 
 
 def GetLocationFromLine(line):
@@ -1157,49 +1348,90 @@
 
 
 
+
 import platform
 sysname = platform.system().lower()
 test_dependent = os.path.join('../../../', 'org.python.pydev.core', 'tests', 'org', 'python', 'pydev', 'core', 'TestDependent.' + sysname + '.properties')
-f = open(test_dependent)
-try:
-    for line in f.readlines():
-        var, loc = SplitLine(line)
-        if 'PYTHON_EXE' == var:
-            PYTHON_EXE = loc
 
-        if 'IRONPYTHON_EXE' == var:
-            IRONPYTHON_EXE = loc
-
-        if 'JYTHON_JAR_LOCATION' == var:
-            JYTHON_JAR_LOCATION = loc
-
-        if 'JAVA_LOCATION' == var:
-            JAVA_LOCATION = loc
-finally:
-    f.close()
-
-assert PYTHON_EXE, 'PYTHON_EXE not found in %s' % (test_dependent,)
-assert IRONPYTHON_EXE, 'IRONPYTHON_EXE not found in %s' % (test_dependent,)
-assert JYTHON_JAR_LOCATION, 'JYTHON_JAR_LOCATION not found in %s' % (test_dependent,)
-assert JAVA_LOCATION, 'JAVA_LOCATION not found in %s' % (test_dependent,)
-assert os.path.exists(PYTHON_EXE), 'The location: %s is not valid' % (PYTHON_EXE,)
-assert os.path.exists(IRONPYTHON_EXE), 'The location: %s is not valid' % (IRONPYTHON_EXE,)
-assert os.path.exists(JYTHON_JAR_LOCATION), 'The location: %s is not valid' % (JYTHON_JAR_LOCATION,)
-assert os.path.exists(JAVA_LOCATION), 'The location: %s is not valid' % (JAVA_LOCATION,)
-
-if False:
-    suite = unittest.TestSuite()
-    #PYTHON_EXE = r'C:\bin\Anaconda\python.exe'
-#     suite.addTest(TestPython('testCase10'))
-#     suite.addTest(TestPython('testCase3'))
-#     suite.addTest(TestPython('testCase16'))
-#     suite.addTest(TestPython('testCase17'))
-#     suite.addTest(TestPython('testCase18'))
-#     suite.addTest(TestPython('testCase19'))
-    suite = unittest.makeSuite(TestPython)
-    unittest.TextTestRunner(verbosity=3).run(suite)
+if os.path.exists(test_dependent):
+    f = open(test_dependent)
+    try:
+        for line in f.readlines():
+            var, loc = SplitLine(line)
+            if 'PYTHON_EXE' == var:
+                PYTHON_EXE = loc
     
-#    unittest.TextTestRunner(verbosity=3).run(suite)
-#    
-#    suite = unittest.makeSuite(TestJython)
-#    unittest.TextTestRunner(verbosity=3).run(suite)
+            if 'IRONPYTHON_EXE' == var:
+                IRONPYTHON_EXE = loc
+    
+            if 'JYTHON_JAR_LOCATION' == var:
+                JYTHON_JAR_LOCATION = loc
+    
+            if 'JAVA_LOCATION' == var:
+                JAVA_LOCATION = loc
+    finally:
+        f.close()
+else:
+    pass
+
+if IRONPYTHON_EXE is None:
+    sys.stderr.write('Warning: not running IronPython tests.\n')
+    class TestIronPython(unittest.TestCase):
+        pass
+    
+if JAVA_LOCATION is None:
+    sys.stderr.write('Warning: not running Jython tests.\n')
+    class TestJython(unittest.TestCase):
+        pass
+    
+# if PYTHON_EXE is None:
+PYTHON_EXE = sys.executable
+    
+    
+if __name__ == '__main__':
+    if False:
+        assert PYTHON_EXE, 'PYTHON_EXE not found in %s' % (test_dependent,)
+        assert IRONPYTHON_EXE, 'IRONPYTHON_EXE not found in %s' % (test_dependent,)
+        assert JYTHON_JAR_LOCATION, 'JYTHON_JAR_LOCATION not found in %s' % (test_dependent,)
+        assert JAVA_LOCATION, 'JAVA_LOCATION not found in %s' % (test_dependent,)
+        assert os.path.exists(PYTHON_EXE), 'The location: %s is not valid' % (PYTHON_EXE,)
+        assert os.path.exists(IRONPYTHON_EXE), 'The location: %s is not valid' % (IRONPYTHON_EXE,)
+        assert os.path.exists(JYTHON_JAR_LOCATION), 'The location: %s is not valid' % (JYTHON_JAR_LOCATION,)
+        assert os.path.exists(JAVA_LOCATION), 'The location: %s is not valid' % (JAVA_LOCATION,)
+    
+    if True:
+        #try:
+        #    os.remove(r'X:\pydev\plugins\org.python.pydev\pysrc\pydevd.pyc')
+        #except:
+        #    pass
+        suite = unittest.TestSuite()
+        
+#         suite.addTests(unittest.makeSuite(TestJython)) # Note: Jython should be 2.2.1
+#           
+#         suite.addTests(unittest.makeSuite(TestIronPython))
+#         
+#         suite.addTests(unittest.makeSuite(TestPython))
+
+
+
+
+#         suite.addTest(TestIronPython('testCase18'))
+#         suite.addTest(TestIronPython('testCase17'))
+#         suite.addTest(TestIronPython('testCase3'))
+#         suite.addTest(TestIronPython('testCase7'))
+#         
+        suite.addTest(TestPython('testCaseQthread1'))
+        suite.addTest(TestPython('testCaseQthread2'))
+        suite.addTest(TestPython('testCaseQthread3'))
+        
+#         suite.addTest(TestPython('testCase4'))
+
+
+#         suite.addTest(TestJython('testCase1'))
+#         suite.addTest(TestPython('testCase2'))
+#         unittest.TextTestRunner(verbosity=3).run(suite)
+    #     suite.addTest(TestPython('testCase17'))
+    #     suite.addTest(TestPython('testCase18'))
+    #     suite.addTest(TestPython('testCase19'))
+        
+        unittest.TextTestRunner(verbosity=3).run(suite)
diff --git a/python/helpers/pydev/tests_python/test_pydev_monkey.py b/python/helpers/pydev/tests_python/test_pydev_monkey.py
index 3eb7930..be1312a 100644
--- a/python/helpers/pydev/tests_python/test_pydev_monkey.py
+++ b/python/helpers/pydev/tests_python/test_pydev_monkey.py
@@ -1,17 +1,32 @@
 import unittest
 import pydev_monkey
 import sys
+from pydevd import SetupHolder
+from pydev_monkey import pydev_src_dir
 
 
 
 class TestCase(unittest.TestCase):
 
     def test_monkey(self):
-        check='''C:\\bin\\python.exe -u -c "
+        original = SetupHolder.setup
+        
+        try:
+            SetupHolder.setup = {'client':'127.0.0.1', 'port': '0'}
+            check='''C:\\bin\\python.exe -u -c "
 connect(\\"127.0.0.1\\")
 "'''
-        sys.original_argv = []
-        self.assertEqual('"-u" "-c" "\nconnect(\\"127.0.0.1\\")\n"', pydev_monkey.patch_arg_str_win(check))
+            sys.original_argv = []
+            self.assertEqual(
+                '"C:\\bin\\python.exe" "-u" "-c" "import sys; '
+                'sys.path.append(r\'%s\'); '
+                'import pydevd; pydevd.settrace(host=\'127.0.0.1\', port=0, suspend=False, '
+                    'trace_only_current_thread=False, patch_multiprocessing=True); '
+                    '\nconnect(\\"127.0.0.1\\")\n"' % pydev_src_dir, 
+                pydev_monkey.patch_arg_str_win(check)
+            )
+        finally:
+            SetupHolder.setup = original
 
     def test_str_to_args_windows(self):
         
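
The updated `test_monkey` above pins down what the command-line patching is expected to produce: the child's `-c` code is prefixed with a bootstrap that appends the pydev sources to `sys.path` and calls `pydevd.settrace(...)` against the same debugger. A rough sketch of that rewriting idea (the function name and argument handling here are illustrative, not `pydev_monkey`'s actual implementation):

def patch_python_cmdline(args, host, port, pydev_dir):
    """Return a copy of a "python ... -c <code>" argv whose code first attaches
    to the debugger (illustrative sketch only)."""
    bootstrap = (
        "import sys; sys.path.append(r'%s'); "
        "import pydevd; pydevd.settrace(host='%s', port=%s, suspend=False); "
        % (pydev_dir, host, port))
    patched = []
    i = 0
    while i < len(args):
        if args[i] == '-c' and i + 1 < len(args):
            patched.append('-c')
            patched.append(bootstrap + args[i + 1])  # prepend the bootstrap code
            patched.extend(args[i + 2:])
            break
        patched.append(args[i])
        i += 1
    return patched

# e.g. patch_python_cmdline(['python', '-u', '-c', 'run()'], '127.0.0.1', 0, '/pydev')
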
diff --git a/python/helpers/pydev/tests_python/test_save_locals.py b/python/helpers/pydev/tests_python/test_save_locals.py
index fe65d4d..a3beb56 100644
--- a/python/helpers/pydev/tests_python/test_save_locals.py
+++ b/python/helpers/pydev/tests_python/test_save_locals.py
@@ -16,7 +16,7 @@
     save_locals(frame)
 
 
-def test_method(fn):
+def check_method(fn):
     """
     A harness for testing methods that attempt to modify the values of locals on the stack.
     """
@@ -36,7 +36,7 @@
 
 
     def test_set_locals_using_save_locals(self):
-        x = test_method(use_save_locals)
+        x = check_method(use_save_locals)
         self.assertEqual(x, 2)  # Expected to succeed
 
 
@@ -65,7 +65,7 @@
         def check_co_vars(a):
             frame = sys._getframe()
             def function2():
-                print a
+                print(a)
 
             assert 'a' in frame.f_code.co_cellvars
             frame = sys._getframe()
diff --git a/python/helpers/pydev/tests_runfiles/samples/__init__.py b/python/helpers/pydev/tests_runfiles/samples/__init__.py
deleted file mode 100644
index e69de29..0000000
--- a/python/helpers/pydev/tests_runfiles/samples/__init__.py
+++ /dev/null
diff --git a/python/helpers/pydev/tests_runfiles/samples/nested_dir/.cvsignore b/python/helpers/pydev/tests_runfiles/samples/nested_dir/.cvsignore
new file mode 100644
index 0000000..d1c8995
--- /dev/null
+++ b/python/helpers/pydev/tests_runfiles/samples/nested_dir/.cvsignore
@@ -0,0 +1,2 @@
+*.class
+*.pyc
diff --git a/python/helpers/pydev/tests_runfiles/samples/nested_dir/nested2/.cvsignore b/python/helpers/pydev/tests_runfiles/samples/nested_dir/nested2/.cvsignore
new file mode 100644
index 0000000..d1c8995
--- /dev/null
+++ b/python/helpers/pydev/tests_runfiles/samples/nested_dir/nested2/.cvsignore
@@ -0,0 +1,2 @@
+*.class
+*.pyc
diff --git a/python/helpers/pydev/tests_runfiles/samples/nested_dir/nested3/.cvsignore b/python/helpers/pydev/tests_runfiles/samples/nested_dir/nested3/.cvsignore
new file mode 100644
index 0000000..d1c8995
--- /dev/null
+++ b/python/helpers/pydev/tests_runfiles/samples/nested_dir/nested3/.cvsignore
@@ -0,0 +1,2 @@
+*.class
+*.pyc
diff --git a/python/helpers/pydev/tests_runfiles/samples/not_in_default_pythonpath.txt b/python/helpers/pydev/tests_runfiles/samples/not_in_default_pythonpath.txt
new file mode 100644
index 0000000..29cdc5b
--- /dev/null
+++ b/python/helpers/pydev/tests_runfiles/samples/not_in_default_pythonpath.txt
@@ -0,0 +1 @@
+(no __init__.py file)
\ No newline at end of file
diff --git a/python/helpers/pydev/tests_runfiles/test_runfiles.py b/python/helpers/pydev/tests_runfiles/test_runfiles.py
index 0c04764..fb34c40 100644
--- a/python/helpers/pydev/tests_runfiles/test_runfiles.py
+++ b/python/helpers/pydev/tests_runfiles/test_runfiles.py
@@ -26,6 +26,7 @@
 import pydev_runfiles
 import unittest
 import tempfile
+import re
 
 try:
     set
@@ -191,7 +192,7 @@
         files_with_tests = [1 for t in self.all_tests if len(t._tests) > 0]
         self.assertNotEquals(len(self.files), len(files_with_tests))
 
-    def count_tests(self, tests):
+    def count_suite(self, tests=None):
         total = 0
         for t in tests:
             total += t.countTestCases()
@@ -207,60 +208,60 @@
 
     def test_finding_tests_from_modules_with_bad_filter_returns_0_tests(self):
         self._setup_scenario(self.file_dir, ["NO_TESTS_ARE_SURE_TO_HAVE_THIS_NAME"])
-        self.assertEquals(0, self.count_tests(self.all_tests))
+        self.assertEquals(0, self.count_suite(self.all_tests))
 
     def test_finding_test_with_unique_name_returns_1_test(self):
         self._setup_scenario(self.file_dir, include_tests=["test_i_am_a_unique_test_name"])
         filtered_tests = self.MyTestRunner.filter_tests(self.all_tests)
-        self.assertEquals(1, self.count_tests(filtered_tests))
+        self.assertEquals(1, self.count_suite(filtered_tests))
 
     def test_finding_test_with_non_unique_name(self):
         self._setup_scenario(self.file_dir, include_tests=["test_non_unique_name"])
         filtered_tests = self.MyTestRunner.filter_tests(self.all_tests)
-        self.assertEquals(1, self.count_tests(filtered_tests) > 2)
+        self.assertEquals(1, self.count_suite(filtered_tests) > 2)
 
     def test_finding_tests_with_regex_filters(self):
         self._setup_scenario(self.file_dir, include_tests=["test_non*"])
         filtered_tests = self.MyTestRunner.filter_tests(self.all_tests)
-        self.assertEquals(1, self.count_tests(filtered_tests) > 2)
+        self.assertEquals(1, self.count_suite(filtered_tests) > 2)
 
         self._setup_scenario(self.file_dir, ["^$"])
         filtered_tests = self.MyTestRunner.filter_tests(self.all_tests)
-        self.assertEquals(0, self.count_tests(filtered_tests))
+        self.assertEquals(0, self.count_suite(filtered_tests))
 
         self._setup_scenario(self.file_dir, None, exclude_tests=["*"])
         filtered_tests = self.MyTestRunner.filter_tests(self.all_tests)
-        self.assertEquals(0, self.count_tests(filtered_tests))
+        self.assertEquals(0, self.count_suite(filtered_tests))
 
     def test_matching_tests(self):
         self._setup_scenario(self.file_dir, None, ['StillYetAnotherSampleTest'])
         filtered_tests = self.MyTestRunner.filter_tests(self.all_tests)
-        self.assertEqual(1, self.count_tests(filtered_tests))
+        self.assertEqual(1, self.count_suite(filtered_tests))
 
         self._setup_scenario(self.file_dir, None, ['SampleTest.test_xxxxxx1'])
         filtered_tests = self.MyTestRunner.filter_tests(self.all_tests)
-        self.assertEqual(1, self.count_tests(filtered_tests))
+        self.assertEqual(1, self.count_suite(filtered_tests))
 
         self._setup_scenario(self.file_dir, None, ['SampleTest'])
         filtered_tests = self.MyTestRunner.filter_tests(self.all_tests)
-        self.assertEqual(8, self.count_tests(filtered_tests))
+        self.assertEqual(8, self.count_suite(filtered_tests))
 
         self._setup_scenario(self.file_dir, None, ['AnotherSampleTest.todo_not_tested'])
         filtered_tests = self.MyTestRunner.filter_tests(self.all_tests)
-        self.assertEqual(1, self.count_tests(filtered_tests))
+        self.assertEqual(1, self.count_suite(filtered_tests))
 
         self._setup_scenario(self.file_dir, None, ['StillYetAnotherSampleTest', 'SampleTest.test_xxxxxx1'])
         filtered_tests = self.MyTestRunner.filter_tests(self.all_tests)
-        self.assertEqual(2, self.count_tests(filtered_tests))
+        self.assertEqual(2, self.count_suite(filtered_tests))
 
         self._setup_scenario(self.file_dir, None, exclude_tests=['*'])
         filtered_tests = self.MyTestRunner.filter_tests(self.all_tests)
-        self.assertEqual(self.count_tests(filtered_tests), 0)
+        self.assertEqual(self.count_suite(filtered_tests), 0)
 
 
         self._setup_scenario(self.file_dir, None, exclude_tests=['*a*'])
         filtered_tests = self.MyTestRunner.filter_tests(self.all_tests)
-        self.assertEqual(self.count_tests(filtered_tests), 6)
+        self.assertEqual(self.count_suite(filtered_tests), 6)
 
         self.assertEqual(
             set(self.MyTestRunner.list_test_names(filtered_tests)),
@@ -269,7 +270,7 @@
 
         self._setup_scenario(self.file_dir, None, exclude_tests=['*a*', '*x*'])
         filtered_tests = self.MyTestRunner.filter_tests(self.all_tests)
-        self.assertEqual(self.count_tests(filtered_tests), 2)
+        self.assertEqual(self.count_suite(filtered_tests), 2)
 
         self.assertEqual(
             set(self.MyTestRunner.list_test_names(filtered_tests)),
@@ -362,17 +363,43 @@
                     ('notifyTest', 'ok', '', '', simple_test, 'SampleTest.test_xxxxxx2'),
                     ('notifyTest', 'ok', '', '', simple_test2, 'YetAnotherSampleTest.test_abc'),
                 ]
+            
             if not IS_JYTHON:
-                expected.append(('notifyTest', 'error', '', 'ValueError: This is an INTENTIONAL value error in setUpClass.',
-                        simpleClass_test.replace('/', os.path.sep), 'samples.simpleClass_test.SetUpClassTest <setUpClass>'))
-                expected.append(('notifyTest', 'error', '', 'ValueError: This is an INTENTIONAL value error in setUpModule.',
-                            simpleModule_test.replace('/', os.path.sep), 'samples.simpleModule_test <setUpModule>'))
+                if 'samples.simpleClass_test' in str(notifications):
+                    expected.append(('notifyTest', 'error', '', 'ValueError: This is an INTENTIONAL value error in setUpClass.',
+                            simpleClass_test.replace('/', os.path.sep), 'samples.simpleClass_test.SetUpClassTest <setUpClass>'))
+                    expected.append(('notifyTest', 'error', '', 'ValueError: This is an INTENTIONAL value error in setUpModule.',
+                                simpleModule_test.replace('/', os.path.sep), 'samples.simpleModule_test <setUpModule>'))
+                else:
+                    expected.append(('notifyTest', 'error', '', 'ValueError: This is an INTENTIONAL value error in setUpClass.',
+                            simpleClass_test.replace('/', os.path.sep), 'simpleClass_test.SetUpClassTest <setUpClass>'))
+                    expected.append(('notifyTest', 'error', '', 'ValueError: This is an INTENTIONAL value error in setUpModule.',
+                                simpleModule_test.replace('/', os.path.sep), 'simpleModule_test <setUpModule>'))
             else:
                 expected.append(('notifyTest', 'ok', '', '', simpleClass_test, 'SetUpClassTest.test_blank'))
                 expected.append(('notifyTest', 'ok', '', '', simpleModule_test, 'SetUpModuleTest.test_blank'))
 
             expected.append(('notifyTestRunFinished',))
             expected.sort()
+            new_notifications = []
+            for notification in expected:
+                try:
+                    if len(notification) == 6:
+                        # Some are binary on Py3.
+                        new_notifications.append((
+                            notification[0], 
+                            notification[1], 
+                            notification[2].encode('latin1'), 
+                            notification[3].encode('latin1'), 
+                            notification[4], 
+                            notification[5], 
+                        ))
+                    else:
+                        new_notifications.append(notification)
+                except:
+                    raise
+            expected = new_notifications
+                    
             notifications.sort()
             self.assertEqual(
                 expected,
diff --git a/python/helpers/pydev/third_party/__init__.py b/python/helpers/pydev/third_party/__init__.py
new file mode 100644
index 0000000..8b13789
--- /dev/null
+++ b/python/helpers/pydev/third_party/__init__.py
@@ -0,0 +1 @@
+
diff --git a/python/helpers/pydev/third_party/pep8/autopep8.py b/python/helpers/pydev/third_party/pep8/autopep8.py
new file mode 100644
index 0000000..224b5c6
--- /dev/null
+++ b/python/helpers/pydev/third_party/pep8/autopep8.py
@@ -0,0 +1,3687 @@
+#!/usr/bin/env python
+#
+# Copyright (C) 2010-2011 Hideo Hattori
+# Copyright (C) 2011-2013 Hideo Hattori, Steven Myint
+# Copyright (C) 2013-2014 Hideo Hattori, Steven Myint, Bill Wendling
+#
+# Permission is hereby granted, free of charge, to any person obtaining
+# a copy of this software and associated documentation files (the
+# "Software"), to deal in the Software without restriction, including
+# without limitation the rights to use, copy, modify, merge, publish,
+# distribute, sublicense, and/or sell copies of the Software, and to
+# permit persons to whom the Software is furnished to do so, subject to
+# the following conditions:
+#
+# The above copyright notice and this permission notice shall be
+# included in all copies or substantial portions of the Software.
+#
+# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
+# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS
+# BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN
+# ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN
+# CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
+# SOFTWARE.
+
+"""Automatically formats Python code to conform to the PEP 8 style guide.
+
+Fixes that only need be done once can be added by adding a function of the form
+"fix_<code>(source)" to this module. They should return the fixed source code.
+These fixes are picked up by apply_global_fixes().
+
+Fixes that depend on pep8 should be added as methods to FixPEP8. See the class
+documentation for more information.
+
+"""
+
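+# A sketch (not part of upstream autopep8) of what a new global fix following
+# the fix_<code>(source) convention above could look like. It is left commented
+# out so that it does not register; 'W000' is a made-up code, and the signature
+# mirrors fix_e265() defined further down:
+#
+#     def fix_w000(source, aggressive=False):
+#         """Ensure the source ends with exactly one trailing newline."""
+#         stripped = source.rstrip('\r\n')
+#         return stripped + '\n' if stripped else source
+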
+from __future__ import absolute_import
+from __future__ import division
+from __future__ import print_function
+from __future__ import unicode_literals
+
+import bisect
+import codecs
+import collections
+import copy
+import difflib
+import fnmatch
+import inspect
+import io
+import itertools
+import keyword
+import locale
+import os
+import re
+import signal
+import sys
+import token
+import tokenize
+
+import pep8
+
+
+try:
+    unicode
+except NameError:
+    unicode = str
+
+
+__version__ = '1.0.3'
+
+
+CR = '\r'
+LF = '\n'
+CRLF = '\r\n'
+
+
+PYTHON_SHEBANG_REGEX = re.compile(r'^#!.*\bpython[23]?\b\s*$')
+
+
+# For generating line shortening candidates.
+SHORTEN_OPERATOR_GROUPS = frozenset([
+    frozenset([',']),
+    frozenset(['%']),
+    frozenset([',', '(', '[', '{']),
+    frozenset(['%', '(', '[', '{']),
+    frozenset([',', '(', '[', '{', '%', '+', '-', '*', '/', '//']),
+    frozenset(['%', '+', '-', '*', '/', '//']),
+])
+
+
+DEFAULT_IGNORE = 'E24'
+DEFAULT_INDENT_SIZE = 4
+
+
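+# Map error codes to the lib2to3 fixer names applied by fix_2to3() below.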
+# W602 is handled separately due to the need to avoid "with_traceback".
+CODE_TO_2TO3 = {
+    'E721': ['idioms'],
+    'W601': ['has_key'],
+    'W603': ['ne'],
+    'W604': ['repr'],
+    'W690': ['apply',
+             'except',
+             'exitfunc',
+             'import',
+             'numliterals',
+             'operator',
+             'paren',
+             'reduce',
+             'renames',
+             'standarderror',
+             'sys_exc',
+             'throw',
+             'tuple_params',
+             'xreadlines']}
+
+
+def check_lib2to3():
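+    # If the standard lib2to3 package cannot be imported, fall back to a copy
+    # bundled in the 'lib2to3' directory next to this module.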
+    try:
+        import lib2to3
+    except ImportError:
+        sys.path.append(os.path.join(os.path.dirname(__file__), 'lib2to3'))
+        import lib2to3
+
+
+def open_with_encoding(filename, encoding=None, mode='r'):
+    """Return opened file with a specific encoding."""
+    if not encoding:
+        encoding = detect_encoding(filename)
+
+    return io.open(filename, mode=mode, encoding=encoding,
+                   newline='')  # Preserve line endings
+
+
+def detect_encoding(filename):
+    """Return file encoding."""
+    try:
+        with open(filename, 'rb') as input_file:
+            check_lib2to3()
+            from lib2to3.pgen2 import tokenize as lib2to3_tokenize
+            encoding = lib2to3_tokenize.detect_encoding(input_file.readline)[0]
+
+        # Check for correctness of encoding
+        with open_with_encoding(filename, encoding) as test_file:
+            test_file.read()
+
+        return encoding
+    except (LookupError, SyntaxError, UnicodeDecodeError):
+        return 'latin-1'
+
+
+def readlines_from_file(filename):
+    """Return contents of file."""
+    with open_with_encoding(filename) as input_file:
+        return input_file.readlines()
+
+
+def extended_blank_lines(logical_line,
+                         blank_lines,
+                         indent_level,
+                         previous_logical):
+    """Check for missing blank lines after class declaration."""
+    if previous_logical.startswith('class '):
+        if (
+            logical_line.startswith(('def ', 'class ', '@')) or
+            pep8.DOCSTRING_REGEX.match(logical_line)
+        ):
+            if indent_level and not blank_lines:
+                yield (0, 'E309 expected 1 blank line after class declaration')
+    elif previous_logical.startswith('def '):
+        if blank_lines and pep8.DOCSTRING_REGEX.match(logical_line):
+            yield (0, 'E303 too many blank lines ({0})'.format(blank_lines))
+    elif pep8.DOCSTRING_REGEX.match(previous_logical):
+        # Missing blank line between class docstring and method declaration.
+        if (
+            indent_level and
+            not blank_lines and
+            logical_line.startswith(('def ')) and
+            '(self' in logical_line
+        ):
+            yield (0, 'E301 expected 1 blank line, found 0')
+pep8.register_check(extended_blank_lines)
+
+
+def continued_indentation(logical_line, tokens, indent_level, indent_char,
+                          noqa):
+    """Override pep8's function to provide indentation information."""
+    first_row = tokens[0][2][0]
+    nrows = 1 + tokens[-1][2][0] - first_row
+    if noqa or nrows == 1:
+        return
+
+    # indent_next tells us whether the next block is indented. Assuming
+    # that it is indented by 4 spaces, then we should not allow 4-space
+    # indents on the final continuation line. In turn, some other
+    # indents are allowed to have an extra 4 spaces.
+    indent_next = logical_line.endswith(':')
+
+    row = depth = 0
+    valid_hangs = (
+        (DEFAULT_INDENT_SIZE,)
+        if indent_char != '\t' else (DEFAULT_INDENT_SIZE,
+                                     2 * DEFAULT_INDENT_SIZE)
+    )
+
+    # Remember how many brackets were opened on each line.
+    parens = [0] * nrows
+
+    # Relative indents of physical lines.
+    rel_indent = [0] * nrows
+
+    # For each depth, collect a list of opening rows.
+    open_rows = [[0]]
+    # For each depth, memorize the hanging indentation.
+    hangs = [None]
+
+    # Visual indents.
+    indent_chances = {}
+    last_indent = tokens[0][2]
+    indent = [last_indent[1]]
+
+    last_token_multiline = None
+    line = None
+    last_line = ''
+    last_line_begins_with_multiline = False
+    for token_type, text, start, end, line in tokens:
+
+        newline = row < start[0] - first_row
+        if newline:
+            row = start[0] - first_row
+            newline = (not last_token_multiline and
+                       token_type not in (tokenize.NL, tokenize.NEWLINE))
+            last_line_begins_with_multiline = last_token_multiline
+
+        if newline:
+            # This is the beginning of a continuation line.
+            last_indent = start
+
+            # Record the initial indent.
+            rel_indent[row] = pep8.expand_indent(line) - indent_level
+
+            # Identify closing bracket.
+            close_bracket = (token_type == tokenize.OP and text in ']})')
+
+            # Is the indent relative to an opening bracket line?
+            for open_row in reversed(open_rows[depth]):
+                hang = rel_indent[row] - rel_indent[open_row]
+                hanging_indent = hang in valid_hangs
+                if hanging_indent:
+                    break
+            if hangs[depth]:
+                hanging_indent = (hang == hangs[depth])
+
+            visual_indent = (not close_bracket and hang > 0 and
+                             indent_chances.get(start[1]))
+
+            if close_bracket and indent[depth]:
+                # Closing bracket for visual indent.
+                if start[1] != indent[depth]:
+                    yield (start, 'E124 {0}'.format(indent[depth]))
+            elif close_bracket and not hang:
+                pass
+            elif indent[depth] and start[1] < indent[depth]:
+                # Visual indent is broken.
+                yield (start, 'E128 {0}'.format(indent[depth]))
+            elif (hanging_indent or
+                  (indent_next and
+                   rel_indent[row] == 2 * DEFAULT_INDENT_SIZE)):
+                # Hanging indent is verified.
+                if close_bracket:
+                    yield (start, 'E123 {0}'.format(indent_level +
+                                                    rel_indent[open_row]))
+                hangs[depth] = hang
+            elif visual_indent is True:
+                # Visual indent is verified.
+                indent[depth] = start[1]
+            elif visual_indent in (text, unicode):
+                # Ignore token lined up with matching one from a previous line.
+                pass
+            else:
+                one_indented = (indent_level + rel_indent[open_row] +
+                                DEFAULT_INDENT_SIZE)
+                # Indent is broken.
+                if hang <= 0:
+                    error = ('E122', one_indented)
+                elif indent[depth]:
+                    error = ('E127', indent[depth])
+                elif hang > DEFAULT_INDENT_SIZE:
+                    error = ('E126', one_indented)
+                else:
+                    hangs[depth] = hang
+                    error = ('E121', one_indented)
+
+                yield (start, '{0} {1}'.format(*error))
+
+        # Look for visual indenting.
+        if (parens[row] and token_type not in (tokenize.NL, tokenize.COMMENT)
+                and not indent[depth]):
+            indent[depth] = start[1]
+            indent_chances[start[1]] = True
+        # Deal with implicit string concatenation.
+        elif (token_type in (tokenize.STRING, tokenize.COMMENT) or
+              text in ('u', 'ur', 'b', 'br')):
+            indent_chances[start[1]] = unicode
+        # Special case for the "if" statement because len("if (") is equal to
+        # 4.
+        elif not indent_chances and not row and not depth and text == 'if':
+            indent_chances[end[1] + 1] = True
+        elif text == ':' and line[end[1]:].isspace():
+            open_rows[depth].append(row)
+
+        # Keep track of bracket depth.
+        if token_type == tokenize.OP:
+            if text in '([{':
+                depth += 1
+                indent.append(0)
+                hangs.append(None)
+                if len(open_rows) == depth:
+                    open_rows.append([])
+                open_rows[depth].append(row)
+                parens[row] += 1
+            elif text in ')]}' and depth > 0:
+                # Parent indents should not be more than this one.
+                prev_indent = indent.pop() or last_indent[1]
+                hangs.pop()
+                for d in range(depth):
+                    if indent[d] > prev_indent:
+                        indent[d] = 0
+                for ind in list(indent_chances):
+                    if ind >= prev_indent:
+                        del indent_chances[ind]
+                del open_rows[depth + 1:]
+                depth -= 1
+                if depth:
+                    indent_chances[indent[depth]] = True
+                for idx in range(row, -1, -1):
+                    if parens[idx]:
+                        parens[idx] -= 1
+                        break
+            assert len(indent) == depth + 1
+            if (
+                start[1] not in indent_chances and
+                # This is for purposes of speeding up E121 (GitHub #90).
+                not last_line.rstrip().endswith(',')
+            ):
+                # Allow to line up tokens.
+                indent_chances[start[1]] = text
+
+        last_token_multiline = (start[0] != end[0])
+        if last_token_multiline:
+            rel_indent[end[0] - first_row] = rel_indent[row]
+
+        last_line = line
+
+    if (
+        indent_next and
+        not last_line_begins_with_multiline and
+        pep8.expand_indent(line) == indent_level + DEFAULT_INDENT_SIZE
+    ):
+        pos = (start[0], indent[0] + 4)
+        yield (pos, 'E125 {0}'.format(indent_level +
+                                      2 * DEFAULT_INDENT_SIZE))
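+# Replace pep8's built-in continued_indentation check with the variant above,
+# which also includes the desired indentation amount in each reported message.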
+del pep8._checks['logical_line'][pep8.continued_indentation]
+pep8.register_check(continued_indentation)
+
+
+class FixPEP8(object):
+
+    """Fix invalid code.
+
+    Fixer methods are prefixed "fix_". The _fix_source() method looks for these
+    automatically.
+
+    The fixer method can take either one or two arguments (in addition to
+    self). The first argument is "result", which is the error information from
+    pep8. The second argument, "logical", is required only for logical-line
+    fixes.
+
+    The fixer method can return the list of modified lines or None. An empty
+    list would mean that no changes were made. None would mean that only the
+    line reported in the pep8 error was modified. Note that the modified line
+    numbers that are returned are indexed at 1. This typically would correspond
+    with the line number reported in the pep8 error information.
+
+    [fixed method list]
+        - e121,e122,e123,e124,e125,e126,e127,e128,e129
+        - e201,e202,e203
+        - e211
+        - e221,e222,e223,e224,e225
+        - e231
+        - e251
+        - e261,e262
+        - e271,e272,e273,e274
+        - e301,e302,e303
+        - e401
+        - e502
+        - e701,e702
+        - e711
+        - w291
+
+    """
+
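+    # An illustrative sketch, not part of upstream autopep8: a fixer that
+    # follows the convention described above. It mirrors fix_w291() below but
+    # spells out the return-value conventions; 'W001' is a made-up code, so
+    # _fix_source() will never actually dispatch to it.
+    def fix_w001(self, result):
+        """Example fixer: strip trailing whitespace on the reported line."""
+        line_index = result['line'] - 1  # pep8 line numbers are 1-indexed.
+        target = self.source[line_index]
+        fixed = target.rstrip() + '\n'
+        if fixed == target:
+            return []  # Empty list: no changes were made.
+        self.source[line_index] = fixed
+        return [line_index + 1]  # Modified line numbers are indexed at 1.
+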
+    def __init__(self, filename,
+                 options,
+                 contents=None,
+                 long_line_ignore_cache=None):
+        self.filename = filename
+        if contents is None:
+            self.source = readlines_from_file(filename)
+        else:
+            sio = io.StringIO(contents)
+            self.source = sio.readlines()
+        self.options = options
+        self.indent_word = _get_indentword(''.join(self.source))
+
+        self.long_line_ignore_cache = (
+            set() if long_line_ignore_cache is None
+            else long_line_ignore_cache)
+
+        # Many fixers are the same even though pep8 categorizes them
+        # differently.
+        self.fix_e115 = self.fix_e112
+        self.fix_e116 = self.fix_e113
+        self.fix_e121 = self._fix_reindent
+        self.fix_e122 = self._fix_reindent
+        self.fix_e123 = self._fix_reindent
+        self.fix_e124 = self._fix_reindent
+        self.fix_e126 = self._fix_reindent
+        self.fix_e127 = self._fix_reindent
+        self.fix_e128 = self._fix_reindent
+        self.fix_e129 = self._fix_reindent
+        self.fix_e202 = self.fix_e201
+        self.fix_e203 = self.fix_e201
+        self.fix_e211 = self.fix_e201
+        self.fix_e221 = self.fix_e271
+        self.fix_e222 = self.fix_e271
+        self.fix_e223 = self.fix_e271
+        self.fix_e226 = self.fix_e225
+        self.fix_e227 = self.fix_e225
+        self.fix_e228 = self.fix_e225
+        self.fix_e241 = self.fix_e271
+        self.fix_e242 = self.fix_e224
+        self.fix_e261 = self.fix_e262
+        self.fix_e272 = self.fix_e271
+        self.fix_e273 = self.fix_e271
+        self.fix_e274 = self.fix_e271
+        self.fix_e309 = self.fix_e301
+        self.fix_e501 = (
+            self.fix_long_line_logically if
+            options and (options.aggressive >= 2 or options.experimental) else
+            self.fix_long_line_physically)
+        self.fix_e703 = self.fix_e702
+
+        self._ws_comma_done = False
+
+    def _fix_source(self, results):
+        try:
+            (logical_start, logical_end) = _find_logical(self.source)
+            logical_support = True
+        except (SyntaxError, tokenize.TokenError):  # pragma: no cover
+            logical_support = False
+
+        completed_lines = set()
+        for result in sorted(results, key=_priority_key):
+            if result['line'] in completed_lines:
+                continue
+
+            fixed_methodname = 'fix_' + result['id'].lower()
+            if hasattr(self, fixed_methodname):
+                fix = getattr(self, fixed_methodname)
+
+                line_index = result['line'] - 1
+                original_line = self.source[line_index]
+
+                is_logical_fix = len(inspect.getargspec(fix).args) > 2
+                if is_logical_fix:
+                    logical = None
+                    if logical_support:
+                        logical = _get_logical(self.source,
+                                               result,
+                                               logical_start,
+                                               logical_end)
+                        if logical and set(range(
+                            logical[0][0] + 1,
+                            logical[1][0] + 1)).intersection(
+                                completed_lines):
+                            continue
+
+                    modified_lines = fix(result, logical)
+                else:
+                    modified_lines = fix(result)
+
+                if modified_lines is None:
+                    # Force logical fixes to report what they modified.
+                    assert not is_logical_fix
+
+                    if self.source[line_index] == original_line:
+                        modified_lines = []
+
+                if modified_lines:
+                    completed_lines.update(modified_lines)
+                elif modified_lines == []:  # Empty list means no fix
+                    if self.options.verbose >= 2:
+                        print(
+                            '--->  Not fixing {f} on line {l}'.format(
+                                f=result['id'], l=result['line']),
+                            file=sys.stderr)
+                else:  # We assume one-line fix when None.
+                    completed_lines.add(result['line'])
+            else:
+                if self.options.verbose >= 3:
+                    print(
+                        "--->  '{0}' is not defined.".format(fixed_methodname),
+                        file=sys.stderr)
+
+                    info = result['info'].strip()
+                    print('--->  {0}:{1}:{2}:{3}'.format(self.filename,
+                                                         result['line'],
+                                                         result['column'],
+                                                         info),
+                          file=sys.stderr)
+
+    def fix(self):
+        """Return a version of the source code with PEP 8 violations fixed."""
+        pep8_options = {
+            'ignore': self.options.ignore,
+            'select': self.options.select,
+            'max_line_length': self.options.max_line_length,
+        }
+        results = _execute_pep8(pep8_options, self.source)
+
+        if self.options.verbose:
+            progress = {}
+            for r in results:
+                if r['id'] not in progress:
+                    progress[r['id']] = set()
+                progress[r['id']].add(r['line'])
+            print('--->  {n} issue(s) to fix {progress}'.format(
+                n=len(results), progress=progress), file=sys.stderr)
+
+        if self.options.line_range:
+            start, end = self.options.line_range
+            results = [r for r in results
+                       if start <= r['line'] <= end]
+
+        self._fix_source(filter_results(source=''.join(self.source),
+                                        results=results,
+                                        aggressive=self.options.aggressive))
+
+        if self.options.line_range:
+            # If number of lines has changed then change line_range.
+            count = sum(sline.count('\n')
+                        for sline in self.source[start - 1:end])
+            self.options.line_range[1] = start + count - 1
+
+        return ''.join(self.source)
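+
+    # Illustrative note (not from upstream autopep8): driving this class
+    # directly looks roughly like FixPEP8(filename, options).fix(), where
+    # `options` supplies the attributes read above (ignore, select,
+    # max_line_length, aggressive, experimental, verbose, line_range).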
+
+    def _fix_reindent(self, result):
+        """Fix a badly indented line.
+
+        This is done by adding or removing from its initial indent only.
+
+        """
+        num_indent_spaces = int(result['info'].split()[1])
+        line_index = result['line'] - 1
+        target = self.source[line_index]
+
+        self.source[line_index] = ' ' * num_indent_spaces + target.lstrip()
+
+    def fix_e112(self, result):
+        """Fix under-indented comments."""
+        line_index = result['line'] - 1
+        target = self.source[line_index]
+
+        if not target.lstrip().startswith('#'):
+            # Don't screw with invalid syntax.
+            return []
+
+        self.source[line_index] = self.indent_word + target
+
+    def fix_e113(self, result):
+        """Fix over-indented comments."""
+        line_index = result['line'] - 1
+        target = self.source[line_index]
+
+        indent = _get_indentation(target)
+        stripped = target.lstrip()
+
+        if not stripped.startswith('#'):
+            # Don't screw with invalid syntax.
+            return []
+
+        self.source[line_index] = indent[1:] + stripped
+
+    def fix_e125(self, result):
+        """Fix indentation undistinguish from the next logical line."""
+        num_indent_spaces = int(result['info'].split()[1])
+        line_index = result['line'] - 1
+        target = self.source[line_index]
+
+        spaces_to_add = num_indent_spaces - len(_get_indentation(target))
+        indent = len(_get_indentation(target))
+        modified_lines = []
+
+        while len(_get_indentation(self.source[line_index])) >= indent:
+            self.source[line_index] = (' ' * spaces_to_add +
+                                       self.source[line_index])
+            modified_lines.append(1 + line_index)  # Line indexed at 1.
+            line_index -= 1
+
+        return modified_lines
+
+    def fix_e201(self, result):
+        """Remove extraneous whitespace."""
+        line_index = result['line'] - 1
+        target = self.source[line_index]
+        offset = result['column'] - 1
+
+        if is_probably_part_of_multiline(target):
+            return []
+
+        fixed = fix_whitespace(target,
+                               offset=offset,
+                               replacement='')
+
+        self.source[line_index] = fixed
+
+    def fix_e224(self, result):
+        """Remove extraneous whitespace around operator."""
+        target = self.source[result['line'] - 1]
+        offset = result['column'] - 1
+        fixed = target[:offset] + target[offset:].replace('\t', ' ')
+        self.source[result['line'] - 1] = fixed
+
+    def fix_e225(self, result):
+        """Fix missing whitespace around operator."""
+        target = self.source[result['line'] - 1]
+        offset = result['column'] - 1
+        fixed = target[:offset] + ' ' + target[offset:]
+
+        # Only proceed if non-whitespace characters match.
+        # And make sure we don't break the indentation.
+        if (
+            fixed.replace(' ', '') == target.replace(' ', '') and
+            _get_indentation(fixed) == _get_indentation(target)
+        ):
+            self.source[result['line'] - 1] = fixed
+        else:
+            return []
+
+    def fix_e231(self, result):
+        """Add missing whitespace."""
+        # Optimize for comma case. This will fix all commas in the full source
+        # code in one pass. Don't do this more than once. If it fails the first
+        # time, there is no point in trying again.
+        if ',' in result['info'] and not self._ws_comma_done:
+            self._ws_comma_done = True
+            original = ''.join(self.source)
+            new = refactor(original, ['ws_comma'])
+            if original.strip() != new.strip():
+                self.source = [new]
+                return range(1, 1 + len(original))
+
+        line_index = result['line'] - 1
+        target = self.source[line_index]
+        offset = result['column']
+        fixed = target[:offset] + ' ' + target[offset:]
+        self.source[line_index] = fixed
+
+    def fix_e251(self, result):
+        """Remove whitespace around parameter '=' sign."""
+        line_index = result['line'] - 1
+        target = self.source[line_index]
+
+        # This is necessary since pep8 sometimes reports columns that go
+        # past the end of the physical line. This happens in cases like
+        # foo(bar\n=None)
+        c = min(result['column'] - 1,
+                len(target) - 1)
+
+        if target[c].strip():
+            fixed = target
+        else:
+            fixed = target[:c].rstrip() + target[c:].lstrip()
+
+        # There could be an escaped newline
+        #
+        #     def foo(a=\
+        #             1)
+        if fixed.endswith(('=\\\n', '=\\\r\n', '=\\\r')):
+            self.source[line_index] = fixed.rstrip('\n\r \t\\')
+            self.source[line_index + 1] = self.source[line_index + 1].lstrip()
+            return [line_index + 1, line_index + 2]  # Line indexed at 1
+
+        self.source[result['line'] - 1] = fixed
+
+    def fix_e262(self, result):
+        """Fix spacing after comment hash."""
+        target = self.source[result['line'] - 1]
+        offset = result['column']
+
+        code = target[:offset].rstrip(' \t#')
+        comment = target[offset:].lstrip(' \t#')
+
+        fixed = code + ('  # ' + comment if comment.strip() else '\n')
+
+        self.source[result['line'] - 1] = fixed
+
+    def fix_e271(self, result):
+        """Fix extraneous whitespace around keywords."""
+        line_index = result['line'] - 1
+        target = self.source[line_index]
+        offset = result['column'] - 1
+
+        if is_probably_part_of_multiline(target):
+            return []
+
+        fixed = fix_whitespace(target,
+                               offset=offset,
+                               replacement=' ')
+
+        if fixed == target:
+            return []
+        else:
+            self.source[line_index] = fixed
+
+    def fix_e301(self, result):
+        """Add missing blank line."""
+        cr = '\n'
+        self.source[result['line'] - 1] = cr + self.source[result['line'] - 1]
+
+    def fix_e302(self, result):
+        """Add missing 2 blank lines."""
+        add_linenum = 2 - int(result['info'].split()[-1])
+        cr = '\n' * add_linenum
+        self.source[result['line'] - 1] = cr + self.source[result['line'] - 1]
+
+    def fix_e303(self, result):
+        """Remove extra blank lines."""
+        delete_linenum = int(result['info'].split('(')[1].split(')')[0]) - 2
+        delete_linenum = max(1, delete_linenum)
+
+        # We need to count because pep8 reports an offset line number if there
+        # are comments.
+        cnt = 0
+        line = result['line'] - 2
+        modified_lines = []
+        while cnt < delete_linenum and line >= 0:
+            if not self.source[line].strip():
+                self.source[line] = ''
+                modified_lines.append(1 + line)  # Line indexed at 1
+                cnt += 1
+            line -= 1
+
+        return modified_lines
+
+    def fix_e304(self, result):
+        """Remove blank line following function decorator."""
+        line = result['line'] - 2
+        if not self.source[line].strip():
+            self.source[line] = ''
+
+    def fix_e401(self, result):
+        """Put imports on separate lines."""
+        line_index = result['line'] - 1
+        target = self.source[line_index]
+        offset = result['column'] - 1
+
+        if not target.lstrip().startswith('import'):
+            return []
+
+        indentation = re.split(pattern=r'\bimport\b',
+                               string=target, maxsplit=1)[0]
+        fixed = (target[:offset].rstrip('\t ,') + '\n' +
+                 indentation + 'import ' + target[offset:].lstrip('\t ,'))
+        self.source[line_index] = fixed
+
+    def fix_long_line_logically(self, result, logical):
+        """Try to make lines fit within --max-line-length characters."""
+        if (
+            not logical or
+            len(logical[2]) == 1 or
+            self.source[result['line'] - 1].lstrip().startswith('#')
+        ):
+            return self.fix_long_line_physically(result)
+
+        start_line_index = logical[0][0]
+        end_line_index = logical[1][0]
+        logical_lines = logical[2]
+
+        previous_line = get_item(self.source, start_line_index - 1, default='')
+        next_line = get_item(self.source, end_line_index + 1, default='')
+
+        single_line = join_logical_line(''.join(logical_lines))
+
+        try:
+            fixed = self.fix_long_line(
+                target=single_line,
+                previous_line=previous_line,
+                next_line=next_line,
+                original=''.join(logical_lines))
+        except (SyntaxError, tokenize.TokenError):
+            return self.fix_long_line_physically(result)
+
+        if fixed:
+            for line_index in range(start_line_index, end_line_index + 1):
+                self.source[line_index] = ''
+            self.source[start_line_index] = fixed
+            return range(start_line_index + 1, end_line_index + 1)
+        else:
+            return []
+
+    def fix_long_line_physically(self, result):
+        """Try to make lines fit within --max-line-length characters."""
+        line_index = result['line'] - 1
+        target = self.source[line_index]
+
+        previous_line = get_item(self.source, line_index - 1, default='')
+        next_line = get_item(self.source, line_index + 1, default='')
+
+        try:
+            fixed = self.fix_long_line(
+                target=target,
+                previous_line=previous_line,
+                next_line=next_line,
+                original=target)
+        except (SyntaxError, tokenize.TokenError):
+            return []
+
+        if fixed:
+            self.source[line_index] = fixed
+            return [line_index + 1]
+        else:
+            return []
+
+    def fix_long_line(self, target, previous_line,
+                      next_line, original):
+        cache_entry = (target, previous_line, next_line)
+        if cache_entry in self.long_line_ignore_cache:
+            return []
+
+        if target.lstrip().startswith('#'):
+            # Wrap commented lines.
+            return shorten_comment(
+                line=target,
+                max_line_length=self.options.max_line_length,
+                last_comment=not next_line.lstrip().startswith('#'))
+
+        fixed = get_fixed_long_line(
+            target=target,
+            previous_line=previous_line,
+            original=original,
+            indent_word=self.indent_word,
+            max_line_length=self.options.max_line_length,
+            aggressive=self.options.aggressive,
+            experimental=self.options.experimental,
+            verbose=self.options.verbose)
+        if fixed and not code_almost_equal(original, fixed):
+            return fixed
+        else:
+            self.long_line_ignore_cache.add(cache_entry)
+            return None
+
+    def fix_e502(self, result):
+        """Remove extraneous escape of newline."""
+        line_index = result['line'] - 1
+        target = self.source[line_index]
+        self.source[line_index] = target.rstrip('\n\r \t\\') + '\n'
+
+    def fix_e701(self, result):
+        """Put colon-separated compound statement on separate lines."""
+        line_index = result['line'] - 1
+        target = self.source[line_index]
+        c = result['column']
+
+        fixed_source = (target[:c] + '\n' +
+                        _get_indentation(target) + self.indent_word +
+                        target[c:].lstrip('\n\r \t\\'))
+        self.source[result['line'] - 1] = fixed_source
+        return [result['line'], result['line'] + 1]
+
+    def fix_e702(self, result, logical):
+        """Put semicolon-separated compound statement on separate lines."""
+        if not logical:
+            return []  # pragma: no cover
+        logical_lines = logical[2]
+
+        line_index = result['line'] - 1
+        target = self.source[line_index]
+
+        if target.rstrip().endswith('\\'):
+            # Normalize '1; \\\n2' into '1; 2'.
+            self.source[line_index] = target.rstrip('\n \r\t\\')
+            self.source[line_index + 1] = self.source[line_index + 1].lstrip()
+            return [line_index + 1, line_index + 2]
+
+        if target.rstrip().endswith(';'):
+            self.source[line_index] = target.rstrip('\n \r\t;') + '\n'
+            return [line_index + 1]
+
+        offset = result['column'] - 1
+        first = target[:offset].rstrip(';').rstrip()
+        second = (_get_indentation(logical_lines[0]) +
+                  target[offset:].lstrip(';').lstrip())
+
+        self.source[line_index] = first + '\n' + second
+        return [line_index + 1]
+
+    def fix_e711(self, result):
+        """Fix comparison with None."""
+        line_index = result['line'] - 1
+        target = self.source[line_index]
+        offset = result['column'] - 1
+
+        right_offset = offset + 2
+        if right_offset >= len(target):
+            return []
+
+        left = target[:offset].rstrip()
+        center = target[offset:right_offset]
+        right = target[right_offset:].lstrip()
+
+        if not right.startswith('None'):
+            return []
+
+        if center.strip() == '==':
+            new_center = 'is'
+        elif center.strip() == '!=':
+            new_center = 'is not'
+        else:
+            return []
+
+        self.source[line_index] = ' '.join([left, new_center, right])
+
+    def fix_e712(self, result):
+        """Fix comparison with boolean."""
+        line_index = result['line'] - 1
+        target = self.source[line_index]
+        offset = result['column'] - 1
+
+        # Handle very easy "not" special cases.
+        if re.match(r'^\s*if \w+ == False:$', target):
+            self.source[line_index] = re.sub(r'if (\w+) == False:',
+                                             r'if not \1:', target, count=1)
+        elif re.match(r'^\s*if \w+ != True:$', target):
+            self.source[line_index] = re.sub(r'if (\w+) != True:',
+                                             r'if not \1:', target, count=1)
+        else:
+            right_offset = offset + 2
+            if right_offset >= len(target):
+                return []
+
+            left = target[:offset].rstrip()
+            center = target[offset:right_offset]
+            right = target[right_offset:].lstrip()
+
+            # Handle simple cases only.
+            new_right = None
+            if center.strip() == '==':
+                if re.match(r'\bTrue\b', right):
+                    new_right = re.sub(r'\bTrue\b *', '', right, count=1)
+            elif center.strip() == '!=':
+                if re.match(r'\bFalse\b', right):
+                    new_right = re.sub(r'\bFalse\b *', '', right, count=1)
+
+            if new_right is None:
+                return []
+
+            if new_right[0].isalnum():
+                new_right = ' ' + new_right
+
+            self.source[line_index] = left + new_right
+
+    def fix_e713(self, result):
+        """Fix non-membership check."""
+        line_index = result['line'] - 1
+        target = self.source[line_index]
+
+        # Handle very easy case only.
+        if re.match(r'^\s*if not \w+ in \w+:$', target):
+            self.source[line_index] = re.sub(r'if not (\w+) in (\w+):',
+                                             r'if \1 not in \2:',
+                                             target,
+                                             count=1)
+
+    def fix_w291(self, result):
+        """Remove trailing whitespace."""
+        fixed_line = self.source[result['line'] - 1].rstrip()
+        self.source[result['line'] - 1] = fixed_line + '\n'
+
+
+def get_fixed_long_line(target, previous_line, original,
+                        indent_word='    ', max_line_length=79,
+                        aggressive=False, experimental=False, verbose=False):
+    """Break up long line and return result.
+
+    Do this by generating multiple reformatted candidates and then
+    ranking the candidates to heuristically select the best option.
+
+    """
+    indent = _get_indentation(target)
+    source = target[len(indent):]
+    assert source.lstrip() == source
+
+    # Check for partial multiline.
+    tokens = list(generate_tokens(source))
+
+    candidates = shorten_line(
+        tokens, source, indent,
+        indent_word,
+        max_line_length,
+        aggressive=aggressive,
+        experimental=experimental,
+        previous_line=previous_line)
+
+    # Also sort alphabetically as a tie breaker (for determinism).
+    candidates = sorted(
+        sorted(set(candidates).union([target, original])),
+        key=lambda x: line_shortening_rank(x,
+                                           indent_word,
+                                           max_line_length,
+                                           experimental))
+
+    if verbose >= 4:
+        print(('-' * 79 + '\n').join([''] + candidates + ['']),
+              file=codecs.getwriter('utf-8')(sys.stderr.buffer
+                                             if hasattr(sys.stderr,
+                                                        'buffer')
+                                             else sys.stderr))
+
+    if candidates:
+        return candidates[0]
+
+
+def join_logical_line(logical_line):
+    """Return single line based on logical line input."""
+    indentation = _get_indentation(logical_line)
+
+    return indentation + untokenize_without_newlines(
+        generate_tokens(logical_line.lstrip())) + '\n'
+
+
+def untokenize_without_newlines(tokens):
+    """Return source code based on tokens."""
+    text = ''
+    last_row = 0
+    last_column = -1
+
+    for t in tokens:
+        token_string = t[1]
+        (start_row, start_column) = t[2]
+        (end_row, end_column) = t[3]
+
+        if start_row > last_row:
+            last_column = 0
+        if (
+            (start_column > last_column or token_string == '\n') and
+            not text.endswith(' ')
+        ):
+            text += ' '
+
+        if token_string != '\n':
+            text += token_string
+
+        last_row = end_row
+        last_column = end_column
+
+    return text
+
+
+def _find_logical(source_lines):
+    # Collect the positions where each logical line starts and ends.
+    logical_start = []
+    logical_end = []
+    last_newline = True
+    parens = 0
+    for t in generate_tokens(''.join(source_lines)):
+        if t[0] in [tokenize.COMMENT, tokenize.DEDENT,
+                    tokenize.INDENT, tokenize.NL,
+                    tokenize.ENDMARKER]:
+            continue
+        if not parens and t[0] in [tokenize.NEWLINE, tokenize.SEMI]:
+            last_newline = True
+            logical_end.append((t[3][0] - 1, t[2][1]))
+            continue
+        if last_newline and not parens:
+            logical_start.append((t[2][0] - 1, t[2][1]))
+            last_newline = False
+        if t[0] == tokenize.OP:
+            if t[1] in '([{':
+                parens += 1
+            elif t[1] in '}])':
+                parens -= 1
+    return (logical_start, logical_end)
+
+
+def _get_logical(source_lines, result, logical_start, logical_end):
+    """Return the logical line corresponding to the result.
+
+    Assumes input is already E702-clean.
+
+    """
+    row = result['line'] - 1
+    col = result['column'] - 1
+    ls = None
+    le = None
+    for i in range(0, len(logical_start), 1):
+        assert logical_end
+        x = logical_end[i]
+        if x[0] > row or (x[0] == row and x[1] > col):
+            le = x
+            ls = logical_start[i]
+            break
+    if ls is None:
+        return None
+    original = source_lines[ls[0]:le[0] + 1]
+    return ls, le, original
+
+
+def get_item(items, index, default=None):
+    if 0 <= index < len(items):
+        return items[index]
+    else:
+        return default
+
+
+def reindent(source, indent_size):
+    """Reindent all lines."""
+    reindenter = Reindenter(source)
+    return reindenter.run(indent_size)
+
+
+def code_almost_equal(a, b):
+    """Return True if code is similar.
+
+    Ignore whitespace when comparing specific line.
+
+    """
+    split_a = split_and_strip_non_empty_lines(a)
+    split_b = split_and_strip_non_empty_lines(b)
+
+    if len(split_a) != len(split_b):
+        return False
+
+    for index in range(len(split_a)):
+        if ''.join(split_a[index].split()) != ''.join(split_b[index].split()):
+            return False
+
+    return True
+
+
+def split_and_strip_non_empty_lines(text):
+    """Return lines split by newline.
+
+    Ignore empty lines.
+
+    """
+    return [line.strip() for line in text.splitlines() if line.strip()]
+
+
+def fix_e265(source, aggressive=False):  # pylint: disable=unused-argument
+    """Format block comments."""
+    if '#' not in source:
+        # Optimization.
+        return source
+
+    ignored_line_numbers = multiline_string_lines(
+        source,
+        include_docstrings=True) | set(commented_out_code_lines(source))
+
+    fixed_lines = []
+    sio = io.StringIO(source)
+    line_number = 0
+    for line in sio.readlines():
+        line_number += 1
+        if (
+            line.lstrip().startswith('#') and
+            line_number not in ignored_line_numbers
+        ):
+            indentation = _get_indentation(line)
+            line = line.lstrip()
+
+            # Normalize beginning if not a shebang.
+            if len(line) > 1:
+                if (
+                    # Leave multiple spaces like '#    ' alone.
+                    (line.count('#') > 1 or line[1].isalnum())
+                    # Leave stylistic outlined blocks alone.
+                    and not line.rstrip().endswith('#')
+                ):
+                    line = '# ' + line.lstrip('# \t')
+
+            fixed_lines.append(indentation + line)
+        else:
+            fixed_lines.append(line)
+
+    return ''.join(fixed_lines)
+
+
+def refactor(source, fixer_names, ignore=None):
+    """Return refactored code using lib2to3.
+
+    Skip if ignore string is produced in the refactored code.
+
+    """
+    check_lib2to3()
+    from lib2to3 import pgen2
+    try:
+        new_text = refactor_with_2to3(source,
+                                      fixer_names=fixer_names)
+    except (pgen2.parse.ParseError,
+            SyntaxError,
+            UnicodeDecodeError,
+            UnicodeEncodeError):
+        return source
+
+    if ignore:
+        if ignore in new_text and ignore not in source:
+            return source
+
+    return new_text
+
+
+def code_to_2to3(select, ignore):
+    fixes = set()
+    for code, fix in CODE_TO_2TO3.items():
+        if code_match(code, select=select, ignore=ignore):
+            fixes |= set(fix)
+    return fixes
+
+
+def fix_2to3(source, aggressive=True, select=None, ignore=None):
+    """Fix various deprecated code (via lib2to3)."""
+    if not aggressive:
+        return source
+
+    select = select or []
+    ignore = ignore or []
+
+    return refactor(source,
+                    code_to_2to3(select=select,
+                                 ignore=ignore))
+
+
+def fix_w602(source, aggressive=True):
+    """Fix deprecated form of raising exception."""
+    if not aggressive:
+        return source
+
+    return refactor(source, ['raise'],
+                    ignore='with_traceback')
+
+
+def find_newline(source):
+    """Return type of newline used in source.
+
+    Input is a list of lines.
+
+    """
+    assert not isinstance(source, unicode)
+
+    counter = collections.defaultdict(int)
+    for line in source:
+        if line.endswith(CRLF):
+            counter[CRLF] += 1
+        elif line.endswith(CR):
+            counter[CR] += 1
+        elif line.endswith(LF):
+            counter[LF] += 1
+
+    return (sorted(counter, key=counter.get, reverse=True) or [LF])[0]
+
+
+def _get_indentword(source):
+    """Return indentation type."""
+    indent_word = '    '  # Default in case source has no indentation
+    try:
+        for t in generate_tokens(source):
+            if t[0] == token.INDENT:
+                indent_word = t[1]
+                break
+    except (SyntaxError, tokenize.TokenError):
+        pass
+    return indent_word
+
+
+def _get_indentation(line):
+    """Return leading whitespace."""
+    if line.strip():
+        non_whitespace_index = len(line) - len(line.lstrip())
+        return line[:non_whitespace_index]
+    else:
+        return ''
+
+
+def get_diff_text(old, new, filename):
+    """Return text of unified diff between old and new."""
+    newline = '\n'
+    diff = difflib.unified_diff(
+        old, new,
+        'original/' + filename,
+        'fixed/' + filename,
+        lineterm=newline)
+
+    text = ''
+    for line in diff:
+        text += line
+
+        # Work around missing newline (http://bugs.python.org/issue2142).
+        if text and not line.endswith(newline):
+            text += newline + r'\ No newline at end of file' + newline
+
+    return text
+
+
+def _priority_key(pep8_result):
+    """Key for sorting PEP8 results.
+
+    Global fixes should be done first. This is important for things like
+    indentation.
+
+    """
+    priority = [
+        # Fix multiline colon-based before semicolon based.
+        'e701',
+        # Break multiline statements early.
+        'e702',
+        # Things that make lines longer.
+        'e225', 'e231',
+        # Remove extraneous whitespace before breaking lines.
+        'e201',
+        # Shorten whitespace in comment before resorting to wrapping.
+        'e262'
+    ]
+    middle_index = 10000
+    lowest_priority = [
+        # We need to shorten lines last since the logical fixer can get in a
+        # loop, which causes us to exit early.
+        'e501'
+    ]
+    key = pep8_result['id'].lower()
+    try:
+        return priority.index(key)
+    except ValueError:
+        try:
+            return middle_index + lowest_priority.index(key) + 1
+        except ValueError:
+            return middle_index
+
+
+def shorten_line(tokens, source, indentation, indent_word, max_line_length,
+                 aggressive=False, experimental=False, previous_line=''):
+    """Separate line at OPERATOR.
+
+    Multiple candidates will be yielded.
+
+    """
+    for candidate in _shorten_line(tokens=tokens,
+                                   source=source,
+                                   indentation=indentation,
+                                   indent_word=indent_word,
+                                   aggressive=aggressive,
+                                   previous_line=previous_line):
+        yield candidate
+
+    if aggressive:
+        for key_token_strings in SHORTEN_OPERATOR_GROUPS:
+            shortened = _shorten_line_at_tokens(
+                tokens=tokens,
+                source=source,
+                indentation=indentation,
+                indent_word=indent_word,
+                key_token_strings=key_token_strings,
+                aggressive=aggressive)
+
+            if shortened is not None and shortened != source:
+                yield shortened
+
+    if experimental:
+        for shortened in _shorten_line_at_tokens_new(
+                tokens=tokens,
+                source=source,
+                indentation=indentation,
+                max_line_length=max_line_length):
+
+            yield shortened
+
+
+def _shorten_line(tokens, source, indentation, indent_word,
+                  aggressive=False, previous_line=''):
+    """Separate line at OPERATOR.
+
+    The input is expected to be free of newlines except for inside multiline
+    strings and at the end.
+
+    Multiple candidates will be yielded.
+
+    """
+    for (token_type,
+         token_string,
+         start_offset,
+         end_offset) in token_offsets(tokens):
+
+        if (
+            token_type == tokenize.COMMENT and
+            not is_probably_part_of_multiline(previous_line) and
+            not is_probably_part_of_multiline(source) and
+            not source[start_offset + 1:].strip().lower().startswith(
+                ('noqa', 'pragma:', 'pylint:'))
+        ):
+            # Move inline comments to previous line.
+            first = source[:start_offset]
+            second = source[start_offset:]
+            yield (indentation + second.strip() + '\n' +
+                   indentation + first.strip() + '\n')
+        elif token_type == token.OP and token_string != '=':
+            # Don't break on '=' after keyword as this violates PEP 8.
+
+            assert token_type != token.INDENT
+
+            first = source[:end_offset]
+
+            second_indent = indentation
+            if first.rstrip().endswith('('):
+                second_indent += indent_word
+            elif '(' in first:
+                second_indent += ' ' * (1 + first.find('('))
+            else:
+                second_indent += indent_word
+
+            second = (second_indent + source[end_offset:].lstrip())
+            if (
+                not second.strip() or
+                second.lstrip().startswith('#')
+            ):
+                continue
+
+            # Do not begin a line with a comma
+            if second.lstrip().startswith(','):
+                continue
+            # Do end a line with a dot
+            if first.rstrip().endswith('.'):
+                continue
+            if token_string in '+-*/':
+                fixed = first + ' \\' + '\n' + second
+            else:
+                fixed = first + '\n' + second
+
+            # Only fix if syntax is okay.
+            if check_syntax(normalize_multiline(fixed)
+                            if aggressive else fixed):
+                yield indentation + fixed
+
+
+# A convenient way to handle tokens.
+Token = collections.namedtuple('Token', ['token_type', 'token_string',
+                                         'spos', 'epos', 'line'])
+
+
+class ReformattedLines(object):
+
+    """The reflowed lines of atoms.
+
+    Each part of the line is represented as an "atom." They can be moved
+    around when need be to get the optimal formatting.
+
+    """
+
+    ###########################################################################
+    # Private Classes
+
+    class _Indent(object):
+
+        """Represent an indentation in the atom stream."""
+
+        def __init__(self, indent_amt):
+            self._indent_amt = indent_amt
+
+        def emit(self):
+            return ' ' * self._indent_amt
+
+        @property
+        def size(self):
+            return self._indent_amt
+
+    class _Space(object):
+
+        """Represent a space in the atom stream."""
+
+        def emit(self):
+            return ' '
+
+        @property
+        def size(self):
+            return 1
+
+    class _LineBreak(object):
+
+        """Represent a line break in the atom stream."""
+
+        def emit(self):
+            return '\n'
+
+        @property
+        def size(self):
+            return 0
+
+    def __init__(self, max_line_length):
+        self._max_line_length = max_line_length
+        self._lines = []
+        self._bracket_depth = 0
+        self._prev_item = None
+        self._prev_prev_item = None
+
+    def __repr__(self):
+        return self.emit()
+
+    ###########################################################################
+    # Public Methods
+
+    def add(self, obj, indent_amt, break_after_open_bracket):
+        if isinstance(obj, Atom):
+            self._add_item(obj, indent_amt)
+            return
+
+        self._add_container(obj, indent_amt, break_after_open_bracket)
+
+    def add_comment(self, item):
+        num_spaces = 2
+        if len(self._lines) > 1:
+            if isinstance(self._lines[-1], self._Space):
+                num_spaces -= 1
+            if len(self._lines) > 2:
+                if isinstance(self._lines[-2], self._Space):
+                    num_spaces -= 1
+
+        while num_spaces > 0:
+            self._lines.append(self._Space())
+            num_spaces -= 1
+        self._lines.append(item)
+
+    def add_indent(self, indent_amt):
+        self._lines.append(self._Indent(indent_amt))
+
+    def add_line_break(self, indent):
+        self._lines.append(self._LineBreak())
+        self.add_indent(len(indent))
+
+    def add_line_break_at(self, index, indent_amt):
+        self._lines.insert(index, self._LineBreak())
+        self._lines.insert(index + 1, self._Indent(indent_amt))
+
+    def add_space_if_needed(self, curr_text, equal=False):
+        if (
+            not self._lines or isinstance(
+                self._lines[-1], (self._LineBreak, self._Indent, self._Space))
+        ):
+            return
+
+        prev_text = unicode(self._prev_item)
+        prev_prev_text = (
+            unicode(self._prev_prev_item) if self._prev_prev_item else '')
+
+        if (
+            # The previous item was a keyword or identifier and the current
+            # item isn't an operator that doesn't require a space.
+            ((self._prev_item.is_keyword or self._prev_item.is_string or
+              self._prev_item.is_name or self._prev_item.is_number) and
+             (curr_text[0] not in '([{.,:}])' or
+              (curr_text[0] == '=' and equal))) or
+
+            # Don't place spaces around a '.', unless it's in an 'import'
+            # statement.
+            ((prev_prev_text != 'from' and prev_text[-1] != '.' and
+              curr_text != 'import') and
+
+             # Don't place a space before a colon.
+             curr_text[0] != ':' and
+
+             # Don't split up ending brackets by spaces.
+             ((prev_text[-1] in '}])' and curr_text[0] not in '.,}])') or
+
+              # Put a space after a colon or comma.
+              prev_text[-1] in ':,' or
+
+              # Put space around '=' if asked to.
+              (equal and prev_text == '=') or
+
+              # Put spaces around non-unary arithmetic operators.
+              ((self._prev_prev_item and
+                (prev_text not in '+-' and
+                 (self._prev_prev_item.is_name or
+                  self._prev_prev_item.is_number or
+                  self._prev_prev_item.is_string)) and
+                prev_text in ('+', '-', '%', '*', '/', '//', '**')))))
+        ):
+            self._lines.append(self._Space())
+
+    def previous_item(self):
+        """Return the previous non-whitespace item."""
+        return self._prev_item
+
+    def fits_on_current_line(self, item_extent):
+        return self.current_size() + item_extent <= self._max_line_length
+
+    def current_size(self):
+        """The size of the current line minus the indentation."""
+        size = 0
+        for item in reversed(self._lines):
+            size += item.size
+            if isinstance(item, self._LineBreak):
+                break
+
+        return size
+
+    def line_empty(self):
+        return (self._lines and
+                isinstance(self._lines[-1],
+                           (self._LineBreak, self._Indent)))
+
+    def emit(self):
+        string = ''
+        for item in self._lines:
+            if isinstance(item, self._LineBreak):
+                string = string.rstrip()
+            string += item.emit()
+
+        return string.rstrip() + '\n'
+
+    ###########################################################################
+    # Private Methods
+
+    def _add_item(self, item, indent_amt):
+        """Add an item to the line.
+
+        Reflow the line to get the best formatting after the item is
+        inserted. The bracket depth indicates if the item is being
+        inserted inside of a container or not.
+
+        """
+        if self._prev_item and self._prev_item.is_string and item.is_string:
+            # Place consecutive string literals on separate lines.
+            self._lines.append(self._LineBreak())
+            self._lines.append(self._Indent(indent_amt))
+
+        item_text = unicode(item)
+        if self._lines and self._bracket_depth:
+            # Adding the item into a container.
+            self._prevent_default_initializer_splitting(item, indent_amt)
+
+            if item_text in '.,)]}':
+                self._split_after_delimiter(item, indent_amt)
+
+        elif self._lines and not self.line_empty():
+            # Adding the item outside of a container.
+            if self.fits_on_current_line(len(item_text)):
+                self._enforce_space(item)
+
+            else:
+                # Line break for the new item.
+                self._lines.append(self._LineBreak())
+                self._lines.append(self._Indent(indent_amt))
+
+        self._lines.append(item)
+        self._prev_item, self._prev_prev_item = item, self._prev_item
+
+        if item_text in '([{':
+            self._bracket_depth += 1
+
+        elif item_text in '}])':
+            self._bracket_depth -= 1
+            assert self._bracket_depth >= 0
+
+    def _add_container(self, container, indent_amt, break_after_open_bracket):
+        actual_indent = indent_amt + 1
+
+        if (
+            unicode(self._prev_item) != '=' and
+            not self.line_empty() and
+            not self.fits_on_current_line(
+                container.size + self._bracket_depth + 2)
+        ):
+
+            if unicode(container)[0] == '(' and self._prev_item.is_name:
+                # Don't split before the opening bracket of a call.
+                break_after_open_bracket = True
+                actual_indent = indent_amt + 4
+            elif (
+                break_after_open_bracket or
+                unicode(self._prev_item) not in '([{'
+            ):
+                # If the container doesn't fit on the current line and the
+                # current line isn't empty, place the container on the next
+                # line.
+                self._lines.append(self._LineBreak())
+                self._lines.append(self._Indent(indent_amt))
+                break_after_open_bracket = False
+        else:
+            actual_indent = self.current_size() + 1
+            break_after_open_bracket = False
+
+        if isinstance(container, (ListComprehension, IfExpression)):
+            actual_indent = indent_amt
+
+        # Increase the continued indentation only if recursing on a
+        # container.
+        container.reflow(self, ' ' * actual_indent,
+                         break_after_open_bracket=break_after_open_bracket)
+
+    def _prevent_default_initializer_splitting(self, item, indent_amt):
+        """Prevent splitting between a default initializer.
+
+        When there is a default initializer, it's best to keep it all on
+        the same line. It's nicer and more readable, even if it goes
+        over the maximum allowable line length. This goes back along the
+        current line to determine if we have a default initializer, and,
+        if so, to remove extraneous whitespaces and add a line
+        break/indent before it if needed.
+
+        """
+        if unicode(item) == '=':
+            # This is the assignment in the initializer. Just remove spaces for
+            # now.
+            self._delete_whitespace()
+            return
+
+        if (not self._prev_item or not self._prev_prev_item or
+                unicode(self._prev_item) != '='):
+            return
+
+        self._delete_whitespace()
+        prev_prev_index = self._lines.index(self._prev_prev_item)
+
+        if (
+            isinstance(self._lines[prev_prev_index - 1], self._Indent) or
+            self.fits_on_current_line(item.size + 1)
+        ):
+            # The default initializer is already the only item on this line.
+            # Don't insert a newline here.
+            return
+
+        # Replace the space with a newline/indent combo.
+        if isinstance(self._lines[prev_prev_index - 1], self._Space):
+            del self._lines[prev_prev_index - 1]
+
+        self.add_line_break_at(self._lines.index(self._prev_prev_item),
+                               indent_amt)
+
+    def _split_after_delimiter(self, item, indent_amt):
+        """Split the line only after a delimiter."""
+        self._delete_whitespace()
+
+        if self.fits_on_current_line(item.size):
+            return
+
+        last_space = None
+        for item in reversed(self._lines):
+            if (
+                last_space and
+                (not isinstance(item, Atom) or not item.is_colon)
+            ):
+                break
+            else:
+                last_space = None
+            if isinstance(item, self._Space):
+                last_space = item
+            if isinstance(item, (self._LineBreak, self._Indent)):
+                return
+
+        if not last_space:
+            return
+
+        self.add_line_break_at(self._lines.index(last_space), indent_amt)
+
+    def _enforce_space(self, item):
+        """Enforce a space in certain situations.
+
+        There are cases where we will want a space where normally we
+        wouldn't put one. This just enforces the addition of a space.
+
+        """
+        if isinstance(self._lines[-1],
+                      (self._Space, self._LineBreak, self._Indent)):
+            return
+
+        if not self._prev_item:
+            return
+
+        item_text = unicode(item)
+        prev_text = unicode(self._prev_item)
+
+        # Prefer a space around a '.' in an import statement, and between the
+        # 'import' and '('.
+        if (
+            (item_text == '.' and prev_text == 'from') or
+            (item_text == 'import' and prev_text == '.') or
+            (item_text == '(' and prev_text == 'import')
+        ):
+            self._lines.append(self._Space())
+
+    def _delete_whitespace(self):
+        """Delete all whitespace from the end of the line."""
+        while isinstance(self._lines[-1], (self._Space, self._LineBreak,
+                                           self._Indent)):
+            del self._lines[-1]
+
+
+class Atom(object):
+
+    """The smallest unbreakable unit that can be reflowed."""
+
+    def __init__(self, atom):
+        self._atom = atom
+
+    def __repr__(self):
+        return self._atom.token_string
+
+    def __len__(self):
+        return self.size
+
+    def reflow(
+        self, reflowed_lines, continued_indent, extent,
+        break_after_open_bracket=False,
+        is_list_comp_or_if_expr=False,
+        next_is_dot=False
+    ):
+        if self._atom.token_type == tokenize.COMMENT:
+            reflowed_lines.add_comment(self)
+            return
+
+        total_size = extent if extent else self.size
+
+        if self._atom.token_string not in ',:([{}])':
+            # Some atoms will need an extra 1-sized space token after them.
+            total_size += 1
+
+        prev_item = reflowed_lines.previous_item()
+        if (
+            not is_list_comp_or_if_expr and
+            not reflowed_lines.fits_on_current_line(total_size) and
+            not (next_is_dot and
+                 reflowed_lines.fits_on_current_line(self.size + 1)) and
+            not reflowed_lines.line_empty() and
+            not self.is_colon and
+            not (prev_item and prev_item.is_name and
+                 unicode(self) == '(')
+        ):
+            # Start a new line if there is already something on the line and
+            # adding this atom would make it go over the max line length.
+            reflowed_lines.add_line_break(continued_indent)
+        else:
+            reflowed_lines.add_space_if_needed(unicode(self))
+
+        reflowed_lines.add(self, len(continued_indent),
+                           break_after_open_bracket)
+
+    def emit(self):
+        return self.__repr__()
+
+    @property
+    def is_keyword(self):
+        return keyword.iskeyword(self._atom.token_string)
+
+    @property
+    def is_string(self):
+        return self._atom.token_type == tokenize.STRING
+
+    @property
+    def is_name(self):
+        return self._atom.token_type == tokenize.NAME
+
+    @property
+    def is_number(self):
+        return self._atom.token_type == tokenize.NUMBER
+
+    @property
+    def is_comma(self):
+        return self._atom.token_string == ','
+
+    @property
+    def is_colon(self):
+        return self._atom.token_string == ':'
+
+    @property
+    def size(self):
+        return len(self._atom.token_string)
+
+
+class Container(object):
+
+    """Base class for all container types."""
+
+    def __init__(self, items):
+        self._items = items
+
+    def __repr__(self):
+        string = ''
+        last_was_keyword = False
+
+        for item in self._items:
+            if item.is_comma:
+                string += ', '
+            elif item.is_colon:
+                string += ': '
+            else:
+                item_string = unicode(item)
+                if (
+                    string and
+                    (last_was_keyword or
+                     (not string.endswith(tuple('([{,.:}]) ')) and
+                      not item_string.startswith(tuple('([{,.:}])'))))
+                ):
+                    string += ' '
+                string += item_string
+
+            last_was_keyword = item.is_keyword
+        return string
+
+    def __iter__(self):
+        for element in self._items:
+            yield element
+
+    def __getitem__(self, idx):
+        return self._items[idx]
+
+    def reflow(self, reflowed_lines, continued_indent,
+               break_after_open_bracket=False):
+        last_was_container = False
+        for (index, item) in enumerate(self._items):
+            next_item = get_item(self._items, index + 1)
+
+            if isinstance(item, Atom):
+                is_list_comp_or_if_expr = (
+                    isinstance(self, (ListComprehension, IfExpression)))
+                item.reflow(reflowed_lines, continued_indent,
+                            self._get_extent(index),
+                            is_list_comp_or_if_expr=is_list_comp_or_if_expr,
+                            next_is_dot=(next_item and
+                                         unicode(next_item) == '.'))
+                if last_was_container and item.is_comma:
+                    reflowed_lines.add_line_break(continued_indent)
+                last_was_container = False
+            else:  # isinstance(item, Container)
+                reflowed_lines.add(item, len(continued_indent),
+                                   break_after_open_bracket)
+                last_was_container = not isinstance(item, (ListComprehension,
+                                                           IfExpression))
+
+            if (
+                break_after_open_bracket and index == 0 and
+                # Prefer to keep empty containers together instead of
+                # separating them.
+                unicode(item) == self.open_bracket and
+                (not next_item or unicode(next_item) != self.close_bracket) and
+                (len(self._items) != 3 or not isinstance(next_item, Atom))
+            ):
+                reflowed_lines.add_line_break(continued_indent)
+                break_after_open_bracket = False
+            else:
+                next_next_item = get_item(self._items, index + 2)
+                if (
+                    unicode(item) not in ['.', '%', 'in'] and
+                    next_item and not isinstance(next_item, Container) and
+                    unicode(next_item) != ':' and
+                    next_next_item and (not isinstance(next_next_item, Atom) or
+                                        unicode(next_item) == 'not') and
+                    not reflowed_lines.line_empty() and
+                    not reflowed_lines.fits_on_current_line(
+                        self._get_extent(index + 1) + 2)
+                ):
+                    reflowed_lines.add_line_break(continued_indent)
+
+    def _get_extent(self, index):
+        """The extent of the full element.
+
+        E.g., the length of a function call or keyword.
+
+        """
+        extent = 0
+        prev_item = get_item(self._items, index - 1)
+        seen_dot = prev_item and unicode(prev_item) == '.'
+        while index < len(self._items):
+            item = get_item(self._items, index)
+            index += 1
+
+            if isinstance(item, (ListComprehension, IfExpression)):
+                break
+
+            if isinstance(item, Container):
+                if prev_item and prev_item.is_name:
+                    if seen_dot:
+                        extent += 1
+                    else:
+                        extent += item.size
+
+                    prev_item = item
+                    continue
+            elif (unicode(item) not in ['.', '=', ':', 'not'] and
+                  not item.is_name and not item.is_string):
+                break
+
+            if unicode(item) == '.':
+                seen_dot = True
+
+            extent += item.size
+            prev_item = item
+
+        return extent
+
+    @property
+    def is_string(self):
+        return False
+
+    @property
+    def size(self):
+        return len(self.__repr__())
+
+    @property
+    def is_keyword(self):
+        return False
+
+    @property
+    def is_name(self):
+        return False
+
+    @property
+    def is_comma(self):
+        return False
+
+    @property
+    def is_colon(self):
+        return False
+
+    @property
+    def open_bracket(self):
+        return None
+
+    @property
+    def close_bracket(self):
+        return None
+
+
+class Tuple(Container):
+
+    """A high-level representation of a tuple."""
+
+    @property
+    def open_bracket(self):
+        return '('
+
+    @property
+    def close_bracket(self):
+        return ')'
+
+
+class List(Container):
+
+    """A high-level representation of a list."""
+
+    @property
+    def open_bracket(self):
+        return '['
+
+    @property
+    def close_bracket(self):
+        return ']'
+
+
+class DictOrSet(Container):
+
+    """A high-level representation of a dictionary or set."""
+
+    @property
+    def open_bracket(self):
+        return '{'
+
+    @property
+    def close_bracket(self):
+        return '}'
+
+
+class ListComprehension(Container):
+
+    """A high-level representation of a list comprehension."""
+
+    @property
+    def size(self):
+        length = 0
+        for item in self._items:
+            if isinstance(item, IfExpression):
+                break
+            length += item.size
+        return length
+
+
+class IfExpression(Container):
+
+    """A high-level representation of an if-expression."""
+
+
+def _parse_container(tokens, index, for_or_if=None):
+    """Parse a high-level container, such as a list, tuple, etc."""
+
+    # Store the opening bracket.
+    items = [Atom(Token(*tokens[index]))]
+    index += 1
+
+    num_tokens = len(tokens)
+    while index < num_tokens:
+        tok = Token(*tokens[index])
+
+        if tok.token_string in ',)]}':
+            # First check if we're at the end of a list comprehension or
+            # if-expression. Don't add the ending token as part of the list
+            # comprehension or if-expression, because they aren't part of those
+            # constructs.
+            if for_or_if == 'for':
+                return (ListComprehension(items), index - 1)
+
+            elif for_or_if == 'if':
+                return (IfExpression(items), index - 1)
+
+            # Append the delimiter itself; if it is a closing bracket, return
+            # the matching high-level container.
+            items.append(Atom(tok))
+
+            if tok.token_string == ')':
+                # The end of a tuple.
+                return (Tuple(items), index)
+
+            elif tok.token_string == ']':
+                # The end of a list.
+                return (List(items), index)
+
+            elif tok.token_string == '}':
+                # The end of a dictionary or set.
+                return (DictOrSet(items), index)
+
+        elif tok.token_string in '([{':
+            # A sub-container is being defined.
+            (container, index) = _parse_container(tokens, index)
+            items.append(container)
+
+        elif tok.token_string == 'for':
+            (container, index) = _parse_container(tokens, index, 'for')
+            items.append(container)
+
+        elif tok.token_string == 'if':
+            (container, index) = _parse_container(tokens, index, 'if')
+            items.append(container)
+
+        else:
+            items.append(Atom(tok))
+
+        index += 1
+
+    return (None, None)
+
+
+def _parse_tokens(tokens):
+    """Parse the tokens.
+
+    This converts the tokens into a form where we can manipulate them
+    more easily.
+
+    """
+
+    index = 0
+    parsed_tokens = []
+
+    num_tokens = len(tokens)
+    while index < num_tokens:
+        tok = Token(*tokens[index])
+
+        assert tok.token_type != token.INDENT
+        if tok.token_type == tokenize.NEWLINE:
+            # There's only one newline and it's at the end.
+            break
+
+        if tok.token_string in '([{':
+            (container, index) = _parse_container(tokens, index)
+            if not container:
+                return None
+            parsed_tokens.append(container)
+        else:
+            parsed_tokens.append(Atom(tok))
+
+        index += 1
+
+    return parsed_tokens
+
+
+def _reflow_lines(parsed_tokens, indentation, max_line_length,
+                  start_on_prefix_line):
+    """Reflow the lines so that it looks nice."""
+
+    if unicode(parsed_tokens[0]) == 'def':
+        # A function definition gets indented a bit more.
+        continued_indent = indentation + ' ' * 2 * DEFAULT_INDENT_SIZE
+    else:
+        continued_indent = indentation + ' ' * DEFAULT_INDENT_SIZE
+
+    break_after_open_bracket = not start_on_prefix_line
+
+    lines = ReformattedLines(max_line_length)
+    lines.add_indent(len(indentation.lstrip('\r\n')))
+
+    if not start_on_prefix_line:
+        # If splitting after the opening bracket will cause the first element
+        # to be aligned weirdly, don't try it.
+        first_token = get_item(parsed_tokens, 0)
+        second_token = get_item(parsed_tokens, 1)
+
+        if (
+            first_token and second_token and
+            unicode(second_token)[0] == '(' and
+            len(indentation) + len(first_token) + 1 == len(continued_indent)
+        ):
+            return None
+
+    for item in parsed_tokens:
+        lines.add_space_if_needed(unicode(item), equal=True)
+
+        save_continued_indent = continued_indent
+        if start_on_prefix_line and isinstance(item, Container):
+            start_on_prefix_line = False
+            continued_indent = ' ' * (lines.current_size() + 1)
+
+        item.reflow(lines, continued_indent, break_after_open_bracket)
+        continued_indent = save_continued_indent
+
+    return lines.emit()
+
+
+def _shorten_line_at_tokens_new(tokens, source, indentation,
+                                max_line_length):
+    """Shorten the line taking its length into account.
+
+    The input is expected to be free of newlines except for inside
+    multiline strings and at the end.
+
+    """
+    # Yield the original source first so we can check whether it's a better
+    # choice than the shortened candidate lines generated below.
+    yield indentation + source
+
+    parsed_tokens = _parse_tokens(tokens)
+
+    if parsed_tokens:
+        # Perform two reflows. The first one starts on the same line as the
+        # prefix. The second starts on the line after the prefix.
+        fixed = _reflow_lines(parsed_tokens, indentation, max_line_length,
+                              start_on_prefix_line=True)
+        if fixed and check_syntax(normalize_multiline(fixed.lstrip())):
+            yield fixed
+
+        fixed = _reflow_lines(parsed_tokens, indentation, max_line_length,
+                              start_on_prefix_line=False)
+        if fixed and check_syntax(normalize_multiline(fixed.lstrip())):
+            yield fixed
+
+
+def _shorten_line_at_tokens(tokens, source, indentation, indent_word,
+                            key_token_strings, aggressive):
+    """Separate line by breaking at tokens in key_token_strings.
+
+    The input is expected to be free of newlines except for inside
+    multiline strings and at the end.
+
+    """
+    offsets = []
+    for (index, _t) in enumerate(token_offsets(tokens)):
+        (token_type,
+         token_string,
+         start_offset,
+         end_offset) = _t
+
+        assert token_type != token.INDENT
+
+        if token_string in key_token_strings:
+            # Do not break in containers with zero or one items.
+            unwanted_next_token = {
+                '(': ')',
+                '[': ']',
+                '{': '}'}.get(token_string)
+            if unwanted_next_token:
+                if (
+                    get_item(tokens,
+                             index + 1,
+                             default=[None, None])[1] == unwanted_next_token or
+                    get_item(tokens,
+                             index + 2,
+                             default=[None, None])[1] == unwanted_next_token
+                ):
+                    continue
+
+            if (
+                index > 2 and token_string == '(' and
+                tokens[index - 1][1] in ',(%['
+            ):
+                # Don't split after a tuple start, or before a tuple start if
+                # the tuple is in a list.
+                continue
+
+            if end_offset < len(source) - 1:
+                # Don't split right before newline.
+                offsets.append(end_offset)
+        else:
+            # Break at adjacent strings. These were probably meant to be on
+            # separate lines in the first place.
+            previous_token = get_item(tokens, index - 1)
+            if (
+                token_type == tokenize.STRING and
+                previous_token and previous_token[0] == tokenize.STRING
+            ):
+                offsets.append(start_offset)
+
+    current_indent = None
+    fixed = None
+    for line in split_at_offsets(source, offsets):
+        if fixed:
+            fixed += '\n' + current_indent + line
+
+            for symbol in '([{':
+                if line.endswith(symbol):
+                    current_indent += indent_word
+        else:
+            # First line.
+            fixed = line
+            assert not current_indent
+            current_indent = indent_word
+
+    assert fixed is not None
+
+    if check_syntax(normalize_multiline(fixed)
+                    if aggressive > 1 else fixed):
+        return indentation + fixed
+    else:
+        return None
+
+
+def token_offsets(tokens):
+    """Yield tokens and offsets."""
+    end_offset = 0
+    previous_end_row = 0
+    previous_end_column = 0
+    for t in tokens:
+        token_type = t[0]
+        token_string = t[1]
+        (start_row, start_column) = t[2]
+        (end_row, end_column) = t[3]
+
+        # Account for the whitespace between tokens.
+        end_offset += start_column
+        if previous_end_row == start_row:
+            end_offset -= previous_end_column
+
+        # Record the start offset of the token.
+        start_offset = end_offset
+
+        # Account for the length of the token itself.
+        end_offset += len(token_string)
+
+        yield (token_type,
+               token_string,
+               start_offset,
+               end_offset)
+
+        previous_end_row = end_row
+        previous_end_column = end_column
+
+
+def normalize_multiline(line):
+    """Normalize multiline-related code that will cause syntax error.
+
+    This is for purposes of checking syntax.
+
+    """
+    if line.startswith('def ') and line.rstrip().endswith(':'):
+        return line + ' pass'
+    elif line.startswith('return '):
+        return 'def _(): ' + line
+    elif line.startswith('@'):
+        return line + 'def _(): pass'
+    elif line.startswith('class '):
+        return line + ' pass'
+    elif line.startswith('if '):
+        return line + ' pass'
+    else:
+        return line
+
+
+def fix_whitespace(line, offset, replacement):
+    """Replace whitespace at offset and return fixed line."""
+    # Replace escaped newlines too
+    left = line[:offset].rstrip('\n\r \t\\')
+    right = line[offset:].lstrip('\n\r \t\\')
+    if right.startswith('#'):
+        return line
+    else:
+        return left + replacement + right
+
+
+def _execute_pep8(pep8_options, source):
+    """Execute pep8 via python method calls."""
+    class QuietReport(pep8.BaseReport):
+
+        """Version of checker that does not print."""
+
+        def __init__(self, options):
+            super(QuietReport, self).__init__(options)
+            self.__full_error_results = []
+
+        def error(self, line_number, offset, text, _):
+            """Collect errors."""
+            code = super(QuietReport, self).error(line_number, offset, text, _)
+            if code:
+                self.__full_error_results.append(
+                    {'id': code,
+                     'line': line_number,
+                     'column': offset + 1,
+                     'info': text})
+
+        def full_error_results(self):
+            """Return error results in detail.
+
+            Results are in the form of a list of dictionaries. Each
+            dictionary contains 'id', 'line', 'column', and 'info'.
+
+            """
+            return self.__full_error_results
+
+    checker = pep8.Checker('', lines=source,
+                           reporter=QuietReport, **pep8_options)
+    checker.check_all()
+    return checker.report.full_error_results()
+
+
+def _remove_leading_and_normalize(line):
+    return line.lstrip().rstrip(CR + LF) + '\n'
+
+
+class Reindenter(object):
+
+    """Reindents badly-indented code to uniformly use four-space indentation.
+
+    Released to the public domain, by Tim Peters, 03 October 2000.
+
+    """
+
+    def __init__(self, input_text):
+        sio = io.StringIO(input_text)
+        source_lines = sio.readlines()
+
+        self.string_content_line_numbers = multiline_string_lines(input_text)
+
+        # File lines, rstripped & tab-expanded. Dummy at start is so
+        # that we can use tokenize's 1-based line numbering easily.
+        # Note that a line is all-blank iff it is a newline.
+        self.lines = []
+        line_number = 0
+        for line in source_lines:
+            line_number += 1
+            # Do not modify if inside a multiline string.
+            if line_number in self.string_content_line_numbers:
+                self.lines.append(line)
+            else:
+                # Only expand leading tabs.
+                self.lines.append(_get_indentation(line).expandtabs() +
+                                  _remove_leading_and_normalize(line))
+
+        self.lines.insert(0, None)
+        self.index = 1  # index into self.lines of next line
+        self.input_text = input_text
+
+    def run(self, indent_size=DEFAULT_INDENT_SIZE):
+        """Fix indentation and return modified line numbers.
+
+        Line numbers are indexed at 1.
+
+        """
+        if indent_size < 1:
+            return self.input_text
+
+        try:
+            stats = _reindent_stats(tokenize.generate_tokens(self.getline))
+        except (SyntaxError, tokenize.TokenError):
+            return self.input_text
+        # Remove trailing empty lines.
+        lines = self.lines
+        while lines and lines[-1] == '\n':
+            lines.pop()
+        # Sentinel.
+        stats.append((len(lines), 0))
+        # Map count of leading spaces to # we want.
+        have2want = {}
+        # Program after transformation.
+        after = []
+        # Copy over initial empty lines -- there's nothing to do until
+        # we see a line with *something* on it.
+        i = stats[0][0]
+        after.extend(lines[1:i])
+        for i in range(len(stats) - 1):
+            thisstmt, thislevel = stats[i]
+            nextstmt = stats[i + 1][0]
+            have = _leading_space_count(lines[thisstmt])
+            want = thislevel * indent_size
+            if want < 0:
+                # A comment line.
+                if have:
+                    # An indented comment line. If we saw the same
+                    # indentation before, reuse what it most recently
+                    # mapped to.
+                    want = have2want.get(have, -1)
+                    if want < 0:
+                        # Then it probably belongs to the next real stmt.
+                        for j in range(i + 1, len(stats) - 1):
+                            jline, jlevel = stats[j]
+                            if jlevel >= 0:
+                                if have == _leading_space_count(lines[jline]):
+                                    want = jlevel * indent_size
+                                break
+                    if want < 0:            # Maybe it's a hanging
+                                            # comment like this one,
+                        # in which case we should shift it like its base
+                        # line got shifted.
+                        for j in range(i - 1, -1, -1):
+                            jline, jlevel = stats[j]
+                            if jlevel >= 0:
+                                want = (have + _leading_space_count(
+                                        after[jline - 1]) -
+                                        _leading_space_count(lines[jline]))
+                                break
+                    if want < 0:
+                        # Still no luck -- leave it alone.
+                        want = have
+                else:
+                    want = 0
+            assert want >= 0
+            have2want[have] = want
+            diff = want - have
+            if diff == 0 or have == 0:
+                after.extend(lines[thisstmt:nextstmt])
+            else:
+                line_number = thisstmt - 1
+                for line in lines[thisstmt:nextstmt]:
+                    line_number += 1
+                    if line_number in self.string_content_line_numbers:
+                        after.append(line)
+                    elif diff > 0:
+                        if line == '\n':
+                            after.append(line)
+                        else:
+                            after.append(' ' * diff + line)
+                    else:
+                        remove = min(_leading_space_count(line), -diff)
+                        after.append(line[remove:])
+
+        return ''.join(after)
+
+    def getline(self):
+        """Line-getter for tokenize."""
+        if self.index >= len(self.lines):
+            line = ''
+        else:
+            line = self.lines[self.index]
+            self.index += 1
+        return line
+
+
+def _reindent_stats(tokens):
+    """Return list of (lineno, indentlevel) pairs.
+
+    One for each stmt and comment line. indentlevel is -1 for comment lines, as
+    a signal that tokenize doesn't know what to do about them; indeed, they're
+    our headache!
+
+    """
+    find_stmt = 1  # Next token begins a fresh stmt?
+    level = 0  # Current indent level.
+    stats = []
+
+    for t in tokens:
+        token_type = t[0]
+        sline = t[2][0]
+        line = t[4]
+
+        if token_type == tokenize.NEWLINE:
+            # A program statement, or ENDMARKER, will eventually follow,
+            # after some (possibly empty) run of tokens of the form
+            #     (NL | COMMENT)* (INDENT | DEDENT+)?
+            find_stmt = 1
+
+        elif token_type == tokenize.INDENT:
+            find_stmt = 1
+            level += 1
+
+        elif token_type == tokenize.DEDENT:
+            find_stmt = 1
+            level -= 1
+
+        elif token_type == tokenize.COMMENT:
+            if find_stmt:
+                stats.append((sline, -1))
+                # But we're still looking for a new stmt, so leave
+                # find_stmt alone.
+
+        elif token_type == tokenize.NL:
+            pass
+
+        elif find_stmt:
+            # This is the first "real token" following a NEWLINE, so it
+            # must be the first token of the next program statement, or an
+            # ENDMARKER.
+            find_stmt = 0
+            if line:   # Not endmarker.
+                stats.append((sline, level))
+
+    return stats
+
+
+def _leading_space_count(line):
+    """Return number of leading spaces in line."""
+    i = 0
+    while i < len(line) and line[i] == ' ':
+        i += 1
+    return i
+
+
+def refactor_with_2to3(source_text, fixer_names):
+    """Use lib2to3 to refactor the source.
+
+    Return the refactored source code.
+
+    """
+    check_lib2to3()
+    from lib2to3.refactor import RefactoringTool
+    fixers = ['lib2to3.fixes.fix_' + name for name in fixer_names]
+    tool = RefactoringTool(fixer_names=fixers, explicit=fixers)
+
+    from lib2to3.pgen2 import tokenize as lib2to3_tokenize
+    try:
+        return unicode(tool.refactor_string(source_text, name=''))
+    except lib2to3_tokenize.TokenError:
+        return source_text
+
+
+def check_syntax(code):
+    """Return True if syntax is okay."""
+    try:
+        return compile(code, '<string>', 'exec')
+    except (SyntaxError, TypeError, UnicodeDecodeError):
+        return False
+
+
+def filter_results(source, results, aggressive):
+    """Filter out spurious reports from pep8.
+
+    Higher values of aggressive enable possibly unsafe fixes: E711 and W6 at
+    level 1 and above, E712 and E713 at level 2 and above.
+
+    """
+    non_docstring_string_line_numbers = multiline_string_lines(
+        source, include_docstrings=False)
+    all_string_line_numbers = multiline_string_lines(
+        source, include_docstrings=True)
+
+    commented_out_code_line_numbers = commented_out_code_lines(source)
+
+    for r in results:
+        issue_id = r['id'].lower()
+
+        if r['line'] in non_docstring_string_line_numbers:
+            if issue_id.startswith(('e1', 'e501', 'w191')):
+                continue
+
+        if r['line'] in all_string_line_numbers:
+            if issue_id in ['e501']:
+                continue
+
+        # We must offset by 1 for lines that contain the trailing contents of
+        # multiline strings.
+        if not aggressive and (r['line'] + 1) in all_string_line_numbers:
+            # Do not modify multiline strings in non-aggressive mode. Removing
+            # trailing whitespace could break doctests.
+            if issue_id.startswith(('w29', 'w39')):
+                continue
+
+        if aggressive <= 0:
+            if issue_id.startswith(('e711', 'w6')):
+                continue
+
+        if aggressive <= 1:
+            if issue_id.startswith(('e712', 'e713')):
+                continue
+
+        if r['line'] in commented_out_code_line_numbers:
+            if issue_id.startswith(('e26', 'e501')):
+                continue
+
+        yield r
+
+
+def multiline_string_lines(source, include_docstrings=False):
+    """Return line numbers that are within multiline strings.
+
+    The line numbers are indexed at 1.
+
+    Docstrings are ignored unless include_docstrings is True.
+
+    """
+    line_numbers = set()
+    previous_token_type = ''
+    try:
+        for t in generate_tokens(source):
+            token_type = t[0]
+            start_row = t[2][0]
+            end_row = t[3][0]
+
+            if token_type == tokenize.STRING and start_row != end_row:
+                if (
+                    include_docstrings or
+                    previous_token_type != tokenize.INDENT
+                ):
+                    # We increment by one since we want the contents of the
+                    # string.
+                    line_numbers |= set(range(1 + start_row, 1 + end_row))
+
+            previous_token_type = token_type
+    except (SyntaxError, tokenize.TokenError):
+        pass
+
+    return line_numbers
+
+
+def commented_out_code_lines(source):
+    """Return line numbers of comments that are likely code.
+
+    Commented-out code is bad practice, but modifying it just adds even more
+    clutter.
+
+    """
+    line_numbers = []
+    try:
+        for t in generate_tokens(source):
+            token_type = t[0]
+            token_string = t[1]
+            start_row = t[2][0]
+            line = t[4]
+
+            # Ignore inline comments.
+            if not line.lstrip().startswith('#'):
+                continue
+
+            if token_type == tokenize.COMMENT:
+                stripped_line = token_string.lstrip('#').strip()
+                if (
+                    ' ' in stripped_line and
+                    '#' not in stripped_line and
+                    check_syntax(stripped_line)
+                ):
+                    line_numbers.append(start_row)
+    except (SyntaxError, tokenize.TokenError):
+        pass
+
+    return line_numbers
+
+
+def shorten_comment(line, max_line_length, last_comment=False):
+    """Return trimmed or split long comment line.
+
+    If there are no comments immediately following it, do a text wrap.
+    Doing this wrapping on all comments in general would lead to jagged
+    comment text.
+
+    """
+    assert len(line) > max_line_length
+    line = line.rstrip()
+
+    # PEP 8 recommends 72 characters for comment text.
+    indentation = _get_indentation(line) + '# '
+    max_line_length = min(max_line_length,
+                          len(indentation) + 72)
+
+    MIN_CHARACTER_REPEAT = 5
+    if (
+        len(line) - len(line.rstrip(line[-1])) >= MIN_CHARACTER_REPEAT and
+        not line[-1].isalnum()
+    ):
+        # Trim comments that end with things like ---------
+        return line[:max_line_length] + '\n'
+    elif last_comment and re.match(r'\s*#+\s*\w+', line):
+        import textwrap
+        split_lines = textwrap.wrap(line.lstrip(' \t#'),
+                                    initial_indent=indentation,
+                                    subsequent_indent=indentation,
+                                    width=max_line_length,
+                                    break_long_words=False,
+                                    break_on_hyphens=False)
+        return '\n'.join(split_lines) + '\n'
+    else:
+        return line + '\n'
+
+
+def normalize_line_endings(lines, newline):
+    """Return fixed line endings.
+
+    All lines will be modified to use the most common line ending.
+
+    """
+    return [line.rstrip('\n\r') + newline for line in lines]
+
+
+def mutual_startswith(a, b):
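+    # True when either string is a prefix of the other, e.g. 'e1' and 'e121';
+    # used below to match pep8 codes against --select/--ignore prefixes.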
+    return b.startswith(a) or a.startswith(b)
+
+
+def code_match(code, select, ignore):
+    if ignore:
+        assert not isinstance(ignore, unicode)
+        for ignored_code in [c.strip() for c in ignore]:
+            if mutual_startswith(code.lower(), ignored_code.lower()):
+                return False
+
+    if select:
+        assert not isinstance(select, unicode)
+        for selected_code in [c.strip() for c in select]:
+            if mutual_startswith(code.lower(), selected_code.lower()):
+                return True
+        return False
+
+    return True
+
+
+def fix_code(source, options=None, encoding=None):
+    """Return fixed source code."""
+    if not options:
+        options = parse_args([''])
+
+    if not isinstance(source, unicode):
+        source = source.decode(encoding or locale.getpreferredencoding())
+
+    sio = io.StringIO(source)
+    return fix_lines(sio.readlines(), options=options)
+
+
+def fix_lines(source_lines, options, filename=''):
+    """Return fixed source code."""
+    # Transform everything to line feed. Then change them back to original
+    # before returning fixed source code.
+    original_newline = find_newline(source_lines)
+    tmp_source = ''.join(normalize_line_endings(source_lines, '\n'))
+
+    # Keep a history to break out of cycles.
+    previous_hashes = set()
+
+    if options.line_range:
+        fixed_source = apply_local_fixes(tmp_source, options)
+    else:
+        # Apply global fixes only once (for efficiency).
+        fixed_source = apply_global_fixes(tmp_source, options)
+
+    passes = 0
+    long_line_ignore_cache = set()
+    while hash(fixed_source) not in previous_hashes:
+        if options.pep8_passes >= 0 and passes > options.pep8_passes:
+            break
+        passes += 1
+
+        previous_hashes.add(hash(fixed_source))
+
+        tmp_source = copy.copy(fixed_source)
+
+        fix = FixPEP8(
+            filename,
+            options,
+            contents=tmp_source,
+            long_line_ignore_cache=long_line_ignore_cache)
+
+        fixed_source = fix.fix()
+
+    sio = io.StringIO(fixed_source)
+    return ''.join(normalize_line_endings(sio.readlines(), original_newline))
+
+
+def fix_file(filename, options=None, output=None):
+    if not options:
+        options = parse_args([filename])
+
+    original_source = readlines_from_file(filename)
+
+    fixed_source = original_source
+
+    if options.in_place or output:
+        encoding = detect_encoding(filename)
+
+    if output:
+        output = codecs.getwriter(encoding)(output.buffer
+                                            if hasattr(output, 'buffer')
+                                            else output)
+
+        output = LineEndingWrapper(output)
+
+    fixed_source = fix_lines(fixed_source, options, filename=filename)
+
+    if options.diff:
+        new = io.StringIO(fixed_source)
+        new = new.readlines()
+        diff = get_diff_text(original_source, new, filename)
+        if output:
+            output.write(diff)
+            output.flush()
+        else:
+            return diff
+    elif options.in_place:
+        fp = open_with_encoding(filename, encoding=encoding,
+                                mode='w')
+        fp.write(fixed_source)
+        fp.close()
+    else:
+        if output:
+            output.write(fixed_source)
+            output.flush()
+        else:
+            return fixed_source
+
+
+def global_fixes():
+    """Yield multiple (code, function) tuples."""
+    for function in globals().values():
+        if inspect.isfunction(function):
+            arguments = inspect.getargspec(function)[0]
+            if arguments[:1] != ['source']:
+                continue
+
+            code = extract_code_from_function(function)
+            if code:
+                yield (code, function)
+
+
+def apply_global_fixes(source, options, where='global'):
+    """Run global fixes on source code.
+
+    These are fixes that only need be done once (unlike those in
+    FixPEP8, which are dependent on pep8).
+
+    """
+    if code_match('E101', select=options.select, ignore=options.ignore):
+        source = reindent(source,
+                          indent_size=options.indent_size)
+
+    for (code, function) in global_fixes():
+        if code_match(code, select=options.select, ignore=options.ignore):
+            if options.verbose:
+                print('--->  Applying {0} fix for {1}'.format(where,
+                                                              code.upper()),
+                      file=sys.stderr)
+            source = function(source,
+                              aggressive=options.aggressive)
+
+    source = fix_2to3(source,
+                      aggressive=options.aggressive,
+                      select=options.select,
+                      ignore=options.ignore)
+
+    return source
+
+
+def apply_local_fixes(source, options):
+    """Ananologus to apply_global_fixes, but runs only those which makes sense
+    for the given line_range.
+
+    Do as much as we can without breaking code.
+
+    """
+    def find_ge(a, x):
+        """Find leftmost item greater than or equal to x."""
+        i = bisect.bisect_left(a, x)
+        if i != len(a):
+            return i, a[i]
+        return len(a) - 1, a[-1]
+
+    def find_le(a, x):
+        """Find rightmost value less than or equal to x."""
+        i = bisect.bisect_right(a, x)
+        if i:
+            return i - 1, a[i - 1]
+        return 0, a[0]
+
+    def local_fix(source, start_log, end_log,
+                  start_lines, end_lines, indents, last_line):
+        """apply_global_fixes to the source between start_log and end_log.
+
+        The subsource must be syntactically valid as a complete Python program
+        (though all of its lines may share a common indentation). The shared
+        indent is removed, fixes are applied, and the indent is prepended
+        back, taking care not to reindent strings.
+
+        last_line is the strict cut off (options.line_range[1]), so that
+        lines after last_line are not modified.
+
+        """
+        if end_log < start_log:
+            return source
+
+        ind = indents[start_log]
+        indent = _get_indentation(source[start_lines[start_log]])
+
+        sl = slice(start_lines[start_log], end_lines[end_log] + 1)
+
+        subsource = source[sl]
+        # Remove indent from subsource.
+        if ind:
+            for line_no in start_lines[start_log:end_log + 1]:
+                pos = line_no - start_lines[start_log]
+                subsource[pos] = subsource[pos][ind:]
+
+        # Fix indentation of subsource.
+        fixed_subsource = apply_global_fixes(''.join(subsource),
+                                             options,
+                                             where='local')
+        fixed_subsource = fixed_subsource.splitlines(True)
+
+        # Add back indent for non multi-line strings lines.
+        msl = multiline_string_lines(''.join(fixed_subsource),
+                                     include_docstrings=False)
+        for i, line in enumerate(fixed_subsource):
+            if i + 1 not in msl:
+                fixed_subsource[i] = indent + line if line != '\n' else line
+
+        # Special case: if the final logical line is a multiline statement
+        # *and* the cut-off falls somewhere inside it, keep the fixed output
+        # only up to last_line. This assumes the number of lines in that
+        # multiline statement does not change.
+        changed_lines = len(fixed_subsource)
+        if (start_lines[end_log] != end_lines[end_log]
+                and end_lines[end_log] > last_line):
+            after_end = end_lines[end_log] - last_line
+            fixed_subsource = (fixed_subsource[:-after_end] +
+                               source[sl][-after_end:])
+            changed_lines -= after_end
+
+            options.line_range[1] = (options.line_range[0] +
+                                     changed_lines - 1)
+
+        return (source[:start_lines[start_log]] +
+                fixed_subsource +
+                source[end_lines[end_log] + 1:])
+
+    def is_continued_stmt(line,
+                          continued_stmts=frozenset(['else', 'elif',
+                                                     'finally', 'except'])):
+        return re.split('[ :]', line.strip(), 1)[0] in continued_stmts
+
+    assert options.line_range
+    start, end = options.line_range
+    start -= 1
+    end -= 1
+    last_line = end  # We shouldn't modify lines after this cut-off.
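+    # The loop below repeatedly picks the next block of logical lines that
+    # share an indentation level and hands it to local_fix, advancing through
+    # the requested range until every complete statement in it is covered.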
+
+    try:
+        logical = _find_logical(source)
+    except (SyntaxError, tokenize.TokenError):
+        return ''.join(source)
+
+    if not logical[0]:
+        # Only blank lines; just apply the global fixes to the whole source.
+        return apply_global_fixes(source, options)
+
+    start_lines, indents = zip(*logical[0])
+    end_lines, _ = zip(*logical[1])
+
+    source = source.splitlines(True)
+
+    start_log, start = find_ge(start_lines, start)
+    end_log, end = find_le(start_lines, end)
+
+    # Look behind one line, if it's indented less than current indent
+    # then we can move to this previous line knowing that its
+    # indentation level will not be changed.
+    if (start_log > 0
+            and indents[start_log - 1] < indents[start_log]
+            and not is_continued_stmt(source[start_log - 1])):
+        start_log -= 1
+        start = start_lines[start_log]
+
+    while start < end:
+
+        if is_continued_stmt(source[start]):
+            start_log += 1
+            start = start_lines[start_log]
+            continue
+
+        ind = indents[start_log]
+        for t in itertools.takewhile(lambda t: t[1][1] >= ind,
+                                     enumerate(logical[0][start_log:])):
+            n_log, n = start_log + t[0], t[1][0]
+        # start shares indent up to n.
+
+        if n <= end:
+            source = local_fix(source, start_log, n_log,
+                               start_lines, end_lines,
+                               indents, last_line)
+            start_log = n_log if n == end else n_log + 1
+            start = start_lines[start_log]
+            continue
+
+        else:
+            # Look at the line after end and see if it allows us to reindent.
+            after_end_log, after_end = find_ge(start_lines, end + 1)
+
+            if indents[after_end_log] > indents[start_log]:
+                start_log, start = find_ge(start_lines, start + 1)
+                continue
+
+            if (indents[after_end_log] == indents[start_log]
+                    and is_continued_stmt(source[after_end])):
+                # Find n, the beginning of the last continued statement, and
+                # apply the fix to the previous block if there is one.
+                only_block = True
+                for n, n_ind in logical[0][start_log:end_log + 1][::-1]:
+                    if n_ind == ind and not is_continued_stmt(source[n]):
+                        n_log = start_lines.index(n)
+                        source = local_fix(source, start_log, n_log - 1,
+                                           start_lines, end_lines,
+                                           indents, last_line)
+                        start_log = n_log + 1
+                        start = start_lines[start_log]
+                        only_block = False
+                        break
+                if only_block:
+                    end_log, end = find_le(start_lines, end - 1)
+                continue
+
+            source = local_fix(source, start_log, end_log,
+                               start_lines, end_lines,
+                               indents, last_line)
+            break
+
+    return ''.join(source)
+
+
+def extract_code_from_function(function):
+    """Return code handled by function."""
+    if not function.__name__.startswith('fix_'):
+        return None
+
+    code = re.sub('^fix_', '', function.__name__)
+    if not code:
+        return None
+
+    try:
+        int(code[1:])
+    except ValueError:
+        return None
+
+    return code
+
+
+def create_parser():
+    """Return command-line parser."""
+    # Do import locally to be friendly to those who use autopep8 as a library
+    # and are supporting Python 2.6.
+    import argparse
+
+    parser = argparse.ArgumentParser(description=docstring_summary(__doc__),
+                                     prog='autopep8')
+    parser.add_argument('--version', action='version',
+                        version='%(prog)s ' + __version__)
+    parser.add_argument('-v', '--verbose', action='count', dest='verbose',
+                        default=0,
+                        help='print verbose messages; '
+                        'multiple -v result in more verbose messages')
+    parser.add_argument('-d', '--diff', action='store_true', dest='diff',
+                        help='print the diff for the fixed source')
+    parser.add_argument('-i', '--in-place', action='store_true',
+                        help='make changes to files in place')
+    parser.add_argument('-r', '--recursive', action='store_true',
+                        help='run recursively over directories; '
+                        'must be used with --in-place or --diff')
+    parser.add_argument('-j', '--jobs', type=int, metavar='n', default=1,
+                        help='number of parallel jobs; '
+                        'match CPU count if value is less than 1')
+    parser.add_argument('-p', '--pep8-passes', metavar='n',
+                        default=-1, type=int,
+                        help='maximum number of additional pep8 passes '
+                        '(default: infinite)')
+    parser.add_argument('-a', '--aggressive', action='count', default=0,
+                        help='enable non-whitespace changes; '
+                        'multiple -a result in more aggressive changes')
+    parser.add_argument('--experimental', action='store_true',
+                        help='enable experimental fixes')
+    parser.add_argument('--exclude', metavar='globs',
+                        help='exclude file/directory names that match these '
+                        'comma-separated globs')
+    parser.add_argument('--list-fixes', action='store_true',
+                        help='list codes for fixes; '
+                        'used by --ignore and --select')
+    parser.add_argument('--ignore', metavar='errors', default='',
+                        help='do not fix these errors/warnings '
+                        '(default: {0})'.format(DEFAULT_IGNORE))
+    parser.add_argument('--select', metavar='errors', default='',
+                        help='fix only these errors/warnings (e.g. E4,W)')
+    parser.add_argument('--max-line-length', metavar='n', default=79, type=int,
+                        help='set maximum allowed line length '
+                        '(default: %(default)s)')
+    parser.add_argument('--range', metavar='line', dest='line_range',
+                        default=None, type=int, nargs=2,
+                        help='only fix errors found within this inclusive '
+                        'range of line numbers (e.g. 1 99); '
+                        'line numbers are indexed at 1')
+    parser.add_argument('--indent-size', default=DEFAULT_INDENT_SIZE,
+                        type=int, metavar='n',
+                        help='number of spaces per indent level '
+                             '(default %(default)s)')
+    parser.add_argument('files', nargs='*',
+                        help="files to format or '-' for standard in")
+
+    return parser
+
+
+def parse_args(arguments):
+    """Parse command-line options."""
+    parser = create_parser()
+    args = parser.parse_args(arguments)
+
+    if not args.files and not args.list_fixes:
+        parser.error('incorrect number of arguments')
+
+    args.files = [decode_filename(name) for name in args.files]
+
+    if '-' in args.files:
+        if len(args.files) > 1:
+            parser.error('cannot mix stdin and regular files')
+
+        if args.diff:
+            parser.error('--diff cannot be used with standard input')
+
+        if args.in_place:
+            parser.error('--in-place cannot be used with standard input')
+
+        if args.recursive:
+            parser.error('--recursive cannot be used with standard input')
+
+    if len(args.files) > 1 and not (args.in_place or args.diff):
+        parser.error('autopep8 only takes one filename as argument '
+                     'unless the "--in-place" or "--diff" args are '
+                     'used')
+
+    if args.recursive and not (args.in_place or args.diff):
+        parser.error('--recursive must be used with --in-place or --diff')
+
+    if args.exclude and not args.recursive:
+        parser.error('--exclude is only relevant when used with --recursive')
+
+    if args.in_place and args.diff:
+        parser.error('--in-place and --diff are mutually exclusive')
+
+    if args.max_line_length <= 0:
+        parser.error('--max-line-length must be greater than 0')
+
+    if args.select:
+        args.select = args.select.split(',')
+
+    if args.ignore:
+        args.ignore = args.ignore.split(',')
+    elif not args.select:
+        if args.aggressive:
+            # Enable everything by default if aggressive.
+            args.select = ['E', 'W']
+        else:
+            args.ignore = DEFAULT_IGNORE.split(',')
+
+    if args.exclude:
+        args.exclude = args.exclude.split(',')
+    else:
+        args.exclude = []
+
+    if args.jobs < 1:
+        # Do not import multiprocessing globally in case it is not supported
+        # on the platform.
+        import multiprocessing
+        args.jobs = multiprocessing.cpu_count()
+
+    if args.jobs > 1 and not args.in_place:
+        parser.error('parallel jobs requires --in-place')
+
+    if args.line_range:
+        if args.line_range[0] <= 0:
+            parser.error('--range must be positive numbers')
+        if args.line_range[0] > args.line_range[1]:
+            parser.error('First value of --range should be less than or equal '
+                         'to the second')
+
+    return args
+
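+# Editor's note (illustrative sketch; not part of upstream autopep8):
+# parse_args() is also the hook for driving this module programmatically, e.g.
+#
+#     args = parse_args(['--max-line-length', '100', 'example.py'])
+#     print(args.max_line_length)   # -> 100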
+
+def decode_filename(filename):
+    """Return Unicode filename."""
+    if isinstance(filename, unicode):
+        return filename
+    else:
+        return filename.decode(sys.getfilesystemencoding())
+
+
+def supported_fixes():
+    """Yield pep8 error codes that autopep8 fixes.
+
+    Each item we yield is a tuple of the code followed by its
+    description.
+
+    """
+    yield ('E101', docstring_summary(reindent.__doc__))
+
+    instance = FixPEP8(filename=None, options=None, contents='')
+    for attribute in dir(instance):
+        code = re.match('fix_([ew][0-9][0-9][0-9])', attribute)
+        if code:
+            yield (
+                code.group(1).upper(),
+                re.sub(r'\s+', ' ',
+                       docstring_summary(getattr(instance, attribute).__doc__))
+            )
+
+    for (code, function) in sorted(global_fixes()):
+        yield (code.upper() + (4 - len(code)) * ' ',
+               re.sub(r'\s+', ' ', docstring_summary(function.__doc__)))
+
+    for code in sorted(CODE_TO_2TO3):
+        yield (code.upper() + (4 - len(code)) * ' ',
+               re.sub(r'\s+', ' ', docstring_summary(fix_2to3.__doc__)))
+
+
+def docstring_summary(docstring):
+    """Return summary of docstring."""
+    return docstring.split('\n')[0]
+
+
+def line_shortening_rank(candidate, indent_word, max_line_length,
+                         experimental=False):
+    """Return rank of candidate.
+
+    This is for sorting candidates.
+
+    """
+    if not candidate.strip():
+        return 0
+
+    rank = 0
+    lines = candidate.split('\n')
+
+    offset = 0
+    if (
+        not lines[0].lstrip().startswith('#') and
+        lines[0].rstrip()[-1] not in '([{'
+    ):
+        for (opening, closing) in ('()', '[]', '{}'):
+            # Don't penalize empty containers that aren't split up. Things like
+            # this "foo(\n    )" aren't particularly good.
+            opening_loc = lines[0].find(opening)
+            closing_loc = lines[0].find(closing)
+            if opening_loc >= 0:
+                if closing_loc < 0 or closing_loc != opening_loc + 1:
+                    offset = max(offset, 1 + opening_loc)
+
+    current_longest = max(offset + len(x.strip()) for x in lines)
+
+    rank += 4 * max(0, current_longest - max_line_length)
+
+    rank += len(lines)
+
+    # Too much variation in line length is ugly.
+    rank += 2 * standard_deviation(len(line) for line in lines)
+
+    bad_starting_symbol = {
+        '(': ')',
+        '[': ']',
+        '{': '}'}.get(lines[0][-1])
+
+    if len(lines) > 1:
+        if (
+            bad_starting_symbol and
+            lines[1].lstrip().startswith(bad_staring_symbol)
+        ):
+            rank += 20
+
+    for lineno, current_line in enumerate(lines):
+        current_line = current_line.strip()
+
+        if current_line.startswith('#'):
+            continue
+
+        for bad_start in ['.', '%', '+', '-', '/']:
+            if current_line.startswith(bad_start):
+                rank += 100
+
+            # Do not tolerate operators on their own line.
+            if current_line == bad_start:
+                rank += 1000
+
+        if current_line.endswith(('(', '[', '{', '.')):
+            # Avoid lonely openings. They result in longer lines.
+            if len(current_line) <= len(indent_word):
+                rank += 100
+
+            # Avoid the ugliness of ", (\n".
+            if (
+                current_line.endswith('(') and
+                current_line[:-1].rstrip().endswith(',')
+            ):
+                rank += 100
+
+            # Also avoid the ugliness of "foo.\nbar"
+            if current_line.endswith('.'):
+                rank += 100
+
+            if has_arithmetic_operator(current_line):
+                rank += 100
+
+        if current_line.endswith(('%', '(', '[', '{')):
+            rank -= 20
+
+        # Try to break list comprehensions at the "for".
+        if current_line.startswith('for '):
+            rank -= 50
+
+        if current_line.endswith('\\'):
+            # If a line ends in \-newline, it may be part of a
+            # multiline string. In that case, we would like to know
+            # how long that line is without the \-newline. If it's
+            # longer than the maximum, or has comments, then we assume
+            # that the \-newline is an okay candidate and only
+            # penalize it a bit.
+            total_len = len(current_line)
+            lineno += 1
+            while lineno < len(lines):
+                total_len += len(lines[lineno])
+
+                if lines[lineno].lstrip().startswith('#'):
+                    total_len = max_line_length
+                    break
+
+                if not lines[lineno].endswith('\\'):
+                    break
+
+                lineno += 1
+
+            if total_len < max_line_length:
+                rank += 10
+            else:
+                rank += 100 if experimental else 1
+
+        # Prefer breaking at commas rather than colon.
+        if ',' in current_line and current_line.endswith(':'):
+            rank += 10
+
+        rank += 10 * count_unbalanced_brackets(current_line)
+
+    return max(0, rank)
+
+
+def standard_deviation(numbers):
+    """Return standard devation."""
+    numbers = list(numbers)
+    if not numbers:
+        return 0
+    mean = sum(numbers) / len(numbers)
+    return (sum((n - mean) ** 2 for n in numbers) /
+            len(numbers)) ** .5
+
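+# Editor's note (illustrative sketch; not part of upstream autopep8): this is
+# the population standard deviation (divide by n, not n - 1), e.g.
+#
+#     standard_deviation([2, 4, 4, 4, 5, 5, 7, 9])   # -> 2.0 (mean 5, variance 4)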
+
+def has_arithmetic_operator(line):
+    """Return True if line contains any arithmetic operators."""
+    for operator in pep8.ARITHMETIC_OP:
+        if operator in line:
+            return True
+
+    return False
+
+
+def count_unbalanced_brackets(line):
+    """Return number of unmatched open/close brackets."""
+    count = 0
+    for opening, closing in ['()', '[]', '{}']:
+        count += abs(line.count(opening) - line.count(closing))
+
+    return count
+
+
+def split_at_offsets(line, offsets):
+    """Split line at offsets.
+
+    Return list of strings.
+
+    """
+    result = []
+
+    previous_offset = 0
+    current_offset = 0
+    for current_offset in sorted(offsets):
+        if current_offset < len(line) and previous_offset != current_offset:
+            result.append(line[previous_offset:current_offset].strip())
+        previous_offset = current_offset
+
+    result.append(line[current_offset:])
+
+    return result
+
+
+class LineEndingWrapper(object):
+
+    r"""Replace line endings to work with sys.stdout.
+
+    It seems that sys.stdout expects only '\n' as the line ending, no matter
+    the platform. Otherwise, we get repeated line endings.
+
+    """
+
+    def __init__(self, output):
+        self.__output = output
+
+    def write(self, s):
+        self.__output.write(s.replace('\r\n', '\n').replace('\r', '\n'))
+
+    def flush(self):
+        self.__output.flush()
+
+
+def match_file(filename, exclude):
+    """Return True if file is okay for modifying/recursing."""
+    base_name = os.path.basename(filename)
+
+    if base_name.startswith('.'):
+        return False
+
+    for pattern in exclude:
+        if fnmatch.fnmatch(base_name, pattern):
+            return False
+
+    if not os.path.isdir(filename) and not is_python_file(filename):
+        return False
+
+    return True
+
+
+def find_files(filenames, recursive, exclude):
+    """Yield filenames."""
+    while filenames:
+        name = filenames.pop(0)
+        if recursive and os.path.isdir(name):
+            for root, directories, children in os.walk(name):
+                filenames += [os.path.join(root, f) for f in children
+                              if match_file(os.path.join(root, f),
+                                            exclude)]
+                directories[:] = [d for d in directories
+                                  if match_file(os.path.join(root, d),
+                                                exclude)]
+        else:
+            yield name
+
+
+def _fix_file(parameters):
+    """Helper function for optionally running fix_file() in parallel."""
+    if parameters[1].verbose:
+        print('[file:{0}]'.format(parameters[0]), file=sys.stderr)
+    try:
+        fix_file(*parameters)
+    except IOError as error:
+        print(unicode(error), file=sys.stderr)
+
+
+def fix_multiple_files(filenames, options, output=None):
+    """Fix list of files.
+
+    Optionally fix files recursively.
+
+    """
+    filenames = find_files(filenames, options.recursive, options.exclude)
+    if options.jobs > 1:
+        import multiprocessing
+        pool = multiprocessing.Pool(options.jobs)
+        pool.map(_fix_file,
+                 [(name, options) for name in filenames])
+    else:
+        for name in filenames:
+            _fix_file((name, options, output))
+
+
+def is_python_file(filename):
+    """Return True if filename is Python file."""
+    if filename.endswith('.py'):
+        return True
+
+    try:
+        with open_with_encoding(filename) as f:
+            first_line = f.readlines(1)[0]
+    except (IOError, IndexError):
+        return False
+
+    if not PYTHON_SHEBANG_REGEX.match(first_line):
+        return False
+
+    return True
+
+
+def is_probably_part_of_multiline(line):
+    """Return True if line is likely part of a multiline string.
+
+    When multiline strings are involved, pep8 reports the error as being
+    at the start of the multiline string, which doesn't work for us.
+
+    """
+    return (
+        '"""' in line or
+        "'''" in line or
+        line.rstrip().endswith('\\')
+    )
+
+
+def main():
+    """Tool main."""
+    try:
+        # Exit on broken pipe.
+        signal.signal(signal.SIGPIPE, signal.SIG_DFL)
+    except AttributeError:  # pragma: no cover
+        # SIGPIPE is not available on Windows.
+        pass
+
+    try:
+        args = parse_args(sys.argv[1:])
+
+        if args.list_fixes:
+            for code, description in sorted(supported_fixes()):
+                print('{code} - {description}'.format(
+                    code=code, description=description))
+            return 0
+
+        if args.files == ['-']:
+            assert not args.in_place
+
+            # LineEndingWrapper is unnecessary here due to the symmetry between
+            # standard in and standard out.
+
+            sys.stdout.write(
+                fix_code(
+                    sys.stdin.read(),
+                    args,
+                    encoding=sys.stdin.encoding))
+        else:
+            if args.in_place or args.diff:
+                args.files = list(set(args.files))
+            else:
+                assert len(args.files) == 1
+                assert not args.recursive
+
+            fix_multiple_files(args.files, args, sys.stdout)
+    except KeyboardInterrupt:
+        return 1  # pragma: no cover
+
+
+class CachedTokenizer(object):
+
+    """A one-element cache around tokenize.generate_tokens().
+
+    Original code written by Ned Batchelder, in coverage.py.
+
+    """
+
+    def __init__(self):
+        self.last_text = None
+        self.last_tokens = None
+
+    def generate_tokens(self, text):
+        """A stand-in for tokenize.generate_tokens()."""
+        if text != self.last_text:
+            string_io = io.StringIO(text)
+            self.last_tokens = list(
+                tokenize.generate_tokens(string_io.readline)
+            )
+            self.last_text = text
+        return self.last_tokens
+
+_cached_tokenizer = CachedTokenizer()
+generate_tokens = _cached_tokenizer.generate_tokens
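+# Editor's note (illustrative sketch; not part of upstream autopep8): repeated
+# calls with identical source text are served from the one-element cache, e.g.
+#
+#     tokens_a = generate_tokens('x = 1\n')
+#     tokens_b = generate_tokens('x = 1\n')
+#     assert tokens_a is tokens_b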
+
+
+if __name__ == '__main__':
+    sys.exit(main())
diff --git a/python/helpers/pydev/third_party/pep8/lib2to3/lib2to3/.gitignore b/python/helpers/pydev/third_party/pep8/lib2to3/lib2to3/.gitignore
new file mode 100644
index 0000000..1c45ce5
--- /dev/null
+++ b/python/helpers/pydev/third_party/pep8/lib2to3/lib2to3/.gitignore
@@ -0,0 +1 @@
+*.pickle
diff --git a/python/helpers/pydev/third_party/pep8/lib2to3/lib2to3/Grammar.txt b/python/helpers/pydev/third_party/pep8/lib2to3/lib2to3/Grammar.txt
new file mode 100644
index 0000000..1e1f24c
--- /dev/null
+++ b/python/helpers/pydev/third_party/pep8/lib2to3/lib2to3/Grammar.txt
@@ -0,0 +1,158 @@
+# Grammar for 2to3. This grammar supports Python 2.x and 3.x.
+
+# Note:  Changing the grammar specified in this file will most likely
+#        require corresponding changes in the parser module
+#        (../Modules/parsermodule.c).  If you can't make the changes to
+#        that module yourself, please co-ordinate the required changes
+#        with someone who can; ask around on python-dev for help.  Fred
+#        Drake <fdrake@acm.org> will probably be listening there.
+
+# NOTE WELL: You should also follow all the steps listed in PEP 306,
+# "How to Change Python's Grammar"
+
+# Commands for Kees Blom's railroad program
+#diagram:token NAME
+#diagram:token NUMBER
+#diagram:token STRING
+#diagram:token NEWLINE
+#diagram:token ENDMARKER
+#diagram:token INDENT
+#diagram:output\input python.bla
+#diagram:token DEDENT
+#diagram:output\textwidth 20.04cm\oddsidemargin  0.0cm\evensidemargin 0.0cm
+#diagram:rules
+
+# Start symbols for the grammar:
+#	file_input is a module or sequence of commands read from an input file;
+#	single_input is a single interactive statement;
+#	eval_input is the input for the eval() and input() functions.
+# NB: compound_stmt in single_input is followed by extra NEWLINE!
+file_input: (NEWLINE | stmt)* ENDMARKER
+single_input: NEWLINE | simple_stmt | compound_stmt NEWLINE
+eval_input: testlist NEWLINE* ENDMARKER
+
+decorator: '@' dotted_name [ '(' [arglist] ')' ] NEWLINE
+decorators: decorator+
+decorated: decorators (classdef | funcdef)
+funcdef: 'def' NAME parameters ['->' test] ':' suite
+parameters: '(' [typedargslist] ')'
+typedargslist: ((tfpdef ['=' test] ',')*
+                ('*' [tname] (',' tname ['=' test])* [',' '**' tname] | '**' tname)
+                | tfpdef ['=' test] (',' tfpdef ['=' test])* [','])
+tname: NAME [':' test]
+tfpdef: tname | '(' tfplist ')'
+tfplist: tfpdef (',' tfpdef)* [',']
+varargslist: ((vfpdef ['=' test] ',')*
+              ('*' [vname] (',' vname ['=' test])*  [',' '**' vname] | '**' vname)
+              | vfpdef ['=' test] (',' vfpdef ['=' test])* [','])
+vname: NAME
+vfpdef: vname | '(' vfplist ')'
+vfplist: vfpdef (',' vfpdef)* [',']
+
+stmt: simple_stmt | compound_stmt
+simple_stmt: small_stmt (';' small_stmt)* [';'] NEWLINE
+small_stmt: (expr_stmt | print_stmt  | del_stmt | pass_stmt | flow_stmt |
+             import_stmt | global_stmt | exec_stmt | assert_stmt)
+expr_stmt: testlist_star_expr (augassign (yield_expr|testlist) |
+                     ('=' (yield_expr|testlist_star_expr))*)
+testlist_star_expr: (test|star_expr) (',' (test|star_expr))* [',']
+augassign: ('+=' | '-=' | '*=' | '/=' | '%=' | '&=' | '|=' | '^=' |
+            '<<=' | '>>=' | '**=' | '//=')
+# For normal assignments, additional restrictions enforced by the interpreter
+print_stmt: 'print' ( [ test (',' test)* [','] ] |
+                      '>>' test [ (',' test)+ [','] ] )
+del_stmt: 'del' exprlist
+pass_stmt: 'pass'
+flow_stmt: break_stmt | continue_stmt | return_stmt | raise_stmt | yield_stmt
+break_stmt: 'break'
+continue_stmt: 'continue'
+return_stmt: 'return' [testlist]
+yield_stmt: yield_expr
+raise_stmt: 'raise' [test ['from' test | ',' test [',' test]]]
+import_stmt: import_name | import_from
+import_name: 'import' dotted_as_names
+import_from: ('from' ('.'* dotted_name | '.'+)
+              'import' ('*' | '(' import_as_names ')' | import_as_names))
+import_as_name: NAME ['as' NAME]
+dotted_as_name: dotted_name ['as' NAME]
+import_as_names: import_as_name (',' import_as_name)* [',']
+dotted_as_names: dotted_as_name (',' dotted_as_name)*
+dotted_name: NAME ('.' NAME)*
+global_stmt: ('global' | 'nonlocal') NAME (',' NAME)*
+exec_stmt: 'exec' expr ['in' test [',' test]]
+assert_stmt: 'assert' test [',' test]
+
+compound_stmt: if_stmt | while_stmt | for_stmt | try_stmt | with_stmt | funcdef | classdef | decorated
+if_stmt: 'if' test ':' suite ('elif' test ':' suite)* ['else' ':' suite]
+while_stmt: 'while' test ':' suite ['else' ':' suite]
+for_stmt: 'for' exprlist 'in' testlist ':' suite ['else' ':' suite]
+try_stmt: ('try' ':' suite
+           ((except_clause ':' suite)+
+	    ['else' ':' suite]
+	    ['finally' ':' suite] |
+	   'finally' ':' suite))
+with_stmt: 'with' with_item (',' with_item)*  ':' suite
+with_item: test ['as' expr]
+with_var: 'as' expr
+# NB compile.c makes sure that the default except clause is last
+except_clause: 'except' [test [(',' | 'as') test]]
+suite: simple_stmt | NEWLINE INDENT stmt+ DEDENT
+
+# Backward compatibility cruft to support:
+# [ x for x in lambda: True, lambda: False if x() ]
+# even while also allowing:
+# lambda x: 5 if x else 2
+# (But not a mix of the two)
+testlist_safe: old_test [(',' old_test)+ [',']]
+old_test: or_test | old_lambdef
+old_lambdef: 'lambda' [varargslist] ':' old_test
+
+test: or_test ['if' or_test 'else' test] | lambdef
+or_test: and_test ('or' and_test)*
+and_test: not_test ('and' not_test)*
+not_test: 'not' not_test | comparison
+comparison: expr (comp_op expr)*
+comp_op: '<'|'>'|'=='|'>='|'<='|'<>'|'!='|'in'|'not' 'in'|'is'|'is' 'not'
+star_expr: '*' expr
+expr: xor_expr ('|' xor_expr)*
+xor_expr: and_expr ('^' and_expr)*
+and_expr: shift_expr ('&' shift_expr)*
+shift_expr: arith_expr (('<<'|'>>') arith_expr)*
+arith_expr: term (('+'|'-') term)*
+term: factor (('*'|'/'|'%'|'//') factor)*
+factor: ('+'|'-'|'~') factor | power
+power: atom trailer* ['**' factor]
+atom: ('(' [yield_expr|testlist_gexp] ')' |
+       '[' [listmaker] ']' |
+       '{' [dictsetmaker] '}' |
+       '`' testlist1 '`' |
+       NAME | NUMBER | STRING+ | '.' '.' '.')
+listmaker: (test|star_expr) ( comp_for | (',' (test|star_expr))* [','] )
+testlist_gexp: (test|star_expr) ( comp_for | (',' (test|star_expr))* [','] )
+lambdef: 'lambda' [varargslist] ':' test
+trailer: '(' [arglist] ')' | '[' subscriptlist ']' | '.' NAME
+subscriptlist: subscript (',' subscript)* [',']
+subscript: test | [test] ':' [test] [sliceop]
+sliceop: ':' [test]
+exprlist: (expr|star_expr) (',' (expr|star_expr))* [',']
+testlist: test (',' test)* [',']
+dictsetmaker: ( (test ':' test (comp_for | (',' test ':' test)* [','])) |
+                (test (comp_for | (',' test)* [','])) )
+
+classdef: 'class' NAME ['(' [arglist] ')'] ':' suite
+
+arglist: (argument ',')* (argument [',']
+                         |'*' test (',' argument)* [',' '**' test] 
+                         |'**' test)
+argument: test [comp_for] | test '=' test  # Really [keyword '='] test
+
+comp_iter: comp_for | comp_if
+comp_for: 'for' exprlist 'in' testlist_safe [comp_iter]
+comp_if: 'if' old_test [comp_iter]
+
+testlist1: test (',' test)*
+
+# not used in grammar, but may appear in "node" passed from Parser to Compiler
+encoding_decl: NAME
+
+yield_expr: 'yield' [testlist]
diff --git a/python/helpers/pydev/third_party/pep8/lib2to3/lib2to3/PatternGrammar.txt b/python/helpers/pydev/third_party/pep8/lib2to3/lib2to3/PatternGrammar.txt
new file mode 100644
index 0000000..36bf814
--- /dev/null
+++ b/python/helpers/pydev/third_party/pep8/lib2to3/lib2to3/PatternGrammar.txt
@@ -0,0 +1,28 @@
+# Copyright 2006 Google, Inc. All Rights Reserved.
+# Licensed to PSF under a Contributor Agreement.
+
+# A grammar to describe tree matching patterns.
+# Not shown here:
+# - 'TOKEN' stands for any token (leaf node)
+# - 'any' stands for any node (leaf or interior)
+# With 'any' we can still specify the sub-structure.
+
+# The start symbol is 'Matcher'.
+
+Matcher: Alternatives ENDMARKER
+
+Alternatives: Alternative ('|' Alternative)*
+
+Alternative: (Unit | NegatedUnit)+
+
+Unit: [NAME '='] ( STRING [Repeater]
+                 | NAME [Details] [Repeater]
+                 | '(' Alternatives ')' [Repeater]
+                 | '[' Alternatives ']'
+		 )
+
+NegatedUnit: 'not' (STRING | NAME [Details] | '(' Alternatives ')')
+
+Repeater: '*' | '+' | '{' NUMBER [',' NUMBER] '}'
+
+Details: '<' Alternatives '>'
diff --git a/python/helpers/pydev/third_party/pep8/lib2to3/lib2to3/__init__.py b/python/helpers/pydev/third_party/pep8/lib2to3/lib2to3/__init__.py
new file mode 100644
index 0000000..ea30561
--- /dev/null
+++ b/python/helpers/pydev/third_party/pep8/lib2to3/lib2to3/__init__.py
@@ -0,0 +1 @@
+#empty
diff --git a/python/helpers/pydev/third_party/pep8/lib2to3/lib2to3/__main__.py b/python/helpers/pydev/third_party/pep8/lib2to3/lib2to3/__main__.py
new file mode 100644
index 0000000..80688ba
--- /dev/null
+++ b/python/helpers/pydev/third_party/pep8/lib2to3/lib2to3/__main__.py
@@ -0,0 +1,4 @@
+import sys
+from .main import main
+
+sys.exit(main("lib2to3.fixes"))
diff --git a/python/helpers/pydev/third_party/pep8/lib2to3/lib2to3/btm_matcher.py b/python/helpers/pydev/third_party/pep8/lib2to3/lib2to3/btm_matcher.py
new file mode 100644
index 0000000..736ba2b
--- /dev/null
+++ b/python/helpers/pydev/third_party/pep8/lib2to3/lib2to3/btm_matcher.py
@@ -0,0 +1,168 @@
+"""A bottom-up tree matching algorithm implementation meant to speed
+up 2to3's matching process. After the tree patterns are reduced to
+their rarest linear path, a linear Aho-Corasick automaton is
+created. The linear automaton traverses the linear paths from the
+leaves to the root of the AST and returns a set of nodes for further
+matching. This significantly reduces the number of candidate nodes."""
+
+__author__ = "George Boutsioukis <gboutsioukis@gmail.com>"
+
+import logging
+import itertools
+from collections import defaultdict
+
+from . import pytree
+from .btm_utils import reduce_tree
+
+class BMNode(object):
+    """Class for a node of the Aho-Corasick automaton used in matching"""
+    count = itertools.count()
+    def __init__(self):
+        self.transition_table = {}
+        self.fixers = []
+        self.id = next(BMNode.count)
+        self.content = ''
+
+class BottomMatcher(object):
+    """The main matcher class. After instantiating the patterns should
+    be added using the add_fixer method"""
+
+    def __init__(self):
+        self.match = set()
+        self.root = BMNode()
+        self.nodes = [self.root]
+        self.fixers = []
+        self.logger = logging.getLogger("RefactoringTool")
+
+    def add_fixer(self, fixer):
+        """Reduces a fixer's pattern tree to a linear path and adds it
+        to the matcher(a common Aho-Corasick automaton). The fixer is
+        appended on the matching states and called when they are
+        reached"""
+        self.fixers.append(fixer)
+        tree = reduce_tree(fixer.pattern_tree)
+        linear = tree.get_linear_subpattern()
+        match_nodes = self.add(linear, start=self.root)
+        for match_node in match_nodes:
+            match_node.fixers.append(fixer)
+
+    def add(self, pattern, start):
+        "Recursively adds a linear pattern to the AC automaton"
+        #print("adding pattern", pattern, "to", start)
+        if not pattern:
+            #print("empty pattern")
+            return [start]
+        if isinstance(pattern[0], tuple):
+            #alternatives
+            #print("alternatives")
+            match_nodes = []
+            for alternative in pattern[0]:
+                #add all alternatives, and add the rest of the pattern
+                #to each end node
+                end_nodes = self.add(alternative, start=start)
+                for end in end_nodes:
+                    match_nodes.extend(self.add(pattern[1:], end))
+            return match_nodes
+        else:
+            #single token
+            #not last
+            if pattern[0] not in start.transition_table:
+                #transition did not exist, create new
+                next_node = BMNode()
+                start.transition_table[pattern[0]] = next_node
+            else:
+                #transition exists already, follow
+                next_node = start.transition_table[pattern[0]]
+
+            if pattern[1:]:
+                end_nodes = self.add(pattern[1:], start=next_node)
+            else:
+                end_nodes = [next_node]
+            return end_nodes
+
+    def run(self, leaves):
+        """The main interface with the bottom matcher. The tree is
+        traversed from the bottom using the constructed
+        automaton. Nodes are only checked once as the tree is
+        retraversed. When the automaton fails, we give it one more
+        shot (in case the above tree matches as a whole with the
+        rejected leaf), then we break for the next leaf. There is the
+        special case of multiple arguments (see code comments) where we
+        recheck the nodes.
+
+        Args:
+           The leaves of the AST tree to be matched
+
+        Returns:
+           A dictionary of node matches with fixers as the keys
+        """
+        current_ac_node = self.root
+        results = defaultdict(list)
+        for leaf in leaves:
+            current_ast_node = leaf
+            while current_ast_node:
+                current_ast_node.was_checked = True
+                for child in current_ast_node.children:
+                    # multiple statements, recheck
+                    if isinstance(child, pytree.Leaf) and child.value == u";":
+                        current_ast_node.was_checked = False
+                        break
+                if current_ast_node.type == 1:
+                    #name
+                    node_token = current_ast_node.value
+                else:
+                    node_token = current_ast_node.type
+
+                if node_token in current_ac_node.transition_table:
+                    #token matches
+                    current_ac_node = current_ac_node.transition_table[node_token]
+                    for fixer in current_ac_node.fixers:
+                        if not fixer in results:
+                            results[fixer] = []
+                        results[fixer].append(current_ast_node)
+
+                else:
+                    #matching failed, reset automaton
+                    current_ac_node = self.root
+                    if (current_ast_node.parent is not None
+                        and current_ast_node.parent.was_checked):
+                        #the rest of the tree upwards has been checked, next leaf
+                        break
+
+                    #recheck the rejected node once from the root
+                    if node_token in current_ac_node.transition_table:
+                        #token matches
+                        current_ac_node = current_ac_node.transition_table[node_token]
+                        for fixer in current_ac_node.fixers:
+                            if not fixer in results.keys():
+                                results[fixer] = []
+                            results[fixer].append(current_ast_node)
+
+                current_ast_node = current_ast_node.parent
+        return results
+
+    def print_ac(self):
+        "Prints a graphviz diagram of the BM automaton(for debugging)"
+        print("digraph g{")
+        def print_node(node):
+            for subnode_key in node.transition_table.keys():
+                subnode = node.transition_table[subnode_key]
+                print("%d -> %d [label=%s] //%s" %
+                      (node.id, subnode.id, type_repr(subnode_key), str(subnode.fixers)))
+                if subnode_key == 1:
+                    print(subnode.content)
+                print_node(subnode)
+        print_node(self.root)
+        print("}")
+
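+# Editor's note (illustrative sketch; not part of upstream lib2to3): typical
+# driving code, roughly what refactor.RefactoringTool does for BM_compatible
+# fixers:
+#
+#     matcher = BottomMatcher()
+#     for fixer in bm_fixers:            # hypothetical list of BM_compatible fixers
+#         matcher.add_fixer(fixer)
+#     hits = matcher.run(tree.leaves())  # dict mapping fixer -> matching AST nodes
+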
+# taken from pytree.py for debugging; only used by print_ac
+_type_reprs = {}
+def type_repr(type_num):
+    global _type_reprs
+    if not _type_reprs:
+        from .pygram import python_symbols
+        # printing tokens is possible but not as useful
+        # from .pgen2 import token // token.__dict__.items():
+        for name, val in python_symbols.__dict__.items():
+            if type(val) == int: _type_reprs[val] = name
+    return _type_reprs.setdefault(type_num, type_num)
diff --git a/python/helpers/pydev/third_party/pep8/lib2to3/lib2to3/btm_utils.py b/python/helpers/pydev/third_party/pep8/lib2to3/lib2to3/btm_utils.py
new file mode 100644
index 0000000..2276dc9
--- /dev/null
+++ b/python/helpers/pydev/third_party/pep8/lib2to3/lib2to3/btm_utils.py
@@ -0,0 +1,283 @@
+"Utility functions used by the btm_matcher module"
+
+from . import pytree
+from .pgen2 import grammar, token
+from .pygram import pattern_symbols, python_symbols
+
+syms = pattern_symbols
+pysyms = python_symbols
+tokens = grammar.opmap
+token_labels = token
+
+TYPE_ANY = -1
+TYPE_ALTERNATIVES = -2
+TYPE_GROUP = -3
+
+class MinNode(object):
+    """This class serves as an intermediate representation of the
+    pattern tree during the conversion to sets of leaf-to-root
+    subpatterns"""
+
+    def __init__(self, type=None, name=None):
+        self.type = type
+        self.name = name
+        self.children = []
+        self.leaf = False
+        self.parent = None
+        self.alternatives = []
+        self.group = []
+
+    def __repr__(self):
+        return str(self.type) + ' ' + str(self.name)
+
+    def leaf_to_root(self):
+        """Internal method. Returns a characteristic path of the
+        pattern tree. This method must be run for all leaves until the
+        linear subpatterns are merged into a single one."""
+        node = self
+        subp = []
+        while node:
+            if node.type == TYPE_ALTERNATIVES:
+                node.alternatives.append(subp)
+                if len(node.alternatives) == len(node.children):
+                    #last alternative
+                    subp = [tuple(node.alternatives)]
+                    node.alternatives = []
+                    node = node.parent
+                    continue
+                else:
+                    node = node.parent
+                    subp = None
+                    break
+
+            if node.type == TYPE_GROUP:
+                node.group.append(subp)
+                #probably should check the number of leaves
+                if len(node.group) == len(node.children):
+                    subp = get_characteristic_subpattern(node.group)
+                    node.group = []
+                    node = node.parent
+                    continue
+                else:
+                    node = node.parent
+                    subp = None
+                    break
+
+            if node.type == token_labels.NAME and node.name:
+                #in case of type=name, use the name instead
+                subp.append(node.name)
+            else:
+                subp.append(node.type)
+
+            node = node.parent
+        return subp
+
+    def get_linear_subpattern(self):
+        """Drives the leaf_to_root method. The reason that
+        leaf_to_root must be run multiple times is because we need to
+        reject 'group' matches; for example the alternative form
+        (a | b c) creates a group [b c] that needs to be matched. Since
+        matching multiple linear patterns exceeds the automaton's
+        capabilities, leaf_to_root merges each group into a single
+        choice based on how 'characteristic' it is,
+
+        i.e. (a|b c) -> (a|b) if b is more characteristic than c
+
+        Returns: The most 'characteristic'(as defined by
+          get_characteristic_subpattern) path for the compiled pattern
+          tree.
+        """
+
+        for l in self.leaves():
+            subp = l.leaf_to_root()
+            if subp:
+                return subp
+
+    def leaves(self):
+        "Generator that returns the leaves of the tree"
+        for child in self.children:
+            for x in child.leaves():
+                yield x
+        if not self.children:
+            yield self
+
+def reduce_tree(node, parent=None):
+    """
+    Internal function. Reduces a compiled pattern tree to an
+    intermediate representation suitable for feeding the
+    automaton. This also trims off any optional pattern elements (like
+    [a], a*).
+    """
+
+    new_node = None
+    #switch on the node type
+    if node.type == syms.Matcher:
+        #skip
+        node = node.children[0]
+
+    if node.type == syms.Alternatives  :
+        #2 cases
+        if len(node.children) <= 2:
+            #just a single 'Alternative', skip this node
+            new_node = reduce_tree(node.children[0], parent)
+        else:
+            #real alternatives
+            new_node = MinNode(type=TYPE_ALTERNATIVES)
+            #skip odd children('|' tokens)
+            for child in node.children:
+                if node.children.index(child)%2:
+                    continue
+                reduced = reduce_tree(child, new_node)
+                if reduced is not None:
+                    new_node.children.append(reduced)
+    elif node.type == syms.Alternative:
+        if len(node.children) > 1:
+
+            new_node = MinNode(type=TYPE_GROUP)
+            for child in node.children:
+                reduced = reduce_tree(child, new_node)
+                if reduced:
+                    new_node.children.append(reduced)
+            if not new_node.children:
+                # delete the group if all of the children were reduced to None
+                new_node = None
+
+        else:
+            new_node = reduce_tree(node.children[0], parent)
+
+    elif node.type == syms.Unit:
+        if (isinstance(node.children[0], pytree.Leaf) and
+            node.children[0].value == '('):
+            #skip parentheses
+            return reduce_tree(node.children[1], parent)
+        if ((isinstance(node.children[0], pytree.Leaf) and
+               node.children[0].value == '[')
+               or
+               (len(node.children)>1 and
+               hasattr(node.children[1], "value") and
+               node.children[1].value == '[')):
+            #skip whole unit if it's optional
+            return None
+
+        leaf = True
+        details_node = None
+        alternatives_node = None
+        has_repeater = False
+        repeater_node = None
+        has_variable_name = False
+
+        for child in node.children:
+            if child.type == syms.Details:
+                leaf = False
+                details_node = child
+            elif child.type == syms.Repeater:
+                has_repeater = True
+                repeater_node = child
+            elif child.type == syms.Alternatives:
+                alternatives_node = child
+            if hasattr(child, 'value') and child.value == '=': # variable name
+                has_variable_name = True
+
+        #skip variable name
+        if has_variable_name:
+            #skip variable name, '='
+            name_leaf = node.children[2]
+            if hasattr(name_leaf, 'value') and name_leaf.value == '(':
+                # skip parenthesis
+                name_leaf = node.children[3]
+        else:
+            name_leaf = node.children[0]
+
+        #set node type
+        if name_leaf.type == token_labels.NAME:
+            #(python) non-name or wildcard
+            if name_leaf.value == 'any':
+                new_node = MinNode(type=TYPE_ANY)
+            else:
+                if hasattr(token_labels, name_leaf.value):
+                    new_node = MinNode(type=getattr(token_labels, name_leaf.value))
+                else:
+                    new_node = MinNode(type=getattr(pysyms, name_leaf.value))
+
+        elif name_leaf.type == token_labels.STRING:
+            #(python) name or character; remove the apostrophes from
+            #the string value
+            name = name_leaf.value.strip("'")
+            if name in tokens:
+                new_node = MinNode(type=tokens[name])
+            else:
+                new_node = MinNode(type=token_labels.NAME, name=name)
+        elif name_leaf.type == syms.Alternatives:
+            new_node = reduce_tree(alternatives_node, parent)
+
+        #handle repeaters
+        if has_repeater:
+            if repeater_node.children[0].value == '*':
+                #reduce to None
+                new_node = None
+            elif repeater_node.children[0].value == '+':
+                #reduce to a single occurrence, i.e. do nothing
+                pass
+            else:
+                #TODO: handle {min, max} repeaters
+                raise NotImplementedError
+                pass
+
+        #add children
+        if details_node and new_node is not None:
+            for child in details_node.children[1:-1]:
+                #skip '<', '>' markers
+                reduced = reduce_tree(child, new_node)
+                if reduced is not None:
+                    new_node.children.append(reduced)
+    if new_node:
+        new_node.parent = parent
+    return new_node
+
+
+def get_characteristic_subpattern(subpatterns):
+    """Picks the most characteristic from a list of linear patterns
+    Current order used is:
+    names > common_names > common_chars
+    """
+    if not isinstance(subpatterns, list):
+        return subpatterns
+    if len(subpatterns)==1:
+        return subpatterns[0]
+
+    # first pick out the ones containing variable names
+    subpatterns_with_names = []
+    subpatterns_with_common_names = []
+    common_names = ['in', 'for', 'if' , 'not', 'None']
+    subpatterns_with_common_chars = []
+    common_chars = "[]().,:"
+    for subpattern in subpatterns:
+        if any(rec_test(subpattern, lambda x: type(x) is str)):
+            if any(rec_test(subpattern,
+                            lambda x: isinstance(x, str) and x in common_chars)):
+                subpatterns_with_common_chars.append(subpattern)
+            elif any(rec_test(subpattern,
+                              lambda x: isinstance(x, str) and x in common_names)):
+                subpatterns_with_common_names.append(subpattern)
+
+            else:
+                subpatterns_with_names.append(subpattern)
+
+    if subpatterns_with_names:
+        subpatterns = subpatterns_with_names
+    elif subpatterns_with_common_names:
+        subpatterns = subpatterns_with_common_names
+    elif subpatterns_with_common_chars:
+        subpatterns = subpatterns_with_common_chars
+    # of the remaining subpatterns pick out the longest one
+    return max(subpatterns, key=len)
+
+def rec_test(sequence, test_func):
+    """Tests test_func on all items of sequence and items of included
+    sub-iterables"""
+    for x in sequence:
+        if isinstance(x, (list, tuple)):
+            for y in rec_test(x, test_func):
+                yield y
+        else:
+            yield test_func(x)
diff --git a/python/helpers/pydev/third_party/pep8/lib2to3/lib2to3/fixer_base.py b/python/helpers/pydev/third_party/pep8/lib2to3/lib2to3/fixer_base.py
new file mode 100644
index 0000000..f6421ba
--- /dev/null
+++ b/python/helpers/pydev/third_party/pep8/lib2to3/lib2to3/fixer_base.py
@@ -0,0 +1,189 @@
+# Copyright 2006 Google, Inc. All Rights Reserved.
+# Licensed to PSF under a Contributor Agreement.
+
+"""Base class for fixers (optional, but recommended)."""
+
+# Python imports
+import logging
+import itertools
+
+# Local imports
+from .patcomp import PatternCompiler
+from . import pygram
+from .fixer_util import does_tree_import
+
+class BaseFix(object):
+
+    """Optional base class for fixers.
+
+    The subclass name must be FixFooBar where FooBar is the result of
+    removing underscores and capitalizing the words of the fix name.
+    For example, the class name for a fixer named 'has_key' should be
+    FixHasKey.
+    """
+
+    PATTERN = None  # Most subclasses should override with a string literal
+    pattern = None  # Compiled pattern, set by compile_pattern()
+    pattern_tree = None # Tree representation of the pattern
+    options = None  # Options object passed to initializer
+    filename = None # The filename (set by set_filename)
+    logger = None   # A logger (set by set_filename)
+    numbers = itertools.count(1) # For new_name()
+    used_names = set() # A set of all used NAMEs
+    order = "post" # Does the fixer prefer pre- or post-order traversal
+    explicit = False # Is this ignored by refactor.py -f all?
+    run_order = 5   # Fixers will be sorted by run order before execution
+                    # Lower numbers will be run first.
+    _accept_type = None # [Advanced and not public] This tells RefactoringTool
+                        # which node type to accept when there's not a pattern.
+
+    keep_line_order = False # For the bottom matcher: match with the
+                            # original line order
+    BM_compatible = False # Compatibility with the bottom matching
+                          # module; every fixer should set this
+                          # manually
+
+    # Shortcut for access to Python grammar symbols
+    syms = pygram.python_symbols
+
+    def __init__(self, options, log):
+        """Initializer.  Subclass may override.
+
+        Args:
+            options: a dict containing the options passed to RefactoringTool
+            that could be used to customize the fixer through the command line.
+            log: a list to append warnings and other messages to.
+        """
+        self.options = options
+        self.log = log
+        self.compile_pattern()
+
+    def compile_pattern(self):
+        """Compiles self.PATTERN into self.pattern.
+
+        Subclass may override if it doesn't want to use
+        self.{pattern,PATTERN} in .match().
+        """
+        if self.PATTERN is not None:
+            PC = PatternCompiler()
+            self.pattern, self.pattern_tree = PC.compile_pattern(self.PATTERN,
+                                                                 with_tree=True)
+
+    def set_filename(self, filename):
+        """Set the filename, and a logger derived from it.
+
+        The main refactoring tool should call this.
+        """
+        self.filename = filename
+        self.logger = logging.getLogger(filename)
+
+    def match(self, node):
+        """Returns match for a given parse tree node.
+
+        Should return a true or false object (not necessarily a bool).
+        It may return a non-empty dict of matching sub-nodes as
+        returned by a matching pattern.
+
+        Subclass may override.
+        """
+        results = {"node": node}
+        return self.pattern.match(node, results) and results
+
+    def transform(self, node, results):
+        """Returns the transformation for a given parse tree node.
+
+        Args:
+          node: the root of the parse tree that matched the fixer.
+          results: a dict mapping symbolic names to part of the match.
+
+        Returns:
+          None, or a node that is a modified copy of the
+          argument node.  The node argument may also be modified in-place to
+          effect the same change.
+
+        Subclass *must* override.
+        """
+        raise NotImplementedError()
+
+    def new_name(self, template=u"xxx_todo_changeme"):
+        """Return a string suitable for use as an identifier
+
+        The new name is guaranteed not to conflict with other identifiers.
+        """
+        name = template
+        while name in self.used_names:
+            name = template + unicode(self.numbers.next())
+        self.used_names.add(name)
+        return name
+
+    def log_message(self, message):
+        if self.first_log:
+            self.first_log = False
+            self.log.append("### In file %s ###" % self.filename)
+        self.log.append(message)
+
+    def cannot_convert(self, node, reason=None):
+        """Warn the user that a given chunk of code is not valid Python 3,
+        but that it cannot be converted automatically.
+
+        First argument is the top-level node for the code in question.
+        Optional second argument is why it can't be converted.
+        """
+        lineno = node.get_lineno()
+        for_output = node.clone()
+        for_output.prefix = u""
+        msg = "Line %d: could not convert: %s"
+        self.log_message(msg % (lineno, for_output))
+        if reason:
+            self.log_message(reason)
+
+    def warning(self, node, reason):
+        """Used for warning the user about possible uncertainty in the
+        translation.
+
+        First argument is the top-level node for the code in question.
+        Optional second argument is why it can't be converted.
+        """
+        lineno = node.get_lineno()
+        self.log_message("Line %d: %s" % (lineno, reason))
+
+    def start_tree(self, tree, filename):
+        """Some fixers need to maintain tree-wide state.
+        This method is called once, at the start of tree fix-up.
+
+        tree - the root node of the tree to be processed.
+        filename - the name of the file the tree came from.
+        """
+        self.used_names = tree.used_names
+        self.set_filename(filename)
+        self.numbers = itertools.count(1)
+        self.first_log = True
+
+    def finish_tree(self, tree, filename):
+        """Some fixers need to maintain tree-wide state.
+        This method is called once, at the conclusion of tree fix-up.
+
+        tree - the root node of the tree to be processed.
+        filename - the name of the file the tree came from.
+        """
+        pass
+
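+# Editor's note (illustrative sketch; not part of upstream lib2to3): a minimal
+# concrete fixer subclass looks roughly like
+#
+#     class FixExample(fixer_base.BaseFix):        # hypothetical fixer
+#         BM_compatible = True
+#         PATTERN = "simple_stmt< any* >"          # hypothetical, simplified pattern
+#
+#         def transform(self, node, results):
+#             return None   # return a replacement node, or None to keep the code as-is
+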
+
+class ConditionalFix(BaseFix):
+    """ Base class for fixers which not execute if an import is found. """
+
+    # This is the name of the import which, if found, will cause the test to be skipped
+    skip_on = None
+
+    def start_tree(self, *args):
+        super(ConditionalFix, self).start_tree(*args)
+        self._should_skip = None
+
+    def should_skip(self, node):
+        if self._should_skip is not None:
+            return self._should_skip
+        pkg = self.skip_on.split(".")
+        name = pkg[-1]
+        pkg = ".".join(pkg[:-1])
+        self._should_skip = does_tree_import(pkg, name, node)
+        return self._should_skip
diff --git a/python/helpers/pydev/third_party/pep8/lib2to3/lib2to3/fixer_util.py b/python/helpers/pydev/third_party/pep8/lib2to3/lib2to3/fixer_util.py
new file mode 100644
index 0000000..78fdf26
--- /dev/null
+++ b/python/helpers/pydev/third_party/pep8/lib2to3/lib2to3/fixer_util.py
@@ -0,0 +1,432 @@
+"""Utility functions, node construction macros, etc."""
+# Author: Collin Winter
+
+from itertools import islice
+
+# Local imports
+from .pgen2 import token
+from .pytree import Leaf, Node
+from .pygram import python_symbols as syms
+from . import patcomp
+
+
+###########################################################
+### Common node-construction "macros"
+###########################################################
+
+def KeywordArg(keyword, value):
+    return Node(syms.argument,
+                [keyword, Leaf(token.EQUAL, u"="), value])
+
+def LParen():
+    return Leaf(token.LPAR, u"(")
+
+def RParen():
+    return Leaf(token.RPAR, u")")
+
+def Assign(target, source):
+    """Build an assignment statement"""
+    if not isinstance(target, list):
+        target = [target]
+    if not isinstance(source, list):
+        source.prefix = u" "
+        source = [source]
+
+    return Node(syms.atom,
+                target + [Leaf(token.EQUAL, u"=", prefix=u" ")] + source)
+
+def Name(name, prefix=None):
+    """Return a NAME leaf"""
+    return Leaf(token.NAME, name, prefix=prefix)
+
+def Attr(obj, attr):
+    """A node tuple for obj.attr"""
+    return [obj, Node(syms.trailer, [Dot(), attr])]
+
+def Comma():
+    """A comma leaf"""
+    return Leaf(token.COMMA, u",")
+
+def Dot():
+    """A period (.) leaf"""
+    return Leaf(token.DOT, u".")
+
+def ArgList(args, lparen=LParen(), rparen=RParen()):
+    """A parenthesised argument list, used by Call()"""
+    node = Node(syms.trailer, [lparen.clone(), rparen.clone()])
+    if args:
+        node.insert_child(1, Node(syms.arglist, args))
+    return node
+
+def Call(func_name, args=None, prefix=None):
+    """A function call"""
+    node = Node(syms.power, [func_name, ArgList(args)])
+    if prefix is not None:
+        node.prefix = prefix
+    return node
+
+def Newline():
+    """A newline literal"""
+    return Leaf(token.NEWLINE, u"\n")
+
+def BlankLine():
+    """A blank line"""
+    return Leaf(token.NEWLINE, u"")
+
+def Number(n, prefix=None):
+    return Leaf(token.NUMBER, n, prefix=prefix)
+
+def Subscript(index_node):
+    """A numeric or string subscript"""
+    return Node(syms.trailer, [Leaf(token.LBRACE, u"["),
+                               index_node,
+                               Leaf(token.RBRACE, u"]")])
+
+def String(string, prefix=None):
+    """A string leaf"""
+    return Leaf(token.STRING, string, prefix=prefix)
+
+def ListComp(xp, fp, it, test=None):
+    """A list comprehension of the form [xp for fp in it if test].
+
+    If test is None, the "if test" part is omitted.
+    """
+    xp.prefix = u""
+    fp.prefix = u" "
+    it.prefix = u" "
+    for_leaf = Leaf(token.NAME, u"for")
+    for_leaf.prefix = u" "
+    in_leaf = Leaf(token.NAME, u"in")
+    in_leaf.prefix = u" "
+    inner_args = [for_leaf, fp, in_leaf, it]
+    if test:
+        test.prefix = u" "
+        if_leaf = Leaf(token.NAME, u"if")
+        if_leaf.prefix = u" "
+        inner_args.append(Node(syms.comp_if, [if_leaf, test]))
+    inner = Node(syms.listmaker, [xp, Node(syms.comp_for, inner_args)])
+    return Node(syms.atom,
+                       [Leaf(token.LBRACE, u"["),
+                        inner,
+                        Leaf(token.RBRACE, u"]")])
+
+def FromImport(package_name, name_leafs):
+    """ Return an import statement in the form:
+        from package import name_leafs"""
+    # XXX: May not handle dotted imports properly (eg, package_name='foo.bar')
+    #assert package_name == '.' or '.' not in package_name, "FromImport has "\
+    #       "not been tested with dotted package names -- use at your own "\
+    #       "peril!"
+
+    for leaf in name_leafs:
+        # Pull the leaves out of their old tree
+        leaf.remove()
+
+    children = [Leaf(token.NAME, u"from"),
+                Leaf(token.NAME, package_name, prefix=u" "),
+                Leaf(token.NAME, u"import", prefix=u" "),
+                Node(syms.import_as_names, name_leafs)]
+    imp = Node(syms.import_from, children)
+    return imp
+
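+# Editor's note (illustrative sketch; not part of upstream lib2to3): the macros
+# above compose into ordinary pytree nodes, e.g.
+#
+#     call = Call(Name(u"print"), [String(u"'hi'")], prefix=u" ")
+#     str(call)   # -> " print('hi')"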
+
+###########################################################
+### Determine whether a node represents a given literal
+###########################################################
+
+def is_tuple(node):
+    """Does the node represent a tuple literal?"""
+    if isinstance(node, Node) and node.children == [LParen(), RParen()]:
+        return True
+    return (isinstance(node, Node)
+            and len(node.children) == 3
+            and isinstance(node.children[0], Leaf)
+            and isinstance(node.children[1], Node)
+            and isinstance(node.children[2], Leaf)
+            and node.children[0].value == u"("
+            and node.children[2].value == u")")
+
+def is_list(node):
+    """Does the node represent a list literal?"""
+    return (isinstance(node, Node)
+            and len(node.children) > 1
+            and isinstance(node.children[0], Leaf)
+            and isinstance(node.children[-1], Leaf)
+            and node.children[0].value == u"["
+            and node.children[-1].value == u"]")
+
+
+###########################################################
+### Misc
+###########################################################
+
+def parenthesize(node):
+    return Node(syms.atom, [LParen(), node, RParen()])
+
+
+consuming_calls = set(["sorted", "list", "set", "any", "all", "tuple", "sum",
+                       "min", "max", "enumerate"])
+
+def attr_chain(obj, attr):
+    """Follow an attribute chain.
+
+    If you have a chain of objects where a.foo -> b, b.foo -> c, etc,
+    use this to iterate over all objects in the chain. Iteration
+    terminates when getattr(x, attr) is None.
+
+    Args:
+        obj: the starting object
+        attr: the name of the chaining attribute
+
+    Yields:
+        Each successive object in the chain.
+    """
+    next = getattr(obj, attr)
+    while next:
+        yield next
+        next = getattr(next, attr)
+
+p0 = """for_stmt< 'for' any 'in' node=any ':' any* >
+        | comp_for< 'for' any 'in' node=any any* >
+     """
+p1 = """
+power<
+    ( 'iter' | 'list' | 'tuple' | 'sorted' | 'set' | 'sum' |
+      'any' | 'all' | 'enumerate' | (any* trailer< '.' 'join' >) )
+    trailer< '(' node=any ')' >
+    any*
+>
+"""
+p2 = """
+power<
+    ( 'sorted' | 'enumerate' )
+    trailer< '(' arglist<node=any any*> ')' >
+    any*
+>
+"""
+pats_built = False
+def in_special_context(node):
+    """ Returns true if node is in an environment where all that is required
+        of it is being iterable (ie, it doesn't matter if it returns a list
+        or an iterator).
+        See test_map_nochange in test_fixers.py for some examples and tests.
+        """
+    global p0, p1, p2, pats_built
+    if not pats_built:
+        p0 = patcomp.compile_pattern(p0)
+        p1 = patcomp.compile_pattern(p1)
+        p2 = patcomp.compile_pattern(p2)
+        pats_built = True
+    patterns = [p0, p1, p2]
+    for pattern, parent in zip(patterns, attr_chain(node, "parent")):
+        results = {}
+        if pattern.match(parent, results) and results["node"] is node:
+            return True
+    return False
+
+def is_probably_builtin(node):
+    """
+    Check that something isn't an attribute or function name etc.
+    """
+    prev = node.prev_sibling
+    if prev is not None and prev.type == token.DOT:
+        # Attribute lookup.
+        return False
+    parent = node.parent
+    if parent.type in (syms.funcdef, syms.classdef):
+        return False
+    if parent.type == syms.expr_stmt and parent.children[0] is node:
+        # Assignment.
+        return False
+    if parent.type == syms.parameters or \
+            (parent.type == syms.typedargslist and (
+            (prev is not None and prev.type == token.COMMA) or
+            parent.children[0] is node
+            )):
+        # The name of an argument.
+        return False
+    return True
+
+def find_indentation(node):
+    """Find the indentation of *node*."""
+    while node is not None:
+        if node.type == syms.suite and len(node.children) > 2:
+            indent = node.children[1]
+            if indent.type == token.INDENT:
+                return indent.value
+        node = node.parent
+    return u""
+
+###########################################################
+### The following functions are to find bindings in a suite
+###########################################################
+
+def make_suite(node):
+    if node.type == syms.suite:
+        return node
+    node = node.clone()
+    parent, node.parent = node.parent, None
+    suite = Node(syms.suite, [node])
+    suite.parent = parent
+    return suite
+
+def find_root(node):
+    """Find the top level namespace."""
+    # Scamper up to the top level namespace
+    while node.type != syms.file_input:
+        node = node.parent
+        if not node:
+            raise ValueError("root found before file_input node was found.")
+    return node
+
+def does_tree_import(package, name, node):
+    """ Returns true if name is imported from package at the
+        top level of the tree which node belongs to.
+        To cover the case of an import like 'import foo', use
+        None for the package and 'foo' for the name. """
+    binding = find_binding(name, find_root(node), package)
+    return bool(binding)
+
+def is_import(node):
+    """Returns true if the node is an import statement."""
+    return node.type in (syms.import_name, syms.import_from)
+
+def touch_import(package, name, node):
+    """ Works like `does_tree_import` but adds an import statement
+        if it was not imported. """
+    def is_import_stmt(node):
+        return (node.type == syms.simple_stmt and node.children and
+                is_import(node.children[0]))
+
+    root = find_root(node)
+
+    if does_tree_import(package, name, root):
+        return
+
+    # figure out where to insert the new import.  First try to find
+    # the first import and then skip to the last one.
+    insert_pos = offset = 0
+    for idx, node in enumerate(root.children):
+        if not is_import_stmt(node):
+            continue
+        for offset, node2 in enumerate(root.children[idx:]):
+            if not is_import_stmt(node2):
+                break
+        insert_pos = idx + offset
+        break
+
+    # if there are no imports where we can insert, find the docstring.
+    # if that also fails, we stick to the beginning of the file
+    if insert_pos == 0:
+        for idx, node in enumerate(root.children):
+            if (node.type == syms.simple_stmt and node.children and
+               node.children[0].type == token.STRING):
+                insert_pos = idx + 1
+                break
+
+    if package is None:
+        import_ = Node(syms.import_name, [
+            Leaf(token.NAME, u"import"),
+            Leaf(token.NAME, name, prefix=u" ")
+        ])
+    else:
+        import_ = FromImport(package, [Leaf(token.NAME, name, prefix=u" ")])
+
+    children = [import_, Newline()]
+    root.insert_child(insert_pos, Node(syms.simple_stmt, children))
+
+
+_def_syms = set([syms.classdef, syms.funcdef])
+def find_binding(name, node, package=None):
+    """ Returns the node which binds variable name, otherwise None.
+        If optional argument package is supplied, only imports will
+        be returned.
+        See test cases for examples."""
+    for child in node.children:
+        ret = None
+        if child.type == syms.for_stmt:
+            if _find(name, child.children[1]):
+                return child
+            n = find_binding(name, make_suite(child.children[-1]), package)
+            if n: ret = n
+        elif child.type in (syms.if_stmt, syms.while_stmt):
+            n = find_binding(name, make_suite(child.children[-1]), package)
+            if n: ret = n
+        elif child.type == syms.try_stmt:
+            n = find_binding(name, make_suite(child.children[2]), package)
+            if n:
+                ret = n
+            else:
+                for i, kid in enumerate(child.children[3:]):
+                    if kid.type == token.COLON and kid.value == ":":
+                        # i+3 is the colon, i+4 is the suite
+                        n = find_binding(name, make_suite(child.children[i+4]), package)
+                        if n: ret = n
+        elif child.type in _def_syms and child.children[1].value == name:
+            ret = child
+        elif _is_import_binding(child, name, package):
+            ret = child
+        elif child.type == syms.simple_stmt:
+            ret = find_binding(name, child, package)
+        elif child.type == syms.expr_stmt:
+            if _find(name, child.children[0]):
+                ret = child
+
+        if ret:
+            if not package:
+                return ret
+            if is_import(ret):
+                return ret
+    return None
+
+_block_syms = set([syms.funcdef, syms.classdef, syms.trailer])
+def _find(name, node):
+    nodes = [node]
+    while nodes:
+        node = nodes.pop()
+        if node.type > 256 and node.type not in _block_syms:
+            nodes.extend(node.children)
+        elif node.type == token.NAME and node.value == name:
+            return node
+    return None
+
+def _is_import_binding(node, name, package=None):
+    """ Will reuturn node if node will import name, or node
+        will import * from package.  None is returned otherwise.
+        See test cases for examples. """
+
+    if node.type == syms.import_name and not package:
+        imp = node.children[1]
+        if imp.type == syms.dotted_as_names:
+            for child in imp.children:
+                if child.type == syms.dotted_as_name:
+                    if child.children[2].value == name:
+                        return node
+                elif child.type == token.NAME and child.value == name:
+                    return node
+        elif imp.type == syms.dotted_as_name:
+            last = imp.children[-1]
+            if last.type == token.NAME and last.value == name:
+                return node
+        elif imp.type == token.NAME and imp.value == name:
+            return node
+    elif node.type == syms.import_from:
+        # unicode(...) is used to make life easier here, because
+        # from a.b import parses to ['import', ['a', '.', 'b'], ...]
+        if package and unicode(node.children[1]).strip() != package:
+            return None
+        n = node.children[3]
+        if package and _find(u"as", n):
+            # See test_from_import_as for explanation
+            return None
+        elif n.type == syms.import_as_names and _find(name, n):
+            return node
+        elif n.type == syms.import_as_name:
+            child = n.children[2]
+            if child.type == token.NAME and child.value == name:
+                return node
+        elif n.type == token.NAME and n.value == name:
+            return node
+        elif package and n.type == token.STAR:
+            return node
+    return None
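The helpers above (touch_import, find_binding and friends) are easiest to understand on a small parse tree. The following is a minimal sketch, assuming the standard library's lib2to3 (of which this directory is a vendored copy) is importable under its usual name; the parsed source and the variable names are placeholders:

    from lib2to3 import pygram, pytree
    from lib2to3.pgen2 import driver
    from lib2to3.fixer_util import touch_import, find_binding

    d = driver.Driver(pygram.python_grammar, convert=pytree.convert)
    tree = d.parse_string("x = 1\n")

    touch_import(None, "collections", tree)  # inserts "import collections" at the top
    print(str(tree))                # "import collections\nx = 1\n"
    print(find_binding("x", tree))  # the expr_stmt node, prints "x = 1"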
diff --git a/python/helpers/pydev/third_party/pep8/lib2to3/lib2to3/fixes/__init__.py b/python/helpers/pydev/third_party/pep8/lib2to3/lib2to3/fixes/__init__.py
new file mode 100644
index 0000000..b93054b
--- /dev/null
+++ b/python/helpers/pydev/third_party/pep8/lib2to3/lib2to3/fixes/__init__.py
@@ -0,0 +1 @@
+# Dummy file to make this directory a package.
diff --git a/python/helpers/pydev/third_party/pep8/lib2to3/lib2to3/fixes/fix_apply.py b/python/helpers/pydev/third_party/pep8/lib2to3/lib2to3/fixes/fix_apply.py
new file mode 100644
index 0000000..a7dc3a0
--- /dev/null
+++ b/python/helpers/pydev/third_party/pep8/lib2to3/lib2to3/fixes/fix_apply.py
@@ -0,0 +1,59 @@
+# Copyright 2006 Google, Inc. All Rights Reserved.
+# Licensed to PSF under a Contributor Agreement.
+
+"""Fixer for apply().
+
+This converts apply(func, v, k) into (func)(*v, **k)."""
+
+# Local imports
+from .. import pytree
+from ..pgen2 import token
+from .. import fixer_base
+from ..fixer_util import Call, Comma, parenthesize
+
+class FixApply(fixer_base.BaseFix):
+    BM_compatible = True
+
+    PATTERN = """
+    power< 'apply'
+        trailer<
+            '('
+            arglist<
+                (not argument<NAME '=' any>) func=any ','
+                (not argument<NAME '=' any>) args=any [','
+                (not argument<NAME '=' any>) kwds=any] [',']
+            >
+            ')'
+        >
+    >
+    """
+
+    def transform(self, node, results):
+        syms = self.syms
+        assert results
+        func = results["func"]
+        args = results["args"]
+        kwds = results.get("kwds")
+        prefix = node.prefix
+        func = func.clone()
+        if (func.type not in (token.NAME, syms.atom) and
+            (func.type != syms.power or
+             func.children[-2].type == token.DOUBLESTAR)):
+            # Need to parenthesize
+            func = parenthesize(func)
+        func.prefix = ""
+        args = args.clone()
+        args.prefix = ""
+        if kwds is not None:
+            kwds = kwds.clone()
+            kwds.prefix = ""
+        l_newargs = [pytree.Leaf(token.STAR, u"*"), args]
+        if kwds is not None:
+            l_newargs.extend([Comma(),
+                              pytree.Leaf(token.DOUBLESTAR, u"**"),
+                              kwds])
+            l_newargs[-2].prefix = u" " # that's the ** token
+        # XXX Sometimes we could be cleverer, e.g. apply(f, (x, y) + t)
+        # can be translated into f(x, y, *t) instead of f(*(x, y) + t)
+        #new = pytree.Node(syms.power, (func, ArgList(l_newargs)))
+        return Call(func, l_newargs, prefix=prefix)
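A quick way to exercise any of these fixers is lib2to3's RefactoringTool driver. A minimal sketch, assuming the standard library's lib2to3 is importable (this tree is a vendored copy of it); f, args and kwds are placeholder names:

    from lib2to3.refactor import RefactoringTool

    rt = RefactoringTool(["lib2to3.fixes.fix_apply"])
    tree = rt.refactor_string("apply(f, args, kwds)\n", "<example>")
    print(str(tree))  # f(*args, **kwds)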
diff --git a/python/helpers/pydev/third_party/pep8/lib2to3/lib2to3/fixes/fix_basestring.py b/python/helpers/pydev/third_party/pep8/lib2to3/lib2to3/fixes/fix_basestring.py
new file mode 100644
index 0000000..a3c9a43
--- /dev/null
+++ b/python/helpers/pydev/third_party/pep8/lib2to3/lib2to3/fixes/fix_basestring.py
@@ -0,0 +1,14 @@
+"""Fixer for basestring -> str."""
+# Author: Christian Heimes
+
+# Local imports
+from .. import fixer_base
+from ..fixer_util import Name
+
+class FixBasestring(fixer_base.BaseFix):
+    BM_compatible = True
+
+    PATTERN = "'basestring'"
+
+    def transform(self, node, results):
+        return Name(u"str", prefix=node.prefix)
diff --git a/python/helpers/pydev/third_party/pep8/lib2to3/lib2to3/fixes/fix_buffer.py b/python/helpers/pydev/third_party/pep8/lib2to3/lib2to3/fixes/fix_buffer.py
new file mode 100644
index 0000000..c6b0928
--- /dev/null
+++ b/python/helpers/pydev/third_party/pep8/lib2to3/lib2to3/fixes/fix_buffer.py
@@ -0,0 +1,22 @@
+# Copyright 2007 Google, Inc. All Rights Reserved.
+# Licensed to PSF under a Contributor Agreement.
+
+"""Fixer that changes buffer(...) into memoryview(...)."""
+
+# Local imports
+from .. import fixer_base
+from ..fixer_util import Name
+
+
+class FixBuffer(fixer_base.BaseFix):
+    BM_compatible = True
+
+    explicit = True # The user must ask for this fixer
+
+    PATTERN = """
+              power< name='buffer' trailer< '(' [any] ')' > any* >
+              """
+
+    def transform(self, node, results):
+        name = results["name"]
+        name.replace(Name(u"memoryview", prefix=name.prefix))
diff --git a/python/helpers/pydev/third_party/pep8/lib2to3/lib2to3/fixes/fix_callable.py b/python/helpers/pydev/third_party/pep8/lib2to3/lib2to3/fixes/fix_callable.py
new file mode 100644
index 0000000..df33d61
--- /dev/null
+++ b/python/helpers/pydev/third_party/pep8/lib2to3/lib2to3/fixes/fix_callable.py
@@ -0,0 +1,37 @@
+# Copyright 2007 Google, Inc. All Rights Reserved.
+# Licensed to PSF under a Contributor Agreement.
+
+"""Fixer for callable().
+
+This converts callable(obj) into isinstance(obj, collections.Callable), adding a
+collections import if needed."""
+
+# Local imports
+from lib2to3 import fixer_base
+from lib2to3.fixer_util import Call, Name, String, Attr, touch_import
+
+class FixCallable(fixer_base.BaseFix):
+    BM_compatible = True
+
+    order = "pre"
+
+    # Ignore callable(*args) or use of keywords.
+    # Either could be a hint that the builtin callable() is not being used.
+    PATTERN = """
+    power< 'callable'
+           trailer< lpar='('
+                    ( not(arglist | argument<any '=' any>) func=any
+                      | func=arglist<(not argument<any '=' any>) any ','> )
+                    rpar=')' >
+           after=any*
+    >
+    """
+
+    def transform(self, node, results):
+        func = results['func']
+
+        touch_import(None, u'collections', node=node)
+
+        args = [func.clone(), String(u', ')]
+        args.extend(Attr(Name(u'collections'), Name(u'Callable')))
+        return Call(Name(u'isinstance'), args, prefix=node.prefix)
diff --git a/python/helpers/pydev/third_party/pep8/lib2to3/lib2to3/fixes/fix_dict.py b/python/helpers/pydev/third_party/pep8/lib2to3/lib2to3/fixes/fix_dict.py
new file mode 100644
index 0000000..f681e4d
--- /dev/null
+++ b/python/helpers/pydev/third_party/pep8/lib2to3/lib2to3/fixes/fix_dict.py
@@ -0,0 +1,107 @@
+# Copyright 2007 Google, Inc. All Rights Reserved.
+# Licensed to PSF under a Contributor Agreement.
+
+"""Fixer for dict methods.
+
+d.keys() -> list(d.keys())
+d.items() -> list(d.items())
+d.values() -> list(d.values())
+
+d.iterkeys() -> iter(d.keys())
+d.iteritems() -> iter(d.items())
+d.itervalues() -> iter(d.values())
+
+d.viewkeys() -> d.keys()
+d.viewitems() -> d.items()
+d.viewvalues() -> d.values()
+
+Except in certain very specific contexts: the iter() can be dropped
+when the context is list(), sorted(), iter() or for...in; the list()
+can be dropped when the context is list() or sorted() (but not iter()
+or for...in!). Special contexts that apply to both: list(), sorted(), tuple(),
+set(), any(), all(), sum().
+
+Note: iter(d.keys()) could be written as iter(d) but since the
+original d.iterkeys() was also redundant we don't fix this.  And there
+are (rare) contexts where it makes a difference (e.g. when passing it
+as an argument to a function that introspects the argument).
+"""
+
+# Local imports
+from .. import pytree
+from .. import patcomp
+from ..pgen2 import token
+from .. import fixer_base
+from ..fixer_util import Name, Call, LParen, RParen, ArgList, Dot
+from .. import fixer_util
+
+
+iter_exempt = fixer_util.consuming_calls | set(["iter"])
+
+
+class FixDict(fixer_base.BaseFix):
+    BM_compatible = True
+
+    PATTERN = """
+    power< head=any+
+         trailer< '.' method=('keys'|'items'|'values'|
+                              'iterkeys'|'iteritems'|'itervalues'|
+                              'viewkeys'|'viewitems'|'viewvalues') >
+         parens=trailer< '(' ')' >
+         tail=any*
+    >
+    """
+
+    def transform(self, node, results):
+        head = results["head"]
+        method = results["method"][0] # Extract node for method name
+        tail = results["tail"]
+        syms = self.syms
+        method_name = method.value
+        isiter = method_name.startswith(u"iter")
+        isview = method_name.startswith(u"view")
+        if isiter or isview:
+            method_name = method_name[4:]
+        assert method_name in (u"keys", u"items", u"values"), repr(method)
+        head = [n.clone() for n in head]
+        tail = [n.clone() for n in tail]
+        special = not tail and self.in_special_context(node, isiter)
+        args = head + [pytree.Node(syms.trailer,
+                                   [Dot(),
+                                    Name(method_name,
+                                         prefix=method.prefix)]),
+                       results["parens"].clone()]
+        new = pytree.Node(syms.power, args)
+        if not (special or isview):
+            new.prefix = u""
+            new = Call(Name(u"iter" if isiter else u"list"), [new])
+        if tail:
+            new = pytree.Node(syms.power, [new] + tail)
+        new.prefix = node.prefix
+        return new
+
+    P1 = "power< func=NAME trailer< '(' node=any ')' > any* >"
+    p1 = patcomp.compile_pattern(P1)
+
+    P2 = """for_stmt< 'for' any 'in' node=any ':' any* >
+            | comp_for< 'for' any 'in' node=any any* >
+         """
+    p2 = patcomp.compile_pattern(P2)
+
+    def in_special_context(self, node, isiter):
+        if node.parent is None:
+            return False
+        results = {}
+        if (node.parent.parent is not None and
+               self.p1.match(node.parent.parent, results) and
+               results["node"] is node):
+            if isiter:
+                # iter(d.iterkeys()) -> iter(d.keys()), etc.
+                return results["func"].value in iter_exempt
+            else:
+                # list(d.keys()) -> list(d.keys()), etc.
+                return results["func"].value in fixer_util.consuming_calls
+        if not isiter:
+            return False
+        # for ... in d.iterkeys() -> for ... in d.keys(), etc.
+        return self.p2.match(node.parent, results) and results["node"] is node
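The special-context rules above are the subtle part, so a few driven examples help. A hedged sketch using the stock lib2to3 driver (vendored here); d is a placeholder dict:

    from lib2to3.refactor import RefactoringTool

    rt = RefactoringTool(["lib2to3.fixes.fix_dict"])
    print(str(rt.refactor_string("d.keys()\n", "<example>")))      # list(d.keys())
    print(str(rt.refactor_string("d.iterkeys()\n", "<example>")))  # iter(d.keys())
    # Inside a for-loop the iter() wrapper is dropped:
    print(str(rt.refactor_string("for k in d.iterkeys(): pass\n", "<example>")))
    # for k in d.keys(): pass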
diff --git a/python/helpers/pydev/third_party/pep8/lib2to3/lib2to3/fixes/fix_except.py b/python/helpers/pydev/third_party/pep8/lib2to3/lib2to3/fixes/fix_except.py
new file mode 100644
index 0000000..e324718
--- /dev/null
+++ b/python/helpers/pydev/third_party/pep8/lib2to3/lib2to3/fixes/fix_except.py
@@ -0,0 +1,93 @@
+"""Fixer for except statements with named exceptions.
+
+The following cases will be converted:
+
+- "except E, T:" where T is a name:
+
+    except E as T:
+
+- "except E, T:" where T is not a name, tuple or list:
+
+        except E as t:
+            T = t
+
+    This is done because the target of an "except" clause must be a
+    name.
+
+- "except E, T:" where T is a tuple or list literal:
+
+        except E as t:
+            T = t.args
+"""
+# Author: Collin Winter
+
+# Local imports
+from .. import pytree
+from ..pgen2 import token
+from .. import fixer_base
+from ..fixer_util import Assign, Attr, Name, is_tuple, is_list, syms
+
+def find_excepts(nodes):
+    for i, n in enumerate(nodes):
+        if n.type == syms.except_clause:
+            if n.children[0].value == u'except':
+                yield (n, nodes[i+2])
+
+class FixExcept(fixer_base.BaseFix):
+    BM_compatible = True
+
+    PATTERN = """
+    try_stmt< 'try' ':' (simple_stmt | suite)
+                  cleanup=(except_clause ':' (simple_stmt | suite))+
+                  tail=(['except' ':' (simple_stmt | suite)]
+                        ['else' ':' (simple_stmt | suite)]
+                        ['finally' ':' (simple_stmt | suite)]) >
+    """
+
+    def transform(self, node, results):
+        syms = self.syms
+
+        tail = [n.clone() for n in results["tail"]]
+
+        try_cleanup = [ch.clone() for ch in results["cleanup"]]
+        for except_clause, e_suite in find_excepts(try_cleanup):
+            if len(except_clause.children) == 4:
+                (E, comma, N) = except_clause.children[1:4]
+                comma.replace(Name(u"as", prefix=u" "))
+
+                if N.type != token.NAME:
+                    # Generate a new N for the except clause
+                    new_N = Name(self.new_name(), prefix=u" ")
+                    target = N.clone()
+                    target.prefix = u""
+                    N.replace(new_N)
+                    new_N = new_N.clone()
+
+                    # Insert "old_N = new_N" as the first statement in
+                    #  the except body. This loop skips leading whitespace
+                    #  and indents
+                    #TODO(cwinter) suite-cleanup
+                    suite_stmts = e_suite.children
+                    for i, stmt in enumerate(suite_stmts):
+                        if isinstance(stmt, pytree.Node):
+                            break
+
+                    # The assignment is different if old_N is a tuple or list
+                    # In that case, the assignment is old_N = new_N.args
+                    if is_tuple(N) or is_list(N):
+                        assign = Assign(target, Attr(new_N, Name(u'args')))
+                    else:
+                        assign = Assign(target, new_N)
+
+                    #TODO(cwinter) stopgap until children becomes a smart list
+                    for child in reversed(suite_stmts[:i]):
+                        e_suite.insert_child(0, child)
+                    e_suite.insert_child(i, assign)
+                elif N.prefix == u"":
+                    # No space after a comma is legal; no space after "as",
+                    # not so much.
+                    N.prefix = u" "
+
+        #TODO(cwinter) fix this when children becomes a smart list
+        children = [c.clone() for c in node.children[:3]] + try_cleanup + tail
+        return pytree.Node(node.type, children)
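For the simple named-target case the rewrite is just a comma-to-'as' swap. A hedged sketch with the stock lib2to3 driver (vendored here):

    from lib2to3.refactor import RefactoringTool

    rt = RefactoringTool(["lib2to3.fixes.fix_except"])
    src = "try:\n    pass\nexcept Exception, e:\n    pass\n"
    print(str(rt.refactor_string(src, "<example>")))
    # try:
    #     pass
    # except Exception as e:
    #     pass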
diff --git a/python/helpers/pydev/third_party/pep8/lib2to3/lib2to3/fixes/fix_exec.py b/python/helpers/pydev/third_party/pep8/lib2to3/lib2to3/fixes/fix_exec.py
new file mode 100644
index 0000000..50e1854
--- /dev/null
+++ b/python/helpers/pydev/third_party/pep8/lib2to3/lib2to3/fixes/fix_exec.py
@@ -0,0 +1,40 @@
+# Copyright 2006 Google, Inc. All Rights Reserved.
+# Licensed to PSF under a Contributor Agreement.
+
+"""Fixer for exec.
+
+This converts usages of the exec statement into calls to a built-in
+exec() function.
+
+exec code in ns1, ns2 -> exec(code, ns1, ns2)
+"""
+
+# Local imports
+from .. import pytree
+from .. import fixer_base
+from ..fixer_util import Comma, Name, Call
+
+
+class FixExec(fixer_base.BaseFix):
+    BM_compatible = True
+
+    PATTERN = """
+    exec_stmt< 'exec' a=any 'in' b=any [',' c=any] >
+    |
+    exec_stmt< 'exec' (not atom<'(' [any] ')'>) a=any >
+    """
+
+    def transform(self, node, results):
+        assert results
+        syms = self.syms
+        a = results["a"]
+        b = results.get("b")
+        c = results.get("c")
+        args = [a.clone()]
+        args[0].prefix = ""
+        if b is not None:
+            args.extend([Comma(), b.clone()])
+        if c is not None:
+            args.extend([Comma(), c.clone()])
+
+        return Call(Name(u"exec"), args, prefix=node.prefix)
diff --git a/python/helpers/pydev/third_party/pep8/lib2to3/lib2to3/fixes/fix_execfile.py b/python/helpers/pydev/third_party/pep8/lib2to3/lib2to3/fixes/fix_execfile.py
new file mode 100644
index 0000000..2f29d3b
--- /dev/null
+++ b/python/helpers/pydev/third_party/pep8/lib2to3/lib2to3/fixes/fix_execfile.py
@@ -0,0 +1,52 @@
+# Copyright 2006 Google, Inc. All Rights Reserved.
+# Licensed to PSF under a Contributor Agreement.
+
+"""Fixer for execfile.
+
+This converts usages of the execfile function into calls to the built-in
+exec() function.
+"""
+
+from .. import fixer_base
+from ..fixer_util import (Comma, Name, Call, LParen, RParen, Dot, Node,
+                          ArgList, String, syms)
+
+
+class FixExecfile(fixer_base.BaseFix):
+    BM_compatible = True
+
+    PATTERN = """
+    power< 'execfile' trailer< '(' arglist< filename=any [',' globals=any [',' locals=any ] ] > ')' > >
+    |
+    power< 'execfile' trailer< '(' filename=any ')' > >
+    """
+
+    def transform(self, node, results):
+        assert results
+        filename = results["filename"]
+        globals = results.get("globals")
+        locals = results.get("locals")
+
+        # Copy over the prefix from the right parentheses end of the execfile
+        # call.
+        execfile_paren = node.children[-1].children[-1].clone()
+        # Construct open().read().
+        open_args = ArgList([filename.clone()], rparen=execfile_paren)
+        open_call = Node(syms.power, [Name(u"open"), open_args])
+        read = [Node(syms.trailer, [Dot(), Name(u'read')]),
+                Node(syms.trailer, [LParen(), RParen()])]
+        open_expr = [open_call] + read
+        # Wrap the open call in a compile call. This is so the filename will be
+        # preserved in the execed code.
+        filename_arg = filename.clone()
+        filename_arg.prefix = u" "
+        exec_str = String(u"'exec'", u" ")
+        compile_args = open_expr + [Comma(), filename_arg, Comma(), exec_str]
+        compile_call = Call(Name(u"compile"), compile_args, u"")
+        # Finally, replace the execfile call with an exec call.
+        args = [compile_call]
+        if globals is not None:
+            args.extend([Comma(), globals.clone()])
+        if locals is not None:
+            args.extend([Comma(), locals.clone()])
+        return Call(Name(u"exec"), args, prefix=node.prefix)
diff --git a/python/helpers/pydev/third_party/pep8/lib2to3/lib2to3/fixes/fix_exitfunc.py b/python/helpers/pydev/third_party/pep8/lib2to3/lib2to3/fixes/fix_exitfunc.py
new file mode 100644
index 0000000..89fb3db
--- /dev/null
+++ b/python/helpers/pydev/third_party/pep8/lib2to3/lib2to3/fixes/fix_exitfunc.py
@@ -0,0 +1,72 @@
+"""
+Convert use of sys.exitfunc to use the atexit module.
+"""
+
+# Author: Benjamin Peterson
+
+from lib2to3 import pytree, fixer_base
+from lib2to3.fixer_util import Name, Attr, Call, Comma, Newline, syms
+
+
+class FixExitfunc(fixer_base.BaseFix):
+    keep_line_order = True
+    BM_compatible = True
+
+    PATTERN = """
+              (
+                  sys_import=import_name<'import'
+                      ('sys'
+                      |
+                      dotted_as_names< (any ',')* 'sys' (',' any)* >
+                      )
+                  >
+              |
+                  expr_stmt<
+                      power< 'sys' trailer< '.' 'exitfunc' > >
+                  '=' func=any >
+              )
+              """
+
+    def __init__(self, *args):
+        super(FixExitfunc, self).__init__(*args)
+
+    def start_tree(self, tree, filename):
+        super(FixExitfunc, self).start_tree(tree, filename)
+        self.sys_import = None
+
+    def transform(self, node, results):
+        # First, find the sys import. We'll just hope it's at global scope.
+        if "sys_import" in results:
+            if self.sys_import is None:
+                self.sys_import = results["sys_import"]
+            return
+
+        func = results["func"].clone()
+        func.prefix = u""
+        register = pytree.Node(syms.power,
+                               Attr(Name(u"atexit"), Name(u"register"))
+                               )
+        call = Call(register, [func], node.prefix)
+        node.replace(call)
+
+        if self.sys_import is None:
+            # No sys import was seen, so we can't place the atexit import.
+            self.warning(node, "Can't find sys import; Please add an atexit "
+                             "import at the top of your file.")
+            return
+
+        # Now add an atexit import after the sys import.
+        names = self.sys_import.children[1]
+        if names.type == syms.dotted_as_names:
+            names.append_child(Comma())
+            names.append_child(Name(u"atexit", u" "))
+        else:
+            containing_stmt = self.sys_import.parent
+            position = containing_stmt.children.index(self.sys_import)
+            stmt_container = containing_stmt.parent
+            new_import = pytree.Node(syms.import_name,
+                              [Name(u"import"), Name(u"atexit", u" ")]
+                              )
+            new = pytree.Node(syms.simple_stmt, [new_import])
+            containing_stmt.insert_child(position + 1, Newline())
+            containing_stmt.insert_child(position + 2, new)
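A hedged sketch of both halves of the transformation: the assignment becomes an atexit.register() call and an atexit import is added after the sys import (stock lib2to3 assumed; cleanup is a placeholder):

    from lib2to3.refactor import RefactoringTool

    rt = RefactoringTool(["lib2to3.fixes.fix_exitfunc"])
    src = "import sys\nsys.exitfunc = cleanup\n"
    print(str(rt.refactor_string(src, "<example>")))
    # import sys
    # import atexit
    # atexit.register(cleanup)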
diff --git a/python/helpers/pydev/third_party/pep8/lib2to3/lib2to3/fixes/fix_filter.py b/python/helpers/pydev/third_party/pep8/lib2to3/lib2to3/fixes/fix_filter.py
new file mode 100644
index 0000000..18ee2ff
--- /dev/null
+++ b/python/helpers/pydev/third_party/pep8/lib2to3/lib2to3/fixes/fix_filter.py
@@ -0,0 +1,76 @@
+# Copyright 2007 Google, Inc. All Rights Reserved.
+# Licensed to PSF under a Contributor Agreement.
+
+"""Fixer that changes filter(F, X) into list(filter(F, X)).
+
+We avoid the transformation if the filter() call is directly contained
+in iter(<>), list(<>), tuple(<>), sorted(<>), ...join(<>), or
+for V in <>:.
+
+NOTE: This is still not correct if the original code was depending on
+filter(F, X) to return a string if X is a string and a tuple if X is a
+tuple.  That would require type inference, which we don't do.  Let
+Python 2.6 figure it out.
+"""
+
+# Local imports
+from ..pgen2 import token
+from .. import fixer_base
+from ..fixer_util import Name, Call, ListComp, in_special_context
+
+class FixFilter(fixer_base.ConditionalFix):
+    BM_compatible = True
+
+    PATTERN = """
+    filter_lambda=power<
+        'filter'
+        trailer<
+            '('
+            arglist<
+                lambdef< 'lambda'
+                         (fp=NAME | vfpdef< '(' fp=NAME ')'> ) ':' xp=any
+                >
+                ','
+                it=any
+            >
+            ')'
+        >
+    >
+    |
+    power<
+        'filter'
+        trailer< '(' arglist< none='None' ',' seq=any > ')' >
+    >
+    |
+    power<
+        'filter'
+        args=trailer< '(' [any] ')' >
+    >
+    """
+
+    skip_on = "future_builtins.filter"
+
+    def transform(self, node, results):
+        if self.should_skip(node):
+            return
+
+        if "filter_lambda" in results:
+            new = ListComp(results.get("fp").clone(),
+                           results.get("fp").clone(),
+                           results.get("it").clone(),
+                           results.get("xp").clone())
+
+        elif "none" in results:
+            new = ListComp(Name(u"_f"),
+                           Name(u"_f"),
+                           results["seq"].clone(),
+                           Name(u"_f"))
+
+        else:
+            if in_special_context(node):
+                return None
+            new = node.clone()
+            new.prefix = u""
+            new = Call(Name(u"list"), [new])
+        new.prefix = node.prefix
+        return new
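The three pattern branches above produce noticeably different code, so here is a hedged sketch of each (stock lib2to3 assumed; f and seq are placeholders):

    from lib2to3.refactor import RefactoringTool

    rt = RefactoringTool(["lib2to3.fixes.fix_filter"])
    print(str(rt.refactor_string("filter(f, seq)\n", "<example>")))
    # list(filter(f, seq))
    print(str(rt.refactor_string("filter(None, seq)\n", "<example>")))
    # [_f for _f in seq if _f]
    print(str(rt.refactor_string("filter(lambda x: x > 0, seq)\n", "<example>")))
    # [x for x in seq if x > 0]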
diff --git a/python/helpers/pydev/third_party/pep8/lib2to3/lib2to3/fixes/fix_funcattrs.py b/python/helpers/pydev/third_party/pep8/lib2to3/lib2to3/fixes/fix_funcattrs.py
new file mode 100644
index 0000000..9e45c02
--- /dev/null
+++ b/python/helpers/pydev/third_party/pep8/lib2to3/lib2to3/fixes/fix_funcattrs.py
@@ -0,0 +1,21 @@
+"""Fix function attribute names (f.func_x -> f.__x__)."""
+# Author: Collin Winter
+
+# Local imports
+from .. import fixer_base
+from ..fixer_util import Name
+
+
+class FixFuncattrs(fixer_base.BaseFix):
+    BM_compatible = True
+
+    PATTERN = """
+    power< any+ trailer< '.' attr=('func_closure' | 'func_doc' | 'func_globals'
+                                  | 'func_name' | 'func_defaults' | 'func_code'
+                                  | 'func_dict') > any* >
+    """
+
+    def transform(self, node, results):
+        attr = results["attr"][0]
+        attr.replace(Name((u"__%s__" % attr.value[5:]),
+                          prefix=attr.prefix))
diff --git a/python/helpers/pydev/third_party/pep8/lib2to3/lib2to3/fixes/fix_future.py b/python/helpers/pydev/third_party/pep8/lib2to3/lib2to3/fixes/fix_future.py
new file mode 100644
index 0000000..fbcb86a
--- /dev/null
+++ b/python/helpers/pydev/third_party/pep8/lib2to3/lib2to3/fixes/fix_future.py
@@ -0,0 +1,22 @@
+"""Remove __future__ imports
+
+from __future__ import foo is replaced with an empty line.
+"""
+# Author: Christian Heimes
+
+# Local imports
+from .. import fixer_base
+from ..fixer_util import BlankLine
+
+class FixFuture(fixer_base.BaseFix):
+    BM_compatible = True
+
+    PATTERN = """import_from< 'from' module_name="__future__" 'import' any >"""
+
+    # This should be run last -- some things check for the import
+    run_order = 10
+
+    def transform(self, node, results):
+        new = BlankLine()
+        new.prefix = node.prefix
+        return new
diff --git a/python/helpers/pydev/third_party/pep8/lib2to3/lib2to3/fixes/fix_getcwdu.py b/python/helpers/pydev/third_party/pep8/lib2to3/lib2to3/fixes/fix_getcwdu.py
new file mode 100644
index 0000000..82233c8
--- /dev/null
+++ b/python/helpers/pydev/third_party/pep8/lib2to3/lib2to3/fixes/fix_getcwdu.py
@@ -0,0 +1,19 @@
+"""
+Fixer that changes os.getcwdu() to os.getcwd().
+"""
+# Author: Victor Stinner
+
+# Local imports
+from .. import fixer_base
+from ..fixer_util import Name
+
+class FixGetcwdu(fixer_base.BaseFix):
+    BM_compatible = True
+
+    PATTERN = """
+              power< 'os' trailer< dot='.' name='getcwdu' > any* >
+              """
+
+    def transform(self, node, results):
+        name = results["name"]
+        name.replace(Name(u"getcwd", prefix=name.prefix))
diff --git a/python/helpers/pydev/third_party/pep8/lib2to3/lib2to3/fixes/fix_has_key.py b/python/helpers/pydev/third_party/pep8/lib2to3/lib2to3/fixes/fix_has_key.py
new file mode 100644
index 0000000..bead4cb
--- /dev/null
+++ b/python/helpers/pydev/third_party/pep8/lib2to3/lib2to3/fixes/fix_has_key.py
@@ -0,0 +1,110 @@
+# Copyright 2006 Google, Inc. All Rights Reserved.
+# Licensed to PSF under a Contributor Agreement.
+
+"""Fixer for has_key().
+
+Calls to .has_key() methods are expressed in terms of the 'in'
+operator:
+
+    d.has_key(k) -> k in d
+
+CAVEATS:
+1) While the primary target of this fixer is dict.has_key(), the
+   fixer will change any has_key() method call, regardless of its
+   class.
+
+2) Cases like this will not be converted:
+
+    m = d.has_key
+    if m(k):
+        ...
+
+   Only *calls* to has_key() are converted. While it is possible to
+   convert the above to something like
+
+    m = d.__contains__
+    if m(k):
+        ...
+
+   this is currently not done.
+"""
+
+# Local imports
+from .. import pytree
+from ..pgen2 import token
+from .. import fixer_base
+from ..fixer_util import Name, parenthesize
+
+
+class FixHasKey(fixer_base.BaseFix):
+    BM_compatible = True
+
+    PATTERN = """
+    anchor=power<
+        before=any+
+        trailer< '.' 'has_key' >
+        trailer<
+            '('
+            ( not(arglist | argument<any '=' any>) arg=any
+            | arglist<(not argument<any '=' any>) arg=any ','>
+            )
+            ')'
+        >
+        after=any*
+    >
+    |
+    negation=not_test<
+        'not'
+        anchor=power<
+            before=any+
+            trailer< '.' 'has_key' >
+            trailer<
+                '('
+                ( not(arglist | argument<any '=' any>) arg=any
+                | arglist<(not argument<any '=' any>) arg=any ','>
+                )
+                ')'
+            >
+        >
+    >
+    """
+
+    def transform(self, node, results):
+        assert results
+        syms = self.syms
+        if (node.parent.type == syms.not_test and
+            self.pattern.match(node.parent)):
+            # Don't transform a node matching the first alternative of the
+            # pattern when its parent matches the second alternative
+            return None
+        negation = results.get("negation")
+        anchor = results["anchor"]
+        prefix = node.prefix
+        before = [n.clone() for n in results["before"]]
+        arg = results["arg"].clone()
+        after = results.get("after")
+        if after:
+            after = [n.clone() for n in after]
+        if arg.type in (syms.comparison, syms.not_test, syms.and_test,
+                        syms.or_test, syms.test, syms.lambdef, syms.argument):
+            arg = parenthesize(arg)
+        if len(before) == 1:
+            before = before[0]
+        else:
+            before = pytree.Node(syms.power, before)
+        before.prefix = u" "
+        n_op = Name(u"in", prefix=u" ")
+        if negation:
+            n_not = Name(u"not", prefix=u" ")
+            n_op = pytree.Node(syms.comp_op, (n_not, n_op))
+        new = pytree.Node(syms.comparison, (arg, n_op, before))
+        if after:
+            new = parenthesize(new)
+            new = pytree.Node(syms.power, (new,) + tuple(after))
+        if node.parent.type in (syms.comparison, syms.expr, syms.xor_expr,
+                                syms.and_expr, syms.shift_expr,
+                                syms.arith_expr, syms.term,
+                                syms.factor, syms.power):
+            new = parenthesize(new)
+        new.prefix = prefix
+        return new
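A hedged sketch of both the plain and the negated form (stock lib2to3 assumed; d and k are placeholders):

    from lib2to3.refactor import RefactoringTool

    rt = RefactoringTool(["lib2to3.fixes.fix_has_key"])
    print(str(rt.refactor_string("d.has_key(k)\n", "<example>")))      # k in d
    print(str(rt.refactor_string("not d.has_key(k)\n", "<example>")))  # k not in d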
diff --git a/python/helpers/pydev/third_party/pep8/lib2to3/lib2to3/fixes/fix_idioms.py b/python/helpers/pydev/third_party/pep8/lib2to3/lib2to3/fixes/fix_idioms.py
new file mode 100644
index 0000000..37b6eef
--- /dev/null
+++ b/python/helpers/pydev/third_party/pep8/lib2to3/lib2to3/fixes/fix_idioms.py
@@ -0,0 +1,152 @@
+"""Adjust some old Python 2 idioms to their modern counterparts.
+
+* Change some type comparisons to isinstance() calls:
+    type(x) == T -> isinstance(x, T)
+    type(x) is T -> isinstance(x, T)
+    type(x) != T -> not isinstance(x, T)
+    type(x) is not T -> not isinstance(x, T)
+
+* Change "while 1:" into "while True:".
+
+* Change both
+
+    v = list(EXPR)
+    v.sort()
+    foo(v)
+
+and the more general
+
+    v = EXPR
+    v.sort()
+    foo(v)
+
+into
+
+    v = sorted(EXPR)
+    foo(v)
+"""
+# Author: Jacques Frechet, Collin Winter
+
+# Local imports
+from .. import fixer_base
+from ..fixer_util import Call, Comma, Name, Node, BlankLine, syms
+
+CMP = "(n='!=' | '==' | 'is' | n=comp_op< 'is' 'not' >)"
+TYPE = "power< 'type' trailer< '(' x=any ')' > >"
+
+class FixIdioms(fixer_base.BaseFix):
+    explicit = True # The user must ask for this fixer
+
+    PATTERN = r"""
+        isinstance=comparison< %s %s T=any >
+        |
+        isinstance=comparison< T=any %s %s >
+        |
+        while_stmt< 'while' while='1' ':' any+ >
+        |
+        sorted=any<
+            any*
+            simple_stmt<
+              expr_stmt< id1=any '='
+                         power< list='list' trailer< '(' (not arglist<any+>) any ')' > >
+              >
+              '\n'
+            >
+            sort=
+            simple_stmt<
+              power< id2=any
+                     trailer< '.' 'sort' > trailer< '(' ')' >
+              >
+              '\n'
+            >
+            next=any*
+        >
+        |
+        sorted=any<
+            any*
+            simple_stmt< expr_stmt< id1=any '=' expr=any > '\n' >
+            sort=
+            simple_stmt<
+              power< id2=any
+                     trailer< '.' 'sort' > trailer< '(' ')' >
+              >
+              '\n'
+            >
+            next=any*
+        >
+    """ % (TYPE, CMP, CMP, TYPE)
+
+    def match(self, node):
+        r = super(FixIdioms, self).match(node)
+        # If we've matched one of the sort/sorted subpatterns above, we
+        # want to reject matches where the initial assignment and the
+        # subsequent .sort() call involve different identifiers.
+        if r and "sorted" in r:
+            if r["id1"] == r["id2"]:
+                return r
+            return None
+        return r
+
+    def transform(self, node, results):
+        if "isinstance" in results:
+            return self.transform_isinstance(node, results)
+        elif "while" in results:
+            return self.transform_while(node, results)
+        elif "sorted" in results:
+            return self.transform_sort(node, results)
+        else:
+            raise RuntimeError("Invalid match")
+
+    def transform_isinstance(self, node, results):
+        x = results["x"].clone() # The thing inside of type()
+        T = results["T"].clone() # The type being compared against
+        x.prefix = u""
+        T.prefix = u" "
+        test = Call(Name(u"isinstance"), [x, Comma(), T])
+        if "n" in results:
+            test.prefix = u" "
+            test = Node(syms.not_test, [Name(u"not"), test])
+        test.prefix = node.prefix
+        return test
+
+    def transform_while(self, node, results):
+        one = results["while"]
+        one.replace(Name(u"True", prefix=one.prefix))
+
+    def transform_sort(self, node, results):
+        sort_stmt = results["sort"]
+        next_stmt = results["next"]
+        list_call = results.get("list")
+        simple_expr = results.get("expr")
+
+        if list_call:
+            list_call.replace(Name(u"sorted", prefix=list_call.prefix))
+        elif simple_expr:
+            new = simple_expr.clone()
+            new.prefix = u""
+            simple_expr.replace(Call(Name(u"sorted"), [new],
+                                     prefix=simple_expr.prefix))
+        else:
+            raise RuntimeError("should not have reached here")
+        sort_stmt.remove()
+
+        btwn = sort_stmt.prefix
+        # Keep any prefix lines between the sort_stmt and the list_call and
+        # shove them right after the sorted() call.
+        if u"\n" in btwn:
+            if next_stmt:
+                # The new prefix should be everything from the sort_stmt's
+                # prefix up to the last newline, then the old prefix after a new
+                # line.
+                prefix_lines = (btwn.rpartition(u"\n")[0], next_stmt[0].prefix)
+                next_stmt[0].prefix = u"\n".join(prefix_lines)
+            else:
+                assert list_call.parent
+                assert list_call.next_sibling is None
+                # Put a blank line after list_call and set its prefix.
+                end_line = BlankLine()
+                list_call.parent.append_child(end_line)
+                assert list_call.next_sibling is end_line
+                # The new prefix should be everything up to the first new line
+                # of sort_stmt's prefix.
+                end_line.prefix = btwn.rpartition(u"\n")[0]
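Because this fixer is explicit, the driver has to opt in to it by name. A hedged sketch (stock lib2to3 assumed; x is a placeholder):

    from lib2to3.refactor import RefactoringTool

    name = "lib2to3.fixes.fix_idioms"
    rt = RefactoringTool([name], explicit=[name])
    print(str(rt.refactor_string("while 1:\n    pass\n", "<example>")))
    # while True:
    #     pass
    print(str(rt.refactor_string("if type(x) == int:\n    pass\n", "<example>")))
    # if isinstance(x, int):
    #     pass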
diff --git a/python/helpers/pydev/third_party/pep8/lib2to3/lib2to3/fixes/fix_import.py b/python/helpers/pydev/third_party/pep8/lib2to3/lib2to3/fixes/fix_import.py
new file mode 100644
index 0000000..201e811
--- /dev/null
+++ b/python/helpers/pydev/third_party/pep8/lib2to3/lib2to3/fixes/fix_import.py
@@ -0,0 +1,99 @@
+"""Fixer for import statements.
+If spam is being imported from the local directory, this import:
+    from spam import eggs
+Becomes:
+    from .spam import eggs
+
+And this import:
+    import spam
+Becomes:
+    from . import spam
+"""
+
+# Local imports
+from .. import fixer_base
+from os.path import dirname, join, exists, sep
+from ..fixer_util import FromImport, syms, token
+
+
+def traverse_imports(names):
+    """
+    Walks over all the names imported in a dotted_as_names node.
+    """
+    pending = [names]
+    while pending:
+        node = pending.pop()
+        if node.type == token.NAME:
+            yield node.value
+        elif node.type == syms.dotted_name:
+            yield "".join([ch.value for ch in node.children])
+        elif node.type == syms.dotted_as_name:
+            pending.append(node.children[0])
+        elif node.type == syms.dotted_as_names:
+            pending.extend(node.children[::-2])
+        else:
+            raise AssertionError("unkown node type")
+
+
+class FixImport(fixer_base.BaseFix):
+    BM_compatible = True
+
+    PATTERN = """
+    import_from< 'from' imp=any 'import' ['('] any [')'] >
+    |
+    import_name< 'import' imp=any >
+    """
+
+    def start_tree(self, tree, name):
+        super(FixImport, self).start_tree(tree, name)
+        self.skip = "absolute_import" in tree.future_features
+
+    def transform(self, node, results):
+        if self.skip:
+            return
+        imp = results['imp']
+
+        if node.type == syms.import_from:
+            # Some imps are top-level (eg: 'import ham')
+            # some are first level (eg: 'import ham.eggs')
+            # some are second level (eg: 'import ham.eggs as spam')
+            # Hence the loop.
+            while not hasattr(imp, 'value'):
+                imp = imp.children[0]
+            if self.probably_a_local_import(imp.value):
+                imp.value = u"." + imp.value
+                imp.changed()
+        else:
+            have_local = False
+            have_absolute = False
+            for mod_name in traverse_imports(imp):
+                if self.probably_a_local_import(mod_name):
+                    have_local = True
+                else:
+                    have_absolute = True
+            if have_absolute:
+                if have_local:
+                    # We won't handle both sibling and absolute imports in the
+                    # same statement at the moment.
+                    self.warning(node, "absolute and local imports together")
+                return
+
+            new = FromImport(u".", [imp])
+            new.prefix = node.prefix
+            return new
+
+    def probably_a_local_import(self, imp_name):
+        if imp_name.startswith(u"."):
+            # Relative imports are certainly not local imports.
+            return False
+        imp_name = imp_name.split(u".", 1)[0]
+        base_path = dirname(self.filename)
+        base_path = join(base_path, imp_name)
+        # If there is no __init__.py next to the file, it's not in a package,
+        # so it can't be a relative import.
+        if not exists(join(dirname(base_path), "__init__.py")):
+            return False
+        for ext in [".py", sep, ".pyc", ".so", ".sl", ".pyd"]:
+            if exists(base_path + ext):
+                return True
+        return False
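The local-or-not decision needs the source file on disk, so the whole fixer is awkward to demo in isolation, but traverse_imports() can be exercised directly. A hedged sketch (stock lib2to3 assumed):

    from lib2to3 import pygram, pytree
    from lib2to3.pgen2 import driver
    from lib2to3.fixes.fix_import import traverse_imports

    d = driver.Driver(pygram.python_grammar, convert=pytree.convert)
    tree = d.parse_string("import ham.eggs, spam as s\n")
    names = tree.children[0].children[0].children[1]  # the dotted_as_names node
    print(list(traverse_imports(names)))  # ['ham.eggs', 'spam']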
diff --git a/python/helpers/pydev/third_party/pep8/lib2to3/lib2to3/fixes/fix_imports.py b/python/helpers/pydev/third_party/pep8/lib2to3/lib2to3/fixes/fix_imports.py
new file mode 100644
index 0000000..93c9e67
--- /dev/null
+++ b/python/helpers/pydev/third_party/pep8/lib2to3/lib2to3/fixes/fix_imports.py
@@ -0,0 +1,145 @@
+"""Fix incompatible imports and module references."""
+# Authors: Collin Winter, Nick Edds
+
+# Local imports
+from .. import fixer_base
+from ..fixer_util import Name, attr_chain
+
+MAPPING = {'StringIO':  'io',
+           'cStringIO': 'io',
+           'cPickle': 'pickle',
+           '__builtin__' : 'builtins',
+           'copy_reg': 'copyreg',
+           'Queue': 'queue',
+           'SocketServer': 'socketserver',
+           'ConfigParser': 'configparser',
+           'repr': 'reprlib',
+           'FileDialog': 'tkinter.filedialog',
+           'tkFileDialog': 'tkinter.filedialog',
+           'SimpleDialog': 'tkinter.simpledialog',
+           'tkSimpleDialog': 'tkinter.simpledialog',
+           'tkColorChooser': 'tkinter.colorchooser',
+           'tkCommonDialog': 'tkinter.commondialog',
+           'Dialog': 'tkinter.dialog',
+           'Tkdnd': 'tkinter.dnd',
+           'tkFont': 'tkinter.font',
+           'tkMessageBox': 'tkinter.messagebox',
+           'ScrolledText': 'tkinter.scrolledtext',
+           'Tkconstants': 'tkinter.constants',
+           'Tix': 'tkinter.tix',
+           'ttk': 'tkinter.ttk',
+           'Tkinter': 'tkinter',
+           'markupbase': '_markupbase',
+           '_winreg': 'winreg',
+           'thread': '_thread',
+           'dummy_thread': '_dummy_thread',
+           # anydbm and whichdb are handled by fix_imports2
+           'dbhash': 'dbm.bsd',
+           'dumbdbm': 'dbm.dumb',
+           'dbm': 'dbm.ndbm',
+           'gdbm': 'dbm.gnu',
+           'xmlrpclib': 'xmlrpc.client',
+           'DocXMLRPCServer': 'xmlrpc.server',
+           'SimpleXMLRPCServer': 'xmlrpc.server',
+           'httplib': 'http.client',
+           'htmlentitydefs' : 'html.entities',
+           'HTMLParser' : 'html.parser',
+           'Cookie': 'http.cookies',
+           'cookielib': 'http.cookiejar',
+           'BaseHTTPServer': 'http.server',
+           'SimpleHTTPServer': 'http.server',
+           'CGIHTTPServer': 'http.server',
+           #'test.test_support': 'test.support',
+           'commands': 'subprocess',
+           'UserString' : 'collections',
+           'UserList' : 'collections',
+           'urlparse' : 'urllib.parse',
+           'robotparser' : 'urllib.robotparser',
+}
+
+
+def alternates(members):
+    return "(" + "|".join(map(repr, members)) + ")"
+
+
+def build_pattern(mapping=MAPPING):
+    mod_list = ' | '.join(["module_name='%s'" % key for key in mapping])
+    bare_names = alternates(mapping.keys())
+
+    yield """name_import=import_name< 'import' ((%s) |
+               multiple_imports=dotted_as_names< any* (%s) any* >) >
+          """ % (mod_list, mod_list)
+    yield """import_from< 'from' (%s) 'import' ['(']
+              ( any | import_as_name< any 'as' any > |
+                import_as_names< any* >)  [')'] >
+          """ % mod_list
+    yield """import_name< 'import' (dotted_as_name< (%s) 'as' any > |
+               multiple_imports=dotted_as_names<
+                 any* dotted_as_name< (%s) 'as' any > any* >) >
+          """ % (mod_list, mod_list)
+
+    # Find usages of module members in code e.g. thread.foo(bar)
+    yield "power< bare_with_attr=(%s) trailer<'.' any > any* >" % bare_names
+
+
+class FixImports(fixer_base.BaseFix):
+
+    BM_compatible = True
+    keep_line_order = True
+    # This is overridden in fix_imports2.
+    mapping = MAPPING
+
+    # We want to run this fixer late, so fix_import doesn't try to make stdlib
+    # renames into relative imports.
+    run_order = 6
+
+    def build_pattern(self):
+        return "|".join(build_pattern(self.mapping))
+
+    def compile_pattern(self):
+        # We override this so that MAPPING can be programmatically altered
+        # and the changes will be reflected in PATTERN.
+        self.PATTERN = self.build_pattern()
+        super(FixImports, self).compile_pattern()
+
+    # Don't match the node if it's within another match.
+    def match(self, node):
+        match = super(FixImports, self).match
+        results = match(node)
+        if results:
+            # Module usage could be in the trailer of an attribute lookup, so we
+            # might have nested matches when "bare_with_attr" is present.
+            if "bare_with_attr" not in results and \
+                    any(match(obj) for obj in attr_chain(node, "parent")):
+                return False
+            return results
+        return False
+
+    def start_tree(self, tree, filename):
+        super(FixImports, self).start_tree(tree, filename)
+        self.replace = {}
+
+    def transform(self, node, results):
+        import_mod = results.get("module_name")
+        if import_mod:
+            mod_name = import_mod.value
+            new_name = unicode(self.mapping[mod_name])
+            import_mod.replace(Name(new_name, prefix=import_mod.prefix))
+            if "name_import" in results:
+                # If it's not a "from x import x, y" or "import x as y" import,
+                # mark its usage to be replaced.
+                self.replace[mod_name] = new_name
+            if "multiple_imports" in results:
+                # This is a nasty hack to fix multiple imports on a line (e.g.,
+                # "import StringIO, urlparse"). The problem is that I can't
+                # figure out an easy way to make a pattern recognize the keys of
+                # MAPPING randomly sprinkled in an import statement.
+                results = self.match(node)
+                if results:
+                    self.transform(node, results)
+        else:
+            # Replace usage of the module.
+            bare_name = results["bare_with_attr"][0]
+            new_name = self.replace.get(bare_name.value)
+            if new_name:
+                bare_name.replace(Name(new_name, prefix=bare_name.prefix))
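Because keep_line_order is set, the import statement is rewritten before any later uses of the module, which is what lets the usage replacement above work. A hedged sketch (stock lib2to3 assumed; a and b are placeholders):

    from lib2to3.refactor import RefactoringTool

    rt = RefactoringTool(["lib2to3.fixes.fix_imports"])
    src = "import urlparse\nurl = urlparse.urljoin(a, b)\n"
    print(str(rt.refactor_string(src, "<example>")))
    # import urllib.parse
    # url = urllib.parse.urljoin(a, b)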
diff --git a/python/helpers/pydev/third_party/pep8/lib2to3/lib2to3/fixes/fix_imports2.py b/python/helpers/pydev/third_party/pep8/lib2to3/lib2to3/fixes/fix_imports2.py
new file mode 100644
index 0000000..9a33c67
--- /dev/null
+++ b/python/helpers/pydev/third_party/pep8/lib2to3/lib2to3/fixes/fix_imports2.py
@@ -0,0 +1,16 @@
+"""Fix incompatible imports and module references that must be fixed after
+fix_imports."""
+from . import fix_imports
+
+
+MAPPING = {
+            'whichdb': 'dbm',
+            'anydbm': 'dbm',
+          }
+
+
+class FixImports2(fix_imports.FixImports):
+
+    run_order = 7
+
+    mapping = MAPPING
diff --git a/python/helpers/pydev/third_party/pep8/lib2to3/lib2to3/fixes/fix_input.py b/python/helpers/pydev/third_party/pep8/lib2to3/lib2to3/fixes/fix_input.py
new file mode 100644
index 0000000..fbf4c72
--- /dev/null
+++ b/python/helpers/pydev/third_party/pep8/lib2to3/lib2to3/fixes/fix_input.py
@@ -0,0 +1,26 @@
+"""Fixer that changes input(...) into eval(input(...))."""
+# Author: Andre Roberge
+
+# Local imports
+from .. import fixer_base
+from ..fixer_util import Call, Name
+from .. import patcomp
+
+
+context = patcomp.compile_pattern("power< 'eval' trailer< '(' any ')' > >")
+
+
+class FixInput(fixer_base.BaseFix):
+    BM_compatible = True
+    PATTERN = """
+              power< 'input' args=trailer< '(' [any] ')' > >
+              """
+
+    def transform(self, node, results):
+        # If we're already wrapped in an eval() call, we're done.
+        if context.match(node.parent.parent):
+            return
+
+        new = node.clone()
+        new.prefix = u""
+        return Call(Name(u"eval"), [new], prefix=node.prefix)
diff --git a/python/helpers/pydev/third_party/pep8/lib2to3/lib2to3/fixes/fix_intern.py b/python/helpers/pydev/third_party/pep8/lib2to3/lib2to3/fixes/fix_intern.py
new file mode 100644
index 0000000..e7bb505
--- /dev/null
+++ b/python/helpers/pydev/third_party/pep8/lib2to3/lib2to3/fixes/fix_intern.py
@@ -0,0 +1,46 @@
+# Copyright 2006 Georg Brandl.
+# Licensed to PSF under a Contributor Agreement.
+
+"""Fixer for intern().
+
+intern(s) -> sys.intern(s)"""
+
+# Local imports
+from .. import pytree
+from .. import fixer_base
+from ..fixer_util import Name, Attr, touch_import
+
+
+class FixIntern(fixer_base.BaseFix):
+    BM_compatible = True
+    order = "pre"
+
+    PATTERN = """
+    power< 'intern'
+           trailer< lpar='('
+                    ( not(arglist | argument<any '=' any>) obj=any
+                      | obj=arglist<(not argument<any '=' any>) any ','> )
+                    rpar=')' >
+           after=any*
+    >
+    """
+
+    def transform(self, node, results):
+        syms = self.syms
+        obj = results["obj"].clone()
+        if obj.type == syms.arglist:
+            newarglist = obj.clone()
+        else:
+            newarglist = pytree.Node(syms.arglist, [obj.clone()])
+        after = results["after"]
+        if after:
+            after = [n.clone() for n in after]
+        new = pytree.Node(syms.power,
+                          Attr(Name(u"sys"), Name(u"intern")) +
+                          [pytree.Node(syms.trailer,
+                                       [results["lpar"].clone(),
+                                        newarglist,
+                                        results["rpar"].clone()])] + after)
+        new.prefix = node.prefix
+        touch_import(None, u'sys', node)
+        return new
diff --git a/python/helpers/pydev/third_party/pep8/lib2to3/lib2to3/fixes/fix_isinstance.py b/python/helpers/pydev/third_party/pep8/lib2to3/lib2to3/fixes/fix_isinstance.py
new file mode 100644
index 0000000..4b04c8f
--- /dev/null
+++ b/python/helpers/pydev/third_party/pep8/lib2to3/lib2to3/fixes/fix_isinstance.py
@@ -0,0 +1,52 @@
+# Copyright 2008 Armin Ronacher.
+# Licensed to PSF under a Contributor Agreement.
+
+"""Fixer that cleans up a tuple argument to isinstance after the tokens
+in it were fixed.  This is mainly used to remove double occurrences of
+tokens as a leftover of the long -> int / unicode -> str conversion.
+
+eg.  isinstance(x, (int, long)) -> isinstance(x, (int, int))
+       -> isinstance(x, int)
+"""
+
+from .. import fixer_base
+from ..fixer_util import token
+
+
+class FixIsinstance(fixer_base.BaseFix):
+    BM_compatible = True
+    PATTERN = """
+    power<
+        'isinstance'
+        trailer< '(' arglist< any ',' atom< '('
+            args=testlist_gexp< any+ >
+        ')' > > ')' >
+    >
+    """
+
+    run_order = 6
+
+    def transform(self, node, results):
+        names_inserted = set()
+        testlist = results["args"]
+        args = testlist.children
+        new_args = []
+        iterator = enumerate(args)
+        for idx, arg in iterator:
+            if arg.type == token.NAME and arg.value in names_inserted:
+                if idx < len(args) - 1 and args[idx + 1].type == token.COMMA:
+                    iterator.next()
+                    continue
+            else:
+                new_args.append(arg)
+                if arg.type == token.NAME:
+                    names_inserted.add(arg.value)
+        if new_args and new_args[-1].type == token.COMMA:
+            del new_args[-1]
+        if len(new_args) == 1:
+            atom = testlist.parent
+            new_args[0].prefix = atom.prefix
+            atom.replace(new_args[0])
+        else:
+            args[:] = new_args
+            node.changed()
diff --git a/python/helpers/pydev/third_party/pep8/lib2to3/lib2to3/fixes/fix_itertools.py b/python/helpers/pydev/third_party/pep8/lib2to3/lib2to3/fixes/fix_itertools.py
new file mode 100644
index 0000000..067641b
--- /dev/null
+++ b/python/helpers/pydev/third_party/pep8/lib2to3/lib2to3/fixes/fix_itertools.py
@@ -0,0 +1,43 @@
+""" Fixer for itertools.(imap|ifilter|izip) --> (map|filter|zip) and
+    itertools.ifilterfalse --> itertools.filterfalse (bugs 2360-2363)
+
+    imports from itertools are fixed in fix_itertools_import.py
+
+    If itertools is imported as something else (i.e. 'import itertools as it;
+    it.izip(spam, eggs)'), the method calls will not get fixed.
+    """
+
+# Local imports
+from .. import fixer_base
+from ..fixer_util import Name
+
+class FixItertools(fixer_base.BaseFix):
+    BM_compatible = True
+    it_funcs = "('imap'|'ifilter'|'izip'|'izip_longest'|'ifilterfalse')"
+    PATTERN = """
+              power< it='itertools'
+                  trailer<
+                     dot='.' func=%(it_funcs)s > trailer< '(' [any] ')' > >
+              |
+              power< func=%(it_funcs)s trailer< '(' [any] ')' > >
+              """ %(locals())
+
+    # Needs to be run after fix_(map|zip|filter)
+    run_order = 6
+
+    def transform(self, node, results):
+        prefix = None
+        func = results['func'][0]
+        if ('it' in results and
+            func.value not in (u'ifilterfalse', u'izip_longest')):
+            dot, it = (results['dot'], results['it'])
+            # Remove the 'itertools'
+            prefix = it.prefix
+            it.remove()
+            # Replace the node which contains ('.', 'function') with the
+            # function (to be consistent with the second part of the pattern)
+            dot.remove()
+            func.parent.replace(func)
+
+        prefix = prefix or func.prefix
+        func.replace(Name(func.value[1:], prefix=prefix))
diff --git a/python/helpers/pydev/third_party/pep8/lib2to3/lib2to3/fixes/fix_itertools_imports.py b/python/helpers/pydev/third_party/pep8/lib2to3/lib2to3/fixes/fix_itertools_imports.py
new file mode 100644
index 0000000..28610cf
--- /dev/null
+++ b/python/helpers/pydev/third_party/pep8/lib2to3/lib2to3/fixes/fix_itertools_imports.py
@@ -0,0 +1,57 @@
+""" Fixer for imports of itertools.(imap|ifilter|izip|ifilterfalse) """
+
+# Local imports
+from lib2to3 import fixer_base
+from lib2to3.fixer_util import BlankLine, syms, token
+
+
+class FixItertoolsImports(fixer_base.BaseFix):
+    BM_compatible = True
+    PATTERN = """
+              import_from< 'from' 'itertools' 'import' imports=any >
+              """ %(locals())
+
+    def transform(self, node, results):
+        imports = results['imports']
+        if imports.type == syms.import_as_name or not imports.children:
+            children = [imports]
+        else:
+            children = imports.children
+        for child in children[::2]:
+            if child.type == token.NAME:
+                member = child.value
+                name_node = child
+            elif child.type == token.STAR:
+                # Just leave the import as is.
+                return
+            else:
+                assert child.type == syms.import_as_name
+                name_node = child.children[0]
+            member_name = name_node.value
+            if member_name in (u'imap', u'izip', u'ifilter'):
+                child.value = None
+                child.remove()
+            elif member_name in (u'ifilterfalse', u'izip_longest'):
+                node.changed()
+                name_node.value = (u'filterfalse' if member_name[1] == u'f'
+                                   else u'zip_longest')
+
+        # Make sure the import statement is still sane
+        children = imports.children[:] or [imports]
+        remove_comma = True
+        for child in children:
+            if remove_comma and child.type == token.COMMA:
+                child.remove()
+            else:
+                remove_comma ^= True
+
+        while children and children[-1].type == token.COMMA:
+            children.pop().remove()
+
+        # If there are no imports left, just get rid of the entire statement
+        if (not (imports.children or getattr(imports, 'value', None)) or
+            imports.parent is None):
+            p = node.prefix
+            node = BlankLine()
+            node.prefix = p
+            return node
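A companion sketch for the import-side fixer above (illustrative only, same stock-lib2to3 assumption): imap/izip/ifilter simply disappear from the import list, while ifilterfalse/izip_longest are renamed to their Python 3 spellings.

    from lib2to3.refactor import RefactoringTool

    tool = RefactoringTool(["lib2to3.fixes.fix_itertools_imports"])
    before = "from itertools import imap, izip_longest\n"
    # Expected result is roughly: from itertools import zip_longest
    print(tool.refactor_string(before, "<example>"))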
diff --git a/python/helpers/pydev/third_party/pep8/lib2to3/lib2to3/fixes/fix_long.py b/python/helpers/pydev/third_party/pep8/lib2to3/lib2to3/fixes/fix_long.py
new file mode 100644
index 0000000..5dddde0
--- /dev/null
+++ b/python/helpers/pydev/third_party/pep8/lib2to3/lib2to3/fixes/fix_long.py
@@ -0,0 +1,19 @@
+# Copyright 2006 Google, Inc. All Rights Reserved.
+# Licensed to PSF under a Contributor Agreement.
+
+"""Fixer that turns 'long' into 'int' everywhere.
+"""
+
+# Local imports
+from lib2to3 import fixer_base
+from lib2to3.fixer_util import is_probably_builtin
+
+
+class FixLong(fixer_base.BaseFix):
+    BM_compatible = True
+    PATTERN = "'long'"
+
+    def transform(self, node, results):
+        if is_probably_builtin(node):
+            node.value = u"int"
+            node.changed()
diff --git a/python/helpers/pydev/third_party/pep8/lib2to3/lib2to3/fixes/fix_map.py b/python/helpers/pydev/third_party/pep8/lib2to3/lib2to3/fixes/fix_map.py
new file mode 100644
index 0000000..7a7d0db
--- /dev/null
+++ b/python/helpers/pydev/third_party/pep8/lib2to3/lib2to3/fixes/fix_map.py
@@ -0,0 +1,91 @@
+# Copyright 2007 Google, Inc. All Rights Reserved.
+# Licensed to PSF under a Contributor Agreement.
+
+"""Fixer that changes map(F, ...) into list(map(F, ...)) unless there
+exists a 'from future_builtins import map' statement in the top-level
+namespace.
+
+As a special case, map(None, X) is changed into list(X).  (This is
+necessary because the semantics are changed in this case -- the new
+map(None, X) is equivalent to [(x,) for x in X].)
+
+We avoid the transformation (except for the special case mentioned
+above) if the map() call is directly contained in iter(<>), list(<>),
+tuple(<>), sorted(<>), ...join(<>), or for V in <>:.
+
+NOTE: This is still not correct if the original code was depending on
+map(F, X, Y, ...) to go on until the longest argument is exhausted,
+substituting None for missing values -- like zip(), it now stops as
+soon as the shortest argument is exhausted.
+"""
+
+# Local imports
+from ..pgen2 import token
+from .. import fixer_base
+from ..fixer_util import Name, Call, ListComp, in_special_context
+from ..pygram import python_symbols as syms
+
+class FixMap(fixer_base.ConditionalFix):
+    BM_compatible = True
+
+    PATTERN = """
+    map_none=power<
+        'map'
+        trailer< '(' arglist< 'None' ',' arg=any [','] > ')' >
+    >
+    |
+    map_lambda=power<
+        'map'
+        trailer<
+            '('
+            arglist<
+                lambdef< 'lambda'
+                         (fp=NAME | vfpdef< '(' fp=NAME ')'> ) ':' xp=any
+                >
+                ','
+                it=any
+            >
+            ')'
+        >
+    >
+    |
+    power<
+        'map' trailer< '(' [arglist=any] ')' >
+    >
+    """
+
+    skip_on = 'future_builtins.map'
+
+    def transform(self, node, results):
+        if self.should_skip(node):
+            return
+
+        if node.parent.type == syms.simple_stmt:
+            self.warning(node, "You should use a for loop here")
+            new = node.clone()
+            new.prefix = u""
+            new = Call(Name(u"list"), [new])
+        elif "map_lambda" in results:
+            new = ListComp(results["xp"].clone(),
+                           results["fp"].clone(),
+                           results["it"].clone())
+        else:
+            if "map_none" in results:
+                new = results["arg"].clone()
+            else:
+                if "arglist" in results:
+                    args = results["arglist"]
+                    if args.type == syms.arglist and \
+                       args.children[0].type == token.NAME and \
+                       args.children[0].value == "None":
+                        self.warning(node, "cannot convert map(None, ...) "
+                                     "with multiple arguments because map() "
+                                     "now truncates to the shortest sequence")
+                        return
+                if in_special_context(node):
+                    return None
+                new = node.clone()
+            new.prefix = u""
+            new = Call(Name(u"list"), [new])
+        new.prefix = node.prefix
+        return new
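The three pattern alternatives above correspond to three different rewrites; a hedged sketch of each, again via a stock RefactoringTool with invented sample code:

    from lib2to3.refactor import RefactoringTool

    tool = RefactoringTool(["lib2to3.fixes.fix_map"])
    # map(None, X) special case -> list(X)
    print(tool.refactor_string("y = map(None, x)\n", "<example>"))
    # lambda form -> list comprehension: y = [v + 1 for v in x]
    print(tool.refactor_string("y = map(lambda v: v + 1, x)\n", "<example>"))
    # generic form -> wrapped in list(...) unless it sits in a special context
    print(tool.refactor_string("y = map(str, x)\n", "<example>"))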
diff --git a/python/helpers/pydev/third_party/pep8/lib2to3/lib2to3/fixes/fix_metaclass.py b/python/helpers/pydev/third_party/pep8/lib2to3/lib2to3/fixes/fix_metaclass.py
new file mode 100644
index 0000000..4f5593c
--- /dev/null
+++ b/python/helpers/pydev/third_party/pep8/lib2to3/lib2to3/fixes/fix_metaclass.py
@@ -0,0 +1,228 @@
+"""Fixer for __metaclass__ = X -> (metaclass=X) methods.
+
+   The various forms of classef (inherits nothing, inherits once, inherints
+   many) don't parse the same in the CST so we look at ALL classes for
+   a __metaclass__ and if we find one normalize the inherits to all be
+   an arglist.
+
+   For one-liner classes ('class X: pass') there is no indent/dedent so
+   we normalize those into having a suite.
+
+   Moving the __metaclass__ into the classdef can also cause the class
+   body to be empty so there is some special casing for that as well.
+
+   This fixer also tries very hard to keep original indenting and spacing
+   in all those corner cases.
+
+"""
+# Author: Jack Diederich
+
+# Local imports
+from .. import fixer_base
+from ..pygram import token
+from ..fixer_util import Name, syms, Node, Leaf
+
+
+def has_metaclass(parent):
+    """ we have to check the cls_node without changing it.
+        There are two possibilities:
+          1)  clsdef => suite => simple_stmt => expr_stmt => Leaf('__meta')
+          2)  clsdef => simple_stmt => expr_stmt => Leaf('__meta')
+    """
+    for node in parent.children:
+        if node.type == syms.suite:
+            return has_metaclass(node)
+        elif node.type == syms.simple_stmt and node.children:
+            expr_node = node.children[0]
+            if expr_node.type == syms.expr_stmt and expr_node.children:
+                left_side = expr_node.children[0]
+                if isinstance(left_side, Leaf) and \
+                        left_side.value == '__metaclass__':
+                    return True
+    return False
+
+
+def fixup_parse_tree(cls_node):
+    """ one-line classes don't get a suite in the parse tree so we add
+        one to normalize the tree
+    """
+    for node in cls_node.children:
+        if node.type == syms.suite:
+            # already in the preferred format, do nothing
+            return
+
+    # !%@#! oneliners have no suite node, we have to fake one up
+    for i, node in enumerate(cls_node.children):
+        if node.type == token.COLON:
+            break
+    else:
+        raise ValueError("No class suite and no ':'!")
+
+    # move everything into a suite node
+    suite = Node(syms.suite, [])
+    while cls_node.children[i+1:]:
+        move_node = cls_node.children[i+1]
+        suite.append_child(move_node.clone())
+        move_node.remove()
+    cls_node.append_child(suite)
+    node = suite
+
+
+def fixup_simple_stmt(parent, i, stmt_node):
+    """ if there is a semi-colon all the parts count as part of the same
+        simple_stmt.  We just want the __metaclass__ part so we move
+        everything after the semi-colon into its own simple_stmt node
+    """
+    for semi_ind, node in enumerate(stmt_node.children):
+        if node.type == token.SEMI: # *sigh*
+            break
+    else:
+        return
+
+    node.remove() # kill the semicolon
+    new_expr = Node(syms.expr_stmt, [])
+    new_stmt = Node(syms.simple_stmt, [new_expr])
+    while stmt_node.children[semi_ind:]:
+        move_node = stmt_node.children[semi_ind]
+        new_expr.append_child(move_node.clone())
+        move_node.remove()
+    parent.insert_child(i, new_stmt)
+    new_leaf1 = new_stmt.children[0].children[0]
+    old_leaf1 = stmt_node.children[0].children[0]
+    new_leaf1.prefix = old_leaf1.prefix
+
+
+def remove_trailing_newline(node):
+    if node.children and node.children[-1].type == token.NEWLINE:
+        node.children[-1].remove()
+
+
+def find_metas(cls_node):
+    # find the suite node (Mmm, sweet nodes)
+    for node in cls_node.children:
+        if node.type == syms.suite:
+            break
+    else:
+        raise ValueError("No class suite!")
+
+    # look for simple_stmt[ expr_stmt[ Leaf('__metaclass__') ] ]
+    for i, simple_node in list(enumerate(node.children)):
+        if simple_node.type == syms.simple_stmt and simple_node.children:
+            expr_node = simple_node.children[0]
+            if expr_node.type == syms.expr_stmt and expr_node.children:
+                # Check if the expr_node is a simple assignment.
+                left_node = expr_node.children[0]
+                if isinstance(left_node, Leaf) and \
+                        left_node.value == u'__metaclass__':
+                    # We found an assignment to __metaclass__.
+                    fixup_simple_stmt(node, i, simple_node)
+                    remove_trailing_newline(simple_node)
+                    yield (node, i, simple_node)
+
+
+def fixup_indent(suite):
+    """ If an INDENT is followed by a thing with a prefix then nuke the prefix
+        Otherwise we get in trouble when removing __metaclass__ at suite start
+    """
+    kids = suite.children[::-1]
+    # find the first indent
+    while kids:
+        node = kids.pop()
+        if node.type == token.INDENT:
+            break
+
+    # find the first Leaf
+    while kids:
+        node = kids.pop()
+        if isinstance(node, Leaf) and node.type != token.DEDENT:
+            if node.prefix:
+                node.prefix = u''
+            return
+        else:
+            kids.extend(node.children[::-1])
+
+
+class FixMetaclass(fixer_base.BaseFix):
+    BM_compatible = True
+
+    PATTERN = """
+    classdef<any*>
+    """
+
+    def transform(self, node, results):
+        if not has_metaclass(node):
+            return
+
+        fixup_parse_tree(node)
+
+        # find metaclasses, keep the last one
+        last_metaclass = None
+        for suite, i, stmt in find_metas(node):
+            last_metaclass = stmt
+            stmt.remove()
+
+        text_type = node.children[0].type # always Leaf(nnn, 'class')
+
+        # figure out what kind of classdef we have
+        if len(node.children) == 7:
+            # Node(classdef, ['class', 'name', '(', arglist, ')', ':', suite])
+            #                 0        1       2    3        4    5    6
+            if node.children[3].type == syms.arglist:
+                arglist = node.children[3]
+            # Node(classdef, ['class', 'name', '(', 'Parent', ')', ':', suite])
+            else:
+                parent = node.children[3].clone()
+                arglist = Node(syms.arglist, [parent])
+                node.set_child(3, arglist)
+        elif len(node.children) == 6:
+            # Node(classdef, ['class', 'name', '(',  ')', ':', suite])
+            #                 0        1       2     3    4    5
+            arglist = Node(syms.arglist, [])
+            node.insert_child(3, arglist)
+        elif len(node.children) == 4:
+            # Node(classdef, ['class', 'name', ':', suite])
+            #                 0        1       2    3
+            arglist = Node(syms.arglist, [])
+            node.insert_child(2, Leaf(token.RPAR, u')'))
+            node.insert_child(2, arglist)
+            node.insert_child(2, Leaf(token.LPAR, u'('))
+        else:
+            raise ValueError("Unexpected class definition")
+
+        # now stick the metaclass in the arglist
+        meta_txt = last_metaclass.children[0].children[0]
+        meta_txt.value = 'metaclass'
+        orig_meta_prefix = meta_txt.prefix
+
+        if arglist.children:
+            arglist.append_child(Leaf(token.COMMA, u','))
+            meta_txt.prefix = u' '
+        else:
+            meta_txt.prefix = u''
+
+        # compact the expression "metaclass = Meta" -> "metaclass=Meta"
+        expr_stmt = last_metaclass.children[0]
+        assert expr_stmt.type == syms.expr_stmt
+        expr_stmt.children[1].prefix = u''
+        expr_stmt.children[2].prefix = u''
+
+        arglist.append_child(last_metaclass)
+
+        fixup_indent(suite)
+
+        # check for empty suite
+        if not suite.children:
+            # one-liner that was just __metaclass__
+            suite.remove()
+            pass_leaf = Leaf(text_type, u'pass')
+            pass_leaf.prefix = orig_meta_prefix
+            node.append_child(pass_leaf)
+            node.append_child(Leaf(token.NEWLINE, u'\n'))
+
+        elif len(suite.children) > 1 and \
+                 (suite.children[-2].type == token.INDENT and
+                  suite.children[-1].type == token.DEDENT):
+            # there was only one line in the class body and it was __metaclass__
+            pass_leaf = Leaf(text_type, u'pass')
+            suite.insert_child(-1, pass_leaf)
+            suite.insert_child(-1, Leaf(token.NEWLINE, u'\n'))
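To see the normalization steps above end to end, here is a small illustrative run (stock lib2to3 assumed; C, Base and Meta are invented names):

    from lib2to3.refactor import RefactoringTool

    tool = RefactoringTool(["lib2to3.fixes.fix_metaclass"])
    before = (
        "class C(Base):\n"
        "    __metaclass__ = Meta\n"
        "    x = 1\n"
    )
    # The single parent is normalized into an arglist and the assignment is
    # moved into it, giving roughly 'class C(Base, metaclass=Meta):' with the
    # remaining body left as-is.
    print(tool.refactor_string(before, "<example>"))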
diff --git a/python/helpers/pydev/third_party/pep8/lib2to3/lib2to3/fixes/fix_methodattrs.py b/python/helpers/pydev/third_party/pep8/lib2to3/lib2to3/fixes/fix_methodattrs.py
new file mode 100644
index 0000000..f3c1ecf
--- /dev/null
+++ b/python/helpers/pydev/third_party/pep8/lib2to3/lib2to3/fixes/fix_methodattrs.py
@@ -0,0 +1,24 @@
+"""Fix bound method attributes (method.im_? -> method.__?__).
+"""
+# Author: Christian Heimes
+
+# Local imports
+from .. import fixer_base
+from ..fixer_util import Name
+
+MAP = {
+    "im_func" : "__func__",
+    "im_self" : "__self__",
+    "im_class" : "__self__.__class__"
+    }
+
+class FixMethodattrs(fixer_base.BaseFix):
+    BM_compatible = True
+    PATTERN = """
+    power< any+ trailer< '.' attr=('im_func' | 'im_self' | 'im_class') > any* >
+    """
+
+    def transform(self, node, results):
+        attr = results["attr"][0]
+        new = unicode(MAP[attr.value])
+        attr.replace(Name(new, prefix=attr.prefix))
diff --git a/python/helpers/pydev/third_party/pep8/lib2to3/lib2to3/fixes/fix_ne.py b/python/helpers/pydev/third_party/pep8/lib2to3/lib2to3/fixes/fix_ne.py
new file mode 100644
index 0000000..7025980
--- /dev/null
+++ b/python/helpers/pydev/third_party/pep8/lib2to3/lib2to3/fixes/fix_ne.py
@@ -0,0 +1,23 @@
+# Copyright 2006 Google, Inc. All Rights Reserved.
+# Licensed to PSF under a Contributor Agreement.
+
+"""Fixer that turns <> into !=."""
+
+# Local imports
+from .. import pytree
+from ..pgen2 import token
+from .. import fixer_base
+
+
+class FixNe(fixer_base.BaseFix):
+    # This is so simple that we don't need the pattern compiler.
+
+    _accept_type = token.NOTEQUAL
+
+    def match(self, node):
+        # Override
+        return node.value == u"<>"
+
+    def transform(self, node, results):
+        new = pytree.Leaf(token.NOTEQUAL, u"!=", prefix=node.prefix)
+        return new
diff --git a/python/helpers/pydev/third_party/pep8/lib2to3/lib2to3/fixes/fix_next.py b/python/helpers/pydev/third_party/pep8/lib2to3/lib2to3/fixes/fix_next.py
new file mode 100644
index 0000000..f021a9b
--- /dev/null
+++ b/python/helpers/pydev/third_party/pep8/lib2to3/lib2to3/fixes/fix_next.py
@@ -0,0 +1,103 @@
+"""Fixer for it.next() -> next(it), per PEP 3114."""
+# Author: Collin Winter
+
+# Things that currently aren't covered:
+#   - listcomp "next" names aren't warned
+#   - "with" statement targets aren't checked
+
+# Local imports
+from ..pgen2 import token
+from ..pygram import python_symbols as syms
+from .. import fixer_base
+from ..fixer_util import Name, Call, find_binding
+
+bind_warning = "Calls to builtin next() possibly shadowed by global binding"
+
+
+class FixNext(fixer_base.BaseFix):
+    BM_compatible = True
+    PATTERN = """
+    power< base=any+ trailer< '.' attr='next' > trailer< '(' ')' > >
+    |
+    power< head=any+ trailer< '.' attr='next' > not trailer< '(' ')' > >
+    |
+    classdef< 'class' any+ ':'
+              suite< any*
+                     funcdef< 'def'
+                              name='next'
+                              parameters< '(' NAME ')' > any+ >
+                     any* > >
+    |
+    global=global_stmt< 'global' any* 'next' any* >
+    """
+
+    order = "pre" # Pre-order tree traversal
+
+    def start_tree(self, tree, filename):
+        super(FixNext, self).start_tree(tree, filename)
+
+        n = find_binding(u'next', tree)
+        if n:
+            self.warning(n, bind_warning)
+            self.shadowed_next = True
+        else:
+            self.shadowed_next = False
+
+    def transform(self, node, results):
+        assert results
+
+        base = results.get("base")
+        attr = results.get("attr")
+        name = results.get("name")
+
+        if base:
+            if self.shadowed_next:
+                attr.replace(Name(u"__next__", prefix=attr.prefix))
+            else:
+                base = [n.clone() for n in base]
+                base[0].prefix = u""
+                node.replace(Call(Name(u"next", prefix=node.prefix), base))
+        elif name:
+            n = Name(u"__next__", prefix=name.prefix)
+            name.replace(n)
+        elif attr:
+            # We don't do this transformation if we're assigning to "x.next".
+            # Unfortunately, it doesn't seem possible to do this in PATTERN,
+            #  so it's being done here.
+            if is_assign_target(node):
+                head = results["head"]
+                if "".join([str(n) for n in head]).strip() == u'__builtin__':
+                    self.warning(node, bind_warning)
+                return
+            attr.replace(Name(u"__next__"))
+        elif "global" in results:
+            self.warning(node, bind_warning)
+            self.shadowed_next = True
+
+
+### The following functions help test if node is part of an assignment
+###  target.
+
+def is_assign_target(node):
+    assign = find_assign(node)
+    if assign is None:
+        return False
+
+    for child in assign.children:
+        if child.type == token.EQUAL:
+            return False
+        elif is_subtree(child, node):
+            return True
+    return False
+
+def find_assign(node):
+    if node.type == syms.expr_stmt:
+        return node
+    if node.type == syms.simple_stmt or node.parent is None:
+        return None
+    return find_assign(node.parent)
+
+def is_subtree(root, node):
+    if root == node:
+        return True
+    return any(is_subtree(c, node) for c in root.children)
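A short illustrative run of the call-site rewrite (stock lib2to3 assumed; `it` is an invented name):

    from lib2to3.refactor import RefactoringTool

    tool = RefactoringTool(["lib2to3.fixes.fix_next"])
    # Method-call form becomes the builtin: value = next(it)
    print(tool.refactor_string("value = it.next()\n", "<example>"))
    # A 'def next(self)' method would instead be renamed to __next__, and a
    # global binding of the name 'next' only triggers the shadowing warning.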
diff --git a/python/helpers/pydev/third_party/pep8/lib2to3/lib2to3/fixes/fix_nonzero.py b/python/helpers/pydev/third_party/pep8/lib2to3/lib2to3/fixes/fix_nonzero.py
new file mode 100644
index 0000000..ba83478
--- /dev/null
+++ b/python/helpers/pydev/third_party/pep8/lib2to3/lib2to3/fixes/fix_nonzero.py
@@ -0,0 +1,21 @@
+"""Fixer for __nonzero__ -> __bool__ methods."""
+# Author: Collin Winter
+
+# Local imports
+from .. import fixer_base
+from ..fixer_util import Name, syms
+
+class FixNonzero(fixer_base.BaseFix):
+    BM_compatible = True
+    PATTERN = """
+    classdef< 'class' any+ ':'
+              suite< any*
+                     funcdef< 'def' name='__nonzero__'
+                              parameters< '(' NAME ')' > any+ >
+                     any* > >
+    """
+
+    def transform(self, node, results):
+        name = results["name"]
+        new = Name(u"__bool__", prefix=name.prefix)
+        name.replace(new)
diff --git a/python/helpers/pydev/third_party/pep8/lib2to3/lib2to3/fixes/fix_numliterals.py b/python/helpers/pydev/third_party/pep8/lib2to3/lib2to3/fixes/fix_numliterals.py
new file mode 100644
index 0000000..b0c23f8
--- /dev/null
+++ b/python/helpers/pydev/third_party/pep8/lib2to3/lib2to3/fixes/fix_numliterals.py
@@ -0,0 +1,28 @@
+"""Fixer that turns 1L into 1, 0755 into 0o755.
+"""
+# Copyright 2007 Georg Brandl.
+# Licensed to PSF under a Contributor Agreement.
+
+# Local imports
+from ..pgen2 import token
+from .. import fixer_base
+from ..fixer_util import Number
+
+
+class FixNumliterals(fixer_base.BaseFix):
+    # This is so simple that we don't need the pattern compiler.
+
+    _accept_type = token.NUMBER
+
+    def match(self, node):
+        # Override
+        return (node.value.startswith(u"0") or node.value[-1] in u"Ll")
+
+    def transform(self, node, results):
+        val = node.value
+        if val[-1] in u'Ll':
+            val = val[:-1]
+        elif val.startswith(u'0') and val.isdigit() and len(set(val)) > 1:
+            val = u"0o" + val[1:]
+
+        return Number(val, prefix=node.prefix)
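The transform above only rewrites the literal's text; a tiny standalone restatement of that string logic (the function name is mine, purely illustrative):

    def fix_num_literal(val):
        # Mirrors FixNumliterals.transform, but on the literal text alone.
        if val[-1] in "Ll":                              # 10L -> 10
            val = val[:-1]
        elif val.startswith("0") and val.isdigit() and len(set(val)) > 1:
            val = "0o" + val[1:]                         # 0755 -> 0o755
        return val

    assert fix_num_literal("10L") == "10"
    assert fix_num_literal("0755") == "0o755"
    assert fix_num_literal("0") == "0"    # a lone zero is left alone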
diff --git a/python/helpers/pydev/third_party/pep8/lib2to3/lib2to3/fixes/fix_operator.py b/python/helpers/pydev/third_party/pep8/lib2to3/lib2to3/fixes/fix_operator.py
new file mode 100644
index 0000000..7bf2c0d
--- /dev/null
+++ b/python/helpers/pydev/third_party/pep8/lib2to3/lib2to3/fixes/fix_operator.py
@@ -0,0 +1,96 @@
+"""Fixer for operator functions.
+
+operator.isCallable(obj)       -> hasattr(obj, '__call__')
+operator.sequenceIncludes(obj) -> operator.contains(obj)
+operator.isSequenceType(obj)   -> isinstance(obj, collections.Sequence)
+operator.isMappingType(obj)    -> isinstance(obj, collections.Mapping)
+operator.isNumberType(obj)     -> isinstance(obj, numbers.Number)
+operator.repeat(obj, n)        -> operator.mul(obj, n)
+operator.irepeat(obj, n)       -> operator.imul(obj, n)
+"""
+
+# Local imports
+from lib2to3 import fixer_base
+from lib2to3.fixer_util import Call, Name, String, touch_import
+
+
+def invocation(s):
+    def dec(f):
+        f.invocation = s
+        return f
+    return dec
+
+
+class FixOperator(fixer_base.BaseFix):
+    BM_compatible = True
+    order = "pre"
+
+    methods = """
+              method=('isCallable'|'sequenceIncludes'
+                     |'isSequenceType'|'isMappingType'|'isNumberType'
+                     |'repeat'|'irepeat')
+              """
+    obj = "'(' obj=any ')'"
+    PATTERN = """
+              power< module='operator'
+                trailer< '.' %(methods)s > trailer< %(obj)s > >
+              |
+              power< %(methods)s trailer< %(obj)s > >
+              """ % dict(methods=methods, obj=obj)
+
+    def transform(self, node, results):
+        method = self._check_method(node, results)
+        if method is not None:
+            return method(node, results)
+
+    @invocation("operator.contains(%s)")
+    def _sequenceIncludes(self, node, results):
+        return self._handle_rename(node, results, u"contains")
+
+    @invocation("hasattr(%s, '__call__')")
+    def _isCallable(self, node, results):
+        obj = results["obj"]
+        args = [obj.clone(), String(u", "), String(u"'__call__'")]
+        return Call(Name(u"hasattr"), args, prefix=node.prefix)
+
+    @invocation("operator.mul(%s)")
+    def _repeat(self, node, results):
+        return self._handle_rename(node, results, u"mul")
+
+    @invocation("operator.imul(%s)")
+    def _irepeat(self, node, results):
+        return self._handle_rename(node, results, u"imul")
+
+    @invocation("isinstance(%s, collections.Sequence)")
+    def _isSequenceType(self, node, results):
+        return self._handle_type2abc(node, results, u"collections", u"Sequence")
+
+    @invocation("isinstance(%s, collections.Mapping)")
+    def _isMappingType(self, node, results):
+        return self._handle_type2abc(node, results, u"collections", u"Mapping")
+
+    @invocation("isinstance(%s, numbers.Number)")
+    def _isNumberType(self, node, results):
+        return self._handle_type2abc(node, results, u"numbers", u"Number")
+
+    def _handle_rename(self, node, results, name):
+        method = results["method"][0]
+        method.value = name
+        method.changed()
+
+    def _handle_type2abc(self, node, results, module, abc):
+        touch_import(None, module, node)
+        obj = results["obj"]
+        args = [obj.clone(), String(u", " + u".".join([module, abc]))]
+        return Call(Name(u"isinstance"), args, prefix=node.prefix)
+
+    def _check_method(self, node, results):
+        method = getattr(self, "_" + results["method"][0].value.encode("ascii"))
+        if callable(method):
+            if "module" in results:
+                return method
+            else:
+                sub = (unicode(results["obj"]),)
+                invocation_str = unicode(method.invocation) % sub
+                self.warning(node, u"You should use '%s' here." % invocation_str)
+        return None
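The table in the docstring can be reproduced directly; an illustrative run (stock lib2to3 assumed, `f` and `s` invented):

    from lib2to3.refactor import RefactoringTool

    tool = RefactoringTool(["lib2to3.fixes.fix_operator"])
    # -> hasattr(f, '__call__')
    print(tool.refactor_string("operator.isCallable(f)\n", "<example>"))
    # -> isinstance(s, collections.Sequence), plus an added 'import collections'
    print(tool.refactor_string("operator.isSequenceType(s)\n", "<example>"))
    # Unqualified calls such as isCallable(f) are only warned about, not rewritten.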
diff --git a/python/helpers/pydev/third_party/pep8/lib2to3/lib2to3/fixes/fix_paren.py b/python/helpers/pydev/third_party/pep8/lib2to3/lib2to3/fixes/fix_paren.py
new file mode 100644
index 0000000..8650cd9
--- /dev/null
+++ b/python/helpers/pydev/third_party/pep8/lib2to3/lib2to3/fixes/fix_paren.py
@@ -0,0 +1,44 @@
+"""Fixer that addes parentheses where they are required
+
+This converts ``[x for x in 1, 2]`` to ``[x for x in (1, 2)]``."""
+
+# By Taek Joo Kim and Benjamin Peterson
+
+# Local imports
+from .. import fixer_base
+from ..fixer_util import LParen, RParen
+
+# XXX This doesn't support nested for loops like [x for x in 1, 2 for x in 1, 2]
+class FixParen(fixer_base.BaseFix):
+    BM_compatible = True
+
+    PATTERN = """
+        atom< ('[' | '(')
+            (listmaker< any
+                comp_for<
+                    'for' NAME 'in'
+                    target=testlist_safe< any (',' any)+ [',']
+                     >
+                    [any]
+                >
+            >
+            |
+            testlist_gexp< any
+                comp_for<
+                    'for' NAME 'in'
+                    target=testlist_safe< any (',' any)+ [',']
+                     >
+                    [any]
+                >
+            >)
+        (']' | ')') >
+    """
+
+    def transform(self, node, results):
+        target = results["target"]
+
+        lparen = LParen()
+        lparen.prefix = target.prefix
+        target.prefix = u"" # Make it hug the parentheses
+        target.insert_child(0, lparen)
+        target.append_child(RParen())
diff --git a/python/helpers/pydev/third_party/pep8/lib2to3/lib2to3/fixes/fix_print.py b/python/helpers/pydev/third_party/pep8/lib2to3/lib2to3/fixes/fix_print.py
new file mode 100644
index 0000000..98786b3
--- /dev/null
+++ b/python/helpers/pydev/third_party/pep8/lib2to3/lib2to3/fixes/fix_print.py
@@ -0,0 +1,87 @@
+# Copyright 2006 Google, Inc. All Rights Reserved.
+# Licensed to PSF under a Contributor Agreement.
+
+"""Fixer for print.
+
+Change:
+    'print'          into 'print()'
+    'print ...'      into 'print(...)'
+    'print ... ,'    into 'print(..., end=" ")'
+    'print >>x, ...' into 'print(..., file=x)'
+
+No changes are applied if print_function is imported from __future__
+
+"""
+
+# Local imports
+from .. import patcomp
+from .. import pytree
+from ..pgen2 import token
+from .. import fixer_base
+from ..fixer_util import Name, Call, Comma, String, is_tuple
+
+
+parend_expr = patcomp.compile_pattern(
+              """atom< '(' [atom|STRING|NAME] ')' >"""
+              )
+
+
+class FixPrint(fixer_base.BaseFix):
+
+    BM_compatible = True
+
+    PATTERN = """
+              simple_stmt< any* bare='print' any* > | print_stmt
+              """
+
+    def transform(self, node, results):
+        assert results
+
+        bare_print = results.get("bare")
+
+        if bare_print:
+            # Special-case print all by itself
+            bare_print.replace(Call(Name(u"print"), [],
+                               prefix=bare_print.prefix))
+            return
+        assert node.children[0] == Name(u"print")
+        args = node.children[1:]
+        if len(args) == 1 and parend_expr.match(args[0]):
+            # We don't want to keep sticking parens around an
+            # already-parenthesised expression.
+            return
+
+        sep = end = file = None
+        if args and args[-1] == Comma():
+            args = args[:-1]
+            end = " "
+        if args and args[0] == pytree.Leaf(token.RIGHTSHIFT, u">>"):
+            assert len(args) >= 2
+            file = args[1].clone()
+            args = args[3:] # Strip a possible comma after the file expression
+        # Now synthesize a print(args, sep=..., end=..., file=...) node.
+        l_args = [arg.clone() for arg in args]
+        if l_args:
+            l_args[0].prefix = u""
+        if sep is not None or end is not None or file is not None:
+            if sep is not None:
+                self.add_kwarg(l_args, u"sep", String(repr(sep)))
+            if end is not None:
+                self.add_kwarg(l_args, u"end", String(repr(end)))
+            if file is not None:
+                self.add_kwarg(l_args, u"file", file)
+        n_stmt = Call(Name(u"print"), l_args)
+        n_stmt.prefix = node.prefix
+        return n_stmt
+
+    def add_kwarg(self, l_nodes, s_kwd, n_expr):
+        # XXX All this prefix-setting may lose comments (though rarely)
+        n_expr.prefix = u""
+        n_argument = pytree.Node(self.syms.argument,
+                                 (Name(s_kwd),
+                                  pytree.Leaf(token.EQUAL, u"="),
+                                  n_expr))
+        if l_nodes:
+            l_nodes.append(Comma())
+            n_argument.prefix = u" "
+        l_nodes.append(n_argument)
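An illustrative run covering the '>>file' and trailing-comma cases from the docstring (stock lib2to3 assumed; by default the tool's grammar still treats print as a statement, so the Python 2 form parses):

    from lib2to3.refactor import RefactoringTool

    tool = RefactoringTool(["lib2to3.fixes.fix_print"])
    # -> print("boom", end=' ', file=sys.stderr)
    print(tool.refactor_string('print >>sys.stderr, "boom",\n', "<example>"))
    # Bare print -> print()
    print(tool.refactor_string("print\n", "<example>"))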
diff --git a/python/helpers/pydev/third_party/pep8/lib2to3/lib2to3/fixes/fix_raise.py b/python/helpers/pydev/third_party/pep8/lib2to3/lib2to3/fixes/fix_raise.py
new file mode 100644
index 0000000..b958ba0
--- /dev/null
+++ b/python/helpers/pydev/third_party/pep8/lib2to3/lib2to3/fixes/fix_raise.py
@@ -0,0 +1,90 @@
+"""Fixer for 'raise E, V, T'
+
+raise         -> raise
+raise E       -> raise E
+raise E, V    -> raise E(V)
+raise E, V, T -> raise E(V).with_traceback(T)
+raise E, None, T -> raise E.with_traceback(T)
+
+raise (((E, E'), E''), E'''), V -> raise E(V)
+raise "foo", V, T               -> warns about string exceptions
+
+
+CAVEATS:
+1) "raise E, V" will be incorrectly translated if V is an exception
+   instance. The correct Python 3 idiom is
+
+        raise E from V
+
+   but since we can't detect instance-hood by syntax alone and since
+   any client code would have to be changed as well, we don't automate
+   this.
+"""
+# Author: Collin Winter
+
+# Local imports
+from .. import pytree
+from ..pgen2 import token
+from .. import fixer_base
+from ..fixer_util import Name, Call, Attr, ArgList, is_tuple
+
+class FixRaise(fixer_base.BaseFix):
+
+    BM_compatible = True
+    PATTERN = """
+    raise_stmt< 'raise' exc=any [',' val=any [',' tb=any]] >
+    """
+
+    def transform(self, node, results):
+        syms = self.syms
+
+        exc = results["exc"].clone()
+        if exc.type == token.STRING:
+            msg = "Python 3 does not support string exceptions"
+            self.cannot_convert(node, msg)
+            return
+
+        # Python 2 supports
+        #  raise ((((E1, E2), E3), E4), E5), V
+        # as a synonym for
+        #  raise E1, V
+        # Since Python 3 will not support this, we recurse down any tuple
+        # literals, always taking the first element.
+        if is_tuple(exc):
+            while is_tuple(exc):
+                # exc.children[1:-1] is the unparenthesized tuple
+                # exc.children[1].children[0] is the first element of the tuple
+                exc = exc.children[1].children[0].clone()
+            exc.prefix = u" "
+
+        if "val" not in results:
+            # One-argument raise
+            new = pytree.Node(syms.raise_stmt, [Name(u"raise"), exc])
+            new.prefix = node.prefix
+            return new
+
+        val = results["val"].clone()
+        if is_tuple(val):
+            args = [c.clone() for c in val.children[1:-1]]
+        else:
+            val.prefix = u""
+            args = [val]
+
+        if "tb" in results:
+            tb = results["tb"].clone()
+            tb.prefix = u""
+
+            e = exc
+            # If there's a traceback and None is passed as the value, then don't
+            # add a call, since the user probably just wants to add a
+            # traceback. See issue #9661.
+            if val.type != token.NAME or val.value != u"None":
+                e = Call(exc, args)
+            with_tb = Attr(e, Name(u'with_traceback')) + [ArgList([tb])]
+            new = pytree.Node(syms.simple_stmt, [Name(u"raise")] + with_tb)
+            new.prefix = node.prefix
+            return new
+        else:
+            return pytree.Node(syms.raise_stmt,
+                               [Name(u"raise"), Call(exc, args)],
+                               prefix=node.prefix)
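An illustrative before/after for the two main raise forms handled above (stock lib2to3 assumed; E, V, T are invented names):

    from lib2to3.refactor import RefactoringTool

    tool = RefactoringTool(["lib2to3.fixes.fix_raise"])
    # raise E, V    -> raise E(V)
    print(tool.refactor_string("raise ValueError, 'bad value'\n", "<example>"))
    # raise E, V, T -> raise E(V).with_traceback(T)
    print(tool.refactor_string("raise E, V, T\n", "<example>"))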
diff --git a/python/helpers/pydev/third_party/pep8/lib2to3/lib2to3/fixes/fix_raw_input.py b/python/helpers/pydev/third_party/pep8/lib2to3/lib2to3/fixes/fix_raw_input.py
new file mode 100644
index 0000000..3a73b81
--- /dev/null
+++ b/python/helpers/pydev/third_party/pep8/lib2to3/lib2to3/fixes/fix_raw_input.py
@@ -0,0 +1,17 @@
+"""Fixer that changes raw_input(...) into input(...)."""
+# Author: Andre Roberge
+
+# Local imports
+from .. import fixer_base
+from ..fixer_util import Name
+
+class FixRawInput(fixer_base.BaseFix):
+
+    BM_compatible = True
+    PATTERN = """
+              power< name='raw_input' trailer< '(' [any] ')' > any* >
+              """
+
+    def transform(self, node, results):
+        name = results["name"]
+        name.replace(Name(u"input", prefix=name.prefix))
diff --git a/python/helpers/pydev/third_party/pep8/lib2to3/lib2to3/fixes/fix_reduce.py b/python/helpers/pydev/third_party/pep8/lib2to3/lib2to3/fixes/fix_reduce.py
new file mode 100644
index 0000000..6bd785c
--- /dev/null
+++ b/python/helpers/pydev/third_party/pep8/lib2to3/lib2to3/fixes/fix_reduce.py
@@ -0,0 +1,35 @@
+# Copyright 2008 Armin Ronacher.
+# Licensed to PSF under a Contributor Agreement.
+
+"""Fixer for reduce().
+
+Makes sure reduce() is imported from the functools module if reduce() is
+used in the module being fixed.
+"""
+
+from lib2to3 import fixer_base
+from lib2to3.fixer_util import touch_import
+
+
+
+class FixReduce(fixer_base.BaseFix):
+
+    BM_compatible = True
+    order = "pre"
+
+    PATTERN = """
+    power< 'reduce'
+        trailer< '('
+            arglist< (
+                (not(argument<any '=' any>) any ','
+                 not(argument<any '=' any>) any) |
+                (not(argument<any '=' any>) any ','
+                 not(argument<any '=' any>) any ','
+                 not(argument<any '=' any>) any)
+            ) >
+        ')' >
+    >
+    """
+
+    def transform(self, node, results):
+        touch_import(u'functools', u'reduce', node)
diff --git a/python/helpers/pydev/third_party/pep8/lib2to3/lib2to3/fixes/fix_renames.py b/python/helpers/pydev/third_party/pep8/lib2to3/lib2to3/fixes/fix_renames.py
new file mode 100644
index 0000000..4bcce8c
--- /dev/null
+++ b/python/helpers/pydev/third_party/pep8/lib2to3/lib2to3/fixes/fix_renames.py
@@ -0,0 +1,70 @@
+"""Fix incompatible renames
+
+Fixes:
+  * sys.maxint -> sys.maxsize
+"""
+# Author: Christian Heimes
+# based on Collin Winter's fix_import
+
+# Local imports
+from .. import fixer_base
+from ..fixer_util import Name, attr_chain
+
+MAPPING = {"sys":  {"maxint" : "maxsize"},
+          }
+LOOKUP = {}
+
+def alternates(members):
+    return "(" + "|".join(map(repr, members)) + ")"
+
+
+def build_pattern():
+    #bare = set()
+    for module, replace in MAPPING.items():
+        for old_attr, new_attr in replace.items():
+            LOOKUP[(module, old_attr)] = new_attr
+            #bare.add(module)
+            #bare.add(old_attr)
+            #yield """
+            #      import_name< 'import' (module=%r
+            #          | dotted_as_names< any* module=%r any* >) >
+            #      """ % (module, module)
+            yield """
+                  import_from< 'from' module_name=%r 'import'
+                      ( attr_name=%r | import_as_name< attr_name=%r 'as' any >) >
+                  """ % (module, old_attr, old_attr)
+            yield """
+                  power< module_name=%r trailer< '.' attr_name=%r > any* >
+                  """ % (module, old_attr)
+    #yield """bare_name=%s""" % alternates(bare)
+
+
+class FixRenames(fixer_base.BaseFix):
+    BM_compatible = True
+    PATTERN = "|".join(build_pattern())
+
+    order = "pre" # Pre-order tree traversal
+
+    # Don't match the node if it's within another match
+    def match(self, node):
+        match = super(FixRenames, self).match
+        results = match(node)
+        if results:
+            if any(match(obj) for obj in attr_chain(node, "parent")):
+                return False
+            return results
+        return False
+
+    #def start_tree(self, tree, filename):
+    #    super(FixRenames, self).start_tree(tree, filename)
+    #    self.replace = {}
+
+    def transform(self, node, results):
+        mod_name = results.get("module_name")
+        attr_name = results.get("attr_name")
+        #bare_name = results.get("bare_name")
+        #import_mod = results.get("module")
+
+        if mod_name and attr_name:
+            new_attr = unicode(LOOKUP[(mod_name.value, attr_name.value)])
+            attr_name.replace(Name(new_attr, prefix=attr_name.prefix))
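Only the sys.maxint -> sys.maxsize rename is active in the MAPPING above; an illustrative run of both pattern alternatives (stock lib2to3 assumed):

    from lib2to3.refactor import RefactoringTool

    tool = RefactoringTool(["lib2to3.fixes.fix_renames"])
    # attribute-access form -> limit = sys.maxsize
    print(tool.refactor_string("limit = sys.maxint\n", "<example>"))
    # from-import form -> from sys import maxsize
    print(tool.refactor_string("from sys import maxint\n", "<example>"))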
diff --git a/python/helpers/pydev/third_party/pep8/lib2to3/lib2to3/fixes/fix_repr.py b/python/helpers/pydev/third_party/pep8/lib2to3/lib2to3/fixes/fix_repr.py
new file mode 100644
index 0000000..f343656
--- /dev/null
+++ b/python/helpers/pydev/third_party/pep8/lib2to3/lib2to3/fixes/fix_repr.py
@@ -0,0 +1,23 @@
+# Copyright 2006 Google, Inc. All Rights Reserved.
+# Licensed to PSF under a Contributor Agreement.
+
+"""Fixer that transforms `xyzzy` into repr(xyzzy)."""
+
+# Local imports
+from .. import fixer_base
+from ..fixer_util import Call, Name, parenthesize
+
+
+class FixRepr(fixer_base.BaseFix):
+
+    BM_compatible = True
+    PATTERN = """
+              atom < '`' expr=any '`' >
+              """
+
+    def transform(self, node, results):
+        expr = results["expr"].clone()
+
+        if expr.type == self.syms.testlist1:
+            expr = parenthesize(expr)
+        return Call(Name(u"repr"), [expr], prefix=node.prefix)
diff --git a/python/helpers/pydev/third_party/pep8/lib2to3/lib2to3/fixes/fix_set_literal.py b/python/helpers/pydev/third_party/pep8/lib2to3/lib2to3/fixes/fix_set_literal.py
new file mode 100644
index 0000000..d3d38ec
--- /dev/null
+++ b/python/helpers/pydev/third_party/pep8/lib2to3/lib2to3/fixes/fix_set_literal.py
@@ -0,0 +1,53 @@
+"""
+Optional fixer to transform set() calls to set literals.
+"""
+
+# Author: Benjamin Peterson
+
+from lib2to3 import fixer_base, pytree
+from lib2to3.fixer_util import token, syms
+
+
+
+class FixSetLiteral(fixer_base.BaseFix):
+
+    BM_compatible = True
+    explicit = True
+
+    PATTERN = """power< 'set' trailer< '('
+                     (atom=atom< '[' (items=listmaker< any ((',' any)* [',']) >
+                                |
+                                single=any) ']' >
+                     |
+                     atom< '(' items=testlist_gexp< any ((',' any)* [',']) > ')' >
+                     )
+                     ')' > >
+              """
+
+    def transform(self, node, results):
+        single = results.get("single")
+        if single:
+            # Make a fake listmaker
+            fake = pytree.Node(syms.listmaker, [single.clone()])
+            single.replace(fake)
+            items = fake
+        else:
+            items = results["items"]
+
+        # Build the contents of the literal
+        literal = [pytree.Leaf(token.LBRACE, u"{")]
+        literal.extend(n.clone() for n in items.children)
+        literal.append(pytree.Leaf(token.RBRACE, u"}"))
+        # Set the prefix of the right brace to that of the ')' or ']'
+        literal[-1].prefix = items.next_sibling.prefix
+        maker = pytree.Node(syms.dictsetmaker, literal)
+        maker.prefix = node.prefix
+
+        # If the original was a one tuple, we need to remove the extra comma.
+        if len(maker.children) == 4:
+            n = maker.children[2]
+            n.remove()
+            maker.children[-1].prefix = n.prefix
+
+        # Finally, replace the set call with our shiny new literal.
+        return maker
diff --git a/python/helpers/pydev/third_party/pep8/lib2to3/lib2to3/fixes/fix_standarderror.py b/python/helpers/pydev/third_party/pep8/lib2to3/lib2to3/fixes/fix_standarderror.py
new file mode 100644
index 0000000..6cad511
--- /dev/null
+++ b/python/helpers/pydev/third_party/pep8/lib2to3/lib2to3/fixes/fix_standarderror.py
@@ -0,0 +1,18 @@
+# Copyright 2007 Google, Inc. All Rights Reserved.
+# Licensed to PSF under a Contributor Agreement.
+
+"""Fixer for StandardError -> Exception."""
+
+# Local imports
+from .. import fixer_base
+from ..fixer_util import Name
+
+
+class FixStandarderror(fixer_base.BaseFix):
+    BM_compatible = True
+    PATTERN = """
+              'StandardError'
+              """
+
+    def transform(self, node, results):
+        return Name(u"Exception", prefix=node.prefix)
diff --git a/python/helpers/pydev/third_party/pep8/lib2to3/lib2to3/fixes/fix_sys_exc.py b/python/helpers/pydev/third_party/pep8/lib2to3/lib2to3/fixes/fix_sys_exc.py
new file mode 100644
index 0000000..2ecca2b
--- /dev/null
+++ b/python/helpers/pydev/third_party/pep8/lib2to3/lib2to3/fixes/fix_sys_exc.py
@@ -0,0 +1,30 @@
+"""Fixer for sys.exc_{type, value, traceback}
+
+sys.exc_type -> sys.exc_info()[0]
+sys.exc_value -> sys.exc_info()[1]
+sys.exc_traceback -> sys.exc_info()[2]
+"""
+
+# By Jeff Balogh and Benjamin Peterson
+
+# Local imports
+from .. import fixer_base
+from ..fixer_util import Attr, Call, Name, Number, Subscript, Node, syms
+
+class FixSysExc(fixer_base.BaseFix):
+    # This order matches the ordering of sys.exc_info().
+    exc_info = [u"exc_type", u"exc_value", u"exc_traceback"]
+    BM_compatible = True
+    PATTERN = """
+              power< 'sys' trailer< dot='.' attribute=(%s) > >
+              """ % '|'.join("'%s'" % e for e in exc_info)
+
+    def transform(self, node, results):
+        sys_attr = results["attribute"][0]
+        index = Number(self.exc_info.index(sys_attr.value))
+
+        call = Call(Name(u"exc_info"), prefix=sys_attr.prefix)
+        attr = Attr(Name(u"sys"), call)
+        attr[1].children[0].prefix = results["dot"].prefix
+        attr.append(Subscript(index))
+        return Node(syms.power, attr, prefix=node.prefix)
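A one-line illustrative check of the index mapping above (stock lib2to3 assumed):

    from lib2to3.refactor import RefactoringTool

    tool = RefactoringTool(["lib2to3.fixes.fix_sys_exc"])
    # exc_traceback is index 2 in exc_info -> tb = sys.exc_info()[2]
    print(tool.refactor_string("tb = sys.exc_traceback\n", "<example>"))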
diff --git a/python/helpers/pydev/third_party/pep8/lib2to3/lib2to3/fixes/fix_throw.py b/python/helpers/pydev/third_party/pep8/lib2to3/lib2to3/fixes/fix_throw.py
new file mode 100644
index 0000000..1468d89
--- /dev/null
+++ b/python/helpers/pydev/third_party/pep8/lib2to3/lib2to3/fixes/fix_throw.py
@@ -0,0 +1,56 @@
+"""Fixer for generator.throw(E, V, T).
+
+g.throw(E)       -> g.throw(E)
+g.throw(E, V)    -> g.throw(E(V))
+g.throw(E, V, T) -> g.throw(E(V).with_traceback(T))
+
+g.throw("foo"[, V[, T]]) will warn about string exceptions."""
+# Author: Collin Winter
+
+# Local imports
+from .. import pytree
+from ..pgen2 import token
+from .. import fixer_base
+from ..fixer_util import Name, Call, ArgList, Attr, is_tuple
+
+class FixThrow(fixer_base.BaseFix):
+    BM_compatible = True
+    PATTERN = """
+    power< any trailer< '.' 'throw' >
+           trailer< '(' args=arglist< exc=any ',' val=any [',' tb=any] > ')' >
+    >
+    |
+    power< any trailer< '.' 'throw' > trailer< '(' exc=any ')' > >
+    """
+
+    def transform(self, node, results):
+        syms = self.syms
+
+        exc = results["exc"].clone()
+        if exc.type is token.STRING:
+            self.cannot_convert(node, "Python 3 does not support string exceptions")
+            return
+
+        # Leave "g.throw(E)" alone
+        val = results.get(u"val")
+        if val is None:
+            return
+
+        val = val.clone()
+        if is_tuple(val):
+            args = [c.clone() for c in val.children[1:-1]]
+        else:
+            val.prefix = u""
+            args = [val]
+
+        throw_args = results["args"]
+
+        if "tb" in results:
+            tb = results["tb"].clone()
+            tb.prefix = u""
+
+            e = Call(exc, args)
+            with_tb = Attr(e, Name(u'with_traceback')) + [ArgList([tb])]
+            throw_args.replace(pytree.Node(syms.power, with_tb))
+        else:
+            throw_args.replace(Call(exc, args))
diff --git a/python/helpers/pydev/third_party/pep8/lib2to3/lib2to3/fixes/fix_tuple_params.py b/python/helpers/pydev/third_party/pep8/lib2to3/lib2to3/fixes/fix_tuple_params.py
new file mode 100644
index 0000000..6361717
--- /dev/null
+++ b/python/helpers/pydev/third_party/pep8/lib2to3/lib2to3/fixes/fix_tuple_params.py
@@ -0,0 +1,175 @@
+"""Fixer for function definitions with tuple parameters.
+
+def func(((a, b), c), d):
+    ...
+
+    ->
+
+def func(x, d):
+    ((a, b), c) = x
+    ...
+
+It also supports lambdas:
+
+    lambda (x, y): x + y -> lambda t: t[0] + t[1]
+
+    # The parens are a syntax error in Python 3
+    lambda (x): x + y -> lambda x: x + y
+"""
+# Author: Collin Winter
+
+# Local imports
+from .. import pytree
+from ..pgen2 import token
+from .. import fixer_base
+from ..fixer_util import Assign, Name, Newline, Number, Subscript, syms
+
+def is_docstring(stmt):
+    return isinstance(stmt, pytree.Node) and \
+           stmt.children[0].type == token.STRING
+
+class FixTupleParams(fixer_base.BaseFix):
+    run_order = 4 #use a lower order since lambda is part of other
+                  #patterns
+    BM_compatible = True
+
+    PATTERN = """
+              funcdef< 'def' any parameters< '(' args=any ')' >
+                       ['->' any] ':' suite=any+ >
+              |
+              lambda=
+              lambdef< 'lambda' args=vfpdef< '(' inner=any ')' >
+                       ':' body=any
+              >
+              """
+
+    def transform(self, node, results):
+        if "lambda" in results:
+            return self.transform_lambda(node, results)
+
+        new_lines = []
+        suite = results["suite"]
+        args = results["args"]
+        # This crap is so "def foo(...): x = 5; y = 7" is handled correctly.
+        # TODO(cwinter): suite-cleanup
+        if suite[0].children[1].type == token.INDENT:
+            start = 2
+            indent = suite[0].children[1].value
+            end = Newline()
+        else:
+            start = 0
+            indent = u"; "
+            end = pytree.Leaf(token.INDENT, u"")
+
+        # We need access to self for new_name(), and making this a method
+        #  doesn't feel right. Closing over self and new_lines makes the
+        #  code below cleaner.
+        def handle_tuple(tuple_arg, add_prefix=False):
+            n = Name(self.new_name())
+            arg = tuple_arg.clone()
+            arg.prefix = u""
+            stmt = Assign(arg, n.clone())
+            if add_prefix:
+                n.prefix = u" "
+            tuple_arg.replace(n)
+            new_lines.append(pytree.Node(syms.simple_stmt,
+                                         [stmt, end.clone()]))
+
+        if args.type == syms.tfpdef:
+            handle_tuple(args)
+        elif args.type == syms.typedargslist:
+            for i, arg in enumerate(args.children):
+                if arg.type == syms.tfpdef:
+                    # Without add_prefix, the emitted code is correct,
+                    #  just ugly.
+                    handle_tuple(arg, add_prefix=(i > 0))
+
+        if not new_lines:
+            return
+
+        # This isn't strictly necessary, but it plays nicely with other fixers.
+        # TODO(cwinter) get rid of this when children becomes a smart list
+        for line in new_lines:
+            line.parent = suite[0]
+
+        # TODO(cwinter) suite-cleanup
+        after = start
+        if start == 0:
+            new_lines[0].prefix = u" "
+        elif is_docstring(suite[0].children[start]):
+            new_lines[0].prefix = indent
+            after = start + 1
+
+        for line in new_lines:
+            line.parent = suite[0]
+        suite[0].children[after:after] = new_lines
+        for i in range(after+1, after+len(new_lines)+1):
+            suite[0].children[i].prefix = indent
+        suite[0].changed()
+
+    def transform_lambda(self, node, results):
+        args = results["args"]
+        body = results["body"]
+        inner = simplify_args(results["inner"])
+
+        # Replace lambda ((((x)))): x  with lambda x: x
+        if inner.type == token.NAME:
+            inner = inner.clone()
+            inner.prefix = u" "
+            args.replace(inner)
+            return
+
+        params = find_params(args)
+        to_index = map_to_index(params)
+        tup_name = self.new_name(tuple_name(params))
+
+        new_param = Name(tup_name, prefix=u" ")
+        args.replace(new_param.clone())
+        for n in body.post_order():
+            if n.type == token.NAME and n.value in to_index:
+                subscripts = [c.clone() for c in to_index[n.value]]
+                new = pytree.Node(syms.power,
+                                  [new_param.clone()] + subscripts)
+                new.prefix = n.prefix
+                n.replace(new)
+
+
+### Helper functions for transform_lambda()
+
+def simplify_args(node):
+    if node.type in (syms.vfplist, token.NAME):
+        return node
+    elif node.type == syms.vfpdef:
+        # These look like vfpdef< '(' x ')' > where x is NAME
+        # or another vfpdef instance (leading to recursion).
+        while node.type == syms.vfpdef:
+            node = node.children[1]
+        return node
+    raise RuntimeError("Received unexpected node %s" % node)
+
+def find_params(node):
+    if node.type == syms.vfpdef:
+        return find_params(node.children[1])
+    elif node.type == token.NAME:
+        return node.value
+    return [find_params(c) for c in node.children if c.type != token.COMMA]
+
+def map_to_index(param_list, prefix=[], d=None):
+    if d is None:
+        d = {}
+    for i, obj in enumerate(param_list):
+        trailer = [Subscript(Number(unicode(i)))]
+        if isinstance(obj, list):
+            map_to_index(obj, trailer, d=d)
+        else:
+            d[obj] = prefix + trailer
+    return d
+
+def tuple_name(param_list):
+    l = []
+    for obj in param_list:
+        if isinstance(obj, list):
+            l.append(tuple_name(obj))
+        else:
+            l.append(obj)
+    return u"_".join(l)
diff --git a/python/helpers/pydev/third_party/pep8/lib2to3/lib2to3/fixes/fix_types.py b/python/helpers/pydev/third_party/pep8/lib2to3/lib2to3/fixes/fix_types.py
new file mode 100644
index 0000000..fc9d495
--- /dev/null
+++ b/python/helpers/pydev/third_party/pep8/lib2to3/lib2to3/fixes/fix_types.py
@@ -0,0 +1,62 @@
+# Copyright 2007 Google, Inc. All Rights Reserved.
+# Licensed to PSF under a Contributor Agreement.
+
+"""Fixer for removing uses of the types module.
+
+These work only for the known names in the types module.  The fixed forms can
+include the "types." prefix or not; it is assumed the module is imported either as:
+
+    import types
+    from types import ... # either * or specific types
+
+The import statements are not modified.
+
+There should be another fixer that handles at least the following constants:
+
+   type([]) -> list
+   type(()) -> tuple
+   type('') -> str
+
+"""
+
+# Local imports
+from ..pgen2 import token
+from .. import fixer_base
+from ..fixer_util import Name
+
+_TYPE_MAPPING = {
+        'BooleanType' : 'bool',
+        'BufferType' : 'memoryview',
+        'ClassType' : 'type',
+        'ComplexType' : 'complex',
+        'DictType': 'dict',
+        'DictionaryType' : 'dict',
+        'EllipsisType' : 'type(Ellipsis)',
+        #'FileType' : 'io.IOBase',
+        'FloatType': 'float',
+        'IntType': 'int',
+        'ListType': 'list',
+        'LongType': 'int',
+        'ObjectType' : 'object',
+        'NoneType': 'type(None)',
+        'NotImplementedType' : 'type(NotImplemented)',
+        'SliceType' : 'slice',
+        'StringType': 'bytes', # XXX ?
+        'StringTypes' : 'str', # XXX ?
+        'TupleType': 'tuple',
+        'TypeType' : 'type',
+        'UnicodeType': 'str',
+        'XRangeType' : 'range',
+    }
+
+_pats = ["power< 'types' trailer< '.' name='%s' > >" % t for t in _TYPE_MAPPING]
+
+class FixTypes(fixer_base.BaseFix):
+    BM_compatible = True
+    PATTERN = '|'.join(_pats)
+
+    def transform(self, node, results):
+        new_value = unicode(_TYPE_MAPPING.get(results["name"].value))
+        if new_value:
+            return Name(new_value, prefix=node.prefix)
+        return None
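A couple of illustrative lookups from _TYPE_MAPPING in action (stock lib2to3 assumed; n is an invented name):

    from lib2to3.refactor import RefactoringTool

    tool = RefactoringTool(["lib2to3.fixes.fix_types"])
    # types.IntType -> int
    print(tool.refactor_string("ok = isinstance(n, types.IntType)\n", "<example>"))
    # types.NoneType -> type(None)
    print(tool.refactor_string("null = types.NoneType\n", "<example>"))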
diff --git a/python/helpers/pydev/third_party/pep8/lib2to3/lib2to3/fixes/fix_unicode.py b/python/helpers/pydev/third_party/pep8/lib2to3/lib2to3/fixes/fix_unicode.py
new file mode 100644
index 0000000..2d776f6
--- /dev/null
+++ b/python/helpers/pydev/third_party/pep8/lib2to3/lib2to3/fixes/fix_unicode.py
@@ -0,0 +1,42 @@
+r"""Fixer for unicode.
+
+* Changes unicode to str and unichr to chr.
+
+* If "...\u..." is not unicode literal change it into "...\\u...".
+
+* Change u"..." into "...".
+
+"""
+
+from ..pgen2 import token
+from .. import fixer_base
+
+_mapping = {u"unichr" : u"chr", u"unicode" : u"str"}
+
+class FixUnicode(fixer_base.BaseFix):
+    BM_compatible = True
+    PATTERN = "STRING | 'unicode' | 'unichr'"
+
+    def start_tree(self, tree, filename):
+        super(FixUnicode, self).start_tree(tree, filename)
+        self.unicode_literals = 'unicode_literals' in tree.future_features
+
+    def transform(self, node, results):
+        if node.type == token.NAME:
+            new = node.clone()
+            new.value = _mapping[node.value]
+            return new
+        elif node.type == token.STRING:
+            val = node.value
+            if not self.unicode_literals and val[0] in u'\'"' and u'\\' in val:
+                val = ur'\\'.join([
+                    v.replace(u'\\u', ur'\\u').replace(u'\\U', ur'\\U')
+                    for v in val.split(ur'\\')
+                ])
+            if val[0] in u'uU':
+                val = val[1:]
+            if val == node.value:
+                return node
+            new = node.clone()
+            new.value = val
+            return new
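The three bullets of the docstring, illustrated (stock lib2to3 assumed; the fixer rewrites both names and string literals):

    from lib2to3.refactor import RefactoringTool

    tool = RefactoringTool(["lib2to3.fixes.fix_unicode"])
    print(tool.refactor_string("s = unicode(x)\n", "<example>"))   # s = str(x)
    print(tool.refactor_string("c = unichr(65)\n", "<example>"))   # c = chr(65)
    print(tool.refactor_string('t = u"text"\n', "<example>"))      # t = "text"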
diff --git a/python/helpers/pydev/third_party/pep8/lib2to3/lib2to3/fixes/fix_urllib.py b/python/helpers/pydev/third_party/pep8/lib2to3/lib2to3/fixes/fix_urllib.py
new file mode 100644
index 0000000..34e1b27
--- /dev/null
+++ b/python/helpers/pydev/third_party/pep8/lib2to3/lib2to3/fixes/fix_urllib.py
@@ -0,0 +1,197 @@
+"""Fix changes imports of urllib which are now incompatible.
+   This is rather similar to fix_imports, but because of the more
+   complex nature of the fixing for urllib, it has its own fixer.
+"""
+# Author: Nick Edds
+
+# Local imports
+from lib2to3.fixes.fix_imports import alternates, FixImports
+from lib2to3 import fixer_base
+from lib2to3.fixer_util import (Name, Comma, FromImport, Newline,
+                                find_indentation, Node, syms)
+
+MAPPING = {"urllib":  [
+                ("urllib.request",
+                    ["URLopener", "FancyURLopener", "urlretrieve",
+                     "_urlopener", "urlopen", "urlcleanup",
+                     "pathname2url", "url2pathname"]),
+                ("urllib.parse",
+                    ["quote", "quote_plus", "unquote", "unquote_plus",
+                     "urlencode", "splitattr", "splithost", "splitnport",
+                     "splitpasswd", "splitport", "splitquery", "splittag",
+                     "splittype", "splituser", "splitvalue", ]),
+                ("urllib.error",
+                    ["ContentTooShortError"])],
+           "urllib2" : [
+                ("urllib.request",
+                    ["urlopen", "install_opener", "build_opener",
+                     "Request", "OpenerDirector", "BaseHandler",
+                     "HTTPDefaultErrorHandler", "HTTPRedirectHandler",
+                     "HTTPCookieProcessor", "ProxyHandler",
+                     "HTTPPasswordMgr",
+                     "HTTPPasswordMgrWithDefaultRealm",
+                     "AbstractBasicAuthHandler",
+                     "HTTPBasicAuthHandler", "ProxyBasicAuthHandler",
+                     "AbstractDigestAuthHandler",
+                     "HTTPDigestAuthHandler", "ProxyDigestAuthHandler",
+                     "HTTPHandler", "HTTPSHandler", "FileHandler",
+                     "FTPHandler", "CacheFTPHandler",
+                     "UnknownHandler"]),
+                ("urllib.error",
+                    ["URLError", "HTTPError"]),
+           ]
+}
+
+# Duplicate the url parsing functions for urllib2.
+MAPPING["urllib2"].append(MAPPING["urllib"][1])
+
+
+def build_pattern():
+    bare = set()
+    for old_module, changes in MAPPING.items():
+        for change in changes:
+            new_module, members = change
+            members = alternates(members)
+            yield """import_name< 'import' (module=%r
+                                  | dotted_as_names< any* module=%r any* >) >
+                  """ % (old_module, old_module)
+            yield """import_from< 'from' mod_member=%r 'import'
+                       ( member=%s | import_as_name< member=%s 'as' any > |
+                         import_as_names< members=any*  >) >
+                  """ % (old_module, members, members)
+            yield """import_from< 'from' module_star=%r 'import' star='*' >
+                  """ % old_module
+            yield """import_name< 'import'
+                                  dotted_as_name< module_as=%r 'as' any > >
+                  """ % old_module
+            # bare_with_attr has a special significance for FixImports.match().
+            yield """power< bare_with_attr=%r trailer< '.' member=%s > any* >
+                  """ % (old_module, members)
+
+
+class FixUrllib(FixImports):
+
+    def build_pattern(self):
+        return "|".join(build_pattern())
+
+    def transform_import(self, node, results):
+        """Transform for the basic import case. Replaces the old
+           import name with a comma separated list of its
+           replacements.
+        """
+        import_mod = results.get("module")
+        pref = import_mod.prefix
+
+        names = []
+
+        # create a Node list of the replacement modules
+        for name in MAPPING[import_mod.value][:-1]:
+            names.extend([Name(name[0], prefix=pref), Comma()])
+        names.append(Name(MAPPING[import_mod.value][-1][0], prefix=pref))
+        import_mod.replace(names)
+
+    def transform_member(self, node, results):
+        """Transform for imports of specific module elements. Replaces
+           the module to be imported from with the appropriate new
+           module.
+        """
+        mod_member = results.get("mod_member")
+        pref = mod_member.prefix
+        member = results.get("member")
+
+        # Simple case with only a single member being imported
+        if member:
+            # this may be a list of length one, or just a node
+            if isinstance(member, list):
+                member = member[0]
+            new_name = None
+            for change in MAPPING[mod_member.value]:
+                if member.value in change[1]:
+                    new_name = change[0]
+                    break
+            if new_name:
+                mod_member.replace(Name(new_name, prefix=pref))
+            else:
+                self.cannot_convert(node, "This is an invalid module element")
+
+        # Multiple members being imported
+        else:
+            # a dictionary for replacements, order matters
+            modules = []
+            mod_dict = {}
+            members = results["members"]
+            for member in members:
+                # we only care about the actual members
+                if member.type == syms.import_as_name:
+                    as_name = member.children[2].value
+                    member_name = member.children[0].value
+                else:
+                    member_name = member.value
+                    as_name = None
+                if member_name != u",":
+                    for change in MAPPING[mod_member.value]:
+                        if member_name in change[1]:
+                            if change[0] not in mod_dict:
+                                modules.append(change[0])
+                            mod_dict.setdefault(change[0], []).append(member)
+
+            new_nodes = []
+            indentation = find_indentation(node)
+            first = True
+            def handle_name(name, prefix):
+                if name.type == syms.import_as_name:
+                    kids = [Name(name.children[0].value, prefix=prefix),
+                            name.children[1].clone(),
+                            name.children[2].clone()]
+                    return [Node(syms.import_as_name, kids)]
+                return [Name(name.value, prefix=prefix)]
+            for module in modules:
+                elts = mod_dict[module]
+                names = []
+                for elt in elts[:-1]:
+                    names.extend(handle_name(elt, pref))
+                    names.append(Comma())
+                names.extend(handle_name(elts[-1], pref))
+                new = FromImport(module, names)
+                if not first or node.parent.prefix.endswith(indentation):
+                    new.prefix = indentation
+                new_nodes.append(new)
+                first = False
+            if new_nodes:
+                nodes = []
+                for new_node in new_nodes[:-1]:
+                    nodes.extend([new_node, Newline()])
+                nodes.append(new_nodes[-1])
+                node.replace(nodes)
+            else:
+                self.cannot_convert(node, "All module elements are invalid")
+
+    def transform_dot(self, node, results):
+        """Transform for calls to module members in code."""
+        module_dot = results.get("bare_with_attr")
+        member = results.get("member")
+        new_name = None
+        if isinstance(member, list):
+            member = member[0]
+        for change in MAPPING[module_dot.value]:
+            if member.value in change[1]:
+                new_name = change[0]
+                break
+        if new_name:
+            module_dot.replace(Name(new_name,
+                                    prefix=module_dot.prefix))
+        else:
+            self.cannot_convert(node, "This is an invalid module element")
+
+    def transform(self, node, results):
+        if results.get("module"):
+            self.transform_import(node, results)
+        elif results.get("mod_member"):
+            self.transform_member(node, results)
+        elif results.get("bare_with_attr"):
+            self.transform_dot(node, results)
+        # Renaming and star imports are not supported for these modules.
+        elif results.get("module_star"):
+            self.cannot_convert(node, "Cannot handle star imports.")
+        elif results.get("module_as"):
+            self.cannot_convert(node, "This module is now multiple modules")
diff --git a/python/helpers/pydev/third_party/pep8/lib2to3/lib2to3/fixes/fix_ws_comma.py b/python/helpers/pydev/third_party/pep8/lib2to3/lib2to3/fixes/fix_ws_comma.py
new file mode 100644
index 0000000..37ff624
--- /dev/null
+++ b/python/helpers/pydev/third_party/pep8/lib2to3/lib2to3/fixes/fix_ws_comma.py
@@ -0,0 +1,39 @@
+"""Fixer that changes 'a ,b' into 'a, b'.
+
+This also changes '{a :b}' into '{a: b}', but does not touch other
+uses of colons.  It does not touch other uses of whitespace.
+
+"""
+
+from .. import pytree
+from ..pgen2 import token
+from .. import fixer_base
+
+class FixWsComma(fixer_base.BaseFix):
+
+    explicit = True # The user must ask for this fixer
+
+    PATTERN = """
+    any<(not(',') any)+ ',' ((not(',') any)+ ',')* [not(',') any]>
+    """
+
+    COMMA = pytree.Leaf(token.COMMA, u",")
+    COLON = pytree.Leaf(token.COLON, u":")
+    SEPS = (COMMA, COLON)
+
+    def transform(self, node, results):
+        new = node.clone()
+        comma = False
+        for child in new.children:
+            if child in self.SEPS:
+                prefix = child.prefix
+                if prefix.isspace() and u"\n" not in prefix:
+                    child.prefix = u""
+                comma = True
+            else:
+                if comma:
+                    prefix = child.prefix
+                    if not prefix:
+                        child.prefix = u" "
+                comma = False
+        return new
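
Because this fixer sets explicit = True, it only runs when requested by name; an illustration-only sketch, assuming the stock RefactoringTool:

    from lib2to3.refactor import RefactoringTool

    fixer = "lib2to3.fixes.fix_ws_comma"
    tool = RefactoringTool([fixer], explicit=[fixer])
    print unicode(tool.refactor_string(u"point = (1 ,2 ,3)\n", "<example>"))
    # -> point = (1, 2, 3)
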
diff --git a/python/helpers/pydev/third_party/pep8/lib2to3/lib2to3/fixes/fix_xrange.py b/python/helpers/pydev/third_party/pep8/lib2to3/lib2to3/fixes/fix_xrange.py
new file mode 100644
index 0000000..f143672
--- /dev/null
+++ b/python/helpers/pydev/third_party/pep8/lib2to3/lib2to3/fixes/fix_xrange.py
@@ -0,0 +1,73 @@
+# Copyright 2007 Google, Inc. All Rights Reserved.
+# Licensed to PSF under a Contributor Agreement.
+
+"""Fixer that changes xrange(...) into range(...)."""
+
+# Local imports
+from .. import fixer_base
+from ..fixer_util import Name, Call, consuming_calls
+from .. import patcomp
+
+
+class FixXrange(fixer_base.BaseFix):
+    BM_compatible = True
+    PATTERN = """
+              power<
+                 (name='range'|name='xrange') trailer< '(' args=any ')' >
+              rest=any* >
+              """
+
+    def start_tree(self, tree, filename):
+        super(FixXrange, self).start_tree(tree, filename)
+        self.transformed_xranges = set()
+
+    def finish_tree(self, tree, filename):
+        self.transformed_xranges = None
+
+    def transform(self, node, results):
+        name = results["name"]
+        if name.value == u"xrange":
+            return self.transform_xrange(node, results)
+        elif name.value == u"range":
+            return self.transform_range(node, results)
+        else:
+            raise ValueError(repr(name))
+
+    def transform_xrange(self, node, results):
+        name = results["name"]
+        name.replace(Name(u"range", prefix=name.prefix))
+        # This prevents the new range call from being wrapped in a list later.
+        self.transformed_xranges.add(id(node))
+
+    def transform_range(self, node, results):
+        if (id(node) not in self.transformed_xranges and
+            not self.in_special_context(node)):
+            range_call = Call(Name(u"range"), [results["args"].clone()])
+            # Encase the range call in list().
+            list_call = Call(Name(u"list"), [range_call],
+                             prefix=node.prefix)
+            # Put things that were after the range() call after the list call.
+            for n in results["rest"]:
+                list_call.append_child(n)
+            return list_call
+
+    P1 = "power< func=NAME trailer< '(' node=any ')' > any* >"
+    p1 = patcomp.compile_pattern(P1)
+
+    P2 = """for_stmt< 'for' any 'in' node=any ':' any* >
+            | comp_for< 'for' any 'in' node=any any* >
+            | comparison< any 'in' node=any any*>
+         """
+    p2 = patcomp.compile_pattern(P2)
+
+    def in_special_context(self, node):
+        if node.parent is None:
+            return False
+        results = {}
+        if (node.parent.parent is not None and
+               self.p1.match(node.parent.parent, results) and
+               results["node"] is node):
+            # list(d.keys()) -> list(d.keys()), etc.
+            return results["func"].value in consuming_calls
+        # for ... in d.iterkeys() -> for ... in d.keys(), etc.
+        return self.p2.match(node.parent, results) and results["node"] is node
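
Illustration only: how the special-context check plays out in practice (stock RefactoringTool assumed; the names are invented). A range() already consumed by sum() is left alone, a bare range() is wrapped in list(), and xrange() is renamed without wrapping.

    from lib2to3.refactor import RefactoringTool

    tool = RefactoringTool(["lib2to3.fixes.fix_xrange"])
    src = (u"total = sum(range(10))\n"
           u"values = range(5)\n"
           u"for i in xrange(3):\n"
           u"    pass\n")
    print unicode(tool.refactor_string(src, "<example>"))
    # -> total = sum(range(10))
    # -> values = list(range(5))
    # -> for i in range(3):
    # ->     pass
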
diff --git a/python/helpers/pydev/third_party/pep8/lib2to3/lib2to3/fixes/fix_xreadlines.py b/python/helpers/pydev/third_party/pep8/lib2to3/lib2to3/fixes/fix_xreadlines.py
new file mode 100644
index 0000000..f50b9a2
--- /dev/null
+++ b/python/helpers/pydev/third_party/pep8/lib2to3/lib2to3/fixes/fix_xreadlines.py
@@ -0,0 +1,25 @@
+"""Fix "for x in f.xreadlines()" -> "for x in f".
+
+This fixer will also convert g(f.xreadlines) into g(f.__iter__)."""
+# Author: Collin Winter
+
+# Local imports
+from .. import fixer_base
+from ..fixer_util import Name
+
+
+class FixXreadlines(fixer_base.BaseFix):
+    BM_compatible = True
+    PATTERN = """
+    power< call=any+ trailer< '.' 'xreadlines' > trailer< '(' ')' > >
+    |
+    power< any+ trailer< '.' no_call='xreadlines' > >
+    """
+
+    def transform(self, node, results):
+        no_call = results.get("no_call")
+
+        if no_call:
+            no_call.replace(Name(u"__iter__", prefix=no_call.prefix))
+        else:
+            node.replace([x.clone() for x in results["call"]])
diff --git a/python/helpers/pydev/third_party/pep8/lib2to3/lib2to3/fixes/fix_zip.py b/python/helpers/pydev/third_party/pep8/lib2to3/lib2to3/fixes/fix_zip.py
new file mode 100644
index 0000000..c5d7b66
--- /dev/null
+++ b/python/helpers/pydev/third_party/pep8/lib2to3/lib2to3/fixes/fix_zip.py
@@ -0,0 +1,35 @@
+"""
+Fixer that changes zip(seq0, seq1, ...) into list(zip(seq0, seq1, ...))
+unless there exists a 'from future_builtins import zip' statement in the
+top-level namespace.
+
+We avoid the transformation if the zip() call is directly contained in
+iter(<>), list(<>), tuple(<>), sorted(<>), ...join(<>), or for V in <>:.
+"""
+
+# Local imports
+from .. import fixer_base
+from ..fixer_util import Name, Call, in_special_context
+
+class FixZip(fixer_base.ConditionalFix):
+
+    BM_compatible = True
+    PATTERN = """
+    power< 'zip' args=trailer< '(' [any] ')' >
+    >
+    """
+
+    skip_on = "future_builtins.zip"
+
+    def transform(self, node, results):
+        if self.should_skip(node):
+            return
+
+        if in_special_context(node):
+            return None
+
+        new = node.clone()
+        new.prefix = u""
+        new = Call(Name(u"list"), [new])
+        new.prefix = node.prefix
+        return new
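
Illustration only: the list() wrap and the skip_on escape hatch (stock RefactoringTool assumed; xs and ys are invented names).

    from lib2to3.refactor import RefactoringTool

    tool = RefactoringTool(["lib2to3.fixes.fix_zip"])
    print unicode(tool.refactor_string(u"pairs = zip(xs, ys)\n", "<example>"))
    # -> pairs = list(zip(xs, ys))

    src = u"from future_builtins import zip\npairs = zip(xs, ys)\n"
    print unicode(tool.refactor_string(src, "<example>"))
    # -> unchanged: the future_builtins import makes should_skip() return True
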
diff --git a/python/helpers/pydev/third_party/pep8/lib2to3/lib2to3/main.py b/python/helpers/pydev/third_party/pep8/lib2to3/lib2to3/main.py
new file mode 100644
index 0000000..ad0625e
--- /dev/null
+++ b/python/helpers/pydev/third_party/pep8/lib2to3/lib2to3/main.py
@@ -0,0 +1,269 @@
+"""
+Main program for 2to3.
+"""
+
+from __future__ import with_statement
+
+import sys
+import os
+import difflib
+import logging
+import shutil
+import optparse
+
+from . import refactor
+
+
+def diff_texts(a, b, filename):
+    """Return a unified diff of two strings."""
+    a = a.splitlines()
+    b = b.splitlines()
+    return difflib.unified_diff(a, b, filename, filename,
+                                "(original)", "(refactored)",
+                                lineterm="")
+
+
+class StdoutRefactoringTool(refactor.MultiprocessRefactoringTool):
+    """
+    A refactoring tool that can avoid overwriting its input files.
+    Prints output to stdout.
+
+    Output files can optionally be written to a different directory and/or
+    have an extra file suffix appended to their name for use in situations
+    where you do not want to replace the input files.
+    """
+
+    def __init__(self, fixers, options, explicit, nobackups, show_diffs,
+                 input_base_dir='', output_dir='', append_suffix=''):
+        """
+        Args:
+            fixers: A list of fixers to import.
+            options: A dict with RefactoringTool configuration.
+            explicit: A list of fixers to run even if they are explicit.
+            nobackups: If true no backup '.bak' files will be created for those
+                files that are being refactored.
+            show_diffs: Should diffs of the refactoring be printed to stdout?
+            input_base_dir: The base directory for all input files.  This class
+                will strip this path prefix off of filenames before substituting
+                it with output_dir.  Only meaningful if output_dir is supplied.
+                All files processed by refactor() must start with this path.
+            output_dir: If supplied, all converted files will be written into
+                this directory tree instead of input_base_dir.
+            append_suffix: If supplied, all files output by this tool will have
+                this appended to their filename.  Useful for changing .py to
+                .py3 for example by passing append_suffix='3'.
+        """
+        self.nobackups = nobackups
+        self.show_diffs = show_diffs
+        if input_base_dir and not input_base_dir.endswith(os.sep):
+            input_base_dir += os.sep
+        self._input_base_dir = input_base_dir
+        self._output_dir = output_dir
+        self._append_suffix = append_suffix
+        super(StdoutRefactoringTool, self).__init__(fixers, options, explicit)
+
+    def log_error(self, msg, *args, **kwargs):
+        self.errors.append((msg, args, kwargs))
+        self.logger.error(msg, *args, **kwargs)
+
+    def write_file(self, new_text, filename, old_text, encoding):
+        orig_filename = filename
+        if self._output_dir:
+            if filename.startswith(self._input_base_dir):
+                filename = os.path.join(self._output_dir,
+                                        filename[len(self._input_base_dir):])
+            else:
+                raise ValueError('filename %s does not start with the '
+                                 'input_base_dir %s' % (
+                                         filename, self._input_base_dir))
+        if self._append_suffix:
+            filename += self._append_suffix
+        if orig_filename != filename:
+            output_dir = os.path.dirname(filename)
+            if not os.path.isdir(output_dir):
+                os.makedirs(output_dir)
+            self.log_message('Writing converted %s to %s.', orig_filename,
+                             filename)
+        if not self.nobackups:
+            # Make backup
+            backup = filename + ".bak"
+            if os.path.lexists(backup):
+                try:
+                    os.remove(backup)
+                except os.error, err:
+                    self.log_message("Can't remove backup %s", backup)
+            try:
+                os.rename(filename, backup)
+            except os.error, err:
+                self.log_message("Can't rename %s to %s", filename, backup)
+        # Actually write the new file
+        write = super(StdoutRefactoringTool, self).write_file
+        write(new_text, filename, old_text, encoding)
+        if not self.nobackups:
+            shutil.copymode(backup, filename)
+        if orig_filename != filename:
+            # Preserve the file mode in the new output directory.
+            shutil.copymode(orig_filename, filename)
+
+    def print_output(self, old, new, filename, equal):
+        if equal:
+            self.log_message("No changes to %s", filename)
+        else:
+            self.log_message("Refactored %s", filename)
+            if self.show_diffs:
+                diff_lines = diff_texts(old, new, filename)
+                try:
+                    if self.output_lock is not None:
+                        with self.output_lock:
+                            for line in diff_lines:
+                                print line
+                            sys.stdout.flush()
+                    else:
+                        for line in diff_lines:
+                            print line
+                except UnicodeEncodeError:
+                    warn("couldn't encode %s's diff for your terminal" %
+                         (filename,))
+                    return
+
+
+def warn(msg):
+    print >> sys.stderr, "WARNING: %s" % (msg,)
+
+
+def main(fixer_pkg, args=None):
+    """Main program.
+
+    Args:
+        fixer_pkg: the name of a package where the fixers are located.
+        args: optional; a list of command line arguments. If omitted,
+              sys.argv[1:] is used.
+
+    Returns a suggested exit status (0, 1, 2).
+    """
+    # Set up option parser
+    parser = optparse.OptionParser(usage="2to3 [options] file|dir ...")
+    parser.add_option("-d", "--doctests_only", action="store_true",
+                      help="Fix up doctests only")
+    parser.add_option("-f", "--fix", action="append", default=[],
+                      help="Each FIX specifies a transformation; default: all")
+    parser.add_option("-j", "--processes", action="store", default=1,
+                      type="int", help="Run 2to3 concurrently")
+    parser.add_option("-x", "--nofix", action="append", default=[],
+                      help="Prevent a transformation from being run")
+    parser.add_option("-l", "--list-fixes", action="store_true",
+                      help="List available transformations")
+    parser.add_option("-p", "--print-function", action="store_true",
+                      help="Modify the grammar so that print() is a function")
+    parser.add_option("-v", "--verbose", action="store_true",
+                      help="More verbose logging")
+    parser.add_option("--no-diffs", action="store_true",
+                      help="Don't show diffs of the refactoring")
+    parser.add_option("-w", "--write", action="store_true",
+                      help="Write back modified files")
+    parser.add_option("-n", "--nobackups", action="store_true", default=False,
+                      help="Don't write backups for modified files")
+    parser.add_option("-o", "--output-dir", action="store", type="str",
+                      default="", help="Put output files in this directory "
+                      "instead of overwriting the input files.  Requires -n.")
+    parser.add_option("-W", "--write-unchanged-files", action="store_true",
+                      help="Also write files even if no changes were required"
+                      " (useful with --output-dir); implies -w.")
+    parser.add_option("--add-suffix", action="store", type="str", default="",
+                      help="Append this string to all output filenames."
+                      " Requires -n if non-empty.  "
+                      "ex: --add-suffix='3' will generate .py3 files.")
+
+    # Parse command line arguments
+    refactor_stdin = False
+    flags = {}
+    options, args = parser.parse_args(args)
+    if options.write_unchanged_files:
+        flags["write_unchanged_files"] = True
+        if not options.write:
+            warn("--write-unchanged-files/-W implies -w.")
+        options.write = True
+    # If we allowed these, the original files would be renamed to backup names
+    # but not replaced.
+    if options.output_dir and not options.nobackups:
+        parser.error("Can't use --output-dir/-o without -n.")
+    if options.add_suffix and not options.nobackups:
+        parser.error("Can't use --add-suffix without -n.")
+
+    if not options.write and options.no_diffs:
+        warn("not writing files and not printing diffs; that's not very useful")
+    if not options.write and options.nobackups:
+        parser.error("Can't use -n without -w")
+    if options.list_fixes:
+        print "Available transformations for the -f/--fix option:"
+        for fixname in refactor.get_all_fix_names(fixer_pkg):
+            print fixname
+        if not args:
+            return 0
+    if not args:
+        print >> sys.stderr, "At least one file or directory argument required."
+        print >> sys.stderr, "Use --help to show usage."
+        return 2
+    if "-" in args:
+        refactor_stdin = True
+        if options.write:
+            print >> sys.stderr, "Can't write to stdin."
+            return 2
+    if options.print_function:
+        flags["print_function"] = True
+
+    # Set up logging handler
+    level = logging.DEBUG if options.verbose else logging.INFO
+    logging.basicConfig(format='%(name)s: %(message)s', level=level)
+    logger = logging.getLogger('lib2to3.main')
+
+    # Initialize the refactoring tool
+    avail_fixes = set(refactor.get_fixers_from_package(fixer_pkg))
+    unwanted_fixes = set(fixer_pkg + ".fix_" + fix for fix in options.nofix)
+    explicit = set()
+    if options.fix:
+        all_present = False
+        for fix in options.fix:
+            if fix == "all":
+                all_present = True
+            else:
+                explicit.add(fixer_pkg + ".fix_" + fix)
+        requested = avail_fixes.union(explicit) if all_present else explicit
+    else:
+        requested = avail_fixes.union(explicit)
+    fixer_names = requested.difference(unwanted_fixes)
+    input_base_dir = os.path.commonprefix(args)
+    if (input_base_dir and not input_base_dir.endswith(os.sep)
+        and not os.path.isdir(input_base_dir)):
+        # One or more similar names were passed, their directory is the base.
+        # os.path.commonprefix() is ignorant of path elements, this corrects
+        # for that weird API.
+        input_base_dir = os.path.dirname(input_base_dir)
+    if options.output_dir:
+        input_base_dir = input_base_dir.rstrip(os.sep)
+        logger.info('Output in %r will mirror the input directory %r layout.',
+                    options.output_dir, input_base_dir)
+    rt = StdoutRefactoringTool(
+            sorted(fixer_names), flags, sorted(explicit),
+            options.nobackups, not options.no_diffs,
+            input_base_dir=input_base_dir,
+            output_dir=options.output_dir,
+            append_suffix=options.add_suffix)
+
+    # Refactor all files and directories passed as arguments
+    if not rt.errors:
+        if refactor_stdin:
+            rt.refactor_stdin()
+        else:
+            try:
+                rt.refactor(args, options.write, options.doctests_only,
+                            options.processes)
+            except refactor.MultiprocessingUnsupported:
+                assert options.processes > 1
+                print >> sys.stderr, "Sorry, -j isn't " \
+                    "supported on this platform."
+                return 1
+        rt.summarize()
+
+    # Return error status (0 if rt.errors is zero)
+    return int(bool(rt.errors))
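
Illustration only: main() can also be driven programmatically; the sketch below is the equivalent of "2to3 -l". Note that the -o/--output-dir and --add-suffix options documented above additionally require -n, so a write-out invocation would combine them as, for example, "-w -n -o out --add-suffix=3 src/" (paths here are placeholders).

    import sys
    from lib2to3.main import main

    # Print the available fixers and exit, exactly like "2to3 -l".
    sys.exit(main("lib2to3.fixes", ["-l"]))
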
diff --git a/python/helpers/pydev/third_party/pep8/lib2to3/lib2to3/patcomp.py b/python/helpers/pydev/third_party/pep8/lib2to3/lib2to3/patcomp.py
new file mode 100644
index 0000000..093e5f9
--- /dev/null
+++ b/python/helpers/pydev/third_party/pep8/lib2to3/lib2to3/patcomp.py
@@ -0,0 +1,205 @@
+# Copyright 2006 Google, Inc. All Rights Reserved.
+# Licensed to PSF under a Contributor Agreement.
+
+"""Pattern compiler.
+
+The grammar is taken from PatternGrammar.txt.
+
+The compiler compiles a pattern to a pytree.*Pattern instance.
+"""
+
+__author__ = "Guido van Rossum <guido@python.org>"
+
+# Python imports
+import os
+import StringIO
+
+# Fairly local imports
+from .pgen2 import driver, literals, token, tokenize, parse, grammar
+
+# Really local imports
+from . import pytree
+from . import pygram
+
+# The pattern grammar file
+_PATTERN_GRAMMAR_FILE = os.path.join(os.path.dirname(__file__),
+                                     "PatternGrammar.txt")
+
+
+class PatternSyntaxError(Exception):
+    pass
+
+
+def tokenize_wrapper(input):
+    """Tokenizes a string suppressing significant whitespace."""
+    skip = set((token.NEWLINE, token.INDENT, token.DEDENT))
+    tokens = tokenize.generate_tokens(StringIO.StringIO(input).readline)
+    for quintuple in tokens:
+        type, value, start, end, line_text = quintuple
+        if type not in skip:
+            yield quintuple
+
+
+class PatternCompiler(object):
+
+    def __init__(self, grammar_file=_PATTERN_GRAMMAR_FILE):
+        """Initializer.
+
+        Takes an optional alternative filename for the pattern grammar.
+        """
+        self.grammar = driver.load_grammar(grammar_file)
+        self.syms = pygram.Symbols(self.grammar)
+        self.pygrammar = pygram.python_grammar
+        self.pysyms = pygram.python_symbols
+        self.driver = driver.Driver(self.grammar, convert=pattern_convert)
+
+    def compile_pattern(self, input, debug=False, with_tree=False):
+        """Compiles a pattern string to a nested pytree.*Pattern object."""
+        tokens = tokenize_wrapper(input)
+        try:
+            root = self.driver.parse_tokens(tokens, debug=debug)
+        except parse.ParseError as e:
+            raise PatternSyntaxError(str(e))
+        if with_tree:
+            return self.compile_node(root), root
+        else:
+            return self.compile_node(root)
+
+    def compile_node(self, node):
+        """Compiles a node, recursively.
+
+        This is one big switch on the node type.
+        """
+        # XXX Optimize certain Wildcard-containing-Wildcard patterns
+        # that can be merged
+        if node.type == self.syms.Matcher:
+            node = node.children[0] # Avoid unneeded recursion
+
+        if node.type == self.syms.Alternatives:
+            # Skip the odd children since they are just '|' tokens
+            alts = [self.compile_node(ch) for ch in node.children[::2]]
+            if len(alts) == 1:
+                return alts[0]
+            p = pytree.WildcardPattern([[a] for a in alts], min=1, max=1)
+            return p.optimize()
+
+        if node.type == self.syms.Alternative:
+            units = [self.compile_node(ch) for ch in node.children]
+            if len(units) == 1:
+                return units[0]
+            p = pytree.WildcardPattern([units], min=1, max=1)
+            return p.optimize()
+
+        if node.type == self.syms.NegatedUnit:
+            pattern = self.compile_basic(node.children[1:])
+            p = pytree.NegatedPattern(pattern)
+            return p.optimize()
+
+        assert node.type == self.syms.Unit
+
+        name = None
+        nodes = node.children
+        if len(nodes) >= 3 and nodes[1].type == token.EQUAL:
+            name = nodes[0].value
+            nodes = nodes[2:]
+        repeat = None
+        if len(nodes) >= 2 and nodes[-1].type == self.syms.Repeater:
+            repeat = nodes[-1]
+            nodes = nodes[:-1]
+
+        # Now we've reduced it to: STRING | NAME [Details] | (...) | [...]
+        pattern = self.compile_basic(nodes, repeat)
+
+        if repeat is not None:
+            assert repeat.type == self.syms.Repeater
+            children = repeat.children
+            child = children[0]
+            if child.type == token.STAR:
+                min = 0
+                max = pytree.HUGE
+            elif child.type == token.PLUS:
+                min = 1
+                max = pytree.HUGE
+            elif child.type == token.LBRACE:
+                assert children[-1].type == token.RBRACE
+                assert  len(children) in (3, 5)
+                min = max = self.get_int(children[1])
+                if len(children) == 5:
+                    max = self.get_int(children[3])
+            else:
+                assert False
+            if min != 1 or max != 1:
+                pattern = pattern.optimize()
+                pattern = pytree.WildcardPattern([[pattern]], min=min, max=max)
+
+        if name is not None:
+            pattern.name = name
+        return pattern.optimize()
+
+    def compile_basic(self, nodes, repeat=None):
+        # Compile STRING | NAME [Details] | (...) | [...]
+        assert len(nodes) >= 1
+        node = nodes[0]
+        if node.type == token.STRING:
+            value = unicode(literals.evalString(node.value))
+            return pytree.LeafPattern(_type_of_literal(value), value)
+        elif node.type == token.NAME:
+            value = node.value
+            if value.isupper():
+                if value not in TOKEN_MAP:
+                    raise PatternSyntaxError("Invalid token: %r" % value)
+                if nodes[1:]:
+                    raise PatternSyntaxError("Can't have details for token")
+                return pytree.LeafPattern(TOKEN_MAP[value])
+            else:
+                if value == "any":
+                    type = None
+                elif not value.startswith("_"):
+                    type = getattr(self.pysyms, value, None)
+                    if type is None:
+                        raise PatternSyntaxError("Invalid symbol: %r" % value)
+                if nodes[1:]: # Details present
+                    content = [self.compile_node(nodes[1].children[1])]
+                else:
+                    content = None
+                return pytree.NodePattern(type, content)
+        elif node.value == "(":
+            return self.compile_node(nodes[1])
+        elif node.value == "[":
+            assert repeat is None
+            subpattern = self.compile_node(nodes[1])
+            return pytree.WildcardPattern([[subpattern]], min=0, max=1)
+        assert False, node
+
+    def get_int(self, node):
+        assert node.type == token.NUMBER
+        return int(node.value)
+
+
+# Map named tokens to the type value for a LeafPattern
+TOKEN_MAP = {"NAME": token.NAME,
+             "STRING": token.STRING,
+             "NUMBER": token.NUMBER,
+             "TOKEN": None}
+
+
+def _type_of_literal(value):
+    if value[0].isalpha():
+        return token.NAME
+    elif value in grammar.opmap:
+        return grammar.opmap[value]
+    else:
+        return None
+
+
+def pattern_convert(grammar, raw_node_info):
+    """Converts raw node information to a Node or Leaf instance."""
+    type, value, context, children = raw_node_info
+    if children or type in grammar.number2symbol:
+        return pytree.Node(type, children, context=context)
+    else:
+        return pytree.Leaf(type, value, context=context)
+
+
+def compile_pattern(pattern):
+    return PatternCompiler().compile_pattern(pattern)
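
Illustration only: compiling a pattern and matching it against a parsed tree, the same way the fixers above use their PATTERN strings; pytree.convert and pre_order() come from the accompanying pytree module, which is not shown in this hunk.

    from lib2to3 import patcomp, pygram, pytree
    from lib2to3.pgen2 import driver

    pattern = patcomp.compile_pattern("power< 'types' trailer< '.' name=NAME > >")
    d = driver.Driver(pygram.python_grammar, convert=pytree.convert)
    tree = d.parse_string(u"x = types.IntType\n")
    results = {}
    for node in tree.pre_order():
        if pattern.match(node, results):
            print results["name"].value
    # -> IntType
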
diff --git a/python/helpers/pydev/third_party/pep8/lib2to3/lib2to3/pgen2/__init__.py b/python/helpers/pydev/third_party/pep8/lib2to3/lib2to3/pgen2/__init__.py
new file mode 100644
index 0000000..af39048
--- /dev/null
+++ b/python/helpers/pydev/third_party/pep8/lib2to3/lib2to3/pgen2/__init__.py
@@ -0,0 +1,4 @@
+# Copyright 2004-2005 Elemental Security, Inc. All Rights Reserved.
+# Licensed to PSF under a Contributor Agreement.
+
+"""The pgen2 package."""
diff --git a/python/helpers/pydev/third_party/pep8/lib2to3/lib2to3/pgen2/conv.py b/python/helpers/pydev/third_party/pep8/lib2to3/lib2to3/pgen2/conv.py
new file mode 100644
index 0000000..28fbb0b
--- /dev/null
+++ b/python/helpers/pydev/third_party/pep8/lib2to3/lib2to3/pgen2/conv.py
@@ -0,0 +1,257 @@
+# Copyright 2004-2005 Elemental Security, Inc. All Rights Reserved.
+# Licensed to PSF under a Contributor Agreement.
+
+"""Convert graminit.[ch] spit out by pgen to Python code.
+
+Pgen is the Python parser generator.  It is useful to quickly create a
+parser from a grammar file in Python's grammar notation.  But I don't
+want my parsers to be written in C (yet), so I'm translating the
+parsing tables to Python data structures and writing a Python parse
+engine.
+
+Note that the token numbers are constants determined by the standard
+Python tokenizer.  The standard token module defines these numbers and
+their names (the names are not used much).  The token numbers are
+hardcoded into the Python tokenizer and into pgen.  A Python
+implementation of the Python tokenizer is also available, in the
+standard tokenize module.
+
+On the other hand, symbol numbers (representing the grammar's
+non-terminals) are assigned by pgen based on the actual grammar
+input.
+
+Note: this module is pretty much obsolete; the pgen module generates
+equivalent grammar tables directly from the Grammar.txt input file
+without having to invoke the Python pgen C program.
+
+"""
+
+# Python imports
+import re
+
+# Local imports
+from pgen2 import grammar, token
+
+
+class Converter(grammar.Grammar):
+    """Grammar subclass that reads classic pgen output files.
+
+    The run() method reads the tables as produced by the pgen parser
+    generator, typically contained in two C files, graminit.h and
+    graminit.c.  The other methods are for internal use only.
+
+    See the base class for more documentation.
+
+    """
+
+    def run(self, graminit_h, graminit_c):
+        """Load the grammar tables from the text files written by pgen."""
+        self.parse_graminit_h(graminit_h)
+        self.parse_graminit_c(graminit_c)
+        self.finish_off()
+
+    def parse_graminit_h(self, filename):
+        """Parse the .h file written by pgen.  (Internal)
+
+        This file is a sequence of #define statements defining the
+        nonterminals of the grammar as numbers.  We build two tables
+        mapping the numbers to names and back.
+
+        """
+        try:
+            f = open(filename)
+        except IOError, err:
+            print "Can't open %s: %s" % (filename, err)
+            return False
+        self.symbol2number = {}
+        self.number2symbol = {}
+        lineno = 0
+        for line in f:
+            lineno += 1
+            mo = re.match(r"^#define\s+(\w+)\s+(\d+)$", line)
+            if not mo and line.strip():
+                print "%s(%s): can't parse %s" % (filename, lineno,
+                                                  line.strip())
+            else:
+                symbol, number = mo.groups()
+                number = int(number)
+                assert symbol not in self.symbol2number
+                assert number not in self.number2symbol
+                self.symbol2number[symbol] = number
+                self.number2symbol[number] = symbol
+        return True
+
+    def parse_graminit_c(self, filename):
+        """Parse the .c file written by pgen.  (Internal)
+
+        The file looks as follows.  The first two lines are always this:
+
+        #include "pgenheaders.h"
+        #include "grammar.h"
+
+        After that come four blocks:
+
+        1) one or more state definitions
+        2) a table defining dfas
+        3) a table defining labels
+        4) a struct defining the grammar
+
+        A state definition has the following form:
+        - one or more arc arrays, each of the form:
+          static arc arcs_<n>_<m>[<k>] = {
+                  {<i>, <j>},
+                  ...
+          };
+        - followed by a state array, of the form:
+          static state states_<s>[<t>] = {
+                  {<k>, arcs_<n>_<m>},
+                  ...
+          };
+
+        """
+        try:
+            f = open(filename)
+        except IOError, err:
+            print "Can't open %s: %s" % (filename, err)
+            return False
+        # The code below essentially uses f's iterator-ness!
+        lineno = 0
+
+        # Expect the two #include lines
+        lineno, line = lineno+1, f.next()
+        assert line == '#include "pgenheaders.h"\n', (lineno, line)
+        lineno, line = lineno+1, f.next()
+        assert line == '#include "grammar.h"\n', (lineno, line)
+
+        # Parse the state definitions
+        lineno, line = lineno+1, f.next()
+        allarcs = {}
+        states = []
+        while line.startswith("static arc "):
+            while line.startswith("static arc "):
+                mo = re.match(r"static arc arcs_(\d+)_(\d+)\[(\d+)\] = {$",
+                              line)
+                assert mo, (lineno, line)
+                n, m, k = map(int, mo.groups())
+                arcs = []
+                for _ in range(k):
+                    lineno, line = lineno+1, f.next()
+                    mo = re.match(r"\s+{(\d+), (\d+)},$", line)
+                    assert mo, (lineno, line)
+                    i, j = map(int, mo.groups())
+                    arcs.append((i, j))
+                lineno, line = lineno+1, f.next()
+                assert line == "};\n", (lineno, line)
+                allarcs[(n, m)] = arcs
+                lineno, line = lineno+1, f.next()
+            mo = re.match(r"static state states_(\d+)\[(\d+)\] = {$", line)
+            assert mo, (lineno, line)
+            s, t = map(int, mo.groups())
+            assert s == len(states), (lineno, line)
+            state = []
+            for _ in range(t):
+                lineno, line = lineno+1, f.next()
+                mo = re.match(r"\s+{(\d+), arcs_(\d+)_(\d+)},$", line)
+                assert mo, (lineno, line)
+                k, n, m = map(int, mo.groups())
+                arcs = allarcs[n, m]
+                assert k == len(arcs), (lineno, line)
+                state.append(arcs)
+            states.append(state)
+            lineno, line = lineno+1, f.next()
+            assert line == "};\n", (lineno, line)
+            lineno, line = lineno+1, f.next()
+        self.states = states
+
+        # Parse the dfas
+        dfas = {}
+        mo = re.match(r"static dfa dfas\[(\d+)\] = {$", line)
+        assert mo, (lineno, line)
+        ndfas = int(mo.group(1))
+        for i in range(ndfas):
+            lineno, line = lineno+1, f.next()
+            mo = re.match(r'\s+{(\d+), "(\w+)", (\d+), (\d+), states_(\d+),$',
+                          line)
+            assert mo, (lineno, line)
+            symbol = mo.group(2)
+            number, x, y, z = map(int, mo.group(1, 3, 4, 5))
+            assert self.symbol2number[symbol] == number, (lineno, line)
+            assert self.number2symbol[number] == symbol, (lineno, line)
+            assert x == 0, (lineno, line)
+            state = states[z]
+            assert y == len(state), (lineno, line)
+            lineno, line = lineno+1, f.next()
+            mo = re.match(r'\s+("(?:\\\d\d\d)*")},$', line)
+            assert mo, (lineno, line)
+            first = {}
+            rawbitset = eval(mo.group(1))
+            for i, c in enumerate(rawbitset):
+                byte = ord(c)
+                for j in range(8):
+                    if byte & (1<<j):
+                        first[i*8 + j] = 1
+            dfas[number] = (state, first)
+        lineno, line = lineno+1, f.next()
+        assert line == "};\n", (lineno, line)
+        self.dfas = dfas
+
+        # Parse the labels
+        labels = []
+        lineno, line = lineno+1, f.next()
+        mo = re.match(r"static label labels\[(\d+)\] = {$", line)
+        assert mo, (lineno, line)
+        nlabels = int(mo.group(1))
+        for i in range(nlabels):
+            lineno, line = lineno+1, f.next()
+            mo = re.match(r'\s+{(\d+), (0|"\w+")},$', line)
+            assert mo, (lineno, line)
+            x, y = mo.groups()
+            x = int(x)
+            if y == "0":
+                y = None
+            else:
+                y = eval(y)
+            labels.append((x, y))
+        lineno, line = lineno+1, f.next()
+        assert line == "};\n", (lineno, line)
+        self.labels = labels
+
+        # Parse the grammar struct
+        lineno, line = lineno+1, f.next()
+        assert line == "grammar _PyParser_Grammar = {\n", (lineno, line)
+        lineno, line = lineno+1, f.next()
+        mo = re.match(r"\s+(\d+),$", line)
+        assert mo, (lineno, line)
+        ndfas = int(mo.group(1))
+        assert ndfas == len(self.dfas)
+        lineno, line = lineno+1, f.next()
+        assert line == "\tdfas,\n", (lineno, line)
+        lineno, line = lineno+1, f.next()
+        mo = re.match(r"\s+{(\d+), labels},$", line)
+        assert mo, (lineno, line)
+        nlabels = int(mo.group(1))
+        assert nlabels == len(self.labels), (lineno, line)
+        lineno, line = lineno+1, f.next()
+        mo = re.match(r"\s+(\d+)$", line)
+        assert mo, (lineno, line)
+        start = int(mo.group(1))
+        assert start in self.number2symbol, (lineno, line)
+        self.start = start
+        lineno, line = lineno+1, f.next()
+        assert line == "};\n", (lineno, line)
+        try:
+            lineno, line = lineno+1, f.next()
+        except StopIteration:
+            pass
+        else:
+            assert 0, (lineno, line)
+
+    def finish_off(self):
+        """Create additional useful structures.  (Internal)."""
+        self.keywords = {} # map from keyword strings to arc labels
+        self.tokens = {}   # map from numeric token values to arc labels
+        for ilabel, (type, value) in enumerate(self.labels):
+            if type == token.NAME and value is not None:
+                self.keywords[value] = ilabel
+            elif value is None:
+                self.tokens[type] = ilabel
diff --git a/python/helpers/pydev/third_party/pep8/lib2to3/lib2to3/pgen2/driver.py b/python/helpers/pydev/third_party/pep8/lib2to3/lib2to3/pgen2/driver.py
new file mode 100644
index 0000000..39dafb9
--- /dev/null
+++ b/python/helpers/pydev/third_party/pep8/lib2to3/lib2to3/pgen2/driver.py
@@ -0,0 +1,157 @@
+# Copyright 2004-2005 Elemental Security, Inc. All Rights Reserved.
+# Licensed to PSF under a Contributor Agreement.
+
+# Modifications:
+# Copyright 2006 Google, Inc. All Rights Reserved.
+# Licensed to PSF under a Contributor Agreement.
+
+"""Parser driver.
+
+This provides a high-level interface to parse a file into a syntax tree.
+
+"""
+
+__author__ = "Guido van Rossum <guido@python.org>"
+
+__all__ = ["Driver", "load_grammar"]
+
+# Python imports
+import codecs
+import os
+import logging
+import StringIO
+import sys
+
+# Pgen imports
+from . import grammar, parse, token, tokenize, pgen
+
+
+class Driver(object):
+
+    def __init__(self, grammar, convert=None, logger=None):
+        self.grammar = grammar
+        if logger is None:
+            logger = logging.getLogger()
+        self.logger = logger
+        self.convert = convert
+
+    def parse_tokens(self, tokens, debug=False):
+        """Parse a series of tokens and return the syntax tree."""
+        # XXX Move the prefix computation into a wrapper around tokenize.
+        p = parse.Parser(self.grammar, self.convert)
+        p.setup()
+        lineno = 1
+        column = 0
+        type = value = start = end = line_text = None
+        prefix = u""
+        for quintuple in tokens:
+            type, value, start, end, line_text = quintuple
+            if start != (lineno, column):
+                assert (lineno, column) <= start, ((lineno, column), start)
+                s_lineno, s_column = start
+                if lineno < s_lineno:
+                    prefix += "\n" * (s_lineno - lineno)
+                    lineno = s_lineno
+                    column = 0
+                if column < s_column:
+                    prefix += line_text[column:s_column]
+                    column = s_column
+            if type in (tokenize.COMMENT, tokenize.NL):
+                prefix += value
+                lineno, column = end
+                if value.endswith("\n"):
+                    lineno += 1
+                    column = 0
+                continue
+            if type == token.OP:
+                type = grammar.opmap[value]
+            if debug:
+                self.logger.debug("%s %r (prefix=%r)",
+                                  token.tok_name[type], value, prefix)
+            if p.addtoken(type, value, (prefix, start)):
+                if debug:
+                    self.logger.debug("Stop.")
+                break
+            prefix = ""
+            lineno, column = end
+            if value.endswith("\n"):
+                lineno += 1
+                column = 0
+        else:
+            # We never broke out -- EOF is too soon (how can this happen???)
+            raise parse.ParseError("incomplete input",
+                                   type, value, (prefix, start))
+        return p.rootnode
+
+    def parse_stream_raw(self, stream, debug=False):
+        """Parse a stream and return the syntax tree."""
+        tokens = tokenize.generate_tokens(stream.readline)
+        return self.parse_tokens(tokens, debug)
+
+    def parse_stream(self, stream, debug=False):
+        """Parse a stream and return the syntax tree."""
+        return self.parse_stream_raw(stream, debug)
+
+    def parse_file(self, filename, encoding=None, debug=False):
+        """Parse a file and return the syntax tree."""
+        stream = codecs.open(filename, "r", encoding)
+        try:
+            return self.parse_stream(stream, debug)
+        finally:
+            stream.close()
+
+    def parse_string(self, text, debug=False):
+        """Parse a string and return the syntax tree."""
+        tokens = tokenize.generate_tokens(StringIO.StringIO(text).readline)
+        return self.parse_tokens(tokens, debug)
+
+
+def load_grammar(gt="Grammar.txt", gp=None,
+                 save=True, force=False, logger=None):
+    """Load the grammar (maybe from a pickle)."""
+    if logger is None:
+        logger = logging.getLogger()
+    if gp is None:
+        head, tail = os.path.splitext(gt)
+        if tail == ".txt":
+            tail = ""
+        gp = head + tail + ".".join(map(str, sys.version_info)) + ".pickle"
+    if force or not _newer(gp, gt):
+        logger.info("Generating grammar tables from %s", gt)
+        g = pgen.generate_grammar(gt)
+        if save:
+            logger.info("Writing grammar tables to %s", gp)
+            try:
+                g.dump(gp)
+            except IOError, e:
+                logger.info("Writing failed:"+str(e))
+    else:
+        g = grammar.Grammar()
+        g.load(gp)
+    return g
+
+
+def _newer(a, b):
+    """Inquire whether file a was written since file b."""
+    if not os.path.exists(a):
+        return False
+    if not os.path.exists(b):
+        return True
+    return os.path.getmtime(a) >= os.path.getmtime(b)
+
+
+def main(*args):
+    """Main program, when run as a script: produce grammar pickle files.
+
+    Calls load_grammar for each argument, a path to a grammar text file.
+    """
+    if not args:
+        args = sys.argv[1:]
+    logging.basicConfig(level=logging.INFO, stream=sys.stdout,
+                        format='%(message)s')
+    for gt in args:
+        load_grammar(gt, save=True, force=True)
+    return True
+
+if __name__ == "__main__":
+    sys.exit(int(not main()))
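
Illustration only: the driver builds a lossless tree; whitespace and comments are carried in node prefixes, so unparsing reproduces the input exactly (pytree.convert is from the accompanying pytree module, not shown in this hunk).

    from lib2to3 import pygram, pytree
    from lib2to3.pgen2 import driver

    d = driver.Driver(pygram.python_grammar, convert=pytree.convert)
    tree = d.parse_string(u"x  =  1  # keep me\n")
    print unicode(tree) == u"x  =  1  # keep me\n"
    # -> True
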
diff --git a/python/helpers/pydev/third_party/pep8/lib2to3/lib2to3/pgen2/grammar.py b/python/helpers/pydev/third_party/pep8/lib2to3/lib2to3/pgen2/grammar.py
new file mode 100644
index 0000000..1aa5c43
--- /dev/null
+++ b/python/helpers/pydev/third_party/pep8/lib2to3/lib2to3/pgen2/grammar.py
@@ -0,0 +1,184 @@
+# Copyright 2004-2005 Elemental Security, Inc. All Rights Reserved.
+# Licensed to PSF under a Contributor Agreement.
+
+"""This module defines the data structures used to represent a grammar.
+
+These are a bit arcane because they are derived from the data
+structures used by Python's 'pgen' parser generator.
+
+There's also a table here mapping operators to their names in the
+token module; the Python tokenize module reports all operators as the
+fallback token code OP, but the parser needs the actual token code.
+
+"""
+
+# Python imports
+import pickle
+
+# Local imports
+from . import token, tokenize
+
+
+class Grammar(object):
+    """Pgen parsing tables conversion class.
+
+    Once initialized, this class supplies the grammar tables for the
+    parsing engine implemented by parse.py.  The parsing engine
+    accesses the instance variables directly.  The class here does not
+    provide initialization of the tables; several subclasses exist to
+    do this (see the conv and pgen modules).
+
+    The load() method reads the tables from a pickle file, which is
+    much faster than the other ways offered by subclasses.  The pickle
+    file is written by calling dump() (after loading the grammar
+    tables using a subclass).  The report() method prints a readable
+    representation of the tables to stdout, for debugging.
+
+    The instance variables are as follows:
+
+    symbol2number -- a dict mapping symbol names to numbers.  Symbol
+                     numbers are always 256 or higher, to distinguish
+                     them from token numbers, which are between 0 and
+                     255 (inclusive).
+
+    number2symbol -- a dict mapping numbers to symbol names;
+                     these two are each other's inverse.
+
+    states        -- a list of DFAs, where each DFA is a list of
+                     states, each state is a list of arcs, and each
+                     arc is a (i, j) pair where i is a label and j is
+                     a state number.  The DFA number is the index into
+                     this list.  (This name is slightly confusing.)
+                     Final states are represented by a special arc of
+                     the form (0, j) where j is its own state number.
+
+    dfas          -- a dict mapping symbol numbers to (DFA, first)
+                     pairs, where DFA is an item from the states list
+                     above, and first is a set of tokens that can
+                     begin this grammar rule (represented by a dict
+                     whose values are always 1).
+
+    labels        -- a list of (x, y) pairs where x is either a token
+                     number or a symbol number, and y is either None
+                     or a string; the strings are keywords.  The label
+                     number is the index in this list; label numbers
+                     are used to mark state transitions (arcs) in the
+                     DFAs.
+
+    start         -- the number of the grammar's start symbol.
+
+    keywords      -- a dict mapping keyword strings to arc labels.
+
+    tokens        -- a dict mapping token numbers to arc labels.
+
+    """
+
+    def __init__(self):
+        self.symbol2number = {}
+        self.number2symbol = {}
+        self.states = []
+        self.dfas = {}
+        self.labels = [(0, "EMPTY")]
+        self.keywords = {}
+        self.tokens = {}
+        self.symbol2label = {}
+        self.start = 256
+
+    def dump(self, filename):
+        """Dump the grammar tables to a pickle file."""
+        f = open(filename, "wb")
+        pickle.dump(self.__dict__, f, 2)
+        f.close()
+
+    def load(self, filename):
+        """Load the grammar tables from a pickle file."""
+        f = open(filename, "rb")
+        d = pickle.load(f)
+        f.close()
+        self.__dict__.update(d)
+
+    def copy(self):
+        """
+        Copy the grammar.
+        """
+        new = self.__class__()
+        for dict_attr in ("symbol2number", "number2symbol", "dfas", "keywords",
+                          "tokens", "symbol2label"):
+            setattr(new, dict_attr, getattr(self, dict_attr).copy())
+        new.labels = self.labels[:]
+        new.states = self.states[:]
+        new.start = self.start
+        return new
+
+    def report(self):
+        """Dump the grammar tables to standard output, for debugging."""
+        from pprint import pprint
+        print "s2n"
+        pprint(self.symbol2number)
+        print "n2s"
+        pprint(self.number2symbol)
+        print "states"
+        pprint(self.states)
+        print "dfas"
+        pprint(self.dfas)
+        print "labels"
+        pprint(self.labels)
+        print "start", self.start
+
+
+# Map from operator to number (since tokenize doesn't do this)
+
+opmap_raw = """
+( LPAR
+) RPAR
+[ LSQB
+] RSQB
+: COLON
+, COMMA
+; SEMI
++ PLUS
+- MINUS
+* STAR
+/ SLASH
+| VBAR
+& AMPER
+< LESS
+> GREATER
+= EQUAL
+. DOT
+% PERCENT
+` BACKQUOTE
+{ LBRACE
+} RBRACE
+@ AT
+== EQEQUAL
+!= NOTEQUAL
+<> NOTEQUAL
+<= LESSEQUAL
+>= GREATEREQUAL
+~ TILDE
+^ CIRCUMFLEX
+<< LEFTSHIFT
+>> RIGHTSHIFT
+** DOUBLESTAR
++= PLUSEQUAL
+-= MINEQUAL
+*= STAREQUAL
+/= SLASHEQUAL
+%= PERCENTEQUAL
+&= AMPEREQUAL
+|= VBAREQUAL
+^= CIRCUMFLEXEQUAL
+<<= LEFTSHIFTEQUAL
+>>= RIGHTSHIFTEQUAL
+**= DOUBLESTAREQUAL
+// DOUBLESLASH
+//= DOUBLESLASHEQUAL
+-> RARROW
+"""
+
+opmap = {}
+for line in opmap_raw.splitlines():
+    if line:
+        op, name = line.split()
+        opmap[op] = getattr(token, name)
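+
+
+# Illustrative only, not part of upstream lib2to3: a few spot checks of the
+# table built above, tying operator strings back to the token module.
+def _opmap_example():
+    assert opmap["("] == token.LPAR
+    assert opmap["=="] == token.EQEQUAL
+    assert opmap["//="] == token.DOUBLESLASHEQUAL
+    assert opmap["->"] == token.RARROW
+    return opmap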
diff --git a/python/helpers/pydev/third_party/pep8/lib2to3/lib2to3/pgen2/literals.py b/python/helpers/pydev/third_party/pep8/lib2to3/lib2to3/pgen2/literals.py
new file mode 100644
index 0000000..0b3948a
--- /dev/null
+++ b/python/helpers/pydev/third_party/pep8/lib2to3/lib2to3/pgen2/literals.py
@@ -0,0 +1,60 @@
+# Copyright 2004-2005 Elemental Security, Inc. All Rights Reserved.
+# Licensed to PSF under a Contributor Agreement.
+
+"""Safely evaluate Python string literals without using eval()."""
+
+import re
+
+simple_escapes = {"a": "\a",
+                  "b": "\b",
+                  "f": "\f",
+                  "n": "\n",
+                  "r": "\r",
+                  "t": "\t",
+                  "v": "\v",
+                  "'": "'",
+                  '"': '"',
+                  "\\": "\\"}
+
+def escape(m):
+    all, tail = m.group(0, 1)
+    assert all.startswith("\\")
+    esc = simple_escapes.get(tail)
+    if esc is not None:
+        return esc
+    if tail.startswith("x"):
+        hexes = tail[1:]
+        if len(hexes) < 2:
+            raise ValueError("invalid hex string escape ('\\%s')" % tail)
+        try:
+            i = int(hexes, 16)
+        except ValueError:
+            raise ValueError("invalid hex string escape ('\\%s')" % tail)
+    else:
+        try:
+            i = int(tail, 8)
+        except ValueError:
+            raise ValueError("invalid octal string escape ('\\%s')" % tail)
+    return chr(i)
+
+def evalString(s):
+    assert s.startswith("'") or s.startswith('"'), repr(s[:1])
+    q = s[0]
+    if s[:3] == q*3:
+        q = q*3
+    assert s.endswith(q), repr(s[-len(q):])
+    assert len(s) >= 2*len(q)
+    s = s[len(q):-len(q)]
+    return re.sub(r"\\(\'|\"|\\|[abfnrtv]|x.{0,2}|[0-7]{1,3})", escape, s)
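+
+# Illustrative only, not part of upstream lib2to3: a few round trips through
+# evalString(), covering a simple escape, a hex escape and a triple-quoted
+# literal.
+def _evalString_examples():
+    assert evalString(r"'a\tb'") == "a\tb"
+    assert evalString('"\\x41"') == "A"
+    assert evalString("'''tri-quoted'''") == "tri-quoted"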
+
+def test():
+    for i in range(256):
+        c = chr(i)
+        s = repr(c)
+        e = evalString(s)
+        if e != c:
+            print i, c, s, e
+
+
+if __name__ == "__main__":
+    test()
diff --git a/python/helpers/pydev/third_party/pep8/lib2to3/lib2to3/pgen2/parse.py b/python/helpers/pydev/third_party/pep8/lib2to3/lib2to3/pgen2/parse.py
new file mode 100644
index 0000000..6bebdbb
--- /dev/null
+++ b/python/helpers/pydev/third_party/pep8/lib2to3/lib2to3/pgen2/parse.py
@@ -0,0 +1,201 @@
+# Copyright 2004-2005 Elemental Security, Inc. All Rights Reserved.
+# Licensed to PSF under a Contributor Agreement.
+
+"""Parser engine for the grammar tables generated by pgen.
+
+The grammar table must be loaded first.
+
+See Parser/parser.c in the Python distribution for additional info on
+how this parsing engine works.
+
+"""
+
+# Local imports
+from . import token
+
+class ParseError(Exception):
+    """Exception to signal the parser is stuck."""
+
+    def __init__(self, msg, type, value, context):
+        Exception.__init__(self, "%s: type=%r, value=%r, context=%r" %
+                           (msg, type, value, context))
+        self.msg = msg
+        self.type = type
+        self.value = value
+        self.context = context
+
+class Parser(object):
+    """Parser engine.
+
+    The proper usage sequence is:
+
+    p = Parser(grammar, [converter])  # create instance
+    p.setup([start])                  # prepare for parsing
+    <for each input token>:
+        if p.addtoken(...):           # parse a token; may raise ParseError
+            break
+    root = p.rootnode                 # root of abstract syntax tree
+
+    A Parser instance may be reused by calling setup() repeatedly.
+
+    A Parser instance contains state pertaining to the current token
+    sequence, and should not be used concurrently by different threads
+    to parse separate token sequences.
+
+    See driver.py for how to get input tokens by tokenizing a file or
+    string.
+
+    Parsing is complete when addtoken() returns True; the root of the
+    abstract syntax tree can then be retrieved from the rootnode
+    instance variable.  When a syntax error occurs, addtoken() raises
+    the ParseError exception.  There is no error recovery; the parser
+    cannot be used after a syntax error was reported (but it can be
+    reinitialized by calling setup()).
+
+    """
+
+    def __init__(self, grammar, convert=None):
+        """Constructor.
+
+        The grammar argument is a grammar.Grammar instance; see the
+        grammar module for more information.
+
+        The parser is not ready yet for parsing; you must call the
+        setup() method to get it started.
+
+        The optional convert argument is a function mapping concrete
+        syntax tree nodes to abstract syntax tree nodes.  If not
+        given, no conversion is done and the syntax tree produced is
+        the concrete syntax tree.  If given, it must be a function of
+        two arguments, the first being the grammar (a grammar.Grammar
+        instance), and the second being the concrete syntax tree node
+        to be converted.  The syntax tree is converted from the bottom
+        up.
+
+        A concrete syntax tree node is a (type, value, context, nodes)
+        tuple, where type is the node type (a token or symbol number),
+        value is None for symbols and a string for tokens, context is
+        None or an opaque value used for error reporting (typically a
+        (lineno, offset) pair), and nodes is a list of children for
+        symbols, and None for tokens.
+
+        An abstract syntax tree node may be anything; this is entirely
+        up to the converter function.
+
+        """
+        self.grammar = grammar
+        self.convert = convert or (lambda grammar, node: node)
+
+    def setup(self, start=None):
+        """Prepare for parsing.
+
+        This *must* be called before starting to parse.
+
+        The optional argument is an alternative start symbol; it
+        defaults to the grammar's start symbol.
+
+        You can use a Parser instance to parse any number of programs;
+        each time you call setup() the parser is reset to an initial
+        state determined by the (implicit or explicit) start symbol.
+
+        """
+        if start is None:
+            start = self.grammar.start
+        # Each stack entry is a tuple: (dfa, state, node).
+        # A node is a tuple: (type, value, context, children),
+        # where children is a list of nodes or None, and context may be None.
+        newnode = (start, None, None, [])
+        stackentry = (self.grammar.dfas[start], 0, newnode)
+        self.stack = [stackentry]
+        self.rootnode = None
+        self.used_names = set() # Aliased to self.rootnode.used_names in pop()
+
+    def addtoken(self, type, value, context):
+        """Add a token; return True iff this is the end of the program."""
+        # Map from token to label
+        ilabel = self.classify(type, value, context)
+        # Loop until the token is shifted; may raise exceptions
+        while True:
+            dfa, state, node = self.stack[-1]
+            states, first = dfa
+            arcs = states[state]
+            # Look for a state with this label
+            for i, newstate in arcs:
+                t, v = self.grammar.labels[i]
+                if ilabel == i:
+                    # Look it up in the list of labels
+                    assert t < 256
+                    # Shift a token; we're done with it
+                    self.shift(type, value, newstate, context)
+                    # Pop while we are in an accept-only state
+                    state = newstate
+                    while states[state] == [(0, state)]:
+                        self.pop()
+                        if not self.stack:
+                            # Done parsing!
+                            return True
+                        dfa, state, node = self.stack[-1]
+                        states, first = dfa
+                    # Done with this token
+                    return False
+                elif t >= 256:
+                    # See if it's a symbol and if we're in its first set
+                    itsdfa = self.grammar.dfas[t]
+                    itsstates, itsfirst = itsdfa
+                    if ilabel in itsfirst:
+                        # Push a symbol
+                        self.push(t, self.grammar.dfas[t], newstate, context)
+                        break # To continue the outer while loop
+            else:
+                if (0, state) in arcs:
+                    # An accepting state, pop it and try something else
+                    self.pop()
+                    if not self.stack:
+                        # Done parsing, but another token is input
+                        raise ParseError("too much input",
+                                         type, value, context)
+                else:
+                    # No success finding a transition
+                    raise ParseError("bad input", type, value, context)
+
+    def classify(self, type, value, context):
+        """Turn a token into a label.  (Internal)"""
+        if type == token.NAME:
+            # Keep a listing of all used names
+            self.used_names.add(value)
+            # Check for reserved words
+            ilabel = self.grammar.keywords.get(value)
+            if ilabel is not None:
+                return ilabel
+        ilabel = self.grammar.tokens.get(type)
+        if ilabel is None:
+            raise ParseError("bad token", type, value, context)
+        return ilabel
+
+    def shift(self, type, value, newstate, context):
+        """Shift a token.  (Internal)"""
+        dfa, state, node = self.stack[-1]
+        newnode = (type, value, context, None)
+        newnode = self.convert(self.grammar, newnode)
+        if newnode is not None:
+            node[-1].append(newnode)
+        self.stack[-1] = (dfa, newstate, node)
+
+    def push(self, type, newdfa, newstate, context):
+        """Push a nonterminal.  (Internal)"""
+        dfa, state, node = self.stack[-1]
+        newnode = (type, None, context, [])
+        self.stack[-1] = (dfa, newstate, node)
+        self.stack.append((newdfa, 0, newnode))
+
+    def pop(self):
+        """Pop a nonterminal.  (Internal)"""
+        popdfa, popstate, popnode = self.stack.pop()
+        newnode = self.convert(self.grammar, popnode)
+        if newnode is not None:
+            if self.stack:
+                dfa, state, node = self.stack[-1]
+                node[-1].append(newnode)
+            else:
+                self.rootnode = newnode
+                self.rootnode.used_names = self.used_names
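+
+
+# Illustrative sketch, not part of upstream lib2to3: the usage sequence from
+# the Parser docstring, fed directly from pgen2.tokenize.  COMMENT and NL
+# tokens never appear in the grammar tables, so they are skipped, and operator
+# tokens (type OP) are mapped to their specific token numbers through
+# grammar.opmap before classification.  driver.py is the real entry point;
+# treat this only as an outline of how addtoken() is meant to be driven.
+def _parse_string_sketch(source, pgen_grammar):
+    from StringIO import StringIO
+    from . import tokenize
+    from . import grammar as grammar_module
+    p = Parser(pgen_grammar)      # default convert keeps the concrete tree
+    p.setup()
+    for type, value, start, end, line in tokenize.generate_tokens(
+            StringIO(source).readline):
+        if type in (tokenize.COMMENT, tokenize.NL):
+            continue
+        if type == token.OP:
+            type = grammar_module.opmap[value]
+        if p.addtoken(type, value, start):
+            break                 # ENDMARKER was accepted; parsing is done
+    return p.rootnode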
diff --git a/python/helpers/pydev/third_party/pep8/lib2to3/lib2to3/pgen2/pgen.py b/python/helpers/pydev/third_party/pep8/lib2to3/lib2to3/pgen2/pgen.py
new file mode 100644
index 0000000..63084a4
--- /dev/null
+++ b/python/helpers/pydev/third_party/pep8/lib2to3/lib2to3/pgen2/pgen.py
@@ -0,0 +1,386 @@
+# Copyright 2004-2005 Elemental Security, Inc. All Rights Reserved.
+# Licensed to PSF under a Contributor Agreement.
+
+# Pgen imports
+from . import grammar, token, tokenize
+
+class PgenGrammar(grammar.Grammar):
+    pass
+
+class ParserGenerator(object):
+
+    def __init__(self, filename, stream=None):
+        close_stream = None
+        if stream is None:
+            stream = open(filename)
+            close_stream = stream.close
+        self.filename = filename
+        self.stream = stream
+        self.generator = tokenize.generate_tokens(stream.readline)
+        self.gettoken() # Initialize lookahead
+        self.dfas, self.startsymbol = self.parse()
+        if close_stream is not None:
+            close_stream()
+        self.first = {} # map from symbol name to set of tokens
+        self.addfirstsets()
+
+    def make_grammar(self):
+        c = PgenGrammar()
+        names = self.dfas.keys()
+        names.sort()
+        names.remove(self.startsymbol)
+        names.insert(0, self.startsymbol)
+        for name in names:
+            i = 256 + len(c.symbol2number)
+            c.symbol2number[name] = i
+            c.number2symbol[i] = name
+        for name in names:
+            dfa = self.dfas[name]
+            states = []
+            for state in dfa:
+                arcs = []
+                for label, next in state.arcs.iteritems():
+                    arcs.append((self.make_label(c, label), dfa.index(next)))
+                if state.isfinal:
+                    arcs.append((0, dfa.index(state)))
+                states.append(arcs)
+            c.states.append(states)
+            c.dfas[c.symbol2number[name]] = (states, self.make_first(c, name))
+        c.start = c.symbol2number[self.startsymbol]
+        return c
+
+    def make_first(self, c, name):
+        rawfirst = self.first[name]
+        first = {}
+        for label in rawfirst:
+            ilabel = self.make_label(c, label)
+            ##assert ilabel not in first # XXX failed on <> ... !=
+            first[ilabel] = 1
+        return first
+
+    def make_label(self, c, label):
+        # XXX Maybe this should be a method on a subclass of converter?
+        ilabel = len(c.labels)
+        if label[0].isalpha():
+            # Either a symbol name or a named token
+            if label in c.symbol2number:
+                # A symbol name (a non-terminal)
+                if label in c.symbol2label:
+                    return c.symbol2label[label]
+                else:
+                    c.labels.append((c.symbol2number[label], None))
+                    c.symbol2label[label] = ilabel
+                    return ilabel
+            else:
+                # A named token (NAME, NUMBER, STRING)
+                itoken = getattr(token, label, None)
+                assert isinstance(itoken, int), label
+                assert itoken in token.tok_name, label
+                if itoken in c.tokens:
+                    return c.tokens[itoken]
+                else:
+                    c.labels.append((itoken, None))
+                    c.tokens[itoken] = ilabel
+                    return ilabel
+        else:
+            # Either a keyword or an operator
+            assert label[0] in ('"', "'"), label
+            value = eval(label)
+            if value[0].isalpha():
+                # A keyword
+                if value in c.keywords:
+                    return c.keywords[value]
+                else:
+                    c.labels.append((token.NAME, value))
+                    c.keywords[value] = ilabel
+                    return ilabel
+            else:
+                # An operator (any non-numeric token)
+                itoken = grammar.opmap[value] # Fails if unknown token
+                if itoken in c.tokens:
+                    return c.tokens[itoken]
+                else:
+                    c.labels.append((itoken, None))
+                    c.tokens[itoken] = ilabel
+                    return ilabel
+
+    def addfirstsets(self):
+        names = self.dfas.keys()
+        names.sort()
+        for name in names:
+            if name not in self.first:
+                self.calcfirst(name)
+            #print name, self.first[name].keys()
+
+    def calcfirst(self, name):
+        dfa = self.dfas[name]
+        self.first[name] = None # dummy to detect left recursion
+        state = dfa[0]
+        totalset = {}
+        overlapcheck = {}
+        for label, next in state.arcs.iteritems():
+            if label in self.dfas:
+                if label in self.first:
+                    fset = self.first[label]
+                    if fset is None:
+                        raise ValueError("recursion for rule %r" % name)
+                else:
+                    self.calcfirst(label)
+                    fset = self.first[label]
+                totalset.update(fset)
+                overlapcheck[label] = fset
+            else:
+                totalset[label] = 1
+                overlapcheck[label] = {label: 1}
+        inverse = {}
+        for label, itsfirst in overlapcheck.iteritems():
+            for symbol in itsfirst:
+                if symbol in inverse:
+                    raise ValueError("rule %s is ambiguous; %s is in the"
+                                     " first sets of %s as well as %s" %
+                                     (name, symbol, label, inverse[symbol]))
+                inverse[symbol] = label
+        self.first[name] = totalset
+
+    def parse(self):
+        dfas = {}
+        startsymbol = None
+        # MSTART: (NEWLINE | RULE)* ENDMARKER
+        while self.type != token.ENDMARKER:
+            while self.type == token.NEWLINE:
+                self.gettoken()
+            # RULE: NAME ':' RHS NEWLINE
+            name = self.expect(token.NAME)
+            self.expect(token.OP, ":")
+            a, z = self.parse_rhs()
+            self.expect(token.NEWLINE)
+            #self.dump_nfa(name, a, z)
+            dfa = self.make_dfa(a, z)
+            #self.dump_dfa(name, dfa)
+            oldlen = len(dfa)
+            self.simplify_dfa(dfa)
+            newlen = len(dfa)
+            dfas[name] = dfa
+            #print name, oldlen, newlen
+            if startsymbol is None:
+                startsymbol = name
+        return dfas, startsymbol
+
+    def make_dfa(self, start, finish):
+        # To turn an NFA into a DFA, we define the states of the DFA
+        # to correspond to *sets* of states of the NFA.  Then do some
+        # state reduction.  Let's represent sets as dicts with 1 for
+        # values.
+        assert isinstance(start, NFAState)
+        assert isinstance(finish, NFAState)
+        def closure(state):
+            base = {}
+            addclosure(state, base)
+            return base
+        def addclosure(state, base):
+            assert isinstance(state, NFAState)
+            if state in base:
+                return
+            base[state] = 1
+            for label, next in state.arcs:
+                if label is None:
+                    addclosure(next, base)
+        states = [DFAState(closure(start), finish)]
+        for state in states: # NB states grows while we're iterating
+            arcs = {}
+            for nfastate in state.nfaset:
+                for label, next in nfastate.arcs:
+                    if label is not None:
+                        addclosure(next, arcs.setdefault(label, {}))
+            for label, nfaset in arcs.iteritems():
+                for st in states:
+                    if st.nfaset == nfaset:
+                        break
+                else:
+                    st = DFAState(nfaset, finish)
+                    states.append(st)
+                state.addarc(st, label)
+        return states # List of DFAState instances; first one is start
+
+    def dump_nfa(self, name, start, finish):
+        print "Dump of NFA for", name
+        todo = [start]
+        for i, state in enumerate(todo):
+            print "  State", i, state is finish and "(final)" or ""
+            for label, next in state.arcs:
+                if next in todo:
+                    j = todo.index(next)
+                else:
+                    j = len(todo)
+                    todo.append(next)
+                if label is None:
+                    print "    -> %d" % j
+                else:
+                    print "    %s -> %d" % (label, j)
+
+    def dump_dfa(self, name, dfa):
+        print "Dump of DFA for", name
+        for i, state in enumerate(dfa):
+            print "  State", i, state.isfinal and "(final)" or ""
+            for label, next in state.arcs.iteritems():
+                print "    %s -> %d" % (label, dfa.index(next))
+
+    def simplify_dfa(self, dfa):
+        # This is not theoretically optimal, but works well enough.
+        # Algorithm: repeatedly look for two states that have the same
+        # set of arcs (same labels pointing to the same nodes) and
+        # unify them, until things stop changing.
+
+        # dfa is a list of DFAState instances
+        changes = True
+        while changes:
+            changes = False
+            for i, state_i in enumerate(dfa):
+                for j in range(i+1, len(dfa)):
+                    state_j = dfa[j]
+                    if state_i == state_j:
+                        #print "  unify", i, j
+                        del dfa[j]
+                        for state in dfa:
+                            state.unifystate(state_j, state_i)
+                        changes = True
+                        break
+
+    def parse_rhs(self):
+        # RHS: ALT ('|' ALT)*
+        a, z = self.parse_alt()
+        if self.value != "|":
+            return a, z
+        else:
+            aa = NFAState()
+            zz = NFAState()
+            aa.addarc(a)
+            z.addarc(zz)
+            while self.value == "|":
+                self.gettoken()
+                a, z = self.parse_alt()
+                aa.addarc(a)
+                z.addarc(zz)
+            return aa, zz
+
+    def parse_alt(self):
+        # ALT: ITEM+
+        a, b = self.parse_item()
+        while (self.value in ("(", "[") or
+               self.type in (token.NAME, token.STRING)):
+            c, d = self.parse_item()
+            b.addarc(c)
+            b = d
+        return a, b
+
+    def parse_item(self):
+        # ITEM: '[' RHS ']' | ATOM ['+' | '*']
+        if self.value == "[":
+            self.gettoken()
+            a, z = self.parse_rhs()
+            self.expect(token.OP, "]")
+            a.addarc(z)
+            return a, z
+        else:
+            a, z = self.parse_atom()
+            value = self.value
+            if value not in ("+", "*"):
+                return a, z
+            self.gettoken()
+            z.addarc(a)
+            if value == "+":
+                return a, z
+            else:
+                return a, a
+
+    def parse_atom(self):
+        # ATOM: '(' RHS ')' | NAME | STRING
+        if self.value == "(":
+            self.gettoken()
+            a, z = self.parse_rhs()
+            self.expect(token.OP, ")")
+            return a, z
+        elif self.type in (token.NAME, token.STRING):
+            a = NFAState()
+            z = NFAState()
+            a.addarc(z, self.value)
+            self.gettoken()
+            return a, z
+        else:
+            self.raise_error("expected (...) or NAME or STRING, got %s/%s",
+                             self.type, self.value)
+
+    def expect(self, type, value=None):
+        if self.type != type or (value is not None and self.value != value):
+            self.raise_error("expected %s/%s, got %s/%s",
+                             type, value, self.type, self.value)
+        value = self.value
+        self.gettoken()
+        return value
+
+    def gettoken(self):
+        tup = self.generator.next()
+        while tup[0] in (tokenize.COMMENT, tokenize.NL):
+            tup = self.generator.next()
+        self.type, self.value, self.begin, self.end, self.line = tup
+        #print token.tok_name[self.type], repr(self.value)
+
+    def raise_error(self, msg, *args):
+        if args:
+            try:
+                msg = msg % args
+            except:
+                msg = " ".join([msg] + map(str, args))
+        raise SyntaxError(msg, (self.filename, self.end[0],
+                                self.end[1], self.line))
+
+class NFAState(object):
+
+    def __init__(self):
+        self.arcs = [] # list of (label, NFAState) pairs
+
+    def addarc(self, next, label=None):
+        assert label is None or isinstance(label, str)
+        assert isinstance(next, NFAState)
+        self.arcs.append((label, next))
+
+class DFAState(object):
+
+    def __init__(self, nfaset, final):
+        assert isinstance(nfaset, dict)
+        assert isinstance(iter(nfaset).next(), NFAState)
+        assert isinstance(final, NFAState)
+        self.nfaset = nfaset
+        self.isfinal = final in nfaset
+        self.arcs = {} # map from label to DFAState
+
+    def addarc(self, next, label):
+        assert isinstance(label, str)
+        assert label not in self.arcs
+        assert isinstance(next, DFAState)
+        self.arcs[label] = next
+
+    def unifystate(self, old, new):
+        for label, next in self.arcs.iteritems():
+            if next is old:
+                self.arcs[label] = new
+
+    def __eq__(self, other):
+        # Equality test -- ignore the nfaset instance variable
+        assert isinstance(other, DFAState)
+        if self.isfinal != other.isfinal:
+            return False
+        # Can't just return self.arcs == other.arcs, because that
+        # would invoke this method recursively, with cycles...
+        if len(self.arcs) != len(other.arcs):
+            return False
+        for label, next in self.arcs.iteritems():
+            if next is not other.arcs.get(label):
+                return False
+        return True
+
+    __hash__ = None # For Py3 compatibility.
+
+def generate_grammar(filename="Grammar.txt"):
+    p = ParserGenerator(filename)
+    return p.make_grammar()
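+
+
+# Illustrative only, not part of upstream lib2to3: run a throwaway two-rule
+# grammar through the generator end to end (meta-grammar parse, NFA to DFA
+# conversion, simplification and table construction).
+def _generate_grammar_example():
+    import os, tempfile
+    fd, path = tempfile.mkstemp(suffix=".txt")
+    try:
+        os.write(fd, "start: expr NEWLINE ENDMARKER\n"
+                     "expr: NAME ('+' NAME)*\n")
+        os.close(fd)
+        g = generate_grammar(path)
+    finally:
+        os.remove(path)
+    assert g.start == g.symbol2number["start"]
+    assert g.symbol2number["expr"] in g.dfas
+    return g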
diff --git a/python/helpers/pydev/third_party/pep8/lib2to3/lib2to3/pgen2/token.py b/python/helpers/pydev/third_party/pep8/lib2to3/lib2to3/pgen2/token.py
new file mode 100644
index 0000000..61468b3
--- /dev/null
+++ b/python/helpers/pydev/third_party/pep8/lib2to3/lib2to3/pgen2/token.py
@@ -0,0 +1,82 @@
+#! /usr/bin/env python
+
+"""Token constants (from "token.h")."""
+
+#  Taken from Python (r53757) and modified to include some tokens
+#   originally monkeypatched in by pgen2.tokenize
+
+#--start constants--
+ENDMARKER = 0
+NAME = 1
+NUMBER = 2
+STRING = 3
+NEWLINE = 4
+INDENT = 5
+DEDENT = 6
+LPAR = 7
+RPAR = 8
+LSQB = 9
+RSQB = 10
+COLON = 11
+COMMA = 12
+SEMI = 13
+PLUS = 14
+MINUS = 15
+STAR = 16
+SLASH = 17
+VBAR = 18
+AMPER = 19
+LESS = 20
+GREATER = 21
+EQUAL = 22
+DOT = 23
+PERCENT = 24
+BACKQUOTE = 25
+LBRACE = 26
+RBRACE = 27
+EQEQUAL = 28
+NOTEQUAL = 29
+LESSEQUAL = 30
+GREATEREQUAL = 31
+TILDE = 32
+CIRCUMFLEX = 33
+LEFTSHIFT = 34
+RIGHTSHIFT = 35
+DOUBLESTAR = 36
+PLUSEQUAL = 37
+MINEQUAL = 38
+STAREQUAL = 39
+SLASHEQUAL = 40
+PERCENTEQUAL = 41
+AMPEREQUAL = 42
+VBAREQUAL = 43
+CIRCUMFLEXEQUAL = 44
+LEFTSHIFTEQUAL = 45
+RIGHTSHIFTEQUAL = 46
+DOUBLESTAREQUAL = 47
+DOUBLESLASH = 48
+DOUBLESLASHEQUAL = 49
+AT = 50
+OP = 51
+COMMENT = 52
+NL = 53
+RARROW = 54
+ERRORTOKEN = 55
+N_TOKENS = 56
+NT_OFFSET = 256
+#--end constants--
+
+tok_name = {}
+for _name, _value in globals().items():
+    if type(_value) is type(0):
+        tok_name[_value] = _name
+
+
+def ISTERMINAL(x):
+    return x < NT_OFFSET
+
+def ISNONTERMINAL(x):
+    return x >= NT_OFFSET
+
+def ISEOF(x):
+    return x == ENDMARKER
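+
+
+# Illustrative only, not part of upstream lib2to3: tok_name inverts the table
+# above, and NT_OFFSET is the boundary between terminals and nonterminals.
+def _token_example():
+    assert tok_name[LPAR] == "LPAR" and tok_name[ENDMARKER] == "ENDMARKER"
+    assert ISTERMINAL(LPAR) and not ISNONTERMINAL(LPAR)
+    assert ISNONTERMINAL(NT_OFFSET) and ISEOF(ENDMARKER)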
diff --git a/python/helpers/pydev/third_party/pep8/lib2to3/lib2to3/pgen2/tokenize.py b/python/helpers/pydev/third_party/pep8/lib2to3/lib2to3/pgen2/tokenize.py
new file mode 100644
index 0000000..f6e0284
--- /dev/null
+++ b/python/helpers/pydev/third_party/pep8/lib2to3/lib2to3/pgen2/tokenize.py
@@ -0,0 +1,499 @@
+# Copyright (c) 2001, 2002, 2003, 2004, 2005, 2006 Python Software Foundation.
+# All rights reserved.
+
+"""Tokenization help for Python programs.
+
+generate_tokens(readline) is a generator that breaks a stream of
+text into Python tokens.  It accepts a readline-like method which is called
+repeatedly to get the next line of input (or "" for EOF).  It generates
+5-tuples with these members:
+
+    the token type (see token.py)
+    the token (a string)
+    the starting (row, column) indices of the token (a 2-tuple of ints)
+    the ending (row, column) indices of the token (a 2-tuple of ints)
+    the original line (string)
+
+It is designed to match the working of the Python tokenizer exactly, except
+that it produces COMMENT tokens for comments and gives type OP for all
+operators.
+
+Older entry points
+    tokenize_loop(readline, tokeneater)
+    tokenize(readline, tokeneater=printtoken)
+are the same, except instead of generating tokens, tokeneater is a callback
+function to which the 5 fields described above are passed as 5 arguments,
+each time a new token is found."""
+
+__author__ = 'Ka-Ping Yee <ping@lfw.org>'
+__credits__ = \
+    'GvR, ESR, Tim Peters, Thomas Wouters, Fred Drake, Skip Montanaro'
+
+import string, re
+from codecs import BOM_UTF8, lookup
+from lib2to3.pgen2.token import *
+
+from . import token
+__all__ = [x for x in dir(token) if x[0] != '_'] + ["tokenize",
+           "generate_tokens", "untokenize"]
+del token
+
+try:
+    bytes
+except NameError:
+    # Support bytes type in Python <= 2.5, so 2to3 turns itself into
+    # valid Python 3 code.
+    bytes = str
+
+def group(*choices): return '(' + '|'.join(choices) + ')'
+def any(*choices): return group(*choices) + '*'
+def maybe(*choices): return group(*choices) + '?'
+
+Whitespace = r'[ \f\t]*'
+Comment = r'#[^\r\n]*'
+Ignore = Whitespace + any(r'\\\r?\n' + Whitespace) + maybe(Comment)
+Name = r'[a-zA-Z_]\w*'
+
+Binnumber = r'0[bB][01]*'
+Hexnumber = r'0[xX][\da-fA-F]*[lL]?'
+Octnumber = r'0[oO]?[0-7]*[lL]?'
+Decnumber = r'[1-9]\d*[lL]?'
+Intnumber = group(Binnumber, Hexnumber, Octnumber, Decnumber)
+Exponent = r'[eE][-+]?\d+'
+Pointfloat = group(r'\d+\.\d*', r'\.\d+') + maybe(Exponent)
+Expfloat = r'\d+' + Exponent
+Floatnumber = group(Pointfloat, Expfloat)
+Imagnumber = group(r'\d+[jJ]', Floatnumber + r'[jJ]')
+Number = group(Imagnumber, Floatnumber, Intnumber)
+
+# Tail end of ' string.
+Single = r"[^'\\]*(?:\\.[^'\\]*)*'"
+# Tail end of " string.
+Double = r'[^"\\]*(?:\\.[^"\\]*)*"'
+# Tail end of ''' string.
+Single3 = r"[^'\\]*(?:(?:\\.|'(?!''))[^'\\]*)*'''"
+# Tail end of """ string.
+Double3 = r'[^"\\]*(?:(?:\\.|"(?!""))[^"\\]*)*"""'
+Triple = group("[ubUB]?[rR]?'''", '[ubUB]?[rR]?"""')
+# Single-line ' or " string.
+String = group(r"[uU]?[rR]?'[^\n'\\]*(?:\\.[^\n'\\]*)*'",
+               r'[uU]?[rR]?"[^\n"\\]*(?:\\.[^\n"\\]*)*"')
+
+# Because of leftmost-then-longest match semantics, be sure to put the
+# longest operators first (e.g., if = came before ==, == would get
+# recognized as two instances of =).
+Operator = group(r"\*\*=?", r">>=?", r"<<=?", r"<>", r"!=",
+                 r"//=?", r"->",
+                 r"[+\-*/%&|^=<>]=?",
+                 r"~")
+
+Bracket = '[][(){}]'
+Special = group(r'\r?\n', r'[:;.,`@]')
+Funny = group(Operator, Bracket, Special)
+
+PlainToken = group(Number, Funny, String, Name)
+Token = Ignore + PlainToken
+
+# First (or only) line of ' or " string.
+ContStr = group(r"[uUbB]?[rR]?'[^\n'\\]*(?:\\.[^\n'\\]*)*" +
+                group("'", r'\\\r?\n'),
+                r'[uUbB]?[rR]?"[^\n"\\]*(?:\\.[^\n"\\]*)*' +
+                group('"', r'\\\r?\n'))
+PseudoExtras = group(r'\\\r?\n', Comment, Triple)
+PseudoToken = Whitespace + group(PseudoExtras, Number, Funny, ContStr, Name)
+
+tokenprog, pseudoprog, single3prog, double3prog = map(
+    re.compile, (Token, PseudoToken, Single3, Double3))
+endprogs = {"'": re.compile(Single), '"': re.compile(Double),
+            "'''": single3prog, '"""': double3prog,
+            "r'''": single3prog, 'r"""': double3prog,
+            "u'''": single3prog, 'u"""': double3prog,
+            "b'''": single3prog, 'b"""': double3prog,
+            "ur'''": single3prog, 'ur"""': double3prog,
+            "br'''": single3prog, 'br"""': double3prog,
+            "R'''": single3prog, 'R"""': double3prog,
+            "U'''": single3prog, 'U"""': double3prog,
+            "B'''": single3prog, 'B"""': double3prog,
+            "uR'''": single3prog, 'uR"""': double3prog,
+            "Ur'''": single3prog, 'Ur"""': double3prog,
+            "UR'''": single3prog, 'UR"""': double3prog,
+            "bR'''": single3prog, 'bR"""': double3prog,
+            "Br'''": single3prog, 'Br"""': double3prog,
+            "BR'''": single3prog, 'BR"""': double3prog,
+            'r': None, 'R': None,
+            'u': None, 'U': None,
+            'b': None, 'B': None}
+
+triple_quoted = {}
+for t in ("'''", '"""',
+          "r'''", 'r"""', "R'''", 'R"""',
+          "u'''", 'u"""', "U'''", 'U"""',
+          "b'''", 'b"""', "B'''", 'B"""',
+          "ur'''", 'ur"""', "Ur'''", 'Ur"""',
+          "uR'''", 'uR"""', "UR'''", 'UR"""',
+          "br'''", 'br"""', "Br'''", 'Br"""',
+          "bR'''", 'bR"""', "BR'''", 'BR"""',):
+    triple_quoted[t] = t
+single_quoted = {}
+for t in ("'", '"',
+          "r'", 'r"', "R'", 'R"',
+          "u'", 'u"', "U'", 'U"',
+          "b'", 'b"', "B'", 'B"',
+          "ur'", 'ur"', "Ur'", 'Ur"',
+          "uR'", 'uR"', "UR'", 'UR"',
+          "br'", 'br"', "Br'", 'Br"',
+          "bR'", 'bR"', "BR'", 'BR"', ):
+    single_quoted[t] = t
+
+tabsize = 8
+
+class TokenError(Exception): pass
+
+class StopTokenizing(Exception): pass
+
+def printtoken(type, token, start, end, line): # for testing
+    (srow, scol) = start
+    (erow, ecol) = end
+    print "%d,%d-%d,%d:\t%s\t%s" % \
+        (srow, scol, erow, ecol, tok_name[type], repr(token))
+
+def tokenize(readline, tokeneater=printtoken):
+    """
+    The tokenize() function accepts two parameters: one representing the
+    input stream, and one providing an output mechanism for tokenize().
+
+    The first parameter, readline, must be a callable object which provides
+    the same interface as the readline() method of built-in file objects.
+    Each call to the function should return one line of input as a string.
+
+    The second parameter, tokeneater, must also be a callable object. It is
+    called once for each token, with five arguments, corresponding to the
+    tuples generated by generate_tokens().
+    """
+    try:
+        tokenize_loop(readline, tokeneater)
+    except StopTokenizing:
+        pass
+
+# backwards compatible interface
+def tokenize_loop(readline, tokeneater):
+    for token_info in generate_tokens(readline):
+        tokeneater(*token_info)
+
+class Untokenizer:
+
+    def __init__(self):
+        self.tokens = []
+        self.prev_row = 1
+        self.prev_col = 0
+
+    def add_whitespace(self, start):
+        row, col = start
+        assert row <= self.prev_row
+        col_offset = col - self.prev_col
+        if col_offset:
+            self.tokens.append(" " * col_offset)
+
+    def untokenize(self, iterable):
+        for t in iterable:
+            if len(t) == 2:
+                self.compat(t, iterable)
+                break
+            tok_type, token, start, end, line = t
+            self.add_whitespace(start)
+            self.tokens.append(token)
+            self.prev_row, self.prev_col = end
+            if tok_type in (NEWLINE, NL):
+                self.prev_row += 1
+                self.prev_col = 0
+        return "".join(self.tokens)
+
+    def compat(self, token, iterable):
+        startline = False
+        indents = []
+        toks_append = self.tokens.append
+        toknum, tokval = token
+        if toknum in (NAME, NUMBER):
+            tokval += ' '
+        if toknum in (NEWLINE, NL):
+            startline = True
+        for tok in iterable:
+            toknum, tokval = tok[:2]
+
+            if toknum in (NAME, NUMBER):
+                tokval += ' '
+
+            if toknum == INDENT:
+                indents.append(tokval)
+                continue
+            elif toknum == DEDENT:
+                indents.pop()
+                continue
+            elif toknum in (NEWLINE, NL):
+                startline = True
+            elif startline and indents:
+                toks_append(indents[-1])
+                startline = False
+            toks_append(tokval)
+
+cookie_re = re.compile(r'^[ \t\f]*#.*coding[:=][ \t]*([-\w.]+)')
+
+def _get_normal_name(orig_enc):
+    """Imitates get_normal_name in tokenizer.c."""
+    # Only care about the first 12 characters.
+    enc = orig_enc[:12].lower().replace("_", "-")
+    if enc == "utf-8" or enc.startswith("utf-8-"):
+        return "utf-8"
+    if enc in ("latin-1", "iso-8859-1", "iso-latin-1") or \
+       enc.startswith(("latin-1-", "iso-8859-1-", "iso-latin-1-")):
+        return "iso-8859-1"
+    return orig_enc
+
+def detect_encoding(readline):
+    """
+    The detect_encoding() function is used to detect the encoding that should
+    be used to decode a Python source file. It requires one argument, readline,
+    in the same way as the tokenize() generator.
+
+    It will call readline a maximum of twice, and return the encoding used
+    (as a string) and a list of any lines (left as bytes) it has read
+    in.
+
+    It detects the encoding from the presence of a utf-8 bom or an encoding
+    cookie as specified in pep-0263. If both a bom and a cookie are present, but
+    disagree, a SyntaxError will be raised. If the encoding cookie is an
+    invalid charset, a SyntaxError is raised as well. Note that if a utf-8
+    bom is found, 'utf-8-sig' is returned.
+
+    If no encoding is specified, then the default of 'utf-8' will be returned.
+    """
+    bom_found = False
+    encoding = None
+    default = 'utf-8'
+    def read_or_stop():
+        try:
+            return readline()
+        except StopIteration:
+            return bytes()
+
+    def find_cookie(line):
+        try:
+            line_string = line.decode('ascii')
+        except UnicodeDecodeError:
+            return None
+        match = cookie_re.match(line_string)
+        if not match:
+            return None
+        encoding = _get_normal_name(match.group(1))
+        try:
+            codec = lookup(encoding)
+        except LookupError:
+            # This behaviour mimics the Python interpreter
+            raise SyntaxError("unknown encoding: " + encoding)
+
+        if bom_found:
+            if codec.name != 'utf-8':
+                # This behaviour mimics the Python interpreter
+                raise SyntaxError('encoding problem: utf-8')
+            encoding += '-sig'
+        return encoding
+
+    first = read_or_stop()
+    if first.startswith(BOM_UTF8):
+        bom_found = True
+        first = first[3:]
+        default = 'utf-8-sig'
+    if not first:
+        return default, []
+
+    encoding = find_cookie(first)
+    if encoding:
+        return encoding, [first]
+
+    second = read_or_stop()
+    if not second:
+        return default, [first]
+
+    encoding = find_cookie(second)
+    if encoding:
+        return encoding, [first, second]
+
+    return default, [first, second]
+
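+# Illustrative only, not part of upstream lib2to3: a coding cookie on the
+# first line is honored; with no cookie the utf-8 default is returned.
+def _detect_encoding_example():
+    from StringIO import StringIO
+    cookie_line = "# -*- coding: latin-1 -*-\n"
+    enc, lines = detect_encoding(StringIO(cookie_line + "pass\n").readline)
+    assert enc == "iso-8859-1" and lines == [cookie_line]
+    enc, lines = detect_encoding(StringIO("pass\n").readline)
+    assert enc == "utf-8" and lines == ["pass\n"]
+    return enc
+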
+def untokenize(iterable):
+    """Transform tokens back into Python source code.
+
+    Each element returned by the iterable must be a token sequence
+    with at least two elements, a token number and token value.  If
+    only the two-element form is passed, the resulting output is poor.
+
+    Round-trip invariant for full input:
+        Untokenized source will match input source exactly
+
+    Round-trip invariant for limited input:
+        # Output text will tokenize back to the input
+        t1 = [tok[:2] for tok in generate_tokens(f.readline)]
+        newcode = untokenize(t1)
+        readline = iter(newcode.splitlines(1)).next
+        t2 = [tok[:2] for tok in generate_tokens(readline)]
+        assert t1 == t2
+    """
+    ut = Untokenizer()
+    return ut.untokenize(iterable)
+
+def generate_tokens(readline):
+    """
+    The generate_tokens() generator requires one argument, readline, which
+    must be a callable object which provides the same interface as the
+    readline() method of built-in file objects. Each call to the function
+    should return one line of input as a string.  Alternately, readline
+    can be a callable function terminating with StopIteration:
+        readline = open(myfile).next    # Example of alternate readline
+
+    The generator produces 5-tuples with these members: the token type; the
+    token string; a 2-tuple (srow, scol) of ints specifying the row and
+    column where the token begins in the source; a 2-tuple (erow, ecol) of
+    ints specifying the row and column where the token ends in the source;
+    and the line on which the token was found. The line passed is the
+    logical line; continuation lines are included.
+    """
+    lnum = parenlev = continued = 0
+    namechars, numchars = string.ascii_letters + '_', '0123456789'
+    contstr, needcont = '', 0
+    contline = None
+    indents = [0]
+
+    while 1:                                   # loop over lines in stream
+        try:
+            line = readline()
+        except StopIteration:
+            line = ''
+        lnum = lnum + 1
+        pos, max = 0, len(line)
+
+        if contstr:                            # continued string
+            if not line:
+                raise TokenError, ("EOF in multi-line string", strstart)
+            endmatch = endprog.match(line)
+            if endmatch:
+                pos = end = endmatch.end(0)
+                yield (STRING, contstr + line[:end],
+                       strstart, (lnum, end), contline + line)
+                contstr, needcont = '', 0
+                contline = None
+            elif needcont and line[-2:] != '\\\n' and line[-3:] != '\\\r\n':
+                yield (ERRORTOKEN, contstr + line,
+                           strstart, (lnum, len(line)), contline)
+                contstr = ''
+                contline = None
+                continue
+            else:
+                contstr = contstr + line
+                contline = contline + line
+                continue
+
+        elif parenlev == 0 and not continued:  # new statement
+            if not line: break
+            column = 0
+            while pos < max:                   # measure leading whitespace
+                if line[pos] == ' ': column = column + 1
+                elif line[pos] == '\t': column = (column//tabsize + 1)*tabsize
+                elif line[pos] == '\f': column = 0
+                else: break
+                pos = pos + 1
+            if pos == max: break
+
+            if line[pos] in '#\r\n':           # skip comments or blank lines
+                if line[pos] == '#':
+                    comment_token = line[pos:].rstrip('\r\n')
+                    nl_pos = pos + len(comment_token)
+                    yield (COMMENT, comment_token,
+                           (lnum, pos), (lnum, pos + len(comment_token)), line)
+                    yield (NL, line[nl_pos:],
+                           (lnum, nl_pos), (lnum, len(line)), line)
+                else:
+                    yield ((NL, COMMENT)[line[pos] == '#'], line[pos:],
+                           (lnum, pos), (lnum, len(line)), line)
+                continue
+
+            if column > indents[-1]:           # count indents or dedents
+                indents.append(column)
+                yield (INDENT, line[:pos], (lnum, 0), (lnum, pos), line)
+            while column < indents[-1]:
+                if column not in indents:
+                    raise IndentationError(
+                        "unindent does not match any outer indentation level",
+                        ("<tokenize>", lnum, pos, line))
+                indents = indents[:-1]
+                yield (DEDENT, '', (lnum, pos), (lnum, pos), line)
+
+        else:                                  # continued statement
+            if not line:
+                raise TokenError, ("EOF in multi-line statement", (lnum, 0))
+            continued = 0
+
+        while pos < max:
+            pseudomatch = pseudoprog.match(line, pos)
+            if pseudomatch:                                # scan for tokens
+                start, end = pseudomatch.span(1)
+                spos, epos, pos = (lnum, start), (lnum, end), end
+                token, initial = line[start:end], line[start]
+
+                if initial in numchars or \
+                   (initial == '.' and token != '.'):      # ordinary number
+                    yield (NUMBER, token, spos, epos, line)
+                elif initial in '\r\n':
+                    newline = NEWLINE
+                    if parenlev > 0:
+                        newline = NL
+                    yield (newline, token, spos, epos, line)
+                elif initial == '#':
+                    assert not token.endswith("\n")
+                    yield (COMMENT, token, spos, epos, line)
+                elif token in triple_quoted:
+                    endprog = endprogs[token]
+                    endmatch = endprog.match(line, pos)
+                    if endmatch:                           # all on one line
+                        pos = endmatch.end(0)
+                        token = line[start:pos]
+                        yield (STRING, token, spos, (lnum, pos), line)
+                    else:
+                        strstart = (lnum, start)           # multiple lines
+                        contstr = line[start:]
+                        contline = line
+                        break
+                elif initial in single_quoted or \
+                    token[:2] in single_quoted or \
+                    token[:3] in single_quoted:
+                    if token[-1] == '\n':                  # continued string
+                        strstart = (lnum, start)
+                        endprog = (endprogs[initial] or endprogs[token[1]] or
+                                   endprogs[token[2]])
+                        contstr, needcont = line[start:], 1
+                        contline = line
+                        break
+                    else:                                  # ordinary string
+                        yield (STRING, token, spos, epos, line)
+                elif initial in namechars:                 # ordinary name
+                    yield (NAME, token, spos, epos, line)
+                elif initial == '\\':                      # continued stmt
+                    # This yield is new; needed for better idempotency:
+                    yield (NL, token, spos, (lnum, pos), line)
+                    continued = 1
+                else:
+                    if initial in '([{': parenlev = parenlev + 1
+                    elif initial in ')]}': parenlev = parenlev - 1
+                    yield (OP, token, spos, epos, line)
+            else:
+                yield (ERRORTOKEN, line[pos],
+                           (lnum, pos), (lnum, pos+1), line)
+                pos = pos + 1
+
+    for indent in indents[1:]:                 # pop remaining indent levels
+        yield (DEDENT, '', (lnum, 0), (lnum, 0), '')
+    yield (ENDMARKER, '', (lnum, 0), (lnum, 0), '')
+
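+# Illustrative only, not part of upstream lib2to3: tokenize a one-line string
+# and keep just the (token name, value) pairs; '=' comes back as a generic OP
+# token, as described in the module docstring.
+def _generate_tokens_example():
+    from StringIO import StringIO
+    readline = StringIO("x = 1 # mark\n").readline
+    pairs = [(tok_name[t], v) for t, v, _, _, _ in generate_tokens(readline)]
+    assert pairs == [("NAME", "x"), ("OP", "="), ("NUMBER", "1"),
+                     ("COMMENT", "# mark"), ("NEWLINE", "\n"), ("ENDMARKER", "")]
+    return pairs
+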
+if __name__ == '__main__':                     # testing
+    import sys
+    if len(sys.argv) > 1: tokenize(open(sys.argv[1]).readline)
+    else: tokenize(sys.stdin.readline)
diff --git a/python/helpers/pydev/third_party/pep8/lib2to3/lib2to3/pygram.py b/python/helpers/pydev/third_party/pep8/lib2to3/lib2to3/pygram.py
new file mode 100644
index 0000000..621ff24
--- /dev/null
+++ b/python/helpers/pydev/third_party/pep8/lib2to3/lib2to3/pygram.py
@@ -0,0 +1,40 @@
+# Copyright 2006 Google, Inc. All Rights Reserved.
+# Licensed to PSF under a Contributor Agreement.
+
+"""Export the Python grammar and symbols."""
+
+# Python imports
+import os
+
+# Local imports
+from .pgen2 import token
+from .pgen2 import driver
+from . import pytree
+
+# The grammar file
+_GRAMMAR_FILE = os.path.join(os.path.dirname(__file__), "Grammar.txt")
+_PATTERN_GRAMMAR_FILE = os.path.join(os.path.dirname(__file__),
+                                     "PatternGrammar.txt")
+
+
+class Symbols(object):
+
+    def __init__(self, grammar):
+        """Initializer.
+
+        Creates an attribute for each grammar symbol (nonterminal),
+        whose value is the symbol's type (an int >= 256).
+        """
+        for name, symbol in grammar.symbol2number.iteritems():
+            setattr(self, name, symbol)
+
+
+python_grammar = driver.load_grammar(_GRAMMAR_FILE)
+
+python_symbols = Symbols(python_grammar)
+
+python_grammar_no_print_statement = python_grammar.copy()
+del python_grammar_no_print_statement.keywords["print"]
+
+pattern_grammar = driver.load_grammar(_PATTERN_GRAMMAR_FILE)
+pattern_symbols = Symbols(pattern_grammar)
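+
+
+# Illustrative only, not part of upstream lib2to3 (and assuming the standard
+# Grammar.txt shipped with lib2to3): every nonterminal becomes an integer
+# attribute on python_symbols, and python_grammar maps it back to its name.
+def _symbols_example():
+    n = python_symbols.file_input
+    assert token.ISNONTERMINAL(n)
+    assert python_grammar.number2symbol[n] == "file_input"
+    return n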
diff --git a/python/helpers/pydev/third_party/pep8/lib2to3/lib2to3/pytree.py b/python/helpers/pydev/third_party/pep8/lib2to3/lib2to3/pytree.py
new file mode 100644
index 0000000..179caca
--- /dev/null
+++ b/python/helpers/pydev/third_party/pep8/lib2to3/lib2to3/pytree.py
@@ -0,0 +1,887 @@
+# Copyright 2006 Google, Inc. All Rights Reserved.
+# Licensed to PSF under a Contributor Agreement.
+
+"""
+Python parse tree definitions.
+
+This is a very concrete parse tree; we need to keep every token and
+even the comments and whitespace between tokens.
+
+There's also a pattern matching implementation here.
+"""
+
+__author__ = "Guido van Rossum <guido@python.org>"
+
+import sys
+import warnings
+from StringIO import StringIO
+
+HUGE = 0x7FFFFFFF  # maximum repeat count, default max
+
+_type_reprs = {}
+def type_repr(type_num):
+    global _type_reprs
+    if not _type_reprs:
+        from .pygram import python_symbols
+        # printing tokens is possible but not as useful
+        # from .pgen2 import token // token.__dict__.items():
+        for name, val in python_symbols.__dict__.items():
+            if type(val) == int: _type_reprs[val] = name
+    return _type_reprs.setdefault(type_num, type_num)
+
+class Base(object):
+
+    """
+    Abstract base class for Node and Leaf.
+
+    This provides some default functionality and boilerplate using the
+    template pattern.
+
+    A node may be a subnode of at most one parent.
+    """
+
+    # Default values for instance variables
+    type = None    # int: token number (< 256) or symbol number (>= 256)
+    parent = None  # Parent node pointer, or None
+    children = ()  # Tuple of subnodes
+    was_changed = False
+    was_checked = False
+
+    def __new__(cls, *args, **kwds):
+        """Constructor that prevents Base from being instantiated."""
+        assert cls is not Base, "Cannot instantiate Base"
+        return object.__new__(cls)
+
+    def __eq__(self, other):
+        """
+        Compare two nodes for equality.
+
+        This calls the method _eq().
+        """
+        if self.__class__ is not other.__class__:
+            return NotImplemented
+        return self._eq(other)
+
+    __hash__ = None # For Py3 compatibility.
+
+    def __ne__(self, other):
+        """
+        Compare two nodes for inequality.
+
+        This calls the method _eq().
+        """
+        if self.__class__ is not other.__class__:
+            return NotImplemented
+        return not self._eq(other)
+
+    def _eq(self, other):
+        """
+        Compare two nodes for equality.
+
+        This is called by __eq__ and __ne__.  It is only called if the two nodes
+        have the same type.  This must be implemented by the concrete subclass.
+        Nodes should be considered equal if they have the same structure,
+        ignoring the prefix string and other context information.
+        """
+        raise NotImplementedError
+
+    def clone(self):
+        """
+        Return a cloned (deep) copy of self.
+
+        This must be implemented by the concrete subclass.
+        """
+        raise NotImplementedError
+
+    def post_order(self):
+        """
+        Return a post-order iterator for the tree.
+
+        This must be implemented by the concrete subclass.
+        """
+        raise NotImplementedError
+
+    def pre_order(self):
+        """
+        Return a pre-order iterator for the tree.
+
+        This must be implemented by the concrete subclass.
+        """
+        raise NotImplementedError
+
+    def set_prefix(self, prefix):
+        """
+        Set the prefix for the node (see Leaf class).
+
+        DEPRECATED; use the prefix property directly.
+        """
+        warnings.warn("set_prefix() is deprecated; use the prefix property",
+                      DeprecationWarning, stacklevel=2)
+        self.prefix = prefix
+
+    def get_prefix(self):
+        """
+        Return the prefix for the node (see Leaf class).
+
+        DEPRECATED; use the prefix property directly.
+        """
+        warnings.warn("get_prefix() is deprecated; use the prefix property",
+                      DeprecationWarning, stacklevel=2)
+        return self.prefix
+
+    def replace(self, new):
+        """Replace this node with a new one in the parent."""
+        assert self.parent is not None, str(self)
+        assert new is not None
+        if not isinstance(new, list):
+            new = [new]
+        l_children = []
+        found = False
+        for ch in self.parent.children:
+            if ch is self:
+                assert not found, (self.parent.children, self, new)
+                if new is not None:
+                    l_children.extend(new)
+                found = True
+            else:
+                l_children.append(ch)
+        assert found, (self.children, self, new)
+        self.parent.changed()
+        self.parent.children = l_children
+        for x in new:
+            x.parent = self.parent
+        self.parent = None
+
+    def get_lineno(self):
+        """Return the line number which generated the invocant node."""
+        node = self
+        while not isinstance(node, Leaf):
+            if not node.children:
+                return
+            node = node.children[0]
+        return node.lineno
+
+    def changed(self):
+        if self.parent:
+            self.parent.changed()
+        self.was_changed = True
+
+    def remove(self):
+        """
+        Remove the node from the tree. Returns the position of the node in its
+        parent's children before it was removed.
+        """
+        if self.parent:
+            for i, node in enumerate(self.parent.children):
+                if node is self:
+                    self.parent.changed()
+                    del self.parent.children[i]
+                    self.parent = None
+                    return i
+
+    @property
+    def next_sibling(self):
+        """
+        The node immediately following the invocant in their parent's children
+        list. If the invocant does not have a next sibling, it is None.
+        """
+        if self.parent is None:
+            return None
+
+        # Can't use index(); we need to test by identity
+        for i, child in enumerate(self.parent.children):
+            if child is self:
+                try:
+                    return self.parent.children[i+1]
+                except IndexError:
+                    return None
+
+    @property
+    def prev_sibling(self):
+        """
+        The node immediately preceding the invocant in their parent's children
+        list. If the invocant does not have a previous sibling, it is None.
+        """
+        if self.parent is None:
+            return None
+
+        # Can't use index(); we need to test by identity
+        for i, child in enumerate(self.parent.children):
+            if child is self:
+                if i == 0:
+                    return None
+                return self.parent.children[i-1]
+
+    def leaves(self):
+        for child in self.children:
+            for x in child.leaves():
+                yield x
+
+    def depth(self):
+        if self.parent is None:
+            return 0
+        return 1 + self.parent.depth()
+
+    def get_suffix(self):
+        """
+        Return the string immediately following the invocant node. This is
+        effectively equivalent to node.next_sibling.prefix
+        """
+        next_sib = self.next_sibling
+        if next_sib is None:
+            return u""
+        return next_sib.prefix
+
+    if sys.version_info < (3, 0):
+        def __str__(self):
+            return unicode(self).encode("ascii")
+
+class Node(Base):
+
+    """Concrete implementation for interior nodes."""
+
+    def __init__(self, type, children,
+                 context=None,
+                 prefix=None,
+                 fixers_applied=None):
+        """
+        Initializer.
+
+        Takes a type constant (a symbol number >= 256), a sequence of
+        child nodes, and an optional context keyword argument.
+
+        As a side effect, the parent pointers of the children are updated.
+        """
+        assert type >= 256, type
+        self.type = type
+        self.children = list(children)
+        for ch in self.children:
+            assert ch.parent is None, repr(ch)
+            ch.parent = self
+        if prefix is not None:
+            self.prefix = prefix
+        if fixers_applied:
+            self.fixers_applied = fixers_applied[:]
+        else:
+            self.fixers_applied = None
+
+    def __repr__(self):
+        """Return a canonical string representation."""
+        return "%s(%s, %r)" % (self.__class__.__name__,
+                               type_repr(self.type),
+                               self.children)
+
+    def __unicode__(self):
+        """
+        Return a pretty string representation.
+
+        This reproduces the input source exactly.
+        """
+        return u"".join(map(unicode, self.children))
+
+    if sys.version_info > (3, 0):
+        __str__ = __unicode__
+
+    def _eq(self, other):
+        """Compare two nodes for equality."""
+        return (self.type, self.children) == (other.type, other.children)
+
+    def clone(self):
+        """Return a cloned (deep) copy of self."""
+        return Node(self.type, [ch.clone() for ch in self.children],
+                    fixers_applied=self.fixers_applied)
+
+    def post_order(self):
+        """Return a post-order iterator for the tree."""
+        for child in self.children:
+            for node in child.post_order():
+                yield node
+        yield self
+
+    def pre_order(self):
+        """Return a pre-order iterator for the tree."""
+        yield self
+        for child in self.children:
+            for node in child.pre_order():
+                yield node
+
+    def _prefix_getter(self):
+        """
+        The whitespace and comments preceding this node in the input.
+        """
+        if not self.children:
+            return ""
+        return self.children[0].prefix
+
+    def _prefix_setter(self, prefix):
+        if self.children:
+            self.children[0].prefix = prefix
+
+    prefix = property(_prefix_getter, _prefix_setter)
+
+    def set_child(self, i, child):
+        """
+        Equivalent to 'node.children[i] = child'. This method also sets the
+        child's parent attribute appropriately.
+        """
+        child.parent = self
+        self.children[i].parent = None
+        self.children[i] = child
+        self.changed()
+
+    def insert_child(self, i, child):
+        """
+        Equivalent to 'node.children.insert(i, child)'. This method also sets
+        the child's parent attribute appropriately.
+        """
+        child.parent = self
+        self.children.insert(i, child)
+        self.changed()
+
+    def append_child(self, child):
+        """
+        Equivalent to 'node.children.append(child)'. This method also sets the
+        child's parent attribute appropriately.
+        """
+        child.parent = self
+        self.children.append(child)
+        self.changed()
+
+
+class Leaf(Base):
+
+    """Concrete implementation for leaf nodes."""
+
+    # Default values for instance variables
+    _prefix = ""  # Whitespace and comments preceding this token in the input
+    lineno = 0    # Line where this token starts in the input
+    column = 0    # Column where this token starts in the input
+
+    def __init__(self, type, value,
+                 context=None,
+                 prefix=None,
+                 fixers_applied=[]):
+        """
+        Initializer.
+
+        Takes a type constant (a token number < 256), a string value, and an
+        optional context keyword argument.
+        """
+        assert 0 <= type < 256, type
+        if context is not None:
+            self._prefix, (self.lineno, self.column) = context
+        self.type = type
+        self.value = value
+        if prefix is not None:
+            self._prefix = prefix
+        self.fixers_applied = fixers_applied[:]
+
+    def __repr__(self):
+        """Return a canonical string representation."""
+        return "%s(%r, %r)" % (self.__class__.__name__,
+                               self.type,
+                               self.value)
+
+    def __unicode__(self):
+        """
+        Return a pretty string representation.
+
+        This reproduces the input source exactly.
+        """
+        return self.prefix + unicode(self.value)
+
+    if sys.version_info > (3, 0):
+        __str__ = __unicode__
+
+    def _eq(self, other):
+        """Compare two nodes for equality."""
+        return (self.type, self.value) == (other.type, other.value)
+
+    def clone(self):
+        """Return a cloned (deep) copy of self."""
+        return Leaf(self.type, self.value,
+                    (self.prefix, (self.lineno, self.column)),
+                    fixers_applied=self.fixers_applied)
+
+    def leaves(self):
+        yield self
+
+    def post_order(self):
+        """Return a post-order iterator for the tree."""
+        yield self
+
+    def pre_order(self):
+        """Return a pre-order iterator for the tree."""
+        yield self
+
+    def _prefix_getter(self):
+        """
+        The whitespace and comments preceding this token in the input.
+        """
+        return self._prefix
+
+    def _prefix_setter(self, prefix):
+        self.changed()
+        self._prefix = prefix
+
+    prefix = property(_prefix_getter, _prefix_setter)
+
+def convert(gr, raw_node):
+    """
+    Convert raw node information to a Node or Leaf instance.
+
+    This is passed to the parser driver which calls it whenever a reduction of a
+    grammar rule produces a new complete node, so that the tree is built
+    strictly bottom-up.
+    """
+    type, value, context, children = raw_node
+    if children or type in gr.number2symbol:
+        # If there's exactly one child, return that child instead of
+        # creating a new node.
+        if len(children) == 1:
+            return children[0]
+        return Node(type, children, context=context)
+    else:
+        return Leaf(type, value, context=context)
+
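+# A hedged sketch of how the parser driver uses convert(): for each completed
+# reduction it passes a (type, value, context, children) tuple.  The names
+# below (fake_grammar, symbol number 257) are placeholders; the only
+# assumptions are that fake_grammar.number2symbol is keyed by symbol numbers
+# >= 256 and that NAME is token 1 in the pgen2 token module.
+#
+#   raw_leaf = (1, u"x", (u"", (1, 0)), [])     # a NAME token
+#   leaf = convert(fake_grammar, raw_leaf)      # -> Leaf(1, u"x")
+#   raw_node = (257, None, None, [leaf])        # single-child reduction
+#   node = convert(fake_grammar, raw_node)      # -> leaf itself (collapsed)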
+
+class BasePattern(object):
+
+    """
+    A pattern describes how to match nodes in a syntax tree.
+
+    It looks for a specific node type (token or symbol), and
+    optionally for a specific content.
+
+    This is an abstract base class.  There are three concrete
+    subclasses:
+
+    - LeafPattern matches a single leaf node;
+    - NodePattern matches a single node (usually non-leaf);
+    - WildcardPattern matches a sequence of nodes of variable length.
+    """
+
+    # Defaults for instance variables
+    type = None     # Node type (token if < 256, symbol if >= 256)
+    content = None  # Optional content matching pattern
+    name = None     # Optional name used to store match in results dict
+
+    def __new__(cls, *args, **kwds):
+        """Constructor that prevents BasePattern from being instantiated."""
+        assert cls is not BasePattern, "Cannot instantiate BasePattern"
+        return object.__new__(cls)
+
+    def __repr__(self):
+        args = [type_repr(self.type), self.content, self.name]
+        while args and args[-1] is None:
+            del args[-1]
+        return "%s(%s)" % (self.__class__.__name__, ", ".join(map(repr, args)))
+
+    def optimize(self):
+        """
+        A subclass can define this as a hook for optimizations.
+
+        Returns either self or another node with the same effect.
+        """
+        return self
+
+    def match(self, node, results=None):
+        """
+        Does this pattern exactly match a node?
+
+        Returns True if it matches, False if not.
+
+        If results is not None, it must be a dict which will be
+        updated with the nodes matching named subpatterns.
+
+        Default implementation for non-wildcard patterns.
+        """
+        if self.type is not None and node.type != self.type:
+            return False
+        if self.content is not None:
+            r = None
+            if results is not None:
+                r = {}
+            if not self._submatch(node, r):
+                return False
+            if r:
+                results.update(r)
+        if results is not None and self.name:
+            results[self.name] = node
+        return True
+
+    def match_seq(self, nodes, results=None):
+        """
+        Does this pattern exactly match a sequence of nodes?
+
+        Default implementation for non-wildcard patterns.
+        """
+        if len(nodes) != 1:
+            return False
+        return self.match(nodes[0], results)
+
+    def generate_matches(self, nodes):
+        """
+        Generator yielding all matches for this pattern.
+
+        Default implementation for non-wildcard patterns.
+        """
+        r = {}
+        if nodes and self.match(nodes[0], r):
+            yield 1, r
+
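+# A minimal sketch of how the concrete pattern classes below are typically
+# used (NAME is token 1 in the pgen2 token module; the result name "target"
+# is arbitrary):
+#
+#   name_leaf = Leaf(1, u"foo")
+#   pattern = LeafPattern(1, content=u"foo", name="target")
+#   results = {}
+#   pattern.match(name_leaf, results)
+#   # -> True; results["target"] is name_leaf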
+
+class LeafPattern(BasePattern):
+
+    def __init__(self, type=None, content=None, name=None):
+        """
+        Initializer.  Takes optional type, content, and name.
+
+        The type, if given, must be a token type (< 256).  If not given,
+        this matches any *leaf* node; the content may still be required.
+
+        The content, if given, must be a string.
+
+        If a name is given, the matching node is stored in the results
+        dict under that key.
+        """
+        if type is not None:
+            assert 0 <= type < 256, type
+        if content is not None:
+            assert isinstance(content, basestring), repr(content)
+        self.type = type
+        self.content = content
+        self.name = name
+
+    def match(self, node, results=None):
+        """Override match() to insist on a leaf node."""
+        if not isinstance(node, Leaf):
+            return False
+        return BasePattern.match(self, node, results)
+
+    def _submatch(self, node, results=None):
+        """
+        Match the pattern's content to the node's children.
+
+        This assumes the node type matches and self.content is not None.
+
+        Returns True if it matches, False if not.
+
+        If results is not None, it must be a dict which will be
+        updated with the nodes matching named subpatterns.
+
+        When returning False, the results dict may still be updated.
+        """
+        return self.content == node.value
+
+
+class NodePattern(BasePattern):
+
+    wildcards = False
+
+    def __init__(self, type=None, content=None, name=None):
+        """
+        Initializer.  Takes optional type, content, and name.
+
+        The type, if given, must be a symbol type (>= 256).  If the
+        type is None this matches *any* single node (leaf or not),
+        except if content is not None, in which case it only matches
+        non-leaf nodes that also match the content pattern.
+
+        The content, if not None, must be a sequence of Patterns that
+        must match the node's children exactly.  If the content is
+        given, the type must not be None.
+
+        If a name is given, the matching node is stored in the results
+        dict under that key.
+        """
+        if type is not None:
+            assert type >= 256, type
+        if content is not None:
+            assert not isinstance(content, basestring), repr(content)
+            content = list(content)
+            for i, item in enumerate(content):
+                assert isinstance(item, BasePattern), (i, item)
+                if isinstance(item, WildcardPattern):
+                    self.wildcards = True
+        self.type = type
+        self.content = content
+        self.name = name
+
+    def _submatch(self, node, results=None):
+        """
+        Match the pattern's content to the node's children.
+
+        This assumes the node type matches and self.content is not None.
+
+        Returns True if it matches, False if not.
+
+        If results is not None, it must be a dict which will be
+        updated with the nodes matching named subpatterns.
+
+        When returning False, the results dict may still be updated.
+        """
+        if self.wildcards:
+            for c, r in generate_matches(self.content, node.children):
+                if c == len(node.children):
+                    if results is not None:
+                        results.update(r)
+                    return True
+            return False
+        if len(self.content) != len(node.children):
+            return False
+        for subpattern, child in zip(self.content, node.children):
+            if not subpattern.match(child, results):
+                return False
+        return True
+
+
+class WildcardPattern(BasePattern):
+
+    """
+    A wildcard pattern can match zero or more nodes.
+
+    This has all the flexibility needed to implement patterns like:
+
+    .*      .+      .?      .{m,n}
+    (a b c | d e | f)
+    (...)*  (...)+  (...)?  (...){m,n}
+
+    except it always uses non-greedy matching.
+    """
+
+    def __init__(self, content=None, min=0, max=HUGE, name=None):
+        """
+        Initializer.
+
+        Args:
+            content: optional sequence of subsequences of patterns;
+                     if absent, matches one node;
+                     if present, each subsequence is an alternative [*]
+            min: optional minimum number of times to match, default 0
+            max: optional maximum number of times to match, default HUGE
+            name: optional name assigned to this match
+
+        [*] Thus, if content is [[a, b, c], [d, e], [f, g, h]] this is
+            equivalent to (a b c | d e | f g h); if content is None,
+            this is equivalent to '.' in regular expression terms.
+            The min and max parameters work as follows:
+                min=0, max=maxint: .*
+                min=1, max=maxint: .+
+                min=0, max=1: .?
+                min=1, max=1: .
+            If content is not None, replace the dot with the parenthesized
+            list of alternatives, e.g. (a b c | d e | f g h)*
+        """
+        assert 0 <= min <= max <= HUGE, (min, max)
+        if content is not None:
+            content = tuple(map(tuple, content))  # Protect against alterations
+            # Check sanity of alternatives
+            assert len(content), repr(content)  # Can't have zero alternatives
+            for alt in content:
+                assert len(alt), repr(alt)  # Can't have empty alternatives
+        self.content = content
+        self.min = min
+        self.max = max
+        self.name = name
+
+    def optimize(self):
+        """Optimize certain stacked wildcard patterns."""
+        subpattern = None
+        if (self.content is not None and
+            len(self.content) == 1 and len(self.content[0]) == 1):
+            subpattern = self.content[0][0]
+        if self.min == 1 and self.max == 1:
+            if self.content is None:
+                return NodePattern(name=self.name)
+            if subpattern is not None and self.name == subpattern.name:
+                return subpattern.optimize()
+        if (self.min <= 1 and isinstance(subpattern, WildcardPattern) and
+            subpattern.min <= 1 and self.name == subpattern.name):
+            return WildcardPattern(subpattern.content,
+                                   self.min*subpattern.min,
+                                   self.max*subpattern.max,
+                                   subpattern.name)
+        return self
+
+    def match(self, node, results=None):
+        """Does this pattern exactly match a node?"""
+        return self.match_seq([node], results)
+
+    def match_seq(self, nodes, results=None):
+        """Does this pattern exactly match a sequence of nodes?"""
+        for c, r in self.generate_matches(nodes):
+            if c == len(nodes):
+                if results is not None:
+                    results.update(r)
+                    if self.name:
+                        results[self.name] = list(nodes)
+                return True
+        return False
+
+    def generate_matches(self, nodes):
+        """
+        Generator yielding matches for a sequence of nodes.
+
+        Args:
+            nodes: sequence of nodes
+
+        Yields:
+            (count, results) tuples where:
+            count: the match comprises nodes[:count];
+            results: dict containing named submatches.
+        """
+        if self.content is None:
+            # Shortcut for special case (see __init__.__doc__)
+            for count in xrange(self.min, 1 + min(len(nodes), self.max)):
+                r = {}
+                if self.name:
+                    r[self.name] = nodes[:count]
+                yield count, r
+        elif self.name == "bare_name":
+            yield self._bare_name_matches(nodes)
+        else:
+            # The reason for this is that hitting the recursion limit usually
+            # results in some ugly messages about how RuntimeErrors are being
+            # ignored. We don't do this on non-CPython implementations because
+            # they don't have this problem.
+            if hasattr(sys, "getrefcount"):
+                save_stderr = sys.stderr
+                sys.stderr = StringIO()
+            try:
+                for count, r in self._recursive_matches(nodes, 0):
+                    if self.name:
+                        r[self.name] = nodes[:count]
+                    yield count, r
+            except RuntimeError:
+                # We fall back to the iterative pattern matching scheme if the recursive
+                # scheme hits the recursion limit.
+                for count, r in self._iterative_matches(nodes):
+                    if self.name:
+                        r[self.name] = nodes[:count]
+                    yield count, r
+            finally:
+                if hasattr(sys, "getrefcount"):
+                    sys.stderr = save_stderr
+
+    def _iterative_matches(self, nodes):
+        """Helper to iteratively yield the matches."""
+        nodelen = len(nodes)
+        if 0 >= self.min:
+            yield 0, {}
+
+        results = []
+        # generate matches that use just one alt from self.content
+        for alt in self.content:
+            for c, r in generate_matches(alt, nodes):
+                yield c, r
+                results.append((c, r))
+
+        # for each match, iterate down the nodes
+        while results:
+            new_results = []
+            for c0, r0 in results:
+                # stop if the entire set of nodes has been matched
+                if c0 < nodelen and c0 <= self.max:
+                    for alt in self.content:
+                        for c1, r1 in generate_matches(alt, nodes[c0:]):
+                            if c1 > 0:
+                                r = {}
+                                r.update(r0)
+                                r.update(r1)
+                                yield c0 + c1, r
+                                new_results.append((c0 + c1, r))
+            results = new_results
+
+    def _bare_name_matches(self, nodes):
+        """Special optimized matcher for bare_name."""
+        count = 0
+        r = {}
+        done = False
+        max = len(nodes)
+        while not done and count < max:
+            done = True
+            for leaf in self.content:
+                if leaf[0].match(nodes[count], r):
+                    count += 1
+                    done = False
+                    break
+        r[self.name] = nodes[:count]
+        return count, r
+
+    def _recursive_matches(self, nodes, count):
+        """Helper to recursively yield the matches."""
+        assert self.content is not None
+        if count >= self.min:
+            yield 0, {}
+        if count < self.max:
+            for alt in self.content:
+                for c0, r0 in generate_matches(alt, nodes):
+                    for c1, r1 in self._recursive_matches(nodes[c0:], count+1):
+                        r = {}
+                        r.update(r0)
+                        r.update(r1)
+                        yield c0 + c1, r
+
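+# Sketch of the ".*"-style behaviour documented above (NAME and NUMBER are
+# tokens 1 and 2; "seq" is an arbitrary result name):
+#
+#   nodes = [Leaf(1, u"a"), Leaf(2, u"1"), Leaf(1, u"b")]
+#   any_run = WildcardPattern(name="seq")    # content=None, min=0, max=HUGE
+#   results = {}
+#   any_run.match_seq(nodes, results)        # -> True
+#   results["seq"]                           # -> the whole three-node list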
+
+class NegatedPattern(BasePattern):
+
+    def __init__(self, content=None):
+        """
+        Initializer.
+
+        The argument is either a pattern or None.  If it is None, this
+        only matches an empty sequence (effectively '$' in regex
+        lingo).  If it is not None, this matches whenever the argument
+        pattern doesn't have any matches.
+        """
+        if content is not None:
+            assert isinstance(content, BasePattern), repr(content)
+        self.content = content
+
+    def match(self, node):
+        # We never match a node in its entirety
+        return False
+
+    def match_seq(self, nodes):
+        # We only match an empty sequence of nodes in its entirety
+        return len(nodes) == 0
+
+    def generate_matches(self, nodes):
+        if self.content is None:
+            # Return a match if there is an empty sequence
+            if len(nodes) == 0:
+                yield 0, {}
+        else:
+            # Return a match if the argument pattern has no matches
+            for c, r in self.content.generate_matches(nodes):
+                return
+            yield 0, {}
+
+
+def generate_matches(patterns, nodes):
+    """
+    Generator yielding matches for a sequence of patterns and nodes.
+
+    Args:
+        patterns: a sequence of patterns
+        nodes: a sequence of nodes
+
+    Yields:
+        (count, results) tuples where:
+        count: the entire sequence of patterns matches nodes[:count];
+        results: dict containing named submatches.
+        """
+    if not patterns:
+        yield 0, {}
+    else:
+        p, rest = patterns[0], patterns[1:]
+        for c0, r0 in p.generate_matches(nodes):
+            if not rest:
+                yield c0, r0
+            else:
+                for c1, r1 in generate_matches(rest, nodes[c0:]):
+                    r = {}
+                    r.update(r0)
+                    r.update(r1)
+                    yield c0 + c1, r
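+
+# A hedged example of chaining patterns with the module-level
+# generate_matches() (NAME is token 1; "def" and "spam" are just sample
+# token values):
+#
+#   patterns = [LeafPattern(1, u"def"), LeafPattern(1, name="fname")]
+#   nodes = [Leaf(1, u"def"), Leaf(1, u"spam")]
+#   for count, results in generate_matches(patterns, nodes):
+#       pass   # count == 2, results["fname"] is the second leaf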
diff --git a/python/helpers/pydev/third_party/pep8/lib2to3/lib2to3/refactor.py b/python/helpers/pydev/third_party/pep8/lib2to3/lib2to3/refactor.py
new file mode 100644
index 0000000..a4c168d
--- /dev/null
+++ b/python/helpers/pydev/third_party/pep8/lib2to3/lib2to3/refactor.py
@@ -0,0 +1,747 @@
+# Copyright 2006 Google, Inc. All Rights Reserved.
+# Licensed to PSF under a Contributor Agreement.
+
+"""Refactoring framework.
+
+Used as a main program, this can refactor any number of files and/or
+recursively descend down directories.  Imported as a module, this
+provides infrastructure to write your own refactoring tool.
+"""
+
+from __future__ import with_statement
+
+__author__ = "Guido van Rossum <guido@python.org>"
+
+
+# Python imports
+import os
+import sys
+import logging
+import operator
+import collections
+import StringIO
+from itertools import chain
+
+# Local imports
+from .pgen2 import driver, tokenize, token
+from .fixer_util import find_root
+from . import pytree, pygram
+from . import btm_utils as bu
+from . import btm_matcher as bm
+
+
+def get_all_fix_names(fixer_pkg, remove_prefix=True):
+    """Return a sorted list of all available fix names in the given package."""
+    pkg = __import__(fixer_pkg, [], [], ["*"])
+    fixer_dir = os.path.dirname(pkg.__file__)
+    fix_names = []
+    for name in sorted(os.listdir(fixer_dir)):
+        if name.startswith("fix_") and name.endswith(".py"):
+            if remove_prefix:
+                name = name[4:]
+            fix_names.append(name[:-3])
+    return fix_names
+
+
+class _EveryNode(Exception):
+    pass
+
+
+def _get_head_types(pat):
+    """ Accepts a pytree Pattern Node and returns a set
+        of the pattern types which will match first. """
+
+    if isinstance(pat, (pytree.NodePattern, pytree.LeafPattern)):
+        # NodePatterns must either have no type and no content
+        #   or a type and content -- so they don't recurse any further.
+        # Always return leaves.
+        if pat.type is None:
+            raise _EveryNode
+        return set([pat.type])
+
+    if isinstance(pat, pytree.NegatedPattern):
+        if pat.content:
+            return _get_head_types(pat.content)
+        raise _EveryNode # Negated Patterns don't have a type
+
+    if isinstance(pat, pytree.WildcardPattern):
+        # Recurse on each node in content
+        r = set()
+        for p in pat.content:
+            for x in p:
+                r.update(_get_head_types(x))
+        return r
+
+    raise Exception("Oh no! I don't understand pattern %s" % (pat,))
+
+
+def _get_headnode_dict(fixer_list):
+    """ Accepts a list of fixers and returns a dictionary
+        of head node type --> fixer list.  """
+    head_nodes = collections.defaultdict(list)
+    every = []
+    for fixer in fixer_list:
+        if fixer.pattern:
+            try:
+                heads = _get_head_types(fixer.pattern)
+            except _EveryNode:
+                every.append(fixer)
+            else:
+                for node_type in heads:
+                    head_nodes[node_type].append(fixer)
+        else:
+            if fixer._accept_type is not None:
+                head_nodes[fixer._accept_type].append(fixer)
+            else:
+                every.append(fixer)
+    for node_type in chain(pygram.python_grammar.symbol2number.itervalues(),
+                           pygram.python_grammar.tokens):
+        head_nodes[node_type].extend(every)
+    return dict(head_nodes)
+
+
+def get_fixers_from_package(pkg_name):
+    """
+    Return the fully qualified names for fixers in the package pkg_name.
+    """
+    return [pkg_name + "." + fix_name
+            for fix_name in get_all_fix_names(pkg_name, False)]
+
+def _identity(obj):
+    return obj
+
+if sys.version_info < (3, 0):
+    import codecs
+    _open_with_encoding = codecs.open
+    # Sadly, codecs.open doesn't translate newlines.
+    def _from_system_newlines(input):
+        return input.replace(u"\r\n", u"\n")
+    def _to_system_newlines(input):
+        if os.linesep != "\n":
+            return input.replace(u"\n", os.linesep)
+        else:
+            return input
+else:
+    _open_with_encoding = open
+    _from_system_newlines = _identity
+    _to_system_newlines = _identity
+
+
+def _detect_future_features(source):
+    have_docstring = False
+    gen = tokenize.generate_tokens(StringIO.StringIO(source).readline)
+    def advance():
+        tok = gen.next()
+        return tok[0], tok[1]
+    ignore = frozenset((token.NEWLINE, tokenize.NL, token.COMMENT))
+    features = set()
+    try:
+        while True:
+            tp, value = advance()
+            if tp in ignore:
+                continue
+            elif tp == token.STRING:
+                if have_docstring:
+                    break
+                have_docstring = True
+            elif tp == token.NAME and value == u"from":
+                tp, value = advance()
+                if tp != token.NAME or value != u"__future__":
+                    break
+                tp, value = advance()
+                if tp != token.NAME or value != u"import":
+                    break
+                tp, value = advance()
+                if tp == token.OP and value == u"(":
+                    tp, value = advance()
+                while tp == token.NAME:
+                    features.add(value)
+                    tp, value = advance()
+                    if tp != token.OP or value != u",":
+                        break
+                    tp, value = advance()
+            else:
+                break
+    except StopIteration:
+        pass
+    return frozenset(features)
+
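+# Example of the scan above (a sketch; the returned values are unicode
+# because the source string is unicode):
+#
+#   src = u'"""doc"""\nfrom __future__ import print_function, division\n'
+#   _detect_future_features(src)
+#   # -> a frozenset containing u'print_function' and u'division'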
+
+class FixerError(Exception):
+    """A fixer could not be loaded."""
+
+
+class RefactoringTool(object):
+
+    _default_options = {"print_function" : False,
+                        "write_unchanged_files" : False}
+
+    CLASS_PREFIX = "Fix" # The prefix for fixer classes
+    FILE_PREFIX = "fix_" # The prefix for modules with a fixer within
+
+    def __init__(self, fixer_names, options=None, explicit=None):
+        """Initializer.
+
+        Args:
+            fixer_names: a list of fixers to import
+            options: a dict with configuration.
+            explicit: a list of fixers to run even if they are marked
+                explicit (i.e. not run by default).
+        """
+        self.fixers = fixer_names
+        self.explicit = explicit or []
+        self.options = self._default_options.copy()
+        if options is not None:
+            self.options.update(options)
+        if self.options["print_function"]:
+            self.grammar = pygram.python_grammar_no_print_statement
+        else:
+            self.grammar = pygram.python_grammar
+        # When this is True, the refactor*() methods will call write_file()
+        # even for files that were not changed during refactoring, provided
+        # the refactor method's write parameter was True.
+        self.write_unchanged_files = self.options.get("write_unchanged_files")
+        self.errors = []
+        self.logger = logging.getLogger("RefactoringTool")
+        self.fixer_log = []
+        self.wrote = False
+        self.driver = driver.Driver(self.grammar,
+                                    convert=pytree.convert,
+                                    logger=self.logger)
+        self.pre_order, self.post_order = self.get_fixers()
+
+
+        self.files = []  # List of files that were or should be modified
+
+        self.BM = bm.BottomMatcher()
+        self.bmi_pre_order = [] # Bottom Matcher incompatible fixers
+        self.bmi_post_order = []
+
+        for fixer in chain(self.post_order, self.pre_order):
+            if fixer.BM_compatible:
+                self.BM.add_fixer(fixer)
+                # remove fixers that will be handled by the bottom-up
+                # matcher
+            elif fixer in self.pre_order:
+                self.bmi_pre_order.append(fixer)
+            elif fixer in self.post_order:
+                self.bmi_post_order.append(fixer)
+
+        self.bmi_pre_order_heads = _get_headnode_dict(self.bmi_pre_order)
+        self.bmi_post_order_heads = _get_headnode_dict(self.bmi_post_order)
+
+
+
+    def get_fixers(self):
+        """Inspects the options to load the requested patterns and handlers.
+
+        Returns:
+          (pre_order, post_order), where pre_order is the list of fixers that
+          want a pre-order AST traversal, and post_order is the list that want
+          post-order traversal.
+        """
+        pre_order_fixers = []
+        post_order_fixers = []
+        for fix_mod_path in self.fixers:
+            mod = __import__(fix_mod_path, {}, {}, ["*"])
+            fix_name = fix_mod_path.rsplit(".", 1)[-1]
+            if fix_name.startswith(self.FILE_PREFIX):
+                fix_name = fix_name[len(self.FILE_PREFIX):]
+            parts = fix_name.split("_")
+            class_name = self.CLASS_PREFIX + "".join([p.title() for p in parts])
+            try:
+                fix_class = getattr(mod, class_name)
+            except AttributeError:
+                raise FixerError("Can't find %s.%s" % (fix_name, class_name))
+            fixer = fix_class(self.options, self.fixer_log)
+            if fixer.explicit and self.explicit is not True and \
+                    fix_mod_path not in self.explicit:
+                self.log_message("Skipping implicit fixer: %s", fix_name)
+                continue
+
+            self.log_debug("Adding transformation: %s", fix_name)
+            if fixer.order == "pre":
+                pre_order_fixers.append(fixer)
+            elif fixer.order == "post":
+                post_order_fixers.append(fixer)
+            else:
+                raise FixerError("Illegal fixer order: %r" % fixer.order)
+
+        key_func = operator.attrgetter("run_order")
+        pre_order_fixers.sort(key=key_func)
+        post_order_fixers.sort(key=key_func)
+        return (pre_order_fixers, post_order_fixers)
+
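+    # Naming convention applied by get_fixers(), illustrated with the
+    # standard fix_apply fixer (any fixer module follows the same pattern):
+    #
+    #   fix_mod_path = "lib2to3.fixes.fix_apply"
+    #   fix_name     = "apply"       # FILE_PREFIX "fix_" stripped
+    #   class_name   = "FixApply"    # CLASS_PREFIX + title-cased parts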
+    def log_error(self, msg, *args, **kwds):
+        """Called when an error occurs."""
+        raise
+
+    def log_message(self, msg, *args):
+        """Hook to log a message."""
+        if args:
+            msg = msg % args
+        self.logger.info(msg)
+
+    def log_debug(self, msg, *args):
+        if args:
+            msg = msg % args
+        self.logger.debug(msg)
+
+    def print_output(self, old_text, new_text, filename, equal):
+        """Called with the old version, new version, and filename of a
+        refactored file."""
+        pass
+
+    def refactor(self, items, write=False, doctests_only=False):
+        """Refactor a list of files and directories."""
+
+        for dir_or_file in items:
+            if os.path.isdir(dir_or_file):
+                self.refactor_dir(dir_or_file, write, doctests_only)
+            else:
+                self.refactor_file(dir_or_file, write, doctests_only)
+
+    def refactor_dir(self, dir_name, write=False, doctests_only=False):
+        """Descends down a directory and refactor every Python file found.
+
+        Python files are assumed to have a .py extension.
+
+        Files and subdirectories starting with '.' are skipped.
+        """
+        py_ext = os.extsep + "py"
+        for dirpath, dirnames, filenames in os.walk(dir_name):
+            self.log_debug("Descending into %s", dirpath)
+            dirnames.sort()
+            filenames.sort()
+            for name in filenames:
+                if (not name.startswith(".") and
+                    os.path.splitext(name)[1] == py_ext):
+                    fullname = os.path.join(dirpath, name)
+                    self.refactor_file(fullname, write, doctests_only)
+            # Modify dirnames in-place to remove subdirs with leading dots
+            dirnames[:] = [dn for dn in dirnames if not dn.startswith(".")]
+
+    def _read_python_source(self, filename):
+        """
+        Do our best to decode a Python source file correctly.
+        """
+        try:
+            f = open(filename, "rb")
+        except IOError as err:
+            self.log_error("Can't open %s: %s", filename, err)
+            return None, None
+        try:
+            encoding = tokenize.detect_encoding(f.readline)[0]
+        finally:
+            f.close()
+        with _open_with_encoding(filename, "r", encoding=encoding) as f:
+            return _from_system_newlines(f.read()), encoding
+
+    def refactor_file(self, filename, write=False, doctests_only=False):
+        """Refactors a file."""
+        input, encoding = self._read_python_source(filename)
+        if input is None:
+            # Reading the file failed.
+            return
+        input += u"\n" # Silence certain parse errors
+        if doctests_only:
+            self.log_debug("Refactoring doctests in %s", filename)
+            output = self.refactor_docstring(input, filename)
+            if self.write_unchanged_files or output != input:
+                self.processed_file(output, filename, input, write, encoding)
+            else:
+                self.log_debug("No doctest changes in %s", filename)
+        else:
+            tree = self.refactor_string(input, filename)
+            if self.write_unchanged_files or (tree and tree.was_changed):
+                # The [:-1] is to take off the \n we added earlier
+                self.processed_file(unicode(tree)[:-1], filename,
+                                    write=write, encoding=encoding)
+            else:
+                self.log_debug("No changes in %s", filename)
+
+    def refactor_string(self, data, name):
+        """Refactor a given input string.
+
+        Args:
+            data: a string holding the code to be refactored.
+            name: a human-readable name for use in error/log messages.
+
+        Returns:
+            An AST corresponding to the refactored input stream; None if
+            there were errors during the parse.
+        """
+        features = _detect_future_features(data)
+        if "print_function" in features:
+            self.driver.grammar = pygram.python_grammar_no_print_statement
+        try:
+            tree = self.driver.parse_string(data)
+        except Exception as err:
+            self.log_error("Can't parse %s: %s: %s",
+                           name, err.__class__.__name__, err)
+            return
+        finally:
+            self.driver.grammar = self.grammar
+        tree.future_features = features
+        self.log_debug("Refactoring %s", name)
+        self.refactor_tree(tree, name)
+        return tree
+
+    def refactor_stdin(self, doctests_only=False):
+        input = sys.stdin.read()
+        if doctests_only:
+            self.log_debug("Refactoring doctests in stdin")
+            output = self.refactor_docstring(input, "<stdin>")
+            if self.write_unchanged_files or output != input:
+                self.processed_file(output, "<stdin>", input)
+            else:
+                self.log_debug("No doctest changes in stdin")
+        else:
+            tree = self.refactor_string(input, "<stdin>")
+            if self.write_unchanged_files or (tree and tree.was_changed):
+                self.processed_file(unicode(tree), "<stdin>", input)
+            else:
+                self.log_debug("No changes in stdin")
+
+    def refactor_tree(self, tree, name):
+        """Refactors a parse tree (modifying the tree in place).
+
+        For compatible patterns the bottom matcher module is
+        used. Otherwise the tree is traversed node-to-node for
+        matches.
+
+        Args:
+            tree: a pytree.Node instance representing the root of the tree
+                  to be refactored.
+            name: a human-readable name for this tree.
+
+        Returns:
+            True if the tree was modified, False otherwise.
+        """
+
+        for fixer in chain(self.pre_order, self.post_order):
+            fixer.start_tree(tree, name)
+
+        # use traditional matching for the incompatible fixers
+        self.traverse_by(self.bmi_pre_order_heads, tree.pre_order())
+        self.traverse_by(self.bmi_post_order_heads, tree.post_order())
+
+        # obtain a set of candidate nodes
+        match_set = self.BM.run(tree.leaves())
+
+        while any(match_set.values()):
+            for fixer in self.BM.fixers:
+                if fixer in match_set and match_set[fixer]:
+                    # sort by depth; apply fixers from the bottom of
+                    # the AST to the top
+                    match_set[fixer].sort(key=pytree.Base.depth, reverse=True)
+
+                    if fixer.keep_line_order:
+                        # some fixers (e.g. fix_imports) must be applied
+                        # in the original file's line order
+                        match_set[fixer].sort(key=pytree.Base.get_lineno)
+
+                    for node in list(match_set[fixer]):
+                        if node in match_set[fixer]:
+                            match_set[fixer].remove(node)
+
+                        try:
+                            find_root(node)
+                        except ValueError:
+                            # this node has been cut off from a
+                            # previous transformation; skip
+                            continue
+
+                        if node.fixers_applied and fixer in node.fixers_applied:
+                            # do not apply the same fixer again
+                            continue
+
+                        results = fixer.match(node)
+
+                        if results:
+                            new = fixer.transform(node, results)
+                            if new is not None:
+                                node.replace(new)
+                                #new.fixers_applied.append(fixer)
+                                for node in new.post_order():
+                                    # do not apply the fixer again to
+                                    # this or any subnode
+                                    if not node.fixers_applied:
+                                        node.fixers_applied = []
+                                    node.fixers_applied.append(fixer)
+
+                                # update the original match set for
+                                # the added code
+                                new_matches = self.BM.run(new.leaves())
+                                for fxr in new_matches:
+                                    if fxr not in match_set:
+                                        match_set[fxr] = []
+
+                                    match_set[fxr].extend(new_matches[fxr])
+
+        for fixer in chain(self.pre_order, self.post_order):
+            fixer.finish_tree(tree, name)
+        return tree.was_changed
+
+    def traverse_by(self, fixers, traversal):
+        """Traverse an AST, applying a set of fixers to each node.
+
+        This is a helper method for refactor_tree().
+
+        Args:
+            fixers: a list of fixer instances.
+            traversal: a generator that yields AST nodes.
+
+        Returns:
+            None
+        """
+        if not fixers:
+            return
+        for node in traversal:
+            for fixer in fixers[node.type]:
+                results = fixer.match(node)
+                if results:
+                    new = fixer.transform(node, results)
+                    if new is not None:
+                        node.replace(new)
+                        node = new
+
+    def processed_file(self, new_text, filename, old_text=None, write=False,
+                       encoding=None):
+        """
+        Called when a file has been refactored and there may be changes.
+        """
+        self.files.append(filename)
+        if old_text is None:
+            old_text = self._read_python_source(filename)[0]
+            if old_text is None:
+                return
+        equal = old_text == new_text
+        self.print_output(old_text, new_text, filename, equal)
+        if equal:
+            self.log_debug("No changes to %s", filename)
+            if not self.write_unchanged_files:
+                return
+        if write:
+            self.write_file(new_text, filename, old_text, encoding)
+        else:
+            self.log_debug("Not writing changes to %s", filename)
+
+    def write_file(self, new_text, filename, old_text, encoding=None):
+        """Writes a string to a file.
+
+        Newlines are translated to the platform convention before writing.
+        This method is only called when the changes should actually be
+        written to disk (see processed_file()).
+        """
+        try:
+            f = _open_with_encoding(filename, "w", encoding=encoding)
+        except os.error as err:
+            self.log_error("Can't create %s: %s", filename, err)
+            return
+        try:
+            f.write(_to_system_newlines(new_text))
+        except os.error as err:
+            self.log_error("Can't write %s: %s", filename, err)
+        finally:
+            f.close()
+        self.log_debug("Wrote changes to %s", filename)
+        self.wrote = True
+
+    PS1 = ">>> "
+    PS2 = "... "
+
+    def refactor_docstring(self, input, filename):
+        """Refactors a docstring, looking for doctests.
+
+        This returns a modified version of the input string.  It looks
+        for doctests, which start with a ">>>" prompt, and may be
+        continued with "..." prompts, as long as the "..." is indented
+        the same as the ">>>".
+
+        (Unfortunately we can't use the doctest module's parser,
+        since, like most parsers, it is not geared towards preserving
+        the original source.)
+        """
+        result = []
+        block = None
+        block_lineno = None
+        indent = None
+        lineno = 0
+        for line in input.splitlines(True):
+            lineno += 1
+            if line.lstrip().startswith(self.PS1):
+                if block is not None:
+                    result.extend(self.refactor_doctest(block, block_lineno,
+                                                        indent, filename))
+                block_lineno = lineno
+                block = [line]
+                i = line.find(self.PS1)
+                indent = line[:i]
+            elif (indent is not None and
+                  (line.startswith(indent + self.PS2) or
+                   line == indent + self.PS2.rstrip() + u"\n")):
+                block.append(line)
+            else:
+                if block is not None:
+                    result.extend(self.refactor_doctest(block, block_lineno,
+                                                        indent, filename))
+                block = None
+                indent = None
+                result.append(line)
+        if block is not None:
+            result.extend(self.refactor_doctest(block, block_lineno,
+                                                indent, filename))
+        return u"".join(result)
+
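+    # Hedged usage sketch for the doctest handling above (assumes the
+    # standard lib2to3.fixes package is importable; the exact output depends
+    # on which fixers are loaded):
+    #
+    #   rt = RefactoringTool(get_fixers_from_package("lib2to3.fixes"))
+    #   rt.refactor_docstring(u"    >>> print 'x'\n", "<doc>")
+    #   # -> u"    >>> print('x')\n"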
+    def refactor_doctest(self, block, lineno, indent, filename):
+        """Refactors one doctest.
+
+        A doctest is given as a block of lines, the first of which starts
+        with ">>>" (possibly indented), while the remaining lines start
+        with "..." (identically indented).
+
+        """
+        try:
+            tree = self.parse_block(block, lineno, indent)
+        except Exception as err:
+            if self.logger.isEnabledFor(logging.DEBUG):
+                for line in block:
+                    self.log_debug("Source: %s", line.rstrip(u"\n"))
+            self.log_error("Can't parse docstring in %s line %s: %s: %s",
+                           filename, lineno, err.__class__.__name__, err)
+            return block
+        if self.refactor_tree(tree, filename):
+            new = unicode(tree).splitlines(True)
+            # Undo the adjustment of the line numbers in wrap_toks() below.
+            clipped, new = new[:lineno-1], new[lineno-1:]
+            assert clipped == [u"\n"] * (lineno-1), clipped
+            if not new[-1].endswith(u"\n"):
+                new[-1] += u"\n"
+            block = [indent + self.PS1 + new.pop(0)]
+            if new:
+                block += [indent + self.PS2 + line for line in new]
+        return block
+
+    def summarize(self):
+        if self.wrote:
+            were = "were"
+        else:
+            were = "need to be"
+        if not self.files:
+            self.log_message("No files %s modified.", were)
+        else:
+            self.log_message("Files that %s modified:", were)
+            for file in self.files:
+                self.log_message(file)
+        if self.fixer_log:
+            self.log_message("Warnings/messages while refactoring:")
+            for message in self.fixer_log:
+                self.log_message(message)
+        if self.errors:
+            if len(self.errors) == 1:
+                self.log_message("There was 1 error:")
+            else:
+                self.log_message("There were %d errors:", len(self.errors))
+            for msg, args, kwds in self.errors:
+                self.log_message(msg, *args, **kwds)
+
+    def parse_block(self, block, lineno, indent):
+        """Parses a block into a tree.
+
+        This is necessary to get correct line number / offset information
+        in the parser diagnostics and embedded into the parse tree.
+        """
+        tree = self.driver.parse_tokens(self.wrap_toks(block, lineno, indent))
+        tree.future_features = frozenset()
+        return tree
+
+    def wrap_toks(self, block, lineno, indent):
+        """Wraps a tokenize stream to systematically modify start/end."""
+        tokens = tokenize.generate_tokens(self.gen_lines(block, indent).next)
+        for type, value, (line0, col0), (line1, col1), line_text in tokens:
+            line0 += lineno - 1
+            line1 += lineno - 1
+            # Don't bother updating the columns; this is too complicated
+            # since line_text would also have to be updated and it would
+            # still break for tokens spanning lines.  Let the user guess
+            # that the column numbers for doctests are relative to the
+            # end of the prompt string (PS1 or PS2).
+            yield type, value, (line0, col0), (line1, col1), line_text
+
+
+    def gen_lines(self, block, indent):
+        """Generates lines as expected by tokenize from a list of lines.
+
+        This strips the first len(indent + self.PS1) characters off each line.
+        """
+        prefix1 = indent + self.PS1
+        prefix2 = indent + self.PS2
+        prefix = prefix1
+        for line in block:
+            if line.startswith(prefix):
+                yield line[len(prefix):]
+            elif line == prefix.rstrip() + u"\n":
+                yield u"\n"
+            else:
+                raise AssertionError("line=%r, prefix=%r" % (line, prefix))
+            prefix = prefix2
+        while True:
+            yield ""
+
+
+class MultiprocessingUnsupported(Exception):
+    pass
+
+
+class MultiprocessRefactoringTool(RefactoringTool):
+
+    def __init__(self, *args, **kwargs):
+        super(MultiprocessRefactoringTool, self).__init__(*args, **kwargs)
+        self.queue = None
+        self.output_lock = None
+
+    def refactor(self, items, write=False, doctests_only=False,
+                 num_processes=1):
+        if num_processes == 1:
+            return super(MultiprocessRefactoringTool, self).refactor(
+                items, write, doctests_only)
+        try:
+            import multiprocessing
+        except ImportError:
+            raise MultiprocessingUnsupported
+        if self.queue is not None:
+            raise RuntimeError("already doing multiple processes")
+        self.queue = multiprocessing.JoinableQueue()
+        self.output_lock = multiprocessing.Lock()
+        processes = [multiprocessing.Process(target=self._child)
+                     for i in xrange(num_processes)]
+        try:
+            for p in processes:
+                p.start()
+            super(MultiprocessRefactoringTool, self).refactor(items, write,
+                                                              doctests_only)
+        finally:
+            self.queue.join()
+            for i in xrange(num_processes):
+                self.queue.put(None)
+            for p in processes:
+                if p.is_alive():
+                    p.join()
+            self.queue = None
+
+    def _child(self):
+        task = self.queue.get()
+        while task is not None:
+            args, kwargs = task
+            try:
+                super(MultiprocessRefactoringTool, self).refactor_file(
+                    *args, **kwargs)
+            finally:
+                self.queue.task_done()
+            task = self.queue.get()
+
+    def refactor_file(self, *args, **kwargs):
+        if self.queue is not None:
+            self.queue.put((args, kwargs))
+        else:
+            return super(MultiprocessRefactoringTool, self).refactor_file(
+                *args, **kwargs)
diff --git a/python/helpers/pydev/third_party/pep8/pep8.py b/python/helpers/pydev/third_party/pep8/pep8.py
new file mode 100644
index 0000000..d907066
--- /dev/null
+++ b/python/helpers/pydev/third_party/pep8/pep8.py
@@ -0,0 +1,1959 @@
+#!/usr/bin/env python
+# pep8.py - Check Python source code formatting, according to PEP 8
+# Copyright (C) 2006-2009 Johann C. Rocholl <johann@rocholl.net>
+# Copyright (C) 2009-2014 Florent Xicluna <florent.xicluna@gmail.com>
+#
+# Permission is hereby granted, free of charge, to any person
+# obtaining a copy of this software and associated documentation files
+# (the "Software"), to deal in the Software without restriction,
+# including without limitation the rights to use, copy, modify, merge,
+# publish, distribute, sublicense, and/or sell copies of the Software,
+# and to permit persons to whom the Software is furnished to do so,
+# subject to the following conditions:
+#
+# The above copyright notice and this permission notice shall be
+# included in all copies or substantial portions of the Software.
+#
+# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
+# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS
+# BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN
+# ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN
+# CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
+# SOFTWARE.
+
+r"""
+Check Python source code formatting, according to PEP 8.
+
+For usage and a list of options, try this:
+$ python pep8.py -h
+
+This program and its regression test suite live here:
+http://github.com/jcrocholl/pep8
+
+Groups of errors and warnings:
+E errors
+W warnings
+100 indentation
+200 whitespace
+300 blank lines
+400 imports
+500 line length
+600 deprecation
+700 statements
+900 syntax error
+"""
+from __future__ import with_statement
+
+__version__ = '1.6.0a0'
+
+import os
+import sys
+import re
+import time
+import inspect
+import keyword
+import tokenize
+from optparse import OptionParser
+from fnmatch import fnmatch
+try:
+    from configparser import RawConfigParser
+    from io import TextIOWrapper
+except ImportError:
+    from ConfigParser import RawConfigParser
+
+DEFAULT_EXCLUDE = '.svn,CVS,.bzr,.hg,.git,__pycache__'
+DEFAULT_IGNORE = 'E123,E226,E24,E704'
+if sys.platform == 'win32':
+    DEFAULT_CONFIG = os.path.expanduser(r'~\.pep8')
+else:
+    DEFAULT_CONFIG = os.path.join(os.getenv('XDG_CONFIG_HOME') or
+                                  os.path.expanduser('~/.config'), 'pep8')
+PROJECT_CONFIG = ('setup.cfg', 'tox.ini', '.pep8')
+TESTSUITE_PATH = os.path.join(os.path.dirname(__file__), 'testsuite')
+MAX_LINE_LENGTH = 79
+REPORT_FORMAT = {
+    'default': '%(path)s:%(row)d:%(col)d: %(code)s %(text)s',
+    'pylint': '%(path)s:%(row)d: [%(code)s] %(text)s',
+}
+
+PyCF_ONLY_AST = 1024
+SINGLETONS = frozenset(['False', 'None', 'True'])
+KEYWORDS = frozenset(keyword.kwlist + ['print']) - SINGLETONS
+UNARY_OPERATORS = frozenset(['>>', '**', '*', '+', '-'])
+ARITHMETIC_OP = frozenset(['**', '*', '/', '//', '+', '-'])
+WS_OPTIONAL_OPERATORS = ARITHMETIC_OP.union(['^', '&', '|', '<<', '>>', '%'])
+WS_NEEDED_OPERATORS = frozenset([
+    '**=', '*=', '/=', '//=', '+=', '-=', '!=', '<>', '<', '>',
+    '%=', '^=', '&=', '|=', '==', '<=', '>=', '<<=', '>>=', '='])
+WHITESPACE = frozenset(' \t')
+NEWLINE = frozenset([tokenize.NL, tokenize.NEWLINE])
+SKIP_TOKENS = NEWLINE.union([tokenize.INDENT, tokenize.DEDENT])
+# ERRORTOKEN is triggered by backticks in Python 3
+SKIP_COMMENTS = SKIP_TOKENS.union([tokenize.COMMENT, tokenize.ERRORTOKEN])
+BENCHMARK_KEYS = ['directories', 'files', 'logical lines', 'physical lines']
+
+INDENT_REGEX = re.compile(r'([ \t]*)')
+RAISE_COMMA_REGEX = re.compile(r'raise\s+\w+\s*,')
+RERAISE_COMMA_REGEX = re.compile(r'raise\s+\w+\s*,.*,\s*\w+\s*$')
+ERRORCODE_REGEX = re.compile(r'\b[A-Z]\d{3}\b')
+DOCSTRING_REGEX = re.compile(r'u?r?["\']')
+EXTRANEOUS_WHITESPACE_REGEX = re.compile(r'[[({] | []}),;:]')
+WHITESPACE_AFTER_COMMA_REGEX = re.compile(r'[,;:]\s*(?:  |\t)')
+COMPARE_SINGLETON_REGEX = re.compile(r'([=!]=)\s*(None|False|True)')
+COMPARE_NEGATIVE_REGEX = re.compile(r'\b(not)\s+[^[({ ]+\s+(in|is)\s')
+COMPARE_TYPE_REGEX = re.compile(r'(?:[=!]=|is(?:\s+not)?)\s*type(?:s.\w+Type'
+                                r'|\s*\(\s*([^)]*[^ )])\s*\))')
+KEYWORD_REGEX = re.compile(r'(\s*)\b(?:%s)\b(\s*)' % r'|'.join(KEYWORDS))
+OPERATOR_REGEX = re.compile(r'(?:[^,\s])(\s*)(?:[-+*/|!<=>%&^]+)(\s*)')
+LAMBDA_REGEX = re.compile(r'\blambda\b')
+HUNK_REGEX = re.compile(r'^@@ -\d+(?:,\d+)? \+(\d+)(?:,(\d+))? @@.*$')
+
+# Work around Python < 2.6 behaviour, which does not generate NL after
+# a comment which is on a line by itself.
+COMMENT_WITH_NL = tokenize.generate_tokens(['#\n'].pop).send(None)[1] == '#\n'
+
+
+##############################################################################
+# Plugins (check functions) for physical lines
+##############################################################################
+
+
+def tabs_or_spaces(physical_line, indent_char):
+    r"""Never mix tabs and spaces.
+
+    The most popular way of indenting Python is with spaces only.  The
+    second-most popular way is with tabs only.  Code indented with a mixture
+    of tabs and spaces should be converted to using spaces exclusively.  When
+    invoking the Python command line interpreter with the -t option, it issues
+    warnings about code that illegally mixes tabs and spaces.  When using -tt
+    these warnings become errors.  These options are highly recommended!
+
+    Okay: if a == 0:\n        a = 1\n        b = 1
+    E101: if a == 0:\n        a = 1\n\tb = 1
+    """
+    indent = INDENT_REGEX.match(physical_line).group(1)
+    for offset, char in enumerate(indent):
+        if char != indent_char:
+            return offset, "E101 indentation contains mixed spaces and tabs"
+
+
+def tabs_obsolete(physical_line):
+    r"""For new projects, spaces-only are strongly recommended over tabs.
+
+    Okay: if True:\n    return
+    W191: if True:\n\treturn
+    """
+    indent = INDENT_REGEX.match(physical_line).group(1)
+    if '\t' in indent:
+        return indent.index('\t'), "W191 indentation contains tabs"
+
+
+def trailing_whitespace(physical_line):
+    r"""Trailing whitespace is superfluous.
+
+    The warning returned varies depending on whether the line itself is blank,
+    for easier filtering by those who want to indent their blank lines.
+
+    Okay: spam(1)\n#
+    W291: spam(1) \n#
+    W293: class Foo(object):\n    \n    bang = 12
+    """
+    physical_line = physical_line.rstrip('\n')    # chr(10), newline
+    physical_line = physical_line.rstrip('\r')    # chr(13), carriage return
+    physical_line = physical_line.rstrip('\x0c')  # chr(12), form feed, ^L
+    stripped = physical_line.rstrip(' \t\v')
+    if physical_line != stripped:
+        if stripped:
+            return len(stripped), "W291 trailing whitespace"
+        else:
+            return 0, "W293 blank line contains whitespace"
+
+
+def trailing_blank_lines(physical_line, lines, line_number, total_lines):
+    r"""Trailing blank lines are superfluous.
+
+    Okay: spam(1)
+    W391: spam(1)\n
+
+    However, the last line should end with a newline (warning W292).
+    """
+    if line_number == total_lines:
+        stripped_last_line = physical_line.rstrip()
+        if not stripped_last_line:
+            return 0, "W391 blank line at end of file"
+        if stripped_last_line == physical_line:
+            return len(physical_line), "W292 no newline at end of file"
+
+
+def maximum_line_length(physical_line, max_line_length, multiline):
+    r"""Limit all lines to a maximum of 79 characters.
+
+    There are still many devices around that are limited to 80 character
+    lines; plus, limiting windows to 80 characters makes it possible to have
+    several windows side-by-side.  The default wrapping on such devices looks
+    ugly.  Therefore, please limit all lines to a maximum of 79 characters.
+    For flowing long blocks of text (docstrings or comments), limiting the
+    length to 72 characters is recommended.
+
+    Reports error E501.
+    """
+    line = physical_line.rstrip()
+    length = len(line)
+    if length > max_line_length and not noqa(line):
+        # Special case for long URLs in multi-line docstrings or comments,
+        # but still report the error when the first 72 chars are whitespace.
+        chunks = line.split()
+        if ((len(chunks) == 1 and multiline) or
+            (len(chunks) == 2 and chunks[0] == '#')) and \
+                len(line) - len(chunks[-1]) < max_line_length - 7:
+            return
+        if hasattr(line, 'decode'):   # Python 2
+            # The line could contain multi-byte characters
+            try:
+                length = len(line.decode('utf-8'))
+            except UnicodeError:
+                pass
+        if length > max_line_length:
+            return (max_line_length, "E501 line too long "
+                    "(%d > %d characters)" % (length, max_line_length))
+
+
+##############################################################################
+# Plugins (check functions) for logical lines
+##############################################################################
+
+
+def blank_lines(logical_line, blank_lines, indent_level, line_number,
+                blank_before, previous_logical, previous_indent_level):
+    r"""Separate top-level function and class definitions with two blank lines.
+
+    Method definitions inside a class are separated by a single blank line.
+
+    Extra blank lines may be used (sparingly) to separate groups of related
+    functions.  Blank lines may be omitted between a bunch of related
+    one-liners (e.g. a set of dummy implementations).
+
+    Use blank lines in functions, sparingly, to indicate logical sections.
+
+    Okay: def a():\n    pass\n\n\ndef b():\n    pass
+    Okay: def a():\n    pass\n\n\n# Foo\n# Bar\n\ndef b():\n    pass
+
+    E301: class Foo:\n    b = 0\n    def bar():\n        pass
+    E302: def a():\n    pass\n\ndef b(n):\n    pass
+    E303: def a():\n    pass\n\n\n\ndef b(n):\n    pass
+    E303: def a():\n\n\n\n    pass
+    E304: @decorator\n\ndef a():\n    pass
+    """
+    if line_number < 3 and not previous_logical:
+        return  # Don't expect blank lines before the first line
+    if previous_logical.startswith('@'):
+        if blank_lines:
+            yield 0, "E304 blank lines found after function decorator"
+    elif blank_lines > 2 or (indent_level and blank_lines == 2):
+        yield 0, "E303 too many blank lines (%d)" % blank_lines
+    elif logical_line.startswith(('def ', 'class ', '@')):
+        if indent_level:
+            if not (blank_before or previous_indent_level < indent_level or
+                    DOCSTRING_REGEX.match(previous_logical)):
+                yield 0, "E301 expected 1 blank line, found 0"
+        elif blank_before != 2:
+            yield 0, "E302 expected 2 blank lines, found %d" % blank_before
+
+
+def extraneous_whitespace(logical_line):
+    r"""Avoid extraneous whitespace.
+
+    Avoid extraneous whitespace in these situations:
+    - Immediately inside parentheses, brackets or braces.
+    - Immediately before a comma, semicolon, or colon.
+
+    Okay: spam(ham[1], {eggs: 2})
+    E201: spam( ham[1], {eggs: 2})
+    E201: spam(ham[ 1], {eggs: 2})
+    E201: spam(ham[1], { eggs: 2})
+    E202: spam(ham[1], {eggs: 2} )
+    E202: spam(ham[1 ], {eggs: 2})
+    E202: spam(ham[1], {eggs: 2 })
+
+    E203: if x == 4: print x, y; x, y = y , x
+    E203: if x == 4: print x, y ; x, y = y, x
+    E203: if x == 4 : print x, y; x, y = y, x
+    """
+    line = logical_line
+    for match in EXTRANEOUS_WHITESPACE_REGEX.finditer(line):
+        text = match.group()
+        char = text.strip()
+        found = match.start()
+        if text == char + ' ':
+            # assert char in '([{'
+            yield found + 1, "E201 whitespace after '%s'" % char
+        elif line[found - 1] != ',':
+            code = ('E202' if char in '}])' else 'E203')  # if char in ',;:'
+            yield found, "%s whitespace before '%s'" % (code, char)
+
+
+def whitespace_around_keywords(logical_line):
+    r"""Avoid extraneous whitespace around keywords.
+
+    Okay: True and False
+    E271: True and  False
+    E272: True  and False
+    E273: True and\tFalse
+    E274: True\tand False
+    """
+    for match in KEYWORD_REGEX.finditer(logical_line):
+        before, after = match.groups()
+
+        if '\t' in before:
+            yield match.start(1), "E274 tab before keyword"
+        elif len(before) > 1:
+            yield match.start(1), "E272 multiple spaces before keyword"
+
+        if '\t' in after:
+            yield match.start(2), "E273 tab after keyword"
+        elif len(after) > 1:
+            yield match.start(2), "E271 multiple spaces after keyword"
+
+
+def missing_whitespace(logical_line):
+    r"""Each comma, semicolon or colon should be followed by whitespace.
+
+    Okay: [a, b]
+    Okay: (3,)
+    Okay: a[1:4]
+    Okay: a[:4]
+    Okay: a[1:]
+    Okay: a[1:4:2]
+    E231: ['a','b']
+    E231: foo(bar,baz)
+    E231: [{'a':'b'}]
+    """
+    line = logical_line
+    for index in range(len(line) - 1):
+        char = line[index]
+        if char in ',;:' and line[index + 1] not in WHITESPACE:
+            before = line[:index]
+            if char == ':' and before.count('[') > before.count(']') and \
+                    before.rfind('{') < before.rfind('['):
+                continue  # Slice syntax, no space required
+            if char == ',' and line[index + 1] == ')':
+                continue  # Allow tuple with only one element: (3,)
+            yield index, "E231 missing whitespace after '%s'" % char
+
+
+def indentation(logical_line, previous_logical, indent_char,
+                indent_level, previous_indent_level):
+    r"""Use 4 spaces per indentation level.
+
+    For really old code that you don't want to mess up, you can continue to
+    use 8-space tabs.
+
+    Okay: a = 1
+    Okay: if a == 0:\n    a = 1
+    E111:   a = 1
+    E114:   # a = 1
+
+    Okay: for item in items:\n    pass
+    E112: for item in items:\npass
+    E115: for item in items:\n# Hi\n    pass
+
+    Okay: a = 1\nb = 2
+    E113: a = 1\n    b = 2
+    E116: a = 1\n    # b = 2
+    """
+    c = 0 if logical_line else 3
+    tmpl = "E11%d %s" if logical_line else "E11%d %s (comment)"
+    if indent_level % 4:
+        yield 0, tmpl % (1 + c, "indentation is not a multiple of four")
+    indent_expect = previous_logical.endswith(':')
+    if indent_expect and indent_level <= previous_indent_level:
+        yield 0, tmpl % (2 + c, "expected an indented block")
+    elif not indent_expect and indent_level > previous_indent_level:
+        yield 0, tmpl % (3 + c, "unexpected indentation")
+
+
+def continued_indentation(logical_line, tokens, indent_level, hang_closing,
+                          indent_char, noqa, verbose):
+    r"""Continuation lines indentation.
+
+    Continuation lines should align wrapped elements either vertically
+    using Python's implicit line joining inside parentheses, brackets
+    and braces, or using a hanging indent.
+
+    When using a hanging indent these considerations should be applied:
+    - there should be no arguments on the first line, and
+    - further indentation should be used to clearly distinguish itself as a
+      continuation line.
+
+    Okay: a = (\n)
+    E123: a = (\n    )
+
+    Okay: a = (\n    42)
+    E121: a = (\n   42)
+    E122: a = (\n42)
+    E123: a = (\n    42\n    )
+    E124: a = (24,\n     42\n)
+    E125: if (\n    b):\n    pass
+    E126: a = (\n        42)
+    E127: a = (24,\n      42)
+    E128: a = (24,\n    42)
+    E129: if (a or\n    b):\n    pass
+    E131: a = (\n    42\n 24)
+    """
+    first_row = tokens[0][2][0]
+    nrows = 1 + tokens[-1][2][0] - first_row
+    if noqa or nrows == 1:
+        return
+
+    # indent_next tells us whether the next block is indented; assuming
+    # that it is indented by 4 spaces, then we should not allow 4-space
+    # indents on the final continuation line; in turn, some other
+    # indents are allowed to have an extra 4 spaces.
+    indent_next = logical_line.endswith(':')
+
+    row = depth = 0
+    valid_hangs = (4,) if indent_char != '\t' else (4, 8)
+    # remember how many brackets were opened on each line
+    parens = [0] * nrows
+    # relative indents of physical lines
+    rel_indent = [0] * nrows
+    # for each depth, collect a list of opening rows
+    open_rows = [[0]]
+    # for each depth, memorize the hanging indentation
+    hangs = [None]
+    # visual indents
+    indent_chances = {}
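+    # (values: True for a confirmed visual-indent column, str for columns
+    # opened by strings or comments, or a token text that may line up there)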
+    last_indent = tokens[0][2]
+    visual_indent = None
+    # for each depth, memorize the visual indent column
+    indent = [last_indent[1]]
+    if verbose >= 3:
+        print(">>> " + tokens[0][4].rstrip())
+
+    for token_type, text, start, end, line in tokens:
+
+        newline = row < start[0] - first_row
+        if newline:
+            row = start[0] - first_row
+            newline = not last_token_multiline and token_type not in NEWLINE
+
+        if newline:
+            # this is the beginning of a continuation line.
+            last_indent = start
+            if verbose >= 3:
+                print("... " + line.rstrip())
+
+            # record the initial indent.
+            rel_indent[row] = expand_indent(line) - indent_level
+
+            # identify closing bracket
+            close_bracket = (token_type == tokenize.OP and text in ']})')
+
+            # is the indent relative to an opening bracket line?
+            for open_row in reversed(open_rows[depth]):
+                hang = rel_indent[row] - rel_indent[open_row]
+                hanging_indent = hang in valid_hangs
+                if hanging_indent:
+                    break
+            if hangs[depth]:
+                hanging_indent = (hang == hangs[depth])
+            # is there any chance of visual indent?
+            visual_indent = (not close_bracket and hang > 0 and
+                             indent_chances.get(start[1]))
+
+            if close_bracket and indent[depth]:
+                # closing bracket for visual indent
+                if start[1] != indent[depth]:
+                    yield (start, "E124 closing bracket does not match "
+                           "visual indentation")
+            elif close_bracket and not hang:
+                # closing bracket matches indentation of opening bracket's line
+                if hang_closing:
+                    yield start, "E133 closing bracket is missing indentation"
+            elif indent[depth] and start[1] < indent[depth]:
+                if visual_indent is not True:
+                    # visual indent is broken
+                    yield (start, "E128 continuation line "
+                           "under-indented for visual indent")
+            elif hanging_indent or (indent_next and rel_indent[row] == 8):
+                # hanging indent is verified
+                if close_bracket and not hang_closing:
+                    yield (start, "E123 closing bracket does not match "
+                           "indentation of opening bracket's line")
+                hangs[depth] = hang
+            elif visual_indent is True:
+                # visual indent is verified
+                indent[depth] = start[1]
+            elif visual_indent in (text, str):
+                # ignore token lined up with matching one from a previous line
+                pass
+            else:
+                # indent is broken
+                if hang <= 0:
+                    error = "E122", "missing indentation or outdented"
+                elif indent[depth]:
+                    error = "E127", "over-indented for visual indent"
+                elif not close_bracket and hangs[depth]:
+                    error = "E131", "unaligned for hanging indent"
+                else:
+                    hangs[depth] = hang
+                    if hang > 4:
+                        error = "E126", "over-indented for hanging indent"
+                    else:
+                        error = "E121", "under-indented for hanging indent"
+                yield start, "%s continuation line %s" % error
+
+        # look for visual indenting
+        if (parens[row] and token_type not in (tokenize.NL, tokenize.COMMENT)
+                and not indent[depth]):
+            indent[depth] = start[1]
+            indent_chances[start[1]] = True
+            if verbose >= 4:
+                print("bracket depth %s indent to %s" % (depth, start[1]))
+        # deal with implicit string concatenation
+        elif (token_type in (tokenize.STRING, tokenize.COMMENT) or
+              text in ('u', 'ur', 'b', 'br')):
+            indent_chances[start[1]] = str
+        # special case for the "if" statement because len("if (") == 4
+        elif not indent_chances and not row and not depth and text == 'if':
+            indent_chances[end[1] + 1] = True
+        elif text == ':' and line[end[1]:].isspace():
+            open_rows[depth].append(row)
+
+        # keep track of bracket depth
+        if token_type == tokenize.OP:
+            if text in '([{':
+                depth += 1
+                indent.append(0)
+                hangs.append(None)
+                if len(open_rows) == depth:
+                    open_rows.append([])
+                open_rows[depth].append(row)
+                parens[row] += 1
+                if verbose >= 4:
+                    print("bracket depth %s seen, col %s, visual min = %s" %
+                          (depth, start[1], indent[depth]))
+            elif text in ')]}' and depth > 0:
+                # parent indents should not be more than this one
+                prev_indent = indent.pop() or last_indent[1]
+                hangs.pop()
+                for d in range(depth):
+                    if indent[d] > prev_indent:
+                        indent[d] = 0
+                for ind in list(indent_chances):
+                    if ind >= prev_indent:
+                        del indent_chances[ind]
+                del open_rows[depth + 1:]
+                depth -= 1
+                if depth:
+                    indent_chances[indent[depth]] = True
+                for idx in range(row, -1, -1):
+                    if parens[idx]:
+                        parens[idx] -= 1
+                        break
+            assert len(indent) == depth + 1
+            if start[1] not in indent_chances:
+                # allow to line up tokens
+                indent_chances[start[1]] = text
+
+        last_token_multiline = (start[0] != end[0])
+        if last_token_multiline:
+            rel_indent[end[0] - first_row] = rel_indent[row]
+
+    if indent_next and expand_indent(line) == indent_level + 4:
+        pos = (start[0], indent[0] + 4)
+        if visual_indent:
+            code = "E129 visually indented line"
+        else:
+            code = "E125 continuation line"
+        yield pos, "%s with same indent as next logical line" % code
+
+
+def whitespace_before_parameters(logical_line, tokens):
+    r"""Avoid extraneous whitespace.
+
+    Avoid extraneous whitespace in the following situations:
+    - before the open parenthesis that starts the argument list of a
+      function call.
+    - before the open parenthesis that starts an indexing or slicing.
+
+    Okay: spam(1)
+    E211: spam (1)
+
+    Okay: dict['key'] = list[index]
+    E211: dict ['key'] = list[index]
+    E211: dict['key'] = list [index]
+    """
+    prev_type, prev_text, __, prev_end, __ = tokens[0]
+    for index in range(1, len(tokens)):
+        token_type, text, start, end, __ = tokens[index]
+        if (token_type == tokenize.OP and
+            text in '([' and
+            start != prev_end and
+            (prev_type == tokenize.NAME or prev_text in '}])') and
+            # Syntax "class A (B):" is allowed, but avoid it
+            (index < 2 or tokens[index - 2][1] != 'class') and
+                # Allow "return (a.foo for a in range(5))"
+                not keyword.iskeyword(prev_text)):
+            yield prev_end, "E211 whitespace before '%s'" % text
+        prev_type = token_type
+        prev_text = text
+        prev_end = end
+
+
+def whitespace_around_operator(logical_line):
+    r"""Avoid extraneous whitespace around an operator.
+
+    Okay: a = 12 + 3
+    E221: a = 4  + 5
+    E222: a = 4 +  5
+    E223: a = 4\t+ 5
+    E224: a = 4 +\t5
+    """
+    for match in OPERATOR_REGEX.finditer(logical_line):
+        before, after = match.groups()
+
+        if '\t' in before:
+            yield match.start(1), "E223 tab before operator"
+        elif len(before) > 1:
+            yield match.start(1), "E221 multiple spaces before operator"
+
+        if '\t' in after:
+            yield match.start(2), "E224 tab after operator"
+        elif len(after) > 1:
+            yield match.start(2), "E222 multiple spaces after operator"
+
+
+def missing_whitespace_around_operator(logical_line, tokens):
+    r"""Surround operators with a single space on either side.
+
+    - Always surround these binary operators with a single space on
+      either side: assignment (=), augmented assignment (+=, -= etc.),
+      comparisons (==, <, >, !=, <=, >=, in, not in, is, is not),
+      Booleans (and, or, not).
+
+    - If operators with different priorities are used, consider adding
+      whitespace around the operators with the lowest priorities.
+
+    Okay: i = i + 1
+    Okay: submitted += 1
+    Okay: x = x * 2 - 1
+    Okay: hypot2 = x * x + y * y
+    Okay: c = (a + b) * (a - b)
+    Okay: foo(bar, key='word', *args, **kwargs)
+    Okay: alpha[:-i]
+
+    E225: i=i+1
+    E225: submitted +=1
+    E225: x = x /2 - 1
+    E225: z = x **y
+    E226: c = (a+b) * (a-b)
+    E226: hypot2 = x*x + y*y
+    E227: c = a|b
+    E228: msg = fmt%(errno, errmsg)
+    """
+    parens = 0
+    need_space = False
+    prev_type = tokenize.OP
+    prev_text = prev_end = None
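+    # need_space tracks the state around the most recent operator:
+    #   False       -> nothing pending
+    #   True        -> whitespace is required on both sides (E225)
+    #   None        -> whitespace is optional (transient, converted below)
+    #   (pos, bool) -> operator seen; bool is whether a space preceded it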
+    for token_type, text, start, end, line in tokens:
+        if token_type in SKIP_COMMENTS:
+            continue
+        if text in ('(', 'lambda'):
+            parens += 1
+        elif text == ')':
+            parens -= 1
+        if need_space:
+            if start != prev_end:
+                # Found a (probably) needed space
+                if need_space is not True and not need_space[1]:
+                    yield (need_space[0],
+                           "E225 missing whitespace around operator")
+                need_space = False
+            elif text == '>' and prev_text in ('<', '-'):
+                # Tolerate the "<>" operator, even if running Python 3
+                # Deal with Python 3's annotated return value "->"
+                pass
+            else:
+                if need_space is True or need_space[1]:
+                    # A needed trailing space was not found
+                    yield prev_end, "E225 missing whitespace around operator"
+                else:
+                    code, optype = 'E226', 'arithmetic'
+                    if prev_text == '%':
+                        code, optype = 'E228', 'modulo'
+                    elif prev_text not in ARITHMETIC_OP:
+                        code, optype = 'E227', 'bitwise or shift'
+                    yield (need_space[0], "%s missing whitespace "
+                           "around %s operator" % (code, optype))
+                need_space = False
+        elif token_type == tokenize.OP and prev_end is not None:
+            if text == '=' and parens:
+                # Allow keyword args or defaults: foo(bar=None).
+                pass
+            elif text in WS_NEEDED_OPERATORS:
+                need_space = True
+            elif text in UNARY_OPERATORS:
+                # Check if the operator is being used as a binary operator
+                # Allow unary operators: -123, -x, +1.
+                # Allow argument unpacking: foo(*args, **kwargs).
+                if (prev_text in '}])' if prev_type == tokenize.OP
+                        else prev_text not in KEYWORDS):
+                    need_space = None
+            elif text in WS_OPTIONAL_OPERATORS:
+                need_space = None
+
+            if need_space is None:
+                # Surrounding space is optional, but ensure that
+                # trailing space matches opening space
+                need_space = (prev_end, start != prev_end)
+            elif need_space and start == prev_end:
+                # A needed opening space was not found
+                yield prev_end, "E225 missing whitespace around operator"
+                need_space = False
+        prev_type = token_type
+        prev_text = text
+        prev_end = end
+
+
+def whitespace_around_comma(logical_line):
+    r"""Avoid extraneous whitespace after a comma or a colon.
+
+    Note: these checks are disabled by default
+
+    Okay: a = (1, 2)
+    E241: a = (1,  2)
+    E242: a = (1,\t2)
+    """
+    line = logical_line
+    for m in WHITESPACE_AFTER_COMMA_REGEX.finditer(line):
+        found = m.start() + 1
+        if '\t' in m.group():
+            yield found, "E242 tab after '%s'" % m.group()[0]
+        else:
+            yield found, "E241 multiple spaces after '%s'" % m.group()[0]
+
+
+def whitespace_around_named_parameter_equals(logical_line, tokens):
+    r"""Don't use spaces around the '=' sign in function arguments.
+
+    Don't use spaces around the '=' sign when used to indicate a
+    keyword argument or a default parameter value.
+
+    Okay: def complex(real, imag=0.0):
+    Okay: return magic(r=real, i=imag)
+    Okay: boolean(a == b)
+    Okay: boolean(a != b)
+    Okay: boolean(a <= b)
+    Okay: boolean(a >= b)
+
+    E251: def complex(real, imag = 0.0):
+    E251: return magic(r = real, i = imag)
+    """
+    parens = 0
+    no_space = False
+    prev_end = None
+    message = "E251 unexpected spaces around keyword / parameter equals"
+    for token_type, text, start, end, line in tokens:
+        if token_type == tokenize.NL:
+            continue
+        if no_space:
+            no_space = False
+            if start != prev_end:
+                yield (prev_end, message)
+        elif token_type == tokenize.OP:
+            if text == '(':
+                parens += 1
+            elif text == ')':
+                parens -= 1
+            elif parens and text == '=':
+                no_space = True
+                if start != prev_end:
+                    yield (prev_end, message)
+        prev_end = end
+
+
+def whitespace_before_comment(logical_line, tokens):
+    r"""Separate inline comments by at least two spaces.
+
+    An inline comment is a comment on the same line as a statement.  Inline
+    comments should be separated by at least two spaces from the statement.
+    They should start with a # and a single space.
+
+    Each line of a block comment starts with a # and a single space
+    (unless it is indented text inside the comment).
+
+    Okay: x = x + 1  # Increment x
+    Okay: x = x + 1    # Increment x
+    Okay: # Block comment
+    E261: x = x + 1 # Increment x
+    E262: x = x + 1  #Increment x
+    E262: x = x + 1  #  Increment x
+    E265: #Block comment
+    E266: ### Block comment
+    """
+    prev_end = (0, 0)
+    for token_type, text, start, end, line in tokens:
+        if token_type == tokenize.COMMENT:
+            inline_comment = line[:start[1]].strip()
+            if inline_comment:
+                if prev_end[0] == start[0] and start[1] < prev_end[1] + 2:
+                    yield (prev_end,
+                           "E261 at least two spaces before inline comment")
+            symbol, sp, comment = text.partition(' ')
+            bad_prefix = symbol not in '#:' and (symbol.lstrip('#')[:1] or '#')
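+            # bad_prefix is falsy for a well-formed '# ' comment; otherwise it
+            # is the first character after the leading '#'s (E262/E265), or
+            # '#' itself when the comment is nothing but hashes (E266).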
+            if inline_comment:
+                if bad_prefix or comment[:1] in WHITESPACE:
+                    yield start, "E262 inline comment should start with '# '"
+            elif bad_prefix and (bad_prefix != '!' or start[0] > 1):
+                if bad_prefix != '#':
+                    yield start, "E265 block comment should start with '# '"
+                elif comment:
+                    yield start, "E266 too many leading '#' for block comment"
+        elif token_type != tokenize.NL:
+            prev_end = end
+
+
+def imports_on_separate_lines(logical_line):
+    r"""Imports should usually be on separate lines.
+
+    Okay: import os\nimport sys
+    E401: import sys, os
+
+    Okay: from subprocess import Popen, PIPE
+    Okay: from myclass import MyClass
+    Okay: from foo.bar.yourclass import YourClass
+    Okay: import myclass
+    Okay: import foo.bar.yourclass
+    """
+    line = logical_line
+    if line.startswith('import '):
+        found = line.find(',')
+        if -1 < found and ';' not in line[:found]:
+            yield found, "E401 multiple imports on one line"
+
+
+def compound_statements(logical_line):
+    r"""Compound statements (on the same line) are generally discouraged.
+
+    While sometimes it's okay to put an if/for/while with a small body
+    on the same line, never do this for multi-clause statements.
+    Also avoid folding such long lines!
+
+    Always use a def statement instead of an assignment statement that
+    binds a lambda expression directly to a name.
+
+    Okay: if foo == 'blah':\n    do_blah_thing()
+    Okay: do_one()
+    Okay: do_two()
+    Okay: do_three()
+
+    E701: if foo == 'blah': do_blah_thing()
+    E701: for x in lst: total += x
+    E701: while t < 10: t = delay()
+    E701: if foo == 'blah': do_blah_thing()
+    E701: else: do_non_blah_thing()
+    E701: try: something()
+    E701: finally: cleanup()
+    E701: if foo == 'blah': one(); two(); three()
+    E702: do_one(); do_two(); do_three()
+    E703: do_four();  # useless semicolon
+    E704: def f(x): return 2*x
+    E731: f = lambda x: 2*x
+    """
+    line = logical_line
+    last_char = len(line) - 1
+    found = line.find(':')
+    while -1 < found < last_char:
+        before = line[:found]
+        if ((before.count('{') <= before.count('}') and   # {'a': 1} (dict)
+             before.count('[') <= before.count(']') and   # [1:2] (slice)
+             before.count('(') <= before.count(')'))):    # (annotation)
+            if LAMBDA_REGEX.search(before):
+                yield 0, "E731 do not assign a lambda expression, use a def"
+                break
+            if before.startswith('def '):
+                yield 0, "E704 multiple statements on one line (def)"
+            else:
+                yield found, "E701 multiple statements on one line (colon)"
+        found = line.find(':', found + 1)
+    found = line.find(';')
+    while -1 < found:
+        if found < last_char:
+            yield found, "E702 multiple statements on one line (semicolon)"
+        else:
+            yield found, "E703 statement ends with a semicolon"
+        found = line.find(';', found + 1)
+
+
+def explicit_line_join(logical_line, tokens):
+    r"""Avoid explicit line join between brackets.
+
+    The preferred way of wrapping long lines is by using Python's implied line
+    continuation inside parentheses, brackets and braces.  Long lines can be
+    broken over multiple lines by wrapping expressions in parentheses.  These
+    should be used in preference to using a backslash for line continuation.
+
+    E502: aaa = [123, \\n       123]
+    E502: aaa = ("bbb " \\n       "ccc")
+
+    Okay: aaa = [123,\n       123]
+    Okay: aaa = ("bbb "\n       "ccc")
+    Okay: aaa = "bbb " \\n    "ccc"
+    """
+    prev_start = prev_end = parens = 0
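+    # backslash remembers the (row, col) of a trailing '\' on the previous
+    # physical line; it is reported as E502 only while inside brackets,
+    # where the explicit continuation is redundant.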
+    for token_type, text, start, end, line in tokens:
+        if start[0] != prev_start and parens and backslash:
+            yield backslash, "E502 the backslash is redundant between brackets"
+        if end[0] != prev_end:
+            if line.rstrip('\r\n').endswith('\\'):
+                backslash = (end[0], len(line.splitlines()[-1]) - 1)
+            else:
+                backslash = None
+            prev_start = prev_end = end[0]
+        else:
+            prev_start = start[0]
+        if token_type == tokenize.OP:
+            if text in '([{':
+                parens += 1
+            elif text in ')]}':
+                parens -= 1
+
+
+def comparison_to_singleton(logical_line, noqa):
+    r"""Comparison to singletons should use "is" or "is not".
+
+    Comparisons to singletons like None should always be done
+    with "is" or "is not", never the equality operators.
+
+    Okay: if arg is not None:
+    E711: if arg != None:
+    E712: if arg == True:
+
+    Also, beware of writing if x when you really mean if x is not None --
+    e.g. when testing whether a variable or argument that defaults to None was
+    set to some other value.  The other value might have a type (such as a
+    container) that could be false in a boolean context!
+    """
+    match = not noqa and COMPARE_SINGLETON_REGEX.search(logical_line)
+    if match:
+        same = (match.group(1) == '==')
+        singleton = match.group(2)
+        msg = "'if cond is %s:'" % (('' if same else 'not ') + singleton)
+        if singleton in ('None',):
+            code = 'E711'
+        else:
+            code = 'E712'
+            nonzero = ((singleton == 'True' and same) or
+                       (singleton == 'False' and not same))
+            msg += " or 'if %scond:'" % ('' if nonzero else 'not ')
+        yield match.start(1), ("%s comparison to %s should be %s" %
+                               (code, singleton, msg))
+
+
+def comparison_negative(logical_line):
+    r"""Negative comparison should be done using "not in" and "is not".
+
+    Okay: if x not in y:\n    pass
+    Okay: assert (X in Y or X is Z)
+    Okay: if not (X in Y):\n    pass
+    Okay: zz = x is not y
+    E713: Z = not X in Y
+    E713: if not X.B in Y:\n    pass
+    E714: if not X is Y:\n    pass
+    E714: Z = not X.B is Y
+    """
+    match = COMPARE_NEGATIVE_REGEX.search(logical_line)
+    if match:
+        pos = match.start(1)
+        if match.group(2) == 'in':
+            yield pos, "E713 test for membership should be 'not in'"
+        else:
+            yield pos, "E714 test for object identity should be 'is not'"
+
+
+def comparison_type(logical_line):
+    r"""Object type comparisons should always use isinstance().
+
+    Do not compare types directly.
+
+    Okay: if isinstance(obj, int):
+    E721: if type(obj) is type(1):
+
+    When checking if an object is a string, keep in mind that it might be a
+    unicode string too! In Python 2.3, str and unicode have a common base
+    class, basestring, so you can do:
+
+    Okay: if isinstance(obj, basestring):
+    Okay: if type(a1) is type(b1):
+    """
+    match = COMPARE_TYPE_REGEX.search(logical_line)
+    if match:
+        inst = match.group(1)
+        if inst and isidentifier(inst) and inst not in SINGLETONS:
+            return  # Allow comparison for types which are not obvious
+        yield match.start(), "E721 do not compare types, use 'isinstance()'"
+
+
+def python_3000_has_key(logical_line, noqa):
+    r"""The {}.has_key() method is removed in Python 3: use the 'in' operator.
+
+    Okay: if "alph" in d:\n    print d["alph"]
+    W601: assert d.has_key('alph')
+    """
+    pos = logical_line.find('.has_key(')
+    if pos > -1 and not noqa:
+        yield pos, "W601 .has_key() is deprecated, use 'in'"
+
+
+def python_3000_raise_comma(logical_line):
+    r"""When raising an exception, use "raise ValueError('message')".
+
+    The older form is removed in Python 3.
+
+    Okay: raise DummyError("Message")
+    W602: raise DummyError, "Message"
+    """
+    match = RAISE_COMMA_REGEX.match(logical_line)
+    if match and not RERAISE_COMMA_REGEX.match(logical_line):
+        yield match.end() - 1, "W602 deprecated form of raising exception"
+
+
+def python_3000_not_equal(logical_line):
+    r"""New code should always use != instead of <>.
+
+    The older syntax is removed in Python 3.
+
+    Okay: if a != 'no':
+    W603: if a <> 'no':
+    """
+    pos = logical_line.find('<>')
+    if pos > -1:
+        yield pos, "W603 '<>' is deprecated, use '!='"
+
+
+def python_3000_backticks(logical_line):
+    r"""Backticks are removed in Python 3: use repr() instead.
+
+    Okay: val = repr(1 + 2)
+    W604: val = `1 + 2`
+    """
+    pos = logical_line.find('`')
+    if pos > -1:
+        yield pos, "W604 backticks are deprecated, use 'repr()'"
+
+
+##############################################################################
+# Helper functions
+##############################################################################
+
+
+if '' == ''.encode():
+    # Python 2: implicit encoding.
+    def readlines(filename):
+        """Read the source code."""
+        with open(filename, 'rU') as f:
+            return f.readlines()
+    isidentifier = re.compile(r'[a-zA-Z_]\w*').match
+    stdin_get_value = sys.stdin.read
+else:
+    # Python 3
+    def readlines(filename):
+        """Read the source code."""
+        try:
+            with open(filename, 'rb') as f:
+                (coding, lines) = tokenize.detect_encoding(f.readline)
+                f = TextIOWrapper(f, coding, line_buffering=True)
+                return [l.decode(coding) for l in lines] + f.readlines()
+        except (LookupError, SyntaxError, UnicodeError):
+            # Fall back if file encoding is improperly declared
+            with open(filename, encoding='latin-1') as f:
+                return f.readlines()
+    isidentifier = str.isidentifier
+
+    def stdin_get_value():
+        return TextIOWrapper(sys.stdin.buffer, errors='ignore').read()
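+# Matches the magic '# noqa' / '# nopep8' comments (case-insensitive) used to
+# suppress reports for a line.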
+noqa = re.compile(r'# no(?:qa|pep8)\b', re.I).search
+
+
+def expand_indent(line):
+    r"""Return the amount of indentation.
+
+    Tabs are expanded to the next multiple of 8.
+
+    >>> expand_indent('    ')
+    4
+    >>> expand_indent('\t')
+    8
+    >>> expand_indent('       \t')
+    8
+    >>> expand_indent('        \t')
+    16
+    """
+    if '\t' not in line:
+        return len(line) - len(line.lstrip())
+    result = 0
+    for char in line:
+        if char == '\t':
+            result = result // 8 * 8 + 8
+        elif char == ' ':
+            result += 1
+        else:
+            break
+    return result
+
+
+def mute_string(text):
+    """Replace contents with 'xxx' to prevent syntax matching.
+
+    >>> mute_string('"abc"')
+    '"xxx"'
+    >>> mute_string("'''abc'''")
+    "'''xxx'''"
+    >>> mute_string("r'abc'")
+    "r'xxx'"
+    """
+    # String modifiers (e.g. u or r)
+    start = text.index(text[-1]) + 1
+    end = len(text) - 1
+    # Triple quotes
+    if text[-3:] in ('"""', "'''"):
+        start += 2
+        end -= 2
+    return text[:start] + 'x' * (end - start) + text[end:]
+
+
+def parse_udiff(diff, patterns=None, parent='.'):
+    """Return a dictionary of matching lines."""
+    # For each file of the diff, the entry key is the filename,
+    # and the value is a set of row numbers to consider.
+    rv = {}
+    path = nrows = None
+    for line in diff.splitlines():
+        if nrows:
+            if line[:1] != '-':
+                nrows -= 1
+            continue
+        if line[:3] == '@@ ':
+            hunk_match = HUNK_REGEX.match(line)
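+            # a hunk header may omit the row count (e.g. '@@ -10 +10 @@'),
+            # so int(g or '1') defaults a missing count to 1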
+            (row, nrows) = [int(g or '1') for g in hunk_match.groups()]
+            rv[path].update(range(row, row + nrows))
+        elif line[:3] == '+++':
+            path = line[4:].split('\t', 1)[0]
+            if path[:2] == 'b/':
+                path = path[2:]
+            rv[path] = set()
+    return dict([(os.path.join(parent, path), rows)
+                 for (path, rows) in rv.items()
+                 if rows and filename_match(path, patterns)])
+
+
+def normalize_paths(value, parent=os.curdir):
+    """Parse a comma-separated list of paths.
+
+    Return a list of absolute paths.
+    """
+    if not value or isinstance(value, list):
+        return value
+    paths = []
+    for path in value.split(','):
+        if '/' in path:
+            path = os.path.abspath(os.path.join(parent, path))
+        paths.append(path.rstrip('/'))
+    return paths
+
+
+def filename_match(filename, patterns, default=True):
+    """Check if patterns contains a pattern that matches filename.
+
+    If patterns is unspecified, this always returns True.
+    """
+    if not patterns:
+        return default
+    return any(fnmatch(filename, pattern) for pattern in patterns)
+
+
+if COMMENT_WITH_NL:
+    def _is_eol_token(token):
+        return (token[0] in NEWLINE or
+                (token[0] == tokenize.COMMENT and token[1] == token[4]))
+else:
+    def _is_eol_token(token):
+        return token[0] in NEWLINE
+
+
+##############################################################################
+# Framework to run all checks
+##############################################################################
+
+
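+# Registry of check plugins, keyed by kind of check; each entry maps the
+# check callable to (error codes, argument names).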
+_checks = {'physical_line': {}, 'logical_line': {}, 'tree': {}}
+
+
+def register_check(check, codes=None):
+    """Register a new check object."""
+    def _add_check(check, kind, codes, args):
+        if check in _checks[kind]:
+            _checks[kind][check][0].extend(codes or [])
+        else:
+            _checks[kind][check] = (codes or [''], args)
+    if inspect.isfunction(check):
+        args = inspect.getargspec(check)[0]
+        if args and args[0] in ('physical_line', 'logical_line'):
+            if codes is None:
+                codes = ERRORCODE_REGEX.findall(check.__doc__ or '')
+            _add_check(check, args[0], codes, args)
+    elif inspect.isclass(check):
+        if inspect.getargspec(check.__init__)[0][:2] == ['self', 'tree']:
+            _add_check(check, 'tree', codes, None)
+
+
+def init_checks_registry():
+    """Register all globally visible functions.
+
+    The first argument name is either 'physical_line' or 'logical_line'.
+    """
+    mod = inspect.getmodule(register_check)
+    for (name, function) in inspect.getmembers(mod, inspect.isfunction):
+        register_check(function)
+init_checks_registry()
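+
+# Note: init_checks_registry() only scans functions defined in this module.
+# An external plugin registers itself explicitly; a minimal sketch (the code
+# 'W999' and the function name below are made up for illustration):
+#
+#     def check_dollar(physical_line):
+#         pos = physical_line.find('$')
+#         if pos > -1:
+#             return pos, "W999 avoid dollar signs"
+#
+#     register_check(check_dollar, codes=['W999'])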
+
+
+class Checker(object):
+    """Load a Python source file, tokenize it, check coding style."""
+
+    def __init__(self, filename=None, lines=None,
+                 options=None, report=None, **kwargs):
+        if options is None:
+            options = StyleGuide(kwargs).options
+        else:
+            assert not kwargs
+        self._io_error = None
+        self._physical_checks = options.physical_checks
+        self._logical_checks = options.logical_checks
+        self._ast_checks = options.ast_checks
+        self.max_line_length = options.max_line_length
+        self.multiline = False  # in a multiline string?
+        self.hang_closing = options.hang_closing
+        self.verbose = options.verbose
+        self.filename = filename
+        if filename is None:
+            self.filename = 'stdin'
+            self.lines = lines or []
+        elif filename == '-':
+            self.filename = 'stdin'
+            self.lines = stdin_get_value().splitlines(True)
+        elif lines is None:
+            try:
+                self.lines = readlines(filename)
+            except IOError:
+                (exc_type, exc) = sys.exc_info()[:2]
+                self._io_error = '%s: %s' % (exc_type.__name__, exc)
+                self.lines = []
+        else:
+            self.lines = lines
+        if self.lines:
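+            # self.lines may hold decoded text (first char U+FEFF) or, on
+            # Python 2, raw bytes (first byte 0xEF of the UTF-8 BOM), hence
+            # the two stripping branches below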
+            ord0 = ord(self.lines[0][0])
+            if ord0 in (0xef, 0xfeff):  # Strip the UTF-8 BOM
+                if ord0 == 0xfeff:
+                    self.lines[0] = self.lines[0][1:]
+                elif self.lines[0][:3] == '\xef\xbb\xbf':
+                    self.lines[0] = self.lines[0][3:]
+        self.report = report or options.report
+        self.report_error = self.report.error
+
+    def report_invalid_syntax(self):
+        """Check if the syntax is valid."""
+        (exc_type, exc) = sys.exc_info()[:2]
+        if len(exc.args) > 1:
+            offset = exc.args[1]
+            if len(offset) > 2:
+                offset = offset[1:3]
+        else:
+            offset = (1, 0)
+        self.report_error(offset[0], offset[1] or 0,
+                          'E901 %s: %s' % (exc_type.__name__, exc.args[0]),
+                          self.report_invalid_syntax)
+
+    def readline(self):
+        """Get the next line from the input buffer."""
+        if self.line_number >= self.total_lines:
+            return ''
+        line = self.lines[self.line_number]
+        self.line_number += 1
+        if self.indent_char is None and line[:1] in WHITESPACE:
+            self.indent_char = line[0]
+        return line
+
+    def run_check(self, check, argument_names):
+        """Run a check plugin."""
+        arguments = []
+        for name in argument_names:
+            arguments.append(getattr(self, name))
+        return check(*arguments)
+
+    def check_physical(self, line):
+        """Run all physical checks on a raw input line."""
+        self.physical_line = line
+        for name, check, argument_names in self._physical_checks:
+            result = self.run_check(check, argument_names)
+            if result is not None:
+                (offset, text) = result
+                self.report_error(self.line_number, offset, text, check)
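+                # after reporting E101, adopt this line's indent character,
+                # presumably so the rest of the file is judged against the
+                # character actually in use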
+                if text[:4] == 'E101':
+                    self.indent_char = line[0]
+
+    def build_tokens_line(self):
+        """Build a logical line from tokens."""
+        logical = []
+        comments = []
+        length = 0
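+        # mapping pairs offsets into the logical line with (row, col)
+        # positions in the source; check_logical() uses it to translate a
+        # check's offset back to a physical location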
+        prev_row = prev_col = mapping = None
+        for token_type, text, start, end, line in self.tokens:
+            if token_type in SKIP_TOKENS:
+                continue
+            if not mapping:
+                mapping = [(0, start)]
+            if token_type == tokenize.COMMENT:
+                comments.append(text)
+                continue
+            if token_type == tokenize.STRING:
+                text = mute_string(text)
+            if prev_row:
+                (start_row, start_col) = start
+                if prev_row != start_row:    # different row
+                    prev_text = self.lines[prev_row - 1][prev_col - 1]
+                    if prev_text == ',' or (prev_text not in '{[('
+                                            and text not in '}])'):
+                        text = ' ' + text
+                elif prev_col != start_col:  # different column
+                    text = line[prev_col:start_col] + text
+            logical.append(text)
+            length += len(text)
+            mapping.append((length, end))
+            (prev_row, prev_col) = end
+        self.logical_line = ''.join(logical)
+        self.noqa = comments and noqa(''.join(comments))
+        return mapping
+
+    def check_logical(self):
+        """Build a line from tokens and run all logical checks on it."""
+        self.report.increment_logical_line()
+        mapping = self.build_tokens_line()
+        (start_row, start_col) = mapping[0][1]
+        start_line = self.lines[start_row - 1]
+        self.indent_level = expand_indent(start_line[:start_col])
+        if self.blank_before < self.blank_lines:
+            self.blank_before = self.blank_lines
+        if self.verbose >= 2:
+            print(self.logical_line[:80].rstrip())
+        for name, check, argument_names in self._logical_checks:
+            if self.verbose >= 4:
+                print('   ' + name)
+            for offset, text in self.run_check(check, argument_names) or ():
+                if not isinstance(offset, tuple):
+                    for token_offset, pos in mapping:
+                        if offset <= token_offset:
+                            break
+                    offset = (pos[0], pos[1] + offset - token_offset)
+                self.report_error(offset[0], offset[1], text, check)
+        if self.logical_line:
+            self.previous_indent_level = self.indent_level
+            self.previous_logical = self.logical_line
+        self.blank_lines = 0
+        self.tokens = []
+
+    def check_ast(self):
+        """Build the file's AST and run all AST checks."""
+        try:
+            tree = compile(''.join(self.lines), '', 'exec', PyCF_ONLY_AST)
+        except (SyntaxError, TypeError):
+            return self.report_invalid_syntax()
+        for name, cls, __ in self._ast_checks:
+            checker = cls(tree, self.filename)
+            for lineno, offset, text, check in checker.run():
+                if not self.lines or not noqa(self.lines[lineno - 1]):
+                    self.report_error(lineno, offset, text, check)
+
+    def generate_tokens(self):
+        """Tokenize the file, run physical line checks and yield tokens."""
+        if self._io_error:
+            self.report_error(1, 0, 'E902 %s' % self._io_error, readlines)
+        tokengen = tokenize.generate_tokens(self.readline)
+        try:
+            for token in tokengen:
+                if token[2][0] > self.total_lines:
+                    return
+                self.maybe_check_physical(token)
+                yield token
+        except (SyntaxError, tokenize.TokenError):
+            self.report_invalid_syntax()
+
+    def maybe_check_physical(self, token):
+        """If appropriate (based on token), check current physical line(s)."""
+        # Called after every token, but act only on end of line.
+        if _is_eol_token(token):
+            # Obviously, a newline token ends a single physical line.
+            self.check_physical(token[4])
+        elif token[0] == tokenize.STRING and '\n' in token[1]:
+            # Less obviously, a string that contains newlines is a
+            # multiline string, either triple-quoted or with internal
+            # newlines backslash-escaped. Check every physical line in the
+            # string *except* for the last one: its newline is outside of
+            # the multiline string, so we consider it a regular physical
+            # line, and will check it like any other physical line.
+            #
+            # Subtleties:
+            # - we don't *completely* ignore the last line; if it contains
+            #   the magical "# noqa" comment, we disable all physical
+            #   checks for the entire multiline string
+            # - have to wind self.line_number back because initially it
+            #   points to the last line of the string, and we want
+            #   check_physical() to give accurate feedback
+            if noqa(token[4]):
+                return
+            self.multiline = True
+            self.line_number = token[2][0]
+            for line in token[1].split('\n')[:-1]:
+                self.check_physical(line + '\n')
+                self.line_number += 1
+            self.multiline = False
+
+    def check_all(self, expected=None, line_offset=0):
+        """Run all checks on the input file."""
+        self.report.init_file(self.filename, self.lines, expected, line_offset)
+        self.total_lines = len(self.lines)
+        if self._ast_checks:
+            self.check_ast()
+        self.line_number = 0
+        self.indent_char = None
+        self.indent_level = self.previous_indent_level = 0
+        self.previous_logical = ''
+        self.tokens = []
+        self.blank_lines = self.blank_before = 0
+        parens = 0
+        for token in self.generate_tokens():
+            self.tokens.append(token)
+            token_type, text = token[0:2]
+            if self.verbose >= 3:
+                if token[2][0] == token[3][0]:
+                    pos = '[%s:%s]' % (token[2][1] or '', token[3][1])
+                else:
+                    pos = 'l.%s' % token[3][0]
+                print('l.%s\t%s\t%s\t%r' %
+                      (token[2][0], pos, tokenize.tok_name[token[0]], text))
+            if token_type == tokenize.OP:
+                if text in '([{':
+                    parens += 1
+                elif text in '}])':
+                    parens -= 1
+            elif not parens:
+                if token_type in NEWLINE:
+                    if token_type == tokenize.NEWLINE:
+                        self.check_logical()
+                        self.blank_before = 0
+                    elif len(self.tokens) == 1:
+                        # The physical line contains only this token.
+                        self.blank_lines += 1
+                        del self.tokens[0]
+                    else:
+                        self.check_logical()
+                elif COMMENT_WITH_NL and token_type == tokenize.COMMENT:
+                    if len(self.tokens) == 1:
+                        # The comment also ends a physical line
+                        token = list(token)
+                        token[1] = text.rstrip('\r\n')
+                        token[3] = (token[2][0], token[2][1] + len(token[1]))
+                        self.tokens = [tuple(token)]
+                        self.check_logical()
+        if self.tokens:
+            self.check_physical(self.lines[-1])
+            self.check_logical()
+        return self.report.get_file_results()
+
+
+class BaseReport(object):
+    """Collect the results of the checks."""
+
+    print_filename = False
+
+    def __init__(self, options):
+        self._benchmark_keys = options.benchmark_keys
+        self._ignore_code = options.ignore_code
+        # Results
+        self.elapsed = 0
+        self.total_errors = 0
+        self.counters = dict.fromkeys(self._benchmark_keys, 0)
+        self.messages = {}
+
+    def start(self):
+        """Start the timer."""
+        self._start_time = time.time()
+
+    def stop(self):
+        """Stop the timer."""
+        self.elapsed = time.time() - self._start_time
+
+    def init_file(self, filename, lines, expected, line_offset):
+        """Signal a new file."""
+        self.filename = filename
+        self.lines = lines
+        self.expected = expected or ()
+        self.line_offset = line_offset
+        self.file_errors = 0
+        self.counters['files'] += 1
+        self.counters['physical lines'] += len(lines)
+
+    def increment_logical_line(self):
+        """Signal a new logical line."""
+        self.counters['logical lines'] += 1
+
+    def error(self, line_number, offset, text, check):
+        """Report an error, according to options."""
+        code = text[:4]
+        if self._ignore_code(code):
+            return
+        if code in self.counters:
+            self.counters[code] += 1
+        else:
+            self.counters[code] = 1
+            self.messages[code] = text[5:]
+        # Don't care about expected errors or warnings
+        if code in self.expected:
+            return
+        if self.print_filename and not self.file_errors:
+            print(self.filename)
+        self.file_errors += 1
+        self.total_errors += 1
+        return code
+
+    def get_file_results(self):
+        """Return the count of errors and warnings for this file."""
+        return self.file_errors
+
+    def get_count(self, prefix=''):
+        """Return the total count of errors and warnings."""
+        return sum([self.counters[key]
+                    for key in self.messages if key.startswith(prefix)])
+
+    def get_statistics(self, prefix=''):
+        """Get statistics for message codes that start with the prefix.
+
+        prefix='' matches all errors and warnings
+        prefix='E' matches all errors
+        prefix='W' matches all warnings
+        prefix='E4' matches all errors that have to do with imports
+        """
+        return ['%-7s %s %s' % (self.counters[key], key, self.messages[key])
+                for key in sorted(self.messages) if key.startswith(prefix)]
+
+    def print_statistics(self, prefix=''):
+        """Print overall statistics (number of errors and warnings)."""
+        for line in self.get_statistics(prefix):
+            print(line)
+
+    def print_benchmark(self):
+        """Print benchmark numbers."""
+        print('%-7.2f %s' % (self.elapsed, 'seconds elapsed'))
+        if self.elapsed:
+            for key in self._benchmark_keys:
+                print('%-7d %s per second (%d total)' %
+                      (self.counters[key] / self.elapsed, key,
+                       self.counters[key]))
+
+
+class FileReport(BaseReport):
+    """Collect the results of the checks and print only the filenames."""
+    print_filename = True
+
+
+class StandardReport(BaseReport):
+    """Collect and print the results of the checks."""
+
+    def __init__(self, options):
+        super(StandardReport, self).__init__(options)
+        self._fmt = REPORT_FORMAT.get(options.format.lower(),
+                                      options.format)
+        self._repeat = options.repeat
+        self._show_source = options.show_source
+        self._show_pep8 = options.show_pep8
+
+    def init_file(self, filename, lines, expected, line_offset):
+        """Signal a new file."""
+        self._deferred_print = []
+        return super(StandardReport, self).init_file(
+            filename, lines, expected, line_offset)
+
+    def error(self, line_number, offset, text, check):
+        """Report an error, according to options."""
+        code = super(StandardReport, self).error(line_number, offset,
+                                                 text, check)
+        if code and (self.counters[code] == 1 or self._repeat):
+            self._deferred_print.append(
+                (line_number, offset, code, text[5:], check.__doc__))
+        return code
+
+    def get_file_results(self):
+        """Print the result and return the overall count for this file."""
+        self._deferred_print.sort()
+        for line_number, offset, code, text, doc in self._deferred_print:
+            print(self._fmt % {
+                'path': self.filename,
+                'row': self.line_offset + line_number, 'col': offset + 1,
+                'code': code, 'text': text,
+            })
+            if self._show_source:
+                if line_number > len(self.lines):
+                    line = ''
+                else:
+                    line = self.lines[line_number - 1]
+                print(line.rstrip())
+                print(re.sub(r'\S', ' ', line[:offset]) + '^')
+            if self._show_pep8 and doc:
+                print('    ' + doc.strip())
+        return self.file_errors
+
+
+class DiffReport(StandardReport):
+    """Collect and print the results for the changed lines only."""
+
+    def __init__(self, options):
+        super(DiffReport, self).__init__(options)
+        self._selected = options.selected_lines
+
+    def error(self, line_number, offset, text, check):
+        if line_number not in self._selected[self.filename]:
+            return
+        return super(DiffReport, self).error(line_number, offset, text, check)
+
+
+class StyleGuide(object):
+    """Initialize a PEP-8 instance with few options."""
+
+    def __init__(self, *args, **kwargs):
+        # build options from the command line
+        self.checker_class = kwargs.pop('checker_class', Checker)
+        parse_argv = kwargs.pop('parse_argv', False)
+        config_file = kwargs.pop('config_file', None)
+        parser = kwargs.pop('parser', None)
+        # build options from dict
+        options_dict = dict(*args, **kwargs)
+        arglist = None if parse_argv else options_dict.get('paths', None)
+        options, self.paths = process_options(
+            arglist, parse_argv, config_file, parser)
+        if options_dict:
+            options.__dict__.update(options_dict)
+            if 'paths' in options_dict:
+                self.paths = options_dict['paths']
+
+        self.runner = self.input_file
+        self.options = options
+
+        if not options.reporter:
+            options.reporter = BaseReport if options.quiet else StandardReport
+
+        options.select = tuple(options.select or ())
+        if not (options.select or options.ignore or
+                options.testsuite or options.doctest) and DEFAULT_IGNORE:
+            # The default choice: ignore controversial checks
+            options.ignore = tuple(DEFAULT_IGNORE.split(','))
+        else:
+            # Ignore all checks which are not explicitly selected
+            options.ignore = ('',) if options.select else tuple(options.ignore)
+        options.benchmark_keys = BENCHMARK_KEYS[:]
+        options.ignore_code = self.ignore_code
+        options.physical_checks = self.get_checks('physical_line')
+        options.logical_checks = self.get_checks('logical_line')
+        options.ast_checks = self.get_checks('tree')
+        self.init_report()
+
+    def init_report(self, reporter=None):
+        """Initialize the report instance."""
+        self.options.report = (reporter or self.options.reporter)(self.options)
+        return self.options.report
+
+    def check_files(self, paths=None):
+        """Run all checks on the paths."""
+        if paths is None:
+            paths = self.paths
+        report = self.options.report
+        runner = self.runner
+        report.start()
+        try:
+            for path in paths:
+                if os.path.isdir(path):
+                    self.input_dir(path)
+                elif not self.excluded(path):
+                    runner(path)
+        except KeyboardInterrupt:
+            print('... stopped')
+        report.stop()
+        return report
+
+    def input_file(self, filename, lines=None, expected=None, line_offset=0):
+        """Run all checks on a Python source file."""
+        if self.options.verbose:
+            print('checking %s' % filename)
+        fchecker = self.checker_class(
+            filename, lines=lines, options=self.options)
+        return fchecker.check_all(expected=expected, line_offset=line_offset)
+
+    def input_dir(self, dirname):
+        """Check all files in this directory and all subdirectories."""
+        dirname = dirname.rstrip('/')
+        if self.excluded(dirname):
+            return 0
+        counters = self.options.report.counters
+        verbose = self.options.verbose
+        filepatterns = self.options.filename
+        runner = self.runner
+        for root, dirs, files in os.walk(dirname):
+            if verbose:
+                print('directory ' + root)
+            counters['directories'] += 1
+            for subdir in sorted(dirs):
+                if self.excluded(subdir, root):
+                    dirs.remove(subdir)
+            for filename in sorted(files):
+                # does the filename match a pattern and is it not excluded?
+                if ((filename_match(filename, filepatterns) and
+                     not self.excluded(filename, root))):
+                    runner(os.path.join(root, filename))
+
+    def excluded(self, filename, parent=None):
+        """Check if the file should be excluded.
+
+        Check if 'options.exclude' contains a pattern that matches filename.
+        """
+        if not self.options.exclude:
+            return False
+        basename = os.path.basename(filename)
+        if filename_match(basename, self.options.exclude):
+            return True
+        if parent:
+            filename = os.path.join(parent, filename)
+        filename = os.path.abspath(filename)
+        return filename_match(filename, self.options.exclude)
+
+    def ignore_code(self, code):
+        """Check if the error code should be ignored.
+
+        If 'options.select' contains a prefix of the error code,
+        return False.  Else, if 'options.ignore' contains a prefix of
+        the error code, return True.
+        """
+        if len(code) < 4 and any(s.startswith(code)
+                                 for s in self.options.select):
+            return False
+        return (code.startswith(self.options.ignore) and
+                not code.startswith(self.options.select))
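+
+    # Illustrative example (not part of the original pep8 source): with
+    # options.select = ('E4',) and options.ignore = ('E',), the call
+    # ignore_code('E401') returns False, because the more specific 'E4'
+    # selection overrides the broader 'E' ignore prefix.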
+
+    def get_checks(self, argument_name):
+        """Get all the checks for this category.
+
+        Find all globally visible functions where the first argument name
+        starts with argument_name and which contain selected tests.
+        """
+        checks = []
+        for check, attrs in _checks[argument_name].items():
+            (codes, args) = attrs
+            if any(not (code and self.ignore_code(code)) for code in codes):
+                checks.append((check.__name__, check, args))
+        return sorted(checks)
+
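+# Illustrative example (not part of the original pep8 module): StyleGuide is
+# the programmatic entry point.  A minimal sketch, assuming a file named
+# 'example.py' exists in the current directory:
+#
+#     import pep8
+#     style = pep8.StyleGuide(quiet=True, max_line_length=99)
+#     report = style.check_files(['example.py'])
+#     print(report.total_errors)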
+
+def get_parser(prog='pep8', version=__version__):
+    parser = OptionParser(prog=prog, version=version,
+                          usage="%prog [options] input ...")
+    parser.config_options = [
+        'exclude', 'filename', 'select', 'ignore', 'max-line-length',
+        'hang-closing', 'count', 'format', 'quiet', 'show-pep8',
+        'show-source', 'statistics', 'verbose']
+    parser.add_option('-v', '--verbose', default=0, action='count',
+                      help="print status messages, or debug with -vv")
+    parser.add_option('-q', '--quiet', default=0, action='count',
+                      help="report only file names, or nothing with -qq")
+    parser.add_option('-r', '--repeat', default=True, action='store_true',
+                      help="(obsolete) show all occurrences of the same error")
+    parser.add_option('--first', action='store_false', dest='repeat',
+                      help="show first occurrence of each error")
+    parser.add_option('--exclude', metavar='patterns', default=DEFAULT_EXCLUDE,
+                      help="exclude files or directories which match these "
+                           "comma separated patterns (default: %default)")
+    parser.add_option('--filename', metavar='patterns', default='*.py',
+                      help="when parsing directories, only check filenames "
+                           "matching these comma separated patterns "
+                           "(default: %default)")
+    parser.add_option('--select', metavar='errors', default='',
+                      help="select errors and warnings (e.g. E,W6)")
+    parser.add_option('--ignore', metavar='errors', default='',
+                      help="skip errors and warnings (e.g. E4,W)")
+    parser.add_option('--show-source', action='store_true',
+                      help="show source code for each error")
+    parser.add_option('--show-pep8', action='store_true',
+                      help="show text of PEP 8 for each error "
+                           "(implies --first)")
+    parser.add_option('--statistics', action='store_true',
+                      help="count errors and warnings")
+    parser.add_option('--count', action='store_true',
+                      help="print total number of errors and warnings "
+                           "to standard error and set exit code to 1 if "
+                           "total is not null")
+    parser.add_option('--max-line-length', type='int', metavar='n',
+                      default=MAX_LINE_LENGTH,
+                      help="set maximum allowed line length "
+                           "(default: %default)")
+    parser.add_option('--hang-closing', action='store_true',
+                      help="hang closing bracket instead of matching "
+                           "indentation of opening bracket's line")
+    parser.add_option('--format', metavar='format', default='default',
+                      help="set the error format [default|pylint|<custom>]")
+    parser.add_option('--diff', action='store_true',
+                      help="report only lines changed according to the "
+                           "unified diff received on STDIN")
+    group = parser.add_option_group("Testing Options")
+    if os.path.exists(TESTSUITE_PATH):
+        group.add_option('--testsuite', metavar='dir',
+                         help="run regression tests from dir")
+        group.add_option('--doctest', action='store_true',
+                         help="run doctest on myself")
+    group.add_option('--benchmark', action='store_true',
+                     help="measure processing speed")
+    return parser
+
+
+def read_config(options, args, arglist, parser):
+    """Read both user configuration and local configuration."""
+    config = RawConfigParser()
+
+    user_conf = options.config
+    if user_conf and os.path.isfile(user_conf):
+        if options.verbose:
+            print('user configuration: %s' % user_conf)
+        config.read(user_conf)
+
+    local_dir = os.curdir
+    parent = tail = args and os.path.abspath(os.path.commonprefix(args))
+    while tail:
+        if config.read([os.path.join(parent, fn) for fn in PROJECT_CONFIG]):
+            local_dir = parent
+            if options.verbose:
+                print('local configuration: in %s' % parent)
+            break
+        (parent, tail) = os.path.split(parent)
+
+    pep8_section = parser.prog
+    if config.has_section(pep8_section):
+        option_list = dict([(o.dest, o.type or o.action)
+                            for o in parser.option_list])
+
+        # First, read the default values
+        (new_options, __) = parser.parse_args([])
+
+        # Second, parse the configuration
+        for opt in config.options(pep8_section):
+            if opt.replace('_', '-') not in parser.config_options:
+                print("  unknown option '%s' ignored" % opt)
+                continue
+            if options.verbose > 1:
+                print("  %s = %s" % (opt, config.get(pep8_section, opt)))
+            normalized_opt = opt.replace('-', '_')
+            opt_type = option_list[normalized_opt]
+            if opt_type in ('int', 'count'):
+                value = config.getint(pep8_section, opt)
+            elif opt_type == 'string':
+                value = config.get(pep8_section, opt)
+                if normalized_opt == 'exclude':
+                    value = normalize_paths(value, local_dir)
+            else:
+                assert opt_type in ('store_true', 'store_false')
+                value = config.getboolean(pep8_section, opt)
+            setattr(new_options, normalized_opt, value)
+
+        # Third, overwrite with the command-line options
+        (options, __) = parser.parse_args(arglist, values=new_options)
+    options.doctest = options.testsuite = False
+    return options
+
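+# Illustrative example (not part of the original pep8 module): read_config()
+# above picks up project settings from a [pep8] section in tox.ini or
+# setup.cfg, for instance:
+#
+#     [pep8]
+#     max-line-length = 99
+#     ignore = E226,E24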
+
+def process_options(arglist=None, parse_argv=False, config_file=None,
+                    parser=None):
+    """Process options passed either via arglist or via command line args."""
+    if not parser:
+        parser = get_parser()
+    if not parser.has_option('--config'):
+        if config_file is True:
+            config_file = DEFAULT_CONFIG
+        group = parser.add_option_group("Configuration", description=(
+            "The project options are read from the [%s] section of the "
+            "tox.ini file or the setup.cfg file located in any parent folder "
+            "of the path(s) being processed.  Allowed options are: %s." %
+            (parser.prog, ', '.join(parser.config_options))))
+        group.add_option('--config', metavar='path', default=config_file,
+                         help="user config file location (default: %default)")
+    # Don't read the command line if the module is used as a library.
+    if not arglist and not parse_argv:
+        arglist = []
+    # If parse_argv is True and arglist is None, arguments are
+    # parsed from the command line (sys.argv)
+    (options, args) = parser.parse_args(arglist)
+    options.reporter = None
+
+    if options.ensure_value('testsuite', False):
+        args.append(options.testsuite)
+    elif not options.ensure_value('doctest', False):
+        if parse_argv and not args:
+            if options.diff or any(os.path.exists(name)
+                                   for name in PROJECT_CONFIG):
+                args = ['.']
+            else:
+                parser.error('input not specified')
+        options = read_config(options, args, arglist, parser)
+        options.reporter = parse_argv and options.quiet == 1 and FileReport
+
+    options.filename = options.filename and options.filename.split(',')
+    options.exclude = normalize_paths(options.exclude)
+    options.select = options.select and options.select.split(',')
+    options.ignore = options.ignore and options.ignore.split(',')
+
+    if options.diff:
+        options.reporter = DiffReport
+        stdin = stdin_get_value()
+        options.selected_lines = parse_udiff(stdin, options.filename, args[0])
+        args = sorted(options.selected_lines)
+
+    return options, args
+
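+# Illustrative example (not part of the original pep8 module): calling
+# process_options() directly with an explicit argument list, as a library
+# would, instead of reading sys.argv:
+#
+#     options, paths = process_options(['--max-line-length=99', 'example.py'])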
+
+def _main():
+    """Parse options and run checks on Python source."""
+    import signal
+
+    # Handle "Broken pipe" gracefully
+    try:
+        signal.signal(signal.SIGPIPE, lambda signum, frame: sys.exit(1))
+    except AttributeError:
+        pass    # not supported on Windows
+
+    pep8style = StyleGuide(parse_argv=True, config_file=True)
+    options = pep8style.options
+    if options.doctest or options.testsuite:
+        from testsuite.support import run_tests
+        report = run_tests(pep8style)
+    else:
+        report = pep8style.check_files()
+    if options.statistics:
+        report.print_statistics()
+    if options.benchmark:
+        report.print_benchmark()
+    if options.testsuite and not options.quiet:
+        report.print_results()
+    if report.total_errors:
+        if options.count:
+            sys.stderr.write(str(report.total_errors) + '\n')
+        sys.exit(1)
+
+if __name__ == '__main__':
+    _main()
\ No newline at end of file
diff --git a/python/helpers/pydev/third_party/wrapped_for_pydev/ctypes/__init__.py b/python/helpers/pydev/third_party/wrapped_for_pydev/ctypes/__init__.py
new file mode 100644
index 0000000..3b927b4
--- /dev/null
+++ b/python/helpers/pydev/third_party/wrapped_for_pydev/ctypes/__init__.py
@@ -0,0 +1,518 @@
+#@PydevCodeAnalysisIgnore
+"""create and manipulate C data types in Python"""
+
+import os as _os, sys as _sys
+from itertools import chain as _chain
+
+# special developer support to use ctypes from the CVS sandbox,
+# without installing it
+# XXX Remove this for the python core version
+_magicfile = _os.path.join(_os.path.dirname(__file__), ".CTYPES_DEVEL")
+if _os.path.isfile(_magicfile):
+    execfile(_magicfile)
+del _magicfile
+
+__version__ = "0.9.9.6"
+
+from _ctypes import Union, Structure, Array
+from _ctypes import _Pointer
+from _ctypes import CFuncPtr as _CFuncPtr
+from _ctypes import __version__ as _ctypes_version
+from _ctypes import RTLD_LOCAL, RTLD_GLOBAL
+from _ctypes import ArgumentError
+
+from struct import calcsize as _calcsize
+
+if __version__ != _ctypes_version:
+    raise Exception, ("Version number mismatch", __version__, _ctypes_version)
+
+if _os.name in ("nt", "ce"):
+    from _ctypes import FormatError
+
+from _ctypes import FUNCFLAG_CDECL as _FUNCFLAG_CDECL, \
+     FUNCFLAG_PYTHONAPI as _FUNCFLAG_PYTHONAPI
+
+"""
+WINOLEAPI -> HRESULT
+WINOLEAPI_(type)
+
+STDMETHODCALLTYPE
+
+STDMETHOD(name)
+STDMETHOD_(type, name)
+
+STDAPICALLTYPE
+"""
+
+def create_string_buffer(init, size=None):
+    """create_string_buffer(aString) -> character array
+    create_string_buffer(anInteger) -> character array
+    create_string_buffer(aString, anInteger) -> character array
+    """
+    if isinstance(init, (str, unicode)):
+        if size is None:
+            size = len(init) + 1
+        buftype = c_char * size
+        buf = buftype()
+        buf.value = init
+        return buf
+    elif isinstance(init, (int, long)):
+        buftype = c_char * init
+        buf = buftype()
+        return buf
+    raise TypeError, init
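+
+# Illustrative example (not part of the original ctypes source):
+#     create_string_buffer("Hello")      # NUL terminated, 6 bytes long
+#     create_string_buffer(10)           # zero initialized, 10 bytes long
+#     create_string_buffer("Hi", 8)      # value "Hi" inside an 8 byte array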
+
+def c_buffer(init, size=None):
+##    "deprecated, use create_string_buffer instead"
+##    import warnings
+##    warnings.warn("c_buffer is deprecated, use create_string_buffer instead",
+##                  DeprecationWarning, stacklevel=2)
+    return create_string_buffer(init, size)
+
+_c_functype_cache = {}
+def CFUNCTYPE(restype, *argtypes):
+    """CFUNCTYPE(restype, *argtypes) -> function prototype.
+
+    restype: the result type
+    argtypes: a sequence specifying the argument types
+
+    The function prototype can be called in three ways to create a
+    callable object:
+
+    prototype(integer address) -> foreign function
+    prototype(callable) -> create and return a C callable function from callable
+    prototype(integer index, method name[, paramflags]) -> foreign function calling a COM method
+    prototype((ordinal number, dll object)[, paramflags]) -> foreign function exported by ordinal
+    prototype((function name, dll object)[, paramflags]) -> foreign function exported by name
+    """
+    try:
+        return _c_functype_cache[(restype, argtypes)]
+    except KeyError:
+        class CFunctionType(_CFuncPtr):
+            _argtypes_ = argtypes
+            _restype_ = restype
+            _flags_ = _FUNCFLAG_CDECL
+        _c_functype_cache[(restype, argtypes)] = CFunctionType
+        return CFunctionType
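+
+# Illustrative example (not part of the original ctypes source): building a
+# prototype for a C comparison callback such as the one qsort() expects.
+# c_int and POINTER are defined further below in this module.
+#     CMPFUNC = CFUNCTYPE(c_int, POINTER(c_int), POINTER(c_int))
+#     def py_cmp(a, b):
+#         return a[0] - b[0]
+#     cmp_callback = CMPFUNC(py_cmp)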
+
+if _os.name in ("nt", "ce"):
+    from _ctypes import LoadLibrary as _dlopen
+    from _ctypes import FUNCFLAG_STDCALL as _FUNCFLAG_STDCALL
+    if _os.name == "ce":
+        # 'ce' doesn't have the stdcall calling convention
+        _FUNCFLAG_STDCALL = _FUNCFLAG_CDECL
+
+    _win_functype_cache = {}
+    def WINFUNCTYPE(restype, *argtypes):
+        # docstring set later (very similar to CFUNCTYPE.__doc__)
+        try:
+            return _win_functype_cache[(restype, argtypes)]
+        except KeyError:
+            class WinFunctionType(_CFuncPtr):
+                _argtypes_ = argtypes
+                _restype_ = restype
+                _flags_ = _FUNCFLAG_STDCALL
+            _win_functype_cache[(restype, argtypes)] = WinFunctionType
+            return WinFunctionType
+    if WINFUNCTYPE.__doc__:
+        WINFUNCTYPE.__doc__ = CFUNCTYPE.__doc__.replace("CFUNCTYPE", "WINFUNCTYPE")
+
+elif _os.name == "posix":
+    from _ctypes import dlopen as _dlopen #@UnresolvedImport
+
+from _ctypes import sizeof, byref, addressof, alignment
+from _ctypes import _SimpleCData
+
+class py_object(_SimpleCData):
+    _type_ = "O"
+
+class c_short(_SimpleCData):
+    _type_ = "h"
+
+class c_ushort(_SimpleCData):
+    _type_ = "H"
+
+class c_long(_SimpleCData):
+    _type_ = "l"
+
+class c_ulong(_SimpleCData):
+    _type_ = "L"
+
+if _calcsize("i") == _calcsize("l"):
+    # if int and long have the same size, make c_int an alias for c_long
+    c_int = c_long
+    c_uint = c_ulong
+else:
+    class c_int(_SimpleCData):
+        _type_ = "i"
+
+    class c_uint(_SimpleCData):
+        _type_ = "I"
+
+class c_float(_SimpleCData):
+    _type_ = "f"
+
+class c_double(_SimpleCData):
+    _type_ = "d"
+
+if _calcsize("l") == _calcsize("q"):
+    # if long and long long have the same size, make c_longlong an alias for c_long
+    c_longlong = c_long
+    c_ulonglong = c_ulong
+else:
+    class c_longlong(_SimpleCData):
+        _type_ = "q"
+
+    class c_ulonglong(_SimpleCData):
+        _type_ = "Q"
+    ##    def from_param(cls, val):
+    ##        return ('d', float(val), val)
+    ##    from_param = classmethod(from_param)
+
+class c_ubyte(_SimpleCData):
+    _type_ = "B"
+c_ubyte.__ctype_le__ = c_ubyte.__ctype_be__ = c_ubyte
+# backward compatibility:
+##c_uchar = c_ubyte
+
+class c_byte(_SimpleCData):
+    _type_ = "b"
+c_byte.__ctype_le__ = c_byte.__ctype_be__ = c_byte
+
+class c_char(_SimpleCData):
+    _type_ = "c"
+c_char.__ctype_le__ = c_char.__ctype_be__ = c_char
+
+class c_char_p(_SimpleCData):
+    _type_ = "z"
+
+class c_void_p(_SimpleCData):
+    _type_ = "P"
+c_voidp = c_void_p # backwards compatibility (to a bug)
+
+# This cache maps types to pointers to them.
+_pointer_type_cache = {}
+
+def POINTER(cls):
+    try:
+        return _pointer_type_cache[cls]
+    except KeyError:
+        pass
+    if type(cls) is str:
+        klass = type(_Pointer)("LP_%s" % cls,
+                               (_Pointer,),
+                               {})
+        _pointer_type_cache[id(klass)] = klass
+        return klass
+    else:
+        name = "LP_%s" % cls.__name__
+        klass = type(_Pointer)(name,
+                               (_Pointer,),
+                               {'_type_': cls})
+        _pointer_type_cache[cls] = klass
+    return klass
+
+try:
+    from _ctypes import set_conversion_mode
+except ImportError:
+    pass
+else:
+    if _os.name in ("nt", "ce"):
+        set_conversion_mode("mbcs", "ignore")
+    else:
+        set_conversion_mode("ascii", "strict")
+
+    class c_wchar_p(_SimpleCData):
+        _type_ = "Z"
+
+    class c_wchar(_SimpleCData):
+        _type_ = "u"
+
+    POINTER(c_wchar).from_param = c_wchar_p.from_param #_SimpleCData.c_wchar_p_from_param
+
+    def create_unicode_buffer(init, size=None):
+        """create_unicode_buffer(aString) -> character array
+        create_unicode_buffer(anInteger) -> character array
+        create_unicode_buffer(aString, anInteger) -> character array
+        """
+        if isinstance(init, (str, unicode)):
+            if size is None:
+                size = len(init) + 1
+            buftype = c_wchar * size
+            buf = buftype()
+            buf.value = init
+            return buf
+        elif isinstance(init, (int, long)):
+            buftype = c_wchar * init
+            buf = buftype()
+            return buf
+        raise TypeError, init
+
+POINTER(c_char).from_param = c_char_p.from_param #_SimpleCData.c_char_p_from_param
+
+# XXX Deprecated
+def SetPointerType(pointer, cls):
+    if _pointer_type_cache.get(cls, None) is not None:
+        raise RuntimeError, \
+              "This type already exists in the cache"
+    if not _pointer_type_cache.has_key(id(pointer)):
+        raise RuntimeError, \
+              "What's this???"
+    pointer.set_type(cls)
+    _pointer_type_cache[cls] = pointer
+    del _pointer_type_cache[id(pointer)]
+
+
+def pointer(inst):
+    return POINTER(type(inst))(inst)
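+
+# Illustrative example (not part of the original ctypes source):
+#     i = c_int(42)
+#     pi = pointer(i)          # same as POINTER(c_int)(i)
+#     assert pi.contents.value == 42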
+
+# XXX Deprecated
+def ARRAY(typ, len):
+    return typ * len
+
+################################################################
+
+
+class CDLL(object):
+    """An instance of this class represents a loaded dll/shared
+    library, exporting functions using the standard C calling
+    convention (named 'cdecl' on Windows).
+
+    The exported functions can be accessed as attributes, or by
+    indexing with the function name.  Examples:
+
+    <obj>.qsort -> callable object
+    <obj>['qsort'] -> callable object
+
+    Calling the functions releases the Python GIL during the call and
+    reacquires it afterwards.
+    """
+    class _FuncPtr(_CFuncPtr):
+        _flags_ = _FUNCFLAG_CDECL
+        _restype_ = c_int # default, can be overridden in instances
+
+    def __init__(self, name, mode=RTLD_LOCAL, handle=None):
+        self._name = name
+        if handle is None:
+            self._handle = _dlopen(self._name, mode)
+        else:
+            self._handle = handle
+
+    def __repr__(self):
+        return "<%s '%s', handle %x at %x>" % \
+               (self.__class__.__name__, self._name,
+                (self._handle & (_sys.maxint * 2 + 1)),
+                id(self))
+
+    def __getattr__(self, name):
+        if name.startswith('__') and name.endswith('__'):
+            raise AttributeError, name
+        return self.__getitem__(name)
+
+    def __getitem__(self, name_or_ordinal):
+        func = self._FuncPtr((name_or_ordinal, self))
+        if not isinstance(name_or_ordinal, (int, long)):
+            func.__name__ = name_or_ordinal
+            setattr(self, name_or_ordinal, func)
+        return func
+
+class PyDLL(CDLL):
+    """This class represents the Python library itself.  It allows to
+    access Python API functions.  The GIL is not released, and
+    Python exceptions are handled correctly.
+    """
+    class _FuncPtr(_CFuncPtr):
+        _flags_ = _FUNCFLAG_CDECL | _FUNCFLAG_PYTHONAPI
+        _restype_ = c_int # default, can be overridden in instances
+
+if _os.name in ("nt", "ce"):
+
+    class WinDLL(CDLL):
+        """This class represents a dll exporting functions using the
+        Windows stdcall calling convention.
+        """
+        class _FuncPtr(_CFuncPtr):
+            _flags_ = _FUNCFLAG_STDCALL
+            _restype_ = c_int # default, can be overridden in instances
+
+    # XXX Hm, what about HRESULT as normal parameter?
+    # Mustn't it derive from c_long then?
+    from _ctypes import _check_HRESULT, _SimpleCData
+    class HRESULT(_SimpleCData):
+        _type_ = "l"
+        # _check_retval_ is called with the function's result when it
+        # is used as restype.  It checks for the FAILED bit, and
+        # raises a WindowsError if it is set.
+        #
+        # The _check_retval_ method is implemented in C, so that the
+        # method definition itself is not included in the traceback
+        # when it raises an error - that is what we want (and Python
+        # doesn't have a way to raise an exception in the caller's
+        # frame).
+        _check_retval_ = _check_HRESULT
+
+    class OleDLL(CDLL):
+        """This class represents a dll exporting functions using the
+        Windows stdcall calling convention, and returning HRESULT.
+        HRESULT error values are automatically raised as WindowsError
+        exceptions.
+        """
+        class _FuncPtr(_CFuncPtr):
+            _flags_ = _FUNCFLAG_STDCALL
+            _restype_ = HRESULT
+
+class LibraryLoader(object):
+    def __init__(self, dlltype):
+        self._dlltype = dlltype
+
+    def __getattr__(self, name):
+        if name[0] == '_':
+            raise AttributeError(name)
+        dll = self._dlltype(name)
+        setattr(self, name, dll)
+        return dll
+
+    def __getitem__(self, name):
+        return getattr(self, name)
+
+    def LoadLibrary(self, name):
+        return self._dlltype(name)
+
+cdll = LibraryLoader(CDLL)
+pydll = LibraryLoader(PyDLL)
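+
+# Illustrative example (not part of the original ctypes source), assuming a
+# glibc-based Linux system:
+#     libc = cdll.LoadLibrary("libc.so.6")    # or simply CDLL("libc.so.6")
+#     libc.printf("hello from ctypes\n")      # functions become attributes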
+
+if _os.name in ("nt", "ce"):
+    pythonapi = PyDLL("python dll", None, _sys.dllhandle)
+elif _sys.platform == "cygwin":
+    pythonapi = PyDLL("libpython%d.%d.dll" % _sys.version_info[:2])
+else:
+    pythonapi = PyDLL(None)
+
+
+if _os.name in ("nt", "ce"):
+    windll = LibraryLoader(WinDLL)
+    oledll = LibraryLoader(OleDLL)
+
+    if _os.name == "nt":
+        GetLastError = windll.kernel32.GetLastError
+    else:
+        GetLastError = windll.coredll.GetLastError
+
+    def WinError(code=None, descr=None):
+        if code is None:
+            code = GetLastError()
+        if descr is None:
+            descr = FormatError(code).strip()
+        return WindowsError(code, descr)
+
+_pointer_type_cache[None] = c_void_p
+
+if sizeof(c_uint) == sizeof(c_void_p):
+    c_size_t = c_uint
+elif sizeof(c_ulong) == sizeof(c_void_p):
+    c_size_t = c_ulong
+
+# functions
+
+from _ctypes import _memmove_addr, _memset_addr, _string_at_addr, _cast_addr
+
+## void *memmove(void *, const void *, size_t);
+memmove = CFUNCTYPE(c_void_p, c_void_p, c_void_p, c_size_t)(_memmove_addr)
+
+## void *memset(void *, int, size_t)
+memset = CFUNCTYPE(c_void_p, c_void_p, c_int, c_size_t)(_memset_addr)
+
+def PYFUNCTYPE(restype, *argtypes):
+    class CFunctionType(_CFuncPtr):
+        _argtypes_ = argtypes
+        _restype_ = restype
+        _flags_ = _FUNCFLAG_CDECL | _FUNCFLAG_PYTHONAPI
+    return CFunctionType
+_cast = PYFUNCTYPE(py_object, c_void_p, py_object)(_cast_addr)
+
+def cast(obj, typ):
+    result = _cast(obj, typ)
+    result.__keepref = obj
+    return result
+
+_string_at = CFUNCTYPE(py_object, c_void_p, c_int)(_string_at_addr)
+def string_at(ptr, size=0):
+    """string_at(addr[, size]) -> string
+
+    Return the string at addr."""
+    return _string_at(ptr, size)
+
+try:
+    from _ctypes import _wstring_at_addr
+except ImportError:
+    pass
+else:
+    _wstring_at = CFUNCTYPE(py_object, c_void_p, c_int)(_wstring_at_addr)
+    def wstring_at(ptr, size=0):
+        """wstring_at(addr[, size]) -> string
+
+        Return the string at addr."""
+        return _wstring_at(ptr, size)
+
+
+if _os.name == "nt": # COM stuff
+    def DllGetClassObject(rclsid, riid, ppv):
+        # First ask ctypes.com.server, then comtypes.server, for the
+        # class object.
+
+        # trick py2exe by doing dynamic imports
+        result = -2147221231 # CLASS_E_CLASSNOTAVAILABLE
+        try:
+            ctcom = __import__("ctypes.com.server", globals(), locals(), ['*'])
+        except ImportError:
+            pass
+        else:
+            result = ctcom.DllGetClassObject(rclsid, riid, ppv)
+
+        if result == -2147221231: # CLASS_E_CLASSNOTAVAILABLE
+            try:
+                ccom = __import__("comtypes.server", globals(), locals(), ['*'])
+            except ImportError:
+                pass
+            else:
+                result = ccom.DllGetClassObject(rclsid, riid, ppv)
+
+        return result
+
+    def DllCanUnloadNow():
+        # First ask ctypes.com.server, then comtypes.server, if we can unload or not.
+        # trick py2exe by doing dynamic imports
+        result = 0 # S_OK
+        try:
+            ctcom = __import__("ctypes.com.server", globals(), locals(), ['*'])
+        except ImportError:
+            pass
+        else:
+            result = ctcom.DllCanUnloadNow()
+            if result != 0: # != S_OK
+                return result
+
+        try:
+            ccom = __import__("comtypes.server", globals(), locals(), ['*'])
+        except ImportError:
+            return result
+        try:
+            return ccom.DllCanUnloadNow()
+        except AttributeError:
+            pass
+        return result
+
+from ctypes._endian import BigEndianStructure, LittleEndianStructure
+
+# Fill in specifically-sized types
+c_int8 = c_byte
+c_uint8 = c_ubyte
+for kind in [c_short, c_int, c_long, c_longlong]:
+    if sizeof(kind) == 2: c_int16 = kind
+    elif sizeof(kind) == 4: c_int32 = kind
+    elif sizeof(kind) == 8: c_int64 = kind
+for kind in [c_ushort, c_uint, c_ulong, c_ulonglong]:
+    if sizeof(kind) == 2: c_uint16 = kind
+    elif sizeof(kind) == 4: c_uint32 = kind
+    elif sizeof(kind) == 8: c_uint64 = kind
+del kind
diff --git a/python/helpers/pydev/third_party/wrapped_for_pydev/ctypes/_ctypes.dll b/python/helpers/pydev/third_party/wrapped_for_pydev/ctypes/_ctypes.dll
new file mode 100644
index 0000000..238e869
--- /dev/null
+++ b/python/helpers/pydev/third_party/wrapped_for_pydev/ctypes/_ctypes.dll
Binary files differ
diff --git a/python/helpers/pydev/third_party/wrapped_for_pydev/ctypes/_endian.py b/python/helpers/pydev/third_party/wrapped_for_pydev/ctypes/_endian.py
new file mode 100644
index 0000000..7de0376
--- /dev/null
+++ b/python/helpers/pydev/third_party/wrapped_for_pydev/ctypes/_endian.py
@@ -0,0 +1,58 @@
+#@PydevCodeAnalysisIgnore
+import sys
+from ctypes import *
+
+_array_type = type(c_int * 3)
+
+def _other_endian(typ):
+    """Return the type with the 'other' byte order.  Simple types like
+    c_int and so on already have __ctype_be__ and __ctype_le__
+    attributes which contain the types, for more complicated types
+    only arrays are supported.
+    """
+    try:
+        return getattr(typ, _OTHER_ENDIAN)
+    except AttributeError:
+        if type(typ) == _array_type:
+            return _other_endian(typ._type_) * typ._length_
+        raise TypeError("This type does not support other endian: %s" % typ)
+
+class _swapped_meta(type(Structure)):
+    def __setattr__(self, attrname, value):
+        if attrname == "_fields_":
+            fields = []
+            for desc in value:
+                name = desc[0]
+                typ = desc[1]
+                rest = desc[2:]
+                fields.append((name, _other_endian(typ)) + rest)
+            value = fields
+        super(_swapped_meta, self).__setattr__(attrname, value)
+
+################################################################
+
+# Note: The Structure metaclass checks for the *presence* (not the
+# value!) of a _swapped_bytes_ attribute to determine the bit order in
+# structures containing bit fields.
+
+if sys.byteorder == "little":
+    _OTHER_ENDIAN = "__ctype_be__"
+
+    LittleEndianStructure = Structure
+
+    class BigEndianStructure(Structure):
+        """Structure with big endian byte order"""
+        __metaclass__ = _swapped_meta
+        _swappedbytes_ = None
+
+elif sys.byteorder == "big":
+    _OTHER_ENDIAN = "__ctype_le__"
+
+    BigEndianStructure = Structure
+    class LittleEndianStructure(Structure):
+        """Structure with little endian byte order"""
+        __metaclass__ = _swapped_meta
+        _swappedbytes_ = None
+
+else:
+    raise RuntimeError("Invalid byteorder")
diff --git a/python/helpers/pydev/third_party/wrapped_for_pydev/ctypes/ctypes-README.txt b/python/helpers/pydev/third_party/wrapped_for_pydev/ctypes/ctypes-README.txt
new file mode 100644
index 0000000..bf8de1e
--- /dev/null
+++ b/python/helpers/pydev/third_party/wrapped_for_pydev/ctypes/ctypes-README.txt
@@ -0,0 +1,134 @@
+(Note: this is a compiled distribution of ctypes, compiled for cygwin
+ to allow using the cygwin conversions directly from interpreterInfo.  The tests
+ have been removed to reduce the added size.  It is only used by PyDev on cygwin). 
+
+Overview
+
+    ctypes is a ffi (Foreign Function Interface) package for Python.
+
+    It allows calling functions exposed from dlls/shared libraries and
+    has extensive facilities to create, access and manipulate simple
+    and complicated C data types transparently from Python - in other
+    words: wrap libraries in pure Python.
+
+    ctypes runs on Windows, MacOS X, Linux, Solaris, FreeBSD.  It may
+    also run on other systems, provided that libffi supports this
+    platform.
+
+    On Windows, ctypes contains (the beginning of) a COM framework
+    mainly targeted at using and implementing custom COM interfaces.
+
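+    A minimal usage sketch (this example is not part of the original
+    README and assumes a Linux system where libm.so.6 is available):
+
+        from ctypes import CDLL, c_double
+        libm = CDLL("libm.so.6")
+        libm.sqrt.restype = c_double
+        libm.sqrt.argtypes = [c_double]
+        print libm.sqrt(2.0)        # 1.4142135623730951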
+
+News
+
+    ctypes now uses the same code base and libffi on all platforms.
+    For easier installation, the libffi sources are now included in
+    the source distribution - no need to find, build, and install a
+    compatible libffi version.
+
+
+Requirements
+
+    ctypes 0.9 requires Python 2.3 or higher, since it makes intensive
+    use of the new type system.
+
+    ctypes uses libffi, which is copyright Red Hat, Inc.  The complete
+    license is included below.
+
+
+Installation
+
+    Windows
+
+        On Windows, the easiest way is to download the executable
+        installer for your Python version and run it.
+
+    Installation from source
+
+        Separate source distributions are available for windows and
+        non-windows systems.  Please use the .zip file for Windows (it
+        contains the ctypes.com framework), and use the .tar.gz file
+        for non-Windows systems (it contains the complete
+        cross-platform libffi sources).
+
+        To install ctypes from source, unpack the distribution, enter
+        the ctypes-0.9.x source directory, and enter
+
+            python setup.py build
+
+        This will build the Python extension modules.  A C compiler is
+        required. On OS X, the segment attribute live_support must be
+        defined. If your compiler doesn't know about it, upgrade or
+        set the environment variable CCASFLAGS="-Dno_live_support".
+
+        To run the supplied tests, enter
+
+            python setup.py test
+
+        To install ctypes, enter
+
+            python setup.py install --help
+
+        to see the available options, and finally
+
+            python setup.py install [options]
+
+
+        For Windows CE, a project file is provided in
+        wince\_ctypes.vcw.  MS embedded Visual C 4.0 is required to
+        build the extension modules.
+
+
+Additional notes
+
+    Current version: 0.9.9.3
+
+    Homepage: http://starship.python.net/crew/theller/ctypes.html
+
+
+ctypes license
+
+  Copyright (c) 2000, 2001, 2002, 2003, 2004, 2005, 2006 Thomas Heller
+
+  Permission is hereby granted, free of charge, to any person
+  obtaining a copy of this software and associated documentation files
+  (the "Software"), to deal in the Software without restriction,
+  including without limitation the rights to use, copy, modify, merge,
+  publish, distribute, sublicense, and/or sell copies of the Software,
+  and to permit persons to whom the Software is furnished to do so,
+  subject to the following conditions:
+
+  The above copyright notice and this permission notice shall be
+  included in all copies or substantial portions of the Software.
+
+  THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+  EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
+  MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+  NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS
+  BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN
+  ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN
+  CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
+  SOFTWARE.
+
+libffi license
+
+  libffi - Copyright (c) 1996-2003  Red Hat, Inc.
+
+  Permission is hereby granted, free of charge, to any person
+  obtaining a copy of this software and associated documentation files
+  (the ``Software''), to deal in the Software without restriction,
+  including without limitation the rights to use, copy, modify, merge,
+  publish, distribute, sublicense, and/or sell copies of the Software,
+  and to permit persons to whom the Software is furnished to do so,
+  subject to the following conditions:
+
+  The above copyright notice and this permission notice shall be
+  included in all copies or substantial portions of the Software.
+
+  THE SOFTWARE IS PROVIDED ``AS IS'', WITHOUT WARRANTY OF ANY KIND,
+  EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
+  MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+  NONINFRINGEMENT.  IN NO EVENT SHALL CYGNUS SOLUTIONS BE LIABLE FOR
+  ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF
+  CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
+  WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
diff --git a/python/helpers/pydev/third_party/wrapped_for_pydev/ctypes/macholib/.cvsignore b/python/helpers/pydev/third_party/wrapped_for_pydev/ctypes/macholib/.cvsignore
new file mode 100644
index 0000000..0d20b64
--- /dev/null
+++ b/python/helpers/pydev/third_party/wrapped_for_pydev/ctypes/macholib/.cvsignore
@@ -0,0 +1 @@
+*.pyc
diff --git a/python/helpers/pydev/third_party/wrapped_for_pydev/ctypes/macholib/__init__.py b/python/helpers/pydev/third_party/wrapped_for_pydev/ctypes/macholib/__init__.py
new file mode 100644
index 0000000..5621def
--- /dev/null
+++ b/python/helpers/pydev/third_party/wrapped_for_pydev/ctypes/macholib/__init__.py
@@ -0,0 +1,9 @@
+"""
+Enough Mach-O to make your head spin.
+
+See the relevant header files in /usr/include/mach-o
+
+And also Apple's documentation.
+"""
+
+__version__ = '1.0'
diff --git a/python/helpers/pydev/third_party/wrapped_for_pydev/ctypes/macholib/dyld.py b/python/helpers/pydev/third_party/wrapped_for_pydev/ctypes/macholib/dyld.py
new file mode 100644
index 0000000..85073aa
--- /dev/null
+++ b/python/helpers/pydev/third_party/wrapped_for_pydev/ctypes/macholib/dyld.py
@@ -0,0 +1,167 @@
+#@PydevCodeAnalysisIgnore
+"""
+dyld emulation
+"""
+
+import os
+from framework import framework_info
+from dylib import dylib_info
+from itertools import *
+
+__all__ = [
+    'dyld_find', 'framework_find',
+    'framework_info', 'dylib_info',
+]
+
+# These are the defaults as per man dyld(1)
+#
+DEFAULT_FRAMEWORK_FALLBACK = [
+    os.path.expanduser("~/Library/Frameworks"),
+    "/Library/Frameworks",
+    "/Network/Library/Frameworks",
+    "/System/Library/Frameworks",
+]
+
+DEFAULT_LIBRARY_FALLBACK = [
+    os.path.expanduser("~/lib"),
+    "/usr/local/lib",
+    "/lib",
+    "/usr/lib",
+]
+
+def ensure_utf8(s):
+    """Not all of PyObjC and Python understand unicode paths very well yet"""
+    if isinstance(s, unicode):
+        return s.encode('utf8')
+    return s
+
+def dyld_env(env, var):
+    if env is None:
+        env = os.environ
+    rval = env.get(var)
+    if rval is None:
+        return []
+    return rval.split(':')
+
+def dyld_image_suffix(env=None):
+    if env is None:
+        env = os.environ
+    return env.get('DYLD_IMAGE_SUFFIX')
+
+def dyld_framework_path(env=None):
+    return dyld_env(env, 'DYLD_FRAMEWORK_PATH')
+
+def dyld_library_path(env=None):
+    return dyld_env(env, 'DYLD_LIBRARY_PATH')
+
+def dyld_fallback_framework_path(env=None):
+    return dyld_env(env, 'DYLD_FALLBACK_FRAMEWORK_PATH')
+
+def dyld_fallback_library_path(env=None):
+    return dyld_env(env, 'DYLD_FALLBACK_LIBRARY_PATH')
+
+def dyld_image_suffix_search(iterator, env=None):
+    """For a potential path iterator, add DYLD_IMAGE_SUFFIX semantics"""
+    suffix = dyld_image_suffix(env)
+    if suffix is None:
+        return iterator
+    def _inject(iterator=iterator, suffix=suffix):
+        for path in iterator:
+            if path.endswith('.dylib'):
+                yield path[:-len('.dylib')] + suffix + '.dylib'
+            else:
+                yield path + suffix
+            yield path
+    return _inject()
+
+def dyld_override_search(name, env=None):
+    # If DYLD_FRAMEWORK_PATH is set and this dylib_name is a
+    # framework name, use the first file that exists in the framework
+    # path if any.  If there is none go on to search the DYLD_LIBRARY_PATH
+    # if any.
+
+    framework = framework_info(name)
+
+    if framework is not None:
+        for path in dyld_framework_path(env):
+            yield os.path.join(path, framework['name'])
+
+    # If DYLD_LIBRARY_PATH is set then use the first file that exists
+    # in the path.  If none use the original name.
+    for path in dyld_library_path(env):
+        yield os.path.join(path, os.path.basename(name))
+
+def dyld_executable_path_search(name, executable_path=None):
+    # If we haven't done any searching and found a library and the
+    # dylib_name starts with "@executable_path/" then construct the
+    # library name.
+    if name.startswith('@executable_path/') and executable_path is not None:
+        yield os.path.join(executable_path, name[len('@executable_path/'):])
+
+def dyld_default_search(name, env=None):
+    yield name
+
+    framework = framework_info(name)
+
+    if framework is not None:
+        fallback_framework_path = dyld_fallback_framework_path(env)
+        for path in fallback_framework_path:
+            yield os.path.join(path, framework['name'])
+
+    fallback_library_path = dyld_fallback_library_path(env)
+    for path in fallback_library_path:
+        yield os.path.join(path, os.path.basename(name))
+
+    if framework is not None and not fallback_framework_path:
+        for path in DEFAULT_FRAMEWORK_FALLBACK:
+            yield os.path.join(path, framework['name'])
+
+    if not fallback_library_path:
+        for path in DEFAULT_LIBRARY_FALLBACK:
+            yield os.path.join(path, os.path.basename(name))
+
+def dyld_find(name, executable_path=None, env=None):
+    """
+    Find a library or framework using dyld semantics
+    """
+    name = ensure_utf8(name)
+    executable_path = ensure_utf8(executable_path)
+    for path in dyld_image_suffix_search(chain(
+                dyld_override_search(name, env),
+                dyld_executable_path_search(name, executable_path),
+                dyld_default_search(name, env),
+            ), env):
+        if os.path.isfile(path):
+            return path
+    raise ValueError, "dylib %s could not be found" % (name,)
+
+def framework_find(fn, executable_path=None, env=None):
+    """
+    Find a framework using dyld semantics in a very loose manner.
+
+    Will take input such as:
+        Python
+        Python.framework
+        Python.framework/Versions/Current
+    """
+    try:
+        return dyld_find(fn, executable_path=executable_path, env=env)
+    except ValueError, e:
+        pass
+    fmwk_index = fn.rfind('.framework')
+    if fmwk_index == -1:
+        fmwk_index = len(fn)
+        fn += '.framework'
+    fn = os.path.join(fn, os.path.basename(fn[:fmwk_index]))
+    try:
+        return dyld_find(fn, executable_path=executable_path, env=env)
+    except ValueError:
+        raise e
+
+def test_dyld_find():
+    env = {}
+    assert dyld_find('libSystem.dylib') == '/usr/lib/libSystem.dylib'
+    assert dyld_find('System.framework/System') == '/System/Library/Frameworks/System.framework/System'
+
+if __name__ == '__main__':
+    test_dyld_find()
diff --git a/python/helpers/pydev/third_party/wrapped_for_pydev/ctypes/macholib/dylib.py b/python/helpers/pydev/third_party/wrapped_for_pydev/ctypes/macholib/dylib.py
new file mode 100644
index 0000000..aa10750
--- /dev/null
+++ b/python/helpers/pydev/third_party/wrapped_for_pydev/ctypes/macholib/dylib.py
@@ -0,0 +1,63 @@
+"""
+Generic dylib path manipulation
+"""
+
+import re
+
+__all__ = ['dylib_info']
+
+DYLIB_RE = re.compile(r"""(?x)
+(?P<location>^.*)(?:^|/)
+(?P<name>
+    (?P<shortname>\w+?)
+    (?:\.(?P<version>[^._]+))?
+    (?:_(?P<suffix>[^._]+))?
+    \.dylib$
+)
+""")
+
+def dylib_info(filename):
+    """
+    A dylib name can take one of the following four forms:
+        Location/Name.SomeVersion_Suffix.dylib
+        Location/Name.SomeVersion.dylib
+        Location/Name_Suffix.dylib
+        Location/Name.dylib
+
+    returns None if not found or a mapping equivalent to:
+        dict(
+            location='Location',
+            name='Name.SomeVersion_Suffix.dylib',
+            shortname='Name',
+            version='SomeVersion',
+            suffix='Suffix',
+        )
+
+    Note that SomeVersion and Suffix are optional and may be None
+    if not present.
+    """
+    is_dylib = DYLIB_RE.match(filename)
+    if not is_dylib:
+        return None
+    return is_dylib.groupdict()
+
+
+def test_dylib_info():
+    def d(location=None, name=None, shortname=None, version=None, suffix=None):
+        return dict(
+            location=location,
+            name=name,
+            shortname=shortname,
+            version=version,
+            suffix=suffix
+        )
+    assert dylib_info('completely/invalid') is None
+    assert dylib_info('completely/invalide_debug') is None
+    assert dylib_info('P/Foo.dylib') == d('P', 'Foo.dylib', 'Foo')
+    assert dylib_info('P/Foo_debug.dylib') == d('P', 'Foo_debug.dylib', 'Foo', suffix='debug')
+    assert dylib_info('P/Foo.A.dylib') == d('P', 'Foo.A.dylib', 'Foo', 'A')
+    assert dylib_info('P/Foo_debug.A.dylib') == d('P', 'Foo_debug.A.dylib', 'Foo_debug', 'A')
+    assert dylib_info('P/Foo.A_debug.dylib') == d('P', 'Foo.A_debug.dylib', 'Foo', 'A', 'debug')
+
+if __name__ == '__main__':
+    test_dylib_info()
diff --git a/python/helpers/pydev/third_party/wrapped_for_pydev/ctypes/macholib/framework.py b/python/helpers/pydev/third_party/wrapped_for_pydev/ctypes/macholib/framework.py
new file mode 100644
index 0000000..ad6ed55
--- /dev/null
+++ b/python/helpers/pydev/third_party/wrapped_for_pydev/ctypes/macholib/framework.py
@@ -0,0 +1,65 @@
+"""
+Generic framework path manipulation
+"""
+
+import re
+
+__all__ = ['framework_info']
+
+STRICT_FRAMEWORK_RE = re.compile(r"""(?x)
+(?P<location>^.*)(?:^|/)
+(?P<name>
+    (?P<shortname>\w+).framework/
+    (?:Versions/(?P<version>[^/]+)/)?
+    (?P=shortname)
+    (?:_(?P<suffix>[^_]+))?
+)$
+""")
+
+def framework_info(filename):
+    """
+    A framework name can take one of the following four forms:
+        Location/Name.framework/Versions/SomeVersion/Name_Suffix
+        Location/Name.framework/Versions/SomeVersion/Name
+        Location/Name.framework/Name_Suffix
+        Location/Name.framework/Name
+
+    returns None if not found, or a mapping equivalent to:
+        dict(
+            location='Location',
+            name='Name.framework/Versions/SomeVersion/Name_Suffix',
+            shortname='Name',
+            version='SomeVersion',
+            suffix='Suffix',
+        )
+
+    Note that SomeVersion and Suffix are optional and may be None
+    if not present
+    """
+    is_framework = STRICT_FRAMEWORK_RE.match(filename)
+    if not is_framework:
+        return None
+    return is_framework.groupdict()
+
+def test_framework_info():
+    def d(location=None, name=None, shortname=None, version=None, suffix=None):
+        return dict(
+            location=location,
+            name=name,
+            shortname=shortname,
+            version=version,
+            suffix=suffix
+        )
+    assert framework_info('completely/invalid') is None
+    assert framework_info('completely/invalid/_debug') is None
+    assert framework_info('P/F.framework') is None
+    assert framework_info('P/F.framework/_debug') is None
+    assert framework_info('P/F.framework/F') == d('P', 'F.framework/F', 'F')
+    assert framework_info('P/F.framework/F_debug') == d('P', 'F.framework/F_debug', 'F', suffix='debug')
+    assert framework_info('P/F.framework/Versions') is None
+    assert framework_info('P/F.framework/Versions/A') is None
+    assert framework_info('P/F.framework/Versions/A/F') == d('P', 'F.framework/Versions/A/F', 'F', 'A')
+    assert framework_info('P/F.framework/Versions/A/F_debug') == d('P', 'F.framework/Versions/A/F_debug', 'F', 'A', 'debug')
+
+if __name__ == '__main__':
+    test_framework_info()
diff --git a/python/helpers/pydev/third_party/wrapped_for_pydev/ctypes/util.py b/python/helpers/pydev/third_party/wrapped_for_pydev/ctypes/util.py
new file mode 100644
index 0000000..6db0cfb
--- /dev/null
+++ b/python/helpers/pydev/third_party/wrapped_for_pydev/ctypes/util.py
@@ -0,0 +1,124 @@
+#@PydevCodeAnalysisIgnore
+import sys, os
+import ctypes
+
+# find_library(name) returns the pathname of a library, or None.
+if os.name == "nt":
+    def find_library(name):
+        # See MSDN for the REAL search order.
+        for directory in os.environ['PATH'].split(os.pathsep):
+            fname = os.path.join(directory, name)
+            if os.path.exists(fname):
+                return fname
+            if fname.lower().endswith(".dll"):
+                continue
+            fname = fname + ".dll"
+            if os.path.exists(fname):
+                return fname
+        return None
+
+if os.name == "ce":
+    # search path according to MSDN:
+    # - absolute path specified by filename
+    # - The .exe launch directory
+    # - the Windows directory
+    # - ROM dll files (where are they?)
+    # - OEM specified search path: HKLM\Loader\SystemPath
+    def find_library(name):
+        return name
+
+if os.name == "posix" and sys.platform == "darwin":
+    from ctypes.macholib.dyld import dyld_find as _dyld_find
+    def find_library(name):
+        possible = ['lib%s.dylib' % name,
+                    '%s.dylib' % name,
+                    '%s.framework/%s' % (name, name)]
+        for name in possible:
+            try:
+                return _dyld_find(name)
+            except ValueError:
+                continue
+        return None
+
+elif os.name == "posix":
+    # Andreas Degert's find functions, using gcc, /sbin/ldconfig, objdump
+    import re, tempfile
+
+    def _findLib_gcc(name):
+        expr = '[^\(\)\s]*lib%s\.[^\(\)\s]*' % name
+        cmd = 'if type gcc &>/dev/null; then CC=gcc; else CC=cc; fi;' \
+              '$CC -Wl,-t -o /dev/null 2>&1 -l' + name
+        try:
+            fdout, outfile = tempfile.mkstemp()
+            fd = os.popen(cmd)
+            trace = fd.read()
+            err = fd.close()
+        finally:
+            try:
+                os.unlink(outfile)
+            except OSError, e:
+                import errno
+                if e.errno != errno.ENOENT:
+                    raise
+        res = re.search(expr, trace)
+        if not res:
+            return None
+        return res.group(0)
+
+    def _findLib_ld(name):
+        expr = '/[^\(\)\s]*lib%s\.[^\(\)\s]*' % name
+        res = re.search(expr, os.popen('/sbin/ldconfig -p 2>/dev/null').read())
+        if not res:
+            # Hm, this works only for libs needed by the python executable.
+            cmd = 'ldd %s 2>/dev/null' % sys.executable
+            res = re.search(expr, os.popen(cmd).read())
+            if not res:
+                return None
+        return res.group(0)
+
+    def _get_soname(f):
+        cmd = "objdump -p -j .dynamic 2>/dev/null " + f
+        res = re.search(r'\sSONAME\s+([^\s]+)', os.popen(cmd).read())
+        if not res:
+            return None
+        return res.group(1)
+
+    def find_library(name):
+        lib = _findLib_ld(name) or _findLib_gcc(name)
+        if not lib:
+            return None
+        return _get_soname(lib)
+
+################################################################
+# test code
+
+def test():
+    from ctypes import cdll
+    if os.name == "nt":
+        sys.stdout.write('%s\n' % (cdll.msvcrt,))
+        sys.stdout.write('%s\n' % (cdll.load("msvcrt"),))
+        sys.stdout.write('%s\n' % (find_library("msvcrt"),))
+
+    if os.name == "posix":
+        # find and load_version
+        sys.stdout.write('%s\n' % (find_library("m"),))
+        sys.stdout.write('%s\n' % (find_library("c"),))
+        sys.stdout.write('%s\n' % (find_library("bz2"),))
+
+        # getattr
+##        print_ cdll.m
+##        print_ cdll.bz2
+
+        # load
+        if sys.platform == "darwin":
+            sys.stdout.write('%s\n' % (cdll.LoadLibrary("libm.dylib"),))
+            sys.stdout.write('%s\n' % (cdll.LoadLibrary("libcrypto.dylib"),))
+            sys.stdout.write('%s\n' % (cdll.LoadLibrary("libSystem.dylib"),))
+            sys.stdout.write('%s\n' % (cdll.LoadLibrary("System.framework/System"),))
+        else:
+            sys.stdout.write('%s\n' % (cdll.LoadLibrary("libm.so"),))
+            sys.stdout.write('%s\n' % (cdll.LoadLibrary("libcrypt.so"),))
+            sys.stdout.write('%s\n' % (find_library("crypt"),))
+
+if __name__ == "__main__":
+    test()
diff --git a/python/helpers/pydev/third_party/wrapped_for_pydev/ctypes/wintypes.py b/python/helpers/pydev/third_party/wrapped_for_pydev/ctypes/wintypes.py
new file mode 100644
index 0000000..d31f11e
--- /dev/null
+++ b/python/helpers/pydev/third_party/wrapped_for_pydev/ctypes/wintypes.py
@@ -0,0 +1,98 @@
+#@PydevCodeAnalysisIgnore
+# XXX This module needs cleanup.
+
+from ctypes import *
+
+DWORD = c_ulong
+WORD = c_ushort
+BYTE = c_byte
+
+ULONG = c_ulong
+LONG = c_long
+
+LARGE_INTEGER = c_longlong
+ULARGE_INTEGER = c_ulonglong
+
+
+HANDLE = c_ulong # in the header files: void *
+
+HWND = HANDLE
+HDC = HANDLE
+HMODULE = HANDLE
+HINSTANCE = HANDLE
+HRGN = HANDLE
+HTASK = HANDLE
+HKEY = HANDLE
+HPEN = HANDLE
+HGDIOBJ = HANDLE
+HMENU = HANDLE
+
+LCID = DWORD
+
+WPARAM = c_uint
+LPARAM = c_long
+
+BOOL = c_long
+VARIANT_BOOL = c_short
+
+LPCOLESTR = LPOLESTR = OLESTR = c_wchar_p
+LPCWSTR = LPWSTR = c_wchar_p
+
+LPCSTR = LPSTR = c_char_p
+
+class RECT(Structure):
+    _fields_ = [("left", c_long),
+                ("top", c_long),
+                ("right", c_long),
+                ("bottom", c_long)]
+RECTL = RECT
+
+class POINT(Structure):
+    _fields_ = [("x", c_long),
+                ("y", c_long)]
+POINTL = POINT
+
+class SIZE(Structure):
+    _fields_ = [("cx", c_long),
+                ("cy", c_long)]
+SIZEL = SIZE
+
+def RGB(red, green, blue):
+    return red + (green << 8) + (blue << 16)
+
+class FILETIME(Structure):
+    _fields_ = [("dwLowDateTime", DWORD),
+                ("dwHighDateTime", DWORD)]
+
+class MSG(Structure):
+    _fields_ = [("hWnd", HWND),
+                ("message", c_uint),
+                ("wParam", WPARAM),
+                ("lParam", LPARAM),
+                ("time", DWORD),
+                ("pt", POINT)]
+MAX_PATH = 260
+
+class WIN32_FIND_DATAA(Structure):
+    _fields_ = [("dwFileAttributes", DWORD),
+                ("ftCreationTime", FILETIME),
+                ("ftLastAccessTime", FILETIME),
+                ("ftLastWriteTime", FILETIME),
+                ("nFileSizeHigh", DWORD),
+                ("nFileSizeLow", DWORD),
+                ("dwReserved0", DWORD),
+                ("dwReserved1", DWORD),
+                ("cFileName", c_char * MAX_PATH),
+                ("cAlternameFileName", c_char * 14)]
+
+class WIN32_FIND_DATAW(Structure):
+    _fields_ = [("dwFileAttributes", DWORD),
+                ("ftCreationTime", FILETIME),
+                ("ftLastAccessTime", FILETIME),
+                ("ftLastWriteTime", FILETIME),
+                ("nFileSizeHigh", DWORD),
+                ("nFileSizeLow", DWORD),
+                ("dwReserved0", DWORD),
+                ("dwReserved1", DWORD),
+                ("cFileName", c_wchar * MAX_PATH),
+                ("cAlternameFileName", c_wchar * 14)]
diff --git a/python/helpers/pydev/third_party/wrapped_for_pydev/not_in_default_pythonpath.txt b/python/helpers/pydev/third_party/wrapped_for_pydev/not_in_default_pythonpath.txt
new file mode 100644
index 0000000..24084e9
--- /dev/null
+++ b/python/helpers/pydev/third_party/wrapped_for_pydev/not_in_default_pythonpath.txt
@@ -0,0 +1 @@
+The wrapped_for_pydev folder is not in the default pythonpath... (no __init__.py file)
\ No newline at end of file
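Illustrative only: because the folder ships without an __init__.py, code that wants the wrapped modules has to put the directory on sys.path itself before importing them (the path below is hypothetical):

import os
import sys

wrapped = os.path.join(os.path.dirname(os.path.abspath(__file__)),
                       'third_party', 'wrapped_for_pydev')
if wrapped not in sys.path:
    sys.path.insert(0, wrapped)   # the bundled ctypes package is now importable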
diff --git a/python/helpers/rest_formatter.py b/python/helpers/rest_formatter.py
index e3575f8..a1d176c 100644
--- a/python/helpers/rest_formatter.py
+++ b/python/helpers/rest_formatter.py
@@ -1,4 +1,5 @@
 import sys
+import re
 from docutils.core import publish_string
 from docutils import nodes
 from docutils.nodes import Text
@@ -54,6 +55,53 @@
       self.body.append("</a>")
     HTMLTranslator.depart_field_body(self, node)
 
+  def visit_reference(self, node):
+    atts = {}
+    if 'refuri' in node:
+      atts['href'] = node['refuri']
+      if self.settings.cloak_email_addresses and atts['href'].startswith('mailto:'):
+        atts['href'] = self.cloak_mailto(atts['href'])
+        self.in_mailto = True
+      # atts['class'] += ' external'
+    else:
+      assert 'refid' in node, 'References must have "refuri" or "refid" attribute.'
+      atts['href'] = '#' + node['refid']
+      atts['class'] = (atts.get('class', '') + ' internal').strip()
+    if not isinstance(node.parent, nodes.TextElement):
+      assert len(node) == 1 and isinstance(node[0], nodes.image)
+      atts['class'] = (atts.get('class', '') + ' image-reference').strip()
+    self.body.append(self.starttag(node, 'a', '', **atts))
+
+  def starttag(self, node, tagname, suffix='\n', **attributes):
+    attr_dicts = [attributes]
+    if isinstance(node, nodes.Node):
+        attr_dicts.append(node.attributes)
+    if isinstance(node, dict):
+        attr_dicts.append(node)
+    # Munge each attribute dictionary.  Unfortunately, we need to
+    # iterate through attributes one at a time because some
+    # versions of docutils don't case-normalize attributes.
+    for attr_dict in attr_dicts:
+        for (key, val) in attr_dict.items():
+            # Prefix all CSS classes with "rst-"; and prefix all
+            # names with "rst-" to avoid conflicts.
+            if key.lower() in ('class', 'id', 'name'):
+                attr_dict[key] = 'rst-%s' % val
+            elif key.lower() in ('classes', 'ids', 'names'):
+                attr_dict[key] = ['rst-%s' % cls for cls in val]
+            elif key.lower() == 'href':
+                if attr_dict[key][:1] == '#':
+                    attr_dict[key] = '#rst-%s' % attr_dict[key][1:]
+                else:
+                    pass
+    # For headings, use class="heading"
+    if re.match(r'^h\d+$', tagname):
+        attributes['class'] = ' '.join([attributes.get('class',''),
+                                        'heading']).strip()
+
+    return HTMLTranslator.starttag(self, node, tagname, suffix,
+                                       **attributes)
+
 
   def visit_field_list(self, node):
     fields = {}
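Detached from docutils, the per-attribute rewriting that the starttag override above applies before delegating to HTMLTranslator amounts to the following (illustrative sketch; plain dict in, plain dict out):

def prefix_rst(attrs):
    # Prefix class/id/name values with "rst-" and rewrite local anchors.
    munged = dict(attrs)
    for key, val in attrs.items():
        if key.lower() in ('class', 'id', 'name'):
            munged[key] = 'rst-%s' % val
        elif key.lower() in ('classes', 'ids', 'names'):
            munged[key] = ['rst-%s' % v for v in val]
        elif key.lower() == 'href' and val[:1] == '#':
            munged[key] = '#rst-%s' % val[1:]
    return munged

# prefix_rst({'class': 'internal', 'href': '#target'})
# -> {'class': 'rst-internal', 'href': '#rst-target'}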
diff --git a/python/ide/src/com/jetbrains/python/PyIdeCommonOptionsForm.java b/python/ide/src/com/jetbrains/python/PyIdeCommonOptionsForm.java
index 3b59037..79c3552 100644
--- a/python/ide/src/com/jetbrains/python/PyIdeCommonOptionsForm.java
+++ b/python/ide/src/com/jetbrains/python/PyIdeCommonOptionsForm.java
@@ -15,6 +15,7 @@
  */
 package com.jetbrains.python;
 
+import com.intellij.application.options.ModulesComboBox;
 import com.intellij.execution.configuration.EnvironmentVariablesComponent;
 import com.intellij.execution.util.PathMappingsComponent;
 import com.intellij.ide.util.PropertiesComponent;
@@ -26,7 +27,6 @@
 import com.intellij.openapi.projectRoots.SdkModel;
 import com.intellij.openapi.projectRoots.impl.SdkListCellRenderer;
 import com.intellij.openapi.roots.ModuleRootManager;
-import com.intellij.application.options.ModulesComboBox;
 import com.intellij.openapi.roots.ui.configuration.projectRoot.ProjectSdksModel;
 import com.intellij.openapi.ui.ComboBox;
 import com.intellij.openapi.ui.TextFieldWithBrowseButton;
diff --git a/python/ide/src/com/jetbrains/python/configuration/PyContentEntriesModuleConfigurable.java b/python/ide/src/com/jetbrains/python/configuration/PyContentEntriesModuleConfigurable.java
index bcbebc3..ca00208 100644
--- a/python/ide/src/com/jetbrains/python/configuration/PyContentEntriesModuleConfigurable.java
+++ b/python/ide/src/com/jetbrains/python/configuration/PyContentEntriesModuleConfigurable.java
@@ -1,60 +1,29 @@
 package com.jetbrains.python.configuration;
 
 import com.intellij.facet.impl.DefaultFacetsProvider;
-import com.intellij.openapi.Disposable;
-import com.intellij.openapi.actionSystem.AnActionEvent;
-import com.intellij.openapi.actionSystem.CustomShortcutSet;
-import com.intellij.openapi.actionSystem.Presentation;
 import com.intellij.openapi.application.ApplicationManager;
 import com.intellij.openapi.module.Module;
 import com.intellij.openapi.module.impl.ModuleConfigurationStateImpl;
 import com.intellij.openapi.options.Configurable;
 import com.intellij.openapi.options.ConfigurationException;
 import com.intellij.openapi.options.SearchableConfigurable;
-import com.intellij.openapi.project.Project;
-import com.intellij.openapi.roots.ContentEntry;
-import com.intellij.openapi.roots.ContentFolder;
 import com.intellij.openapi.roots.ModifiableRootModel;
 import com.intellij.openapi.roots.ModuleRootManager;
-import com.intellij.openapi.roots.impl.ContentEntryImpl;
-import com.intellij.openapi.roots.impl.ContentFolderBaseImpl;
-import com.intellij.openapi.roots.ui.configuration.*;
-import com.intellij.openapi.roots.ui.configuration.actions.ContentEntryEditingAction;
-import com.intellij.openapi.util.Comparing;
+import com.intellij.openapi.roots.ui.configuration.DefaultModulesProvider;
+import com.intellij.openapi.roots.ui.configuration.FacetsProvider;
 import com.intellij.openapi.util.Computable;
-import com.intellij.openapi.util.Disposer;
-import com.intellij.openapi.vfs.VfsUtilCore;
-import com.intellij.openapi.vfs.VirtualFile;
-import com.intellij.openapi.vfs.pointers.VirtualFilePointer;
-import com.intellij.openapi.vfs.pointers.VirtualFilePointerListener;
-import com.intellij.openapi.vfs.pointers.VirtualFilePointerManager;
-import com.intellij.ui.JBColor;
-import com.intellij.util.EventDispatcher;
-import com.intellij.util.containers.MultiMap;
-import com.jetbrains.python.templateLanguages.TemplatesService;
-import icons.PythonIcons;
+import com.jetbrains.python.module.PyContentEntriesEditor;
 import org.jetbrains.annotations.NotNull;
-import org.jetbrains.annotations.Nullable;
 import org.jetbrains.jps.model.java.JavaSourceRootType;
-import org.jetbrains.jps.model.module.JpsModuleSourceRootType;
 
 import javax.swing.*;
-import javax.swing.event.ChangeEvent;
-import javax.swing.event.ChangeListener;
-import javax.swing.tree.TreeCellRenderer;
 import java.awt.*;
-import java.awt.event.InputEvent;
-import java.awt.event.KeyEvent;
-import java.util.ArrayList;
-import java.util.List;
 
 public class PyContentEntriesModuleConfigurable extends SearchableConfigurable.Parent.Abstract {
-  private static final Color TEMPLATES_COLOR = JBColor.MAGENTA;
-
   private final Module myModule;
   private final JPanel myTopPanel = new JPanel(new BorderLayout());
   protected ModifiableRootModel myModifiableModel;
-  protected MyCommonContentEntriesEditor myEditor;
+  protected PyContentEntriesEditor myEditor;
 
   public PyContentEntriesModuleConfigurable(final Module module) {
     myModule = module;
@@ -108,8 +77,8 @@
     myTopPanel.add(component, BorderLayout.CENTER);
   }
 
-  protected MyCommonContentEntriesEditor createEditor(@NotNull Module module, @NotNull ModuleConfigurationStateImpl state) {
-    return new MyCommonContentEntriesEditor(module, state, JavaSourceRootType.SOURCE);
+  protected PyContentEntriesEditor createEditor(@NotNull Module module, @NotNull ModuleConfigurationStateImpl state) {
+    return new PyContentEntriesEditor(module, state, JavaSourceRootType.SOURCE);
   }
 
   @Override
@@ -171,314 +140,4 @@
     return "python.project.structure";
   }
 
-  private static class MyContentEntryTreeEditor extends ContentEntryTreeEditor {
-
-    private final ChangeListener myListener = new ChangeListener() {
-      @Override
-      public void stateChanged(ChangeEvent e) {
-        update();
-      }
-    };
-
-    public MyContentEntryTreeEditor(Project project, List<ModuleSourceRootEditHandler<?>> handlers) {
-      super(project, handlers);
-    }
-
-    @Override
-    public void setContentEntryEditor(ContentEntryEditor newEditor) {
-      MyCommonContentEntriesEditor.MyContentEntryEditor existingEditor = getContentEntryEditor();
-      if (Comparing.equal(existingEditor, newEditor)) {
-        return;
-      }
-      if (existingEditor != null) {
-        existingEditor.removeListener(myListener);
-      }
-      if (newEditor != null) {
-        ((MyCommonContentEntriesEditor.MyContentEntryEditor)newEditor).addListener(myListener);
-      }
-      super.setContentEntryEditor(newEditor);
-    }
-
-    @Override
-    public MyCommonContentEntriesEditor.MyContentEntryEditor getContentEntryEditor() {
-      return (MyCommonContentEntriesEditor.MyContentEntryEditor)super.getContentEntryEditor();
-    }
-
-    @Override
-    protected void createEditingActions() {
-      super.createEditingActions();
-
-      ContentEntryEditingAction a = new ContentEntryEditingAction(myTree) {
-        {
-          final Presentation templatePresentation = getTemplatePresentation();
-          templatePresentation.setText("Templates");
-          templatePresentation.setDescription("Template Folders");
-          templatePresentation.setIcon(PythonIcons.Python.TemplateRoot);
-        }
-
-        @Override
-        public boolean isSelected(AnActionEvent e) {
-          final VirtualFile[] selectedFiles = getSelectedFiles();
-          return selectedFiles.length != 0 && getContentEntryEditor().hasTemplateRoot(selectedFiles[0]);
-        }
-
-        @Override
-        public void setSelected(AnActionEvent e, boolean isSelected) {
-          final VirtualFile[] selectedFiles = getSelectedFiles();
-          assert selectedFiles.length != 0;
-
-          for (VirtualFile selectedFile : selectedFiles) {
-            boolean wasSelected = getContentEntryEditor().hasTemplateRoot(selectedFile);
-            if (isSelected) {
-              if (!wasSelected) {
-                getContentEntryEditor().addTemplateRoot(selectedFile);
-              }
-            }
-            else {
-              if (wasSelected) {
-                getContentEntryEditor().removeTemplateRoot(selectedFile);
-              }
-            }
-          }
-        }
-      };
-      myEditingActionsGroup.add(a);
-      a.registerCustomShortcutSet(new CustomShortcutSet(KeyStroke.getKeyStroke(KeyEvent.VK_R, InputEvent.ALT_MASK)), myTree);
-    }
-
-    @Override
-    protected TreeCellRenderer getContentEntryCellRenderer() {
-      return new ContentEntryTreeCellRenderer(this, getEditHandlers()) {
-        @Override
-        protected Icon updateIcon(final ContentEntry entry, final VirtualFile file, final Icon originalIcon) {
-          if (getContentEntryEditor().hasTemplateRoot(file)) {
-            return PythonIcons.Python.TemplateRoot;
-          }
-          return super.updateIcon(entry, file, originalIcon);
-        }
-      };
-    }
-  }
-
-  protected static class MyCommonContentEntriesEditor extends CommonContentEntriesEditor {
-    private final MultiMap<ContentEntry, VirtualFilePointer> myTemplateRoots = new MultiMap<ContentEntry, VirtualFilePointer>();
-    private final Module myModule;
-    private Disposable myFilePointersDisposable;
-
-    private final VirtualFilePointerListener DUMMY_LISTENER = new VirtualFilePointerListener() {
-      @Override
-      public void beforeValidityChanged(@NotNull VirtualFilePointer[] pointers) {
-      }
-
-      @Override
-      public void validityChanged(@NotNull VirtualFilePointer[] pointers) {
-      }
-    };
-
-    public MyCommonContentEntriesEditor(Module module,
-                                        ModuleConfigurationStateImpl moduleConfigurationState,
-                                        JpsModuleSourceRootType<?>... rootTypes) {
-      super(module.getName(), moduleConfigurationState, rootTypes);
-      myModule = module;
-      reset();
-    }
-
-    @Override
-    protected ContentEntryTreeEditor createContentEntryTreeEditor(Project project) {
-      return new MyContentEntryTreeEditor(project, getEditHandlers());
-    }
-
-    @Override
-    protected List<ContentEntry> addContentEntries(VirtualFile[] files) {
-      List<ContentEntry> entries = super.addContentEntries(files);
-      addContentEntryPanels(entries.toArray(new ContentEntry[entries.size()]));
-      return entries;
-    }
-
-    @Override
-    public void reset() {
-      if (myFilePointersDisposable != null) {
-        Disposer.dispose(myFilePointersDisposable);
-      }
-      myTemplateRoots.clear();
-
-      myFilePointersDisposable = Disposer.newDisposable();
-      final TemplatesService instance = TemplatesService.getInstance(myModule);
-      if (instance != null) {
-        final List<VirtualFile> folders = instance.getTemplateFolders();
-        for (VirtualFile folder : folders) {
-          ContentEntry contentEntry = findContentEntryForFile(folder);
-          if (contentEntry != null) {
-            myTemplateRoots.putValue(contentEntry, VirtualFilePointerManager.getInstance().create(folder, myFilePointersDisposable,
-                                                                                                   DUMMY_LISTENER));
-          }
-        }
-      }
-
-      if (myRootTreeEditor != null) {
-        ContentEntryEditor editor = myRootTreeEditor.getContentEntryEditor();
-        if(editor!=null) editor.update();
-        myRootTreeEditor.update();
-      }
-    }
-
-    @Nullable
-    private ContentEntry findContentEntryForFile(VirtualFile virtualFile) {
-      for (ContentEntry contentEntry : getModel().getContentEntries()) {
-        final VirtualFile file = contentEntry.getFile();
-        if (file != null && VfsUtilCore.isAncestor(file, virtualFile, false)) {
-          return contentEntry;
-        }
-      }
-      return null;
-    }
-
-    @Override
-    public void disposeUIResources() {
-      super.disposeUIResources();
-      if (myFilePointersDisposable != null) {
-        Disposer.dispose(myFilePointersDisposable);
-      }
-    }
-
-    @Override
-    public void apply() throws ConfigurationException {
-      super.apply();
-      List<VirtualFile> templateRoots = getCurrentState();
-      TemplatesService.getInstance(myModule).setTemplateFolders(templateRoots.toArray(new VirtualFile[templateRoots.size()]));
-    }
-
-    private List<VirtualFile> getCurrentState() {
-      List<VirtualFile> result = new ArrayList<VirtualFile>();
-      for (ContentEntry entry : myTemplateRoots.keySet()) {
-        for (VirtualFilePointer filePointer : myTemplateRoots.get(entry)) {
-          result.add(filePointer.getFile());
-        }
-      }
-      return result;
-    }
-
-    @Override
-    public boolean isModified() {
-      if (super.isModified()) return true;
-      final TemplatesService templatesService = TemplatesService.getInstance(myModule);
-      if (templatesService != null) {
-        List<VirtualFile> original = templatesService.getTemplateFolders();
-        List<VirtualFile> current = getCurrentState();
-
-        if (!Comparing.haveEqualElements(original, current)) return true;
-
-      }
-      return false;
-    }
-
-    @Override
-    protected MyContentEntryEditor createContentEntryEditor(String contentEntryUrl) {
-      return new MyContentEntryEditor(contentEntryUrl, getEditHandlers());
-    }
-
-    protected class MyContentEntryEditor extends ContentEntryEditor {
-      private final EventDispatcher<ChangeListener> myEventDispatcher = EventDispatcher.create(ChangeListener.class);
-
-      public MyContentEntryEditor(String contentEntryUrl, List<ModuleSourceRootEditHandler<?>> handlers) {
-        super(contentEntryUrl, handlers);
-      }
-
-      @Override
-      protected ModifiableRootModel getModel() {
-        return MyCommonContentEntriesEditor.this.getModel();
-      }
-
-      public void addListener(ChangeListener changeListener) {
-        myEventDispatcher.addListener(changeListener);
-      }
-
-      public void removeListener(ChangeListener changeListener) {
-        myEventDispatcher.removeListener(changeListener);
-      }
-
-      @Override
-      protected ContentRootPanel createContentRootPane() {
-        return new MyContentRootPanel();
-      }
-
-      @Override
-      public void deleteContentFolder(ContentEntry contentEntry, ContentFolder folder) {
-        if (folder instanceof TemplateRootFolder) {
-          final VirtualFile file = folder.getFile();
-          if (file != null) {
-            removeTemplateRoot(file);
-          }
-        }
-        else {
-          super.deleteContentFolder(contentEntry, folder);
-        }
-      }
-
-      public void addTemplateRoot(@NotNull final VirtualFile file) {
-        final VirtualFilePointer root = VirtualFilePointerManager.getInstance().create(file, myFilePointersDisposable, DUMMY_LISTENER);
-        myTemplateRoots.putValue(getContentEntry(), root);
-        myEventDispatcher.getMulticaster().stateChanged(new ChangeEvent(this));
-        update();
-      }
-
-      public void removeTemplateRoot(@NotNull final VirtualFile file) {
-        final VirtualFilePointer root = getTemplateRoot(file);
-        if (root != null) {
-          myTemplateRoots.remove(getContentEntry(), root);
-          myEventDispatcher.getMulticaster().stateChanged(new ChangeEvent(this));
-          update();
-        }
-      }
-
-      public boolean hasTemplateRoot(@NotNull final VirtualFile file) {
-        return getTemplateRoot(file) != null;
-      }
-
-      @Nullable
-      public VirtualFilePointer getTemplateRoot(@NotNull final VirtualFile file) {
-        for (VirtualFilePointer filePointer : myTemplateRoots.get(getContentEntry())) {
-          if (Comparing.equal(filePointer.getFile(), file)) {
-            return filePointer;
-          }
-        }
-        return null;
-      }
-
-      protected class MyContentRootPanel extends ContentRootPanel {
-        public MyContentRootPanel() {
-          super(MyContentEntryEditor.this, getEditHandlers());
-        }
-
-        @Override
-        @NotNull
-        protected ContentEntryImpl getContentEntry() {
-          //noinspection ConstantConditions
-          return (ContentEntryImpl)MyContentEntryEditor.this.getContentEntry();
-        }
-
-        @Override
-        protected void addFolderGroupComponents() {
-          super.addFolderGroupComponents();
-          if (!myTemplateRoots.get(getContentEntry()).isEmpty()) {
-            final List<TemplateRootFolder> folders = new ArrayList<TemplateRootFolder>(myTemplateRoots.size());
-            for (VirtualFilePointer root : myTemplateRoots.get(getContentEntry())) {
-              folders.add(new TemplateRootFolder(root, getContentEntry()));
-            }
-            final JComponent sourcesComponent = createFolderGroupComponent("Template Folders",
-                                                                           folders.toArray(new ContentFolder[folders.size()]),
-                                                                           TEMPLATES_COLOR, null);
-            this.add(sourcesComponent, new GridBagConstraints(0, GridBagConstraints.RELATIVE, 1, 1, 1.0, 0.0, GridBagConstraints.NORTH,
-                                                              GridBagConstraints.HORIZONTAL, new Insets(0, 0, 10, 0), 0, 0));
-          }
-        }
-      }
-    }
-  }
-
-  private static class TemplateRootFolder extends ContentFolderBaseImpl {
-    protected TemplateRootFolder(@NotNull VirtualFilePointer filePointer, @NotNull ContentEntryImpl contentEntry) {
-      super(filePointer, contentEntry);
-    }
-  }
 }
diff --git a/python/ide/src/com/jetbrains/python/configuration/PythonPathEditor.java b/python/ide/src/com/jetbrains/python/configuration/PythonPathEditor.java
index 33169a7..0512bb0 100644
--- a/python/ide/src/com/jetbrains/python/configuration/PythonPathEditor.java
+++ b/python/ide/src/com/jetbrains/python/configuration/PythonPathEditor.java
@@ -126,7 +126,7 @@
 
   @Override
   protected void addToolbarButtons(ToolbarDecorator toolbarDecorator) {
-    AnActionButton reloadButton = new AnActionButton("Reload List of Paths", AllIcons.Actions.Refresh) {
+    AnActionButton reloadButton = new AnActionButton("Reload list of paths", AllIcons.Actions.Refresh) {
       @Override
       public void actionPerformed(AnActionEvent e) {
         onReloadButtonClicked();
diff --git a/python/ide/src/com/jetbrains/python/configuration/PythonSdkDetailsDialog.java b/python/ide/src/com/jetbrains/python/configuration/PythonSdkDetailsDialog.java
index b0ca6db..213b0e1 100644
--- a/python/ide/src/com/jetbrains/python/configuration/PythonSdkDetailsDialog.java
+++ b/python/ide/src/com/jetbrains/python/configuration/PythonSdkDetailsDialog.java
@@ -435,7 +435,7 @@
 
   private class ShowPathButton extends AnActionButton implements DumbAware {
     public ShowPathButton() {
-      super("Show path for the selected interpreter", AllIcons.Actions.ShowAsTree);
+      super("Show paths for the selected interpreter", AllIcons.Actions.ShowAsTree);
     }
 
     @Override
diff --git a/python/ide/src/com/jetbrains/python/newProject/actions/AbstractProjectSettingsStep.java b/python/ide/src/com/jetbrains/python/newProject/actions/AbstractProjectSettingsStep.java
index dd68cdb..1ad9cf7 100644
--- a/python/ide/src/com/jetbrains/python/newProject/actions/AbstractProjectSettingsStep.java
+++ b/python/ide/src/com/jetbrains/python/newProject/actions/AbstractProjectSettingsStep.java
@@ -61,6 +61,7 @@
 abstract public class AbstractProjectSettingsStep extends AbstractActionWithPanel implements DumbAware {
   protected final DirectoryProjectGenerator myProjectGenerator;
   private final NullableConsumer<AbstractProjectSettingsStep> myCallback;
+  private final boolean myIsWelcomeScreen;
   private PythonSdkChooserCombo mySdkCombo;
   private boolean myInstallFramework;
   private TextFieldWithBrowseButton myLocationField;
@@ -70,10 +71,13 @@
   private AnAction myCreateAction;
   private Sdk mySdk;
 
-  public AbstractProjectSettingsStep(DirectoryProjectGenerator projectGenerator, NullableConsumer<AbstractProjectSettingsStep> callback) {
+  public AbstractProjectSettingsStep(DirectoryProjectGenerator projectGenerator,
+                                     NullableConsumer<AbstractProjectSettingsStep> callback,
+                                     boolean isWelcomeScreen) {
     super();
     myProjectGenerator = projectGenerator;
     myCallback = callback;
+    myIsWelcomeScreen = isWelcomeScreen;
     myProjectDirectory = FileUtil.findSequentNonexistentFile(new File(ProjectUtil.getBaseDir()), "untitled", "");
     if (myProjectGenerator instanceof WebProjectTemplate) {
       ((WebProjectTemplate)myProjectGenerator).getPeer().addSettingsStateListener(new WebProjectGenerator.SettingsStateListener() {
@@ -109,17 +113,12 @@
   @Override
   public JPanel createPanel() {
     final JPanel basePanel = createBasePanel();
-    final JPanel mainPanel = new JPanel(new BorderLayout()) {
-      @Override
-      protected void paintComponent(Graphics g) {
-        myLocationField.requestFocus();
-      }
-    };
+    final JPanel mainPanel = new JPanel(new BorderLayout());
 
     final JPanel scrollPanel = new JPanel(new BorderLayout());
 
     final DirectoryProjectGenerator[] generators = Extensions.getExtensions(DirectoryProjectGenerator.EP_NAME);
-    final int height = generators.length == 0 ? 150 : 400;
+    final int height = generators.length == 0 && !myIsWelcomeScreen ? 150 : 400;
     mainPanel.setPreferredSize(new Dimension(mainPanel.getPreferredSize().width, height));
     myErrorLabel = new JLabel("");
     myErrorLabel.setForeground(JBColor.RED);
@@ -273,6 +272,7 @@
   }
 
   public boolean checkValid() {
+    if (myLocationField == null) return true;
     final String projectName = myLocationField.getText();
     setErrorText(null);
     myInstallFramework = false;
@@ -367,22 +367,6 @@
     myErrorLabel.setForeground(MessageType.WARNING.getTitleForeground());
   }
 
-  public void selectCompatiblePython() {
-    //DirectoryProjectGenerator generator = getProjectGenerator();
-    //if (generator instanceof PyFrameworkProjectGenerator && !((PyFrameworkProjectGenerator)generator).supportsPython3()) {
-    //  Sdk sdk = getSdk();
-    //  if (sdk != null && PythonSdkType.getLanguageLevelForSdk(sdk).isPy3K()) {
-    //    Sdk python2Sdk = PythonSdkType.findPython2Sdk(null);
-    //    if (python2Sdk != null) {
-    //      mySdkCombo.getComboBox().setSelectedItem(python2Sdk);
-    //      mySdkCombo.getComboBox().revalidate();
-    //      mySdkCombo.getComboBox().repaint();
-    //
-    //    }
-    //  }
-    //}
-  }
-
   private static boolean acceptsRemoteSdk(DirectoryProjectGenerator generator) {
     if (generator instanceof PyFrameworkProjectGenerator) {
       return ((PyFrameworkProjectGenerator)generator).acceptsRemoteSdk();
diff --git a/python/ide/src/com/jetbrains/python/newProject/actions/GenerateProjectCallback.java b/python/ide/src/com/jetbrains/python/newProject/actions/GenerateProjectCallback.java
new file mode 100644
index 0000000..2aa9a39
--- /dev/null
+++ b/python/ide/src/com/jetbrains/python/newProject/actions/GenerateProjectCallback.java
@@ -0,0 +1,159 @@
+/*
+ * Copyright 2000-2013 JetBrains s.r.o.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package com.jetbrains.python.newProject.actions;
+
+import com.intellij.ide.GeneralSettings;
+import com.intellij.ide.util.projectWizard.WebProjectTemplate;
+import com.intellij.internal.statistic.UsageTrigger;
+import com.intellij.internal.statistic.beans.ConvertUsagesUtil;
+import com.intellij.openapi.application.ApplicationManager;
+import com.intellij.openapi.diagnostic.Logger;
+import com.intellij.openapi.module.Module;
+import com.intellij.openapi.options.ConfigurationException;
+import com.intellij.openapi.project.Project;
+import com.intellij.openapi.project.ProjectManager;
+import com.intellij.openapi.projectRoots.ProjectJdkTable;
+import com.intellij.openapi.projectRoots.Sdk;
+import com.intellij.openapi.projectRoots.SdkAdditionalData;
+import com.intellij.openapi.projectRoots.impl.SdkConfigurationUtil;
+import com.intellij.openapi.roots.ui.configuration.projectRoot.ProjectSdksModel;
+import com.intellij.openapi.ui.Messages;
+import com.intellij.openapi.util.Computable;
+import com.intellij.openapi.vfs.LocalFileSystem;
+import com.intellij.openapi.vfs.VirtualFile;
+import com.intellij.platform.DirectoryProjectGenerator;
+import com.intellij.platform.PlatformProjectOpenProcessor;
+import com.intellij.projectImport.ProjectOpenedCallback;
+import com.intellij.util.NullableConsumer;
+import com.jetbrains.python.configuration.PyConfigurableInterpreterList;
+import com.jetbrains.python.newProject.PyNewProjectSettings;
+import com.jetbrains.python.newProject.PythonProjectGenerator;
+import com.jetbrains.python.sdk.PyDetectedSdk;
+import com.jetbrains.python.sdk.PySdkService;
+import com.jetbrains.python.sdk.PythonSdkAdditionalData;
+import com.jetbrains.python.sdk.PythonSdkType;
+import org.jetbrains.annotations.NotNull;
+import org.jetbrains.annotations.Nullable;
+
+import java.io.File;
+import java.util.List;
+
+public class GenerateProjectCallback implements NullableConsumer<AbstractProjectSettingsStep> {
+  private static final Logger LOG = Logger.getInstance(GenerateProjectCallback.class);
+  @Nullable private final Runnable myRunnable;
+
+  public GenerateProjectCallback(@Nullable final Runnable runnable) {
+
+    myRunnable = runnable;
+  }
+
+  @Override
+  public void consume(@Nullable AbstractProjectSettingsStep settingsStep) {
+    if (myRunnable != null) {
+      myRunnable.run();
+    }
+    if (settingsStep == null) return;
+
+    Sdk sdk = settingsStep.getSdk();
+    final Project project = ProjectManager.getInstance().getDefaultProject();
+    final ProjectSdksModel model = PyConfigurableInterpreterList.getInstance(project).getModel();
+    if (sdk instanceof PyDetectedSdk) {
+      final String name = sdk.getName();
+      VirtualFile sdkHome = ApplicationManager.getApplication().runWriteAction(new Computable<VirtualFile>() {
+        @Override
+        public VirtualFile compute() {
+          return LocalFileSystem.getInstance().refreshAndFindFileByPath(name);
+        }
+      });
+      PySdkService.getInstance().solidifySdk(sdk);
+      sdk = SdkConfigurationUtil.setupSdk(ProjectJdkTable.getInstance().getAllJdks(), sdkHome, PythonSdkType.getInstance(), true, null,
+                                          null);
+      model.addSdk(sdk);
+      settingsStep.setSdk(sdk);
+      try {
+        model.apply();
+      }
+      catch (ConfigurationException exception) {
+        LOG.error("Error adding detected python interpreter " + exception.getMessage());
+      }
+    }
+    Project newProject = generateProject(project, settingsStep);
+    if (newProject != null) {
+      SdkConfigurationUtil.setDirectoryProjectSdk(newProject, sdk);
+      final List<Sdk> sdks = PythonSdkType.getAllSdks();
+      for (Sdk s : sdks) {
+        final SdkAdditionalData additionalData = s.getSdkAdditionalData();
+        if (additionalData instanceof PythonSdkAdditionalData) {
+          ((PythonSdkAdditionalData)additionalData).reassociateWithCreatedProject(newProject);
+        }
+      }
+    }
+  }
+
+  @Nullable
+  private Project generateProject(@NotNull final Project project, @NotNull final AbstractProjectSettingsStep settings) {
+    final DirectoryProjectGenerator generator = settings.getProjectGenerator();
+    final File location = new File(settings.getProjectLocation());
+    if (!location.exists() && !location.mkdirs()) {
+      Messages.showErrorDialog(project, "Cannot create directory '" + location + "'", "Create Project");
+      return null;
+    }
+
+    final VirtualFile baseDir = ApplicationManager.getApplication().runWriteAction(new Computable<VirtualFile>() {
+      public VirtualFile compute() {
+        return LocalFileSystem.getInstance().refreshAndFindFileByIoFile(location);
+      }
+    });
+    LOG.assertTrue(baseDir != null, "Couldn't find '" + location + "' in VFS");
+    baseDir.refresh(false, true);
+
+    if (baseDir.getChildren().length > 0) {
+      int rc = Messages.showYesNoDialog(project,
+                                        "The directory '" + location +
+                                        "' is not empty. Would you like to create a project from existing sources instead?",
+                                        "Create New Project", Messages.getQuestionIcon());
+      if (rc == Messages.YES) {
+        return PlatformProjectOpenProcessor.getInstance().doOpenProject(baseDir, null, false);
+      }
+    }
+
+    String generatorName = generator == null ? "empty" : ConvertUsagesUtil.ensureProperKey(generator.getName());
+    UsageTrigger.trigger("NewDirectoryProjectAction." + generatorName);
+
+    GeneralSettings.getInstance().setLastProjectCreationLocation(location.getParent());
+
+    return PlatformProjectOpenProcessor.doOpenProject(baseDir, null, false, -1, new ProjectOpenedCallback() {
+      @Override
+      public void projectOpened(Project project, Module module) {
+        if (generator != null) {
+          Object projectSettings = null;
+          if (generator instanceof PythonProjectGenerator) {
+            projectSettings = ((PythonProjectGenerator)generator).getProjectSettings();
+          }
+          else if (generator instanceof WebProjectTemplate) {
+            projectSettings = ((WebProjectTemplate)generator).getPeer().getSettings();
+          }
+          if (projectSettings instanceof PyNewProjectSettings) {
+            ((PyNewProjectSettings)projectSettings).setSdk(settings.getSdk());
+            ((PyNewProjectSettings)projectSettings).setInstallFramework(settings.installFramework());
+          }
+          //noinspection unchecked
+          generator.generateProject(project, baseDir, projectSettings, module);
+        }
+      }
+    }, false);
+  }
+}
diff --git a/python/ide/src/com/jetbrains/python/newProject/actions/PluginSpecificProjectsStep.java b/python/ide/src/com/jetbrains/python/newProject/actions/PluginSpecificProjectsStep.java
index a0c5f6e..ca0f360 100644
--- a/python/ide/src/com/jetbrains/python/newProject/actions/PluginSpecificProjectsStep.java
+++ b/python/ide/src/com/jetbrains/python/newProject/actions/PluginSpecificProjectsStep.java
@@ -27,11 +27,11 @@
 public class PluginSpecificProjectsStep extends DefaultActionGroup implements DumbAware {
 
   public PluginSpecificProjectsStep(@NotNull final NullableConsumer<AbstractProjectSettingsStep> callback,
-                                    @NotNull final List<DirectoryProjectGenerator> projectGenerators) {
+                                    @NotNull final List<DirectoryProjectGenerator> projectGenerators, boolean isWelcomeScreen) {
     super("Plugin-specific", true);
     getTemplatePresentation().setIcon(AllIcons.Nodes.PluginLogo);
     for (DirectoryProjectGenerator generator : projectGenerators) {
-      add(new ProjectSpecificAction(callback, generator));
+      add(new ProjectSpecificAction(callback, generator, isWelcomeScreen));
     }
   }
 }
diff --git a/python/ide/src/com/jetbrains/python/newProject/actions/ProjectSpecificAction.java b/python/ide/src/com/jetbrains/python/newProject/actions/ProjectSpecificAction.java
index 4397a09..fbb65f0 100644
--- a/python/ide/src/com/jetbrains/python/newProject/actions/ProjectSpecificAction.java
+++ b/python/ide/src/com/jetbrains/python/newProject/actions/ProjectSpecificAction.java
@@ -28,10 +28,16 @@
   private final ProjectSpecificSettingsStep mySettings;
 
   public ProjectSpecificAction(@NotNull final NullableConsumer<AbstractProjectSettingsStep> callback,
-                               @NotNull final DirectoryProjectGenerator projectGenerator) {
-    super(projectGenerator.getName(), true);
+                               @NotNull final DirectoryProjectGenerator projectGenerator, boolean isWelcomeScreen) {
+    this(callback, projectGenerator, projectGenerator.getName(), isWelcomeScreen);
+  }
+
+  public ProjectSpecificAction(@NotNull final NullableConsumer<AbstractProjectSettingsStep> callback,
+                               @NotNull final DirectoryProjectGenerator projectGenerator,
+                               @NotNull final String name, boolean isWelcomeScreen) {
+    super(name, true);
     getTemplatePresentation().setIcon(projectGenerator.getLogo());
-    mySettings = new ProjectSpecificSettingsStep(projectGenerator, callback);
+    mySettings = new ProjectSpecificSettingsStep(projectGenerator, callback, isWelcomeScreen);
     add(mySettings);
   }
 
diff --git a/python/ide/src/com/jetbrains/python/newProject/actions/ProjectSpecificSettingsStep.java b/python/ide/src/com/jetbrains/python/newProject/actions/ProjectSpecificSettingsStep.java
index 5e60601..b85a06e 100644
--- a/python/ide/src/com/jetbrains/python/newProject/actions/ProjectSpecificSettingsStep.java
+++ b/python/ide/src/com/jetbrains/python/newProject/actions/ProjectSpecificSettingsStep.java
@@ -31,8 +31,8 @@
 public class ProjectSpecificSettingsStep extends AbstractProjectSettingsStep implements DumbAware {
 
   public ProjectSpecificSettingsStep(@NotNull final DirectoryProjectGenerator projectGenerator,
-                                     @NotNull final NullableConsumer<AbstractProjectSettingsStep> callback) {
-    super(projectGenerator, callback);
+                                     @NotNull final NullableConsumer<AbstractProjectSettingsStep> callback, boolean isWelcomeScreen) {
+    super(projectGenerator, callback, isWelcomeScreen);
   }
 
   @Override
diff --git a/python/ide/src/com/jetbrains/python/newProject/actions/PyCharmNewProjectStep.java b/python/ide/src/com/jetbrains/python/newProject/actions/PyCharmNewProjectStep.java
index 4f9f107..0628c6d 100644
--- a/python/ide/src/com/jetbrains/python/newProject/actions/PyCharmNewProjectStep.java
+++ b/python/ide/src/com/jetbrains/python/newProject/actions/PyCharmNewProjectStep.java
@@ -16,153 +16,33 @@
 package com.jetbrains.python.newProject.actions;
 
 import com.google.common.collect.Lists;
-import com.intellij.ide.GeneralSettings;
-import com.intellij.ide.util.projectWizard.WebProjectTemplate;
-import com.intellij.internal.statistic.UsageTrigger;
-import com.intellij.internal.statistic.beans.ConvertUsagesUtil;
 import com.intellij.openapi.actionSystem.DefaultActionGroup;
-import com.intellij.openapi.application.ApplicationManager;
-import com.intellij.openapi.diagnostic.Logger;
 import com.intellij.openapi.extensions.Extensions;
-import com.intellij.openapi.module.Module;
-import com.intellij.openapi.options.ConfigurationException;
 import com.intellij.openapi.project.DumbAware;
-import com.intellij.openapi.project.Project;
-import com.intellij.openapi.project.ProjectManager;
-import com.intellij.openapi.projectRoots.ProjectJdkTable;
-import com.intellij.openapi.projectRoots.Sdk;
-import com.intellij.openapi.projectRoots.SdkAdditionalData;
-import com.intellij.openapi.projectRoots.impl.SdkConfigurationUtil;
-import com.intellij.openapi.roots.ui.configuration.projectRoot.ProjectSdksModel;
-import com.intellij.openapi.ui.Messages;
-import com.intellij.openapi.util.Computable;
-import com.intellij.openapi.vfs.LocalFileSystem;
-import com.intellij.openapi.vfs.VirtualFile;
 import com.intellij.platform.DirectoryProjectGenerator;
-import com.intellij.platform.PlatformProjectOpenProcessor;
-import com.intellij.projectImport.ProjectOpenedCallback;
 import com.intellij.util.NullableConsumer;
-import com.jetbrains.python.configuration.PyConfigurableInterpreterList;
 import com.jetbrains.python.newProject.PyFrameworkProjectGenerator;
-import com.jetbrains.python.newProject.PyNewProjectSettings;
 import com.jetbrains.python.newProject.PythonBaseProjectGenerator;
 import com.jetbrains.python.newProject.PythonProjectGenerator;
-import com.jetbrains.python.sdk.PyDetectedSdk;
-import com.jetbrains.python.sdk.PySdkService;
-import com.jetbrains.python.sdk.PythonSdkAdditionalData;
-import com.jetbrains.python.sdk.PythonSdkType;
 import org.jetbrains.annotations.NotNull;
 import org.jetbrains.annotations.Nullable;
 
-import java.io.File;
 import java.util.Arrays;
 import java.util.Comparator;
 import java.util.List;
 
 public class PyCharmNewProjectStep extends DefaultActionGroup implements DumbAware {
-  private static final Logger LOG = Logger.getInstance(PyCharmNewProjectStep.class);
 
   public PyCharmNewProjectStep(@NotNull final String name, @Nullable final Runnable runnable) {
+    this(name, runnable, false);
+  }
+
+  public PyCharmNewProjectStep(@NotNull final String name, @Nullable final Runnable runnable, boolean isWelcomeScreen) {
     super(name, true);
 
-    final NullableConsumer<AbstractProjectSettingsStep> callback = new NullableConsumer<AbstractProjectSettingsStep>() {
-      @Override
-      public void consume(@Nullable AbstractProjectSettingsStep settingsStep) {
-        if (runnable != null)
-          runnable.run();
-        if (settingsStep == null) return;
+    final NullableConsumer<AbstractProjectSettingsStep> callback = new GenerateProjectCallback(runnable);
 
-        Sdk sdk = settingsStep.getSdk();
-        final Project project = ProjectManager.getInstance().getDefaultProject();
-        final ProjectSdksModel model = PyConfigurableInterpreterList.getInstance(project).getModel();
-        if (sdk instanceof PyDetectedSdk) {
-          final String name = sdk.getName();
-          VirtualFile sdkHome = ApplicationManager.getApplication().runWriteAction(new Computable<VirtualFile>() {
-            @Override
-            public VirtualFile compute() {
-              return LocalFileSystem.getInstance().refreshAndFindFileByPath(name);
-            }
-          });
-          PySdkService.getInstance().solidifySdk(sdk);
-          sdk = SdkConfigurationUtil.setupSdk(ProjectJdkTable.getInstance().getAllJdks(), sdkHome, PythonSdkType.getInstance(), true, null,
-                                              null);
-          model.addSdk(sdk);
-          settingsStep.setSdk(sdk);
-          try {
-            model.apply();
-          }
-          catch (ConfigurationException exception) {
-            LOG.error("Error adding detected python interpreter " + exception.getMessage());
-          }
-        }
-        Project newProject = generateProject(project, settingsStep);
-        if (newProject != null) {
-          SdkConfigurationUtil.setDirectoryProjectSdk(newProject, sdk);
-          final List<Sdk> sdks = PythonSdkType.getAllSdks();
-          for (Sdk s : sdks) {
-            final SdkAdditionalData additionalData = s.getSdkAdditionalData();
-            if (additionalData instanceof PythonSdkAdditionalData) {
-              ((PythonSdkAdditionalData)additionalData).reassociateWithCreatedProject(newProject);
-            }
-          }
-        }
-      }
-
-      @Nullable
-      private Project generateProject(@NotNull final Project project, @NotNull final AbstractProjectSettingsStep settings) {
-        final DirectoryProjectGenerator generator = settings.getProjectGenerator();
-        final File location = new File(settings.getProjectLocation());
-        if (!location.exists() && !location.mkdirs()) {
-          Messages.showErrorDialog(project, "Cannot create directory '" + location + "'", "Create Project");
-          return null;
-        }
-
-        final VirtualFile baseDir = ApplicationManager.getApplication().runWriteAction(new Computable<VirtualFile>() {
-          public VirtualFile compute() {
-            return LocalFileSystem.getInstance().refreshAndFindFileByIoFile(location);
-          }
-        });
-        LOG.assertTrue(baseDir != null, "Couldn't find '" + location + "' in VFS");
-        baseDir.refresh(false, true);
-
-        if (baseDir.getChildren().length > 0) {
-          int rc = Messages.showYesNoDialog(project,
-                                            "The directory '" + location +
-                                            "' is not empty. Would you like to create a project from existing sources instead?",
-                                            "Create New Project", Messages.getQuestionIcon());
-          if (rc == Messages.YES) {
-            return PlatformProjectOpenProcessor.getInstance().doOpenProject(baseDir, null, false);
-          }
-        }
-
-        String generatorName = generator == null ? "empty" : ConvertUsagesUtil.ensureProperKey(generator.getName());
-        UsageTrigger.trigger("NewDirectoryProjectAction." + generatorName);
-
-        GeneralSettings.getInstance().setLastProjectCreationLocation(location.getParent());
-
-        return PlatformProjectOpenProcessor.doOpenProject(baseDir, null, false, -1, new ProjectOpenedCallback() {
-          @Override
-          public void projectOpened(Project project, Module module) {
-            if (generator != null) {
-              Object projectSettings = null;
-              if (generator instanceof PythonProjectGenerator)
-                projectSettings = ((PythonProjectGenerator)generator).getProjectSettings();
-              else if (generator instanceof WebProjectTemplate) {
-                projectSettings = ((WebProjectTemplate)generator).getPeer().getSettings();
-              }
-              if (projectSettings instanceof PyNewProjectSettings) {
-                ((PyNewProjectSettings)projectSettings).setSdk(settings.getSdk());
-                ((PyNewProjectSettings)projectSettings).setInstallFramework(settings.installFramework());
-              }
-              //noinspection unchecked
-              generator.generateProject(project, baseDir, projectSettings, module);
-            }
-          }
-        }, false);
-      }
-    };
-
-    final ProjectSpecificAction action = new ProjectSpecificAction(callback, new PythonBaseProjectGenerator());
+    final ProjectSpecificAction action = new ProjectSpecificAction(callback, new PythonBaseProjectGenerator(), isWelcomeScreen);
     add(action);
 
     final DirectoryProjectGenerator[] generators = Extensions.getExtensions(DirectoryProjectGenerator.EP_NAME);
@@ -181,19 +61,17 @@
     List<DirectoryProjectGenerator> pluginSpecificGenerators = Lists.newArrayList();
     for (DirectoryProjectGenerator generator : generators) {
       if (generator instanceof PythonProjectGenerator)
-        add(new ProjectSpecificAction(callback, generator));
+        add(new ProjectSpecificAction(callback, generator, isWelcomeScreen));
       else
         pluginSpecificGenerators.add(generator);
     }
 
     if (!pluginSpecificGenerators.isEmpty()) {
-      add(new PluginSpecificProjectsStep(callback, pluginSpecificGenerators));
+      add(new PluginSpecificProjectsStep(callback, pluginSpecificGenerators, isWelcomeScreen));
     }
   }
 
   public PyCharmNewProjectStep() {
-    this("Select Project Type", null);
-
+    this("Select Project Type", null, true);
   }
-
 }
diff --git a/python/openapi/src/com/jetbrains/python/run/PythonRunConfigurationParams.java b/python/openapi/src/com/jetbrains/python/run/PythonRunConfigurationParams.java
index 80ca3ce..adb1c31 100644
--- a/python/openapi/src/com/jetbrains/python/run/PythonRunConfigurationParams.java
+++ b/python/openapi/src/com/jetbrains/python/run/PythonRunConfigurationParams.java
@@ -28,5 +28,8 @@
   String getScriptParameters();
 
   void setScriptParameters(String scriptParameters);
+
+  boolean showCommandLineAfterwards();
+  void setShowCommandLineAfterwards(boolean showCommandLineAfterwards);
 }
 
diff --git a/python/pluginSrc/com/jetbrains/python/module/PythonModuleConfigurationEditorProvider.java b/python/pluginSrc/com/jetbrains/python/module/PythonModuleConfigurationEditorProvider.java
index 0120348..50b79c7 100644
--- a/python/pluginSrc/com/jetbrains/python/module/PythonModuleConfigurationEditorProvider.java
+++ b/python/pluginSrc/com/jetbrains/python/module/PythonModuleConfigurationEditorProvider.java
@@ -15,15 +15,16 @@
  */
 package com.jetbrains.python.module;
 
+import com.intellij.openapi.module.Module;
+import com.intellij.openapi.module.ModuleConfigurationEditor;
 import com.intellij.openapi.module.ModuleType;
+import com.intellij.openapi.roots.ui.configuration.DefaultModuleConfigurationEditorFactory;
 import com.intellij.openapi.roots.ui.configuration.ModuleConfigurationEditorProvider;
 import com.intellij.openapi.roots.ui.configuration.ModuleConfigurationState;
-import com.intellij.openapi.roots.ui.configuration.DefaultModuleConfigurationEditorFactory;
-import com.intellij.openapi.module.ModuleConfigurationEditor;
-import com.intellij.openapi.module.Module;
+import org.jetbrains.jps.model.java.JavaSourceRootType;
 
-import java.util.List;
 import java.util.ArrayList;
+import java.util.List;
 
 /**
  * @author yole
@@ -34,7 +35,7 @@
     if (!(ModuleType.get(module) instanceof PythonModuleType)) return ModuleConfigurationEditor.EMPTY;
     final DefaultModuleConfigurationEditorFactory editorFactory = DefaultModuleConfigurationEditorFactory.getInstance();
     final List<ModuleConfigurationEditor> editors = new ArrayList<ModuleConfigurationEditor>();
-    editors.add(editorFactory.createModuleContentRootsEditor(state));
+    editors.add(new PyContentEntriesEditor(module, state, JavaSourceRootType.SOURCE));
     editors.add(editorFactory.createClasspathEditor(state));
     return editors.toArray(new ModuleConfigurationEditor[editors.size()]);
   }
diff --git a/python/psi-api/src/com/jetbrains/python/psi/PyAnnotation.java b/python/psi-api/src/com/jetbrains/python/psi/PyAnnotation.java
index a6182fb..71dc728 100644
--- a/python/psi-api/src/com/jetbrains/python/psi/PyAnnotation.java
+++ b/python/psi-api/src/com/jetbrains/python/psi/PyAnnotation.java
@@ -22,9 +22,7 @@
 /**
  * @author yole
  */
-public interface PyAnnotation extends PyElement, StubBasedPsiElement<PyAnnotationStub> {
-  PyExpression getValue();
-
+public interface PyAnnotation extends PyTypedElement, StubBasedPsiElement<PyAnnotationStub> {
   @Nullable
-  PyClass resolveToClass();
+  PyExpression getValue();
 }
diff --git a/python/psi-api/src/com/jetbrains/python/psi/PyTargetExpression.java b/python/psi-api/src/com/jetbrains/python/psi/PyTargetExpression.java
index 0b67d85..399b8f8 100644
--- a/python/psi-api/src/com/jetbrains/python/psi/PyTargetExpression.java
+++ b/python/psi-api/src/com/jetbrains/python/psi/PyTargetExpression.java
@@ -28,7 +28,7 @@
  * @author yole
  */
 public interface PyTargetExpression extends PyQualifiedExpression, PsiNamedElement, PsiNameIdentifierOwner, PyDocStringOwner,
-                                            StubBasedPsiElement<PyTargetExpressionStub> {
+                                            PyQualifiedNameOwner, StubBasedPsiElement<PyTargetExpressionStub> {
   PyTargetExpression[] EMPTY_ARRAY = new PyTargetExpression[0];
 
   /**
diff --git a/python/pydevSrc/com/jetbrains/python/debugger/IPyDebugProcess.java b/python/pydevSrc/com/jetbrains/python/debugger/IPyDebugProcess.java
index 1714f71..ab6ccfa 100644
--- a/python/pydevSrc/com/jetbrains/python/debugger/IPyDebugProcess.java
+++ b/python/pydevSrc/com/jetbrains/python/debugger/IPyDebugProcess.java
@@ -23,4 +23,6 @@
   int handleDebugPort(int port) throws IOException;
 
   void recordSignature(PySignature signature);
+
+  void showConsole(PyThreadInfo thread);
 }
diff --git a/python/pydevSrc/com/jetbrains/python/debugger/pydev/AbstractCommand.java b/python/pydevSrc/com/jetbrains/python/debugger/pydev/AbstractCommand.java
index efa86d4..15bb76b 100644
--- a/python/pydevSrc/com/jetbrains/python/debugger/pydev/AbstractCommand.java
+++ b/python/pydevSrc/com/jetbrains/python/debugger/pydev/AbstractCommand.java
@@ -33,6 +33,8 @@
   public static final int SMART_STEP_INTO = 128;
   public static final int EXIT = 129;
   public static final int CALL_SIGNATURE_TRACE = 130;
+  public static final int SHOW_CONSOLE = 142;
+  public static final int ERROR = 901;
 
   public static final int VERSION = 501;
   public static final String NEW_LINE_CHAR = "@_@NEW_LINE_CHAR@_@";
@@ -180,6 +182,10 @@
     return command == EXIT;
   }
 
+  public static boolean isErrorEvent(int command) {
+    return command == ERROR;
+  }
+
   protected static class Payload {
     private final StringBuilder myBuilder = new StringBuilder();
     private static final char SEPARATOR = '\t';
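A hedged Python-side sketch of how a client might branch on the two new command ids (the ids are copied from the constants above; the handler names are hypothetical and not part of pydevd):

SHOW_CONSOLE = 142   # thread-style event: suspend the thread and show the console
ERROR = 901          # error event: logged instead of being matched to a request

def dispatch(command, payload, show_console, log_error, place_response):
    if command == ERROR:
        log_error('Error response from debugger: %s' % payload)
    elif command == SHOW_CONSOLE:
        show_console(payload)
    else:
        place_response(payload)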
diff --git a/python/pydevSrc/com/jetbrains/python/debugger/pydev/AbstractThreadCommand.java b/python/pydevSrc/com/jetbrains/python/debugger/pydev/AbstractThreadCommand.java
index 690d893..b235b78 100644
--- a/python/pydevSrc/com/jetbrains/python/debugger/pydev/AbstractThreadCommand.java
+++ b/python/pydevSrc/com/jetbrains/python/debugger/pydev/AbstractThreadCommand.java
@@ -18,11 +18,11 @@
     return command == CREATE_THREAD ||
            command == KILL_THREAD ||
            command == RESUME_THREAD ||
-           command == SUSPEND_THREAD;
+           command == SUSPEND_THREAD ||
+           command == SHOW_CONSOLE;
   }
 
   public String getThreadId() {
     return myThreadId;
   }
-
 }
diff --git a/python/pydevSrc/com/jetbrains/python/debugger/pydev/RemoteDebugger.java b/python/pydevSrc/com/jetbrains/python/debugger/pydev/RemoteDebugger.java
index 5f962b2..df33148 100644
--- a/python/pydevSrc/com/jetbrains/python/debugger/pydev/RemoteDebugger.java
+++ b/python/pydevSrc/com/jetbrains/python/debugger/pydev/RemoteDebugger.java
@@ -396,7 +396,8 @@
     if (type != null) {
       final RemoveBreakpointCommand command = new RemoveBreakpointCommand(this, type, file, line);
       execute(command);  // remove temp. breakpoint
-    } else {
+    }
+    else {
       LOG.error("Temp breakpoint not found for " + file + ":" + line);
     }
   }
@@ -474,6 +475,9 @@
         else if (AbstractCommand.isCallSignatureTrace(frame.getCommand())) {
           recordCallSignature(ProtocolParser.parseCallSignature(frame.getPayload()));
         }
+        else if (AbstractCommand.isErrorEvent(frame.getCommand())) {
+          LOG.error("Error response from debugger: " + frame.getPayload());
+        }
         else {
           placeResponse(frame.getSequence(), frame);
         }
@@ -530,6 +534,19 @@
           }
           break;
         }
+        case AbstractCommand.SHOW_CONSOLE: {
+          final PyThreadInfo event = parseThreadEvent(frame);
+          PyThreadInfo thread = myThreads.get(event.getId());
+          if (thread == null) {
+            myThreads.put(event.getId(), event);
+            thread = event;
+          }
+          thread.updateState(PyThreadInfo.State.SUSPENDED, event.getFrames());
+          thread.setStopReason(event.getStopReason());
+          thread.setMessage(event.getMessage());
+          myDebugProcess.showConsole(thread);
+          break;
+        }
       }
     }
 
diff --git a/python/rest/gen/com/jetbrains/rest/lexer/_RestFlexLexer.java b/python/rest/gen/com/jetbrains/rest/lexer/_RestFlexLexer.java
index 8fa7e54..f79403d 100644
--- a/python/rest/gen/com/jetbrains/rest/lexer/_RestFlexLexer.java
+++ b/python/rest/gen/com/jetbrains/rest/lexer/_RestFlexLexer.java
@@ -1,4 +1,4 @@
-/* The following code was generated by JFlex 1.4.3 on 4/26/14 12:40 PM */
+/* The following code was generated by JFlex 1.4.3 on 8/25/14 1:56 PM */
 
 package com.jetbrains.rest.lexer;
 
@@ -11,8 +11,8 @@
 /**
  * This class is a scanner generated by 
  * <a href="http://www.jflex.de/">JFlex</a> 1.4.3
- * on 4/26/14 12:40 PM from the specification file
- * <tt>/Users/ignatov/src/ultimate/tools/lexer/../../community/python/rest/src/com/jetbrains/rest/lexer/rest.flex</tt>
+ * on 8/25/14 1:56 PM from the specification file
+ * <tt>/home/ktisha/IDEA/tools/lexer/../../community/python/rest/src/com/jetbrains/rest/lexer/rest.flex</tt>
  */
 public class _RestFlexLexer implements FlexLexer, RestTokenTypes {
   /** initial size of the lookahead buffer */
@@ -1293,7 +1293,7 @@
     "\1\232\21\42\1\0\1\33\1\0\1\42\1\33\1\42"+
     "\7\0\1\164\1\0\1\165\1\33\4\0\1\33\6\0"+
     "\4\42\1\233\22\42\1\0\1\33\1\0\1\42\1\33"+
-    "\1\42\12\0\1\33\4\0\1\33\6\0\27\56\1\0"+
+    "\1\42\12\0\1\33\4\0\1\56\6\0\27\56\1\0"+
     "\1\234\1\0\1\56\1\33\1\56\12\0\1\33\4\0"+
     "\1\33\1\235\1\0\1\236\3\0\27\237\1\0\1\33"+
     "\1\0\1\237\1\33\1\240\1\241\2\0\1\242\1\243"+
diff --git a/python/rest/src/com/jetbrains/rest/lexer/rest.flex b/python/rest/src/com/jetbrains/rest/lexer/rest.flex
index ee0b331..354779f 100644
--- a/python/rest/src/com/jetbrains/rest/lexer/rest.flex
+++ b/python/rest/src/com/jetbrains/rest/lexer/rest.flex
@@ -170,7 +170,7 @@
 {USUAL_TYPES}"::"                                   { yybegin(IN_VALUE); return DIRECTIVE;}
 {HIGHLIGHT_TYPES}"::"                               { yybegin(IN_HIGHLIGHT); return CUSTOM_DIRECTIVE;}
 [0-9A-Za-z\-:]*"::"                                 { yybegin(IN_VALUE); return CUSTOM_DIRECTIVE;}
-"|"[0-9A-Za-z]*"|"                                  { return SUBSTITUTION;}
+"|"[0-9A-Za-z_]*"|"                                 { return SUBSTITUTION;}
 [0-9A-Za-z_\[|.]*                                   { yybegin(IN_COMMENT); return COMMENT;}
 {CRLF}{2}                                           { yybegin(INIT); return COMMENT;}
 {SPACE}*{CRLF}+                                     { return WHITESPACE; }
diff --git a/python/rest/src/com/jetbrains/rest/parsing/RestParser.java b/python/rest/src/com/jetbrains/rest/parsing/RestParser.java
index 791d94d..1df9fc1 100644
--- a/python/rest/src/com/jetbrains/rest/parsing/RestParser.java
+++ b/python/rest/src/com/jetbrains/rest/parsing/RestParser.java
@@ -95,7 +95,7 @@
       listMarker.drop();
   }
 
-  private void parseMarkup(PsiBuilder builder) {
+  private static void parseMarkup(PsiBuilder builder) {
     PsiBuilder.Marker marker = builder.mark();
     IElementType type = builder.getTokenType();
     if (type == RestTokenTypes.SUBSTITUTION) {
@@ -131,19 +131,19 @@
     }
   }
 
-  private void gotoNextWhiteSpaces(PsiBuilder builder) {
+  private static void gotoNextWhiteSpaces(PsiBuilder builder) {
      while(!"\n".equals(builder.getTokenText()) && !(builder.getTokenType() == RestTokenTypes.TITLE) && !builder.eof() && (builder.getTokenType() != null)) {
        builder.advanceLexer();
     }
   }
 
-  private void skipBlankLines(PsiBuilder builder) {
+  private static void skipBlankLines(PsiBuilder builder) {
      while("\n".equals(builder.getTokenText()) && !builder.eof() && (builder.getTokenType() != null)) {
        builder.advanceLexer();
     }
   }
 
-  private void parseDirective(PsiBuilder builder, String white, PsiBuilder.Marker marker) {
+  private static void parseDirective(PsiBuilder builder, String white, PsiBuilder.Marker marker) {
     gotoNextWhiteSpaces(builder);
     if (builder.getTokenType() != RestTokenTypes.WHITESPACE) {
       builder.advanceLexer();
@@ -157,7 +157,6 @@
     }
     else {
       marker.done(RestElementTypes.DIRECTIVE_BLOCK);
-      return;
     }
   }
 }
diff --git a/python/src/META-INF/pycharm-core.xml b/python/src/META-INF/pycharm-core.xml
index 4732ad0..a9f7824 100644
--- a/python/src/META-INF/pycharm-core.xml
+++ b/python/src/META-INF/pycharm-core.xml
@@ -8,6 +8,9 @@
   </xi:include>
   <xi:include href="/META-INF/RegExpPlugin.xml" xpointer="xpointer(/idea-plugin/*)"/>
   <xi:include href="/META-INF/SpellCheckerPlugin.xml" xpointer="xpointer(/idea-plugin/*)"/>
+  <xi:include href="/META-INF/RemoteServers.xml" xpointer="xpointer(/idea-plugin/*)">
+    <xi:fallback/>
+  </xi:include>
 
   <application-components>
     <component>
diff --git a/python/src/META-INF/python-core.xml b/python/src/META-INF/python-core.xml
index 292237a..7adbc46 100644
--- a/python/src/META-INF/python-core.xml
+++ b/python/src/META-INF/python-core.xml
@@ -589,6 +589,9 @@
     <pyClassMembersProvider implementation="com.jetbrains.python.codeInsight.userSkeletons.PyUserSkeletonsClassMembersProvider"/>
     <typeProvider implementation="com.jetbrains.python.codeInsight.userSkeletons.PyUserSkeletonsTypeProvider"/>
 
+    <!-- typing -->
+    <typeProvider implementation="com.jetbrains.python.codeInsight.PyTypingTypeProvider"/>
+
     <typeProvider implementation="com.jetbrains.python.debugger.PyCallSignatureTypeProvider"/>
     <pyReferenceResolveProvider implementation="com.jetbrains.python.psi.resolve.PythonBuiltinReferenceResolveProvider"/>
 
diff --git a/python/src/com/jetbrains/python/codeInsight/PyTypingTypeProvider.java b/python/src/com/jetbrains/python/codeInsight/PyTypingTypeProvider.java
new file mode 100644
index 0000000..f03befa
--- /dev/null
+++ b/python/src/com/jetbrains/python/codeInsight/PyTypingTypeProvider.java
@@ -0,0 +1,375 @@
+/*
+ * Copyright 2000-2014 JetBrains s.r.o.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package com.jetbrains.python.codeInsight;
+
+import com.google.common.collect.ImmutableMap;
+import com.google.common.collect.ImmutableSet;
+import com.intellij.openapi.project.Project;
+import com.intellij.psi.PsiElement;
+import com.intellij.psi.PsiPolyVariantReference;
+import com.intellij.psi.util.QualifiedName;
+import com.jetbrains.python.PyNames;
+import com.jetbrains.python.psi.*;
+import com.jetbrains.python.psi.impl.PyExpressionCodeFragmentImpl;
+import com.jetbrains.python.psi.impl.PyPsiUtils;
+import com.jetbrains.python.psi.resolve.PyResolveContext;
+import com.jetbrains.python.psi.types.*;
+import org.jetbrains.annotations.NotNull;
+import org.jetbrains.annotations.Nullable;
+
+import java.util.ArrayList;
+import java.util.Collections;
+import java.util.List;
+
+/**
+ * @author vlan
+ */
+public class PyTypingTypeProvider extends PyTypeProviderBase {
+  private static ImmutableMap<String, String> BUILTIN_COLLECTIONS = ImmutableMap.<String, String>builder()
+    .put("typing.List", "list")
+    .put("typing.Dict", "dict")
+    .put("typing.Set", PyNames.SET)
+    .put("typing.Tuple", PyNames.TUPLE)
+    .build();
+
+  private static ImmutableSet<String> GENERIC_CLASSES = ImmutableSet.<String>builder()
+    .add("typing.Generic")
+    .add("typing.AbstractGeneric")
+    .add("typing.Protocol")
+    .build();
+
+  public PyType getParameterType(@NotNull PyNamedParameter param, @NotNull PyFunction func, @NotNull TypeEvalContext context) {
+    final PyAnnotation annotation = param.getAnnotation();
+    if (annotation != null) {
+      // XXX: Requires switching from stub to AST
+      final PyExpression value = annotation.getValue();
+      if (value != null) {
+        return getTypingType(value, context);
+      }
+    }
+    return null;
+  }
+
+  @Nullable
+  @Override
+  public PyType getReturnType(@NotNull Callable callable, @NotNull TypeEvalContext context) {
+    if (callable instanceof PyFunction) {
+      final PyFunction function = (PyFunction)callable;
+      final PyAnnotation annotation = function.getAnnotation();
+      if (annotation != null) {
+        // XXX: Requires switching from stub to AST
+        final PyExpression value = annotation.getValue();
+        if (value != null) {
+          return getTypingType(value, context);
+        }
+      }
+      final PyType constructorType = getGenericConstructorType(function, context);
+      if (constructorType != null) {
+        return constructorType;
+      }
+    }
+    return null;
+  }
+
+  @Nullable
+  private static PyType getGenericConstructorType(@NotNull PyFunction function, @NotNull TypeEvalContext context) {
+    if (PyUtil.isInit(function)) {
+      final PyClass cls = function.getContainingClass();
+      if (cls != null) {
+        final List<PyGenericType> genericTypes = collectGenericTypes(cls, context);
+
+        final PyType elementType;
+        if (genericTypes.size() == 1) {
+          elementType = genericTypes.get(0);
+        }
+        else if (genericTypes.size() > 1) {
+          elementType = PyTupleType.create(cls, genericTypes.toArray(new PyType[genericTypes.size()]));
+        }
+        else {
+          elementType = null;
+        }
+
+        if (elementType != null) {
+          return new PyCollectionTypeImpl(cls, false, elementType);
+        }
+      }
+    }
+    return null;
+  }
+
+  @NotNull
+  private static List<PyGenericType> collectGenericTypes(@NotNull PyClass cls, @NotNull TypeEvalContext context) {
+    boolean isGeneric = false;
+    for (PyClass ancestor : cls.getAncestorClasses(context)) {
+      if (GENERIC_CLASSES.contains(ancestor.getQualifiedName())) {
+        isGeneric = true;
+        break;
+      }
+    }
+    if (isGeneric) {
+      final ArrayList<PyGenericType> results = new ArrayList<PyGenericType>();
+      // XXX: Requires switching from stub to AST
+      for (PyExpression expr : cls.getSuperClassExpressions()) {
+        if (expr instanceof PySubscriptionExpression) {
+          final PyExpression indexExpr = ((PySubscriptionExpression)expr).getIndexExpression();
+          if (indexExpr != null) {
+            final PyGenericType genericType = getGenericType(indexExpr, context);
+            if (genericType != null) {
+              results.add(genericType);
+            }
+          }
+        }
+      }
+      return results;
+    }
+    return Collections.emptyList();
+  }
+
+  @Nullable
+  private static PyType getTypingType(@NotNull PyExpression expression, @NotNull TypeEvalContext context) {
+    final PyType unionType = getUnionType(expression, context);
+    if (unionType != null) {
+      return unionType;
+    }
+    final PyType parameterizedType = getParameterizedType(expression, context);
+    if (parameterizedType != null) {
+      return parameterizedType;
+    }
+    final PyType builtinCollection = getBuiltinCollection(expression, context);
+    if (builtinCollection != null) {
+      return builtinCollection;
+    }
+    final PyType genericType = getGenericType(expression, context);
+    if (genericType != null) {
+      return genericType;
+    }
+    final PyType functionType = getFunctionType(expression, context);
+    if (functionType != null) {
+      return functionType;
+    }
+    final PyType stringBasedType = getStringBasedType(expression, context);
+    if (stringBasedType != null) {
+      return stringBasedType;
+    }
+    return null;
+  }
+
+  @Nullable
+  private static PyType getStringBasedType(@NotNull PyExpression expression, @NotNull TypeEvalContext context) {
+    if (expression instanceof PyStringLiteralExpression) {
+      // XXX: Requires switching from stub to AST
+      final String contents = ((PyStringLiteralExpression)expression).getStringValue();
+      final Project project = expression.getProject();
+      final PyExpressionCodeFragmentImpl codeFragment = new PyExpressionCodeFragmentImpl(project, "dummy.py", contents, false);
+      codeFragment.setContext(expression.getContainingFile());
+      final PsiElement element = codeFragment.getFirstChild();
+      if (element instanceof PyExpressionStatement) {
+        final PyExpression dummyExpr = ((PyExpressionStatement)element).getExpression();
+        return getType(dummyExpr, context);
+      }
+    }
+    return null;
+  }
+
+  @Nullable
+  private static PyType getFunctionType(@NotNull PyExpression expression, @NotNull TypeEvalContext context) {
+    if (expression instanceof PySubscriptionExpression) {
+      final PySubscriptionExpression subscriptionExpr = (PySubscriptionExpression)expression;
+      final PyExpression operand = subscriptionExpr.getOperand();
+      final String operandName = resolveToQualifiedName(operand, context);
+      if ("typing.Function".equals(operandName)) {
+        final PyExpression indexExpr = subscriptionExpr.getIndexExpression();
+        if (indexExpr instanceof PyTupleExpression) {
+          final PyTupleExpression tupleExpr = (PyTupleExpression)indexExpr;
+          final PyExpression[] elements = tupleExpr.getElements();
+          if (elements.length == 2) {
+            final PyExpression parametersExpr = elements[0];
+            if (parametersExpr instanceof PyListLiteralExpression) {
+              final List<PyCallableParameter> parameters = new ArrayList<PyCallableParameter>();
+              final PyListLiteralExpression listExpr = (PyListLiteralExpression)parametersExpr;
+              for (PyExpression argExpr : listExpr.getElements()) {
+                parameters.add(new PyCallableParameterImpl(null, getType(argExpr, context)));
+              }
+              final PyExpression returnTypeExpr = elements[1];
+              final PyType returnType = getType(returnTypeExpr, context);
+              return new PyCallableTypeImpl(parameters, returnType);
+            }
+          }
+        }
+      }
+    }
+    return null;
+  }
+
+  @Nullable
+  private static PyType getUnionType(@NotNull PyExpression expression, @NotNull TypeEvalContext context) {
+    if (expression instanceof PySubscriptionExpression) {
+      final PySubscriptionExpression subscriptionExpr = (PySubscriptionExpression)expression;
+      final PyExpression operand = subscriptionExpr.getOperand();
+      final String operandName = resolveToQualifiedName(operand, context);
+      if ("typing.Union".equals(operandName)) {
+        return PyUnionType.union(getIndexTypes(subscriptionExpr, context));
+      }
+    }
+    return null;
+  }
+
+  @Nullable
+  private static PyGenericType getGenericType(@NotNull PyExpression expression, @NotNull TypeEvalContext context) {
+    final PsiElement resolved = resolve(expression, context);
+    if (resolved instanceof PyTargetExpression) {
+      final PyTargetExpression targetExpr = (PyTargetExpression)resolved;
+      final QualifiedName calleeName = targetExpr.getCalleeName();
+      if (calleeName != null && "typevar".equals(calleeName.toString())) {
+        // XXX: Requires switching from stub to AST
+        final PyExpression assigned = targetExpr.findAssignedValue();
+        if (assigned instanceof PyCallExpression) {
+          final PyCallExpression assignedCall = (PyCallExpression)assigned;
+          final PyExpression callee = assignedCall.getCallee();
+          if (callee != null) {
+            final String calleeQName = resolveToQualifiedName(callee, context);
+            if ("typing.typevar".equals(calleeQName)) {
+              final PyExpression[] arguments = assignedCall.getArguments();
+              if (arguments.length > 0) {
+                final PyExpression firstArgument = arguments[0];
+                if (firstArgument instanceof PyStringLiteralExpression) {
+                  final String name = ((PyStringLiteralExpression)firstArgument).getStringValue();
+                  if (name != null) {
+                    return new PyGenericType(name, getGenericTypeBound(arguments, context));
+                  }
+                }
+              }
+            }
+
+          }
+        }
+      }
+    }
+    return null;
+  }
+
+  @Nullable
+  private static PyType getGenericTypeBound(@NotNull PyExpression[] typeVarArguments, @NotNull TypeEvalContext context) {
+    final List<PyType> types = new ArrayList<PyType>();
+    if (typeVarArguments.length > 1) {
+      final PyExpression secondArgument = typeVarArguments[1];
+      if (secondArgument instanceof PyKeywordArgument) {
+        final PyKeywordArgument valuesArgument = (PyKeywordArgument)secondArgument;
+        final PyExpression valueExpr = PyPsiUtils.flattenParens(valuesArgument.getValueExpression());
+        if (valueExpr instanceof PyTupleExpression) {
+          final PyTupleExpression tupleExpr = (PyTupleExpression)valueExpr;
+          for (PyExpression expr : tupleExpr.getElements()) {
+            types.add(getType(expr, context));
+          }
+        }
+      }
+    }
+    return PyUnionType.union(types);
+  }
+
+  @NotNull
+  private static List<PyType> getIndexTypes(@NotNull PySubscriptionExpression expression, @NotNull TypeEvalContext context) {
+    final List<PyType> types = new ArrayList<PyType>();
+    final PyExpression indexExpr = expression.getIndexExpression();
+    if (indexExpr instanceof PyTupleExpression) {
+      final PyTupleExpression tupleExpr = (PyTupleExpression)indexExpr;
+      for (PyExpression expr : tupleExpr.getElements()) {
+        types.add(getType(expr, context));
+      }
+    }
+    return types;
+  }
+
+  @Nullable
+  private static PyType getParameterizedType(@NotNull PyExpression expression, @NotNull TypeEvalContext context) {
+    if (expression instanceof PySubscriptionExpression) {
+      final PySubscriptionExpression subscriptionExpr = (PySubscriptionExpression)expression;
+      final PyExpression operand = subscriptionExpr.getOperand();
+      final PyExpression indexExpr = subscriptionExpr.getIndexExpression();
+      final PyType operandType = getType(operand, context);
+      if (operandType instanceof PyClassType) {
+        final PyClass cls = ((PyClassType)operandType).getPyClass();
+        if (PyNames.TUPLE.equals(cls.getQualifiedName())) {
+          final List<PyType> indexTypes = getIndexTypes(subscriptionExpr, context);
+          return PyTupleType.create(expression, indexTypes.toArray(new PyType[indexTypes.size()]));
+        }
+        else if (indexExpr != null) {
+          final PyType indexType = context.getType(indexExpr);
+          return new PyCollectionTypeImpl(cls, false, indexType);
+        }
+      }
+    }
+    return null;
+  }
+
+  @Nullable
+  private static PyType getBuiltinCollection(@NotNull PyExpression expression, @NotNull TypeEvalContext context) {
+    final String collectionName = resolveToQualifiedName(expression, context);
+    final String builtinName = BUILTIN_COLLECTIONS.get(collectionName);
+    return builtinName != null ? PyTypeParser.getTypeByName(expression, builtinName) : null;
+  }
+
+  @Nullable
+  private static PyType getType(@NotNull PyExpression expression, @NotNull TypeEvalContext context) {
+    // It is possible to replace PyAnnotation.getType() with this implementation
+    final PyType typingType = getTypingType(expression, context);
+    if (typingType != null) {
+      return typingType;
+    }
+    final PyType type = context.getType(expression);
+    if (type instanceof PyClassLikeType) {
+      final PyClassLikeType classType = (PyClassLikeType)type;
+      if (classType.isDefinition()) {
+        return classType.toInstance();
+      }
+    }
+    else if (type instanceof PyNoneType) {
+      return type;
+    }
+    return null;
+  }
+
+  @Nullable
+  private static PsiElement resolve(@NotNull PyExpression expression, @NotNull TypeEvalContext context) {
+    if (expression instanceof PyReferenceOwner) {
+      final PyReferenceOwner referenceOwner = (PyReferenceOwner)expression;
+      final PyResolveContext resolveContext = PyResolveContext.noImplicits().withTypeEvalContext(context);
+      final PsiPolyVariantReference reference = referenceOwner.getReference(resolveContext);
+      final PsiElement element = reference.resolve();
+      if (element instanceof PyFunction) {
+        final PyFunction function = (PyFunction)element;
+        if (PyUtil.isInit(function)) {
+          final PyClass cls = function.getContainingClass();
+          if (cls != null) {
+            return cls;
+          }
+        }
+      }
+      return element;
+    }
+    return null;
+  }
+
+  @Nullable
+  private static String resolveToQualifiedName(@NotNull PyExpression expression, @NotNull TypeEvalContext context) {
+    final PsiElement element = resolve(expression, context);
+    if (element instanceof PyQualifiedNameOwner) {
+      final PyQualifiedNameOwner qualifiedNameOwner = (PyQualifiedNameOwner)element;
+      return qualifiedNameOwner.getQualifiedName();
+    }
+    return null;
+  }
+}
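
The annotation forms resolved by the provider above look roughly like the Python sketch below. It assumes the 2014 typing draft this code targets (lowercase typevar, Function rather than today's Callable); TypeVar stands in for the draft's typevar and Function is kept as a string annotation purely so the sketch runs on a current interpreter.

    from typing import Generic, List, TypeVar, Union

    # The provider resolves the draft spelling typing.typevar; TypeVar is the
    # current equivalent and is used here only so the sketch executes as-is.
    T = TypeVar('T')

    class Box(Generic[T]):
        # getGenericConstructorType(): calling Box(...) is inferred as a Box of T
        def __init__(self, item: T) -> None:
            self.item = item

    def first(xs: List[int]) -> Union[int, None]:
        # List[...] maps to the builtin list; Union[...] becomes a PyUnionType
        return xs[0] if xs else None

    def apply(f: 'Function[[int], int]', x: int) -> int:
        # string-based annotation, parsed as an expression by getStringBasedType();
        # Function[[args], result] is the draft spelling of a callable type
        return f(x)
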
diff --git a/python/src/com/jetbrains/python/codeInsight/highlighting/PyHighlightExitPointsHandlerFactory.java b/python/src/com/jetbrains/python/codeInsight/highlighting/PyHighlightExitPointsHandlerFactory.java
index d0bc9dc..27522d6 100644
--- a/python/src/com/jetbrains/python/codeInsight/highlighting/PyHighlightExitPointsHandlerFactory.java
+++ b/python/src/com/jetbrains/python/codeInsight/highlighting/PyHighlightExitPointsHandlerFactory.java
@@ -1,5 +1,5 @@
 /*
- * Copyright 2000-2013 JetBrains s.r.o.
+ * Copyright 2000-2014 JetBrains s.r.o.
  *
  * Licensed under the Apache License, Version 2.0 (the "License");
  * you may not use this file except in compliance with the License.
@@ -15,30 +15,26 @@
  */
 package com.jetbrains.python.codeInsight.highlighting;
 
-import com.intellij.codeInsight.TargetElementUtilBase;
 import com.intellij.codeInsight.highlighting.HighlightUsagesHandlerBase;
-import com.intellij.codeInsight.highlighting.HighlightUsagesHandlerFactory;
+import com.intellij.codeInsight.highlighting.HighlightUsagesHandlerFactoryBase;
 import com.intellij.openapi.editor.Editor;
 import com.intellij.psi.PsiElement;
 import com.intellij.psi.PsiFile;
 import com.intellij.psi.util.PsiTreeUtil;
 import com.jetbrains.python.psi.PyExpression;
 import com.jetbrains.python.psi.PyReturnStatement;
+import org.jetbrains.annotations.NotNull;
 
 /**
  * @author oleg
  */
-public class PyHighlightExitPointsHandlerFactory implements HighlightUsagesHandlerFactory {
-  public HighlightUsagesHandlerBase createHighlightUsagesHandler(final Editor editor, final PsiFile file) {
-    int offset = TargetElementUtilBase.adjustOffset(file, editor.getDocument(), editor.getCaretModel().getOffset());
-    PsiElement target = file.findElementAt(offset);
-    if (target != null) {
-      final PyReturnStatement returnStatement = PsiTreeUtil.getParentOfType(target, PyReturnStatement.class);
-      if (returnStatement != null) {
-        final PyExpression returnExpr = returnStatement.getExpression();
-        if (returnExpr == null || !PsiTreeUtil.isAncestor(returnExpr, target, false)) {
-          return new PyHighlightExitPointsHandler(editor, file, target);
-        }
+public class PyHighlightExitPointsHandlerFactory extends HighlightUsagesHandlerFactoryBase {
+  public HighlightUsagesHandlerBase createHighlightUsagesHandler(@NotNull Editor editor, @NotNull PsiFile file, @NotNull PsiElement target) {
+    final PyReturnStatement returnStatement = PsiTreeUtil.getParentOfType(target, PyReturnStatement.class);
+    if (returnStatement != null) {
+      final PyExpression returnExpr = returnStatement.getExpression();
+      if (returnExpr == null || !PsiTreeUtil.isAncestor(returnExpr, target, false)) {
+        return new PyHighlightExitPointsHandler(editor, file, target);
       }
     }
     return null;
diff --git a/python/src/com/jetbrains/python/codeInsight/intentions/ImportToImportFromIntention.java b/python/src/com/jetbrains/python/codeInsight/intentions/ImportToImportFromIntention.java
index fc35833..e76240c 100644
--- a/python/src/com/jetbrains/python/codeInsight/intentions/ImportToImportFromIntention.java
+++ b/python/src/com/jetbrains/python/codeInsight/intentions/ImportToImportFromIntention.java
@@ -57,10 +57,11 @@
     private PsiElement myReferee = null;
     private PyImportElement myImportElement = null;
     private Collection<PsiReference> myReferences = null;
-    private boolean myHasModuleReference = false; // is anything that resolves to our imported module is just an exact reference to that module
+    private boolean myHasModuleReference = false;
+      // whether anything that resolves to our imported module is just an exact reference to that module
     private int myRelativeLevel; // number of leading dots in a relative "from ... import" (0 if absolute)
 
-    public IntentionState(Editor editor, PsiFile file) {
+    public IntentionState(@NotNull Editor editor, @NotNull PsiFile file) {
       boolean available = false;
       myImportElement = findImportElement(editor, file);
       if (myImportElement != null) {
@@ -70,10 +71,10 @@
           available = true;
         }
         else if (parent instanceof PyFromImportStatement) {
-          PyFromImportStatement from_import = (PyFromImportStatement)parent;
-          final int relative_level = from_import.getRelativeLevel();
-          if (from_import.isValid() && relative_level > 0 && from_import.getImportSource() == null) {
-            myRelativeLevel = relative_level;
+          final PyFromImportStatement fromImport = (PyFromImportStatement)parent;
+          final int relativeLevel = fromImport.getRelativeLevel();
+          if (fromImport.isValid() && relativeLevel > 0 && fromImport.getImportSource() == null) {
+            myRelativeLevel = relativeLevel;
             available = true;
           }
         }
@@ -92,25 +93,27 @@
       assert myImportElement != null : "isAvailable() must have returned true, but myImportElement is null";
 
       // usages of imported name are qualifiers; what they refer to?
-      PyReferenceExpression reference = myImportElement.getImportReferenceExpression();
-      if (reference != null) {
-        myModuleName = PyPsiUtils.toPath(reference);
+      final PyReferenceExpression importReference = myImportElement.getImportReferenceExpression();
+      if (importReference != null) {
+        myModuleName = PyPsiUtils.toPath(importReference);
         myQualifierName = myImportElement.getVisibleName();
-        myReferee = reference.getReference().resolve();
+        myReferee = importReference.getReference().resolve();
         myHasModuleReference = false;
         if (myReferee != null && myModuleName != null && myQualifierName != null) {
           final Collection<PsiReference> references = new ArrayList<PsiReference>();
           PsiTreeUtil.processElements(file, new PsiElementProcessor() {
             public boolean execute(@NotNull PsiElement element) {
               if (element instanceof PyReferenceExpression && PsiTreeUtil.getParentOfType(element, PyImportElement.class) == null) {
-                PyReferenceExpression ref = (PyReferenceExpression)element;
+                final PyReferenceExpression ref = (PyReferenceExpression)element;
                 if (myQualifierName.equals(PyPsiUtils.toPath(ref))) {  // filter out other names that might resolve to our target
-                  PsiElement parent_elt = ref.getParent();
-                  if (parent_elt instanceof PyQualifiedExpression) { // really qualified by us, not just referencing?
-                    PsiElement resolved = ref.getReference().resolve();
+                  final PsiElement parentElt = ref.getParent();
+                  if (parentElt instanceof PyQualifiedExpression) { // really qualified by us, not just referencing?
+                    final PsiElement resolved = ref.getReference().resolve();
                     if (resolved == myReferee) references.add(ref.getReference());
                   }
-                  else myHasModuleReference = true;
+                  else {
+                    myHasModuleReference = true;
+                  }
                 }
               }
               return true;
@@ -123,68 +126,70 @@
 
     public void invoke() {
       assert myImportElement != null : "isAvailable() must have returned true, but myImportElement is null";
-      PyUtil.sure(myImportElement.getImportReferenceExpression());
-      Project project = myImportElement.getProject();
+      sure(myImportElement.getImportReferenceExpression());
+      final Project project = myImportElement.getProject();
+
+      final PyElementGenerator generator = PyElementGenerator.getInstance(project);
+      final LanguageLevel languageLevel = LanguageLevel.forElement(myImportElement);
 
       // usages of imported name are qualifiers; what they refer to?
       try {
         // remember names and make them drop qualifiers
-        Set<String> used_names = new HashSet<String>();
+        final Set<String> usedNames = new HashSet<String>();
         for (PsiReference ref : myReferences) {
-          PsiElement elt = ref.getElement();
-          PsiElement parent_elt = elt.getParent();
-          used_names.add(sure(PyUtil.sure(parent_elt).getLastChild()).getText()); // TODO: find ident node more properly
+          final PsiElement elt = ref.getElement();
+          final PsiElement parentElt = elt.getParent();
+          // TODO: find ident node more properly
+          final String nameUsed = sure(sure(parentElt).getLastChild()).getText();
+          usedNames.add(nameUsed);
           if (!FileModificationService.getInstance().preparePsiElementForWrite(elt)) {
             return;
           }
-          PsiElement next_elt = elt.getNextSibling();
-          if (next_elt != null && ".".equals(next_elt.getText())) next_elt.delete();
-          elt.delete();
+          assert parentElt instanceof PyReferenceExpression;
+          final PyElement newReference = generator.createExpressionFromText(languageLevel, nameUsed);
+          parentElt.replace(newReference);
         }
 
         // create a separate import stmt for the module
-        PsiElement importer = myImportElement.getParent();
-        PyStatement import_statement;
-        PyImportElement[] import_elements;
+        final PsiElement importer = myImportElement.getParent();
+        final PyStatement importStatement;
+        final PyImportElement[] importElements;
         if (importer instanceof PyImportStatement) {
-          import_statement = (PyImportStatement)importer;
-          import_elements = ((PyImportStatement)import_statement).getImportElements();
+          importStatement = (PyImportStatement)importer;
+          importElements = ((PyImportStatement)importStatement).getImportElements();
         }
         else if (importer instanceof PyFromImportStatement) {
-          import_statement = (PyFromImportStatement)importer;
-          import_elements = ((PyFromImportStatement)import_statement).getImportElements();
+          importStatement = (PyFromImportStatement)importer;
+          importElements = ((PyFromImportStatement)importStatement).getImportElements();
         }
         else {
           throw new IncorrectOperationException("Not an import at all");
         }
-        PyElementGenerator generator = PyElementGenerator.getInstance(project);
-        StringBuilder builder = new StringBuilder("from ").append(getDots()).append(myModuleName).append(" import ");
-        builder.append(StringUtil.join(used_names, ", "));
-        PyFromImportStatement from_import_stmt =
-          generator.createFromText(LanguageLevel.getDefault(), PyFromImportStatement.class, builder.toString());
-        PsiElement parent = import_statement.getParent();
+        final PyFromImportStatement newImportStatement =
+          generator.createFromImportStatement(languageLevel, getDots() + myModuleName, StringUtil.join(usedNames, ", "), null);
+        final PsiElement parent = importStatement.getParent();
         sure(parent);
         sure(parent.isValid());
-        if (import_elements.length == 1) {
+        if (importElements.length == 1) {
           if (myHasModuleReference) {
-            parent.addAfter(from_import_stmt, import_statement); // add 'import from': we need the module imported as is
+            parent.addAfter(newImportStatement, importStatement); // add 'import from': we need the module imported as is
           }
           else { // replace entire existing import
-            sure(parent.getNode()).replaceChild(sure(import_statement.getNode()), sure(from_import_stmt.getNode()));
+            sure(parent.getNode()).replaceChild(sure(importStatement.getNode()), sure(newImportStatement.getNode()));
             // import_statement.replace(from_import_stmt);
           }
         }
         else {
           if (!myHasModuleReference) {
             // cut the module out of import, add a from-import.
-            for (PyImportElement pie : import_elements) {
+            for (PyImportElement pie : importElements) {
               if (pie == myImportElement) {
                 PyUtil.removeListNode(pie);
                 break;
               }
             }
           }
-          parent.addAfter(from_import_stmt, import_statement);
+          parent.addAfter(newImportStatement, importStatement);
         }
       }
       catch (IncorrectOperationException ignored) {
@@ -193,18 +198,24 @@
     }
 
 
+    @NotNull
     public String getText() {
-      String module_name = "?";
+      String moduleName = "?";
       if (myImportElement != null) {
-        PyReferenceExpression reference = myImportElement.getImportReferenceExpression();
-        if (reference != null) module_name = PyPsiUtils.toPath(reference);
+        final PyReferenceExpression reference = myImportElement.getImportReferenceExpression();
+        if (reference != null) {
+          moduleName = PyPsiUtils.toPath(reference);
+        }
       }
-      return PyBundle.message("INTN.convert.to.from.$0.import.$1", getDots()+module_name, "...");
+      return PyBundle.message("INTN.convert.to.from.$0.import.$1", getDots() + moduleName, "...");
     }
 
+    @NotNull
     private String getDots() {
       String dots = "";
-      for (int i=0; i<myRelativeLevel; i+=1) dots += "."; // this generally runs 1-2 times, so it's cheaper than allocating a StringBuilder
+      for (int i = 0; i < myRelativeLevel; i += 1) {
+        dots += "."; // this generally runs 1-2 times, so it's cheaper than allocating a StringBuilder
+      }
       return dots;
     }
   }
@@ -222,10 +233,15 @@
   }
 
   @Nullable
-  private static PyImportElement findImportElement(Editor editor, PsiFile file) {
-    PyImportElement import_elt = PsiTreeUtil.getParentOfType(file.findElementAt(editor.getCaretModel().getOffset()), PyImportElement.class);
-    if (import_elt != null && import_elt.isValid()) return import_elt;
-    else return null;
+  private static PyImportElement findImportElement(@NotNull Editor editor, @NotNull PsiFile file) {
+    final PsiElement elementAtCaret = file.findElementAt(editor.getCaretModel().getOffset());
+    final PyImportElement importElement = PsiTreeUtil.getParentOfType(elementAtCaret, PyImportElement.class);
+    if (importElement != null && importElement.isValid()) {
+      return importElement;
+    }
+    else {
+      return null;
+    }
   }
 
   public boolean isAvailable(@NotNull Project project, Editor editor, PsiFile file) {
@@ -233,7 +249,7 @@
       return false;
     }
 
-    IntentionState state = new IntentionState(editor, file);
+    final IntentionState state = new IntentionState(editor, file);
     if (state.isAvailable()) {
       myText = state.getText();
       return true;
@@ -242,7 +258,7 @@
   }
 
   public void invoke(@NotNull Project project, Editor editor, PsiFile file) throws IncorrectOperationException {
-    IntentionState state = new IntentionState(editor, file);
+    final IntentionState state = new IntentionState(editor, file);
     state.invoke();
   }
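
A rough before/after sketch of what the rewritten intention produces (module and names are illustrative):

    # Before: the whole module is imported and every usage is qualified
    import os.path
    print(os.path.join("a", "b"))

    # After "Convert to from ... import": the qualifier is dropped from each
    # usage and a from-import of exactly the names that were used is inserted
    from os.path import join
    print(join("a", "b"))
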
 
diff --git a/python/src/com/jetbrains/python/codeInsight/intentions/PyGenerateDocstringIntention.java b/python/src/com/jetbrains/python/codeInsight/intentions/PyGenerateDocstringIntention.java
index 7acfc97..ca7ec65 100644
--- a/python/src/com/jetbrains/python/codeInsight/intentions/PyGenerateDocstringIntention.java
+++ b/python/src/com/jetbrains/python/codeInsight/intentions/PyGenerateDocstringIntention.java
@@ -69,6 +69,7 @@
     if (function == null || statementList != null) {
       return false;
     }
+    if (!elementAt.equals(function.getNameNode())) return false;
     return isAvailableForFunction(project, function);
   }
 
diff --git a/python/src/com/jetbrains/python/codeInsight/override/PyOverrideImplementUtil.java b/python/src/com/jetbrains/python/codeInsight/override/PyOverrideImplementUtil.java
index 48a4275..e11f3e0 100644
--- a/python/src/com/jetbrains/python/codeInsight/override/PyOverrideImplementUtil.java
+++ b/python/src/com/jetbrains/python/codeInsight/override/PyOverrideImplementUtil.java
@@ -143,7 +143,7 @@
       return;
     }
     new WriteCommandAction(pyClass.getProject(), pyClass.getContainingFile()) {
-      protected void run(final Result result) throws Throwable {
+      protected void run(@NotNull final Result result) throws Throwable {
         write(pyClass, membersToOverride, editor, implement);
       }
     }.execute();
@@ -173,9 +173,7 @@
     PyPsiUtils.removeRedundantPass(statementList);
     if (element != null) {
       final PyStatementList targetStatementList = element.getStatementList();
-      final int start = targetStatementList != null
-                        ? targetStatementList.getTextRange().getStartOffset()
-                        : element.getTextRange().getStartOffset();
+      final int start = targetStatementList.getTextRange().getStartOffset();
       editor.getCaretModel().moveToOffset(start);
       editor.getScrollingModel().scrollToCaret(ScrollType.RELATIVE);
       editor.getSelectionModel().setSelection(start, element.getTextRange().getEndOffset());
@@ -238,7 +236,7 @@
           PsiElement outerClass = PsiTreeUtil.getParentOfType(pyClass, PyClass.class, true, PyFunction.class);
           String className = pyClass.getName();
           final List<String> nameResult = Lists.newArrayList(className);
-          while(outerClass instanceof PyClass) {
+          while(outerClass != null) {
             nameResult.add(0, ((PyClass)outerClass).getName());
             outerClass = PsiTreeUtil.getParentOfType(outerClass, PyClass.class, true, PyFunction.class);
           }
diff --git a/python/src/com/jetbrains/python/console/PyConsoleOptions.java b/python/src/com/jetbrains/python/console/PyConsoleOptions.java
index 59ae687..2f8eb5e 100644
--- a/python/src/com/jetbrains/python/console/PyConsoleOptions.java
+++ b/python/src/com/jetbrains/python/console/PyConsoleOptions.java
@@ -146,6 +146,7 @@
       form.setUseModuleSdk(myUseModuleSdk);
       form.addContentRoots(myAddContentRoots);
       form.addSourceRoots(myAddSourceRoots);
+
       boolean moduleWasAutoselected = false;
       if (form.isUseModuleSdk() != myUseModuleSdk) {
         myUseModuleSdk = form.isUseModuleSdk();
diff --git a/python/src/com/jetbrains/python/console/PyDebugConsoleBuilder.java b/python/src/com/jetbrains/python/console/PyDebugConsoleBuilder.java
index a99b933..7fbd432 100644
--- a/python/src/com/jetbrains/python/console/PyDebugConsoleBuilder.java
+++ b/python/src/com/jetbrains/python/console/PyDebugConsoleBuilder.java
@@ -37,7 +37,7 @@
 
   public PyDebugConsoleBuilder(final Project project, @Nullable Sdk sdk) {
     myProject = project;
-    this.mySdk = sdk;
+    mySdk = sdk;
   }
 
   public ConsoleView getConsole() {
diff --git a/python/src/com/jetbrains/python/console/PydevConsoleRunner.java b/python/src/com/jetbrains/python/console/PydevConsoleRunner.java
index b3ebacd..c710fd0 100644
--- a/python/src/com/jetbrains/python/console/PydevConsoleRunner.java
+++ b/python/src/com/jetbrains/python/console/PydevConsoleRunner.java
@@ -125,10 +125,10 @@
 
   private Sdk mySdk;
   @NotNull private CommandLineArgumentsProvider myCommandLineArgumentsProvider;
-  private int[] myPorts;
+  protected int[] myPorts;
   private PydevConsoleCommunication myPydevConsoleCommunication;
   private PyConsoleProcessHandler myProcessHandler;
-  private PydevConsoleExecuteActionHandler myConsoleExecuteActionHandler;
+  protected PydevConsoleExecuteActionHandler myConsoleExecuteActionHandler;
   private List<ConsoleListener> myConsoleListeners = ContainerUtil.createLockFreeCopyOnWriteList();
   private final PyConsoleType myConsoleType;
   private Map<String, String> myEnvironmentVariables;
@@ -146,9 +146,9 @@
   private String myConsoleTitle = null;
 
   public PydevConsoleRunner(@NotNull final Project project,
-                               @NotNull Sdk sdk, @NotNull final PyConsoleType consoleType,
-                               @Nullable final String workingDir,
-                               Map<String, String> environmentVariables, String ... statementsToExecute) {
+                            @NotNull Sdk sdk, @NotNull final PyConsoleType consoleType,
+                            @Nullable final String workingDir,
+                            Map<String, String> environmentVariables, String... statementsToExecute) {
     super(project, consoleType.getTitle(), workingDir);
     mySdk = sdk;
     myConsoleType = consoleType;
@@ -281,6 +281,30 @@
     return actions;
   }
 
+  public void runSync() {
+    myPorts = findAvailablePorts(getProject(), myConsoleType);
+
+    assert myPorts != null;
+
+    myCommandLineArgumentsProvider = createCommandLineArgumentsProvider(mySdk, myEnvironmentVariables, myPorts);
+
+    try {
+      super.initAndRun();
+    }
+    catch (ExecutionException e) {
+      LOG.warn("Error running console", e);
+      ExecutionHelper.showErrors(getProject(), Arrays.<Exception>asList(e), "Python Console", null);
+    }
+
+    ProgressManager.getInstance().run(new Task.Backgroundable(getProject(), "Connecting to console", false) {
+      @Override
+      public void run(@NotNull final ProgressIndicator indicator) {
+        indicator.setText("Connecting to console...");
+        connect(myStatementsToExecute);
+      }
+    });
+  }
+
   public void run() {
     UIUtil.invokeAndWaitIfNeeded(new Runnable() {
       @Override
@@ -337,9 +361,9 @@
     return ports;
   }
 
-  private static CommandLineArgumentsProvider createCommandLineArgumentsProvider(final Sdk sdk,
-                                                                                 final Map<String, String> environmentVariables,
-                                                                                 int[] ports) {
+  protected CommandLineArgumentsProvider createCommandLineArgumentsProvider(final Sdk sdk,
+                                                                            final Map<String, String> environmentVariables,
+                                                                            int[] ports) {
     final ArrayList<String> args = new ArrayList<String>();
     args.add(sdk.getHomePath());
     final String versionString = sdk.getVersionString();
@@ -518,8 +542,11 @@
   public void initAndRun(final String... statements2execute) throws ExecutionException {
     super.initAndRun();
 
-    if (handshake()) {
+    connect(statements2execute);
+  }
 
+  public void connect(final String[] statements2execute) {
+    if (handshake()) {
       ApplicationManager.getApplication().invokeLater(new Runnable() {
 
         @Override
diff --git a/python/src/com/jetbrains/python/debugger/PyDebugProcess.java b/python/src/com/jetbrains/python/debugger/PyDebugProcess.java
index cff49fb..cdea412 100644
--- a/python/src/com/jetbrains/python/debugger/PyDebugProcess.java
+++ b/python/src/com/jetbrains/python/debugger/PyDebugProcess.java
@@ -17,6 +17,7 @@
 
 import com.google.common.collect.Lists;
 import com.google.common.collect.Maps;
+import com.intellij.execution.console.DuplexConsoleView;
 import com.intellij.execution.process.ProcessEvent;
 import com.intellij.execution.process.ProcessHandler;
 import com.intellij.execution.process.ProcessListener;
@@ -35,6 +36,7 @@
 import com.intellij.openapi.util.Key;
 import com.intellij.openapi.util.Pair;
 import com.intellij.remote.RemoteProcessHandlerBase;
+import com.intellij.util.ui.UIUtil;
 import com.intellij.xdebugger.*;
 import com.intellij.xdebugger.breakpoints.XBreakpoint;
 import com.intellij.xdebugger.breakpoints.XBreakpointHandler;
@@ -43,6 +45,7 @@
 import com.intellij.xdebugger.evaluation.XDebuggerEditorsProvider;
 import com.intellij.xdebugger.frame.XValueChildrenList;
 import com.intellij.xdebugger.stepping.XSmartStepIntoHandler;
+import com.jetbrains.python.console.PythonDebugLanguageConsoleView;
 import com.jetbrains.python.console.pydev.PydevCompletionVariant;
 import com.jetbrains.python.debugger.pydev.*;
 import com.jetbrains.python.run.PythonProcessHandler;
@@ -84,6 +87,7 @@
   private final XSmartStepIntoHandler<?> mySmartStepIntoHandler;
   private boolean myWaitingForConnection = false;
   private PyStackFrame myStackFrameBeforeResume;
+  private PyStackFrame myConsoleContextFrame = null;
 
   public PyDebugProcess(final @NotNull XDebugSession session,
                         @NotNull final ServerSocket serverSocket,
@@ -285,6 +289,19 @@
     PySignatureCacheManager.getInstance(getSession().getProject()).recordSignature(myPositionConverter.convertSignature(signature));
   }
 
+  @Override
+  public void showConsole(PyThreadInfo thread) {
+    myConsoleContextFrame = new PyExecutionStack(this, thread).getTopFrame();
+    if (myExecutionConsole instanceof PythonDebugLanguageConsoleView) {
+      UIUtil.invokeLaterIfNeeded(new Runnable() {
+        @Override
+        public void run() {
+          ((PythonDebugLanguageConsoleView)myExecutionConsole).enableConsole(false);
+        }
+      });
+    }
+  }
+
   protected void afterConnect() {
   }
 
@@ -545,6 +562,11 @@
     }
 
     final PyStackFrame frame = (PyStackFrame)getSession().getCurrentStackFrame();
+
+    if (frame == null && myConsoleContextFrame != null) {
+      return myConsoleContextFrame;
+    }
+
     if (frame == null) {
       throw new PyDebuggerException("Process is running");
     }
diff --git a/python/src/com/jetbrains/python/debugger/PyExecutionStack.java b/python/src/com/jetbrains/python/debugger/PyExecutionStack.java
index ed48f09..7d9aba3 100644
--- a/python/src/com/jetbrains/python/debugger/PyExecutionStack.java
+++ b/python/src/com/jetbrains/python/debugger/PyExecutionStack.java
@@ -37,7 +37,7 @@
   }
 
   @Override
-  public XStackFrame getTopFrame() {
+  public PyStackFrame getTopFrame() {
     if (myTopFrame == null) {
       final List<PyStackFrameInfo> frames = myThreadInfo.getFrames();
       if (frames != null) {
diff --git a/python/src/com/jetbrains/python/documentation/PythonDocumentationProvider.java b/python/src/com/jetbrains/python/documentation/PythonDocumentationProvider.java
index 55bc1e4..913ed65 100644
--- a/python/src/com/jetbrains/python/documentation/PythonDocumentationProvider.java
+++ b/python/src/com/jetbrains/python/documentation/PythonDocumentationProvider.java
@@ -34,6 +34,7 @@
 import com.intellij.openapi.vfs.VirtualFile;
 import com.intellij.psi.*;
 import com.intellij.psi.util.PsiTreeUtil;
+import com.intellij.psi.util.QualifiedName;
 import com.intellij.util.Function;
 import com.jetbrains.python.PyNames;
 import com.jetbrains.python.codeInsight.PyCodeInsightSettings;
@@ -44,13 +45,16 @@
 import com.jetbrains.python.debugger.PySignatureUtil;
 import com.jetbrains.python.psi.*;
 import com.jetbrains.python.psi.impl.PyBuiltinCache;
-import com.intellij.psi.util.QualifiedName;
 import com.jetbrains.python.psi.resolve.QualifiedNameFinder;
-import com.jetbrains.python.psi.types.*;
+import com.jetbrains.python.psi.types.PyClassType;
+import com.jetbrains.python.psi.types.PyType;
+import com.jetbrains.python.psi.types.PyTypeParser;
+import com.jetbrains.python.psi.types.TypeEvalContext;
 import com.jetbrains.python.toolbox.ChainIterable;
 import com.jetbrains.python.toolbox.FP;
 import org.apache.commons.httpclient.HttpClient;
 import org.apache.commons.httpclient.methods.HeadMethod;
+import org.apache.commons.httpclient.params.HttpConnectionManagerParams;
 import org.jetbrains.annotations.NonNls;
 import org.jetbrains.annotations.NotNull;
 import org.jetbrains.annotations.Nullable;
@@ -414,8 +418,10 @@
       return true;
     }
     HttpClient client = new HttpClient();
-    client.setTimeout(5 * 1000);
-    client.setConnectionTimeout(5 * 1000);
+    HttpConnectionManagerParams params = client.getHttpConnectionManager().getParams();
+    params.setSoTimeout(5 * 1000);
+    params.setConnectionTimeout(5 * 1000);
+
     try {
       HeadMethod method = new HeadMethod(url);
       int rc = client.executeMethod(method);
@@ -618,9 +624,7 @@
     if (checkReturn) {
       RaiseVisitor visitor = new RaiseVisitor();
       PyStatementList statementList = element.getStatementList();
-      if (statementList != null) {
-        statementList.accept(visitor);
-      }
+      statementList.accept(visitor);
       if (visitor.myHasReturn) {
         builder.append(prefix).append("return:").append(offset);
         if (PyCodeInsightSettings.getInstance().INSERT_TYPE_DOCSTUB) {
diff --git a/python/src/com/jetbrains/python/documentation/doctest/PyDocstringVisitorFilter.java b/python/src/com/jetbrains/python/documentation/doctest/PyDocstringVisitorFilter.java
index e79d52c..71af6aa 100644
--- a/python/src/com/jetbrains/python/documentation/doctest/PyDocstringVisitorFilter.java
+++ b/python/src/com/jetbrains/python/documentation/doctest/PyDocstringVisitorFilter.java
@@ -43,7 +43,7 @@
         visitorClass == PyByteLiteralInspection.class || visitorClass == PyNonAsciiCharInspection.class ||
         visitorClass == PyPackageRequirementsInspection.class || visitorClass == PyMandatoryEncodingInspection.class ||
         visitorClass == PyInterpreterInspection.class || visitorClass == PyDocstringTypesInspection.class ||
-        visitorClass == PySingleQuotedDocstringInspection.class) {
+        visitorClass == PySingleQuotedDocstringInspection.class || visitorClass == PyClassHasNoInitInspection.class) {
       return false;
     }
     //annotators
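
As an illustration of the added suppression, a doctest fragment like the one below (hypothetical names) would previously have been inspected by PyClassHasNoInitInspection; with this change that inspection is skipped inside docstrings as well:

    def demo():
        """
        >>> class Point: pass
        >>> Point() is not None
        True
        """
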
diff --git a/python/src/com/jetbrains/python/formatter/PythonFormattingModelBuilder.java b/python/src/com/jetbrains/python/formatter/PythonFormattingModelBuilder.java
index aee9e53..4b71d6a 100644
--- a/python/src/com/jetbrains/python/formatter/PythonFormattingModelBuilder.java
+++ b/python/src/com/jetbrains/python/formatter/PythonFormattingModelBuilder.java
@@ -46,13 +46,13 @@
   public FormattingModel createModel(@NotNull PsiElement element,
                                      @NotNull CodeStyleSettings settings,
                                      @NotNull FormattingMode mode) {
+    final ASTNode fileNode = element.getContainingFile().getNode();
     if (DUMP_FORMATTING_AST) {
-      ASTNode fileNode = element.getContainingFile().getNode();
       System.out.println("AST tree for " + element.getContainingFile().getName() + ":");
       printAST(fileNode, 0);
     }
     final PyBlockContext context = new PyBlockContext(settings, createSpacingBuilder(settings), mode);
-    final PyBlock block = new PyBlock(null, element.getNode(), null, Indent.getNoneIndent(), null, context);
+    final PyBlock block = new PyBlock(null, fileNode, null, Indent.getNoneIndent(), null, context);
     if (DUMP_FORMATTING_AST) {
       FormattingModelDumper.dumpFormattingModel(block, 2, System.out);
     }
diff --git a/python/src/com/jetbrains/python/inspections/PyCompatibilityInspection.java b/python/src/com/jetbrains/python/inspections/PyCompatibilityInspection.java
index c8a40a0..0f69f4f 100644
--- a/python/src/com/jetbrains/python/inspections/PyCompatibilityInspection.java
+++ b/python/src/com/jetbrains/python/inspections/PyCompatibilityInspection.java
@@ -200,9 +200,7 @@
           }
           for (int i = 0; i != myVersionsToProcess.size(); ++i) {
             LanguageLevel languageLevel = myVersionsToProcess.get(i);
-            PsiFile file = resolved.getContainingFile();
-            VirtualFile virtualFile = file.getVirtualFile();
-            if (virtualFile != null && ind.isInLibraryClasses(virtualFile)) {
+            if (PyBuiltinCache.getInstance(resolved).isBuiltin(resolved)) {
               if (!"print".equals(name) && !myUsedImports.contains(name) && UnsupportedFeaturesUtil.BUILTINS.get(languageLevel).contains(name)) {
                 len = appendLanguageLevel(message, len, languageLevel);
               }
diff --git a/python/src/com/jetbrains/python/inspections/PyStatementEffectInspection.java b/python/src/com/jetbrains/python/inspections/PyStatementEffectInspection.java
index 0cc7a4a..a521ed7 100644
--- a/python/src/com/jetbrains/python/inspections/PyStatementEffectInspection.java
+++ b/python/src/com/jetbrains/python/inspections/PyStatementEffectInspection.java
@@ -72,9 +72,6 @@
       final PyTryPart tryPart = PsiTreeUtil.getParentOfType(node, PyTryPart.class);
       if (tryPart != null) {
         final PyStatementList statementList = tryPart.getStatementList();
-        if (statementList == null) {
-          return;
-        }
         if (statementList.getStatements().length == 1 && statementList.getStatements()[0] == node) {
           return;
         }
diff --git a/python/src/com/jetbrains/python/inspections/quickfix/PyDefaultArgumentQuickFix.java b/python/src/com/jetbrains/python/inspections/quickfix/PyDefaultArgumentQuickFix.java
index 31723d3..db47f17 100644
--- a/python/src/com/jetbrains/python/inspections/quickfix/PyDefaultArgumentQuickFix.java
+++ b/python/src/com/jetbrains/python/inspections/quickfix/PyDefaultArgumentQuickFix.java
@@ -61,29 +61,36 @@
       PyStatementList list = function.getStatementList();
       PyParameterList paramList = function.getParameterList();
 
-      StringBuilder str = new StringBuilder("def foo(");
+      final StringBuilder functionText = new StringBuilder("def foo(");
       int size = paramList.getParameters().length;
       for (int i = 0; i != size; ++i) {
         PyParameter p = paramList.getParameters()[i];
         if (p == param)
-          str.append(defName).append("=None");
+          functionText.append(defName).append("=None");
         else
-          str.append(p.getText());
+          functionText.append(p.getText());
         if (i != size-1)
-          str.append(", ");
+          functionText.append(", ");
       }
-      str.append("):\n\tpass");
-      PyIfStatement ifStatement = elementGenerator.createFromText(LanguageLevel.forElement(function), PyIfStatement.class,
-                                                "if not " + defName + ": " + defName + " = " + defaultValue.getText());
-
-      PyStatement firstStatement = list.getStatements()[0];
-      PyStringLiteralExpression docString = function.getDocStringExpression();
-      if (docString != null)
-        list.addAfter(ifStatement, firstStatement);
-      else
-        list.addBefore(ifStatement, firstStatement);
-      paramList.replace(elementGenerator.createFromText(LanguageLevel.forElement(defaultValue),
-                                                                 PyFunction.class, str.toString()).getParameterList());
+      functionText.append("):\n\tif not ").append(defName).append(":\n\t\t").append(defName).append(" = ").append(defaultValue.getText());
+      final PyStatement[] statements = list.getStatements();
+      PyStatement firstStatement = statements.length > 0 ? statements[0] : null;
+      PyFunction newFunction = elementGenerator.createFromText(LanguageLevel.forElement(function), PyFunction.class,
+                                                               functionText.toString());
+      if (firstStatement == null) {
+        function.replace(newFunction);
+      }
+      else {
+        final PyStatement ifStatement = newFunction.getStatementList().getStatements()[0];
+        PyStringLiteralExpression docString = function.getDocStringExpression();
+        if (docString != null)
+          list.addAfter(ifStatement, firstStatement);
+        else {
+          list.addBefore(ifStatement, firstStatement);
+        }
+        paramList.replace(elementGenerator.createFromText(LanguageLevel.forElement(defaultValue),
+                                                          PyFunction.class, functionText.toString()).getParameterList());
+      }
     }
   }
 }
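
For reference, a minimal before/after sketch (illustrative names) of the rewrite this quick fix now builds from a single generated function:

    # Before: mutable default argument reported by the inspection
    def append_item(item, items=[]):
        items.append(item)
        return items

    # After applying the quick fix: the default becomes None and a guard
    # re-creates the original default inside the body
    def append_item(item, items=None):
        if not items:
            items = []
        items.append(item)
        return items
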
diff --git a/python/src/com/jetbrains/python/inspections/quickfix/StatementEffectFunctionCallQuickFix.java b/python/src/com/jetbrains/python/inspections/quickfix/StatementEffectFunctionCallQuickFix.java
index 95daf46..43f4f03 100644
--- a/python/src/com/jetbrains/python/inspections/quickfix/StatementEffectFunctionCallQuickFix.java
+++ b/python/src/com/jetbrains/python/inspections/quickfix/StatementEffectFunctionCallQuickFix.java
@@ -18,14 +18,14 @@
 import com.intellij.codeInspection.LocalQuickFix;
 import com.intellij.codeInspection.ProblemDescriptor;
 import com.intellij.openapi.project.Project;
+import com.intellij.psi.PsiComment;
 import com.intellij.psi.PsiElement;
 import com.intellij.psi.PsiWhiteSpace;
+import com.intellij.psi.impl.source.tree.LeafPsiElement;
 import com.jetbrains.python.PyBundle;
 import com.jetbrains.python.PyNames;
-import com.jetbrains.python.psi.LanguageLevel;
-import com.jetbrains.python.psi.PyElementGenerator;
-import com.jetbrains.python.psi.PyExpression;
-import com.jetbrains.python.psi.PyReferenceExpression;
+import com.jetbrains.python.PyTokenTypes;
+import com.jetbrains.python.psi.*;
 import org.jetbrains.annotations.NotNull;
 
 /**
@@ -48,15 +48,17 @@
     PsiElement expression = descriptor.getPsiElement();
     if (expression != null && expression.isWritable() && expression instanceof PyReferenceExpression) {
       final String expressionText = expression.getText();
-      if (PyNames.PRINT.equals(expressionText) || PyNames.EXEC.equals(expressionText))
-        replacePrintExec(expression);
+      if (PyNames.PRINT.equals(expressionText))
+        replacePrint(expression);
+      else if (PyNames.EXEC.equals(expressionText))
+        replaceExec(expression);
       else
         expression.replace(PyElementGenerator.getInstance(project).createCallExpression(LanguageLevel.forElement(expression),
                                                                                         expressionText));
     }
   }
 
-  private static void replacePrintExec(@NotNull final PsiElement expression) {
+  private static void replaceExec(@NotNull final PsiElement expression) {
     final PyElementGenerator elementGenerator = PyElementGenerator.getInstance(expression.getProject());
     final String expressionText = expression.getText();
     final StringBuilder stringBuilder = new StringBuilder(expressionText + " (");
@@ -77,14 +79,88 @@
 
     RemoveUnnecessaryBackslashQuickFix.removeBackSlash(next);
     if (whiteSpace != null) whiteSpace.delete();
+    if (next == null) {
+      stringBuilder.append(")");
+      expression.replace(elementGenerator.createFromText(LanguageLevel.forElement(expression), PyExpression.class,
+                                                         stringBuilder.toString()));
+      return;
+    }
+    if (next instanceof PyExpressionStatement) {
+      final PyExpression expr = ((PyExpressionStatement)next).getExpression();
+      if (expr instanceof PyBinaryExpression) {
+        addInArguments(stringBuilder, (PyBinaryExpression)expr);
+      }
+      else if (expr instanceof PyTupleExpression) {
+        final PyExpression[] elements = ((PyTupleExpression)expr).getElements();
+        if (elements.length > 1) {
+          if (elements[0] instanceof PyBinaryExpression) {
+            addInArguments(stringBuilder, (PyBinaryExpression)elements[0]);
+          }
+          stringBuilder.append(", ");
+          stringBuilder.append(elements[1].getText());
+        }
+      }
+      else {
+        stringBuilder.append(next.getText());
+      }
+    }
+    else {
+      stringBuilder.append(next.getText());
+    }
+    next.delete();
+    stringBuilder.append(")");
+    expression.replace(elementGenerator.createFromText(LanguageLevel.forElement(expression), PyExpression.class,
+                                                       stringBuilder.toString()));
+  }
+
+  private static void addInArguments(@NotNull final StringBuilder stringBuilder, @NotNull final PyBinaryExpression binaryExpression) {
+    final PsiElement operator = binaryExpression.getPsiOperator();
+    if (operator instanceof LeafPsiElement && ((LeafPsiElement)operator).getElementType() == PyTokenTypes.IN_KEYWORD) {
+      stringBuilder.append(binaryExpression.getLeftExpression().getText());
+      stringBuilder.append(", ");
+      final PyExpression rightExpression = binaryExpression.getRightExpression();
+      if (rightExpression != null)
+        stringBuilder.append(rightExpression.getText());
+    }
+  }
+
+  private static void replacePrint(@NotNull final PsiElement expression) {
+    final PyElementGenerator elementGenerator = PyElementGenerator.getInstance(expression.getProject());
+    final String expressionText = expression.getText();
+    final StringBuilder stringBuilder = new StringBuilder(expressionText + " (");
+
+    final PsiElement whiteSpace = expression.getContainingFile().findElementAt(expression.getTextOffset() + expression.getTextLength());
+    PsiElement next = null;
+    if (whiteSpace instanceof PsiWhiteSpace) {
+      final String whiteSpaceText = whiteSpace.getText();
+      if (!whiteSpaceText.contains("\n")) {
+        next = whiteSpace.getNextSibling();
+        while (next instanceof PsiWhiteSpace && whiteSpaceText.contains("\\")) {
+          next = next.getNextSibling();
+        }
+      }
+    }
+    else
+      next = whiteSpace;
+
+    RemoveUnnecessaryBackslashQuickFix.removeBackSlash(next);
+    if (whiteSpace != null) whiteSpace.delete();
+    String commentText = null;
     if (next != null) {
-      final String text = next.getText();
+      final PsiElement lastChild = next.getLastChild();
+      if (lastChild instanceof PsiComment) {
+        commentText = lastChild.getText();
+      }
+      final String text = next instanceof PyExpressionStatement ? ((PyExpressionStatement)next).getExpression().getText() : next.getText();
+
       stringBuilder.append(text);
-      if (text.endsWith(",") && PyNames.PRINT.equals(expressionText))
-        stringBuilder.append(" end=' '");
+      if (text.endsWith(",")) stringBuilder.append(" end=' '");
       next.delete();
     }
     stringBuilder.append(")");
+    if (commentText != null) {
+      stringBuilder.append(commentText);
+    }
     expression.replace(elementGenerator.createFromText(LanguageLevel.forElement(expression), PyExpression.class,
                                                        stringBuilder.toString()));
   }
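
For reference, the split above gives the statement-effect quick fix two distinct paths: replacePrint turns a Python 2 print statement into a call, re-attaching a trailing comment after the call and mapping a trailing comma to end=' ', while replaceExec additionally unfolds "exec code in globals, locals" into call arguments. A rough sketch of the text the fix builds, with hypothetical input statements; only the generator call is taken from the hunks above:

    // Hypothetical inputs and the call text built for them:
    //   print foo,            ->  print (foo, end=' ')
    //   print foo  # comment  ->  print (foo)# comment   (comment kept after the call)
    //   exec code in ns       ->  exec (code, ns)
    final StringBuilder text = new StringBuilder("print (");
    text.append("foo").append(", end=' '").append(")");
    expression.replace(PyElementGenerator.getInstance(expression.getProject())
                         .createFromText(LanguageLevel.forElement(expression), PyExpression.class, text.toString()));
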
diff --git a/python/src/com/jetbrains/python/inspections/unresolvedReference/PyUnresolvedReferencesInspection.java b/python/src/com/jetbrains/python/inspections/unresolvedReference/PyUnresolvedReferencesInspection.java
index 4fd5862..b7523ca 100644
--- a/python/src/com/jetbrains/python/inspections/unresolvedReference/PyUnresolvedReferencesInspection.java
+++ b/python/src/com/jetbrains/python/inspections/unresolvedReference/PyUnresolvedReferencesInspection.java
@@ -755,7 +755,7 @@
       return false;
     }
 
-    private static void addCreateMemberFromUsageFixes(PyType type, PsiReference reference, String refText, List<LocalQuickFix> actions) {
+    private void addCreateMemberFromUsageFixes(PyType type, PsiReference reference, String refText, List<LocalQuickFix> actions) {
       PsiElement element = reference.getElement();
       if (type instanceof PyClassTypeImpl) {
         PyClass cls = ((PyClassType)type).getPyClass();
@@ -771,6 +771,7 @@
       else if (type instanceof PyModuleType) {
         PyFile file = ((PyModuleType)type).getModule();
         actions.add(new AddFunctionQuickFix(refText, file.getName()));
+        addCreateClassFix(refText, element, actions);
       }
     }
 
diff --git a/python/src/com/jetbrains/python/module/PyContentEntriesEditor.java b/python/src/com/jetbrains/python/module/PyContentEntriesEditor.java
new file mode 100644
index 0000000..8ea8b26
--- /dev/null
+++ b/python/src/com/jetbrains/python/module/PyContentEntriesEditor.java
@@ -0,0 +1,367 @@
+/*
+ * Copyright 2000-2013 JetBrains s.r.o.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package com.jetbrains.python.module;
+
+import com.intellij.openapi.Disposable;
+import com.intellij.openapi.actionSystem.AnActionEvent;
+import com.intellij.openapi.actionSystem.CustomShortcutSet;
+import com.intellij.openapi.actionSystem.Presentation;
+import com.intellij.openapi.module.Module;
+import com.intellij.openapi.options.ConfigurationException;
+import com.intellij.openapi.project.Project;
+import com.intellij.openapi.roots.ContentEntry;
+import com.intellij.openapi.roots.ContentFolder;
+import com.intellij.openapi.roots.ModifiableRootModel;
+import com.intellij.openapi.roots.impl.ContentEntryImpl;
+import com.intellij.openapi.roots.impl.ContentFolderBaseImpl;
+import com.intellij.openapi.roots.ui.configuration.*;
+import com.intellij.openapi.roots.ui.configuration.actions.ContentEntryEditingAction;
+import com.intellij.openapi.util.Comparing;
+import com.intellij.openapi.util.Disposer;
+import com.intellij.openapi.vfs.VfsUtilCore;
+import com.intellij.openapi.vfs.VirtualFile;
+import com.intellij.openapi.vfs.pointers.VirtualFilePointer;
+import com.intellij.openapi.vfs.pointers.VirtualFilePointerListener;
+import com.intellij.openapi.vfs.pointers.VirtualFilePointerManager;
+import com.intellij.ui.JBColor;
+import com.intellij.util.EventDispatcher;
+import com.intellij.util.containers.MultiMap;
+import com.jetbrains.python.templateLanguages.TemplatesService;
+import icons.PythonIcons;
+import org.jetbrains.annotations.NotNull;
+import org.jetbrains.annotations.Nullable;
+import org.jetbrains.jps.model.module.JpsModuleSourceRootType;
+
+import javax.swing.*;
+import javax.swing.event.ChangeEvent;
+import javax.swing.event.ChangeListener;
+import javax.swing.tree.TreeCellRenderer;
+import java.awt.*;
+import java.awt.event.InputEvent;
+import java.awt.event.KeyEvent;
+import java.util.ArrayList;
+import java.util.List;
+
+public class PyContentEntriesEditor extends CommonContentEntriesEditor {
+  private static final Color TEMPLATES_COLOR = JBColor.MAGENTA;
+  private final MultiMap<ContentEntry, VirtualFilePointer> myTemplateRoots = new MultiMap<ContentEntry, VirtualFilePointer>();
+  private final Module myModule;
+  private Disposable myFilePointersDisposable;
+
+  private final VirtualFilePointerListener DUMMY_LISTENER = new VirtualFilePointerListener() {
+    @Override
+    public void beforeValidityChanged(@NotNull VirtualFilePointer[] pointers) {
+    }
+
+    @Override
+    public void validityChanged(@NotNull VirtualFilePointer[] pointers) {
+    }
+  };
+
+  public PyContentEntriesEditor(Module module, ModuleConfigurationState moduleConfigurationState,
+                                      JpsModuleSourceRootType<?>... rootTypes) {
+    super(module.getName(), moduleConfigurationState, rootTypes);
+    myModule = module;
+    reset();
+  }
+
+  @Override
+  protected ContentEntryTreeEditor createContentEntryTreeEditor(Project project) {
+    return new MyContentEntryTreeEditor(project, getEditHandlers());
+  }
+
+  @Override
+  protected List<ContentEntry> addContentEntries(VirtualFile[] files) {
+    List<ContentEntry> entries = super.addContentEntries(files);
+    addContentEntryPanels(entries.toArray(new ContentEntry[entries.size()]));
+    return entries;
+  }
+
+  @Override
+  public void reset() {
+    if (myFilePointersDisposable != null) {
+      Disposer.dispose(myFilePointersDisposable);
+    }
+    myTemplateRoots.clear();
+
+    myFilePointersDisposable = Disposer.newDisposable();
+    final TemplatesService instance = TemplatesService.getInstance(myModule);
+    if (instance != null) {
+      final List<VirtualFile> folders = instance.getTemplateFolders();
+      for (VirtualFile folder : folders) {
+        ContentEntry contentEntry = findContentEntryForFile(folder);
+        if (contentEntry != null) {
+          myTemplateRoots.putValue(contentEntry, VirtualFilePointerManager.getInstance().create(folder, myFilePointersDisposable,
+                                                                                                DUMMY_LISTENER));
+        }
+      }
+    }
+
+    if (myRootTreeEditor != null) {
+      ContentEntryEditor editor = myRootTreeEditor.getContentEntryEditor();
+      if(editor!=null) editor.update();
+      myRootTreeEditor.update();
+    }
+  }
+
+  @Nullable
+  private ContentEntry findContentEntryForFile(VirtualFile virtualFile) {
+    for (ContentEntry contentEntry : getModel().getContentEntries()) {
+      final VirtualFile file = contentEntry.getFile();
+      if (file != null && VfsUtilCore.isAncestor(file, virtualFile, false)) {
+        return contentEntry;
+      }
+    }
+    return null;
+  }
+
+  @Override
+  public void disposeUIResources() {
+    super.disposeUIResources();
+    if (myFilePointersDisposable != null) {
+      Disposer.dispose(myFilePointersDisposable);
+    }
+  }
+
+  @Override
+  public void apply() throws ConfigurationException {
+    super.apply();
+    List<VirtualFile> templateRoots = getCurrentState();
+    TemplatesService.getInstance(myModule).setTemplateFolders(templateRoots.toArray(new VirtualFile[templateRoots.size()]));
+  }
+
+  private List<VirtualFile> getCurrentState() {
+    List<VirtualFile> result = new ArrayList<VirtualFile>();
+    for (ContentEntry entry : myTemplateRoots.keySet()) {
+      for (VirtualFilePointer filePointer : myTemplateRoots.get(entry)) {
+        result.add(filePointer.getFile());
+      }
+    }
+    return result;
+  }
+
+  @Override
+  public boolean isModified() {
+    if (super.isModified()) return true;
+    final TemplatesService templatesService = TemplatesService.getInstance(myModule);
+    if (templatesService != null) {
+      List<VirtualFile> original = templatesService.getTemplateFolders();
+      List<VirtualFile> current = getCurrentState();
+
+      if (!Comparing.haveEqualElements(original, current)) return true;
+
+    }
+    return false;
+  }
+
+  @Override
+  protected MyContentEntryEditor createContentEntryEditor(String contentEntryUrl) {
+    return new MyContentEntryEditor(contentEntryUrl, getEditHandlers());
+  }
+
+  protected class MyContentEntryEditor extends ContentEntryEditor {
+    private final EventDispatcher<ChangeListener> myEventDispatcher = EventDispatcher.create(ChangeListener.class);
+
+    public MyContentEntryEditor(String contentEntryUrl, List<ModuleSourceRootEditHandler<?>> handlers) {
+      super(contentEntryUrl, handlers);
+    }
+
+    @Override
+    protected ModifiableRootModel getModel() {
+      return PyContentEntriesEditor.this.getModel();
+    }
+
+    public void addListener(ChangeListener changeListener) {
+      myEventDispatcher.addListener(changeListener);
+    }
+
+    public void removeListener(ChangeListener changeListener) {
+      myEventDispatcher.removeListener(changeListener);
+    }
+
+    @Override
+    protected ContentRootPanel createContentRootPane() {
+      return new MyContentRootPanel();
+    }
+
+    @Override
+    public void deleteContentFolder(ContentEntry contentEntry, ContentFolder folder) {
+      if (folder instanceof TemplateRootFolder) {
+        final VirtualFile file = folder.getFile();
+        if (file != null) {
+          removeTemplateRoot(file);
+        }
+      }
+      else {
+        super.deleteContentFolder(contentEntry, folder);
+      }
+    }
+
+    public void addTemplateRoot(@NotNull final VirtualFile file) {
+      final VirtualFilePointer root = VirtualFilePointerManager.getInstance().create(file, myFilePointersDisposable, DUMMY_LISTENER);
+      myTemplateRoots.putValue(getContentEntry(), root);
+      myEventDispatcher.getMulticaster().stateChanged(new ChangeEvent(this));
+      update();
+    }
+
+    public void removeTemplateRoot(@NotNull final VirtualFile file) {
+      final VirtualFilePointer root = getTemplateRoot(file);
+      if (root != null) {
+        myTemplateRoots.remove(getContentEntry(), root);
+        myEventDispatcher.getMulticaster().stateChanged(new ChangeEvent(this));
+        update();
+      }
+    }
+
+    public boolean hasTemplateRoot(@NotNull final VirtualFile file) {
+      return getTemplateRoot(file) != null;
+    }
+
+    @Nullable
+    public VirtualFilePointer getTemplateRoot(@NotNull final VirtualFile file) {
+      for (VirtualFilePointer filePointer : myTemplateRoots.get(getContentEntry())) {
+        if (Comparing.equal(filePointer.getFile(), file)) {
+          return filePointer;
+        }
+      }
+      return null;
+    }
+
+    protected class MyContentRootPanel extends ContentRootPanel {
+      public MyContentRootPanel() {
+        super(MyContentEntryEditor.this, getEditHandlers());
+      }
+
+      @Override
+      @NotNull
+      protected ContentEntryImpl getContentEntry() {
+        //noinspection ConstantConditions
+        return (ContentEntryImpl)MyContentEntryEditor.this.getContentEntry();
+      }
+
+      @Override
+      protected void addFolderGroupComponents() {
+        super.addFolderGroupComponents();
+        if (!myTemplateRoots.get(getContentEntry()).isEmpty()) {
+          final List<TemplateRootFolder> folders = new ArrayList<TemplateRootFolder>(myTemplateRoots.size());
+          for (VirtualFilePointer root : myTemplateRoots.get(getContentEntry())) {
+            folders.add(new TemplateRootFolder(root, getContentEntry()));
+          }
+          final JComponent sourcesComponent = createFolderGroupComponent("Template Folders",
+                                                                         folders.toArray(new ContentFolder[folders.size()]),
+                                                                         TEMPLATES_COLOR, null);
+          this.add(sourcesComponent, new GridBagConstraints(0, GridBagConstraints.RELATIVE, 1, 1, 1.0, 0.0, GridBagConstraints.NORTH,
+                                                            GridBagConstraints.HORIZONTAL, new Insets(0, 0, 10, 0), 0, 0));
+        }
+      }
+    }
+  }
+
+  private static class MyContentEntryTreeEditor extends ContentEntryTreeEditor {
+
+    private final ChangeListener myListener = new ChangeListener() {
+      @Override
+      public void stateChanged(ChangeEvent e) {
+        update();
+      }
+    };
+
+    public MyContentEntryTreeEditor(Project project, List<ModuleSourceRootEditHandler<?>> handlers) {
+      super(project, handlers);
+    }
+
+    @Override
+    public void setContentEntryEditor(ContentEntryEditor newEditor) {
+      PyContentEntriesEditor.MyContentEntryEditor existingEditor = getContentEntryEditor();
+      if (Comparing.equal(existingEditor, newEditor)) {
+        return;
+      }
+      if (existingEditor != null) {
+        existingEditor.removeListener(myListener);
+      }
+      if (newEditor != null) {
+        ((PyContentEntriesEditor.MyContentEntryEditor)newEditor).addListener(myListener);
+      }
+      super.setContentEntryEditor(newEditor);
+    }
+
+    @Override
+    public PyContentEntriesEditor.MyContentEntryEditor getContentEntryEditor() {
+      return (PyContentEntriesEditor.MyContentEntryEditor)super.getContentEntryEditor();
+    }
+
+    @Override
+    protected void createEditingActions() {
+      super.createEditingActions();
+
+      ContentEntryEditingAction a = new ContentEntryEditingAction(myTree) {
+        {
+          final Presentation templatePresentation = getTemplatePresentation();
+          templatePresentation.setText("Templates");
+          templatePresentation.setDescription("Template Folders");
+          templatePresentation.setIcon(PythonIcons.Python.TemplateRoot);
+        }
+
+        @Override
+        public boolean isSelected(AnActionEvent e) {
+          final VirtualFile[] selectedFiles = getSelectedFiles();
+          return selectedFiles.length != 0 && getContentEntryEditor().hasTemplateRoot(selectedFiles[0]);
+        }
+
+        @Override
+        public void setSelected(AnActionEvent e, boolean isSelected) {
+          final VirtualFile[] selectedFiles = getSelectedFiles();
+          assert selectedFiles.length != 0;
+
+          for (VirtualFile selectedFile : selectedFiles) {
+            boolean wasSelected = getContentEntryEditor().hasTemplateRoot(selectedFile);
+            if (isSelected) {
+              if (!wasSelected) {
+                getContentEntryEditor().addTemplateRoot(selectedFile);
+              }
+            }
+            else {
+              if (wasSelected) {
+                getContentEntryEditor().removeTemplateRoot(selectedFile);
+              }
+            }
+          }
+        }
+      };
+      myEditingActionsGroup.add(a);
+      a.registerCustomShortcutSet(new CustomShortcutSet(KeyStroke.getKeyStroke(KeyEvent.VK_R, InputEvent.ALT_MASK)), myTree);
+    }
+
+    @Override
+    protected TreeCellRenderer getContentEntryCellRenderer() {
+      return new ContentEntryTreeCellRenderer(this, getEditHandlers()) {
+        @Override
+        protected Icon updateIcon(final ContentEntry entry, final VirtualFile file, final Icon originalIcon) {
+          if (getContentEntryEditor().hasTemplateRoot(file)) {
+            return PythonIcons.Python.TemplateRoot;
+          }
+          return super.updateIcon(entry, file, originalIcon);
+        }
+      };
+    }
+  }
+  private static class TemplateRootFolder extends ContentFolderBaseImpl {
+    protected TemplateRootFolder(@NotNull VirtualFilePointer filePointer, @NotNull ContentEntryImpl contentEntry) {
+      super(filePointer, contentEntry);
+    }
+  }
+
+}
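
PyContentEntriesEditor above keeps template roots as VirtualFilePointers per content entry (toggled through the Alt+R editing action) and persists them through TemplatesService. A condensed sketch of that round-trip, assuming a hypothetical, already-configured `module`; the calls are the ones used in reset() and apply():

    import com.intellij.openapi.module.Module;
    import com.intellij.openapi.vfs.VirtualFile;
    import com.jetbrains.python.templateLanguages.TemplatesService;
    import java.util.List;

    static void roundTrip(Module module) {
      final TemplatesService service = TemplatesService.getInstance(module);
      final List<VirtualFile> folders = service.getTemplateFolders();                // read on reset()
      // ... folders edited in the content entries UI ...
      service.setTemplateFolders(folders.toArray(new VirtualFile[folders.size()]));  // stored on apply()
    }
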
diff --git a/python/src/com/jetbrains/python/psi/impl/PyAnnotationImpl.java b/python/src/com/jetbrains/python/psi/impl/PyAnnotationImpl.java
index f3c806d..f8a4d48 100644
--- a/python/src/com/jetbrains/python/psi/impl/PyAnnotationImpl.java
+++ b/python/src/com/jetbrains/python/psi/impl/PyAnnotationImpl.java
@@ -16,14 +16,16 @@
 package com.jetbrains.python.psi.impl;
 
 import com.intellij.lang.ASTNode;
-import com.intellij.psi.PsiElement;
-import com.intellij.psi.PsiPolyVariantReference;
 import com.jetbrains.python.PyElementTypes;
 import com.jetbrains.python.psi.PyAnnotation;
-import com.jetbrains.python.psi.PyClass;
 import com.jetbrains.python.psi.PyExpression;
-import com.jetbrains.python.psi.PyReferenceExpression;
 import com.jetbrains.python.psi.stubs.PyAnnotationStub;
+import com.jetbrains.python.psi.types.PyClassLikeType;
+import com.jetbrains.python.psi.types.PyNoneType;
+import com.jetbrains.python.psi.types.PyType;
+import com.jetbrains.python.psi.types.TypeEvalContext;
+import org.jetbrains.annotations.NotNull;
+import org.jetbrains.annotations.Nullable;
 
 /**
  * @author yole
@@ -37,19 +39,26 @@
     super(stub, PyElementTypes.ANNOTATION);
   }
 
+  @Nullable
   @Override
   public PyExpression getValue() {
     return findChildByClass(PyExpression.class);
   }
 
+  @Nullable
   @Override
-  public PyClass resolveToClass() {
-    PyExpression expr = getValue();
-    if (expr instanceof PyReferenceExpression) {
-      final PsiPolyVariantReference reference = ((PyReferenceExpression)expr).getReference();
-      final PsiElement result = reference.resolve();
-      if (result instanceof PyClass) {
-        return (PyClass) result;
+  public PyType getType(@NotNull TypeEvalContext context, @NotNull TypeEvalContext.Key key) {
+    final PyExpression value = getValue();
+    if (value != null) {
+      final PyType type = context.getType(value);
+      if (type instanceof PyClassLikeType) {
+        final PyClassLikeType classType = (PyClassLikeType)type;
+        if (classType.isDefinition()) {
+          return classType.toInstance();
+        }
+      }
+      else if (type instanceof PyNoneType) {
+        return type;
       }
     }
     return null;
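
With the change above, PyAnnotation exposes a type through TypeEvalContext instead of resolving to a PyClass: "-> Foo" yields an instance type of Foo, "-> None" yields PyNoneType, and anything else yields null. Callers consume it as in the sketch below, with hypothetical `annotation` and `context`; the same pattern appears in the PyFunctionImpl and PyNamedParameterImpl hunks further down:

    // Evaluate a return or parameter annotation through the type context.
    final PyType type = context.getType(annotation);
    if (type != null) {
      return type;  // e.g. an instance type for "-> Foo", PyNoneType for "-> None"
    }
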
diff --git a/python/src/com/jetbrains/python/psi/impl/PyClassImpl.java b/python/src/com/jetbrains/python/psi/impl/PyClassImpl.java
index 7538fba..bb2104f 100644
--- a/python/src/com/jetbrains/python/psi/impl/PyClassImpl.java
+++ b/python/src/com/jetbrains/python/psi/impl/PyClassImpl.java
@@ -20,7 +20,6 @@
 import com.intellij.openapi.util.Comparing;
 import com.intellij.openapi.util.Key;
 import com.intellij.openapi.util.NotNullLazyValue;
-import com.intellij.openapi.vfs.VirtualFile;
 import com.intellij.psi.*;
 import com.intellij.psi.scope.PsiScopeProcessor;
 import com.intellij.psi.search.LocalSearchScope;
@@ -187,6 +186,11 @@
         }
       }
     }
+    // Heuristic: when a superclass expression is a subscription expression, unfold Foo[Bar] to Foo
+    else if (expression instanceof PySubscriptionExpression) {
+      final PySubscriptionExpression subscriptionExpr = (PySubscriptionExpression)expression;
+      return subscriptionExpr.getOperand();
+    }
     return expression;
   }
 
@@ -237,27 +241,7 @@
 
   @Nullable
   public String getQualifiedName() {
-    String name = getName();
-    final PyClassStub stub = getStub();
-    PsiElement ancestor = stub != null ? stub.getParentStub().getPsi() : getParent();
-    while (!(ancestor instanceof PsiFile)) {
-      if (ancestor == null) return name;    // can this happen?
-      if (ancestor instanceof PyClass) {
-        name = ((PyClass)ancestor).getName() + "." + name;
-      }
-      ancestor = stub != null ? ((StubBasedPsiElement)ancestor).getStub().getParentStub().getPsi() : ancestor.getParent();
-    }
-
-    PsiFile psiFile = ((PsiFile)ancestor).getOriginalFile();
-    final PyFile builtins = PyBuiltinCache.getInstance(this).getBuiltinsFile();
-    if (!psiFile.equals(builtins)) {
-      VirtualFile vFile = psiFile.getVirtualFile();
-      if (vFile != null) {
-        final String packageName = QualifiedNameFinder.findShortestImportableName(this, vFile);
-        return packageName + "." + name;
-      }
-    }
-    return name;
+    return QualifiedNameFinder.getQualifiedName(this);
   }
 
   @Override
diff --git a/python/src/com/jetbrains/python/psi/impl/PyFunctionImpl.java b/python/src/com/jetbrains/python/psi/impl/PyFunctionImpl.java
index 2531b37..86f84ab 100644
--- a/python/src/com/jetbrains/python/psi/impl/PyFunctionImpl.java
+++ b/python/src/com/jetbrains/python/psi/impl/PyFunctionImpl.java
@@ -184,11 +184,11 @@
       }
     }
     if (context.maySwitchToAST(this) && LanguageLevel.forElement(this).isAtLeast(LanguageLevel.PYTHON30)) {
-      PyAnnotation anno = getAnnotation();
-      if (anno != null) {
-        PyClass pyClass = anno.resolveToClass();
-        if (pyClass != null) {
-          return new PyClassTypeImpl(pyClass, false);
+      final PyAnnotation annotation = getAnnotation();
+      if (annotation != null) {
+        final PyType type = context.getType(annotation);
+        if (type != null) {
+          return type;
         }
       }
     }
@@ -571,7 +571,7 @@
 
   @Override
   public PyAnnotation getAnnotation() {
-    return findChildByClass(PyAnnotation.class);
+    return getStubOrPsiChild(PyElementTypes.ANNOTATION);
   }
 
   @NotNull
@@ -699,21 +699,6 @@
   @Nullable
   @Override
   public String getQualifiedName() {
-    String name = getName();
-    if (name == null) {
-      return null;
-    }
-    PyClass containingClass = getContainingClass();
-    if (containingClass != null) {
-      return containingClass.getQualifiedName() + "." + name;
-    }
-    if (PsiTreeUtil.getStubOrPsiParent(this) instanceof PyFile) {
-      VirtualFile virtualFile = getContainingFile().getVirtualFile();
-      if (virtualFile != null) {
-        final String packageName = QualifiedNameFinder.findShortestImportableName(this, virtualFile);
-        return packageName + "." + name;
-      }
-    }
-    return null;
+    return QualifiedNameFinder.getQualifiedName(this);
   }
 }
diff --git a/python/src/com/jetbrains/python/psi/impl/PyNamedParameterImpl.java b/python/src/com/jetbrains/python/psi/impl/PyNamedParameterImpl.java
index bf792fe..cc9178c 100644
--- a/python/src/com/jetbrains/python/psi/impl/PyNamedParameterImpl.java
+++ b/python/src/com/jetbrains/python/psi/impl/PyNamedParameterImpl.java
@@ -181,11 +181,11 @@
       PyParameterList parameterList = (PyParameterList)parent;
       PyFunction func = parameterList.getContainingFunction();
       if (func != null) {
-        PyAnnotation anno = getAnnotation();
-        if (anno != null) {
-          final PyClass pyClass = anno.resolveToClass();
-          if (pyClass != null) {
-            return new PyClassTypeImpl(pyClass, false);
+        final PyAnnotation annotation = getAnnotation();
+        if (annotation != null) {
+          final PyType type = context.getType(annotation);
+          if (type != null) {
+            return type;
           }
         }
         StructuredDocString docString = func.getStructuredDocString();
diff --git a/python/src/com/jetbrains/python/psi/impl/PyTargetExpressionImpl.java b/python/src/com/jetbrains/python/psi/impl/PyTargetExpressionImpl.java
index 545e8e8..3805c8d 100644
--- a/python/src/com/jetbrains/python/psi/impl/PyTargetExpressionImpl.java
+++ b/python/src/com/jetbrains/python/psi/impl/PyTargetExpressionImpl.java
@@ -43,6 +43,7 @@
 import com.jetbrains.python.psi.impl.references.PyTargetReference;
 import com.jetbrains.python.psi.impl.stubs.CustomTargetExpressionStub;
 import com.jetbrains.python.psi.resolve.PyResolveContext;
+import com.jetbrains.python.psi.resolve.QualifiedNameFinder;
 import com.jetbrains.python.psi.resolve.RatedResolveResult;
 import com.jetbrains.python.psi.stubs.PyClassStub;
 import com.jetbrains.python.psi.stubs.PyFunctionStub;
@@ -707,4 +708,10 @@
     super.subtreeChanged();
     myQualifiedName = null;
   }
+
+  @Nullable
+  @Override
+  public String getQualifiedName() {
+    return QualifiedNameFinder.getQualifiedName(this);
+  }
 }
diff --git a/python/src/com/jetbrains/python/psi/resolve/PyResolveUtil.java b/python/src/com/jetbrains/python/psi/resolve/PyResolveUtil.java
index 838a4db..f207169 100644
--- a/python/src/com/jetbrains/python/psi/resolve/PyResolveUtil.java
+++ b/python/src/com/jetbrains/python/psi/resolve/PyResolveUtil.java
@@ -101,7 +101,13 @@
                                   @Nullable PsiElement roof) {
     // Use real context here to enable correct completion and resolve in case of PyExpressionCodeFragment!!!
     final PsiElement realContext = PyPsiUtils.getRealContext(element);
-    final ScopeOwner originalOwner = ScopeUtil.getScopeOwner(realContext);
+    final ScopeOwner originalOwner;
+    if (realContext != element && realContext instanceof PyFile) {
+      originalOwner = (PyFile)realContext;
+    }
+    else {
+      originalOwner = ScopeUtil.getScopeOwner(realContext);
+    }
     final PsiElement parent = element.getParent();
     final boolean isGlobalOrNonlocal = parent instanceof PyGlobalStatement || parent instanceof PyNonlocalStatement;
     ScopeOwner owner = originalOwner;
diff --git a/python/src/com/jetbrains/python/psi/resolve/QualifiedNameFinder.java b/python/src/com/jetbrains/python/psi/resolve/QualifiedNameFinder.java
index 97a15da..1c081ef 100644
--- a/python/src/com/jetbrains/python/psi/resolve/QualifiedNameFinder.java
+++ b/python/src/com/jetbrains/python/psi/resolve/QualifiedNameFinder.java
@@ -24,10 +24,14 @@
 import com.intellij.openapi.vfs.VirtualFile;
 import com.intellij.psi.*;
 import com.jetbrains.python.PyNames;
+import com.jetbrains.python.codeInsight.controlflow.ScopeOwner;
+import com.jetbrains.python.codeInsight.dataflow.scope.ScopeUtil;
 import com.jetbrains.python.psi.PyClass;
+import com.jetbrains.python.psi.PyElement;
 import com.jetbrains.python.psi.PyFile;
 import com.jetbrains.python.psi.PyFunction;
 import com.intellij.psi.util.QualifiedName;
+import com.jetbrains.python.psi.impl.PyBuiltinCache;
 import org.jetbrains.annotations.NotNull;
 import org.jetbrains.annotations.Nullable;
 
@@ -156,6 +160,36 @@
     return qname;
   }
 
+  @Nullable
+  public static String getQualifiedName(@NotNull PyElement element) {
+    final String name = element.getName();
+    if (name != null) {
+      final ScopeOwner owner = ScopeUtil.getScopeOwner(element);
+      final PyBuiltinCache builtinCache = PyBuiltinCache.getInstance(element);
+      if (owner instanceof PyClass) {
+        final String classQName = ((PyClass)owner).getQualifiedName();
+        if (classQName != null) {
+          return classQName + "." + name;
+        }
+      }
+      else if (owner instanceof PyFile) {
+        if (builtinCache.isBuiltin(element)) {
+          return name;
+        }
+        else {
+          final VirtualFile virtualFile = ((PyFile)owner).getVirtualFile();
+          if (virtualFile != null) {
+            final String fileQName = findShortestImportableName(element, virtualFile);
+            if (fileQName != null) {
+              return fileQName + "." + name;
+            }
+          }
+        }
+      }
+    }
+    return null;
+  }
+
   /**
    * Tries to find roots that contain given vfile, and among them the root that contains at the smallest depth.
    * For equal depth source root is in preference to library.
diff --git a/python/src/com/jetbrains/python/psi/stubs/PyClassNameIndex.java b/python/src/com/jetbrains/python/psi/stubs/PyClassNameIndex.java
index 814f450..14e1546 100644
--- a/python/src/com/jetbrains/python/psi/stubs/PyClassNameIndex.java
+++ b/python/src/com/jetbrains/python/psi/stubs/PyClassNameIndex.java
@@ -57,7 +57,7 @@
     int pos = qName.lastIndexOf(".");
     String shortName = pos > 0 ? qName.substring(pos+1) : qName;
     for (PyClass pyClass : find(shortName, project, scope)) {
-      if (pyClass.getQualifiedName().equals(qName)) {
+      if (qName.equals(pyClass.getQualifiedName())) {
         return pyClass;
       }
     }
diff --git a/python/src/com/jetbrains/python/psi/types/PyFunctionType.java b/python/src/com/jetbrains/python/psi/types/PyFunctionType.java
index 4bdbae6..fe36334 100644
--- a/python/src/com/jetbrains/python/psi/types/PyFunctionType.java
+++ b/python/src/com/jetbrains/python/psi/types/PyFunctionType.java
@@ -108,10 +108,14 @@
    */
   @Nullable
   private PyClassTypeImpl selectFakeType(@Nullable PyExpression location, @NotNull TypeEvalContext context) {
-    if (location instanceof PyReferenceExpression && isBoundMethodReference(((PyReferenceExpression)location), context)) {
-      return PyBuiltinCache.getInstance(getCallable()).getObjectType(PyNames.FAKE_METHOD);
+    final String fakeClassName;
+    if (location instanceof PyReferenceExpression && isBoundMethodReference((PyReferenceExpression)location, context)) {
+      fakeClassName = PyNames.FAKE_METHOD;
     }
-    return PyBuiltinCache.getInstance(getCallable()).getObjectType(PyNames.FAKE_FUNCTION);
+    else {
+      fakeClassName = PyNames.FAKE_FUNCTION;
+    }
+    return PyBuiltinCache.getInstance(getCallable()).getObjectType(fakeClassName);
   }
 
   private boolean isBoundMethodReference(@NotNull PyReferenceExpression location, @NotNull TypeEvalContext context) {
diff --git a/python/src/com/jetbrains/python/psi/types/PyTypeChecker.java b/python/src/com/jetbrains/python/psi/types/PyTypeChecker.java
index 9881d6c..2e7c58c 100644
--- a/python/src/com/jetbrains/python/psi/types/PyTypeChecker.java
+++ b/python/src/com/jetbrains/python/psi/types/PyTypeChecker.java
@@ -428,41 +428,39 @@
   public static AnalyzeCallResults analyzeCall(@NotNull PyBinaryExpression expr, @NotNull TypeEvalContext context) {
     final PsiPolyVariantReference ref = expr.getReference(PyResolveContext.noImplicits().withTypeEvalContext(context));
     final ResolveResult[] resolveResult;
-    if (ref != null) {
-      resolveResult = ref.multiResolve(false);
-      AnalyzeCallResults firstResults = null;
-      for (ResolveResult result : resolveResult) {
-        final PsiElement resolved = result.getElement();
-        if (resolved instanceof PyTypedElement) {
-          final PyTypedElement typedElement = (PyTypedElement)resolved;
-          final PyType type = context.getType(typedElement);
-          if (!(type instanceof PyFunctionType)) {
-            return null;
-          }
-          final Callable callable = ((PyFunctionType)type).getCallable();
-          final boolean isRight = PyNames.isRightOperatorName(typedElement.getName());
-          final PyExpression arg = isRight ? expr.getLeftExpression() : expr.getRightExpression();
-          final PyExpression receiver = isRight ? expr.getRightExpression() : expr.getLeftExpression();
-          final PyParameter[] parameters = callable.getParameterList().getParameters();
-          if (parameters.length >= 2) {
-            final PyNamedParameter param = parameters[1].getAsNamed();
-            if (arg != null && param != null) {
-              final Map<PyExpression, PyNamedParameter> arguments = new LinkedHashMap<PyExpression, PyNamedParameter>();
-              arguments.put(arg, param);
-              final AnalyzeCallResults results = new AnalyzeCallResults(callable, receiver, arguments);
-              if (firstResults == null) {
-                firstResults = results;
-              }
-              if (match(context.getType(param), context.getType(arg), context)) {
-                return results;
-              }
+    resolveResult = ref.multiResolve(false);
+    AnalyzeCallResults firstResults = null;
+    for (ResolveResult result : resolveResult) {
+      final PsiElement resolved = result.getElement();
+      if (resolved instanceof PyTypedElement) {
+        final PyTypedElement typedElement = (PyTypedElement)resolved;
+        final PyType type = context.getType(typedElement);
+        if (!(type instanceof PyFunctionType)) {
+          return null;
+        }
+        final Callable callable = ((PyFunctionType)type).getCallable();
+        final boolean isRight = PyNames.isRightOperatorName(typedElement.getName());
+        final PyExpression arg = isRight ? expr.getLeftExpression() : expr.getRightExpression();
+        final PyExpression receiver = isRight ? expr.getRightExpression() : expr.getLeftExpression();
+        final PyParameter[] parameters = callable.getParameterList().getParameters();
+        if (parameters.length >= 2) {
+          final PyNamedParameter param = parameters[1].getAsNamed();
+          if (arg != null && param != null) {
+            final Map<PyExpression, PyNamedParameter> arguments = new LinkedHashMap<PyExpression, PyNamedParameter>();
+            arguments.put(arg, param);
+            final AnalyzeCallResults results = new AnalyzeCallResults(callable, receiver, arguments);
+            if (firstResults == null) {
+              firstResults = results;
+            }
+            if (match(context.getType(param), context.getType(arg), context)) {
+              return results;
             }
           }
         }
       }
-      if (firstResults != null) {
-        return firstResults;
-      }
+    }
+    if (firstResults != null) {
+      return firstResults;
     }
     return null;
   }
@@ -471,22 +469,20 @@
   public static AnalyzeCallResults analyzeCall(@NotNull PySubscriptionExpression expr, @NotNull TypeEvalContext context) {
     final PsiReference ref = expr.getReference(PyResolveContext.noImplicits().withTypeEvalContext(context));
     final PsiElement resolved;
-    if (ref != null) {
-      resolved = ref.resolve();
-      if (resolved instanceof PyTypedElement) {
-        final PyType type = context.getType((PyTypedElement)resolved);
-        if (type instanceof PyFunctionType) {
-          final Callable callable = ((PyFunctionType)type).getCallable();
-          final PyParameter[] parameters = callable.getParameterList().getParameters();
-          if (parameters.length == 2) {
-            final PyNamedParameter param = parameters[1].getAsNamed();
-            if (param != null) {
-              final Map<PyExpression, PyNamedParameter> arguments = new LinkedHashMap<PyExpression, PyNamedParameter>();
-              final PyExpression arg = expr.getIndexExpression();
-              if (arg != null) {
-                arguments.put(arg, param);
-                return new AnalyzeCallResults(callable, expr.getOperand(), arguments);
-              }
+    resolved = ref.resolve();
+    if (resolved instanceof PyTypedElement) {
+      final PyType type = context.getType((PyTypedElement)resolved);
+      if (type instanceof PyFunctionType) {
+        final Callable callable = ((PyFunctionType)type).getCallable();
+        final PyParameter[] parameters = callable.getParameterList().getParameters();
+        if (parameters.length == 2) {
+          final PyNamedParameter param = parameters[1].getAsNamed();
+          if (param != null) {
+            final Map<PyExpression, PyNamedParameter> arguments = new LinkedHashMap<PyExpression, PyNamedParameter>();
+            final PyExpression arg = expr.getIndexExpression();
+            if (arg != null) {
+              arguments.put(arg, param);
+              return new AnalyzeCallResults(callable, expr.getOperand(), arguments);
             }
           }
         }
diff --git a/python/src/com/jetbrains/python/refactoring/PyReplaceExpressionUtil.java b/python/src/com/jetbrains/python/refactoring/PyReplaceExpressionUtil.java
index 2cebb3e..4a8d2b9 100644
--- a/python/src/com/jetbrains/python/refactoring/PyReplaceExpressionUtil.java
+++ b/python/src/com/jetbrains/python/refactoring/PyReplaceExpressionUtil.java
@@ -61,21 +61,26 @@
 
   private PyReplaceExpressionUtil() {}
 
+  /**
+   * @param oldExpr old expression that will be substituted
+   * @param newExpr new expression to substitute with
+   * @return whether the new expression should be wrapped in parentheses to preserve the original semantics
+   */
   public static boolean isNeedParenthesis(@NotNull final PyElement oldExpr, @NotNull final PyElement newExpr) {
     final PyElement parentExpr = (PyElement)oldExpr.getParent();
     if (parentExpr instanceof PyArgumentList) {
       return newExpr instanceof PyTupleExpression;
     }
-    if (!(parentExpr instanceof PyExpression)) {
+    if (parentExpr instanceof PyParenthesizedExpression || !(parentExpr instanceof PyExpression)) {
       return false;
     }
-    int newPriority = getExpressionPriority(newExpr);
-    int parentPriority = getExpressionPriority(parentExpr);
+    final int newPriority = getExpressionPriority(newExpr);
+    final int parentPriority = getExpressionPriority(parentExpr);
     if (parentPriority > newPriority) {
       return true;
     } else if (parentPriority == newPriority && parentPriority != 0) {
       if (parentExpr instanceof PyBinaryExpression) {
-        PyBinaryExpression binaryExpression = (PyBinaryExpression)parentExpr;
+        final PyBinaryExpression binaryExpression = (PyBinaryExpression)parentExpr;
         if (isNotAssociative(binaryExpression) && oldExpr.equals(binaryExpression.getRightExpression())) {
           return true;
         }
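
The documented helper decides whether the substituted expression has to be parenthesized to preserve precedence; the new early return also skips wrapping when the replaced expression already sits inside a PyParenthesizedExpression. The inline handler below uses it along these lines, with hypothetical `oldExpr`, `newValue` and `generator` (a PyElementGenerator):

    // Hypothetical: inlining `a + b` in place of `x` inside `x * 2`.
    // Multiplication binds tighter than addition, so the result must read (a + b) * 2.
    if (PyReplaceExpressionUtil.isNeedParenthesis(oldExpr, newValue)) {
      oldExpr.replace(generator.createExpressionFromText("(" + newValue.getText() + ")"));
    }
    else {
      oldExpr.replace(newValue);
    }
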
diff --git a/python/src/com/jetbrains/python/refactoring/inline/PyInlineLocalHandler.java b/python/src/com/jetbrains/python/refactoring/inline/PyInlineLocalHandler.java
index 575399c..4f2dc65 100644
--- a/python/src/com/jetbrains/python/refactoring/inline/PyInlineLocalHandler.java
+++ b/python/src/com/jetbrains/python/refactoring/inline/PyInlineLocalHandler.java
@@ -29,18 +29,19 @@
 import com.intellij.openapi.extensions.Extensions;
 import com.intellij.openapi.project.Project;
 import com.intellij.openapi.util.Pair;
+import com.intellij.openapi.util.TextRange;
 import com.intellij.openapi.wm.WindowManager;
-import com.intellij.psi.PsiElement;
-import com.intellij.psi.PsiFile;
-import com.intellij.psi.PsiReference;
-import com.intellij.psi.PsiWhiteSpace;
+import com.intellij.psi.*;
+import com.intellij.psi.codeStyle.CodeStyleManager;
 import com.intellij.psi.search.GlobalSearchScope;
 import com.intellij.psi.search.searches.ReferencesSearch;
 import com.intellij.psi.util.PsiTreeUtil;
 import com.intellij.refactoring.RefactoringBundle;
 import com.intellij.refactoring.util.CommonRefactoringUtil;
 import com.intellij.refactoring.util.RefactoringMessageDialog;
+import com.intellij.util.Function;
 import com.intellij.util.Query;
+import com.intellij.util.containers.ContainerUtil;
 import com.jetbrains.python.PyBundle;
 import com.jetbrains.python.PyTokenTypes;
 import com.jetbrains.python.PythonLanguage;
@@ -51,6 +52,7 @@
 import com.jetbrains.python.psi.impl.PyPsiUtils;
 import com.jetbrains.python.refactoring.PyDefUseUtil;
 import com.jetbrains.python.refactoring.PyReplaceExpressionUtil;
+import org.jetbrains.annotations.NotNull;
 import org.jetbrains.annotations.Nullable;
 
 import java.util.ArrayList;
@@ -61,7 +63,7 @@
  */
 public class PyInlineLocalHandler extends InlineActionHandler {
   private static final Logger LOG = Logger.getInstance(PyInlineLocalHandler.class.getName());
-  
+
   private static final String REFACTORING_NAME = RefactoringBundle.message("inline.variable.title");
   private static final Pair<PyStatement, Boolean> EMPTY_DEF_RESULT = Pair.create(null, false);
   private static final String HELP_ID = "python.reference.inline";
@@ -111,21 +113,21 @@
     final PyStatement def = defPair.first;
     if (def == null || getValue(def) == null){
       final String key = defPair.second ? "variable.has.no.dominating.definition" : "variable.has.no.initializer";
-      String message = RefactoringBundle.getCannotRefactorMessage(RefactoringBundle.message(key, localName));
+      final String message = RefactoringBundle.getCannotRefactorMessage(RefactoringBundle.message(key, localName));
       CommonRefactoringUtil.showErrorHint(project, editor, message, REFACTORING_NAME, HELP_ID);
       return;
     }
 
     if (def instanceof PyAssignmentStatement && ((PyAssignmentStatement)def).getTargets().length > 1){
       highlightManager.addOccurrenceHighlights(editor, new PsiElement[] {def}, writeAttributes, true, null);
-      String message = RefactoringBundle.getCannotRefactorMessage(PyBundle.message("refactoring.inline.local.multiassignment", localName));
+      final String message = RefactoringBundle.getCannotRefactorMessage(PyBundle.message("refactoring.inline.local.multiassignment", localName));
       CommonRefactoringUtil.showErrorHint(project, editor, message, REFACTORING_NAME, HELP_ID);
       return;
     }
 
     final PsiElement[] refsToInline = PyDefUseUtil.getPostRefs(containerBlock, local, getObject(def));
     if (refsToInline.length == 0) {
-      String message = RefactoringBundle.message("variable.is.never.used", localName);
+      final String message = RefactoringBundle.message("variable.is.never.used", localName);
       CommonRefactoringUtil.showErrorHint(project, editor, message, REFACTORING_NAME, HELP_ID);
       return;
     }
@@ -133,11 +135,10 @@
     final TextAttributes attributes = EditorColorsManager.getInstance().getGlobalScheme().getAttributes(EditorColors.SEARCH_RESULT_ATTRIBUTES);
     if (editor != null && !ApplicationManager.getApplication().isUnitTestMode()) {
       highlightManager.addOccurrenceHighlights(editor, refsToInline, attributes, true, null);
-      int occurrencesCount = refsToInline.length;
-      String occurencesString = RefactoringBundle.message("occurrences.string", occurrencesCount);
-      final String promptKey = "inline.local.variable.prompt";
-      final String question = RefactoringBundle.message(promptKey, localName) + " " + occurencesString;
-      RefactoringMessageDialog dialog = new RefactoringMessageDialog(REFACTORING_NAME, question, HELP_ID, "OptionPane.questionIcon", true, project);
+      final int occurrencesCount = refsToInline.length;
+      final String occurrencesString = RefactoringBundle.message("occurrences.string", occurrencesCount);
+      final String question = RefactoringBundle.message("inline.local.variable.prompt", localName) + " " + occurrencesString;
+      final RefactoringMessageDialog dialog = new RefactoringMessageDialog(REFACTORING_NAME, question, HELP_ID, "OptionPane.questionIcon", true, project);
       dialog.show();
       if (!dialog.isOK()){
         WindowManager.getInstance().getStatusBar(project).setInfo(RefactoringBundle.message("press.escape.to.remove.the.highlighting"));
@@ -145,11 +146,11 @@
       }
     }
 
-    PsiFile workingFile = local.getContainingFile();
+    final PsiFile workingFile = local.getContainingFile();
     for (PsiElement ref : refsToInline) {
       final PsiFile otherFile = ref.getContainingFile();
       if (!otherFile.equals(workingFile)) {
-        String message = RefactoringBundle.message("variable.is.referenced.in.multiple.files", localName);
+        final String message = RefactoringBundle.message("variable.is.referenced.in.multiple.files", localName);
         CommonRefactoringUtil.showErrorHint(project, editor, message, REFACTORING_NAME, HELP_ID);
         return;
       }
@@ -170,9 +171,8 @@
         if (editor != null) {
           highlightManager.addOccurrenceHighlights(editor, defs, writeAttributes, true, null);
           highlightManager.addOccurrenceHighlights(editor, new PsiElement[]{ref}, attributes, true, null);
-          String message =
-            RefactoringBundle
-              .getCannotRefactorMessage(RefactoringBundle.message("variable.is.accessed.for.writing.and.used.with.inlined", localName));
+          final String message = RefactoringBundle.getCannotRefactorMessage(
+            RefactoringBundle.message("variable.is.accessed.for.writing.and.used.with.inlined", localName));
           CommonRefactoringUtil.showErrorHint(project, editor, message, REFACTORING_NAME, HELP_ID);
         }
         WindowManager.getInstance().getStatusBar(project).setInfo(RefactoringBundle.message("press.escape.to.remove.the.highlighting"));
@@ -185,7 +185,7 @@
       public void run() {
         ApplicationManager.getApplication().runWriteAction(new Runnable() {
           public void run() {
-            PsiElement[] exprs = new PsiElement[refsToInline.length];
+            final PsiElement[] exprs = new PsiElement[refsToInline.length];
             final PyExpression value = prepareValue(def, localName, project);
             final PyExpression withParent = PyElementGenerator.getInstance(project).createExpressionFromText("(" + value.getText() + ")");
             final PsiElement lastChild = def.getLastChild();
@@ -195,7 +195,7 @@
             }
 
             for (int i = 0, refsToInlineLength = refsToInline.length; i < refsToInlineLength; i++) {
-              PsiElement element = refsToInline[i];
+              final PsiElement element = refsToInline[i];
               if (PyReplaceExpressionUtil.isNeedParenthesis((PyExpression)element, value)) {
                 exprs[i] = element.replace(withParent);
               } else {
@@ -207,6 +207,16 @@
               PyPsiUtils.removeElements(next);
             }
             PyPsiUtils.removeElements(def);
+
+            final List<TextRange> ranges = ContainerUtil.mapNotNull(exprs, new Function<PsiElement, TextRange>() {
+              @Override
+              public TextRange fun(PsiElement element) {
+                final PyStatement parentalStatement = PsiTreeUtil.getParentOfType(element, PyStatement.class, false);
+                return parentalStatement != null ? parentalStatement.getTextRange() : null;
+              }
+            });
+            CodeStyleManager.getInstance(project).reformatText(workingFile, ranges);
+
             if (editor != null && !ApplicationManager.getApplication().isUnitTestMode()) {
               highlightManager.addOccurrenceHighlights(editor, exprs, attributes, true, null);
               WindowManager.getInstance().getStatusBar(project)
@@ -262,7 +272,7 @@
   }
 
   @Nullable
-  private static PyExpression getValue(PyStatement def) {
+  private static PyExpression getValue(@Nullable PyStatement def) {
     if (def == null) return null;
     if (def instanceof PyAssignmentStatement) {
       return ((PyAssignmentStatement)def).getAssignedValue();
@@ -271,7 +281,7 @@
   }
 
   @Nullable
-  private static PyExpression getObject(PyStatement def) {
+  private static PyExpression getObject(@Nullable PyStatement def) {
     if (def == null) return null;
     if (def instanceof PyAssignmentStatement) {
       return ((PyAssignmentStatement)def).getTargets()[0];
@@ -279,12 +289,15 @@
     return ((PyAugAssignmentStatement)def).getTarget();
   }
 
-  private static PyExpression prepareValue(PyStatement def, String localName, Project project) {
+  @NotNull
+  private static PyExpression prepareValue(@NotNull PyStatement def, @NotNull String localName, @NotNull Project project) {
     final PyExpression value = getValue(def);
     assert value != null;
     if (def instanceof PyAugAssignmentStatementImpl) {
       final PyAugAssignmentStatementImpl expression = (PyAugAssignmentStatementImpl)def;
-      String op = expression.getOperation().getText().replace('=', ' ');
+      final PsiElement operation = expression.getOperation();
+      assert operation != null;
+      final String op = operation.getText().replace('=', ' ');
       return PyElementGenerator.getInstance(project).createExpressionFromText(localName + " " + op + value.getText() + ")");
     }
     return value;
diff --git a/python/src/com/jetbrains/python/refactoring/surround/surrounders/statements/PyWithIfElseSurrounder.java b/python/src/com/jetbrains/python/refactoring/surround/surrounders/statements/PyWithIfElseSurrounder.java
index 76c4f49..1e1b6d0 100644
--- a/python/src/com/jetbrains/python/refactoring/surround/surrounders/statements/PyWithIfElseSurrounder.java
+++ b/python/src/com/jetbrains/python/refactoring/surround/surrounders/statements/PyWithIfElseSurrounder.java
@@ -44,7 +44,6 @@
       PyElementGenerator.getInstance(project).createFromText(LanguageLevel.getDefault(), PyIfStatement.class, "if True:\n    pass\nelse:    pass\n");
     final PsiElement parent = elements[0].getParent();
     final PyStatementList statementList = ifStatement.getIfPart().getStatementList();
-    assert statementList != null;
     statementList.addRange(elements[0], elements[elements.length - 1]);
     statementList.deleteChildRange(statementList.getFirstChild(), statementList.getFirstChild());
 
diff --git a/python/src/com/jetbrains/python/refactoring/surround/surrounders/statements/PyWithIfSurrounder.java b/python/src/com/jetbrains/python/refactoring/surround/surrounders/statements/PyWithIfSurrounder.java
index 5ccca3d..ac93e60 100644
--- a/python/src/com/jetbrains/python/refactoring/surround/surrounders/statements/PyWithIfSurrounder.java
+++ b/python/src/com/jetbrains/python/refactoring/surround/surrounders/statements/PyWithIfSurrounder.java
@@ -22,10 +22,7 @@
 import com.intellij.openapi.util.TextRange;
 import com.intellij.psi.PsiElement;
 import com.intellij.util.IncorrectOperationException;
-import com.jetbrains.python.psi.LanguageLevel;
-import com.jetbrains.python.psi.PyElementGenerator;
-import com.jetbrains.python.psi.PyIfStatement;
-import com.jetbrains.python.psi.PyStatementList;
+import com.jetbrains.python.psi.*;
 import org.jetbrains.annotations.NotNull;
 import org.jetbrains.annotations.Nullable;
 
@@ -40,7 +37,6 @@
     PyIfStatement ifStatement = PyElementGenerator.getInstance(project).createFromText(LanguageLevel.getDefault(), PyIfStatement.class, "if True:\n    ");
     final PsiElement parent = elements[0].getParent();
     final PyStatementList statementList = ifStatement.getIfPart().getStatementList();
-    assert statementList != null;
     statementList.addRange(elements[0], elements[elements.length - 1]);
     ifStatement = (PyIfStatement) parent.addBefore(ifStatement, elements[0]);
     parent.deleteChildRange(elements[0], elements[elements.length - 1]);
@@ -49,7 +45,8 @@
     if (ifStatement == null) {
       return null;
     }
-    return ifStatement.getIfPart().getCondition().getTextRange();
+    final PyExpression condition = ifStatement.getIfPart().getCondition();
+    return condition != null ? condition.getTextRange() : null;
   }
 
   public String getTemplateDescription() {
diff --git a/python/src/com/jetbrains/python/refactoring/surround/surrounders/statements/PyWithTryExceptSurrounder.java b/python/src/com/jetbrains/python/refactoring/surround/surrounders/statements/PyWithTryExceptSurrounder.java
index 25a9f26..7466def 100644
--- a/python/src/com/jetbrains/python/refactoring/surround/surrounders/statements/PyWithTryExceptSurrounder.java
+++ b/python/src/com/jetbrains/python/refactoring/surround/surrounders/statements/PyWithTryExceptSurrounder.java
@@ -44,7 +44,6 @@
       createFromText(LanguageLevel.getDefault(), PyTryExceptStatement.class, getTemplate());
     final PsiElement parent = elements[0].getParent();
     final PyStatementList statementList = tryStatement.getTryPart().getStatementList();
-    assert statementList != null;
     statementList.addRange(elements[0], elements[elements.length - 1]);
     statementList.getFirstChild().delete();
     tryStatement = (PyTryExceptStatement)parent.addBefore(tryStatement, elements[0]);
@@ -71,7 +70,6 @@
   protected TextRange getResultRange(PyTryExceptStatement tryStatement) {
     final PyExceptPart part = tryStatement.getExceptParts()[0];
     final PyStatementList list = part.getStatementList();
-    assert list != null;
     return list.getTextRange();
   }
 
diff --git a/python/src/com/jetbrains/python/refactoring/surround/surrounders/statements/PyWithTryFinallySurrounder.java b/python/src/com/jetbrains/python/refactoring/surround/surrounders/statements/PyWithTryFinallySurrounder.java
index d457e32..532fad5 100644
--- a/python/src/com/jetbrains/python/refactoring/surround/surrounders/statements/PyWithTryFinallySurrounder.java
+++ b/python/src/com/jetbrains/python/refactoring/surround/surrounders/statements/PyWithTryFinallySurrounder.java
@@ -42,7 +42,6 @@
     final PyFinallyPart finallyPart = tryStatement.getFinallyPart();
     assert finallyPart != null;
     final PyStatementList statementList = finallyPart.getStatementList();
-    assert statementList != null;
     return statementList.getTextRange();
   }
 }
diff --git a/python/src/com/jetbrains/python/refactoring/surround/surrounders/statements/PyWithWhileSurrounder.java b/python/src/com/jetbrains/python/refactoring/surround/surrounders/statements/PyWithWhileSurrounder.java
index 4ce6639..a564d4c 100644
--- a/python/src/com/jetbrains/python/refactoring/surround/surrounders/statements/PyWithWhileSurrounder.java
+++ b/python/src/com/jetbrains/python/refactoring/surround/surrounders/statements/PyWithWhileSurrounder.java
@@ -41,7 +41,6 @@
       PyElementGenerator.getInstance(project).createFromText(LanguageLevel.getDefault(), PyWhileStatement.class, "while True:\n    ");
     final PsiElement parent = elements[0].getParent();
     final PyStatementList statementList = whileStatement.getWhilePart().getStatementList();
-    assert statementList != null;
     statementList.addRange(elements[0], elements[elements.length - 1]);
     whileStatement = (PyWhileStatement) parent.addBefore(whileStatement, elements[0]);
     parent.deleteChildRange(elements[0], elements[elements.length - 1]);
diff --git a/python/src/com/jetbrains/python/run/AbstractPythonRunConfiguration.java b/python/src/com/jetbrains/python/run/AbstractPythonRunConfiguration.java
index 339f91e..0927d2e 100644
--- a/python/src/com/jetbrains/python/run/AbstractPythonRunConfiguration.java
+++ b/python/src/com/jetbrains/python/run/AbstractPythonRunConfiguration.java
@@ -37,10 +37,10 @@
 import com.intellij.openapi.vfs.VirtualFile;
 import com.intellij.psi.PsiElement;
 import com.intellij.psi.util.PsiTreeUtil;
+import com.intellij.util.PathMappingSettings;
 import com.intellij.util.PlatformUtils;
 import com.jetbrains.python.PyBundle;
 import com.jetbrains.python.PythonModuleTypeBase;
-import com.intellij.util.PathMappingSettings;
 import com.jetbrains.python.psi.PyClass;
 import com.jetbrains.python.psi.PyFunction;
 import com.jetbrains.python.sdk.PythonEnvUtil;
@@ -64,6 +64,7 @@
   private boolean myUseModuleSdk;
   private boolean myAddContentRoots = true;
   private boolean myAddSourceRoots = true;
+
   protected PathMappingSettings myMappingSettings;
 
   public AbstractPythonRunConfiguration(Project project, final ConfigurationFactory factory) {
diff --git a/python/src/com/jetbrains/python/run/PythonCommandLineState.java b/python/src/com/jetbrains/python/run/PythonCommandLineState.java
index 220939e..b1c768f 100644
--- a/python/src/com/jetbrains/python/run/PythonCommandLineState.java
+++ b/python/src/com/jetbrains/python/run/PythonCommandLineState.java
@@ -141,7 +141,8 @@
   protected void addTracebackFilter(Project project, ConsoleView consoleView, ProcessHandler processHandler) {
     if (PySdkUtil.isRemote(myConfig.getSdk())) {
       assert processHandler instanceof RemoteProcessHandlerBase;
-      consoleView.addMessageFilter(new PyRemoteTracebackFilter(project, myConfig.getWorkingDirectory(), (RemoteProcessHandlerBase) processHandler));
+      consoleView
+        .addMessageFilter(new PyRemoteTracebackFilter(project, myConfig.getWorkingDirectory(), (RemoteProcessHandlerBase)processHandler));
     }
     else {
       consoleView.addMessageFilter(new PythonTracebackFilter(project, myConfig.getWorkingDirectory()));
@@ -174,7 +175,8 @@
     GeneralCommandLine commandLine = generateCommandLine(patchers);
 
     // Extend command line
-    PythonRunConfigurationExtensionsManager.getInstance().patchCommandLine(myConfig, getRunnerSettings(), commandLine, getEnvironment().getRunner().getRunnerId());
+    PythonRunConfigurationExtensionsManager.getInstance()
+      .patchCommandLine(myConfig, getRunnerSettings(), commandLine, getEnvironment().getRunner().getRunnerId());
     Sdk sdk = PythonSdkType.findSdkByPath(myConfig.getInterpreterPath());
     final ProcessHandler processHandler;
     if (PySdkUtil.isRemote(sdk)) {
diff --git a/python/src/com/jetbrains/python/run/PythonRunConfiguration.java b/python/src/com/jetbrains/python/run/PythonRunConfiguration.java
index 118c27b..5efc8d4 100644
--- a/python/src/com/jetbrains/python/run/PythonRunConfiguration.java
+++ b/python/src/com/jetbrains/python/run/PythonRunConfiguration.java
@@ -47,8 +47,10 @@
   public static final String SCRIPT_NAME = "SCRIPT_NAME";
   public static final String PARAMETERS = "PARAMETERS";
   public static final String MULTIPROCESS = "MULTIPROCESS";
+  public static final String SHOW_COMMAND_LINE = "SHOW_COMMAND_LINE";
   private String myScriptName;
   private String myScriptParameters;
+  private boolean myShowCommandLineAfterwards = false;
 
   protected PythonRunConfiguration(Project project, ConfigurationFactory configurationFactory) {
     super(project, configurationFactory);
@@ -98,17 +100,27 @@
     myScriptParameters = scriptParameters;
   }
 
+  public boolean showCommandLineAfterwards() {
+    return myShowCommandLineAfterwards;
+  }
+
+  public void setShowCommandLineAfterwards(boolean showCommandLineAfterwards) {
+    myShowCommandLineAfterwards = showCommandLineAfterwards;
+  }
+
   public void readExternal(Element element) throws InvalidDataException {
     PathMacroManager.getInstance(getProject()).expandPaths(element);
     super.readExternal(element);
     myScriptName = JDOMExternalizerUtil.readField(element, SCRIPT_NAME);
     myScriptParameters = JDOMExternalizerUtil.readField(element, PARAMETERS);
+    myShowCommandLineAfterwards = Boolean.parseBoolean(JDOMExternalizerUtil.readField(element, SHOW_COMMAND_LINE, "false"));
   }
 
   public void writeExternal(Element element) throws WriteExternalException {
     super.writeExternal(element);
     JDOMExternalizerUtil.writeField(element, SCRIPT_NAME, myScriptName);
     JDOMExternalizerUtil.writeField(element, PARAMETERS, myScriptParameters);
+    JDOMExternalizerUtil.writeField(element, SHOW_COMMAND_LINE, Boolean.toString(myShowCommandLineAfterwards));
     PathMacroManager.getInstance(getProject()).collapsePathsRecursively(element);
   }
 
@@ -120,6 +132,7 @@
     AbstractPythonRunConfiguration.copyParams(source.getBaseParams(), target.getBaseParams());
     target.setScriptName(source.getScriptName());
     target.setScriptParameters(source.getScriptParameters());
+    target.setShowCommandLineAfterwards(source.showCommandLineAfterwards());
   }
 
   @Override
diff --git a/python/src/com/jetbrains/python/run/PythonRunConfigurationForm.form b/python/src/com/jetbrains/python/run/PythonRunConfigurationForm.form
index 1ba2451..33e45b9 100644
--- a/python/src/com/jetbrains/python/run/PythonRunConfigurationForm.form
+++ b/python/src/com/jetbrains/python/run/PythonRunConfigurationForm.form
@@ -1,6 +1,6 @@
 <?xml version="1.0" encoding="UTF-8"?>
 <form xmlns="http://www.intellij.com/uidesigner/form/" version="1" bind-to-class="com.jetbrains.python.run.PythonRunConfigurationForm">
-  <grid id="27dc6" binding="myRootPanel" layout-manager="GridLayoutManager" row-count="5" column-count="2" same-size-horizontally="false" same-size-vertically="false" hgap="-1" vgap="-1">
+  <grid id="27dc6" binding="myRootPanel" layout-manager="GridLayoutManager" row-count="6" column-count="2" same-size-horizontally="false" same-size-vertically="false" hgap="-1" vgap="-1">
     <margin top="0" left="0" bottom="0" right="0"/>
     <constraints>
       <xy x="20" y="20" width="507" height="400"/>
@@ -44,7 +44,7 @@
       </component>
       <vspacer id="f8c96">
         <constraints>
-          <grid row="4" column="1" row-span="1" col-span="1" vsize-policy="6" hsize-policy="1" anchor="0" fill="2" indent="0" use-parent-layout="false"/>
+          <grid row="5" column="1" row-span="1" col-span="1" vsize-policy="6" hsize-policy="1" anchor="0" fill="2" indent="0" use-parent-layout="false"/>
         </constraints>
       </vspacer>
       <grid id="9b4b8" binding="myCommonOptionsPlaceholder" layout-manager="BorderLayout" hgap="0" vgap="0">
@@ -63,6 +63,15 @@
         <border type="none"/>
         <children/>
       </grid>
+      <component id="99b13" class="com.intellij.ui.components.JBCheckBox" binding="myShowCommandLineCheckbox">
+        <constraints>
+          <grid row="4" column="0" row-span="1" col-span="2" vsize-policy="0" hsize-policy="0" anchor="8" fill="0" indent="0" use-parent-layout="false"/>
+        </constraints>
+        <properties>
+          <selected value="true"/>
+          <text value="Show command line afterwards"/>
+        </properties>
+      </component>
     </children>
   </grid>
 </form>
diff --git a/python/src/com/jetbrains/python/run/PythonRunConfigurationForm.java b/python/src/com/jetbrains/python/run/PythonRunConfigurationForm.java
index da041f9..cad905f 100644
--- a/python/src/com/jetbrains/python/run/PythonRunConfigurationForm.java
+++ b/python/src/com/jetbrains/python/run/PythonRunConfigurationForm.java
@@ -25,6 +25,7 @@
 import com.intellij.openapi.vfs.VirtualFile;
 import com.intellij.ui.PanelWithAnchor;
 import com.intellij.ui.RawCommandLineEditor;
+import com.intellij.ui.components.JBCheckBox;
 import com.intellij.ui.components.JBLabel;
 import com.jetbrains.python.debugger.PyDebuggerOptionsProvider;
 import org.jetbrains.annotations.NotNull;
@@ -44,6 +45,7 @@
   private final AbstractPyCommonOptionsForm myCommonOptionsForm;
   private JComponent anchor;
   private final Project myProject;
+  private JBCheckBox myShowCommandLineCheckbox;
 
   public PythonRunConfigurationForm(PythonRunConfiguration configuration) {
     myCommonOptionsForm = PyCommonOptionsFormFactory.getInstance().createForm(configuration.getCommonOptionsFormData());
@@ -105,6 +107,16 @@
   }
 
   @Override
+  public boolean showCommandLineAfterwards() {
+    return myShowCommandLineCheckbox.isSelected();
+  }
+
+  @Override
+  public void setShowCommandLineAfterwards(boolean showCommandLineAfterwards) {
+    myShowCommandLineCheckbox.setSelected(showCommandLineAfterwards);
+  }
+
+  @Override
   public JComponent getAnchor() {
     return anchor;
   }
diff --git a/python/src/com/jetbrains/python/run/PythonScriptCommandLineState.java b/python/src/com/jetbrains/python/run/PythonScriptCommandLineState.java
index 81d2daa..cd92fdb 100644
--- a/python/src/com/jetbrains/python/run/PythonScriptCommandLineState.java
+++ b/python/src/com/jetbrains/python/run/PythonScriptCommandLineState.java
@@ -15,11 +15,37 @@
  */
 package com.jetbrains.python.run;
 
+import com.google.common.collect.Lists;
+import com.intellij.execution.*;
 import com.intellij.execution.configurations.GeneralCommandLine;
 import com.intellij.execution.configurations.ParametersList;
 import com.intellij.execution.configurations.ParamsGroup;
+import com.intellij.execution.executors.DefaultDebugExecutor;
+import com.intellij.execution.process.CommandLineArgumentsProvider;
+import com.intellij.execution.process.OSProcessHandler;
+import com.intellij.execution.process.ProcessHandler;
 import com.intellij.execution.runners.ExecutionEnvironment;
+import com.intellij.execution.ui.RunContentDescriptor;
+import com.intellij.openapi.actionSystem.AnAction;
+import com.intellij.openapi.progress.ProgressIndicator;
+import com.intellij.openapi.progress.ProgressManager;
+import com.intellij.openapi.progress.Task;
+import com.intellij.openapi.project.Project;
+import com.intellij.openapi.projectRoots.Sdk;
+import com.intellij.openapi.util.io.FileUtil;
 import com.intellij.openapi.util.text.StringUtil;
+import com.intellij.util.ArrayUtil;
+import com.intellij.util.ui.UIUtil;
+import com.jetbrains.python.PythonHelpersLocator;
+import com.jetbrains.python.console.PyConsoleType;
+import com.jetbrains.python.console.PydevConsoleRunner;
+import org.jetbrains.annotations.NotNull;
+import org.jetbrains.annotations.Nullable;
+
+import java.util.ArrayList;
+import java.util.Arrays;
+import java.util.List;
+import java.util.Map;
 
 /**
  * @author yole
@@ -32,6 +58,34 @@
     myConfig = runConfiguration;
   }
 
+  @NotNull
+  @Override
+  public ExecutionResult execute(Executor executor, final CommandLinePatcher... patchers) throws ExecutionException {
+    if (myConfig.showCommandLineAfterwards()) {
+      if (DefaultDebugExecutor.EXECUTOR_ID.equals(executor.getId())) {
+        return super.execute(executor, ArrayUtil.append(patchers, new CommandLinePatcher() {
+          @Override
+          public void patchCommandLine(GeneralCommandLine commandLine) {
+            commandLine.getParametersList().getParamsGroup(PythonCommandLineState.GROUP_DEBUGGER).addParameterAt(1, "--cmd-line");
+          }
+        }));
+      }
+
+      PydevConsoleRunner runner =
+        new PythonScriptWithConsoleRunner(myConfig.getProject(), myConfig.getSdk(), PyConsoleType.PYTHON, myConfig.getWorkingDirectory(),
+                                          myConfig.getEnvs(), patchers);
+
+      runner.runSync();
+
+      List<AnAction> actions = Lists.newArrayList(createActions(runner.getConsoleView(), runner.getProcessHandler()));
+
+      return new DefaultExecutionResult(runner.getConsoleView(), runner.getProcessHandler(), actions.toArray(new AnAction[actions.size()]));
+    }
+    else {
+      return super.execute(executor, patchers);
+    }
+  }
+
   @Override
   protected void buildCommandLineParameters(GeneralCommandLine commandLine) {
     ParametersList parametersList = commandLine.getParametersList();
@@ -53,4 +107,68 @@
     }
   }
 
+  /**
+   * @author traff
+   */
+  public class PythonScriptWithConsoleRunner extends PydevConsoleRunner {
+
+    private CommandLinePatcher[] myPatchers;
+
+    public PythonScriptWithConsoleRunner(@NotNull Project project,
+                                         @NotNull Sdk sdk,
+                                         @NotNull PyConsoleType consoleType,
+                                         @Nullable String workingDir,
+                                         Map<String, String> environmentVariables,
+                                         CommandLinePatcher[] patchers,
+                                         String... statementsToExecute) {
+      super(project, sdk, consoleType, workingDir, environmentVariables, statementsToExecute);
+      myPatchers = patchers;
+    }
+
+    @Override
+    protected void createContentDescriptorAndActions() {
+      AnAction a = createConsoleExecAction(myConsoleExecuteActionHandler);
+      registerActionShortcuts(Lists.newArrayList(a), getConsoleView().getConsole().getConsoleEditor().getComponent());
+    }
+
+    @Override
+    protected CommandLineArgumentsProvider createCommandLineArgumentsProvider(final Sdk sdk,
+                                                                              final Map<String, String> environmentVariables,
+                                                                              int[] ports) {
+      final ArrayList<String> args = new ArrayList<String>();
+      args.add(sdk.getHomePath());
+      final String versionString = sdk.getVersionString();
+      if (versionString == null || !versionString.toLowerCase().contains("jython")) {
+        args.add("-u");
+      }
+      args.add(FileUtil.toSystemDependentName(PythonHelpersLocator.getHelperPath("pydev/pydev_run_in_console.py")));
+      for (int port : ports) {
+        args.add(String.valueOf(port));
+      }
+
+      try {
+        GeneralCommandLine cmd = generateCommandLine(myPatchers);
+        args.addAll(cmd.getParametersList().getList());
+      }
+      catch (Exception e) {
+        //pass
+      }
+      return new CommandLineArgumentsProvider() {
+        @Override
+        public String[] getArguments() {
+          return ArrayUtil.toStringArray(args);
+        }
+
+        @Override
+        public boolean passParentEnvs() {
+          return false;
+        }
+
+        @Override
+        public Map<String, String> getAdditionalEnvs() {
+          return addDefaultEnvironments(sdk, environmentVariables);
+        }
+      };
+    }
+  }
 }
diff --git a/python/src/com/jetbrains/python/sdk/PySdkListCellRenderer.java b/python/src/com/jetbrains/python/sdk/PySdkListCellRenderer.java
index 1a42621..e165e59 100644
--- a/python/src/com/jetbrains/python/sdk/PySdkListCellRenderer.java
+++ b/python/src/com/jetbrains/python/sdk/PySdkListCellRenderer.java
@@ -20,6 +20,7 @@
 import com.intellij.openapi.projectRoots.SdkModificator;
 import com.intellij.openapi.projectRoots.SdkType;
 import com.intellij.openapi.util.IconLoader;
+import com.intellij.openapi.util.io.FileUtil;
 import com.intellij.ui.LayeredIcon;
 import com.intellij.ui.ListCellRendererWrapper;
 import com.jetbrains.python.sdk.flavors.PythonSdkFlavor;
@@ -113,7 +114,7 @@
       }
     }
     else if (new File(name).exists()) {
-      name = "..." + File.separator + new File(name).getParentFile().getParentFile().getName();
+      name = FileUtil.getLocationRelativeToUserHome(name);
     }
     return name;
   }
diff --git a/python/src/com/jetbrains/python/sdk/flavors/WinPythonSdkFlavor.java b/python/src/com/jetbrains/python/sdk/flavors/WinPythonSdkFlavor.java
index 8e75e63..9d22af8 100644
--- a/python/src/com/jetbrains/python/sdk/flavors/WinPythonSdkFlavor.java
+++ b/python/src/com/jetbrains/python/sdk/flavors/WinPythonSdkFlavor.java
@@ -15,7 +15,9 @@
  */
 package com.jetbrains.python.sdk.flavors;
 
+import com.google.common.collect.ImmutableMap;
 import com.intellij.openapi.util.io.FileUtil;
+import com.intellij.openapi.util.io.WindowsRegistryUtil;
 import com.intellij.openapi.util.text.StringUtil;
 import com.intellij.openapi.vfs.LocalFileSystem;
 import com.intellij.openapi.vfs.VirtualFile;
@@ -23,15 +25,17 @@
 import com.jetbrains.python.PythonHelpersLocator;
 
 import java.io.File;
-import java.util.Collection;
-import java.util.Set;
-import java.util.TreeSet;
+import java.util.*;
 
 /**
  * @author yole
  */
 public class WinPythonSdkFlavor extends CPythonSdkFlavor {
   public static WinPythonSdkFlavor INSTANCE = new WinPythonSdkFlavor();
+  private static Map<String, String> ourRegistryMap =
+    ImmutableMap.of("HKEY_LOCAL_MACHINE\\SOFTWARE\\Python\\PythonCore", "python.exe",
+                    "HKEY_LOCAL_MACHINE\\SOFTWARE\\Wow6432Node\\Python\\PythonCore", "python.exe",
+                    "HKEY_LOCAL_MACHINE\\SOFTWARE\\IronPython", "ipy.exe");
 
   private WinPythonSdkFlavor() {
   }
@@ -48,6 +52,7 @@
     for (String name : exe_names) {
       findInstallations(candidates, name, "C:\\", "C:\\Program Files\\");
       findInPath(candidates, name);
+      findInRegistry(candidates);
     }
   }
 
@@ -59,6 +64,7 @@
 
   public static void findInPath(Collection<String> candidates, String exeName) {
     final String path = System.getenv("PATH");
+    if (path == null) return;
     for (String pathEntry : StringUtil.split(path, ";")) {
       if (pathEntry.startsWith("\"") && pathEntry.endsWith("\"")) {
         if (pathEntry.length() < 2) continue;
@@ -71,6 +77,25 @@
     }
   }
 
+  public static void findInRegistry(Collection<String> candidates) {
+    for (Map.Entry<String, String> entry : ourRegistryMap.entrySet()) {
+      final String prefix = entry.getKey();
+      final String exePath = entry.getValue();
+      List<String> strings = WindowsRegistryUtil.readRegistryBranch(prefix);
+      for (String string : strings) {
+        final String path =
+          WindowsRegistryUtil.readRegistryDefault(prefix + "\\" + string +
+                                                  "\\InstallPath");
+        if (path != null) {
+          File f = new File(path, exePath);
+          if (f.exists()) {
+            candidates.add(FileUtil.toSystemDependentName(f.getPath()));
+          }
+        }
+      }
+    }
+  }
+
   private static void findSubdirInstallations(Collection<String> candidates, String rootDir, String dir_prefix, String exe_name) {
     VirtualFile rootVDir = LocalFileSystem.getInstance().findFileByPath(rootDir);
     if (rootVDir != null) {
diff --git a/python/src/com/jetbrains/python/validation/CompatibilityVisitor.java b/python/src/com/jetbrains/python/validation/CompatibilityVisitor.java
index f48af7f..9be1594 100644
--- a/python/src/com/jetbrains/python/validation/CompatibilityVisitor.java
+++ b/python/src/com/jetbrains/python/validation/CompatibilityVisitor.java
@@ -295,7 +295,7 @@
       }
     }
     commonRegisterProblem(message, " not support this syntax. Raise with no arguments can only be used in an except block",
-                          len, node, null);
+                          len, node, null, false);
     // raise 1, 2, 3
     len = 0;
     message = new StringBuilder(myCommonMessage);
diff --git a/python/testData/dotNet/whole_namespace.py b/python/testData/dotNet/whole_namespace.py
index b07fa0e..163f6df 100644
--- a/python/testData/dotNet/whole_namespace.py
+++ b/python/testData/dotNet/whole_namespace.py
@@ -2,5 +2,5 @@
 
 clr.AddReferenceByPartialName("SingleNameSpace")
 
-<caret>ingleNameSpace
+import <caret>SingleNameSpace
 print SingleNameSpace.MyClass
\ No newline at end of file
diff --git a/python/testData/formatter/reformatOfSingleElementPossible.py b/python/testData/formatter/reformatOfSingleElementPossible.py
new file mode 100644
index 0000000..d97d8ab
--- /dev/null
+++ b/python/testData/formatter/reformatOfSingleElementPossible.py
@@ -0,0 +1,2 @@
+x=[1,2,3]
+y='spam<caret>'*2
\ No newline at end of file
diff --git a/python/testData/formatter/reformatOfSingleElementPossible_after.py b/python/testData/formatter/reformatOfSingleElementPossible_after.py
new file mode 100644
index 0000000..2528c03
--- /dev/null
+++ b/python/testData/formatter/reformatOfSingleElementPossible_after.py
@@ -0,0 +1,2 @@
+x=[1,2,3]
+y = 'spam' * 2
\ No newline at end of file
diff --git a/python/testData/highlighting/unsupportedFeaturesInPython3.py b/python/testData/highlighting/unsupportedFeaturesInPython3.py
index fce5f97..5a4f399 100644
--- a/python/testData/highlighting/unsupportedFeaturesInPython3.py
+++ b/python/testData/highlighting/unsupportedFeaturesInPython3.py
@@ -18,7 +18,7 @@
 
 <error descr="Python version 3.0 does not have module __builtin__">import __builtin__</error>
 
-<error descr="Python version 3.0 does not support this syntax. Raise with no arguments can only be used in an except block">raise</error>
+<warning descr="Python version 3.0 does not support this syntax. Raise with no arguments can only be used in an except block">raise</warning>
 
 try:
     pass
diff --git a/python/testData/inspections/DefaultArgumentEmptyList.py b/python/testData/inspections/DefaultArgumentEmptyList.py
new file mode 100644
index 0000000..55748eb
--- /dev/null
+++ b/python/testData/inspections/DefaultArgumentEmptyList.py
@@ -0,0 +1 @@
+def foo(args=<warning descr="Default argument value is mutable">[<caret>]</warning>):<EOLError descr="Indent expected"></EOLError>
\ No newline at end of file
diff --git a/python/testData/inspections/DefaultArgumentEmptyList_after.py b/python/testData/inspections/DefaultArgumentEmptyList_after.py
new file mode 100644
index 0000000..2de1bb8
--- /dev/null
+++ b/python/testData/inspections/DefaultArgumentEmptyList_after.py
@@ -0,0 +1,3 @@
+def foo(args=None):
+    if not args:
+        args = []
\ No newline at end of file
diff --git a/python/testData/inspections/DefaultArgument_after.py b/python/testData/inspections/DefaultArgument_after.py
index e2036d2..8007f94 100644
--- a/python/testData/inspections/DefaultArgument_after.py
+++ b/python/testData/inspections/DefaultArgument_after.py
@@ -1,3 +1,4 @@
 def foo(args=None):
-    if not args: args = []
+    if not args:
+        args = []
     pass
\ No newline at end of file
diff --git a/python/testData/inspections/ReplacePrintComment.py b/python/testData/inspections/ReplacePrintComment.py
new file mode 100644
index 0000000..1cf3778
--- /dev/null
+++ b/python/testData/inspections/ReplacePrintComment.py
@@ -0,0 +1 @@
+<warning descr="Statement seems to have no effect and can be replaced with function call to have effect">print</warning><error descr="End of statement expected"> </error><warning descr="Statement seems to have no effect">'%s %s %s %s' % bar</warning>  # <- doesn't work either
\ No newline at end of file
diff --git a/python/testData/inspections/ReplacePrintComment_after.py b/python/testData/inspections/ReplacePrintComment_after.py
new file mode 100644
index 0000000..416deb8
--- /dev/null
+++ b/python/testData/inspections/ReplacePrintComment_after.py
@@ -0,0 +1 @@
+print('%s %s %s %s' % bar)  # <- doesn't work either
diff --git a/python/testData/inspections/ReplacePrintEnd.py b/python/testData/inspections/ReplacePrintEnd.py
new file mode 100644
index 0000000..afc37fb
--- /dev/null
+++ b/python/testData/inspections/ReplacePrintEnd.py
@@ -0,0 +1 @@
+<warning descr="Statement seems to have no effect and can be replaced with function call to have effect">print</warning><error descr="End of statement expected"> </error><warning descr="Statement seems to have no effect">var,</warning> 
\ No newline at end of file
diff --git a/python/testData/inspections/ReplacePrintEnd_after.py b/python/testData/inspections/ReplacePrintEnd_after.py
new file mode 100644
index 0000000..bae6b0c
--- /dev/null
+++ b/python/testData/inspections/ReplacePrintEnd_after.py
@@ -0,0 +1 @@
+print(var, end=' ')
diff --git a/python/testData/intentions/afterDocStubKeywordOnly.py b/python/testData/intentions/afterDocStubKeywordOnly.py
index c65f5e9..161b9c1 100644
--- a/python/testData/intentions/afterDocStubKeywordOnly.py
+++ b/python/testData/intentions/afterDocStubKeywordOnly.py
@@ -1,4 +1,4 @@
-def f(my, *, param, **args):
+def foo(my, *, param, **args):
   """
 
   :param my:
diff --git a/python/testData/intentions/afterImportToImportFrom.py b/python/testData/intentions/afterImportToImportFrom.py
new file mode 100644
index 0000000..45b576a
--- /dev/null
+++ b/python/testData/intentions/afterImportToImportFrom.py
@@ -0,0 +1,9 @@
+from __builtin__ import staticmethod, divmod
+
+quotient, rem = divmod(42, 3)
+
+# PY-11074
+class MyClass(object):
+    @staticmethod
+    def method():
+        pass
\ No newline at end of file
diff --git a/python/testData/intentions/beforeDocStubKeywordOnly.py b/python/testData/intentions/beforeDocStubKeywordOnly.py
index b87a414..d0534c4 100644
--- a/python/testData/intentions/beforeDocStubKeywordOnly.py
+++ b/python/testData/intentions/beforeDocStubKeywordOnly.py
@@ -1,2 +1,2 @@
-def f(my, <caret>*, param, **args):
+def f<caret>oo(my, *, param, **args):
   pass
\ No newline at end of file
diff --git a/python/testData/intentions/beforeImportToImportFrom.py b/python/testData/intentions/beforeImportToImportFrom.py
new file mode 100644
index 0000000..88b5894
--- /dev/null
+++ b/python/testData/intentions/beforeImportToImportFrom.py
@@ -0,0 +1,9 @@
+import __builtin<caret>__ as b
+
+quotient, rem = b.divmod(42, 3)
+
+# PY-11074
+class MyClass(object):
+    @b.staticmethod
+    def method():
+        pass
\ No newline at end of file
diff --git a/python/testData/refactoring/inlinelocal/py5832.after.py b/python/testData/refactoring/inlinelocal/py5832.after.py
index 6b78080..bde7ba3 100644
--- a/python/testData/refactoring/inlinelocal/py5832.after.py
+++ b/python/testData/refactoring/inlinelocal/py5832.after.py
@@ -1,3 +1,5 @@
 def foo(arg):
     print arg
+
+
 foo(('a', 'b'))
\ No newline at end of file
diff --git a/python/testData/refactoring/inlinelocal/referenceInParenthesis.after.py b/python/testData/refactoring/inlinelocal/referenceInParenthesis.after.py
new file mode 100644
index 0000000..67bea27
--- /dev/null
+++ b/python/testData/refactoring/inlinelocal/referenceInParenthesis.after.py
@@ -0,0 +1,3 @@
+print ('spam!' * 42)
+('spam!' * 42)
+hex('spam!' * 42)
\ No newline at end of file
diff --git a/python/testData/refactoring/inlinelocal/referenceInParenthesis.before.py b/python/testData/refactoring/inlinelocal/referenceInParenthesis.before.py
new file mode 100644
index 0000000..da3059f
--- /dev/null
+++ b/python/testData/refactoring/inlinelocal/referenceInParenthesis.before.py
@@ -0,0 +1,5 @@
+x<caret> = 'spam!' * 42
+
+print (x)
+(x)
+hex(x)
\ No newline at end of file
diff --git a/python/testData/refactoring/inlinelocal/resultExceedsRightMargin.after.py b/python/testData/refactoring/inlinelocal/resultExceedsRightMargin.after.py
new file mode 100644
index 0000000..acc61d6
--- /dev/null
+++ b/python/testData/refactoring/inlinelocal/resultExceedsRightMargin.after.py
@@ -0,0 +1,3 @@
+result = '123456789|123456789|123456789|123456789|123456789|' + \
+         '123456789|123456789|123456789|123456789|123456789|' + \
+         '123456789|123456789|123456789|123456789|123456789|'
\ No newline at end of file
diff --git a/python/testData/refactoring/inlinelocal/resultExceedsRightMargin.before.py b/python/testData/refactoring/inlinelocal/resultExceedsRightMargin.before.py
new file mode 100644
index 0000000..8910b0d
--- /dev/null
+++ b/python/testData/refactoring/inlinelocal/resultExceedsRightMargin.before.py
@@ -0,0 +1,3 @@
+s = '123456789|123456789|123456789|123456789|123456789|'
+
+result = s + s + s
\ No newline at end of file
diff --git a/python/testData/typing/typing.py b/python/testData/typing/typing.py
new file mode 100644
index 0000000..1fbd5fc
--- /dev/null
+++ b/python/testData/typing/typing.py
@@ -0,0 +1,582 @@
+"""Static type checking helpers"""
+
+from abc import ABCMeta, abstractmethod, abstractproperty
+import inspect
+import sys
+import re
+
+
+__all__ = [
+    # Type system related
+    'AbstractGeneric',
+    'AbstractGenericMeta',
+    'Any',
+    'AnyStr',
+    'Dict',
+    'Function',
+    'Generic',
+    'GenericMeta',
+    'IO',
+    'List',
+    'Match',
+    'Pattern',
+    'Protocol',
+    'Set',
+    'Tuple',
+    'Undefined',
+    'Union',
+    'cast',
+    'forwardref',
+    'overload',
+    'typevar',
+    # Protocols and abstract base classes
+    'Container',
+    'Iterable',
+    'Iterator',
+    'Sequence',
+    'Sized',
+    'AbstractSet',
+    'Mapping',
+    'BinaryIO',
+    'TextIO',
+]
+
+
+def builtinclass(cls):
+    """Mark a class as a built-in/extension class for type checking."""
+    return cls
+
+
+def ducktype(type):
+    """Return a duck type declaration decorator.
+
+    The decorator only affects type checking.
+    """
+    def decorator(cls):
+        return cls
+    return decorator
+
+
+def disjointclass(type):
+    """Return a disjoint class declaration decorator.
+
+    The decorator only affects type checking.
+    """
+    def decorator(cls):
+        return cls
+    return decorator
+
+
+class GenericMeta(type):
+    """Metaclass for generic classes that support indexing by types."""
+    
+    def __getitem__(self, args):
+        # Just ignore args; they are for compile-time checks only.
+        return self
+
+
+class Generic(metaclass=GenericMeta):
+    """Base class for generic classes."""
+
+
+class AbstractGenericMeta(ABCMeta):
+    """Metaclass for abstract generic classes that support type indexing.
+
+    This is used for both protocols and ordinary abstract classes.
+    """
+    
+    def __new__(mcls, name, bases, namespace):
+        cls = super().__new__(mcls, name, bases, namespace)
+        # 'Protocol' must be an explicit base class in order for a class to
+        # be a protocol.
+        cls._is_protocol = name == 'Protocol' or Protocol in bases
+        return cls
+    
+    def __getitem__(self, args):
+        # Just ignore args; they are for compile-time checks only.
+        return self
+
+
+class Protocol(metaclass=AbstractGenericMeta):
+    """Base class for protocol classes."""
+
+    @classmethod
+    def __subclasshook__(cls, c):
+        if not cls._is_protocol:
+            # No structural checks since this isn't a protocol.
+            return NotImplemented
+        
+        if cls is Protocol:
+            # Every class is a subclass of the empty protocol.
+            return True
+
+        # Find all attributes defined in the protocol.
+        attrs = cls._get_protocol_attrs()
+
+        for attr in attrs:
+            if not any(attr in d.__dict__ for d in c.__mro__):
+                return NotImplemented
+        return True
+
+    @classmethod
+    def _get_protocol_attrs(cls):
+        # Get all Protocol base classes.
+        protocol_bases = []
+        for c in cls.__mro__:
+            if getattr(c, '_is_protocol', False) and c.__name__ != 'Protocol':
+                protocol_bases.append(c)
+        
+        # Get attributes included in protocol.
+        attrs = set()
+        for base in protocol_bases:
+            for attr in base.__dict__.keys():
+                # Include attributes not defined in any non-protocol bases.
+                for c in cls.__mro__:
+                    if (c is not base and attr in c.__dict__ and
+                            not getattr(c, '_is_protocol', False)):
+                        break
+                else:
+                    if (not attr.startswith('_abc_') and
+                        attr != '__abstractmethods__' and
+                        attr != '_is_protocol' and
+                        attr != '__dict__' and
+                        attr != '_get_protocol_attrs' and
+                        attr != '__module__'):
+                        attrs.add(attr)
+        
+        return attrs
+
+
+class AbstractGeneric(metaclass=AbstractGenericMeta):
+    """Base class for abstract generic classes."""
+
+
+class TypeAlias:
+    """Class for defining generic aliases for library types."""
+    
+    def __init__(self, target_type):
+        self.target_type = target_type
+    
+    def __getitem__(self, typeargs):
+        return self.target_type
+
+
+Traceback = object() # TODO proper type object
+
+
+# Define aliases for built-in types that support indexing.
+List = TypeAlias(list)
+Dict = TypeAlias(dict)
+Set = TypeAlias(set)
+Tuple = TypeAlias(tuple)
+Function = TypeAlias(callable)
+Pattern = TypeAlias(type(re.compile('')))
+Match = TypeAlias(type(re.match('', '')))
+
+def union(x): return x
+
+Union = TypeAlias(union)
+
+class typevar:
+    def __init__(self, name, *, values=None):
+        self.name = name
+        self.values = values
+
+
+# Predefined type variables.
+AnyStr = typevar('AnyStr', values=(str, bytes))
+
+
+class forwardref:
+    def __init__(self, name):
+        self.name = name
+
+
+def Any(x):
+    """The Any type; can also be used to cast a value to type Any."""
+    return x
+
+def cast(type, object):
+    """Cast a value to a type.
+
+    This only affects static checking; simply return object at runtime.
+    """
+    return object
+
+
+def overload(func):
+    """Function decorator for defining overloaded functions."""
+    frame = sys._getframe(1)
+    locals = frame.f_locals
+    # See if there is a previous overload variant available.  Also verify
+    # that the existing function really is overloaded: otherwise, replace
+    # the definition.  The latter is actually important if we want to reload
+    # a library module such as genericpath with a custom one that uses
+    # overloading in the implementation.
+    if func.__name__ in locals and hasattr(locals[func.__name__], 'dispatch'):
+        orig_func = locals[func.__name__]
+        
+        def wrapper(*args, **kwargs):
+            ret, ok = orig_func.dispatch(*args, **kwargs)
+            if ok:
+                return ret
+            return func(*args, **kwargs)
+        wrapper.isoverload = True
+        wrapper.dispatch = make_dispatcher(func, orig_func.dispatch)
+        wrapper.next = orig_func
+        wrapper.__name__ = func.__name__
+        if hasattr(func, '__isabstractmethod__'):
+            # Note that we can't reliably check that abstractmethod is
+            # used consistently across overload variants, so we let a
+            # static checker do it.
+            wrapper.__isabstractmethod__ = func.__isabstractmethod__
+        return wrapper
+    else:
+        # Return the initial overload variant.
+        func.isoverload = True
+        func.dispatch = make_dispatcher(func)
+        func.next = None
+        return func
+
+
+def is_erased_type(t):
+    return t is Any or isinstance(t, typevar)
+
+
+def make_dispatcher(func, previous=None):
+    """Create argument dispatcher for an overloaded function.
+
+    Also handle chaining of multiple overload variants.
+    """
+    (args, varargs, varkw, defaults,
+     kwonlyargs, kwonlydefaults, annotations) = inspect.getfullargspec(func)
+    
+    argtypes = []
+    for arg in args:
+        ann = annotations.get(arg)
+        if isinstance(ann, forwardref):
+            ann = ann.name
+        if is_erased_type(ann):
+            ann = None
+        elif isinstance(ann, str):
+            # The annotation is a string => evaluate it lazily when the
+            # overloaded function is first called.
+            frame = sys._getframe(2)
+            t = [None]
+            ann_str = ann
+            def check(x):
+                if not t[0]:
+                    # Evaluate string in the context of the overload caller.
+                    t[0] = eval(ann_str, frame.f_globals, frame.f_locals)
+                    if is_erased_type(t[0]):
+                        # Anything goes.
+                        t[0] = object
+                if isinstance(t[0], type):
+                    return isinstance(x, t[0])
+                else:
+                    return t[0](x)
+            ann = check
+        argtypes.append(ann)
+
+    maxargs = len(argtypes)
+    minargs = maxargs
+    if defaults:
+        minargs = len(argtypes) - len(defaults)
+    
+    def dispatch(*args, **kwargs):
+        if previous:
+            ret, ok = previous(*args, **kwargs)
+            if ok:
+                return ret, ok
+
+        nargs = len(args)
+        if nargs < minargs or nargs > maxargs:
+            # Invalid argument count.
+            return None, False
+        
+        for i in range(nargs):
+            argtype = argtypes[i]
+            if argtype:
+                if isinstance(argtype, type):
+                    if not isinstance(args[i], argtype):
+                        break
+                else:
+                    if not argtype(args[i]):
+                        break
+        else:
+            return func(*args, **kwargs), True
+        return None, False
+    return dispatch
+
+
+class Undefined:
+    """Class that represents an undefined value with a specified type.
+
+    At runtime the name Undefined is bound to an instance of this
+    class.  The intent is that any operation on an Undefined object
+    raises an exception, including use in a boolean context.  Some
+    operations cannot be disallowed: Undefined can be used as an
+    operand of 'is', and it can be assigned to variables and stored in
+    containers.
+
+    'Undefined' makes it possible to declare the static type of a
+    variable even if there is no useful default value to initialize it
+    with:
+
+      from typing import Undefined
+      x = Undefined(int)
+      y = Undefined # type: int
+
+    The latter form can be used if efficiency is of utmost importance,
+    since it saves a call operation and potentially additional
+    operations needed to evaluate a type expression.  Undefined(x)
+    just evaluates to Undefined, ignoring the argument value.
+    """
+    
+    def __repr__(self):
+        return '<typing.Undefined>'
+
+    def __setattr__(self, attr, value):
+        raise AttributeError("'Undefined' object has no attribute '%s'" % attr)
+
+    def __eq__(self, other):
+        raise TypeError("'Undefined' object cannot be compared")
+
+    def __call__(self, type):
+        return self
+
+    def __bool__(self):
+        raise TypeError("'Undefined' object is not valid as a boolean")
+
+
+Undefined = Undefined()
+
+
+# Abstract classes
+
+
+T = typevar('T')
+KT = typevar('KT')
+VT = typevar('VT')
+
+
+class SupportsInt(Protocol):
+    @abstractmethod
+    def __int__(self) -> int: pass
+
+
+class SupportsFloat(Protocol):
+    @abstractmethod
+    def __float__(self) -> float: pass
+
+
+class SupportsAbs(Protocol[T]):
+    @abstractmethod
+    def __abs__(self) -> T: pass
+
+
+class SupportsRound(Protocol[T]):
+    @abstractmethod
+    def __round__(self, ndigits: int = 0) -> T: pass
+
+
+class Reversible(Protocol[T]):
+    @abstractmethod
+    def __reversed__(self) -> 'Iterator[T]': pass
+
+
+class Sized(Protocol):
+    @abstractmethod
+    def __len__(self) -> int: pass
+
+
+class Container(Protocol[T]):
+    @abstractmethod
+    def __contains__(self, x) -> bool: pass
+
+
+class Iterable(Protocol[T]):
+    @abstractmethod
+    def __iter__(self) -> 'Iterator[T]': pass
+
+
+class Iterator(Iterable[T], Protocol[T]):
+    @abstractmethod
+    def __next__(self) -> T: pass
+
+
+class Sequence(Sized, Iterable[T], Container[T], AbstractGeneric[T]):
+    @abstractmethod
+    @overload
+    def __getitem__(self, i: int) -> T: pass
+    
+    @abstractmethod
+    @overload
+    def __getitem__(self, s: slice) -> 'Sequence[T]': pass
+    
+    @abstractmethod
+    def __reversed__(self, s: slice) -> Iterator[T]: pass
+    
+    @abstractmethod
+    def index(self, x) -> int: pass
+    
+    @abstractmethod
+    def count(self, x) -> int: pass
+
+
+for t in list, tuple, str, bytes, range:
+    Sequence.register(t)
+
+
+class AbstractSet(Sized, Iterable[T], AbstractGeneric[T]):
+    @abstractmethod
+    def __contains__(self, x: object) -> bool: pass
+    @abstractmethod
+    def __and__(self, s: 'AbstractSet[T]') -> 'AbstractSet[T]': pass
+    @abstractmethod
+    def __or__(self, s: 'AbstractSet[T]') -> 'AbstractSet[T]': pass
+    @abstractmethod
+    def __sub__(self, s: 'AbstractSet[T]') -> 'AbstractSet[T]': pass
+    @abstractmethod
+    def __xor__(self, s: 'AbstractSet[T]') -> 'AbstractSet[T]': pass
+    @abstractmethod
+    def isdisjoint(self, s: 'AbstractSet[T]') -> bool: pass
+
+
+for t in set, frozenset, type({}.keys()), type({}.items()):
+    AbstractSet.register(t)
+
+
+class Mapping(Sized, Iterable[KT], AbstractGeneric[KT, VT]):
+    @abstractmethod
+    def __getitem__(self, k: KT) -> VT: pass
+    @abstractmethod
+    def __setitem__(self, k: KT, v: VT) -> None: pass
+    @abstractmethod
+    def __delitem__(self, v: KT) -> None: pass
+    @abstractmethod
+    def __contains__(self, o: object) -> bool: pass
+
+    @abstractmethod
+    def clear(self) -> None: pass
+    @abstractmethod
+    def copy(self) -> 'Mapping[KT, VT]': pass
+    @overload
+    @abstractmethod
+    def get(self, k: KT) -> VT: pass
+    @overload
+    @abstractmethod
+    def get(self, k: KT, default: VT) -> VT: pass
+    @overload
+    @abstractmethod
+    def pop(self, k: KT) -> VT: pass
+    @overload
+    @abstractmethod
+    def pop(self, k: KT, default: VT) -> VT: pass
+    @abstractmethod
+    def popitem(self) -> Tuple[KT, VT]: pass
+    @overload
+    @abstractmethod
+    def setdefault(self, k: KT) -> VT: pass
+    @overload
+    @abstractmethod
+    def setdefault(self, k: KT, default: VT) -> VT: pass
+    
+    @overload
+    @abstractmethod
+    def update(self, m: 'Mapping[KT, VT]') -> None: pass
+    @overload
+    @abstractmethod
+    def update(self, m: Iterable[Tuple[KT, VT]]) -> None: pass
+    
+    @abstractmethod
+    def keys(self) -> AbstractSet[KT]: pass
+    @abstractmethod
+    def values(self) -> AbstractSet[VT]: pass
+    @abstractmethod
+    def items(self) -> AbstractSet[Tuple[KT, VT]]: pass
+
+
+# TODO Consider more types: os.environ, etc. However, these add dependencies.
+Mapping.register(dict)
+
+
+# Note that the BinaryIO and TextIO classes must be in sync with typing module
+# stubs.
+
+
+class IO(AbstractGeneric[AnyStr]):
+    @abstractproperty
+    def mode(self) -> str: pass
+    @abstractproperty
+    def name(self) -> str: pass
+    @abstractmethod
+    def close(self) -> None: pass
+    @abstractmethod
+    def closed(self) -> bool: pass
+    @abstractmethod
+    def fileno(self) -> int: pass
+    @abstractmethod
+    def flush(self) -> None: pass
+    @abstractmethod
+    def isatty(self) -> bool: pass
+    @abstractmethod
+    def read(self, n: int = -1) -> AnyStr: pass
+    @abstractmethod
+    def readable(self) -> bool: pass
+    @abstractmethod
+    def readline(self, limit: int = -1) -> AnyStr: pass
+    @abstractmethod
+    def readlines(self, hint: int = -1) -> List[AnyStr]: pass
+    @abstractmethod
+    def seek(self, offset: int, whence: int = 0) -> int: pass
+    @abstractmethod
+    def seekable(self) -> bool: pass
+    @abstractmethod
+    def tell(self) -> int: pass
+    @abstractmethod
+    def truncate(self, size: int = None) -> int: pass
+    @abstractmethod
+    def writable(self) -> bool: pass
+    @abstractmethod
+    def write(self, s: AnyStr) -> int: pass
+    @abstractmethod
+    def writelines(self, lines: List[AnyStr]) -> None: pass
+
+    @abstractmethod
+    def __enter__(self) -> 'IO[AnyStr]': pass
+    @abstractmethod
+    def __exit__(self, type, value, traceback) -> None: pass
+
+
+class BinaryIO(IO[bytes]):
+    @overload
+    @abstractmethod
+    def write(self, s: bytes) -> int: pass
+    @overload
+    @abstractmethod
+    def write(self, s: bytearray) -> int: pass
+
+    @abstractmethod
+    def __enter__(self) -> 'BinaryIO': pass
+
+
+class TextIO(IO[str]):
+    @abstractproperty
+    def buffer(self) -> BinaryIO: pass
+    @abstractproperty
+    def encoding(self) -> str: pass
+    @abstractproperty
+    def errors(self) -> str: pass
+    @abstractproperty
+    def line_buffering(self) -> bool: pass
+    @abstractproperty
+    def newlines(self) -> Any: pass
+    @abstractmethod
+    def __enter__(self) -> 'TextIO': pass
+
+
+# TODO Register IO/TextIO/BinaryIO as the base class of file-like types.
+
+
+del t
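
The typing.py stub added above wires @overload to a runtime dispatcher (make_dispatcher chains variants and matches arguments with isinstance) and rebinds Undefined to a callable singleton. Below is a minimal sketch of how that behaves at runtime, assuming this stub shadows any standard-library typing module on sys.path; the function describe and the variable y are illustrative only and are not part of the patch:

    # Sketch of runtime behaviour of the bundled typing.py stub.
    # Assumes the directory containing this typing.py comes first on sys.path.
    from typing import Undefined, overload


    @overload
    def describe(x: int) -> str:
        # First overload variant: chosen when the argument passes isinstance(x, int).
        return 'int: %d' % x


    @overload
    def describe(x: str) -> str:
        # Last-defined variant: called when no earlier variant's dispatcher matches.
        return 'str: %s' % x


    print(describe(42))      # prints: int: 42
    print(describe('spam'))  # prints: str: spam

    # Undefined declares a typed variable without a usable value;
    # Undefined(int) simply evaluates to the Undefined singleton itself.
    y = Undefined(int)
    print(repr(y))           # prints: <typing.Undefined>

Note that the last-defined variant acts as the catch-all: the wrapper returned by overload consults the earlier variants' dispatchers first and only falls back to the newest function when none of their isinstance checks succeed.
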
diff --git a/python/testSrc/com/jetbrains/python/PyFillParagraphTest.java b/python/testSrc/com/jetbrains/python/PyFillParagraphTest.java
index 0a26253..287c65f 100644
--- a/python/testSrc/com/jetbrains/python/PyFillParagraphTest.java
+++ b/python/testSrc/com/jetbrains/python/PyFillParagraphTest.java
@@ -22,6 +22,7 @@
 import com.intellij.openapi.command.CommandProcessor;
 import com.intellij.psi.codeStyle.CodeStyleSettings;
 import com.intellij.psi.codeStyle.CodeStyleSettingsManager;
+import com.intellij.psi.codeStyle.CommonCodeStyleSettings;
 import com.jetbrains.python.fixtures.PyTestCase;
 
 /**
@@ -62,7 +63,8 @@
   }
 
   public void testEnter() {
-    final CodeStyleSettings settings = CodeStyleSettingsManager.getInstance(myFixture.getProject()).getCurrentSettings();
+    final CommonCodeStyleSettings settings =
+      CodeStyleSettingsManager.getInstance(myFixture.getProject()).getCurrentSettings().getCommonSettings(PythonLanguage.getInstance());
     int oldValue = settings.RIGHT_MARGIN;
     settings.RIGHT_MARGIN = 80;
     try {
diff --git a/python/testSrc/com/jetbrains/python/PyFormatterTest.java b/python/testSrc/com/jetbrains/python/PyFormatterTest.java
index 3680544..35a97e0 100644
--- a/python/testSrc/com/jetbrains/python/PyFormatterTest.java
+++ b/python/testSrc/com/jetbrains/python/PyFormatterTest.java
@@ -21,10 +21,12 @@
 import com.intellij.psi.codeStyle.CodeStyleManager;
 import com.intellij.psi.codeStyle.CodeStyleSettings;
 import com.intellij.psi.codeStyle.CodeStyleSettingsManager;
+import com.intellij.psi.util.PsiTreeUtil;
 import com.jetbrains.python.fixtures.PyTestCase;
 import com.jetbrains.python.formatter.PyCodeStyleSettings;
 import com.jetbrains.python.psi.LanguageLevel;
 import com.jetbrains.python.psi.PyElementGenerator;
+import com.jetbrains.python.psi.PyStatement;
 import com.jetbrains.python.psi.impl.PythonLanguageLevelPusher;
 
 /**
@@ -273,19 +275,19 @@
   }
 
   public void testWrapDefinitionWithLongLine() { // IDEA-92081
-    settings().RIGHT_MARGIN = 30;
+    settings().setRightMargin(PythonLanguage.getInstance(), 30);
     settings().WRAP_LONG_LINES = true;
     doTest();
   }
 
   public void testWrapAssignment() {  // PY-8572
-    settings().RIGHT_MARGIN = 120;
+    settings().setRightMargin(PythonLanguage.getInstance(), 120);
     settings().WRAP_LONG_LINES = false;
     doTest();
   }
 
   public void testIndentInSlice() {  // PY-8572
-    settings().RIGHT_MARGIN = 120;
+    settings().setRightMargin(PythonLanguage.getInstance(), 120);
     settings().WRAP_LONG_LINES = false;
     doTest();
   }
@@ -352,7 +354,7 @@
   }
 
   public void testWrapInBinaryExpression() {  // PY-9032
-    settings().RIGHT_MARGIN = 80;
+    settings().setRightMargin(PythonLanguage.getInstance(), 80);
     doTest(true);
   }
 
@@ -370,7 +372,7 @@
   }
 
   public void testWrapImports() {  // PY-9163
-    settings().RIGHT_MARGIN = 80;
+    settings().setRightMargin(PythonLanguage.getInstance(), 80);
     doTest();
   }
 
@@ -424,6 +426,26 @@
     myFixture.checkResultByFile("formatter/" + getTestName(true) + "_after.py");
   }
 
+  /**
+   * This test merely checks that a call to {@link com.intellij.psi.codeStyle.CodeStyleManager#reformat(com.intellij.psi.PsiElement)}
+   * is possible for Python sources.
+   */
+  public void testReformatOfSingleElementPossible() {
+    myFixture.configureByFile("formatter/" + getTestName(true) + ".py");
+    WriteCommandAction.runWriteCommandAction(myFixture.getProject(), new Runnable() {
+      @Override
+      public void run() {
+        final PsiElement elementAtCaret = myFixture.getFile().findElementAt(myFixture.getCaretOffset());
+        assertNotNull(elementAtCaret);
+        final PyStatement statement = PsiTreeUtil.getParentOfType(elementAtCaret, PyStatement.class, false);
+        assertNotNull(statement);
+        final CodeStyleManager codeStyleManager = CodeStyleManager.getInstance(myFixture.getProject());
+        codeStyleManager.reformat(statement);
+      }
+    });
+    myFixture.checkResultByFile("formatter/" + getTestName(true) + "_after.py");
+  }
+
   private CodeStyleSettings settings() {
     return CodeStyleSettingsManager.getInstance().getSettings(myFixture.getProject());
   }
diff --git a/python/testSrc/com/jetbrains/python/PyQuickFixTest.java b/python/testSrc/com/jetbrains/python/PyQuickFixTest.java
index c36372d..e49b0ea 100644
--- a/python/testSrc/com/jetbrains/python/PyQuickFixTest.java
+++ b/python/testSrc/com/jetbrains/python/PyQuickFixTest.java
@@ -251,6 +251,23 @@
                           PyBundle.message("QFIX.statement.effect.introduce.variable"), true, true);
   }
 
+  public void testReplacePrintEnd() {
+    runWithLanguageLevel(LanguageLevel.PYTHON34, new Runnable() {
+      @Override
+      public void run() {
+        doInspectionTest("ReplacePrintEnd.py", PyStatementEffectInspection.class, PyBundle.message("QFIX.statement.effect"), true, true);
+      }});
+  }
+
+  public void testReplacePrintComment() {
+    runWithLanguageLevel(LanguageLevel.PYTHON34, new Runnable() {
+      @Override
+      public void run() {
+        doInspectionTest("ReplacePrintComment.py", PyStatementEffectInspection.class, PyBundle.message("QFIX.statement.effect"), true,
+                         true);
+      }});
+  }
+
   public void testUnresolvedWith() {  // PY-2083
     setLanguageLevel(LanguageLevel.PYTHON25);
     doInspectionTest("UnresolvedWith.py", PyUnresolvedReferencesInspection.class,
@@ -296,6 +313,11 @@
                      PyBundle.message("QFIX.default.argument"), true, true);
   }
 
+  public void testDefaultArgumentEmptyList() {
+    doInspectionTest("DefaultArgumentEmptyList.py", PyDefaultArgumentInspection.class,
+                     PyBundle.message("QFIX.default.argument"), true, true);
+  }
+
   public void testPyArgumentEqualDefault() {                      //PY-3125
     doInspectionTest("ArgumentEqualDefault.py", PyArgumentEqualDefaultInspection.class,
                      PyBundle.message("QFIX.remove.argument.equal.default"), true, true);
diff --git a/python/testSrc/com/jetbrains/python/PyTypeTest.java b/python/testSrc/com/jetbrains/python/PyTypeTest.java
index b0e41bc..b6ed109 100644
--- a/python/testSrc/com/jetbrains/python/PyTypeTest.java
+++ b/python/testSrc/com/jetbrains/python/PyTypeTest.java
@@ -835,6 +835,37 @@
            "expr = (1,) + (True, 'spam') + ()");
   }
 
+  public void testConstructorUnification() {
+    doTest("C[int]",
+           "class C(object):\n" +
+           "    def __init__(self, x):\n" +
+           "        '''\n" +
+           "        :type x: T\n" +
+           "        :rtype: C[T]\n" +
+           "        '''\n" +
+           "        pass\n" +
+           "\n" +
+           "expr = C(10)\n");
+  }
+
+  public void testGenericClassMethodUnification() {
+    doTest("int",
+           "class C(object):\n" +
+           "    def __init__(self, x):\n" +
+           "        '''\n" +
+           "        :type x: T\n" +
+           "        :rtype: C[T]\n" +
+           "        '''\n" +
+           "        pass\n" +
+           "    def foo(self):\n" +
+           "        '''\n" +
+           "        :rtype: T\n" +
+           "        '''\n" +
+           "        pass\n" +
+           "\n" +
+           "expr = C(10).foo()\n");
+  }
+
   private static TypeEvalContext getTypeEvalContext(@NotNull PyExpression element) {
     return TypeEvalContext.userInitiated(element.getContainingFile()).withTracing();
   }
diff --git a/python/testSrc/com/jetbrains/python/PyTypingTest.java b/python/testSrc/com/jetbrains/python/PyTypingTest.java
new file mode 100644
index 0000000..9513795
--- /dev/null
+++ b/python/testSrc/com/jetbrains/python/PyTypingTest.java
@@ -0,0 +1,267 @@
+/*
+ * Copyright 2000-2014 JetBrains s.r.o.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package com.jetbrains.python;
+
+import com.intellij.testFramework.LightProjectDescriptor;
+import com.jetbrains.python.documentation.PythonDocumentationProvider;
+import com.jetbrains.python.fixtures.PyTestCase;
+import com.jetbrains.python.psi.LanguageLevel;
+import com.jetbrains.python.psi.PyExpression;
+import com.jetbrains.python.psi.types.PyType;
+import com.jetbrains.python.psi.types.TypeEvalContext;
+import org.jetbrains.annotations.NotNull;
+import org.jetbrains.annotations.Nullable;
+
+/**
+ * Tests for a type system based on mypy's typing module.
+ *
+ * @author vlan
+ */
+public class PyTypingTest extends PyTestCase {
+  @Nullable
+  @Override
+  protected LightProjectDescriptor getProjectDescriptor() {
+    return ourPy3Descriptor;
+  }
+
+  @Override
+  public void setUp() throws Exception {
+    super.setUp();
+    setLanguageLevel(LanguageLevel.PYTHON32);
+  }
+
+  @Override
+  public void tearDown() throws Exception {
+    setLanguageLevel(null);
+    super.tearDown();
+  }
+
+  public void testClassType() {
+    doTest("Foo",
+           "class Foo:" +
+           "    pass\n" +
+           "\n" +
+           "def f(expr: Foo):\n" +
+           "    pass\n");
+  }
+
+  public void testClassReturnType() {
+    doTest("Foo",
+           "class Foo:" +
+           "    pass\n" +
+           "\n" +
+           "def f() -> Foo:\n" +
+           "    pass\n" +
+           "\n" +
+           "expr = f()\n");
+
+  }
+
+  public void testNoneType() {
+    doTest("None",
+           "def f(expr: None):\n" +
+           "    pass\n");
+  }
+
+  public void testNoneReturnType() {
+    doTest("None",
+           "def f() -> None:\n" +
+           "    return 0\n" +
+           "expr = f()\n");
+  }
+
+  public void testUnionType() {
+    doTest("int | str",
+           "from typing import Union\n" +
+           "\n" +
+           "def f(expr: Union[int, str]):\n" +
+           "    pass\n");
+  }
+
+  public void testBuiltinList() {
+    doTest("list",
+           "from typing import List\n" +
+           "\n" +
+           "def f(expr: List):\n" +
+           "    pass\n");
+  }
+
+  public void testBuiltinListWithParameter() {
+    doTest("list[int]",
+           "from typing import List\n" +
+           "\n" +
+           "def f(expr: List[int]):\n" +
+           "    pass\n");
+  }
+
+  public void testBuiltinDictWithParameters() {
+    doTest("dict[str, int]",
+           "from typing import Dict\n" +
+           "\n" +
+           "def f(expr: Dict[str, int]):\n" +
+           "    pass\n");
+  }
+
+  public void testBuiltinTuple() {
+    doTest("tuple",
+           "from typing import Tuple\n" +
+           "\n" +
+           "def f(expr: Tuple):\n" +
+           "    pass\n");
+  }
+
+  public void testBuiltinTupleWithParameters() {
+    doTest("(int, str)",
+           "from typing import Tuple\n" +
+           "\n" +
+           "def f(expr: Tuple[int, str]):\n" +
+           "    pass\n");
+  }
+
+  public void testAnyType() {
+    doTest("unknown",
+           "from typing import Any\n" +
+           "\n" +
+           "def f(expr: Any):\n" +
+           "    pass\n");
+  }
+
+  public void testGenericType() {
+    doTest("A",
+           "from typing import typevar\n" +
+           "\n" +
+           "T = typevar('A')\n" +
+           "\n" +
+           "def f(expr: T):\n" +
+           "    pass\n");
+  }
+
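+  // A constrained type variable is rendered with its value restriction, e.g. "T <= int | str".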
+  public void testGenericBoundedType() {
+    doTest("T <= int | str",
+           "from typing import typevar\n" +
+           "\n" +
+           "T = typevar('T', values=(int, str))\n" +
+           "\n" +
+           "def f(expr: T):\n" +
+           "    pass\n");
+  }
+
+  public void testParameterizedClass() {
+    doTest("C[int]",
+           "from typing import Generic, typevar\n" +
+           "\n" +
+           "T = typevar('T')\n" +
+           "\n" +
+           "class C(Generic[T]):\n" +
+           "    def __init__(self, x: T):\n" +
+           "        pass\n" +
+           "\n" +
+           "expr = C(10)\n");
+  }
+
+  public void testParameterizedClassMethod() {
+    doTest("int",
+           "from typing import Generic, typevar\n" +
+           "\n" +
+           "T = typevar('T')\n" +
+           "\n" +
+           "class C(Generic[T]):\n" +
+           "    def __init__(self, x: T):\n" +
+           "        pass\n" +
+           "    def foo(self) -> T:\n" +
+           "        pass\n" +
+           "\n" +
+           "expr = C(10).foo()\n");
+  }
+
+  public void testParameterizedClassInheritance() {
+    doTest("int",
+           "from typing import Generic, typevar\n" +
+           "\n" +
+           "T = typevar('T')\n" +
+           "\n" +
+           "class B(Generic[T]):\n" +
+           "    def foo(self) -> T:\n" +
+           "        pass\n" +
+           "class C(B[T]):\n" +
+           "    def __init__(self, x: T):\n" +
+           "        pass\n" +
+           "\n" +
+           "expr = C(10).foo()\n");
+  }
+
+  public void testAnyStrUnification() {
+    doTest("bytes",
+           "from typing import AnyStr\n" +
+           "\n" +
+           "def foo(x: AnyStr) -> AnyStr:\n" +
+           "    pass\n" +
+           "\n" +
+           "expr = foo(b'bar')\n");
+  }
+
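+  // When the argument type is unknown, AnyStr falls back to its full constraint: str | bytes.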
+  public void testAnyStrForUnknown() {
+    doTest("str | bytes",
+           "from typing import AnyStr\n" +
+           "\n" +
+           "def foo(x: AnyStr) -> AnyStr:\n" +
+           "    pass\n" +
+           "\n" +
+           "def bar(x):\n" +
+           "    expr = foo(x)\n");
+  }
+
+  public void testFunctionType() {
+    doTest("(int, str) -> str",
+           "from typing import Function\n" +
+           "\n" +
+           "def foo(expr: Function[[int, str], str]):\n" +
+           "    pass\n");
+  }
+
+  public void testTypeInStringLiteral() {
+    doTest("C",
+           "class C:\n" +
+           "    def foo(self, expr: 'C'):\n" +
+           "        pass\n");
+  }
+
+  public void testQualifiedTypeInStringLiteral() {
+    doTest("str",
+           "import typing\n" +
+           "\n" +
+           "def foo(x: 'typing.AnyStr') -> typing.AnyStr:\n" +
+           "    pass\n" +
+           "\n" +
+           "expr = foo('bar')\n");
+  }
+
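+  /**
+   * Configures a file with the given text on top of the "typing" test data directory and checks
+   * that the expression named "expr" is inferred to the expected type in both the code-analysis
+   * and the user-initiated type evaluation contexts.
+   */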
+  private void doTest(@NotNull String expectedType, @NotNull String text) {
+    myFixture.copyDirectoryToProject("typing", "");
+    myFixture.configureByText(PythonFileType.INSTANCE, text);
+    final PyExpression expr = myFixture.findElementByText("expr", PyExpression.class);
+    final TypeEvalContext codeAnalysis = TypeEvalContext.codeAnalysis(expr.getContainingFile());
+    final TypeEvalContext userInitiated = TypeEvalContext.userInitiated(expr.getContainingFile()).withTracing();
+    assertType(expectedType, expr, codeAnalysis, "code analysis");
+    assertType(expectedType, expr, userInitiated, "user initiated");
+  }
+
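+  // Renders the type inferred in the given context via the documentation provider and compares it to the expected string.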
+  private static void assertType(String expectedType, PyExpression expr, TypeEvalContext context, String contextName) {
+    final PyType actual = context.getType(expr);
+    final String actualType = PythonDocumentationProvider.getTypeName(actual, context);
+    assertEquals("Failed in " + contextName + " context", expectedType, actualType);
+  }
+}
diff --git a/python/testSrc/com/jetbrains/python/PyWrapTest.java b/python/testSrc/com/jetbrains/python/PyWrapTest.java
index 05c21b8..4146391 100644
--- a/python/testSrc/com/jetbrains/python/PyWrapTest.java
+++ b/python/testSrc/com/jetbrains/python/PyWrapTest.java
@@ -17,6 +17,7 @@
 
 import com.intellij.psi.codeStyle.CodeStyleSettings;
 import com.intellij.psi.codeStyle.CodeStyleSettingsManager;
+import com.intellij.psi.codeStyle.CommonCodeStyleSettings;
 import com.jetbrains.python.fixtures.PyTestCase;
 
 /**
@@ -30,17 +31,19 @@
   protected void setUp() throws Exception {
     super.setUp();
     final CodeStyleSettings settings = CodeStyleSettingsManager.getInstance(myFixture.getProject()).getCurrentSettings();
+    final CommonCodeStyleSettings pythonSettings = settings.getCommonSettings(PythonLanguage.getInstance());
     myOldWrap = settings.WRAP_WHEN_TYPING_REACHES_RIGHT_MARGIN;
-    myOldMargin = settings.RIGHT_MARGIN;
+    myOldMargin = pythonSettings.RIGHT_MARGIN;
     settings.WRAP_WHEN_TYPING_REACHES_RIGHT_MARGIN = true;
-    settings.RIGHT_MARGIN = 80;
+    pythonSettings.RIGHT_MARGIN = 80;
   }
 
   @Override
   protected void tearDown() throws Exception {
     final CodeStyleSettings settings = CodeStyleSettingsManager.getInstance(myFixture.getProject()).getCurrentSettings();
+    final CommonCodeStyleSettings pythonSettings = settings.getCommonSettings(PythonLanguage.getInstance());
     settings.WRAP_WHEN_TYPING_REACHES_RIGHT_MARGIN = myOldWrap;
-    settings.RIGHT_MARGIN = myOldMargin;
+    pythonSettings.RIGHT_MARGIN = myOldMargin;
     super.tearDown();
   }
 
@@ -71,9 +74,10 @@
 
   public void testWrapRightMargin() {
     final CodeStyleSettings settings = CodeStyleSettingsManager.getInstance(myFixture.getProject()).getCurrentSettings();
-    int oldValue = settings.RIGHT_MARGIN;
+    final CommonCodeStyleSettings pythonSettings = settings.getCommonSettings(PythonLanguage.getInstance());
+    int oldValue = pythonSettings.RIGHT_MARGIN;
     boolean oldMarginValue = settings.WRAP_WHEN_TYPING_REACHES_RIGHT_MARGIN;
-    settings.RIGHT_MARGIN = 100;
+    pythonSettings.RIGHT_MARGIN = 100;
     settings.WRAP_WHEN_TYPING_REACHES_RIGHT_MARGIN = true;
     try {
       final String testName = "wrap/" + getTestName(true);
@@ -84,7 +88,7 @@
       myFixture.checkResultByFile(testName + ".after.py");
     }
     finally {
-      settings.RIGHT_MARGIN = oldValue;
+      pythonSettings.RIGHT_MARGIN = oldValue;
       settings.WRAP_WHEN_TYPING_REACHES_RIGHT_MARGIN = oldMarginValue;
     }
   }
diff --git a/python/testSrc/com/jetbrains/python/fixtures/PyTestCase.java b/python/testSrc/com/jetbrains/python/fixtures/PyTestCase.java
index 99da2a4..8a237b7 100644
--- a/python/testSrc/com/jetbrains/python/fixtures/PyTestCase.java
+++ b/python/testSrc/com/jetbrains/python/fixtures/PyTestCase.java
@@ -111,6 +111,7 @@
     final PythonLanguageLevelPusher levelPusher = Extensions.findExtension(FilePropertyPusher.EP_NAME, PythonLanguageLevelPusher.class);
     levelPusher.flushLanguageLevelCache();
     super.tearDown();
+    clearFields(this);
   }
 
   @Nullable
diff --git a/python/testSrc/com/jetbrains/python/intentions/PyIntentionTest.java b/python/testSrc/com/jetbrains/python/intentions/PyIntentionTest.java
index 2891d81..058da63 100644
--- a/python/testSrc/com/jetbrains/python/intentions/PyIntentionTest.java
+++ b/python/testSrc/com/jetbrains/python/intentions/PyIntentionTest.java
@@ -259,6 +259,11 @@
     doTest("Convert to 'import sys'");
   }
 
+  // PY-11074
+  public void testImportToImportFrom() {
+    doTest("Convert to 'from __builtin__ import ...'");
+  }
+
   public void testTypeInDocstring() {
     doDocReferenceTest();
   }
diff --git a/python/testSrc/com/jetbrains/python/refactoring/PyInlineLocalTest.java b/python/testSrc/com/jetbrains/python/refactoring/PyInlineLocalTest.java
index 4f02da1..9a764b9 100644
--- a/python/testSrc/com/jetbrains/python/refactoring/PyInlineLocalTest.java
+++ b/python/testSrc/com/jetbrains/python/refactoring/PyInlineLocalTest.java
@@ -18,6 +18,9 @@
 import com.intellij.codeInsight.TargetElementUtilBase;
 import com.intellij.openapi.util.Comparing;
 import com.intellij.psi.PsiElement;
+import com.intellij.psi.codeStyle.CodeStyleSettings;
+import com.intellij.psi.codeStyle.CodeStyleSettingsManager;
+import com.jetbrains.python.PythonLanguage;
 import com.jetbrains.python.fixtures.PyTestCase;
 import com.jetbrains.python.refactoring.inline.PyInlineLocalHandler;
 
@@ -81,7 +84,21 @@
     doTest();
   }
 
+  // PY-12401
   public void testComment() {
     doTest();
   }
+
+  // PY-13114
+  public void testReferenceInParenthesis() {
+    doTest();
+  }
+
+  // PY-12409
+  public void testResultExceedsRightMargin() {
+    final CodeStyleSettings settings = CodeStyleSettingsManager.getSettings(myFixture.getProject());
+    settings.WRAP_LONG_LINES = true;
+    settings.setRightMargin(PythonLanguage.getInstance(), 80);
+    doTest();
+  }
 }