Add base vars and sudo check
This commit is contained in:
parent
c151fd6910
commit
054f5ad80c
8733 changed files with 137813 additions and 15 deletions
1
venv/lib/python3.8/site-packages/debugpy/ThirdPartyNotices.txt
Symbolic link
1
venv/lib/python3.8/site-packages/debugpy/ThirdPartyNotices.txt
Symbolic link
|
@ -0,0 +1 @@
|
|||
/home/runner/.cache/pip/pool/5b/39/93/f39dc19473ad904895ff1b64f7a89acd87950ca581dada8a32d726fe3a
|
38
venv/lib/python3.8/site-packages/debugpy/__init__.py
Normal file
38
venv/lib/python3.8/site-packages/debugpy/__init__.py
Normal file
|
@ -0,0 +1,38 @@
|
|||
# Copyright (c) Microsoft Corporation. All rights reserved.
|
||||
# Licensed under the MIT License. See LICENSE in the project root
|
||||
# for license information.
|
||||
|
||||
"""An implementation of the Debug Adapter Protocol (DAP) for Python.
|
||||
|
||||
https://microsoft.github.io/debug-adapter-protocol/
|
||||
"""
|
||||
|
||||
# debugpy stable public API consists solely of members of this module that are
|
||||
# enumerated below.
|
||||
__all__ = [ # noqa
|
||||
"__version__",
|
||||
"breakpoint",
|
||||
"configure",
|
||||
"connect",
|
||||
"debug_this_thread",
|
||||
"is_client_connected",
|
||||
"listen",
|
||||
"log_to",
|
||||
"trace_this_thread",
|
||||
"wait_for_client",
|
||||
]
|
||||
|
||||
import sys
|
||||
|
||||
assert sys.version_info >= (3, 7), (
|
||||
"Python 3.6 and below is not supported by this version of debugpy; "
|
||||
"use debugpy 1.5.1 or earlier."
|
||||
)
|
||||
|
||||
|
||||
# Actual definitions are in a separate file to work around parsing issues causing
|
||||
# SyntaxError on Python 2 and preventing the above version check from executing.
|
||||
from debugpy.public_api import * # noqa
|
||||
from debugpy.public_api import __version__
|
||||
|
||||
del sys
|
39
venv/lib/python3.8/site-packages/debugpy/__main__.py
Normal file
39
venv/lib/python3.8/site-packages/debugpy/__main__.py
Normal file
|
@ -0,0 +1,39 @@
|
|||
# Copyright (c) Microsoft Corporation. All rights reserved.
|
||||
# Licensed under the MIT License. See LICENSE in the project root
|
||||
# for license information.
|
||||
|
||||
import sys
|
||||
|
||||
|
||||
if __name__ == "__main__":
|
||||
# debugpy can also be invoked directly rather than via -m. In this case, the first
|
||||
# entry on sys.path is the one added automatically by Python for the directory
|
||||
# containing this file. This means that import debugpy will not work, since we need
|
||||
# the parent directory of debugpy/ to be in sys.path, rather than debugpy/ itself.
|
||||
#
|
||||
# The other issue is that many other absolute imports will break, because they
|
||||
# will be resolved relative to debugpy/ - e.g. `import debugger` will then try
|
||||
# to import debugpy/debugger.py.
|
||||
#
|
||||
# To fix both, we need to replace the automatically added entry such that it points
|
||||
# at parent directory of debugpy/ instead of debugpy/ itself, import debugpy with that
|
||||
# in sys.path, and then remove the first entry entry altogether, so that it doesn't
|
||||
# affect any further imports we might do. For example, suppose the user did:
|
||||
#
|
||||
# python /foo/bar/debugpy ...
|
||||
#
|
||||
# At the beginning of this script, sys.path will contain "/foo/bar/debugpy" as the
|
||||
# first entry. What we want is to replace it with "/foo/bar', then import debugpy
|
||||
# with that in effect, and then remove the replaced entry before any more
|
||||
# code runs. The imported debugpy module will remain in sys.modules, and thus all
|
||||
# future imports of it or its submodules will resolve accordingly.
|
||||
if "debugpy" not in sys.modules:
|
||||
# Do not use dirname() to walk up - this can be a relative path, e.g. ".".
|
||||
sys.path[0] = sys.path[0] + "/../"
|
||||
import debugpy # noqa
|
||||
|
||||
del sys.path[0]
|
||||
|
||||
from debugpy.server import cli
|
||||
|
||||
cli.main()
|
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
126
venv/lib/python3.8/site-packages/debugpy/_vendored/__init__.py
Normal file
126
venv/lib/python3.8/site-packages/debugpy/_vendored/__init__.py
Normal file
|
@ -0,0 +1,126 @@
|
|||
# Copyright (c) Microsoft Corporation. All rights reserved.
|
||||
# Licensed under the MIT License. See LICENSE in the project root
|
||||
# for license information.
|
||||
|
||||
import contextlib
|
||||
from importlib import import_module
|
||||
import os
|
||||
import sys
|
||||
|
||||
from . import _util
|
||||
|
||||
|
||||
VENDORED_ROOT = os.path.dirname(os.path.abspath(__file__))
|
||||
# TODO: Move the "pydevd" git submodule to the debugpy/_vendored directory
|
||||
# and then drop the following fallback.
|
||||
if "pydevd" not in os.listdir(VENDORED_ROOT):
|
||||
VENDORED_ROOT = os.path.dirname(VENDORED_ROOT)
|
||||
|
||||
|
||||
def list_all(resolve=False):
|
||||
"""Return the list of vendored projects."""
|
||||
# TODO: Derive from os.listdir(VENDORED_ROOT)?
|
||||
projects = ["pydevd"]
|
||||
if not resolve:
|
||||
return projects
|
||||
return [project_root(name) for name in projects]
|
||||
|
||||
|
||||
def project_root(project):
|
||||
"""Return the path the root dir of the vendored project.
|
||||
|
||||
If "project" is an empty string then the path prefix for vendored
|
||||
projects (e.g. "debugpy/_vendored/") will be returned.
|
||||
"""
|
||||
if not project:
|
||||
project = ""
|
||||
return os.path.join(VENDORED_ROOT, project)
|
||||
|
||||
|
||||
def iter_project_files(project, relative=False, **kwargs):
|
||||
"""Yield (dirname, basename, filename) for all files in the project."""
|
||||
if relative:
|
||||
with _util.cwd(VENDORED_ROOT):
|
||||
for result in _util.iter_all_files(project, **kwargs):
|
||||
yield result
|
||||
else:
|
||||
root = project_root(project)
|
||||
for result in _util.iter_all_files(root, **kwargs):
|
||||
yield result
|
||||
|
||||
|
||||
def iter_packaging_files(project):
|
||||
"""Yield the filenames for all files in the project.
|
||||
|
||||
The filenames are relative to "debugpy/_vendored". This is most
|
||||
useful for the "package data" in a setup.py.
|
||||
"""
|
||||
# TODO: Use default filters? __pycache__ and .pyc?
|
||||
prune_dir = None
|
||||
exclude_file = None
|
||||
try:
|
||||
mod = import_module("._{}_packaging".format(project), __name__)
|
||||
except ImportError:
|
||||
pass
|
||||
else:
|
||||
prune_dir = getattr(mod, "prune_dir", prune_dir)
|
||||
exclude_file = getattr(mod, "exclude_file", exclude_file)
|
||||
results = iter_project_files(
|
||||
project, relative=True, prune_dir=prune_dir, exclude_file=exclude_file
|
||||
)
|
||||
for _, _, filename in results:
|
||||
yield filename
|
||||
|
||||
|
||||
def prefix_matcher(*prefixes):
|
||||
"""Return a module match func that matches any of the given prefixes."""
|
||||
assert prefixes
|
||||
|
||||
def match(name, module):
|
||||
for prefix in prefixes:
|
||||
if name.startswith(prefix):
|
||||
return True
|
||||
else:
|
||||
return False
|
||||
|
||||
return match
|
||||
|
||||
|
||||
def check_modules(project, match, root=None):
|
||||
"""Verify that only vendored modules have been imported."""
|
||||
if root is None:
|
||||
root = project_root(project)
|
||||
extensions = []
|
||||
unvendored = {}
|
||||
for modname, mod in list(sys.modules.items()):
|
||||
if not match(modname, mod):
|
||||
continue
|
||||
try:
|
||||
filename = getattr(mod, "__file__", None)
|
||||
except: # In theory it's possible that any error is raised when accessing __file__
|
||||
filename = None
|
||||
if not filename: # extension module
|
||||
extensions.append(modname)
|
||||
elif not filename.startswith(root):
|
||||
unvendored[modname] = filename
|
||||
return unvendored, extensions
|
||||
|
||||
|
||||
@contextlib.contextmanager
|
||||
def vendored(project, root=None):
|
||||
"""A context manager under which the vendored project will be imported."""
|
||||
if root is None:
|
||||
root = project_root(project)
|
||||
# Add the vendored project directory, so that it gets tried first.
|
||||
sys.path.insert(0, root)
|
||||
try:
|
||||
yield root
|
||||
finally:
|
||||
sys.path.remove(root)
|
||||
|
||||
|
||||
def preimport(project, modules, **kwargs):
|
||||
"""Import each of the named modules out of the vendored project."""
|
||||
with vendored(project, **kwargs):
|
||||
for name in modules:
|
||||
import_module(name)
|
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
|
@ -0,0 +1,48 @@
|
|||
# Copyright (c) Microsoft Corporation. All rights reserved.
|
||||
# Licensed under the MIT License. See LICENSE in the project root
|
||||
# for license information.
|
||||
|
||||
from . import VENDORED_ROOT
|
||||
from ._util import cwd, iter_all_files
|
||||
|
||||
|
||||
INCLUDES = [
|
||||
'setup_pydevd_cython.py',
|
||||
]
|
||||
|
||||
|
||||
def iter_files():
|
||||
# From the root of pydevd repo, we want only scripts and
|
||||
# subdirectories that constitute the package itself (not helper
|
||||
# scripts, tests etc). But when walking down into those
|
||||
# subdirectories, we want everything below.
|
||||
|
||||
with cwd(VENDORED_ROOT):
|
||||
return iter_all_files('pydevd', prune_dir, exclude_file)
|
||||
|
||||
|
||||
def prune_dir(dirname, basename):
|
||||
if basename == '__pycache__':
|
||||
return True
|
||||
elif dirname != 'pydevd':
|
||||
return False
|
||||
elif basename.startswith('pydev'):
|
||||
return False
|
||||
elif basename.startswith('_pydev'):
|
||||
return False
|
||||
return True
|
||||
|
||||
|
||||
def exclude_file(dirname, basename):
|
||||
if dirname == 'pydevd':
|
||||
if basename in INCLUDES:
|
||||
return False
|
||||
elif not basename.endswith('.py'):
|
||||
return True
|
||||
elif 'pydev' not in basename:
|
||||
return True
|
||||
return False
|
||||
|
||||
if basename.endswith('.pyc'):
|
||||
return True
|
||||
return False
|
59
venv/lib/python3.8/site-packages/debugpy/_vendored/_util.py
Normal file
59
venv/lib/python3.8/site-packages/debugpy/_vendored/_util.py
Normal file
|
@ -0,0 +1,59 @@
|
|||
# Copyright (c) Microsoft Corporation. All rights reserved.
|
||||
# Licensed under the MIT License. See LICENSE in the project root
|
||||
# for license information.
|
||||
|
||||
import contextlib
|
||||
import os
|
||||
|
||||
|
||||
@contextlib.contextmanager
|
||||
def cwd(dirname):
|
||||
"""A context manager for operating in a different directory."""
|
||||
orig = os.getcwd()
|
||||
os.chdir(dirname)
|
||||
try:
|
||||
yield orig
|
||||
finally:
|
||||
os.chdir(orig)
|
||||
|
||||
|
||||
def iter_all_files(root, prune_dir=None, exclude_file=None):
|
||||
"""Yield (dirname, basename, filename) for each file in the tree.
|
||||
|
||||
This is an alternative to os.walk() that flattens out the tree and
|
||||
with filtering.
|
||||
"""
|
||||
pending = [root]
|
||||
while pending:
|
||||
dirname = pending.pop(0)
|
||||
for result in _iter_files(dirname, pending, prune_dir, exclude_file):
|
||||
yield result
|
||||
|
||||
|
||||
def iter_tree(root, prune_dir=None, exclude_file=None):
|
||||
"""Yield (dirname, files) for each directory in the tree.
|
||||
|
||||
The list of files is actually a list of (basename, filename).
|
||||
|
||||
This is an alternative to os.walk() with filtering."""
|
||||
pending = [root]
|
||||
while pending:
|
||||
dirname = pending.pop(0)
|
||||
files = []
|
||||
for _, b, f in _iter_files(dirname, pending, prune_dir, exclude_file):
|
||||
files.append((b, f))
|
||||
yield dirname, files
|
||||
|
||||
|
||||
def _iter_files(dirname, subdirs, prune_dir, exclude_file):
|
||||
for basename in os.listdir(dirname):
|
||||
filename = os.path.join(dirname, basename)
|
||||
if os.path.isdir(filename):
|
||||
if prune_dir is not None and prune_dir(dirname, basename):
|
||||
continue
|
||||
subdirs.append(filename)
|
||||
else:
|
||||
# TODO: Use os.path.isfile() to narrow it down?
|
||||
if exclude_file is not None and exclude_file(dirname, basename):
|
||||
continue
|
||||
yield dirname, basename, filename
|
|
@ -0,0 +1,62 @@
|
|||
# Copyright (c) Microsoft Corporation. All rights reserved.
|
||||
# Licensed under the MIT License. See LICENSE in the project root
|
||||
# for license information.
|
||||
|
||||
from importlib import import_module
|
||||
import os
|
||||
import warnings
|
||||
|
||||
from . import check_modules, prefix_matcher, preimport, vendored
|
||||
|
||||
# Ensure that pydevd is our vendored copy.
|
||||
_unvendored, _ = check_modules('pydevd',
|
||||
prefix_matcher('pydev', '_pydev'))
|
||||
if _unvendored:
|
||||
_unvendored = sorted(_unvendored.values())
|
||||
msg = 'incompatible copy of pydevd already imported'
|
||||
# raise ImportError(msg)
|
||||
warnings.warn(msg + ':\n {}'.format('\n '.join(_unvendored)))
|
||||
|
||||
# If debugpy logging is enabled, enable it for pydevd as well
|
||||
if "DEBUGPY_LOG_DIR" in os.environ:
|
||||
os.environ[str("PYDEVD_DEBUG")] = str("True")
|
||||
os.environ[str("PYDEVD_DEBUG_FILE")] = os.environ["DEBUGPY_LOG_DIR"] + str("/debugpy.pydevd.log")
|
||||
|
||||
# Constants must be set before importing any other pydevd module
|
||||
# # due to heavy use of "from" in them.
|
||||
with vendored('pydevd'):
|
||||
pydevd_constants = import_module('_pydevd_bundle.pydevd_constants')
|
||||
# We limit representation size in our representation provider when needed.
|
||||
pydevd_constants.MAXIMUM_VARIABLE_REPRESENTATION_SIZE = 2 ** 32
|
||||
|
||||
# Now make sure all the top-level modules and packages in pydevd are
|
||||
# loaded. Any pydevd modules that aren't loaded at this point, will
|
||||
# be loaded using their parent package's __path__ (i.e. one of the
|
||||
# following).
|
||||
preimport('pydevd', [
|
||||
'_pydev_bundle',
|
||||
'_pydev_runfiles',
|
||||
'_pydevd_bundle',
|
||||
'_pydevd_frame_eval',
|
||||
'pydev_ipython',
|
||||
'pydevd_plugins',
|
||||
'pydevd',
|
||||
])
|
||||
|
||||
# When pydevd is imported it sets the breakpoint behavior, but it needs to be
|
||||
# overridden because by default pydevd will connect to the remote debugger using
|
||||
# its own custom protocol rather than DAP.
|
||||
import pydevd # noqa
|
||||
import debugpy # noqa
|
||||
|
||||
|
||||
def debugpy_breakpointhook():
|
||||
debugpy.breakpoint()
|
||||
|
||||
|
||||
pydevd.install_breakpointhook(debugpy_breakpointhook)
|
||||
|
||||
# Ensure that pydevd uses JSON protocol
|
||||
from _pydevd_bundle import pydevd_constants
|
||||
from _pydevd_bundle import pydevd_defaults
|
||||
pydevd_defaults.PydevdCustomization.DEFAULT_PROTOCOL = pydevd_constants.HTTP_JSON_PROTOCOL
|
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
|
@ -0,0 +1 @@
|
|||
/home/runner/.cache/pip/pool/e3/b0/c4/4298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855
|
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
|
@ -0,0 +1,155 @@
|
|||
'''
|
||||
License: Apache 2.0
|
||||
Author: Yuli Fitterman
|
||||
'''
|
||||
import types
|
||||
|
||||
from _pydevd_bundle.pydevd_constants import IS_JYTHON
|
||||
|
||||
try:
|
||||
import inspect
|
||||
except:
|
||||
import traceback;
|
||||
|
||||
traceback.print_exc() # Ok, no inspect available (search will not work)
|
||||
|
||||
from _pydev_bundle._pydev_imports_tipper import signature_from_docstring
|
||||
|
||||
|
||||
def is_bound_method(obj):
|
||||
if isinstance(obj, types.MethodType):
|
||||
return getattr(obj, '__self__', getattr(obj, 'im_self', None)) is not None
|
||||
else:
|
||||
return False
|
||||
|
||||
|
||||
def get_class_name(instance):
|
||||
return getattr(getattr(instance, "__class__", None), "__name__", None)
|
||||
|
||||
|
||||
def get_bound_class_name(obj):
|
||||
my_self = getattr(obj, '__self__', getattr(obj, 'im_self', None))
|
||||
if my_self is None:
|
||||
return None
|
||||
return get_class_name(my_self)
|
||||
|
||||
|
||||
def get_description(obj):
|
||||
try:
|
||||
ob_call = obj.__call__
|
||||
except:
|
||||
ob_call = None
|
||||
|
||||
if isinstance(obj, type) or type(obj).__name__ == 'classobj':
|
||||
fob = getattr(obj, '__init__', lambda: None)
|
||||
if not isinstance(fob, (types.FunctionType, types.MethodType)):
|
||||
fob = obj
|
||||
elif is_bound_method(ob_call):
|
||||
fob = ob_call
|
||||
else:
|
||||
fob = obj
|
||||
|
||||
argspec = ""
|
||||
fn_name = None
|
||||
fn_class = None
|
||||
if isinstance(fob, (types.FunctionType, types.MethodType)):
|
||||
spec_info = inspect.getfullargspec(fob)
|
||||
argspec = inspect.formatargspec(*spec_info)
|
||||
fn_name = getattr(fob, '__name__', None)
|
||||
if isinstance(obj, type) or type(obj).__name__ == 'classobj':
|
||||
fn_name = "__init__"
|
||||
fn_class = getattr(obj, "__name__", "UnknownClass")
|
||||
elif is_bound_method(obj) or is_bound_method(ob_call):
|
||||
fn_class = get_bound_class_name(obj) or "UnknownClass"
|
||||
|
||||
else:
|
||||
fn_name = getattr(fob, '__name__', None)
|
||||
fn_self = getattr(fob, '__self__', None)
|
||||
if fn_self is not None and not isinstance(fn_self, types.ModuleType):
|
||||
fn_class = get_class_name(fn_self)
|
||||
|
||||
doc_string = get_docstring(ob_call) if is_bound_method(ob_call) else get_docstring(obj)
|
||||
return create_method_stub(fn_name, fn_class, argspec, doc_string)
|
||||
|
||||
|
||||
def create_method_stub(fn_name, fn_class, argspec, doc_string):
|
||||
if fn_name and argspec:
|
||||
doc_string = "" if doc_string is None else doc_string
|
||||
fn_stub = create_function_stub(fn_name, argspec, doc_string, indent=1 if fn_class else 0)
|
||||
if fn_class:
|
||||
expr = fn_class if fn_name == '__init__' else fn_class + '().' + fn_name
|
||||
return create_class_stub(fn_class, fn_stub) + "\n" + expr
|
||||
else:
|
||||
expr = fn_name
|
||||
return fn_stub + "\n" + expr
|
||||
elif doc_string:
|
||||
if fn_name:
|
||||
restored_signature, _ = signature_from_docstring(doc_string, fn_name)
|
||||
if restored_signature:
|
||||
return create_method_stub(fn_name, fn_class, restored_signature, doc_string)
|
||||
return create_function_stub('unknown', '(*args, **kwargs)', doc_string) + '\nunknown'
|
||||
|
||||
else:
|
||||
return ''
|
||||
|
||||
|
||||
def get_docstring(obj):
|
||||
if obj is not None:
|
||||
try:
|
||||
if IS_JYTHON:
|
||||
# Jython
|
||||
doc = obj.__doc__
|
||||
if doc is not None:
|
||||
return doc
|
||||
|
||||
from _pydev_bundle import _pydev_jy_imports_tipper
|
||||
|
||||
is_method, infos = _pydev_jy_imports_tipper.ismethod(obj)
|
||||
ret = ''
|
||||
if is_method:
|
||||
for info in infos:
|
||||
ret += info.get_as_doc()
|
||||
return ret
|
||||
|
||||
else:
|
||||
|
||||
doc = inspect.getdoc(obj)
|
||||
if doc is not None:
|
||||
return doc
|
||||
except:
|
||||
pass
|
||||
else:
|
||||
return ''
|
||||
try:
|
||||
# if no attempt succeeded, try to return repr()...
|
||||
return repr(obj)
|
||||
except:
|
||||
try:
|
||||
# otherwise the class
|
||||
return str(obj.__class__)
|
||||
except:
|
||||
# if all fails, go to an empty string
|
||||
return ''
|
||||
|
||||
|
||||
def create_class_stub(class_name, contents):
|
||||
return "class %s(object):\n%s" % (class_name, contents)
|
||||
|
||||
|
||||
def create_function_stub(fn_name, fn_argspec, fn_docstring, indent=0):
|
||||
|
||||
def shift_right(string, prefix):
|
||||
return ''.join(prefix + line for line in string.splitlines(True))
|
||||
|
||||
fn_docstring = shift_right(inspect.cleandoc(fn_docstring), " " * (indent + 1))
|
||||
ret = '''
|
||||
def %s%s:
|
||||
"""%s"""
|
||||
pass
|
||||
''' % (fn_name, fn_argspec, fn_docstring)
|
||||
ret = ret[1:] # remove first /n
|
||||
ret = ret.replace('\t', " ")
|
||||
if indent:
|
||||
prefix = " " * indent
|
||||
ret = shift_right(ret, prefix)
|
||||
return ret
|
|
@ -0,0 +1,267 @@
|
|||
from collections import namedtuple
|
||||
from string import ascii_letters, digits
|
||||
|
||||
from _pydevd_bundle import pydevd_xml
|
||||
import pydevconsole
|
||||
|
||||
import builtins as __builtin__ # Py3
|
||||
|
||||
try:
|
||||
import java.lang # @UnusedImport
|
||||
from _pydev_bundle import _pydev_jy_imports_tipper
|
||||
_pydev_imports_tipper = _pydev_jy_imports_tipper
|
||||
except ImportError:
|
||||
IS_JYTHON = False
|
||||
from _pydev_bundle import _pydev_imports_tipper
|
||||
|
||||
dir2 = _pydev_imports_tipper.generate_imports_tip_for_module
|
||||
|
||||
|
||||
#=======================================================================================================================
|
||||
# _StartsWithFilter
|
||||
#=======================================================================================================================
|
||||
class _StartsWithFilter:
|
||||
'''
|
||||
Used because we can't create a lambda that'll use an outer scope in jython 2.1
|
||||
'''
|
||||
|
||||
def __init__(self, start_with):
|
||||
self.start_with = start_with.lower()
|
||||
|
||||
def __call__(self, name):
|
||||
return name.lower().startswith(self.start_with)
|
||||
|
||||
|
||||
#=======================================================================================================================
|
||||
# Completer
|
||||
#
|
||||
# This class was gotten from IPython.completer (dir2 was replaced with the completer already in pydev)
|
||||
#=======================================================================================================================
|
||||
class Completer:
|
||||
|
||||
def __init__(self, namespace=None, global_namespace=None):
|
||||
"""Create a new completer for the command line.
|
||||
|
||||
Completer([namespace,global_namespace]) -> completer instance.
|
||||
|
||||
If unspecified, the default namespace where completions are performed
|
||||
is __main__ (technically, __main__.__dict__). Namespaces should be
|
||||
given as dictionaries.
|
||||
|
||||
An optional second namespace can be given. This allows the completer
|
||||
to handle cases where both the local and global scopes need to be
|
||||
distinguished.
|
||||
|
||||
Completer instances should be used as the completion mechanism of
|
||||
readline via the set_completer() call:
|
||||
|
||||
readline.set_completer(Completer(my_namespace).complete)
|
||||
"""
|
||||
|
||||
# Don't bind to namespace quite yet, but flag whether the user wants a
|
||||
# specific namespace or to use __main__.__dict__. This will allow us
|
||||
# to bind to __main__.__dict__ at completion time, not now.
|
||||
if namespace is None:
|
||||
self.use_main_ns = 1
|
||||
else:
|
||||
self.use_main_ns = 0
|
||||
self.namespace = namespace
|
||||
|
||||
# The global namespace, if given, can be bound directly
|
||||
if global_namespace is None:
|
||||
self.global_namespace = {}
|
||||
else:
|
||||
self.global_namespace = global_namespace
|
||||
|
||||
def complete(self, text):
|
||||
"""Return the next possible completion for 'text'.
|
||||
|
||||
This is called successively with state == 0, 1, 2, ... until it
|
||||
returns None. The completion should begin with 'text'.
|
||||
|
||||
"""
|
||||
if self.use_main_ns:
|
||||
# In pydev this option should never be used
|
||||
raise RuntimeError('Namespace must be provided!')
|
||||
self.namespace = __main__.__dict__ # @UndefinedVariable
|
||||
|
||||
if "." in text:
|
||||
return self.attr_matches(text)
|
||||
else:
|
||||
return self.global_matches(text)
|
||||
|
||||
def global_matches(self, text):
|
||||
"""Compute matches when text is a simple name.
|
||||
|
||||
Return a list of all keywords, built-in functions and names currently
|
||||
defined in self.namespace or self.global_namespace that match.
|
||||
|
||||
"""
|
||||
|
||||
def get_item(obj, attr):
|
||||
return obj[attr]
|
||||
|
||||
a = {}
|
||||
|
||||
for dict_with_comps in [__builtin__.__dict__, self.namespace, self.global_namespace]: # @UndefinedVariable
|
||||
a.update(dict_with_comps)
|
||||
|
||||
filter = _StartsWithFilter(text)
|
||||
|
||||
return dir2(a, a.keys(), get_item, filter)
|
||||
|
||||
def attr_matches(self, text):
|
||||
"""Compute matches when text contains a dot.
|
||||
|
||||
Assuming the text is of the form NAME.NAME....[NAME], and is
|
||||
evaluatable in self.namespace or self.global_namespace, it will be
|
||||
evaluated and its attributes (as revealed by dir()) are used as
|
||||
possible completions. (For class instances, class members are are
|
||||
also considered.)
|
||||
|
||||
WARNING: this can still invoke arbitrary C code, if an object
|
||||
with a __getattr__ hook is evaluated.
|
||||
|
||||
"""
|
||||
import re
|
||||
|
||||
# Another option, seems to work great. Catches things like ''.<tab>
|
||||
m = re.match(r"(\S+(\.\w+)*)\.(\w*)$", text) # @UndefinedVariable
|
||||
|
||||
if not m:
|
||||
return []
|
||||
|
||||
expr, attr = m.group(1, 3)
|
||||
try:
|
||||
obj = eval(expr, self.namespace)
|
||||
except:
|
||||
try:
|
||||
obj = eval(expr, self.global_namespace)
|
||||
except:
|
||||
return []
|
||||
|
||||
filter = _StartsWithFilter(attr)
|
||||
|
||||
words = dir2(obj, filter=filter)
|
||||
|
||||
return words
|
||||
|
||||
|
||||
def generate_completions(frame, act_tok):
|
||||
'''
|
||||
:return list(tuple(method_name, docstring, parameters, completion_type))
|
||||
|
||||
method_name: str
|
||||
docstring: str
|
||||
parameters: str -- i.e.: "(a, b)"
|
||||
completion_type is an int
|
||||
See: _pydev_bundle._pydev_imports_tipper for TYPE_ constants
|
||||
'''
|
||||
if frame is None:
|
||||
return []
|
||||
|
||||
# Not using frame.f_globals because of https://sourceforge.net/tracker2/?func=detail&aid=2541355&group_id=85796&atid=577329
|
||||
# (Names not resolved in generator expression in method)
|
||||
# See message: http://mail.python.org/pipermail/python-list/2009-January/526522.html
|
||||
updated_globals = {}
|
||||
updated_globals.update(frame.f_globals)
|
||||
updated_globals.update(frame.f_locals) # locals later because it has precedence over the actual globals
|
||||
|
||||
if pydevconsole.IPYTHON:
|
||||
completions = pydevconsole.get_completions(act_tok, act_tok, updated_globals, frame.f_locals)
|
||||
else:
|
||||
completer = Completer(updated_globals, None)
|
||||
# list(tuple(name, descr, parameters, type))
|
||||
completions = completer.complete(act_tok)
|
||||
|
||||
return completions
|
||||
|
||||
|
||||
def generate_completions_as_xml(frame, act_tok):
|
||||
completions = generate_completions(frame, act_tok)
|
||||
return completions_to_xml(completions)
|
||||
|
||||
|
||||
def completions_to_xml(completions):
|
||||
valid_xml = pydevd_xml.make_valid_xml_value
|
||||
quote = pydevd_xml.quote
|
||||
msg = ["<xml>"]
|
||||
|
||||
for comp in completions:
|
||||
msg.append('<comp p0="')
|
||||
msg.append(valid_xml(quote(comp[0], '/>_= \t')))
|
||||
msg.append('" p1="')
|
||||
msg.append(valid_xml(quote(comp[1], '/>_= \t')))
|
||||
msg.append('" p2="')
|
||||
msg.append(valid_xml(quote(comp[2], '/>_= \t')))
|
||||
msg.append('" p3="')
|
||||
msg.append(valid_xml(quote(comp[3], '/>_= \t')))
|
||||
msg.append('"/>')
|
||||
msg.append("</xml>")
|
||||
|
||||
return ''.join(msg)
|
||||
|
||||
|
||||
identifier_start = ascii_letters + '_'
|
||||
identifier_part = ascii_letters + '_' + digits
|
||||
|
||||
identifier_start = set(identifier_start)
|
||||
identifier_part = set(identifier_part)
|
||||
|
||||
|
||||
def isidentifier(s):
|
||||
return s.isidentifier()
|
||||
|
||||
|
||||
TokenAndQualifier = namedtuple('TokenAndQualifier', 'token, qualifier')
|
||||
|
||||
|
||||
def extract_token_and_qualifier(text, line=0, column=0):
|
||||
'''
|
||||
Extracts the token a qualifier from the text given the line/colum
|
||||
(see test_extract_token_and_qualifier for examples).
|
||||
|
||||
:param unicode text:
|
||||
:param int line: 0-based
|
||||
:param int column: 0-based
|
||||
'''
|
||||
# Note: not using the tokenize module because text should be unicode and
|
||||
# line/column refer to the unicode text (otherwise we'd have to know
|
||||
# those ranges after converted to bytes).
|
||||
if line < 0:
|
||||
line = 0
|
||||
if column < 0:
|
||||
column = 0
|
||||
|
||||
if isinstance(text, bytes):
|
||||
text = text.decode('utf-8')
|
||||
|
||||
lines = text.splitlines()
|
||||
try:
|
||||
text = lines[line]
|
||||
except IndexError:
|
||||
return TokenAndQualifier(u'', u'')
|
||||
|
||||
if column >= len(text):
|
||||
column = len(text)
|
||||
|
||||
text = text[:column]
|
||||
token = u''
|
||||
qualifier = u''
|
||||
|
||||
temp_token = []
|
||||
for i in range(column - 1, -1, -1):
|
||||
c = text[i]
|
||||
if c in identifier_part or isidentifier(c) or c == u'.':
|
||||
temp_token.append(c)
|
||||
else:
|
||||
break
|
||||
temp_token = u''.join(reversed(temp_token))
|
||||
if u'.' in temp_token:
|
||||
temp_token = temp_token.split(u'.')
|
||||
token = u'.'.join(temp_token[:-1])
|
||||
qualifier = temp_token[-1]
|
||||
else:
|
||||
qualifier = temp_token
|
||||
|
||||
return TokenAndQualifier(token, qualifier)
|
|
@ -0,0 +1,14 @@
|
|||
# We must redefine it in Py3k if it's not already there
|
||||
def execfile(file, glob=None, loc=None):
|
||||
if glob is None:
|
||||
import sys
|
||||
glob = sys._getframe().f_back.f_globals
|
||||
if loc is None:
|
||||
loc = glob
|
||||
|
||||
import tokenize
|
||||
with tokenize.open(file) as stream:
|
||||
contents = stream.read()
|
||||
|
||||
# execute the script (note: it's important to compile first to have the filename set in debug mode)
|
||||
exec(compile(contents + "\n", file, 'exec'), glob, loc)
|
|
@ -0,0 +1 @@
|
|||
/home/runner/.cache/pip/pool/57/26/19/5e17accfeb7f7eb7069b39ba6f92b671cbfbc984535d671d6eac70a733
|
|
@ -0,0 +1 @@
|
|||
/home/runner/.cache/pip/pool/61/8a/61/c4dc130f2683ef7b16ce75aa738d42970e6f1c5f39fab2002f01ea5c1d
|
|
@ -0,0 +1,373 @@
|
|||
import inspect
|
||||
import os.path
|
||||
import sys
|
||||
|
||||
from _pydev_bundle._pydev_tipper_common import do_find
|
||||
from _pydevd_bundle.pydevd_utils import hasattr_checked, dir_checked
|
||||
|
||||
from inspect import getfullargspec
|
||||
|
||||
|
||||
def getargspec(*args, **kwargs):
|
||||
arg_spec = getfullargspec(*args, **kwargs)
|
||||
return arg_spec.args, arg_spec.varargs, arg_spec.varkw, arg_spec.defaults, arg_spec.kwonlyargs or [], arg_spec.kwonlydefaults or {}
|
||||
|
||||
|
||||
# completion types.
|
||||
TYPE_IMPORT = '0'
|
||||
TYPE_CLASS = '1'
|
||||
TYPE_FUNCTION = '2'
|
||||
TYPE_ATTR = '3'
|
||||
TYPE_BUILTIN = '4'
|
||||
TYPE_PARAM = '5'
|
||||
|
||||
|
||||
def _imp(name, log=None):
|
||||
try:
|
||||
return __import__(name)
|
||||
except:
|
||||
if '.' in name:
|
||||
sub = name[0:name.rfind('.')]
|
||||
|
||||
if log is not None:
|
||||
log.add_content('Unable to import', name, 'trying with', sub)
|
||||
log.add_exception()
|
||||
|
||||
return _imp(sub, log)
|
||||
else:
|
||||
s = 'Unable to import module: %s - sys.path: %s' % (str(name), sys.path)
|
||||
if log is not None:
|
||||
log.add_content(s)
|
||||
log.add_exception()
|
||||
|
||||
raise ImportError(s)
|
||||
|
||||
|
||||
IS_IPY = False
|
||||
if sys.platform == 'cli':
|
||||
IS_IPY = True
|
||||
_old_imp = _imp
|
||||
|
||||
def _imp(name, log=None):
|
||||
# We must add a reference in clr for .Net
|
||||
import clr # @UnresolvedImport
|
||||
initial_name = name
|
||||
while '.' in name:
|
||||
try:
|
||||
clr.AddReference(name)
|
||||
break # If it worked, that's OK.
|
||||
except:
|
||||
name = name[0:name.rfind('.')]
|
||||
else:
|
||||
try:
|
||||
clr.AddReference(name)
|
||||
except:
|
||||
pass # That's OK (not dot net module).
|
||||
|
||||
return _old_imp(initial_name, log)
|
||||
|
||||
|
||||
def get_file(mod):
    """Best-effort path of the source file backing *mod*.

    Prefers ``inspect.getsourcefile``/``inspect.getfile``; on failure falls
    back to ``mod.__file__``. Returns None when no file can be determined.
    """
    f = None
    try:
        f = inspect.getsourcefile(mod) or inspect.getfile(mod)
    except:
        try:
            f = getattr(mod, '__file__', None)
        except:
            f = None
        # If __file__ points at a compiled file, prefer the sibling .py
        # source when it exists.
        # Fix: the original wrote f.lower(f[-4:]), which raises TypeError
        # (str.lower takes no arguments); the intent is to lower-case the
        # extension before comparing.
        if f and f[-4:].lower() in ['.pyc', '.pyo']:
            filename = f[:-4] + '.py'
            if os.path.exists(filename):
                f = filename

    return f
|
||||
|
||||
|
||||
def Find(name, log=None):
    """Resolve dotted *name* to ``(file, resolved_obj, top_module, found_as)``.

    *file* is the source file of the deepest module reached (or None),
    *resolved_obj* is the final attribute/module, *top_module* the first
    imported module, and *found_as* the dotted trail of non-module
    components that were resolved via getattr.
    """
    f = None

    mod = _imp(name, log)
    parent = mod
    foundAs = ''

    if inspect.ismodule(mod):
        f = get_file(mod)

    components = name.split('.')

    old_comp = None
    for comp in components[1:]:
        try:
            # this happens in the following case:
            # we have mx.DateTime.mxDateTime.mxDateTime.pyd
            # but after importing it, mx.DateTime.mxDateTime shadows access to mxDateTime.pyd
            mod = getattr(mod, comp)
        except AttributeError:
            # A repeated component (see the shadowing case above) is
            # tolerated; any other missing attribute propagates.
            if old_comp != comp:
                raise

        if inspect.ismodule(mod):
            f = get_file(mod)
        else:
            # Non-module component: record it in the found_as trail.
            if len(foundAs) > 0:
                foundAs = foundAs + '.'
            foundAs = foundAs + comp

        old_comp = comp

    return f, mod, parent, foundAs
|
||||
|
||||
|
||||
def search_definition(data):
    '''@return file, line, col
    '''
    # Normalize the request: drop newlines and any trailing dot (completion
    # is often triggered right after typing '.').
    token = data.replace('\n', '')
    if token.endswith('.'):
        token = token.rstrip('.')

    f, mod, parent, found_as = Find(token)
    try:
        location = do_find(f, mod)
    except:
        # Fall back to locating the containing (top-level) module.
        location = do_find(f, parent)
    return location, found_as
|
||||
|
||||
|
||||
def generate_tip(data, log=None):
    """Return ``(file, completions)`` for the dotted name in *data*.

    *completions* is the tuple list produced by
    generate_imports_tip_for_module for the resolved object.
    """
    dotted = data.replace('\n', '')
    if dotted.endswith('.'):
        dotted = dotted.rstrip('.')

    f, mod, parent, found_as = Find(dotted, log)
    # print_ >> open('temp.txt', 'w'), f
    tips = generate_imports_tip_for_module(mod)
    return f, tips
|
||||
|
||||
|
||||
def check_char(c):
    """Map characters invalid inside an identifier ('-' and '.') to '_'."""
    return '_' if c in ('-', '.') else c
|
||||
|
||||
|
||||
_SENTINEL = object()
|
||||
|
||||
|
||||
def generate_imports_tip_for_module(obj_to_complete, dir_comps=None, getattr=getattr, filter=lambda name:True):
    '''
    @param obj_to_complete: the object from where we should get the completions
    @param dir_comps: if passed, we should not 'dir' the object and should just iterate those passed as a parameter
    @param getattr: the way to get a given object from the obj_to_complete (used for the completer)
    @param filter: a callable that receives the name and decides if it should be appended or not to the results
    @return: list of tuples, so that each tuple represents a completion with:
        name, doc, args, type (from the TYPE_* constants)
    '''
    ret = []

    if dir_comps is None:
        dir_comps = dir_checked(obj_to_complete)
        if hasattr_checked(obj_to_complete, '__dict__'):
            dir_comps.append('__dict__')
        if hasattr_checked(obj_to_complete, '__class__'):
            dir_comps.append('__class__')

    get_complete_info = True

    if len(dir_comps) > 1000:
        # ok, we don't want to let our users wait forever...
        # no complete info for you...
        get_complete_info = False

    dontGetDocsOn = (float, int, str, tuple, list, dict)
    dontGetattrOn = (dict, list, set, tuple)
    for d in dir_comps:

        if d is None:
            continue

        if not filter(d):
            continue

        args = ''

        try:
            try:
                if isinstance(obj_to_complete, dontGetattrOn):
                    raise Exception('Since python 3.9, e.g. "dict[str]" will return'
                                    " a dict that's only supposed to take strings. "
                                    'Interestingly, e.g. dict["val"] is also valid '
                                    'and presumably represents a dict that only takes '
                                    'keys that are "val". This breaks our check for '
                                    'class attributes.')
                # Prefer the class attribute (unbound descriptor) over the
                # instance attribute.
                obj = getattr(obj_to_complete.__class__, d)
            except:
                obj = getattr(obj_to_complete, d)
        except:  # just ignore and get it without additional info
            ret.append((d, '', args, TYPE_BUILTIN))
        else:

            if get_complete_info:
                try:
                    retType = TYPE_BUILTIN

                    # check if we have to get docs
                    getDoc = True
                    for class_ in dontGetDocsOn:

                        if isinstance(obj, class_):
                            getDoc = False
                            break

                    doc = ''
                    if getDoc:
                        # no need to get this info... too many constants are defined and
                        # makes things much slower (passing all that through sockets takes quite some time)
                        try:
                            doc = inspect.getdoc(obj)
                            if doc is None:
                                doc = ''
                        except:  # may happen on jython when checking java classes (so, just ignore it)
                            doc = ''

                    if inspect.ismethod(obj) or inspect.isbuiltin(obj) or inspect.isfunction(obj) or inspect.isroutine(obj):
                        try:
                            args, vargs, kwargs, defaults, kwonly_args, kwonly_defaults = getargspec(obj)

                            args = args[:]

                            # Render keyword-only parameters, with their
                            # default values when available.
                            for kwonly_arg in kwonly_args:
                                default = kwonly_defaults.get(kwonly_arg, _SENTINEL)
                                if default is not _SENTINEL:
                                    args.append('%s=%s' % (kwonly_arg, default))
                                else:
                                    args.append(str(kwonly_arg))

                            args = '(%s)' % (', '.join(args))
                        except TypeError:
                            # ok, let's see if we can get the arguments from the doc
                            args, doc = signature_from_docstring(doc, getattr(obj, '__name__', None))

                        retType = TYPE_FUNCTION

                    elif inspect.isclass(obj):
                        retType = TYPE_CLASS

                    elif inspect.ismodule(obj):
                        retType = TYPE_IMPORT

                    else:
                        retType = TYPE_ATTR

                    # add token and doc to return - assure only strings.
                    ret.append((d, doc, args, retType))

                except:  # just ignore and get it without aditional info
                    ret.append((d, '', args, TYPE_BUILTIN))

            else:  # get_complete_info == False
                if inspect.ismethod(obj) or inspect.isbuiltin(obj) or inspect.isfunction(obj) or inspect.isroutine(obj):
                    retType = TYPE_FUNCTION

                elif inspect.isclass(obj):
                    retType = TYPE_CLASS

                elif inspect.ismodule(obj):
                    retType = TYPE_IMPORT

                else:
                    retType = TYPE_ATTR
                # ok, no complete info, let's try to do this as fast and clean as possible
                # so, no docs for this kind of information, only the signatures
                ret.append((d, '', str(args), retType))

    return ret
|
||||
|
||||
|
||||
def signature_from_docstring(doc, obj_name):
    """Best-effort extraction of a call signature from a docstring.

    Used for builtins where getargspec fails. Returns ``(args, doc)`` where
    *args* is a parenthesized parameter string (``'()'`` when nothing could
    be extracted); *doc* may be rewritten (IronPython overload splitting).
    Any parsing failure is deliberately swallowed.
    """
    args = '()'
    try:
        found = False
        if len(doc) > 0:
            if IS_IPY:
                # Handle case where we have the situation below
                # sort(self, object cmp, object key)
                # sort(self, object cmp, object key, bool reverse)
                # sort(self)
                # sort(self, object cmp)

                # Or: sort(self: list, cmp: object, key: object)
                # sort(self: list, cmp: object, key: object, reverse: bool)
                # sort(self: list)
                # sort(self: list, cmp: object)
                if obj_name:
                    name = obj_name + '('

                    # Fix issue where it was appearing sort(aa)sort(bb)sort(cc) in the same line.
                    lines = doc.splitlines()
                    if len(lines) == 1:
                        c = doc.count(name)
                        if c > 1:
                            doc = ('\n' + name).join(doc.split(name))

                    # Pick the longest overload line: it carries the most
                    # parameter information.
                    major = ''
                    for line in doc.splitlines():
                        if line.startswith(name) and line.endswith(')'):
                            if len(line) > len(major):
                                major = line
                    if major:
                        args = major[major.index('('):]
                        found = True

            if not found:
                # CPython-style docstrings: the signature is usually the
                # first line, terminated by '->', '--' or a line break.
                i = doc.find('->')
                if i < 0:
                    i = doc.find('--')
                    if i < 0:
                        i = doc.find('\n')
                        if i < 0:
                            i = doc.find('\r')

                if i > 0:
                    s = doc[0:i]
                    s = s.strip()

                    # let's see if we have a docstring in the first line
                    if s[-1] == ')':
                        start = s.find('(')
                        if start >= 0:
                            # Cut optional-argument brackets ('[') if present.
                            end = s.find('[')
                            if end <= 0:
                                end = s.find(')')
                                if end <= 0:
                                    end = len(s)

                            args = s[start:end]
                            if not args[-1] == ')':
                                args = args + ')'

                            # now, get rid of unwanted chars
                            l = len(args) - 1
                            r = []
                            for i in range(len(args)):
                                # Keep the surrounding parentheses untouched;
                                # sanitize everything in between.
                                if i == 0 or i == l:
                                    r.append(args[i])
                                else:
                                    r.append(check_char(args[i]))

                            args = ''.join(r)

        if IS_IPY:
            # IronPython reports 'self: type' — collapse it to plain 'self'
            # and drop anything after the closing parenthesis.
            if args.startswith('(self:'):
                i = args.find(',')
                if i >= 0:
                    args = '(self' + args[i:]
                else:
                    args = '(self)'
                i = args.find(')')
                if i > 0:
                    args = args[:i + 1]

    except:
        pass
    return args, doc
|
|
@ -0,0 +1,492 @@
|
|||
import traceback
|
||||
from io import StringIO
|
||||
from java.lang import StringBuffer # @UnresolvedImport
|
||||
from java.lang import String # @UnresolvedImport
|
||||
import java.lang # @UnresolvedImport
|
||||
import sys
|
||||
from _pydev_bundle._pydev_tipper_common import do_find
|
||||
|
||||
from org.python.core import PyReflectedFunction # @UnresolvedImport
|
||||
|
||||
from org.python import core # @UnresolvedImport
|
||||
from org.python.core import PyClass # @UnresolvedImport
|
||||
|
||||
# completion types.
|
||||
# Completion entry categories (same codes as _pydev_imports_tipper).
TYPE_IMPORT = '0'    # module / import
TYPE_CLASS = '1'     # class
TYPE_FUNCTION = '2'  # function / method / routine
TYPE_ATTR = '3'      # plain attribute
TYPE_BUILTIN = '4'   # builtin, or anything whose kind couldn't be determined
TYPE_PARAM = '5'     # parameter
|
||||
|
||||
|
||||
def _imp(name):
|
||||
try:
|
||||
return __import__(name)
|
||||
except:
|
||||
if '.' in name:
|
||||
sub = name[0:name.rfind('.')]
|
||||
return _imp(sub)
|
||||
else:
|
||||
s = 'Unable to import module: %s - sys.path: %s' % (str(name), sys.path)
|
||||
raise RuntimeError(s)
|
||||
|
||||
|
||||
import java.util
# Cached __file__ of a core Java package. Used by Find() as a fallback for
# java.lang members (java.lang.__file__ is None on Jython 2.7, whereas it
# pointed to rt.jar on Jython 2.5 — see the comment in Find()).
_java_rt_file = getattr(java.util, '__file__', None)
|
||||
|
||||
|
||||
def Find(name):
    """Resolve dotted *name* to ``(file, resolved_obj, top_module, found_as)``.

    Jython variant: maps some CPython builtin names to their Jython
    implementation classes, normalizes compiled-file extensions, and falls
    back to the cached Java runtime file for java.lang members.
    """
    f = None
    if name.startswith('__builtin__'):
        # CPython builtins are backed by org.python.core classes on Jython.
        if name == '__builtin__.str':
            name = 'org.python.core.PyString'
        elif name == '__builtin__.dict':
            name = 'org.python.core.PyDictionary'

    mod = _imp(name)
    parent = mod
    foundAs = ''

    try:
        f = getattr(mod, '__file__', None)
    except:
        f = None

    components = name.split('.')
    old_comp = None
    for comp in components[1:]:
        try:
            # this happens in the following case:
            # we have mx.DateTime.mxDateTime.mxDateTime.pyd
            # but after importing it, mx.DateTime.mxDateTime does shadows access to mxDateTime.pyd
            mod = getattr(mod, comp)
        except AttributeError:
            # A repeated component (shadowing case above) is tolerated; any
            # other missing attribute propagates.
            if old_comp != comp:
                raise

        if hasattr(mod, '__file__'):
            f = mod.__file__
        else:
            # Non-module component: record it in the found_as trail.
            if len(foundAs) > 0:
                foundAs = foundAs + '.'
            foundAs = foundAs + comp

        old_comp = comp

    if f is None and name.startswith('java.lang'):
        # Hack: java.lang.__file__ is None on Jython 2.7 (whereas it pointed to rt.jar on Jython 2.5).
        f = _java_rt_file

    if f is not None:
        # Normalize compiled artifacts back to their source file names.
        if f.endswith('.pyc'):
            f = f[:-1]
        elif f.endswith('$py.class'):
            f = f[:-len('$py.class')] + '.py'
    return f, mod, parent, foundAs
|
||||
|
||||
|
||||
def format_param_class_name(paramClassName):
    """Normalize a JVM/Jython parameter type name for display.

    Strips the ``<type 'x'>`` repr wrapper and converts JVM array
    descriptors (``[C``, ``[B``, ``[I``, ``[Lpkg.Cls;``) to Java-style
    ``type[]`` names.
    """
    prefix = '<type \''
    if paramClassName.startswith(prefix) and paramClassName.endswith('\'>'):
        paramClassName = paramClassName[len(prefix):-2]

    if paramClassName.startswith('['):
        primitive_arrays = {'[C': 'char[]', '[B': 'byte[]', '[I': 'int[]'}
        if paramClassName in primitive_arrays:
            paramClassName = primitive_arrays[paramClassName]
        elif paramClassName.startswith('[L') and paramClassName.endswith(';'):
            # Object array descriptor: [Ljava.lang.String; -> java.lang.String[]
            paramClassName = paramClassName[2:-1] + '[]'
    return paramClassName
|
||||
|
||||
|
||||
def generate_tip(data, log=None):
    """Return ``(file, completions)`` for the dotted name in *data*.

    *log* is accepted for interface parity with the CPython tipper but is
    not used here.
    """
    dotted = data.replace('\n', '')
    if dotted.endswith('.'):
        dotted = dotted.rstrip('.')

    f, mod, parent, found_as = Find(dotted)
    tips = generate_imports_tip_for_module(mod)
    return f, tips
|
||||
|
||||
|
||||
#=======================================================================================================================
|
||||
# Info
|
||||
#=======================================================================================================================
|
||||
#=======================================================================================================================
# Info
#=======================================================================================================================
class Info:
    """Signature information gathered for one Java/Jython callable."""

    def __init__(self, name, **kwargs):
        self.name = name
        self.doc = kwargs.get('doc', None)
        self.args = kwargs.get('args', ())  # tuple of strings
        self.varargs = kwargs.get('varargs', None)  # string
        self.kwargs = kwargs.get('kwargs', None)  # string
        self.ret = kwargs.get('ret', None)  # string

    def basic_as_str(self):
        '''@returns this class information as a string (just basic format)
        '''
        return 'function:%s args=%s, varargs=%s, kwargs=%s, docs:%s' % (
            self.name, self.args, self.varargs, self.kwargs, self.doc)

    def get_as_doc(self):
        """Render the gathered information as an @tag-annotated doc string."""
        pieces = [str(self.name)]

        if self.doc:
            pieces.append('\n@doc %s\n' % str(self.doc))

        if self.args:
            pieces.append('\n@params ')
            for arg in self.args:
                pieces.append(str(format_param_class_name(arg)) + ' ')

        if self.varargs:
            pieces.append('\n@varargs ' + str(self.varargs))

        if self.kwargs:
            pieces.append('\n@kwargs ' + str(self.kwargs))

        if self.ret:
            pieces.append('\n@return ' + str(format_param_class_name(str(self.ret))))

        return ''.join(pieces)
|
||||
|
||||
|
||||
def isclass(cls):
    # True for Jython-level classes (PyClass) and for java.lang.Class objects.
    return isinstance(cls, core.PyClass) or type(cls) == java.lang.Class
|
||||
|
||||
|
||||
def ismethod(func):
    '''this function should return the information gathered on a function

    @param func: this is the function we want to get info on
    @return a tuple where:
        0 = indicates whether the parameter passed is a method or not
        1 = a list of classes 'Info', with the info gathered from the function
            this is a list because when we have methods from java with the same name and different signatures,
            we actually have many methods, each with its own set of arguments
    '''

    try:
        if isinstance(func, core.PyFunction):
            # ok, this is from python, created by jython
            # print_ ' PyFunction'

            def getargs(func_code):
                """Get information about the arguments accepted by a code object.

                Three things are returned: (args, varargs, varkw), where 'args' is
                a list of argument names (possibly containing nested lists), and
                'varargs' and 'varkw' are the names of the * and ** arguments or None."""

                nargs = func_code.co_argcount
                names = func_code.co_varnames
                args = list(names[:nargs])
                step = 0

                # Older Jython exposes the CO_* flags on CodeFlag rather than
                # on the code object itself.
                if not hasattr(func_code, 'CO_VARARGS'):
                    from org.python.core import CodeFlag  # @UnresolvedImport
                    co_varargs_flag = CodeFlag.CO_VARARGS.flag
                    co_varkeywords_flag = CodeFlag.CO_VARKEYWORDS.flag
                else:
                    co_varargs_flag = func_code.CO_VARARGS
                    co_varkeywords_flag = func_code.CO_VARKEYWORDS

                varargs = None
                if func_code.co_flags & co_varargs_flag:
                    varargs = func_code.co_varnames[nargs]
                    nargs = nargs + 1
                varkw = None
                if func_code.co_flags & co_varkeywords_flag:
                    varkw = func_code.co_varnames[nargs]
                return args, varargs, varkw

            args = getargs(func.func_code)
            return 1, [Info(func.func_name, args=args[0], varargs=args[1], kwargs=args[2], doc=func.func_doc)]

        if isinstance(func, core.PyMethod):
            # this is something from java itself, and jython just wrapped it...

            # things to play in func:
            # ['__call__', '__class__', '__cmp__', '__delattr__', '__dir__', '__doc__', '__findattr__', '__name__', '_doget', 'im_class',
            # 'im_func', 'im_self', 'toString']
            # print_ ' PyMethod'
            # that's the PyReflectedFunction... keep going to get it
            func = func.im_func

        if isinstance(func, PyReflectedFunction):
            # this is something from java itself, and jython just wrapped it...

            # print_ ' PyReflectedFunction'

            # One Info per Java overload.
            infos = []
            for i in range(len(func.argslist)):
                # things to play in func.argslist[i]:

                # 'PyArgsCall', 'PyArgsKeywordsCall', 'REPLACE', 'StandardCall', 'args', 'compare', 'compareTo', 'data', 'declaringClass'
                # 'flags', 'isStatic', 'matches', 'precedence']

                # print_ ' ', func.argslist[i].data.__class__
                # func.argslist[i].data.__class__ == java.lang.reflect.Method

                if func.argslist[i]:
                    met = func.argslist[i].data
                    name = met.getName()
                    try:
                        ret = met.getReturnType()
                    except AttributeError:
                        ret = ''
                    parameterTypes = met.getParameterTypes()

                    args = []
                    for j in range(len(parameterTypes)):
                        paramTypesClass = parameterTypes[j]
                        try:
                            try:
                                paramClassName = paramTypesClass.getName()
                            except:
                                paramClassName = paramTypesClass.getName(paramTypesClass)
                        except AttributeError:
                            try:
                                paramClassName = repr(paramTypesClass)  # should be something like <type 'object'>
                                paramClassName = paramClassName.split('\'')[1]
                            except:
                                paramClassName = repr(paramTypesClass)  # just in case something else happens... it will at least be visible
                        # if the parameter equals [C, it means it is a char array, so, let's change it
                        a = format_param_class_name(paramClassName)
                        # a = a.replace('[]','Array')
                        # a = a.replace('Object', 'obj')
                        # a = a.replace('String', 's')
                        # a = a.replace('Integer', 'i')
                        # a = a.replace('Char', 'c')
                        # a = a.replace('Double', 'd')
                        args.append(a)  # so we don't leave invalid code

                    info = Info(name, args=args, ret=ret)
                    # print_ info.basic_as_str()
                    infos.append(info)

            return 1, infos
    except Exception:
        # Report the failure as a pseudo-method so the caller still has
        # something to show.
        s = StringIO()
        traceback.print_exc(file=s)
        return 1, [Info(str('ERROR'), doc=s.getvalue())]

    return 0, None
|
||||
|
||||
|
||||
def ismodule(mod):
    """Return truthy if *mod* looks like a (Java or Python/Jython) module."""
    # java modules... do we have other way to know that?
    if not hasattr(mod, 'getClass') and not hasattr(mod, '__class__') \
            and hasattr(mod, '__name__'):
        return 1

    return isinstance(mod, core.PyModule)
|
||||
|
||||
|
||||
def dir_obj(obj):
    """dir()-like listing for *obj* that also walks Java reflection data.

    For java.lang.Class objects the declared methods/fields of the class,
    its superclasses and interfaces are collected (plain dir() misses
    members inherited from interfaces); names already found are tracked in
    a HashMap so the final dir() pass only adds new ones.
    """
    ret = []
    found = java.util.HashMap()
    original = obj
    if hasattr(obj, '__class__'):
        if obj.__class__ == java.lang.Class:

            # get info about superclasses
            classes = []
            classes.append(obj)
            try:
                c = obj.getSuperclass()
            except TypeError:
                # may happen on jython when getting the java.lang.Class class
                c = obj.getSuperclass(obj)

            while c != None:
                classes.append(c)
                c = c.getSuperclass()

            # get info about interfaces
            interfs = []
            for obj in classes:
                try:
                    interfs.extend(obj.getInterfaces())
                except TypeError:
                    interfs.extend(obj.getInterfaces(obj))
            classes.extend(interfs)

            # now is the time when we actually get info on the declared methods and fields
            for obj in classes:
                try:
                    declaredMethods = obj.getDeclaredMethods()
                except TypeError:
                    declaredMethods = obj.getDeclaredMethods(obj)

                try:
                    declaredFields = obj.getDeclaredFields()
                except TypeError:
                    declaredFields = obj.getDeclaredFields(obj)

                for i in range(len(declaredMethods)):
                    name = declaredMethods[i].getName()
                    ret.append(name)
                    found.put(name, 1)

                for i in range(len(declaredFields)):
                    name = declaredFields[i].getName()
                    ret.append(name)
                    found.put(name, 1)

        elif isclass(obj.__class__):
            d = dir(obj.__class__)
            for name in d:
                ret.append(name)
                found.put(name, 1)

    # this simple dir does not always get all the info, that's why we have the part before
    # (e.g.: if we do a dir on String, some methods that are from other interfaces such as
    # charAt don't appear)
    d = dir(original)
    for name in d:
        if found.get(name) != 1:
            ret.append(name)

    return ret
|
||||
|
||||
|
||||
def format_arg(arg):
    '''formats an argument to be shown
    '''
    # Keep only the simple class name (drop any package prefix), strip JVM
    # descriptor noise, render arrays as 'Array', and lower-case the first
    # character so it reads like a parameter name.
    text = str(arg)
    text = text.rpartition('.')[2]
    text = text.replace(';', '').replace('[]', 'Array')
    if len(text) > 0:
        text = text[0].lower() + text[1:]
    return text
|
||||
|
||||
|
||||
def search_definition(data):
    '''@return file, line, col
    '''
    # Normalize: drop newlines and any trailing dot (completion is often
    # triggered right after typing '.').
    data = data.replace('\n', '')
    if data.endswith('.'):
        data = data.rstrip('.')
    f, mod, parent, foundAs = Find(data)
    try:
        return do_find(f, mod), foundAs
    except:
        # Fall back to locating the containing (top-level) module.
        return do_find(f, parent), foundAs
|
||||
|
||||
|
||||
def generate_imports_tip_for_module(obj_to_complete, dir_comps=None, getattr=getattr, filter=lambda name:True):
    '''
    @param obj_to_complete: the object from where we should get the completions
    @param dir_comps: if passed, we should not 'dir' the object and should just iterate those passed as a parameter
    @param getattr: the way to get a given object from the obj_to_complete (used for the completer)
    @param filter: a callable that receives the name and decides if it should be appended or not to the results
    @return: list of tuples, so that each tuple represents a completion with:
        name, doc, args, type (from the TYPE_* constants)
    '''
    ret = []

    if dir_comps is None:
        dir_comps = dir_obj(obj_to_complete)

    for d in dir_comps:

        if d is None:
            continue

        if not filter(d):
            continue

        args = ''
        doc = ''
        retType = TYPE_BUILTIN

        try:
            obj = getattr(obj_to_complete, d)
        except (AttributeError, java.lang.NoClassDefFoundError):
            # NoClassDefFoundError: Jython's custom classloader can fail to
            # resolve classes from jars added to sys.path at runtime (they
            # work fine when on the classpath before startup). Background:
            # http://mindprod.com/jgloss/classloader.html
            # http://www.javaworld.com/javaworld/jw-03-2000/jw-03-classload-p2.html
            # http://freshmeat.net/articles/view/1643/
            # e.g.: 'import junit.framework' may list TestCase via dir() while
            # 'import junit.framework.TestCase' still raises ImportError.
            ret.append((d, '', '', retType))
            # that's ok, private things cannot be gotten...
            continue
        else:

            isMet = ismethod(obj)
            if isMet[0] and isMet[1]:
                # Use the first overload's Info for args/doc rendering.
                info = isMet[1][0]
                try:
                    args, vargs, kwargs = info.args, info.varargs, info.kwargs
                    doc = info.get_as_doc()
                    r = ''
                    for a in (args):
                        if len(r) > 0:
                            r += ', '
                        r += format_arg(a)
                    args = '(%s)' % (r)
                except TypeError:
                    traceback.print_exc()
                    args = '()'

                retType = TYPE_FUNCTION

            elif isclass(obj):
                retType = TYPE_CLASS

            elif ismodule(obj):
                retType = TYPE_IMPORT

            # add token and doc to return - assure only strings.
            ret.append((d, doc, args, retType))

    return ret
|
||||
|
||||
|
||||
if __name__ == "__main__":
    # Manual smoke test: requires running under Jython with the junit jar
    # available at the hard-coded path below.
    sys.path.append(r'D:\dev_programs\eclipse_3\310\eclipse\plugins\org.junit_3.8.1\junit.jar')
    # Fix: Find() returns a 4-tuple; formatting it directly against a single
    # '%s' raises TypeError, so wrap it in a 1-tuple to print it as a whole.
    sys.stdout.write('%s\n' % (Find('junit.framework.TestCase'),))
|
|
@ -0,0 +1,24 @@
|
|||
import traceback
|
||||
import sys
|
||||
from io import StringIO
|
||||
|
||||
|
||||
class Log:
    """Accumulates diagnostic messages (and formatted exceptions) in memory."""

    def __init__(self):
        self._contents = []

    def add_content(self, *content):
        # Join the pieces with spaces, mirroring print-style calls.
        self._contents.append(' '.join(content))

    def add_exception(self):
        # Capture the currently-handled exception's full traceback as text.
        buf = StringIO()
        exc_info = sys.exc_info()
        traceback.print_exception(exc_info[0], exc_info[1], exc_info[2], limit=None, file=buf)
        self._contents.append(buf.getvalue())

    def get_contents(self):
        return '\n'.join(self._contents)

    def clear_log(self):
        del self._contents[:]
|
|
@ -0,0 +1,110 @@
|
|||
import sys
|
||||
import os
|
||||
|
||||
|
||||
def find_in_pythonpath(module_name):
    """Return every sys.path location that could satisfy *module_name*.

    Simplistic by design: per sys.path entry it checks for a package
    directory and a matching ``.py`` file. That covers most interesting
    cases (elaborate import customization or .zip imports may be missed,
    but those are rare here).
    """
    segments = module_name.split('.')  # 'mod.name' must be matched as mod/name
    matches = []
    for entry in sys.path:
        candidate = os.path.join(entry, *segments)
        if os.path.isdir(candidate):
            matches.append(candidate)
        candidate_py = candidate + '.py'
        if os.path.exists(candidate_py):
            matches.append(candidate_py)
    return matches
|
||||
|
||||
|
||||
class DebuggerInitializationError(Exception):
    """Raised when the debugger cannot initialize (e.g. a required stdlib
    module is shadowed by a user file on the PYTHONPATH)."""
    pass
|
||||
|
||||
|
||||
class VerifyShadowedImport(object):
    """Context manager that explains import failures caused by shadowing.

    If importing ``import_name`` inside the ``with`` block fails and the
    name resolves to more than one location on the PYTHONPATH, the import
    was most likely shadowed by a user file; a DebuggerInitializationError
    with a rename suggestion is raised instead of the original error.
    """

    def __init__(self, import_name):
        self.import_name = import_name

    def __enter__(self):
        return self

    def __exit__(self, exc_type, exc_val, exc_tb):
        if exc_type is None:
            return  # Import succeeded, nothing to diagnose.

        if exc_type == DebuggerInitializationError:
            return False  # It's already an error we generated.

        # We couldn't even import it...
        found_at = find_in_pythonpath(self.import_name)
        if len(found_at) <= 1:
            # It wasn't found anywhere or there was just 1 occurrence:
            # propagate the original error unchanged.
            return False

        # More than one candidate for the same module on the PYTHONPATH
        # (the user's file and the standard library one): report shadowing.
        raise DebuggerInitializationError(self._generate_shadowed_import_message(found_at))

    def _generate_shadowed_import_message(self, found_at):
        # Build the user-facing explanation, pointing at the first (highest
        # priority) occurrence, which is the one shadowing the stdlib.
        return '''It was not possible to initialize the debugger due to a module name conflict.

i.e.: the module "%(import_name)s" could not be imported because it is shadowed by:
%(found_at)s
Please rename this file/folder so that the original module from the standard library can be imported.''' % {
            'import_name': self.import_name, 'found_at': found_at[0]}

    def check(self, module, expected_attributes):
        """Verify *module* exposes every expected attribute; raise otherwise."""
        shadowed = False
        for expected_attribute in expected_attributes:
            try:
                getattr(module, expected_attribute)
            except:
                # A missing attribute means we imported the wrong module.
                shadowed = True
                break

        if shadowed:
            raise DebuggerInitializationError(self._generate_shadowed_import_message([module.__file__]))
|
||||
|
||||
|
||||
# Import each stdlib module the debugger depends on under the shadowing
# verifier: an ImportError caused by a user file shadowing the stdlib module
# becomes a clear DebuggerInitializationError, and check() additionally
# validates that key attributes exist on what was imported.
with VerifyShadowedImport('threading') as verify_shadowed:
    import threading; verify_shadowed.check(threading, ['Thread', 'settrace', 'setprofile', 'Lock', 'RLock', 'current_thread'])

with VerifyShadowedImport('time') as verify_shadowed:
    import time; verify_shadowed.check(time, ['sleep', 'time', 'mktime'])

with VerifyShadowedImport('socket') as verify_shadowed:
    import socket; verify_shadowed.check(socket, ['socket', 'gethostname', 'getaddrinfo'])

with VerifyShadowedImport('select') as verify_shadowed:
    import select; verify_shadowed.check(select, ['select'])

with VerifyShadowedImport('code') as verify_shadowed:
    import code as _code; verify_shadowed.check(_code, ['compile_command', 'InteractiveInterpreter'])

with VerifyShadowedImport('_thread') as verify_shadowed:
    import _thread as thread; verify_shadowed.check(thread, ['start_new_thread', 'start_new', 'allocate_lock'])

with VerifyShadowedImport('queue') as verify_shadowed:
    import queue as _queue; verify_shadowed.check(_queue, ['Queue', 'LifoQueue', 'Empty', 'Full', 'deque'])

with VerifyShadowedImport('xmlrpclib') as verify_shadowed:
    import xmlrpc.client as xmlrpclib; verify_shadowed.check(xmlrpclib, ['ServerProxy', 'Marshaller', 'Server'])

with VerifyShadowedImport('xmlrpc.server') as verify_shadowed:
    import xmlrpc.server as xmlrpcserver; verify_shadowed.check(xmlrpcserver, ['SimpleXMLRPCServer'])

with VerifyShadowedImport('http.server') as verify_shadowed:
    import http.server as BaseHTTPServer; verify_shadowed.check(BaseHTTPServer, ['BaseHTTPRequestHandler'])

# If set, this is a version of the threading.enumerate that doesn't have the patching to remove the pydevd threads.
# Note: as it can't be set during execution, don't import the name (import the module and access it through its name).
pydevd_saved_threading_enumerate = None
|
|
@ -0,0 +1,73 @@
|
|||
import sys
|
||||
|
||||
|
||||
def patch_sys_module():
    """Replace sys.exc_info with a wrapper that hides the plugin_import frame.

    For ImportError the traceback's first entry is the frame added by the
    debugger's plugin_import call, so the wrapper reports the traceback
    starting at the next frame. The original function is kept in
    sys.system_exc_info (set only the first time) so it can be restored by
    cancel_patches_in_sys_module().
    """

    def patched_exc_info(fun):

        def pydev_debugger_exc_info():
            exc_type, value, tb = fun()
            # we should not show frame added by plugin_import call
            if exc_type == ImportError and tb and hasattr(tb, "tb_next"):
                return exc_type, value, tb.tb_next
            return exc_type, value, tb

        return pydev_debugger_exc_info

    original_exc_info = sys.exc_info
    sys.exc_info = patched_exc_info(original_exc_info)
    if not hasattr(sys, "system_exc_info"):
        sys.system_exc_info = original_exc_info
|
||||
|
||||
|
||||
def patched_reload(orig_reload):
    """Wrap *orig_reload* so that reloading the ``sys`` module re-applies
    the debugger's sys patches (see patch_sys_module)."""

    def pydev_debugger_reload(module):
        orig_reload(module)
        if module.__name__ == "sys":
            # if sys module was reloaded we should patch it again
            patch_sys_module()

    return pydev_debugger_reload
|
||||
|
||||
|
||||
def patch_reload():
    """Monkey-patch every known reload entry point with pydev_debugger_reload.

    Python 2 (builtins.reload exists): patches builtins.reload and, when
    available, imp.reload. Python 3: patches importlib.reload. Each
    original is stashed on sys (builtin_orig_reload / imp_orig_reload /
    importlib_orig_reload) for cancel_patches_in_sys_module().
    """
    import builtins  # Py3

    if hasattr(builtins, "reload"):
        sys.builtin_orig_reload = builtins.reload
        builtins.reload = patched_reload(sys.builtin_orig_reload)  # @UndefinedVariable
        try:
            import imp
            sys.imp_orig_reload = imp.reload
            imp.reload = patched_reload(sys.imp_orig_reload)  # @UndefinedVariable
        except:
            # imp may be unavailable (removed in newer Pythons); best-effort.
            pass
    else:
        try:
            import importlib
            sys.importlib_orig_reload = importlib.reload  # @UndefinedVariable
            importlib.reload = patched_reload(sys.importlib_orig_reload)  # @UndefinedVariable
        except:
            pass

    del builtins
|
||||
|
||||
|
||||
def cancel_patches_in_sys_module():
    # Undo everything `patch_sys_module` / `patch_reload` installed.
    # NOTE(review): assumes `patch_sys_module` ran first -- otherwise
    # `sys.system_exc_info` doesn't exist and this raises AttributeError.
    sys.exc_info = sys.system_exc_info  # @UndefinedVariable
    import builtins  # Py3

    if hasattr(sys, "builtin_orig_reload"):
        builtins.reload = sys.builtin_orig_reload

    if hasattr(sys, "imp_orig_reload"):
        import imp
        imp.reload = sys.imp_orig_reload

    if hasattr(sys, "importlib_orig_reload"):
        import importlib
        importlib.reload = sys.importlib_orig_reload

    del builtins
|
|
@ -0,0 +1,52 @@
|
|||
import inspect
|
||||
import re
|
||||
|
||||
|
||||
def do_find(f, mod):
    """Return a ``(filename, line, column)`` tuple locating *mod* inside file *f*.

    *mod* may be a module, class, method, function, traceback, frame or code
    object; non-code objects are progressively unwrapped until a code object
    is reached.  Raises RuntimeError for anything else.
    """
    import linecache
    if inspect.ismodule(mod):
        # A module starts at the top of its file.
        return f, 0, 0

    lines = linecache.getlines(f)

    if inspect.isclass(mod):
        # Scan the file's lines for the `class <name>` statement.
        name = mod.__name__
        pat = re.compile(r'^\s*class\s*' + name + r'\b')
        for i in range(len(lines)):
            if pat.match(lines[i]):
                return f, i, 0

        return f, 0, 0

    # Unwrap: method -> function -> code; traceback/frame -> code.
    if inspect.ismethod(mod):
        # NOTE(review): `im_func` is the Python 2 bound-method attribute;
        # presumably this branch is only hit on Py2/Jython -- confirm.
        mod = mod.im_func

    if inspect.isfunction(mod):
        try:
            mod = mod.func_code
        except AttributeError:
            mod = mod.__code__  # python 3k

    if inspect.istraceback(mod):
        mod = mod.tb_frame

    if inspect.isframe(mod):
        mod = mod.f_code

    if inspect.iscode(mod):
        if not hasattr(mod, 'co_filename'):
            return None, 0, 0

        if not hasattr(mod, 'co_firstlineno'):
            return mod.co_filename, 0, 0

        # Walk backwards from the code object's first line looking for the
        # `def`, `lambda` or decorator line that introduces it.
        lnum = mod.co_firstlineno
        pat = re.compile(r'^(\s*def\s)|(.*(?<!\w)lambda(:|\s))|^(\s*@)')
        while lnum > 0:
            # NOTE(review): `co_firstlineno` is 1-based while `lines` is
            # 0-based, and `lines[lnum]` can raise IndexError if linecache
            # returned nothing -- verify against callers.
            if pat.match(lines[lnum]):
                break
            lnum -= 1

        return f, lnum, 0

    raise RuntimeError('Do not know about: ' + f + ' ' + str(mod))
|
|
@ -0,0 +1,353 @@
|
|||
'''
|
||||
Sample usage to track changes in a thread.
|
||||
|
||||
import threading
|
||||
import time
|
||||
watcher = fsnotify.Watcher()
|
||||
watcher.accepted_file_extensions = {'.py', '.pyw'}
|
||||
|
||||
# Configure target values to compute throttling.
|
||||
# Note: internal sleep times will be updated based on
|
||||
# profiling the actual application runtime to match
|
||||
# those values.
|
||||
|
||||
watcher.target_time_for_single_scan = 2.
|
||||
watcher.target_time_for_notification = 4.
|
||||
|
||||
watcher.set_tracked_paths([target_dir])
|
||||
|
||||
def start_watching(): # Called from thread
|
||||
for change_enum, change_path in watcher.iter_changes():
|
||||
if change_enum == fsnotify.Change.added:
|
||||
print('Added: ', change_path)
|
||||
elif change_enum == fsnotify.Change.modified:
|
||||
print('Modified: ', change_path)
|
||||
elif change_enum == fsnotify.Change.deleted:
|
||||
print('Deleted: ', change_path)
|
||||
|
||||
t = threading.Thread(target=start_watching)
|
||||
t.daemon = True
|
||||
t.start()
|
||||
|
||||
try:
|
||||
...
|
||||
finally:
|
||||
watcher.dispose()
|
||||
|
||||
|
||||
Note: changes are only reported for files (added/modified/deleted), not directories.
|
||||
'''
|
||||
import threading
|
||||
import sys
|
||||
from os.path import basename
|
||||
from _pydev_bundle import pydev_log
|
||||
from os import scandir
|
||||
|
||||
try:
    from enum import IntEnum
except:

    # Fallback for interpreters without the `enum` module: a plain base class
    # so that `Change` below can still be defined (members become plain ints
    # without enum semantics).
    class IntEnum(object):
        pass
|
||||
|
||||
import time
|
||||
|
||||
__author__ = 'Fabio Zadrozny'
|
||||
__email__ = 'fabiofz@gmail.com'
|
||||
__version__ = '0.1.5' # Version here and in setup.py
|
||||
|
||||
|
||||
class Change(IntEnum):
    """Kind of filesystem change reported by ``Watcher.iter_changes``."""
    added = 1
    modified = 2
    deleted = 3
|
||||
|
||||
|
||||
class _SingleVisitInfo(object):
    """Mutable state shared by all path watchers across one full scan."""

    def __init__(self):
        self.count = 0  # Entries processed so far (drives throttling every 300 entries).
        self.visited_dirs = set()  # Directories already scanned (avoids revisiting/cycles).
        self.file_to_mtime = {}  # path -> (st_mtime_ns, st_size) snapshot of this scan.
        self.last_sleep_time = time.time()  # When the throttle last slept.
|
||||
|
||||
|
||||
class _PathWatcher(object):
    '''
    Helper to watch a single path.
    '''

    def __init__(self, root_path, accept_directory, accept_file, single_visit_info, max_recursion_level, sleep_time=.0):
        '''
        :type root_path: str
        :type accept_directory: Callback[str, bool]
        :type accept_file: Callback[str, bool]
        :type max_recursion_level: int
        :type sleep_time: float
        '''
        self.accept_directory = accept_directory
        self.accept_file = accept_file
        self._max_recursion_level = max_recursion_level

        self._root_path = root_path

        # Initial sleep value for throttling, it'll be auto-updated based on the
        # Watcher.target_time_for_single_scan.
        self.sleep_time = sleep_time

        # Only sleep if at least this much wall time elapsed since the last sleep.
        self.sleep_at_elapsed = 1. / 30.

        # When created, do the initial snapshot right away!
        # (changes are discarded -- the lambda below ignores them.)
        old_file_to_mtime = {}
        self._check(single_visit_info, lambda _change: None, old_file_to_mtime)

    def __eq__(self, o):
        # Two watchers are equal iff they watch the same root path.
        if isinstance(o, _PathWatcher):
            return self._root_path == o._root_path

        return False

    def __ne__(self, o):
        return not self == o

    def __hash__(self):
        return hash(self._root_path)

    def _check_dir(self, dir_path, single_visit_info, append_change, old_file_to_mtime, level):
        # This is the actual poll loop: recursively scans `dir_path`, records
        # current (mtime, size) per file and emits added/modified changes by
        # comparing (and popping) entries from `old_file_to_mtime`.
        if dir_path in single_visit_info.visited_dirs or level > self._max_recursion_level:
            return
        single_visit_info.visited_dirs.add(dir_path)
        try:
            if isinstance(dir_path, bytes):
                # Normalize bytes paths to str; skip paths we cannot decode.
                try:
                    dir_path = dir_path.decode(sys.getfilesystemencoding())
                except UnicodeDecodeError:
                    try:
                        dir_path = dir_path.decode('utf-8')
                    except UnicodeDecodeError:
                        return  # Ignore if we can't deal with the path.

            new_files = single_visit_info.file_to_mtime

            for entry in scandir(dir_path):
                single_visit_info.count += 1

                # Throttle if needed inside the loop
                # to avoid consuming too much CPU.
                if single_visit_info.count % 300 == 0:
                    if self.sleep_time > 0:
                        t = time.time()
                        diff = t - single_visit_info.last_sleep_time
                        if diff > self.sleep_at_elapsed:
                            time.sleep(self.sleep_time)
                            single_visit_info.last_sleep_time = time.time()

                if entry.is_dir():
                    if self.accept_directory(entry.path):
                        self._check_dir(entry.path, single_visit_info, append_change, old_file_to_mtime, level + 1)

                elif self.accept_file(entry.path):
                    stat = entry.stat()
                    # Size is tracked along with mtime so size-only changes are seen.
                    mtime = (stat.st_mtime_ns, stat.st_size)
                    path = entry.path
                    new_files[path] = mtime

                    # Popping here means whatever remains in old_file_to_mtime
                    # after the scan is what was deleted.
                    old_mtime = old_file_to_mtime.pop(path, None)
                    if not old_mtime:
                        append_change((Change.added, path))
                    elif old_mtime != mtime:
                        append_change((Change.modified, path))

        except OSError:
            pass  # Directory was removed in the meanwhile.

    def _check(self, single_visit_info, append_change, old_file_to_mtime):
        # Entry point for one scan of this watcher's root path.
        self._check_dir(self._root_path, single_visit_info, append_change, old_file_to_mtime, 0)
|
||||
|
||||
|
||||
class Watcher(object):
    """Polling filesystem watcher: tracks a set of paths and yields
    added/modified/deleted file changes from ``iter_changes``.

    Throttling is adaptive: per-watcher sleep times are tuned so a full scan
    approaches ``target_time_for_single_scan`` and scans are spaced to match
    ``target_time_for_notification``.
    """

    # By default (if accept_directory is not specified), these will be the
    # ignored directories.
    ignored_dirs = {u'.git', u'__pycache__', u'.idea', u'node_modules', u'.metadata'}

    # By default (if accept_file is not specified), these will be the
    # accepted files.
    accepted_file_extensions = ()

    # Set to the target value for doing full scan of all files (adds a sleep inside the poll loop
    # which processes files to reach the target time).
    # Lower values will consume more CPU
    # Set to 0.0 to have no sleeps (which will result in a higher cpu load).
    target_time_for_single_scan = 2.0

    # Set the target value from the start of one scan to the start of another scan (adds a
    # sleep after a full poll is done to reach the target time).
    # Lower values will consume more CPU.
    # Set to 0.0 to have a new scan start right away without any sleeps.
    target_time_for_notification = 4.0

    # Set to True to print the time for a single poll through all the paths.
    print_poll_time = False

    # This is the maximum recursion level.
    max_recursion_level = 10

    def __init__(self, accept_directory=None, accept_file=None):
        '''
        :param Callable[str, bool] accept_directory:
            Callable that returns whether a directory should be watched.
            Note: if passed it'll override the `ignored_dirs`

        :param Callable[str, bool] accept_file:
            Callable that returns whether a file should be watched.
            Note: if passed it'll override the `accepted_file_extensions`.
        '''
        self._path_watchers = set()
        self._disposed = threading.Event()

        if accept_directory is None:
            accept_directory = lambda dir_path: basename(dir_path) not in self.ignored_dirs
        if accept_file is None:
            # Empty `accepted_file_extensions` means: accept every file.
            accept_file = lambda path_name: \
                not self.accepted_file_extensions or path_name.endswith(self.accepted_file_extensions)
        self.accept_file = accept_file
        self.accept_directory = accept_directory
        self._single_visit_info = _SingleVisitInfo()

    @property
    def accept_directory(self):
        return self._accept_directory

    @accept_directory.setter
    def accept_directory(self, accept_directory):
        # Propagate the new filter to the already-created path watchers too.
        self._accept_directory = accept_directory
        for path_watcher in self._path_watchers:
            path_watcher.accept_directory = accept_directory

    @property
    def accept_file(self):
        return self._accept_file

    @accept_file.setter
    def accept_file(self, accept_file):
        # Propagate the new filter to the already-created path watchers too.
        self._accept_file = accept_file
        for path_watcher in self._path_watchers:
            path_watcher.accept_file = accept_file

    def dispose(self):
        # Signals iter_changes() to stop (and wakes any pending waits).
        self._disposed.set()

    @property
    def path_watchers(self):
        return tuple(self._path_watchers)

    def set_tracked_paths(self, paths):
        """
        Note: always resets all path trackers to track the passed paths.
        """
        if not isinstance(paths, (list, tuple, set)):
            paths = (paths,)

        # Sort by the path len so that the bigger paths come first (so,
        # if there's any nesting we want the nested paths to be visited
        # before the parent paths so that the max_recursion_level is correct).
        paths = sorted(set(paths), key=lambda path:-len(path))
        path_watchers = set()

        self._single_visit_info = _SingleVisitInfo()

        initial_time = time.time()
        for path in paths:
            sleep_time = 0.  # When collecting the first time, sleep_time should be 0!
            path_watcher = _PathWatcher(
                path,
                self.accept_directory,
                self.accept_file,
                self._single_visit_info,
                max_recursion_level=self.max_recursion_level,
                sleep_time=sleep_time,
            )

            path_watchers.add(path_watcher)

        actual_time = (time.time() - initial_time)

        pydev_log.debug('Tracking the following paths for changes: %s', paths)
        pydev_log.debug('Time to track: %.2fs', actual_time)
        pydev_log.debug('Folders found: %s', len(self._single_visit_info.visited_dirs))
        pydev_log.debug('Files found: %s', len(self._single_visit_info.file_to_mtime))
        self._path_watchers = path_watchers

    def iter_changes(self):
        '''
        Continuously provides changes (until dispose() is called).

        Changes provided are tuples with the Change enum and filesystem path.

        :rtype: Iterable[Tuple[Change, str]]
        '''
        while not self._disposed.is_set():
            initial_time = time.time()

            old_visit_info = self._single_visit_info
            old_file_to_mtime = old_visit_info.file_to_mtime
            changes = []
            append_change = changes.append

            # Fresh visit info per scan; the previous snapshot is consumed below.
            self._single_visit_info = single_visit_info = _SingleVisitInfo()
            for path_watcher in self._path_watchers:
                path_watcher._check(single_visit_info, append_change, old_file_to_mtime)

            # Note that we pop entries while visiting, so, what remained is what's deleted.
            for entry in old_file_to_mtime:
                append_change((Change.deleted, entry))

            for change in changes:
                yield change

            actual_time = (time.time() - initial_time)
            if self.print_poll_time:
                print('--- Total poll time: %.3fs' % actual_time)

            if actual_time > 0:
                # Adapt per-watcher sleep times so the next scan approaches
                # target_time_for_single_scan.
                if self.target_time_for_single_scan <= 0.0:
                    for path_watcher in self._path_watchers:
                        path_watcher.sleep_time = 0.0
                else:
                    perc = self.target_time_for_single_scan / actual_time

                    # Prevent from changing the values too much (go slowly into the right
                    # direction).
                    # (to prevent from cases where the user puts the machine on sleep and
                    # values become too skewed).
                    if perc > 2.:
                        perc = 2.
                    elif perc < 0.5:
                        perc = 0.5

                    for path_watcher in self._path_watchers:
                        if path_watcher.sleep_time <= 0.0:
                            path_watcher.sleep_time = 0.001
                        new_sleep_time = path_watcher.sleep_time * perc

                        # Prevent from changing the values too much (go slowly into the right
                        # direction).
                        # (to prevent from cases where the user puts the machine on sleep and
                        # values become too skewed).
                        diff_sleep_time = new_sleep_time - path_watcher.sleep_time
                        path_watcher.sleep_time += (diff_sleep_time / (3.0 * len(self._path_watchers)))

                        # NOTE(review): this wait sits inside the per-watcher loop,
                        # which looks odd (one wait per watcher) -- kept as-is;
                        # confirm intent against upstream fsnotify.
                        if actual_time > 0:
                            self._disposed.wait(actual_time)

                        if path_watcher.sleep_time < 0.001:
                            path_watcher.sleep_time = 0.001

                        # print('new sleep time: %s' % path_watcher.sleep_time)

            # Space scans to match target_time_for_notification (wakes early on dispose()).
            diff = self.target_time_for_notification - actual_time
            if diff > 0.:
                self._disposed.wait(diff)
|
||||
|
Binary file not shown.
|
@ -0,0 +1,639 @@
|
|||
import os
|
||||
import sys
|
||||
import traceback
|
||||
from _pydev_bundle.pydev_imports import xmlrpclib, _queue, Exec
|
||||
from _pydev_bundle._pydev_calltip_util import get_description
|
||||
from _pydevd_bundle import pydevd_vars
|
||||
from _pydevd_bundle import pydevd_xml
|
||||
from _pydevd_bundle.pydevd_constants import (IS_JYTHON, NEXT_VALUE_SEPARATOR, get_global_debugger,
|
||||
silence_warnings_decorator)
|
||||
from contextlib import contextmanager
|
||||
from _pydev_bundle import pydev_log
|
||||
from _pydevd_bundle.pydevd_utils import interrupt_main_thread
|
||||
|
||||
from io import StringIO
|
||||
|
||||
|
||||
# =======================================================================================================================
|
||||
# BaseStdIn
|
||||
# =======================================================================================================================
|
||||
class BaseStdIn:
    """Stand-in for ``sys.stdin`` while the console evaluates code.

    Writes are swallowed, every read reports an empty line, and any other
    attribute access falls through to the wrapped original stdin.
    """

    def __init__(self, original_stdin=sys.stdin, *args, **kwargs):
        # Mirror the real stdin's encoding/errors when they exist, so clients
        # that inspect those attributes keep working.
        try:
            self.encoding = sys.stdin.encoding
        except:
            # Not available in every Python version/stream implementation.
            pass
        self.original_stdin = original_stdin

        try:
            self.errors = sys.stdin.errors  # Who knew? sys streams have an errors attribute!
        except:
            # Not available in every Python version/stream implementation.
            pass

    def readline(self, *args, **kwargs):
        # Deliberately silent: the user can reach this via input() during an
        # evaluation, and writing a warning to stderr would just be noise.
        return '\n'

    def read(self, *args, **kwargs):
        # In the interactive interpreter a read behaves like a readline.
        return self.readline()

    def write(self, *args, **kwargs):
        # Writing is unsupported, but the stream interface expects the method.
        pass

    def flush(self, *args, **kwargs):
        # No-op; the stream interface expects the method.
        pass

    def close(self, *args, **kwargs):
        # No-op; the stream interface expects the method.
        pass

    def __iter__(self):
        # Explicit __iter__ so this object is still iterable on Python 3.
        return self.original_stdin.__iter__()

    def __getattr__(self, item):
        # Only reached when normal lookup failed: delegate to the real stdin.
        delegate = self.original_stdin
        if hasattr(delegate, item):
            return getattr(delegate, item)
        raise AttributeError("%s has no attribute %s" % (delegate, item))
|
||||
|
||||
|
||||
# =======================================================================================================================
|
||||
# StdIn
|
||||
# =======================================================================================================================
|
||||
class StdIn(BaseStdIn):
    '''
    Object to be added to stdin (to emulate it as non-blocking while the next line arrives)
    '''

    def __init__(self, interpreter, host, client_port, original_stdin=sys.stdin):
        # `host`/`client_port` identify the IDE-side XML-RPC server that is
        # asked for input on each readline().
        BaseStdIn.__init__(self, original_stdin)
        self.interpreter = interpreter
        self.client_port = client_port
        self.host = host

    def readline(self, *args, **kwargs):
        # Ok, callback into the client to get the new input
        try:
            server = xmlrpclib.Server('http://%s:%s' % (self.host, self.client_port))
            requested_input = server.RequestInput()
            if not requested_input:
                return '\n'  # Yes, a readline must return something (otherwise we can get an EOFError on the input() call).
            else:
                # readline should end with '\n' (not doing so makes IPython 5 remove the last *valid* character).
                requested_input += '\n'
            return requested_input
        except KeyboardInterrupt:
            raise  # Let KeyboardInterrupt go through -- #PyDev-816: Interrupting infinite loop in the Interactive Console
        except:
            # Any other failure (e.g. connection refused) degrades to "no input".
            return '\n'

    def close(self, *args, **kwargs):
        pass  # expected in StdIn
|
||||
|
||||
|
||||
#=======================================================================================================================
|
||||
# DebugConsoleStdIn
|
||||
#=======================================================================================================================
|
||||
class DebugConsoleStdIn(BaseStdIn):
    '''
    Object to be added to stdin (to emulate it as non-blocking while the next line arrives)
    '''

    def __init__(self, py_db, original_stdin):
        '''
        :param py_db:
            If None, get_global_debugger() is used.
        '''
        BaseStdIn.__init__(self, original_stdin)
        self._py_db = py_db
        # Nesting counter for notify_input_requested(); messages are only
        # sent on the 0 -> 1 and 1 -> 0 transitions.
        self._in_notification = 0

    def __send_input_requested_message(self, is_started):
        # Tell the IDE that the program started/stopped waiting for user input.
        # Best-effort: any failure is logged and swallowed.
        try:
            py_db = self._py_db
            if py_db is None:
                py_db = get_global_debugger()

            if py_db is None:
                return

            cmd = py_db.cmd_factory.make_input_requested_message(is_started)
            py_db.writer.add_command(cmd)
        except Exception:
            pydev_log.exception()

    @contextmanager
    def notify_input_requested(self):
        # Re-entrant: nested reads only notify once (on outermost enter/exit).
        self._in_notification += 1
        if self._in_notification == 1:
            self.__send_input_requested_message(True)
        try:
            yield
        finally:
            self._in_notification -= 1
            if self._in_notification == 0:
                self.__send_input_requested_message(False)

    def readline(self, *args, **kwargs):
        # Block on the real stdin while the IDE is notified input is awaited.
        with self.notify_input_requested():
            return self.original_stdin.readline(*args, **kwargs)

    def read(self, *args, **kwargs):
        with self.notify_input_requested():
            return self.original_stdin.read(*args, **kwargs)
|
||||
|
||||
|
||||
class CodeFragment:
    """Accumulates console source text, tracking whether it is still a
    single-line statement."""

    def __init__(self, text, is_single_line=True):
        self.text = text
        self.is_single_line = is_single_line

    def append(self, code_fragment):
        # Join with a newline; once any appended part is multi-line the whole
        # fragment is considered multi-line.
        self.text = "\n".join((self.text, code_fragment.text))
        self.is_single_line = self.is_single_line and code_fragment.is_single_line
|
||||
|
||||
|
||||
# =======================================================================================================================
|
||||
# BaseInterpreterInterface
|
||||
# =======================================================================================================================
|
||||
class BaseInterpreterInterface:
|
||||
|
||||
def __init__(self, mainThread, connect_status_queue=None):
|
||||
self.mainThread = mainThread
|
||||
self.interruptable = False
|
||||
self.exec_queue = _queue.Queue(0)
|
||||
self.buffer = None
|
||||
self.banner_shown = False
|
||||
self.connect_status_queue = connect_status_queue
|
||||
self.mpl_modules_for_patching = {}
|
||||
self.init_mpl_modules_for_patching()
|
||||
|
||||
def build_banner(self):
|
||||
return 'print({0})\n'.format(repr(self.get_greeting_msg()))
|
||||
|
||||
def get_greeting_msg(self):
|
||||
return 'PyDev console: starting.\n'
|
||||
|
||||
def init_mpl_modules_for_patching(self):
|
||||
from pydev_ipython.matplotlibtools import activate_matplotlib, activate_pylab, activate_pyplot
|
||||
self.mpl_modules_for_patching = {
|
||||
"matplotlib": lambda: activate_matplotlib(self.enableGui),
|
||||
"matplotlib.pyplot": activate_pyplot,
|
||||
"pylab": activate_pylab
|
||||
}
|
||||
|
||||
def need_more_for_code(self, source):
|
||||
# PyDev-502: PyDev 3.9 F2 doesn't support backslash continuations
|
||||
|
||||
# Strangely even the IPython console is_complete said it was complete
|
||||
# even with a continuation char at the end.
|
||||
if source.endswith('\\'):
|
||||
return True
|
||||
|
||||
if hasattr(self.interpreter, 'is_complete'):
|
||||
return not self.interpreter.is_complete(source)
|
||||
try:
|
||||
# At this point, it should always be single.
|
||||
# If we don't do this, things as:
|
||||
#
|
||||
# for i in range(10): print(i)
|
||||
#
|
||||
# (in a single line) don't work.
|
||||
# Note that it won't give an error and code will be None (so, it'll
|
||||
# use execMultipleLines in the next call in this case).
|
||||
symbol = 'single'
|
||||
code = self.interpreter.compile(source, '<input>', symbol)
|
||||
except (OverflowError, SyntaxError, ValueError):
|
||||
# Case 1
|
||||
return False
|
||||
if code is None:
|
||||
# Case 2
|
||||
return True
|
||||
|
||||
# Case 3
|
||||
return False
|
||||
|
||||
def need_more(self, code_fragment):
|
||||
if self.buffer is None:
|
||||
self.buffer = code_fragment
|
||||
else:
|
||||
self.buffer.append(code_fragment)
|
||||
|
||||
return self.need_more_for_code(self.buffer.text)
|
||||
|
||||
def create_std_in(self, debugger=None, original_std_in=None):
|
||||
if debugger is None:
|
||||
return StdIn(self, self.host, self.client_port, original_stdin=original_std_in)
|
||||
else:
|
||||
return DebugConsoleStdIn(py_db=debugger, original_stdin=original_std_in)
|
||||
|
||||
def add_exec(self, code_fragment, debugger=None):
|
||||
# In case sys.excepthook called, use original excepthook #PyDev-877: Debug console freezes with Python 3.5+
|
||||
# (showtraceback does it on python 3.5 onwards)
|
||||
sys.excepthook = sys.__excepthook__
|
||||
try:
|
||||
original_in = sys.stdin
|
||||
try:
|
||||
help = None
|
||||
if 'pydoc' in sys.modules:
|
||||
pydoc = sys.modules['pydoc'] # Don't import it if it still is not there.
|
||||
|
||||
if hasattr(pydoc, 'help'):
|
||||
# You never know how will the API be changed, so, let's code defensively here
|
||||
help = pydoc.help
|
||||
if not hasattr(help, 'input'):
|
||||
help = None
|
||||
except:
|
||||
# Just ignore any error here
|
||||
pass
|
||||
|
||||
more = False
|
||||
try:
|
||||
sys.stdin = self.create_std_in(debugger, original_in)
|
||||
try:
|
||||
if help is not None:
|
||||
# This will enable the help() function to work.
|
||||
try:
|
||||
try:
|
||||
help.input = sys.stdin
|
||||
except AttributeError:
|
||||
help._input = sys.stdin
|
||||
except:
|
||||
help = None
|
||||
if not self._input_error_printed:
|
||||
self._input_error_printed = True
|
||||
sys.stderr.write('\nError when trying to update pydoc.help.input\n')
|
||||
sys.stderr.write('(help() may not work -- please report this as a bug in the pydev bugtracker).\n\n')
|
||||
traceback.print_exc()
|
||||
|
||||
try:
|
||||
self.start_exec()
|
||||
if hasattr(self, 'debugger'):
|
||||
self.debugger.enable_tracing()
|
||||
|
||||
more = self.do_add_exec(code_fragment)
|
||||
|
||||
if hasattr(self, 'debugger'):
|
||||
self.debugger.disable_tracing()
|
||||
|
||||
self.finish_exec(more)
|
||||
finally:
|
||||
if help is not None:
|
||||
try:
|
||||
try:
|
||||
help.input = original_in
|
||||
except AttributeError:
|
||||
help._input = original_in
|
||||
except:
|
||||
pass
|
||||
|
||||
finally:
|
||||
sys.stdin = original_in
|
||||
except SystemExit:
|
||||
raise
|
||||
except:
|
||||
traceback.print_exc()
|
||||
finally:
|
||||
sys.__excepthook__ = sys.excepthook
|
||||
|
||||
return more
|
||||
|
||||
def do_add_exec(self, codeFragment):
|
||||
'''
|
||||
Subclasses should override.
|
||||
|
||||
@return: more (True if more input is needed to complete the statement and False if the statement is complete).
|
||||
'''
|
||||
raise NotImplementedError()
|
||||
|
||||
def get_namespace(self):
|
||||
'''
|
||||
Subclasses should override.
|
||||
|
||||
@return: dict with namespace.
|
||||
'''
|
||||
raise NotImplementedError()
|
||||
|
||||
def __resolve_reference__(self, text):
|
||||
"""
|
||||
|
||||
:type text: str
|
||||
"""
|
||||
obj = None
|
||||
if '.' not in text:
|
||||
try:
|
||||
obj = self.get_namespace()[text]
|
||||
except KeyError:
|
||||
pass
|
||||
|
||||
if obj is None:
|
||||
try:
|
||||
obj = self.get_namespace()['__builtins__'][text]
|
||||
except:
|
||||
pass
|
||||
|
||||
if obj is None:
|
||||
try:
|
||||
obj = getattr(self.get_namespace()['__builtins__'], text, None)
|
||||
except:
|
||||
pass
|
||||
|
||||
else:
|
||||
try:
|
||||
last_dot = text.rindex('.')
|
||||
parent_context = text[0:last_dot]
|
||||
res = pydevd_vars.eval_in_context(parent_context, self.get_namespace(), self.get_namespace())
|
||||
obj = getattr(res, text[last_dot + 1:])
|
||||
except:
|
||||
pass
|
||||
return obj
|
||||
|
||||
def getDescription(self, text):
|
||||
try:
|
||||
obj = self.__resolve_reference__(text)
|
||||
if obj is None:
|
||||
return ''
|
||||
return get_description(obj)
|
||||
except:
|
||||
return ''
|
||||
|
||||
def do_exec_code(self, code, is_single_line):
|
||||
try:
|
||||
code_fragment = CodeFragment(code, is_single_line)
|
||||
more = self.need_more(code_fragment)
|
||||
if not more:
|
||||
code_fragment = self.buffer
|
||||
self.buffer = None
|
||||
self.exec_queue.put(code_fragment)
|
||||
|
||||
return more
|
||||
except:
|
||||
traceback.print_exc()
|
||||
return False
|
||||
|
||||
def execLine(self, line):
|
||||
return self.do_exec_code(line, True)
|
||||
|
||||
def execMultipleLines(self, lines):
|
||||
if IS_JYTHON:
|
||||
more = False
|
||||
for line in lines.split('\n'):
|
||||
more = self.do_exec_code(line, True)
|
||||
return more
|
||||
else:
|
||||
return self.do_exec_code(lines, False)
|
||||
|
||||
def interrupt(self):
|
||||
self.buffer = None # Also clear the buffer when it's interrupted.
|
||||
try:
|
||||
if self.interruptable:
|
||||
# Fix for #PyDev-500: Console interrupt can't interrupt on sleep
|
||||
interrupt_main_thread(self.mainThread)
|
||||
|
||||
self.finish_exec(False)
|
||||
return True
|
||||
except:
|
||||
traceback.print_exc()
|
||||
return False
|
||||
|
||||
def close(self):
|
||||
sys.exit(0)
|
||||
|
||||
def start_exec(self):
|
||||
self.interruptable = True
|
||||
|
||||
def get_server(self):
|
||||
if getattr(self, 'host', None) is not None:
|
||||
return xmlrpclib.Server('http://%s:%s' % (self.host, self.client_port))
|
||||
else:
|
||||
return None
|
||||
|
||||
server = property(get_server)
|
||||
|
||||
def ShowConsole(self):
|
||||
server = self.get_server()
|
||||
if server is not None:
|
||||
server.ShowConsole()
|
||||
|
||||
def finish_exec(self, more):
|
||||
self.interruptable = False
|
||||
|
||||
server = self.get_server()
|
||||
|
||||
if server is not None:
|
||||
return server.NotifyFinished(more)
|
||||
else:
|
||||
return True
|
||||
|
||||
def getFrame(self):
|
||||
xml = StringIO()
|
||||
hidden_ns = self.get_ipython_hidden_vars_dict()
|
||||
xml.write("<xml>")
|
||||
xml.write(pydevd_xml.frame_vars_to_xml(self.get_namespace(), hidden_ns))
|
||||
xml.write("</xml>")
|
||||
|
||||
return xml.getvalue()
|
||||
|
||||
@silence_warnings_decorator
|
||||
def getVariable(self, attributes):
|
||||
xml = StringIO()
|
||||
xml.write("<xml>")
|
||||
val_dict = pydevd_vars.resolve_compound_var_object_fields(self.get_namespace(), attributes)
|
||||
if val_dict is None:
|
||||
val_dict = {}
|
||||
|
||||
for k, val in val_dict.items():
|
||||
val = val_dict[k]
|
||||
evaluate_full_value = pydevd_xml.should_evaluate_full_value(val)
|
||||
xml.write(pydevd_vars.var_to_xml(val, k, evaluate_full_value=evaluate_full_value))
|
||||
|
||||
xml.write("</xml>")
|
||||
|
||||
return xml.getvalue()
|
||||
|
||||
def getArray(self, attr, roffset, coffset, rows, cols, format):
|
||||
name = attr.split("\t")[-1]
|
||||
array = pydevd_vars.eval_in_context(name, self.get_namespace(), self.get_namespace())
|
||||
return pydevd_vars.table_like_struct_to_xml(array, name, roffset, coffset, rows, cols, format)
|
||||
|
||||
def evaluate(self, expression):
|
||||
xml = StringIO()
|
||||
xml.write("<xml>")
|
||||
result = pydevd_vars.eval_in_context(expression, self.get_namespace(), self.get_namespace())
|
||||
xml.write(pydevd_vars.var_to_xml(result, expression))
|
||||
xml.write("</xml>")
|
||||
return xml.getvalue()
|
||||
|
||||
@silence_warnings_decorator
|
||||
def loadFullValue(self, seq, scope_attrs):
|
||||
"""
|
||||
Evaluate full value for async Console variables in a separate thread and send results to IDE side
|
||||
:param seq: id of command
|
||||
:param scope_attrs: a sequence of variables with their attributes separated by NEXT_VALUE_SEPARATOR
|
||||
(i.e.: obj\tattr1\tattr2NEXT_VALUE_SEPARATORobj2\attr1\tattr2)
|
||||
:return:
|
||||
"""
|
||||
frame_variables = self.get_namespace()
|
||||
var_objects = []
|
||||
vars = scope_attrs.split(NEXT_VALUE_SEPARATOR)
|
||||
for var_attrs in vars:
|
||||
if '\t' in var_attrs:
|
||||
name, attrs = var_attrs.split('\t', 1)
|
||||
|
||||
else:
|
||||
name = var_attrs
|
||||
attrs = None
|
||||
if name in frame_variables:
|
||||
var_object = pydevd_vars.resolve_var_object(frame_variables[name], attrs)
|
||||
var_objects.append((var_object, name))
|
||||
else:
|
||||
var_object = pydevd_vars.eval_in_context(name, frame_variables, frame_variables)
|
||||
var_objects.append((var_object, name))
|
||||
|
||||
from _pydevd_bundle.pydevd_comm import GetValueAsyncThreadConsole
|
||||
py_db = getattr(self, 'debugger', None)
|
||||
|
||||
if py_db is None:
|
||||
py_db = get_global_debugger()
|
||||
|
||||
if py_db is None:
|
||||
from pydevd import PyDB
|
||||
py_db = PyDB()
|
||||
|
||||
t = GetValueAsyncThreadConsole(py_db, self.get_server(), seq, var_objects)
|
||||
t.start()
|
||||
|
||||
def changeVariable(self, attr, value):
|
||||
|
||||
def do_change_variable():
|
||||
Exec('%s=%s' % (attr, value), self.get_namespace(), self.get_namespace())
|
||||
|
||||
# Important: it has to be really enabled in the main thread, so, schedule
|
||||
# it to run in the main thread.
|
||||
self.exec_queue.put(do_change_variable)
|
||||
|
||||
    def connectToDebugger(self, debuggerPort, debugger_options=None):
        '''
        Used to show console with variables connection.
        Mainly, monkey-patches things in the debugger structure so that the debugger protocol works.

        :param debuggerPort: port on localhost where the IDE-side debugger listens.
        :param debugger_options: optional dict of debugger settings; the special
            key "PYDEVD_EXTRA_ENVS" maps environment variable names to values
            that are merged into os.environ before connecting.
        '''

        if debugger_options is None:
            debugger_options = {}
        env_key = "PYDEVD_EXTRA_ENVS"
        if env_key in debugger_options:
            # Merge requested environment variables into os.environ, appending
            # with the platform path separator when a value already exists.
            for (env_name, value) in debugger_options[env_key].items():
                existing_value = os.environ.get(env_name, None)
                if existing_value:
                    os.environ[env_name] = "%s%c%s" % (existing_value, os.path.pathsep, value)
                else:
                    os.environ[env_name] = value
                if env_name == "PYTHONPATH":
                    # Keep sys.path in sync so the new entries are importable now.
                    sys.path.append(value)

            del debugger_options[env_key]

        def do_connect_to_debugger():
            # Runs later on the main thread (see exec_queue below).
            try:
                # Try to import the packages needed to attach the debugger
                import pydevd
                from _pydev_bundle._pydev_saved_modules import threading
            except:
                # This happens on Jython embedded in host eclipse
                traceback.print_exc()
                sys.stderr.write('pydevd is not available, cannot connect\n')
                # NOTE(review): execution falls through here even though the
                # names imported above are unbound -- presumably this branch is
                # effectively fatal anyway; confirm whether an early return is
                # intended.

            from _pydevd_bundle.pydevd_constants import set_thread_id
            from _pydev_bundle import pydev_localhost
            set_thread_id(threading.current_thread(), "console_main")

            # Fake ids shared with the Java/IDE side so the console shows up
            # as a (virtual) paused thread with one frame.
            VIRTUAL_FRAME_ID = "1"  # matches PyStackFrameConsole.java
            VIRTUAL_CONSOLE_ID = "console_main"  # matches PyThreadConsole.java
            f = FakeFrame()
            f.f_back = None
            f.f_globals = {}  # As globals=locals here, let's simply let it empty (and save a bit of network traffic).
            f.f_locals = self.get_namespace()

            self.debugger = pydevd.PyDB()
            self.debugger.add_fake_frame(thread_id=VIRTUAL_CONSOLE_ID, frame_id=VIRTUAL_FRAME_ID, frame=f)
            try:
                pydevd.apply_debugger_options(debugger_options)
                self.debugger.connect(pydev_localhost.get_localhost(), debuggerPort)
                self.debugger.prepare_to_run()
                self.debugger.disable_tracing()
            except:
                # Connection failure is reported but does not kill the console.
                traceback.print_exc()
                sys.stderr.write('Failed to connect to target debugger.\n')

            # Register to process commands when idle
            self.debugrunning = False
            try:
                import pydevconsole
                pydevconsole.set_debug_hook(self.debugger.process_internal_commands)
            except:
                traceback.print_exc()
                sys.stderr.write('Version of Python does not support debuggable Interactive Console.\n')

        # Important: it has to be really enabled in the main thread, so, schedule
        # it to run in the main thread.
        self.exec_queue.put(do_connect_to_debugger)

        return ('connect complete',)
|
||||
|
||||
def handshake(self):
|
||||
if self.connect_status_queue is not None:
|
||||
self.connect_status_queue.put(True)
|
||||
return "PyCharm"
|
||||
|
||||
def get_connect_status_queue(self):
|
||||
return self.connect_status_queue
|
||||
|
||||
def hello(self, input_str):
|
||||
# Don't care what the input string is
|
||||
return ("Hello eclipse",)
|
||||
|
||||
    def enableGui(self, guiname):
        ''' Enable the GUI specified in guiname (see inputhook for list).
            As with IPython, enabling multiple GUIs isn't an error, but
            only the last one's main loop runs and it may not work
        '''

        def do_enable_gui():
            # Runs later on the main thread: GUI event loops must live there.
            from _pydev_bundle.pydev_versioncheck import versionok_for_gui
            if versionok_for_gui():
                try:
                    from pydev_ipython.inputhook import enable_gui
                    enable_gui(guiname)
                except:
                    sys.stderr.write("Failed to enable GUI event loop integration for '%s'\n" % guiname)
                    traceback.print_exc()
            elif guiname not in ['none', '', None]:
                # Only print a warning if the guiname was going to do something
                sys.stderr.write("PyDev console: Python version does not support GUI event loop integration for '%s'\n" % guiname)
            # Return value does not matter, so return back what was sent
            return guiname

        # Important: it has to be really enabled in the main thread, so, schedule
        # it to run in the main thread.
        self.exec_queue.put(do_enable_gui)
|
||||
|
||||
def get_ipython_hidden_vars_dict(self):
|
||||
return None
|
||||
|
||||
|
||||
# =======================================================================================================================
|
||||
# FakeFrame
|
||||
# =======================================================================================================================
|
||||
class FakeFrame:
    '''
    A minimal stand-in for a real Python frame object.

    Used to show console with variables connection: frame attributes such as
    ``f_back``, ``f_globals`` and ``f_locals`` are assigned externally by the
    code that builds the virtual console frame.
    '''
|
|
@ -0,0 +1,40 @@
|
|||
|
||||
import sys
|
||||
import traceback
|
||||
from types import ModuleType
|
||||
from _pydevd_bundle.pydevd_constants import DebugInfoHolder
|
||||
|
||||
import builtins
|
||||
|
||||
|
||||
class ImportHookManager(ModuleType):
    """Wraps the builtin ``__import__`` so a one-shot activation callback can
    run right after a given module is imported for the first time."""

    def __init__(self, name, system_import):
        ModuleType.__init__(self, name)
        # The real importer we delegate to (and eventually restore).
        self._system_import = system_import
        # module name -> activation callback, consumed on first import.
        self._modules_to_patch = {}

    def add_module_name(self, module_name, activate_function):
        """Register *activate_function* to run once *module_name* is imported."""
        self._modules_to_patch[module_name] = activate_function

    def do_import(self, name, *args, **kwargs):
        """Import *name* through the saved importer, firing its callback (once)."""
        imported = self._system_import(name, *args, **kwargs)
        try:
            callback = self._modules_to_patch.pop(name, None)
            if callback:
                callback()  # call activate function
        except:
            if DebugInfoHolder.DEBUG_TRACE_LEVEL >= 2:
                traceback.print_exc()

        # Restore normal system importer to reduce performance impact
        # of calling this method every time an import statement is invoked
        if not self._modules_to_patch:
            builtins.__import__ = self._system_import

        return imported
|
||||
|
||||
|
||||
# Install the hook: replace the builtin importer with the manager's dispatcher
# and expose the manager as an importable module so other code can register
# activation callbacks against it.
import_hook_manager = ImportHookManager(__name__ + '.import_hook', builtins.__import__)
builtins.__import__ = import_hook_manager.do_import
sys.modules[import_hook_manager.__name__] = import_hook_manager
|
|
@ -0,0 +1,13 @@
|
|||
from _pydev_bundle._pydev_saved_modules import xmlrpclib
|
||||
from _pydev_bundle._pydev_saved_modules import xmlrpcserver
|
||||
|
||||
SimpleXMLRPCServer = xmlrpcserver.SimpleXMLRPCServer
|
||||
|
||||
from _pydev_bundle._pydev_execfile import execfile
|
||||
|
||||
from _pydev_bundle._pydev_saved_modules import _queue
|
||||
|
||||
from _pydevd_bundle.pydevd_exec2 import Exec
|
||||
|
||||
from urllib.parse import quote, quote_plus, unquote_plus # @UnresolvedImport
|
||||
|
|
@ -0,0 +1,97 @@
|
|||
import sys
|
||||
from _pydev_bundle.pydev_console_utils import BaseInterpreterInterface
|
||||
|
||||
import traceback
|
||||
|
||||
# Uncomment to force PyDev standard shell.
|
||||
# raise ImportError()
|
||||
|
||||
from _pydev_bundle.pydev_ipython_console_011 import get_pydev_frontend
|
||||
|
||||
|
||||
#=======================================================================================================================
|
||||
# InterpreterInterface
|
||||
#=======================================================================================================================
|
||||
class InterpreterInterface(BaseInterpreterInterface):
    '''
    The methods in this class should be registered in the xml-rpc server.

    IPython-backed variant of the console interpreter: delegates execution,
    completion and namespace access to a _PyDevFrontEnd instance.
    '''

    def __init__(self, host, client_port, main_thread, show_banner=True, connect_status_queue=None):
        BaseInterpreterInterface.__init__(self, main_thread, connect_status_queue)
        self.client_port = client_port
        self.host = host
        # IPython frontend that actually executes code.
        self.interpreter = get_pydev_frontend(host, client_port)
        self._input_error_printed = False
        # Bookkeeping for the best-effort "magic commands" notification below.
        self.notification_succeeded = False
        self.notification_tries = 0
        self.notification_max_tries = 3
        self.show_banner = show_banner

        self.notify_about_magic()

    def get_greeting_msg(self):
        # Optionally show the IPython banner, then return the IDE greeting.
        if self.show_banner:
            self.interpreter.show_banner()
        return self.interpreter.get_greeting_msg()

    def do_add_exec(self, code_fragment):
        # Execute one code fragment; truthy result means "more input needed".
        self.notify_about_magic()
        if code_fragment.text.rstrip().endswith('??'):
            # '??' is IPython's "show details" syntax; bracket its verbose
            # output with markers the IDE can recognise.
            print('IPython-->')
        try:
            res = bool(self.interpreter.add_exec(code_fragment.text))
        finally:
            if code_fragment.text.rstrip().endswith('??'):
                print('<--IPython')

        return res

    def get_namespace(self):
        return self.interpreter.get_namespace()

    def getCompletions(self, text, act_tok):
        return self.interpreter.getCompletions(text, act_tok)

    def close(self):
        sys.exit(0)

    def notify_about_magic(self):
        # Best-effort (at most notification_max_tries attempts): tell the IDE
        # which '%' magic commands exist so it can complete/recognise them.
        if not self.notification_succeeded:
            self.notification_tries += 1
            if self.notification_tries > self.notification_max_tries:
                return
            completions = self.getCompletions("%", "%")
            magic_commands = [x[0] for x in completions]

            server = self.get_server()

            if server is not None:
                try:
                    server.NotifyAboutMagic(magic_commands, self.interpreter.is_automagic())
                    self.notification_succeeded = True
                except:
                    self.notification_succeeded = False

    def get_ipython_hidden_vars_dict(self):
        # Names IPython injects into the user namespace that the IDE's
        # variables view should hide; returns None when unavailable.
        try:
            if hasattr(self.interpreter, 'ipython') and hasattr(self.interpreter.ipython, 'user_ns_hidden'):
                user_ns_hidden = self.interpreter.ipython.user_ns_hidden
                if isinstance(user_ns_hidden, dict):
                    # Since IPython 2 dict `user_ns_hidden` contains hidden variables and values
                    user_hidden_dict = user_ns_hidden.copy()
                else:
                    # In IPython 1.x `user_ns_hidden` used to be a set with names of hidden variables
                    user_hidden_dict = dict([(key, val) for key, val in self.interpreter.ipython.user_ns.items()
                                             if key in user_ns_hidden])

                # while `_`, `__` and `___` were not initialized, they are not presented in `user_ns_hidden`
                user_hidden_dict.setdefault('_', '')
                user_hidden_dict.setdefault('__', '')
                user_hidden_dict.setdefault('___', '')

                return user_hidden_dict
        except:
            # Getting IPython variables shouldn't break loading frame variables
            traceback.print_exc()
|
||||
|
|
@ -0,0 +1,516 @@
|
|||
# TODO that would make IPython integration better
|
||||
# - show output other times then when enter was pressed
|
||||
# - support proper exit to allow IPython to cleanup (e.g. temp files created with %edit)
|
||||
# - support Ctrl-D (Ctrl-Z on Windows)
|
||||
# - use IPython (numbered) prompts in PyDev
|
||||
# - better integration of IPython and PyDev completions
|
||||
# - some of the semantics on handling the code completion are not correct:
|
||||
# eg: Start a line with % and then type c should give %cd as a completion by it doesn't
|
||||
# however type %c and request completions and %cd is given as an option
|
||||
# eg: Completing a magic when user typed it without the leading % causes the % to be inserted
|
||||
# to the left of what should be the first colon.
|
||||
"""Interface to TerminalInteractiveShell for PyDev Interactive Console frontend
|
||||
for IPython 0.11 to 1.0+.
|
||||
"""
|
||||
|
||||
from __future__ import print_function
|
||||
|
||||
import os
|
||||
import sys
|
||||
import codeop
|
||||
import traceback
|
||||
|
||||
from IPython.core.error import UsageError
|
||||
from IPython.core.completer import IPCompleter
|
||||
from IPython.core.interactiveshell import InteractiveShell, InteractiveShellABC
|
||||
from IPython.core.usage import default_banner_parts
|
||||
from IPython.utils.strdispatch import StrDispatch
|
||||
import IPython.core.release as IPythonRelease
|
||||
from IPython.terminal.interactiveshell import TerminalInteractiveShell
|
||||
try:
|
||||
from traitlets import CBool, Unicode
|
||||
except ImportError:
|
||||
from IPython.utils.traitlets import CBool, Unicode
|
||||
from IPython.core import release
|
||||
|
||||
from _pydev_bundle.pydev_imports import xmlrpclib
|
||||
|
||||
# Reuse IPython's own banner pieces for the PyDev console greeting.
default_pydev_banner_parts = default_banner_parts

default_pydev_banner = ''.join(default_pydev_banner_parts)
|
||||
|
||||
|
||||
def show_in_pager(self, strng, *args, **kwargs):
    """ Run a string through pager """
    # On PyDev we just output the string, there are scroll bars in the console
    # to handle "paging". This is the same behaviour as when TERM==dump (see
    # page.py)
    text = strng
    # for compatibility with mime-bundle form:
    if isinstance(text, dict):
        text = text.get('text/plain', text)
    print(text)
|
||||
|
||||
|
||||
def create_editor_hook(pydev_host, pydev_client_port):
    """Build an IPython 'editor' hook that asks the PyDev/PyCharm IDE
    (listening at *pydev_host*:*pydev_client_port*) to open a file."""

    def call_editor(filename, line=0, wait=True):
        """ Open an editor in PyDev """
        if line is None:
            line = 0

        # Make sure to send an absolution path because unlike most editor hooks
        # we don't launch a process. This is more like what happens in the zmqshell
        filename = os.path.abspath(filename)

        # Tell PyDev to open the editor
        endpoint = 'http://%s:%s' % (pydev_host, pydev_client_port)
        server = xmlrpclib.Server(endpoint)
        server.IPythonEditor(filename, str(line))

        if wait:
            input("Press Enter when done editing:")

    return call_editor
|
||||
|
||||
|
||||
class PyDevIPCompleter(IPCompleter):

    def __init__(self, *args, **kwargs):
        """ Create a Completer that reuses the advanced completion support of PyDev
        in addition to the completion support provided by IPython """
        IPCompleter.__init__(self, *args, **kwargs)
        # Use PyDev for python matches, see getCompletions below.
        # `self.python_matches` matches attributes or global python names;
        # dropping it lets PyDev's own completer supply those results.
        active_matchers = self.matchers
        if self.python_matches in active_matchers:
            active_matchers.remove(self.python_matches)
|
||||
|
||||
|
||||
class PyDevIPCompleter6(IPCompleter):

    def __init__(self, *args, **kwargs):
        """ Create a Completer that reuses the advanced completion support of PyDev
        in addition to the completion support provided by IPython """
        IPCompleter.__init__(self, *args, **kwargs)

    @property
    def matchers(self):
        """All active matcher routines for completion"""
        # In IPython 6 `matchers` became a property on the superclass, so the
        # only way to exclude `python_matches` is to override it here and list
        # every matcher except that one.
        return [
            self.file_matches,
            self.magic_matches,
            self.python_func_kw_matches,
            self.dict_key_matches,
        ]

    @matchers.setter
    def matchers(self, value):
        # IPCompleter.__init__ assigns to `matchers`; swallow that assignment
        # so the property above stays authoritative.
        return
|
||||
|
||||
|
||||
class PyDevTerminalInteractiveShell(TerminalInteractiveShell):
    """TerminalInteractiveShell tuned to run embedded in the PyDev/PyCharm
    console: no readline/autoindent/colors/pager (the IDE provides those),
    and completion is delegated to PyDev-aware completer classes."""

    banner1 = Unicode(default_pydev_banner, config=True,
        help="""The part of the banner to be printed before the profile"""
    )

    # TODO term_title: (can PyDev's title be changed???, see terminal.py for where to inject code, in particular set_term_title as used by %cd)
    # for now, just disable term_title
    term_title = CBool(False)

    # Note in version 0.11 there is no guard in the IPython code about displaying a
    # warning, so with 0.11 you get:
    #  WARNING: Readline services not available or not loaded.
    #  WARNING: The auto-indent feature requires the readline library
    # Disable readline, readline type code is all handled by PyDev (on Java side)
    readline_use = CBool(False)
    # autoindent has no meaning in PyDev (PyDev always handles that on the Java side),
    # and attempting to enable it will print a warning in the absence of readline.
    autoindent = CBool(False)
    # Force console to not give warning about color scheme choice and default to NoColor.
    # TODO It would be nice to enable colors in PyDev but:
    # - The PyDev Console (Eclipse Console) does not support the full range of colors, so the
    # effect isn't as nice anyway at the command line
    # - If done, the color scheme should default to LightBG, but actually be dependent on
    # any settings the user has (such as if a dark theme is in use, then Linux is probably
    # a better theme).
    colors_force = CBool(True)
    colors = Unicode("NoColor")
    # Since IPython 5 the terminal interface is not compatible with Emacs `inferior-shell` and
    # the `simple_prompt` flag is needed
    simple_prompt = CBool(True)

    # In the PyDev Console, GUI control is done via hookable XML-RPC server
    @staticmethod
    def enable_gui(gui=None, app=None):
        """Switch amongst GUI input hooks by name.
        """
        # Deferred import
        from pydev_ipython.inputhook import enable_gui as real_enable_gui
        try:
            return real_enable_gui(gui, app)
        except ValueError as e:
            raise UsageError("%s" % e)

    #-------------------------------------------------------------------------
    # Things related to hooks
    #-------------------------------------------------------------------------

    def init_history(self):
        # Disable history so that we don't have an additional thread for that
        # (and we don't use the history anyways).
        self.config.HistoryManager.enabled = False
        super(PyDevTerminalInteractiveShell, self).init_history()

    def init_hooks(self):
        super(PyDevTerminalInteractiveShell, self).init_hooks()
        # Route pager output straight to stdout (see show_in_pager above).
        self.set_hook('show_in_pager', show_in_pager)

    #-------------------------------------------------------------------------
    # Things related to exceptions
    #-------------------------------------------------------------------------

    def showtraceback(self, exc_tuple=None, *args, **kwargs):
        # IPython does a lot of clever stuff with Exceptions. However mostly
        # it is related to IPython running in a terminal instead of an IDE.
        # (e.g. it prints out snippets of code around the stack trace)
        # PyDev does a lot of clever stuff too, so leave exception handling
        # with default print_exc that PyDev can parse and do its clever stuff
        # with (e.g. it puts links back to the original source code)
        try:
            if exc_tuple is None:
                etype, value, tb = sys.exc_info()
            else:
                etype, value, tb = exc_tuple
        except ValueError:
            return

        if tb is not None:
            traceback.print_exception(etype, value, tb)

    #-------------------------------------------------------------------------
    # Things related to text completion
    #-------------------------------------------------------------------------

    # The way to construct an IPCompleter changed in most versions,
    # so we have a custom, per version implementation of the construction

    def _new_completer_100(self):
        # IPython 1.x constructor signature (still takes alias_table).
        completer = PyDevIPCompleter(shell=self,
                                     namespace=self.user_ns,
                                     global_namespace=self.user_global_ns,
                                     alias_table=self.alias_manager.alias_table,
                                     use_readline=self.has_readline,
                                     parent=self,
                                     )
        return completer

    def _new_completer_234(self):
        # correct for IPython versions 2.x, 3.x, 4.x
        completer = PyDevIPCompleter(shell=self,
                                     namespace=self.user_ns,
                                     global_namespace=self.user_global_ns,
                                     use_readline=self.has_readline,
                                     parent=self,
                                     )
        return completer

    def _new_completer_500(self):
        # IPython 5.x: readline support dropped.
        completer = PyDevIPCompleter(shell=self,
                                     namespace=self.user_ns,
                                     global_namespace=self.user_global_ns,
                                     use_readline=False,
                                     parent=self
                                     )
        return completer

    def _new_completer_600(self):
        # IPython 6.x+: `matchers` is a property, so a dedicated subclass is used.
        completer = PyDevIPCompleter6(shell=self,
                                      namespace=self.user_ns,
                                      global_namespace=self.user_global_ns,
                                      use_readline=False,
                                      parent=self
                                      )
        return completer

    def add_completer_hooks(self):
        # Wire IPython's specialised completers (import/%run/%cd/%reset) into
        # the dispatcher used by our custom completer.
        from IPython.core.completerlib import module_completer, magic_run_completer, cd_completer
        try:
            from IPython.core.completerlib import reset_completer
        except ImportError:
            # reset_completer was added for rel-0.13
            reset_completer = None
        self.configurables.append(self.Completer)

        # Add custom completers to the basic ones built into IPCompleter
        sdisp = self.strdispatchers.get('complete_command', StrDispatch())
        self.strdispatchers['complete_command'] = sdisp
        self.Completer.custom_completers = sdisp

        self.set_hook('complete_command', module_completer, str_key='import')
        self.set_hook('complete_command', module_completer, str_key='from')
        self.set_hook('complete_command', magic_run_completer, str_key='%run')
        self.set_hook('complete_command', cd_completer, str_key='%cd')
        if reset_completer:
            self.set_hook('complete_command', reset_completer, str_key='%reset')

    def init_completer(self):
        """Initialize the completion machinery.

        This creates a completer that provides the completions that are
        IPython specific. We use this to supplement PyDev's core code
        completions.
        """
        # PyDev uses its own completer and custom hooks so that it uses
        # most completions from PyDev's core completer which provides
        # extra information.
        # See getCompletions for where the two sets of results are merged

        if IPythonRelease._version_major >= 6:
            self.Completer = self._new_completer_600()
        elif IPythonRelease._version_major >= 5:
            self.Completer = self._new_completer_500()
        elif IPythonRelease._version_major >= 2:
            self.Completer = self._new_completer_234()
        elif IPythonRelease._version_major >= 1:
            self.Completer = self._new_completer_100()

        if hasattr(self.Completer, 'use_jedi'):
            # PyDev provides its own source analysis; jedi would duplicate it.
            self.Completer.use_jedi = False

        self.add_completer_hooks()

        if IPythonRelease._version_major <= 3:
            # Only configure readline if we truly are using readline.  IPython can
            # do tab-completion over the network, in GUIs, etc, where readline
            # itself may be absent
            if self.has_readline:
                self.set_readline_completer()

    #-------------------------------------------------------------------------
    # Things related to aliases
    #-------------------------------------------------------------------------

    def init_alias(self):
        # InteractiveShell defines alias's we want, but TerminalInteractiveShell defines
        # ones we don't. So don't use super and instead go right to InteractiveShell
        InteractiveShell.init_alias(self)

    #-------------------------------------------------------------------------
    # Things related to exiting
    #-------------------------------------------------------------------------
    def ask_exit(self):
        """ Ask the shell to exit. Can be overiden and used as a callback. """
        # TODO PyDev's console does not have support from the Python side to exit
        # the console. If user forces the exit (with sys.exit()) then the console
        # simply reports errors. e.g.:
        # >>> import sys
        # >>> sys.exit()
        # Failed to create input stream: Connection refused
        # >>>
        # Console already exited with value: 0 while waiting for an answer.
        # Error stream:
        # Output stream:
        # >>>
        #
        # Alternatively if you use the non-IPython shell this is what happens
        # >>> exit()
        # <type 'exceptions.SystemExit'>:None
        # >>>
        # <type 'exceptions.SystemExit'>:None
        # >>>
        #
        super(PyDevTerminalInteractiveShell, self).ask_exit()
        print('To exit the PyDev Console, terminate the console within IDE.')

    #-------------------------------------------------------------------------
    # Things related to magics
    #-------------------------------------------------------------------------

    def init_magics(self):
        super(PyDevTerminalInteractiveShell, self).init_magics()
        # TODO Any additional magics for PyDev?
|
||||
|
||||
|
||||
# Register our shell with the InteractiveShell ABC so isinstance checks pass.
InteractiveShellABC.register(PyDevTerminalInteractiveShell)  # @UndefinedVariable
|
||||
|
||||
|
||||
#=======================================================================================================================
|
||||
# _PyDevFrontEnd
|
||||
#=======================================================================================================================
|
||||
class _PyDevFrontEnd:
    """Bridge between the PyDev/PyCharm console protocol and an embedded
    PyDevTerminalInteractiveShell instance."""

    # IPython version string, reported in the greeting.
    version = release.__version__

    def __init__(self):
        # Create and initialize our IPython instance.
        if hasattr(PyDevTerminalInteractiveShell, '_instance') and PyDevTerminalInteractiveShell._instance is not None:
            self.ipython = PyDevTerminalInteractiveShell._instance
        else:
            self.ipython = PyDevTerminalInteractiveShell.instance()

        # Counter of executed cells and the buffer of pending (incomplete) lines.
        self._curr_exec_line = 0
        self._curr_exec_lines = []

    def show_banner(self):
        self.ipython.show_banner()

    def update(self, globals, locals):
        # Rebind the shell's namespaces to the caller's globals/locals while
        # keeping any names IPython already defined that the caller lacks.
        ns = self.ipython.user_ns

        for key, value in list(ns.items()):
            if key not in locals:
                locals[key] = value

        self.ipython.user_global_ns.clear()
        self.ipython.user_global_ns.update(globals)
        self.ipython.user_ns = locals

        if hasattr(self.ipython, 'history_manager') and hasattr(self.ipython.history_manager, 'save_thread'):
            self.ipython.history_manager.save_thread.pydev_do_not_trace = True  # don't trace ipython history saving thread

    def complete(self, string):
        # Returns IPython's (line, completions) pair, or None if the
        # completer raised (exceptions are deliberately silenced).
        try:
            if string:
                return self.ipython.complete(None, line=string, cursor_pos=string.__len__())
            else:
                return self.ipython.complete(string, string, 0)
        except:
            # Silence completer exceptions
            pass

    def is_complete(self, string):
        # Based on IPython 0.10.1
        # True when *string* compiles as a complete statement (or is empty).

        if string in ('', '\n'):
            # Prefiltering, eg through ipython0, may return an empty
            # string although some operations have been accomplished. We
            # thus want to consider an empty string as a complete
            # statement.
            return True
        else:
            try:
                # Add line returns here, to make sure that the statement is
                # complete (except if '\' was used).
                # This should probably be done in a different place (like
                # maybe 'prefilter_input' method? For now, this works.
                clean_string = string.rstrip('\n')
                if not clean_string.endswith('\\'):
                    clean_string += '\n\n'

                is_complete = codeop.compile_command(
                    clean_string,
                    "<string>",
                    "exec"
                )
            except Exception:
                # XXX: Hack: return True so that the
                # code gets executed and the error captured.
                is_complete = True
            return is_complete

    def getCompletions(self, text, act_tok):
        # Get completions from IPython and from PyDev and merge the results
        # IPython only gives context free list of completions, while PyDev
        # gives detailed information about completions.
        try:
            TYPE_IPYTHON = '11'
            TYPE_IPYTHON_MAGIC = '12'
            _line, ipython_completions = self.complete(text)

            from _pydev_bundle._pydev_completer import Completer
            completer = Completer(self.get_namespace(), None)
            ret = completer.complete(act_tok)
            append = ret.append
            ip = self.ipython
            pydev_completions = set([f[0] for f in ret])
            for ipython_completion in ipython_completions:

                # PyCharm was not expecting completions with '%'...
                # Could be fixed in the backend, but it's probably better
                # fixing it at PyCharm.
                # if ipython_completion.startswith('%'):
                #     ipython_completion = ipython_completion[1:]

                if ipython_completion not in pydev_completions:
                    pydev_completions.add(ipython_completion)
                    inf = ip.object_inspect(ipython_completion)
                    if inf['type_name'] == 'Magic function':
                        pydev_type = TYPE_IPYTHON_MAGIC
                    else:
                        pydev_type = TYPE_IPYTHON
                    pydev_doc = inf['docstring']
                    if pydev_doc is None:
                        pydev_doc = ''
                    append((ipython_completion, pydev_doc, '', pydev_type))
            return ret
        except:
            import traceback;traceback.print_exc()
            return []

    def get_namespace(self):
        return self.ipython.user_ns

    def clear_buffer(self):
        # Drop any partially-entered (incomplete) statement lines.
        del self._curr_exec_lines[:]

    def add_exec(self, line):
        # Feed one line of input; returns True when more input is needed to
        # complete the current statement, False once it was executed.
        if self._curr_exec_lines:
            # Continuation of a multi-line statement already being buffered.
            self._curr_exec_lines.append(line)

            buf = '\n'.join(self._curr_exec_lines)

            if self.is_complete(buf):
                self._curr_exec_line += 1
                # NOTE(review): this path runs without store_history=True,
                # unlike the single-line path below -- confirm whether that
                # asymmetry is intentional.
                self.ipython.run_cell(buf)
                del self._curr_exec_lines[:]
                return False  # execute complete (no more)

            return True  # needs more
        else:

            if not self.is_complete(line):
                # Did not execute
                self._curr_exec_lines.append(line)
                return True  # needs more
            else:
                self._curr_exec_line += 1
                self.ipython.run_cell(line, store_history=True)
                # hist = self.ipython.history_manager.output_hist_reprs
                # rep = hist.get(self._curr_exec_line, None)
                # if rep is not None:
                #     print(rep)
                return False  # execute complete (no more)

    def is_automagic(self):
        return self.ipython.automagic

    def get_greeting_msg(self):
        return 'PyDev console: using IPython %s\n' % self.version
|
||||
|
||||
|
||||
class _PyDevFrontEndContainer:
|
||||
_instance = None
|
||||
_last_host_port = None
|
||||
|
||||
|
||||
def get_pydev_frontend(pydev_host, pydev_client_port):
    # Singleton accessor: create the frontend on first use and (re)wire its
    # IPython editor hook whenever the host/port pair changes.
    if _PyDevFrontEndContainer._instance is None:
        _PyDevFrontEndContainer._instance = _PyDevFrontEnd()

    if _PyDevFrontEndContainer._last_host_port != (pydev_host, pydev_client_port):
        _PyDevFrontEndContainer._last_host_port = pydev_host, pydev_client_port

        # Back channel to PyDev to open editors (in the future other
        # info may go back this way. This is the same channel that is
        # used to get stdin, see StdIn in pydev_console_utils)
        _PyDevFrontEndContainer._instance.ipython.hooks['editor'] = create_editor_hook(pydev_host, pydev_client_port)

        # Note: setting the callback directly because setting it with set_hook would actually create a chain instead
        # of ovewriting at each new call).
        # _PyDevFrontEndContainer._instance.ipython.set_hook('editor', create_editor_hook(pydev_host, pydev_client_port))

    return _PyDevFrontEndContainer._instance
|
||||
|
|
@ -0,0 +1,23 @@
|
|||
from _pydev_bundle._pydev_saved_modules import threading
|
||||
|
||||
# Hack for https://www.brainwy.com/tracker/PyDev/363 (i.e.: calling is_alive() can throw AssertionError under some
|
||||
# circumstances).
|
||||
# It is required to debug threads started by start_new_thread in Python 3.4
|
||||
# Probe a throw-away Thread once at import time to pick the cheapest
# liveness check this interpreter supports (avoids is_alive()'s possible
# AssertionError -- see https://www.brainwy.com/tracker/PyDev/363 -- and is
# needed to debug threads started by start_new_thread in Python 3.4).
_probe = threading.Thread()
if hasattr(_probe, '_is_stopped'):  # Python 3.x has this

    def is_thread_alive(t):
        return not t._is_stopped

elif hasattr(_probe, '_Thread__stopped'):  # Python 2.x has this

    def is_thread_alive(t):
        return not t._Thread__stopped

else:

    # Jython wraps a native java thread and thus only obeys the public API.
    def is_thread_alive(t):
        return t.is_alive()

del _probe
|
|
@ -0,0 +1,67 @@
|
|||
from _pydev_bundle._pydev_saved_modules import socket
|
||||
import sys
|
||||
|
||||
IS_JYTHON = sys.platform.find('java') != -1
|
||||
|
||||
_cache = None
|
||||
|
||||
|
||||
def get_localhost():
|
||||
'''
|
||||
Should return 127.0.0.1 in ipv4 and ::1 in ipv6
|
||||
|
||||
localhost is not used because on windows vista/windows 7, there can be issues where the resolving doesn't work
|
||||
properly and takes a lot of time (had this issue on the pyunit server).
|
||||
|
||||
Using the IP directly solves the problem.
|
||||
'''
|
||||
# TODO: Needs better investigation!
|
||||
|
||||
global _cache
|
||||
if _cache is None:
|
||||
try:
|
||||
for addr_info in socket.getaddrinfo("localhost", 80, 0, 0, socket.SOL_TCP):
|
||||
config = addr_info[4]
|
||||
if config[0] == '127.0.0.1':
|
||||
_cache = '127.0.0.1'
|
||||
return _cache
|
||||
except:
|
||||
# Ok, some versions of Python don't have getaddrinfo or SOL_TCP... Just consider it 127.0.0.1 in this case.
|
||||
_cache = '127.0.0.1'
|
||||
else:
|
||||
_cache = 'localhost'
|
||||
|
||||
return _cache
|
||||
|
||||
|
||||
def get_socket_names(n_sockets, close=False):
|
||||
socket_names = []
|
||||
sockets = []
|
||||
for _ in range(n_sockets):
|
||||
if IS_JYTHON:
|
||||
# Although the option which would be pure java *should* work for Jython, the socket being returned is still 0
|
||||
# (i.e.: it doesn't give the local port bound, only the original port, which was 0).
|
||||
from java.net import ServerSocket
|
||||
sock = ServerSocket(0)
|
||||
socket_name = get_localhost(), sock.getLocalPort()
|
||||
else:
|
||||
sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
|
||||
sock.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)
|
||||
sock.bind((get_localhost(), 0))
|
||||
socket_name = sock.getsockname()
|
||||
|
||||
sockets.append(sock)
|
||||
socket_names.append(socket_name)
|
||||
|
||||
if close:
|
||||
for s in sockets:
|
||||
s.close()
|
||||
return socket_names
|
||||
|
||||
|
||||
def get_socket_name(close=False):
|
||||
return get_socket_names(1, close)[0]
|
||||
|
||||
|
||||
if __name__ == '__main__':
|
||||
print(get_socket_name())
|
|
@ -0,0 +1,228 @@
|
|||
from _pydevd_bundle.pydevd_constants import DebugInfoHolder, SHOW_COMPILE_CYTHON_COMMAND_LINE, NULL, LOG_TIME
|
||||
from contextlib import contextmanager
|
||||
import traceback
|
||||
import os
|
||||
import sys
|
||||
|
||||
|
||||
class _LoggingGlobals(object):
|
||||
|
||||
_warn_once_map = {}
|
||||
_debug_stream_filename = None
|
||||
_debug_stream = sys.stderr
|
||||
_debug_stream_initialized = False
|
||||
|
||||
|
||||
def initialize_debug_stream(reinitialize=False):
|
||||
'''
|
||||
:param bool reinitialize:
|
||||
Reinitialize is used to update the debug stream after a fork (thus, if it wasn't
|
||||
initialized, we don't need to do anything).
|
||||
'''
|
||||
if reinitialize:
|
||||
if not _LoggingGlobals._debug_stream_initialized:
|
||||
return
|
||||
else:
|
||||
if _LoggingGlobals._debug_stream_initialized:
|
||||
return
|
||||
|
||||
_LoggingGlobals._debug_stream_initialized = True
|
||||
|
||||
# Note: we cannot initialize with sys.stderr because when forking we may end up logging things in 'os' calls.
|
||||
_LoggingGlobals._debug_stream = NULL
|
||||
_LoggingGlobals._debug_stream_filename = None
|
||||
|
||||
if not DebugInfoHolder.PYDEVD_DEBUG_FILE:
|
||||
_LoggingGlobals._debug_stream = sys.stderr
|
||||
else:
|
||||
# Add pid to the filename.
|
||||
try:
|
||||
dirname = os.path.dirname(DebugInfoHolder.PYDEVD_DEBUG_FILE)
|
||||
basename = os.path.basename(DebugInfoHolder.PYDEVD_DEBUG_FILE)
|
||||
try:
|
||||
os.makedirs(dirname)
|
||||
except:
|
||||
pass # Ignore error if it already exists.
|
||||
|
||||
name, ext = os.path.splitext(basename)
|
||||
debug_file = os.path.join(dirname, name + '.' + str(os.getpid()) + ext)
|
||||
_LoggingGlobals._debug_stream = open(debug_file, 'w')
|
||||
_LoggingGlobals._debug_stream_filename = debug_file
|
||||
except:
|
||||
_LoggingGlobals._debug_stream = sys.stderr
|
||||
# Don't fail when trying to setup logging, just show the exception.
|
||||
traceback.print_exc()
|
||||
|
||||
|
||||
def list_log_files(pydevd_debug_file):
|
||||
log_files = []
|
||||
dirname = os.path.dirname(pydevd_debug_file)
|
||||
basename = os.path.basename(pydevd_debug_file)
|
||||
if os.path.isdir(dirname):
|
||||
name, ext = os.path.splitext(basename)
|
||||
for f in os.listdir(dirname):
|
||||
if f.startswith(name) and f.endswith(ext):
|
||||
log_files.append(os.path.join(dirname, f))
|
||||
return log_files
|
||||
|
||||
|
||||
@contextmanager
|
||||
def log_context(trace_level, stream):
|
||||
'''
|
||||
To be used to temporarily change the logging settings.
|
||||
'''
|
||||
original_trace_level = DebugInfoHolder.DEBUG_TRACE_LEVEL
|
||||
original_debug_stream = _LoggingGlobals._debug_stream
|
||||
original_pydevd_debug_file = DebugInfoHolder.PYDEVD_DEBUG_FILE
|
||||
original_debug_stream_filename = _LoggingGlobals._debug_stream_filename
|
||||
original_initialized = _LoggingGlobals._debug_stream_initialized
|
||||
|
||||
DebugInfoHolder.DEBUG_TRACE_LEVEL = trace_level
|
||||
_LoggingGlobals._debug_stream = stream
|
||||
_LoggingGlobals._debug_stream_initialized = True
|
||||
try:
|
||||
yield
|
||||
finally:
|
||||
DebugInfoHolder.DEBUG_TRACE_LEVEL = original_trace_level
|
||||
_LoggingGlobals._debug_stream = original_debug_stream
|
||||
DebugInfoHolder.PYDEVD_DEBUG_FILE = original_pydevd_debug_file
|
||||
_LoggingGlobals._debug_stream_filename = original_debug_stream_filename
|
||||
_LoggingGlobals._debug_stream_initialized = original_initialized
|
||||
|
||||
|
||||
import time
|
||||
_last_log_time = time.time()
|
||||
|
||||
|
||||
def _pydevd_log(level, msg, *args):
|
||||
'''
|
||||
Levels are:
|
||||
|
||||
0 most serious warnings/errors (always printed)
|
||||
1 warnings/significant events
|
||||
2 informational trace
|
||||
3 verbose mode
|
||||
'''
|
||||
if level <= DebugInfoHolder.DEBUG_TRACE_LEVEL:
|
||||
# yes, we can have errors printing if the console of the program has been finished (and we're still trying to print something)
|
||||
try:
|
||||
try:
|
||||
if args:
|
||||
msg = msg % args
|
||||
except:
|
||||
msg = '%s - %s' % (msg, args)
|
||||
|
||||
if LOG_TIME:
|
||||
global _last_log_time
|
||||
new_log_time = time.time()
|
||||
time_diff = new_log_time - _last_log_time
|
||||
_last_log_time = new_log_time
|
||||
msg = '%.2fs - %s\n' % (time_diff, msg,)
|
||||
else:
|
||||
msg = '%s\n' % (msg,)
|
||||
try:
|
||||
try:
|
||||
initialize_debug_stream() # Do it as late as possible
|
||||
_LoggingGlobals._debug_stream.write(msg)
|
||||
except TypeError:
|
||||
if isinstance(msg, bytes):
|
||||
# Depending on the StringIO flavor, it may only accept unicode.
|
||||
msg = msg.decode('utf-8', 'replace')
|
||||
_LoggingGlobals._debug_stream.write(msg)
|
||||
except UnicodeEncodeError:
|
||||
# When writing to the stream it's possible that the string can't be represented
|
||||
# in the encoding expected (in this case, convert it to the stream encoding
|
||||
# or ascii if we can't find one suitable using a suitable replace).
|
||||
encoding = getattr(_LoggingGlobals._debug_stream, 'encoding', 'ascii')
|
||||
msg = msg.encode(encoding, 'backslashreplace')
|
||||
msg = msg.decode(encoding)
|
||||
_LoggingGlobals._debug_stream.write(msg)
|
||||
|
||||
_LoggingGlobals._debug_stream.flush()
|
||||
except:
|
||||
pass
|
||||
return True
|
||||
|
||||
|
||||
def _pydevd_log_exception(msg='', *args):
|
||||
if msg or args:
|
||||
_pydevd_log(0, msg, *args)
|
||||
try:
|
||||
initialize_debug_stream() # Do it as late as possible
|
||||
traceback.print_exc(file=_LoggingGlobals._debug_stream)
|
||||
_LoggingGlobals._debug_stream.flush()
|
||||
except:
|
||||
raise
|
||||
|
||||
|
||||
def verbose(msg, *args):
|
||||
if DebugInfoHolder.DEBUG_TRACE_LEVEL >= 3:
|
||||
_pydevd_log(3, msg, *args)
|
||||
|
||||
|
||||
def debug(msg, *args):
|
||||
if DebugInfoHolder.DEBUG_TRACE_LEVEL >= 2:
|
||||
_pydevd_log(2, msg, *args)
|
||||
|
||||
|
||||
def info(msg, *args):
|
||||
if DebugInfoHolder.DEBUG_TRACE_LEVEL >= 1:
|
||||
_pydevd_log(1, msg, *args)
|
||||
|
||||
|
||||
warn = info
|
||||
|
||||
|
||||
def critical(msg, *args):
|
||||
_pydevd_log(0, msg, *args)
|
||||
|
||||
|
||||
def exception(msg='', *args):
|
||||
try:
|
||||
_pydevd_log_exception(msg, *args)
|
||||
except:
|
||||
pass # Should never fail (even at interpreter shutdown).
|
||||
|
||||
|
||||
error = exception
|
||||
|
||||
|
||||
def error_once(msg, *args):
|
||||
try:
|
||||
if args:
|
||||
message = msg % args
|
||||
else:
|
||||
message = str(msg)
|
||||
except:
|
||||
message = '%s - %s' % (msg, args)
|
||||
|
||||
if message not in _LoggingGlobals._warn_once_map:
|
||||
_LoggingGlobals._warn_once_map[message] = True
|
||||
critical(message)
|
||||
|
||||
|
||||
def exception_once(msg, *args):
|
||||
try:
|
||||
if args:
|
||||
message = msg % args
|
||||
else:
|
||||
message = str(msg)
|
||||
except:
|
||||
message = '%s - %s' % (msg, args)
|
||||
|
||||
if message not in _LoggingGlobals._warn_once_map:
|
||||
_LoggingGlobals._warn_once_map[message] = True
|
||||
exception(message)
|
||||
|
||||
|
||||
def debug_once(msg, *args):
|
||||
if DebugInfoHolder.DEBUG_TRACE_LEVEL >= 3:
|
||||
error_once(msg, *args)
|
||||
|
||||
|
||||
def show_compile_cython_command_line():
|
||||
if SHOW_COMPILE_CYTHON_COMMAND_LINE:
|
||||
dirname = os.path.dirname(os.path.dirname(__file__))
|
||||
error_once("warning: Debugger speedups using cython not found. Run '\"%s\" \"%s\" build_ext --inplace' to build.",
|
||||
sys.executable, os.path.join(dirname, 'setup_pydevd_cython.py'))
|
||||
|
File diff suppressed because it is too large
Load diff
|
@ -0,0 +1,216 @@
|
|||
from __future__ import nested_scopes
|
||||
|
||||
from _pydev_bundle._pydev_saved_modules import threading
|
||||
import os
|
||||
from _pydev_bundle import pydev_log
|
||||
|
||||
|
||||
def set_trace_in_qt():
|
||||
from _pydevd_bundle.pydevd_comm import get_global_debugger
|
||||
py_db = get_global_debugger()
|
||||
if py_db is not None:
|
||||
threading.current_thread() # Create the dummy thread for qt.
|
||||
py_db.enable_tracing()
|
||||
|
||||
|
||||
_patched_qt = False
|
||||
|
||||
|
||||
def patch_qt(qt_support_mode):
|
||||
'''
|
||||
This method patches qt (PySide2, PySide, PyQt4, PyQt5) so that we have hooks to set the tracing for QThread.
|
||||
'''
|
||||
if not qt_support_mode:
|
||||
return
|
||||
|
||||
if qt_support_mode is True or qt_support_mode == 'True':
|
||||
# do not break backward compatibility
|
||||
qt_support_mode = 'auto'
|
||||
|
||||
if qt_support_mode == 'auto':
|
||||
qt_support_mode = os.getenv('PYDEVD_PYQT_MODE', 'auto')
|
||||
|
||||
# Avoid patching more than once
|
||||
global _patched_qt
|
||||
if _patched_qt:
|
||||
return
|
||||
|
||||
pydev_log.debug('Qt support mode: %s', qt_support_mode)
|
||||
|
||||
_patched_qt = True
|
||||
|
||||
if qt_support_mode == 'auto':
|
||||
|
||||
patch_qt_on_import = None
|
||||
try:
|
||||
import PySide2 # @UnresolvedImport @UnusedImport
|
||||
qt_support_mode = 'pyside2'
|
||||
except:
|
||||
try:
|
||||
import Pyside # @UnresolvedImport @UnusedImport
|
||||
qt_support_mode = 'pyside'
|
||||
except:
|
||||
try:
|
||||
import PyQt5 # @UnresolvedImport @UnusedImport
|
||||
qt_support_mode = 'pyqt5'
|
||||
except:
|
||||
try:
|
||||
import PyQt4 # @UnresolvedImport @UnusedImport
|
||||
qt_support_mode = 'pyqt4'
|
||||
except:
|
||||
return
|
||||
|
||||
if qt_support_mode == 'pyside2':
|
||||
try:
|
||||
import PySide2.QtCore # @UnresolvedImport
|
||||
_internal_patch_qt(PySide2.QtCore, qt_support_mode)
|
||||
except:
|
||||
return
|
||||
|
||||
elif qt_support_mode == 'pyside':
|
||||
try:
|
||||
import PySide.QtCore # @UnresolvedImport
|
||||
_internal_patch_qt(PySide.QtCore, qt_support_mode)
|
||||
except:
|
||||
return
|
||||
|
||||
elif qt_support_mode == 'pyqt5':
|
||||
try:
|
||||
import PyQt5.QtCore # @UnresolvedImport
|
||||
_internal_patch_qt(PyQt5.QtCore)
|
||||
except:
|
||||
return
|
||||
|
||||
elif qt_support_mode == 'pyqt4':
|
||||
# Ok, we have an issue here:
|
||||
# PyDev-452: Selecting PyQT API version using sip.setapi fails in debug mode
|
||||
# http://pyqt.sourceforge.net/Docs/PyQt4/incompatible_apis.html
|
||||
# Mostly, if the user uses a different API version (i.e.: v2 instead of v1),
|
||||
# that has to be done before importing PyQt4 modules (PySide/PyQt5 don't have this issue
|
||||
# as they only implements v2).
|
||||
patch_qt_on_import = 'PyQt4'
|
||||
|
||||
def get_qt_core_module():
|
||||
import PyQt4.QtCore # @UnresolvedImport
|
||||
return PyQt4.QtCore
|
||||
|
||||
_patch_import_to_patch_pyqt_on_import(patch_qt_on_import, get_qt_core_module)
|
||||
|
||||
else:
|
||||
raise ValueError('Unexpected qt support mode: %s' % (qt_support_mode,))
|
||||
|
||||
|
||||
def _patch_import_to_patch_pyqt_on_import(patch_qt_on_import, get_qt_core_module):
|
||||
# I don't like this approach very much as we have to patch __import__, but I like even less
|
||||
# asking the user to configure something in the client side...
|
||||
# So, our approach is to patch PyQt4 right before the user tries to import it (at which
|
||||
# point he should've set the sip api version properly already anyways).
|
||||
|
||||
pydev_log.debug('Setting up Qt post-import monkeypatch.')
|
||||
|
||||
dotted = patch_qt_on_import + '.'
|
||||
original_import = __import__
|
||||
|
||||
from _pydev_bundle._pydev_sys_patch import patch_sys_module, patch_reload, cancel_patches_in_sys_module
|
||||
|
||||
patch_sys_module()
|
||||
patch_reload()
|
||||
|
||||
def patched_import(name, *args, **kwargs):
|
||||
if patch_qt_on_import == name or name.startswith(dotted):
|
||||
builtins.__import__ = original_import
|
||||
cancel_patches_in_sys_module()
|
||||
_internal_patch_qt(get_qt_core_module()) # Patch it only when the user would import the qt module
|
||||
return original_import(name, *args, **kwargs)
|
||||
|
||||
import builtins # Py3
|
||||
|
||||
builtins.__import__ = patched_import
|
||||
|
||||
|
||||
def _internal_patch_qt(QtCore, qt_support_mode='auto'):
|
||||
pydev_log.debug('Patching Qt: %s', QtCore)
|
||||
|
||||
_original_thread_init = QtCore.QThread.__init__
|
||||
_original_runnable_init = QtCore.QRunnable.__init__
|
||||
_original_QThread = QtCore.QThread
|
||||
|
||||
class FuncWrapper:
|
||||
|
||||
def __init__(self, original):
|
||||
self._original = original
|
||||
|
||||
def __call__(self, *args, **kwargs):
|
||||
set_trace_in_qt()
|
||||
return self._original(*args, **kwargs)
|
||||
|
||||
class StartedSignalWrapper(QtCore.QObject): # Wrapper for the QThread.started signal
|
||||
|
||||
try:
|
||||
_signal = QtCore.Signal() # @UndefinedVariable
|
||||
except:
|
||||
_signal = QtCore.pyqtSignal() # @UndefinedVariable
|
||||
|
||||
def __init__(self, thread, original_started):
|
||||
QtCore.QObject.__init__(self)
|
||||
self.thread = thread
|
||||
self.original_started = original_started
|
||||
if qt_support_mode in ('pyside', 'pyside2'):
|
||||
self._signal = original_started
|
||||
else:
|
||||
self._signal.connect(self._on_call)
|
||||
self.original_started.connect(self._signal)
|
||||
|
||||
def connect(self, func, *args, **kwargs):
|
||||
if qt_support_mode in ('pyside', 'pyside2'):
|
||||
return self._signal.connect(FuncWrapper(func), *args, **kwargs)
|
||||
else:
|
||||
return self._signal.connect(func, *args, **kwargs)
|
||||
|
||||
def disconnect(self, *args, **kwargs):
|
||||
return self._signal.disconnect(*args, **kwargs)
|
||||
|
||||
def emit(self, *args, **kwargs):
|
||||
return self._signal.emit(*args, **kwargs)
|
||||
|
||||
def _on_call(self, *args, **kwargs):
|
||||
set_trace_in_qt()
|
||||
|
||||
class ThreadWrapper(QtCore.QThread): # Wrapper for QThread
|
||||
|
||||
def __init__(self, *args, **kwargs):
|
||||
_original_thread_init(self, *args, **kwargs)
|
||||
|
||||
# In PyQt5 the program hangs when we try to call original run method of QThread class.
|
||||
# So we need to distinguish instances of QThread class and instances of QThread inheritors.
|
||||
if self.__class__.run == _original_QThread.run:
|
||||
self.run = self._exec_run
|
||||
else:
|
||||
self._original_run = self.run
|
||||
self.run = self._new_run
|
||||
self._original_started = self.started
|
||||
self.started = StartedSignalWrapper(self, self.started)
|
||||
|
||||
def _exec_run(self):
|
||||
set_trace_in_qt()
|
||||
self.exec_()
|
||||
return None
|
||||
|
||||
def _new_run(self):
|
||||
set_trace_in_qt()
|
||||
return self._original_run()
|
||||
|
||||
class RunnableWrapper(QtCore.QRunnable): # Wrapper for QRunnable
|
||||
|
||||
def __init__(self, *args, **kwargs):
|
||||
_original_runnable_init(self, *args, **kwargs)
|
||||
|
||||
self._original_run = self.run
|
||||
self.run = self._new_run
|
||||
|
||||
def _new_run(self):
|
||||
set_trace_in_qt()
|
||||
return self._original_run()
|
||||
|
||||
QtCore.QThread = ThreadWrapper
|
||||
QtCore.QRunnable = RunnableWrapper
|
|
@ -0,0 +1 @@
|
|||
/home/runner/.cache/pip/pool/94/bd/ed/1929d0b338b3b5bc9e3f2ae4f5ebbec66800594d98430c6dbf6b77b68e
|
|
@ -0,0 +1,180 @@
|
|||
"""
|
||||
The UserModuleDeleter and runfile methods are copied from
|
||||
Spyder and carry their own license agreement.
|
||||
http://code.google.com/p/spyderlib/source/browse/spyderlib/widgets/externalshell/sitecustomize.py
|
||||
|
||||
Spyder License Agreement (MIT License)
|
||||
--------------------------------------
|
||||
|
||||
Copyright (c) 2009-2012 Pierre Raybaut
|
||||
|
||||
Permission is hereby granted, free of charge, to any person
|
||||
obtaining a copy of this software and associated documentation
|
||||
files (the "Software"), to deal in the Software without
|
||||
restriction, including without limitation the rights to use,
|
||||
copy, modify, merge, publish, distribute, sublicense, and/or sell
|
||||
copies of the Software, and to permit persons to whom the
|
||||
Software is furnished to do so, subject to the following
|
||||
conditions:
|
||||
|
||||
The above copyright notice and this permission notice shall be
|
||||
included in all copies or substantial portions of the Software.
|
||||
|
||||
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
|
||||
EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
|
||||
OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
|
||||
NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
|
||||
HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
|
||||
WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
|
||||
FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
|
||||
OTHER DEALINGS IN THE SOFTWARE.
|
||||
"""
|
||||
|
||||
import sys
|
||||
import os
|
||||
from _pydev_bundle._pydev_execfile import execfile
|
||||
|
||||
|
||||
# The following classes and functions are mainly intended to be used from
|
||||
# an interactive Python session
|
||||
class UserModuleDeleter:
|
||||
"""
|
||||
User Module Deleter (UMD) aims at deleting user modules
|
||||
to force Python to deeply reload them during import
|
||||
|
||||
pathlist [list]: ignore list in terms of module path
|
||||
namelist [list]: ignore list in terms of module name
|
||||
"""
|
||||
|
||||
def __init__(self, namelist=None, pathlist=None):
|
||||
if namelist is None:
|
||||
namelist = []
|
||||
self.namelist = namelist
|
||||
if pathlist is None:
|
||||
pathlist = []
|
||||
self.pathlist = pathlist
|
||||
try:
|
||||
# ignore all files in org.python.pydev/pysrc
|
||||
import pydev_pysrc, inspect
|
||||
self.pathlist.append(os.path.dirname(pydev_pysrc.__file__))
|
||||
except:
|
||||
pass
|
||||
self.previous_modules = list(sys.modules.keys())
|
||||
|
||||
def is_module_ignored(self, modname, modpath):
|
||||
for path in [sys.prefix] + self.pathlist:
|
||||
if modpath.startswith(path):
|
||||
return True
|
||||
else:
|
||||
return set(modname.split('.')) & set(self.namelist)
|
||||
|
||||
def run(self, verbose=False):
|
||||
"""
|
||||
Del user modules to force Python to deeply reload them
|
||||
|
||||
Do not del modules which are considered as system modules, i.e.
|
||||
modules installed in subdirectories of Python interpreter's binary
|
||||
Do not del C modules
|
||||
"""
|
||||
log = []
|
||||
modules_copy = dict(sys.modules)
|
||||
for modname, module in modules_copy.items():
|
||||
if modname == 'aaaaa':
|
||||
print(modname, module)
|
||||
print(self.previous_modules)
|
||||
if modname not in self.previous_modules:
|
||||
modpath = getattr(module, '__file__', None)
|
||||
if modpath is None:
|
||||
# *module* is a C module that is statically linked into the
|
||||
# interpreter. There is no way to know its path, so we
|
||||
# choose to ignore it.
|
||||
continue
|
||||
if not self.is_module_ignored(modname, modpath):
|
||||
log.append(modname)
|
||||
del sys.modules[modname]
|
||||
if verbose and log:
|
||||
print("\x1b[4;33m%s\x1b[24m%s\x1b[0m" % ("UMD has deleted",
|
||||
": " + ", ".join(log)))
|
||||
|
||||
|
||||
__umd__ = None
|
||||
|
||||
_get_globals_callback = None
|
||||
|
||||
|
||||
def _set_globals_function(get_globals):
|
||||
global _get_globals_callback
|
||||
_get_globals_callback = get_globals
|
||||
|
||||
|
||||
def _get_globals():
|
||||
"""Return current Python interpreter globals namespace"""
|
||||
if _get_globals_callback is not None:
|
||||
return _get_globals_callback()
|
||||
else:
|
||||
try:
|
||||
from __main__ import __dict__ as namespace
|
||||
except ImportError:
|
||||
try:
|
||||
# The import fails on IronPython
|
||||
import __main__
|
||||
namespace = __main__.__dict__
|
||||
except:
|
||||
namespace
|
||||
shell = namespace.get('__ipythonshell__')
|
||||
if shell is not None and hasattr(shell, 'user_ns'):
|
||||
# IPython 0.12+ kernel
|
||||
return shell.user_ns
|
||||
else:
|
||||
# Python interpreter
|
||||
return namespace
|
||||
return namespace
|
||||
|
||||
|
||||
def runfile(filename, args=None, wdir=None, namespace=None):
|
||||
"""
|
||||
Run filename
|
||||
args: command line arguments (string)
|
||||
wdir: working directory
|
||||
"""
|
||||
try:
|
||||
if hasattr(filename, 'decode'):
|
||||
filename = filename.decode('utf-8')
|
||||
except (UnicodeError, TypeError):
|
||||
pass
|
||||
global __umd__
|
||||
if os.environ.get("PYDEV_UMD_ENABLED", "").lower() == "true":
|
||||
if __umd__ is None:
|
||||
namelist = os.environ.get("PYDEV_UMD_NAMELIST", None)
|
||||
if namelist is not None:
|
||||
namelist = namelist.split(',')
|
||||
__umd__ = UserModuleDeleter(namelist=namelist)
|
||||
else:
|
||||
verbose = os.environ.get("PYDEV_UMD_VERBOSE", "").lower() == "true"
|
||||
__umd__.run(verbose=verbose)
|
||||
if args is not None and not isinstance(args, (bytes, str)):
|
||||
raise TypeError("expected a character buffer object")
|
||||
if namespace is None:
|
||||
namespace = _get_globals()
|
||||
if '__file__' in namespace:
|
||||
old_file = namespace['__file__']
|
||||
else:
|
||||
old_file = None
|
||||
namespace['__file__'] = filename
|
||||
sys.argv = [filename]
|
||||
if args is not None:
|
||||
for arg in args.split():
|
||||
sys.argv.append(arg)
|
||||
if wdir is not None:
|
||||
try:
|
||||
if hasattr(wdir, 'decode'):
|
||||
wdir = wdir.decode('utf-8')
|
||||
except (UnicodeError, TypeError):
|
||||
pass
|
||||
os.chdir(wdir)
|
||||
execfile(filename, namespace)
|
||||
sys.argv = ['']
|
||||
if old_file is None:
|
||||
del namespace['__file__']
|
||||
else:
|
||||
namespace['__file__'] = old_file
|
|
@ -0,0 +1 @@
|
|||
/home/runner/.cache/pip/pool/56/91/a9/d5265e0cc3e29adfbee5dabb2c37c1ea62b23c645c02c4140b0b851eec
|
|
@ -0,0 +1 @@
|
|||
/home/runner/.cache/pip/pool/e3/b0/c4/4298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855
|
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
|
@ -0,0 +1,857 @@
|
|||
from __future__ import nested_scopes
|
||||
|
||||
import fnmatch
|
||||
import os.path
|
||||
from _pydev_runfiles.pydev_runfiles_coverage import start_coverage_support
|
||||
from _pydevd_bundle.pydevd_constants import * # @UnusedWildImport
|
||||
import re
|
||||
import time
|
||||
|
||||
|
||||
#=======================================================================================================================
|
||||
# Configuration
|
||||
#=======================================================================================================================
|
||||
class Configuration:
|
||||
|
||||
def __init__(
|
||||
self,
|
||||
files_or_dirs='',
|
||||
verbosity=2,
|
||||
include_tests=None,
|
||||
tests=None,
|
||||
port=None,
|
||||
files_to_tests=None,
|
||||
jobs=1,
|
||||
split_jobs='tests',
|
||||
coverage_output_dir=None,
|
||||
coverage_include=None,
|
||||
coverage_output_file=None,
|
||||
exclude_files=None,
|
||||
exclude_tests=None,
|
||||
include_files=None,
|
||||
django=False,
|
||||
):
|
||||
self.files_or_dirs = files_or_dirs
|
||||
self.verbosity = verbosity
|
||||
self.include_tests = include_tests
|
||||
self.tests = tests
|
||||
self.port = port
|
||||
self.files_to_tests = files_to_tests
|
||||
self.jobs = jobs
|
||||
self.split_jobs = split_jobs
|
||||
self.django = django
|
||||
|
||||
if include_tests:
|
||||
assert isinstance(include_tests, (list, tuple))
|
||||
|
||||
if exclude_files:
|
||||
assert isinstance(exclude_files, (list, tuple))
|
||||
|
||||
if exclude_tests:
|
||||
assert isinstance(exclude_tests, (list, tuple))
|
||||
|
||||
self.exclude_files = exclude_files
|
||||
self.include_files = include_files
|
||||
self.exclude_tests = exclude_tests
|
||||
|
||||
self.coverage_output_dir = coverage_output_dir
|
||||
self.coverage_include = coverage_include
|
||||
self.coverage_output_file = coverage_output_file
|
||||
|
||||
def __str__(self):
|
||||
return '''Configuration
|
||||
- files_or_dirs: %s
|
||||
- verbosity: %s
|
||||
- tests: %s
|
||||
- port: %s
|
||||
- files_to_tests: %s
|
||||
- jobs: %s
|
||||
- split_jobs: %s
|
||||
|
||||
- include_files: %s
|
||||
- include_tests: %s
|
||||
|
||||
- exclude_files: %s
|
||||
- exclude_tests: %s
|
||||
|
||||
- coverage_output_dir: %s
|
||||
- coverage_include_dir: %s
|
||||
- coverage_output_file: %s
|
||||
|
||||
- django: %s
|
||||
''' % (
|
||||
self.files_or_dirs,
|
||||
self.verbosity,
|
||||
self.tests,
|
||||
self.port,
|
||||
self.files_to_tests,
|
||||
self.jobs,
|
||||
self.split_jobs,
|
||||
|
||||
self.include_files,
|
||||
self.include_tests,
|
||||
|
||||
self.exclude_files,
|
||||
self.exclude_tests,
|
||||
|
||||
self.coverage_output_dir,
|
||||
self.coverage_include,
|
||||
self.coverage_output_file,
|
||||
|
||||
self.django,
|
||||
)
|
||||
|
||||
|
||||
#=======================================================================================================================
|
||||
# parse_cmdline
|
||||
#=======================================================================================================================
|
||||
def parse_cmdline(argv=None):
|
||||
"""
|
||||
Parses command line and returns test directories, verbosity, test filter and test suites
|
||||
|
||||
usage:
|
||||
runfiles.py -v|--verbosity <level> -t|--tests <Test.test1,Test2> dirs|files
|
||||
|
||||
Multiprocessing options:
|
||||
jobs=number (with the number of jobs to be used to run the tests)
|
||||
split_jobs='module'|'tests'
|
||||
if == module, a given job will always receive all the tests from a module
|
||||
if == tests, the tests will be split independently of their originating module (default)
|
||||
|
||||
--exclude_files = comma-separated list of patterns with files to exclude (fnmatch style)
|
||||
--include_files = comma-separated list of patterns with files to include (fnmatch style)
|
||||
--exclude_tests = comma-separated list of patterns with test names to exclude (fnmatch style)
|
||||
|
||||
Note: if --tests is given, --exclude_files, --include_files and --exclude_tests are ignored!
|
||||
"""
|
||||
if argv is None:
|
||||
argv = sys.argv
|
||||
|
||||
verbosity = 2
|
||||
include_tests = None
|
||||
tests = None
|
||||
port = None
|
||||
jobs = 1
|
||||
split_jobs = 'tests'
|
||||
files_to_tests = {}
|
||||
coverage_output_dir = None
|
||||
coverage_include = None
|
||||
exclude_files = None
|
||||
exclude_tests = None
|
||||
include_files = None
|
||||
django = False
|
||||
|
||||
from _pydev_bundle._pydev_getopt import gnu_getopt
|
||||
optlist, dirs = gnu_getopt(
|
||||
argv[1:], "",
|
||||
[
|
||||
"verbosity=",
|
||||
"tests=",
|
||||
|
||||
"port=",
|
||||
"config_file=",
|
||||
|
||||
"jobs=",
|
||||
"split_jobs=",
|
||||
|
||||
"include_tests=",
|
||||
"include_files=",
|
||||
|
||||
"exclude_files=",
|
||||
"exclude_tests=",
|
||||
|
||||
"coverage_output_dir=",
|
||||
"coverage_include=",
|
||||
|
||||
"django="
|
||||
]
|
||||
)
|
||||
|
||||
for opt, value in optlist:
|
||||
if opt in ("-v", "--verbosity"):
|
||||
verbosity = value
|
||||
|
||||
elif opt in ("-p", "--port"):
|
||||
port = int(value)
|
||||
|
||||
elif opt in ("-j", "--jobs"):
|
||||
jobs = int(value)
|
||||
|
||||
elif opt in ("-s", "--split_jobs"):
|
||||
split_jobs = value
|
||||
if split_jobs not in ('module', 'tests'):
|
||||
raise AssertionError('Expected split to be either "module" or "tests". Was :%s' % (split_jobs,))
|
||||
|
||||
elif opt in ("-d", "--coverage_output_dir",):
|
||||
coverage_output_dir = value.strip()
|
||||
|
||||
elif opt in ("-i", "--coverage_include",):
|
||||
coverage_include = value.strip()
|
||||
|
||||
elif opt in ("-I", "--include_tests"):
|
||||
include_tests = value.split(',')
|
||||
|
||||
elif opt in ("-E", "--exclude_files"):
|
||||
exclude_files = value.split(',')
|
||||
|
||||
elif opt in ("-F", "--include_files"):
|
||||
include_files = value.split(',')
|
||||
|
||||
elif opt in ("-e", "--exclude_tests"):
|
||||
exclude_tests = value.split(',')
|
||||
|
||||
elif opt in ("-t", "--tests"):
|
||||
tests = value.split(',')
|
||||
|
||||
elif opt in ("--django",):
|
||||
django = value.strip() in ['true', 'True', '1']
|
||||
|
||||
elif opt in ("-c", "--config_file"):
|
||||
config_file = value.strip()
|
||||
if os.path.exists(config_file):
|
||||
f = open(config_file, 'r')
|
||||
try:
|
||||
config_file_contents = f.read()
|
||||
finally:
|
||||
f.close()
|
||||
|
||||
if config_file_contents:
|
||||
config_file_contents = config_file_contents.strip()
|
||||
|
||||
if config_file_contents:
|
||||
for line in config_file_contents.splitlines():
|
||||
file_and_test = line.split('|')
|
||||
if len(file_and_test) == 2:
|
||||
file, test = file_and_test
|
||||
if file in files_to_tests:
|
||||
files_to_tests[file].append(test)
|
||||
else:
|
||||
files_to_tests[file] = [test]
|
||||
|
||||
else:
|
||||
sys.stderr.write('Could not find config file: %s\n' % (config_file,))
|
||||
|
||||
if type([]) != type(dirs):
|
||||
dirs = [dirs]
|
||||
|
||||
ret_dirs = []
|
||||
for d in dirs:
|
||||
if '|' in d:
|
||||
# paths may come from the ide separated by |
|
||||
ret_dirs.extend(d.split('|'))
|
||||
else:
|
||||
ret_dirs.append(d)
|
||||
|
||||
verbosity = int(verbosity)
|
||||
|
||||
if tests:
|
||||
if verbosity > 4:
|
||||
sys.stdout.write('--tests provided. Ignoring --exclude_files, --exclude_tests and --include_files\n')
|
||||
exclude_files = exclude_tests = include_files = None
|
||||
|
||||
config = Configuration(
|
||||
ret_dirs,
|
||||
verbosity,
|
||||
include_tests,
|
||||
tests,
|
||||
port,
|
||||
files_to_tests,
|
||||
jobs,
|
||||
split_jobs,
|
||||
coverage_output_dir,
|
||||
coverage_include,
|
||||
exclude_files=exclude_files,
|
||||
exclude_tests=exclude_tests,
|
||||
include_files=include_files,
|
||||
django=django,
|
||||
)
|
||||
|
||||
if verbosity > 5:
|
||||
sys.stdout.write(str(config) + '\n')
|
||||
return config
|
||||
|
||||
|
||||
#=======================================================================================================================
|
||||
# PydevTestRunner
|
||||
#=======================================================================================================================
|
||||
class PydevTestRunner(object):
|
||||
""" finds and runs a file or directory of files as a unit test """
|
||||
|
||||
__py_extensions = ["*.py", "*.pyw"]
|
||||
__exclude_files = ["__init__.*"]
|
||||
|
||||
# Just to check that only this attributes will be written to this file
|
||||
__slots__ = [
|
||||
'verbosity', # Always used
|
||||
|
||||
'files_to_tests', # If this one is given, the ones below are not used
|
||||
|
||||
'files_or_dirs', # Files or directories received in the command line
|
||||
'include_tests', # The filter used to collect the tests
|
||||
'tests', # Strings with the tests to be run
|
||||
|
||||
'jobs', # Integer with the number of jobs that should be used to run the test cases
|
||||
'split_jobs', # String with 'tests' or 'module' (how should the jobs be split)
|
||||
|
||||
'configuration',
|
||||
'coverage',
|
||||
]
|
||||
|
||||
def __init__(self, configuration):
    """Capture the run parameters from *configuration* and fix up sys.path."""
    self.verbosity = configuration.verbosity
    self.jobs = configuration.jobs
    self.split_jobs = configuration.split_jobs
    self.configuration = configuration

    mapping = configuration.files_to_tests
    if mapping:
        # An explicit file -> tests mapping overrides files_or_dirs/tests.
        self.files_to_tests = mapping
        self.files_or_dirs = list(mapping.keys())
        self.tests = None
    else:
        self.files_to_tests = {}
        self.files_or_dirs = configuration.files_or_dirs
        self.tests = configuration.tests

    self.__adjust_path()
|
||||
|
||||
def __adjust_path(self):
    """Add the directory of the target files/dirs to the python path."""
    path_to_append = None
    for i, entry in enumerate(self.files_or_dirs):
        name = self.__unixify(entry)
        if os.path.isdir(name):
            if not name.endswith("/"):
                self.files_or_dirs[i] = name + "/"
            path_to_append = os.path.normpath(name)
        elif os.path.isfile(name):
            path_to_append = os.path.dirname(name)
        else:
            if not os.path.exists(name):
                block_line = '*' * 120
                sys.stderr.write('\n%s\n* PyDev test runner error: %s does not exist.\n%s\n' % (block_line, name, block_line))
                return
            raise RuntimeError("unknown type. \n%s\nshould be file or a directory.\n" % (name))
    if path_to_append is not None:
        # Add it as the last one (so, first things are resolved against the default dirs and
        # if none resolves, then we try a relative import).
        sys.path.append(path_to_append)
|
||||
|
||||
def __is_valid_py_file(self, fname):
    """Return True when *fname* has a recognized Python extension and matches
    none of the exclusion patterns.

    Bug fix: the previous implementation counted exclusion patterns that did
    NOT match, so with more than one exclusion pattern a file matching only
    some of them would wrongly be accepted. Behavior is identical for the
    current single-pattern exclusion list ("__init__.*").
    """
    for invalid_fname in self.__class__.__exclude_files:
        if fnmatch.fnmatch(fname, invalid_fname):
            return False
    for ext in self.__class__.__py_extensions:
        if fnmatch.fnmatch(fname, ext):
            return True
    return False
|
||||
|
||||
def __unixify(self, s):
    """Normalize *s* and convert OS-specific separators to forward slashes."""
    normalized = os.path.normpath(s)
    return normalized.replace(os.sep, "/")
|
||||
|
||||
def __importify(self, s, dir=False):
    """Turn a path into a dotted import string.

    For files, the ".py*" extension is dropped; returns None for file names
    containing more than one dot (not importable as a module). For
    directories (dir=True) the separators are simply converted to dots.
    """
    if dir:  # handle dir
        return s.replace("\\", "/").replace("/", ".")

    dirname, fname = os.path.split(s)
    if fname.count('.') > 1:
        # A file named xxx.xx.py is not a valid module, so, let's not load it.
        return None

    pieces = [dirname.replace("\\", "/").replace("/", "."), os.path.splitext(fname)[0]]
    if not pieces[0]:
        pieces = pieces[1:]
    return ".".join(pieces)
|
||||
|
||||
def __add_files(self, pyfiles, root, files):
    """Append to *pyfiles* every name in *files* that is a valid Python file.

    Used as the per-directory callback while walking the tree.
    """
    for fname in files:
        if self.__is_valid_py_file(fname):
            full_name = self.__unixify(os.path.join(root, fname))
            pyfiles.append(full_name)
|
||||
|
||||
def find_import_files(self):
    """Collect the list of python files that should be imported for testing.

    Walks each configured directory (pruning subdirectories that are not
    packages, i.e. have no __init__ file) and then applies the
    include_files/exclude_files filters from the configuration.
    """
    if self.files_to_tests:
        pyfiles = self.files_to_tests.keys()
    else:
        pyfiles = []

        for base_dir in self.files_or_dirs:
            if os.path.isdir(base_dir):
                for root, dirs, files in os.walk(base_dir):
                    # Keep only subdirectories that look like packages
                    # (have some form of __init__ file).
                    kept = []
                    for d in dirs:
                        for init in ['__init__.py', '__init__.pyo', '__init__.pyc', '__init__.pyw', '__init__$py.class']:
                            if os.path.exists(os.path.join(root, d, init).replace('\\', '/')):
                                kept.append(d)
                                break
                    dirs[:] = kept

                    self.__add_files(pyfiles, root, files)

            elif os.path.isfile(base_dir):
                pyfiles.append(base_dir)

    if self.configuration.exclude_files or self.configuration.include_files:
        filtered = []
        for f in pyfiles:
            basename = os.path.basename(f)
            add = True

            if self.configuration.include_files:
                # include_files takes precedence: a file must match one pattern.
                add = any(
                    fnmatch.fnmatchcase(basename, pat)
                    for pat in self.configuration.include_files)
                if not add and self.verbosity > 3:
                    sys.stdout.write('Skipped file: %s (did not match any include_files pattern: %s)\n' % (f, self.configuration.include_files))

            elif self.configuration.exclude_files:
                for pat in self.configuration.exclude_files:
                    if fnmatch.fnmatchcase(basename, pat):
                        if self.verbosity > 3:
                            sys.stdout.write('Skipped file: %s (matched exclude_files pattern: %s)\n' % (f, pat))
                        elif self.verbosity > 2:
                            sys.stdout.write('Skipped file: %s\n' % (f,))
                        add = False
                        break

            if add:
                if self.verbosity > 3:
                    sys.stdout.write('Adding file: %s for test discovery.\n' % (f,))
                filtered.append(f)

        pyfiles = filtered

    return pyfiles
|
||||
|
||||
def __get_module_from_str(self, modname, print_exception, pyfile):
    """Import dotted *modname* and return the leaf module (or None on failure).

    Importing "coilib40.subject.visu" returns the "visu" module, not the
    "coilib40" top-level package returned by __import__. On failure the
    captured traceback is optionally reported through the xml-rpc channel.
    """
    try:
        module = __import__(modname)
        for part in modname.split('.')[1:]:
            module = getattr(module, part)
        return module
    except:
        if print_exception:
            from _pydev_runfiles import pydev_runfiles_xml_rpc
            from _pydevd_bundle import pydevd_io
            buf_err = pydevd_io.start_redirect(keep_original_redirection=True, std='stderr')
            buf_out = pydevd_io.start_redirect(keep_original_redirection=True, std='stdout')
            try:
                import traceback
                traceback.print_exc()
                sys.stderr.write('ERROR: Module: %s could not be imported (file: %s).\n' % (modname, pyfile))
            finally:
                pydevd_io.end_redirect('stderr')
                pydevd_io.end_redirect('stdout')

            pydev_runfiles_xml_rpc.notifyTest(
                'error', buf_out.getvalue(), buf_err.getvalue(), pyfile, modname, 0)

        return None
|
||||
|
||||
def remove_duplicates_keeping_order(self, seq):
    """Return a list with duplicates removed, preserving first-seen order."""
    result = []
    seen = set()
    for item in seq:
        if item not in seen:
            seen.add(item)
            result.append(item)
    return result
|
||||
|
||||
def find_modules_from_files(self, pyfiles):
    """Map each file in *pyfiles* to an imported module.

    Returns a list of (pyfile, module, import_string) tuples; files whose
    module cannot be resolved against sys.path are reported and skipped.
    """
    # Let's make sure that the paths we want are in the pythonpath.
    imports = [(s, self.__importify(s)) for s in pyfiles]

    sys_path = self.remove_duplicates_keeping_order(
        [os.path.normpath(path) for path in sys.path])
    system_paths = [self.__importify(s, True) for s in sys_path]

    ret = []
    for pyfile, imp in imports:
        if imp is None:
            continue  # can happen if a file is not a valid module

        # Every sys.path prefix the dotted name starts with yields one
        # candidate import string.
        choices = []
        for prefix in system_paths:
            if imp.startswith(prefix):
                candidate = imp[len(prefix) + 1:]
                if candidate:
                    choices.append(candidate)

        if not choices:
            sys.stdout.write('PYTHONPATH not found for file: %s\n' % imp)
        else:
            for i, import_str in enumerate(choices):
                # Only print the import exception for the last candidate tried.
                print_exception = i == len(choices) - 1
                mod = self.__get_module_from_str(import_str, print_exception, pyfile)
                if mod is not None:
                    ret.append((pyfile, mod, import_str))
                    break

    return ret
|
||||
|
||||
#===================================================================================================================
|
||||
# GetTestCaseNames
|
||||
#===================================================================================================================
|
||||
class GetTestCaseNames:
|
||||
"""Yes, we need a class for that (cannot use outer context on jython 2.1)"""
|
||||
|
||||
def __init__(self, accepted_classes, accepted_methods):
|
||||
self.accepted_classes = accepted_classes
|
||||
self.accepted_methods = accepted_methods
|
||||
|
||||
def __call__(self, testCaseClass):
|
||||
"""Return a sorted sequence of method names found within testCaseClass"""
|
||||
testFnNames = []
|
||||
className = testCaseClass.__name__
|
||||
|
||||
if className in self.accepted_classes:
|
||||
for attrname in dir(testCaseClass):
|
||||
# If a class is chosen, we select all the 'test' methods'
|
||||
if attrname.startswith('test') and hasattr(getattr(testCaseClass, attrname), '__call__'):
|
||||
testFnNames.append(attrname)
|
||||
|
||||
else:
|
||||
for attrname in dir(testCaseClass):
|
||||
# If we have the class+method name, we must do a full check and have an exact match.
|
||||
if className + '.' + attrname in self.accepted_methods:
|
||||
if hasattr(getattr(testCaseClass, attrname), '__call__'):
|
||||
testFnNames.append(attrname)
|
||||
|
||||
# sorted() is not available in jython 2.1
|
||||
testFnNames.sort()
|
||||
return testFnNames
|
||||
|
||||
def _decorate_test_suite(self, suite, pyfile, module_name):
    """Tag *suite* and its children with pyfile/module-name attributes.

    Returns True when the suite (transitively) contains at least one
    TestCase, so empty suites can be discarded by the caller.
    """
    import unittest
    if isinstance(suite, unittest.TestCase):
        return True

    if not isinstance(suite, unittest.TestSuite):
        return False

    suite.__pydev_pyfile__ = pyfile
    suite.__pydev_module_name__ = module_name

    has_test_case = False
    for child in suite._tests:
        child.__pydev_pyfile__ = pyfile
        child.__pydev_module_name__ = module_name
        if self._decorate_test_suite(child, pyfile, module_name):
            has_test_case = True

    return has_test_case
|
||||
|
||||
def find_tests_from_modules(self, file_and_modules_and_module_name):
    """Build the unittest suites to run from the imported modules."""
    # Use our own suite!
    from _pydev_runfiles import pydev_runfiles_unittest
    import unittest
    unittest.TestLoader.suiteClass = pydev_runfiles_unittest.PydevTestSuite
    loader = unittest.TestLoader()

    ret = []
    if self.files_to_tests:
        # Per-file selection: each file carries its own accepted methods.
        for pyfile, m, module_name in file_and_modules_and_module_name:
            accepted_methods = {}
            for t in self.files_to_tests[pyfile]:
                accepted_methods[t] = t

            loader.getTestCaseNames = self.GetTestCaseNames({}, accepted_methods)

            suite = loader.loadTestsFromModule(m)
            if self._decorate_test_suite(suite, pyfile, module_name):
                ret.append(suite)
        return ret

    if self.tests:
        # Global selection: a 'Class' entry accepts the whole class, a
        # 'Class.method' entry accepts a single method.
        accepted_classes = {}
        accepted_methods = {}
        for t in self.tests:
            parts = t.split('.')
            if len(parts) == 1:
                accepted_classes[t] = t
            elif len(parts) == 2:
                accepted_methods[t] = t

        loader.getTestCaseNames = self.GetTestCaseNames(accepted_classes, accepted_methods)

    for pyfile, m, module_name in file_and_modules_and_module_name:
        suite = loader.loadTestsFromModule(m)
        if self._decorate_test_suite(suite, pyfile, module_name):
            ret.append(suite)

    return ret
|
||||
|
||||
def filter_tests(self, test_objs, internal_call=False):
    """Keep only the tests whose names pass the include/exclude/name filters."""
    import unittest
    if not internal_call:
        if not self.configuration.include_tests and not self.tests and not self.configuration.exclude_tests:
            # Nothing to filter against: return everything untouched.
            return test_objs

        if self.verbosity > 1:
            if self.configuration.include_tests:
                sys.stdout.write('Tests to include: %s\n' % (self.configuration.include_tests,))
            if self.tests:
                sys.stdout.write('Tests to run: %s\n' % (self.tests,))
            if self.configuration.exclude_tests:
                sys.stdout.write('Tests to exclude: %s\n' % (self.configuration.exclude_tests,))

    filtered = []
    for test_obj in test_objs:

        if isinstance(test_obj, unittest.TestSuite):
            # Keep the suites as they are and just 'fix' the contained tests
            # (so, don't use iter_tests here).
            if test_obj._tests:
                test_obj._tests = self.filter_tests(test_obj._tests, True)
                if test_obj._tests:  # Only keep the suite if tests remain.
                    filtered.append(test_obj)
            continue

        if not isinstance(test_obj, unittest.TestCase):
            continue

        try:
            method_name = test_obj._TestCase__testMethodName
        except AttributeError:
            # changed in python 2.5
            method_name = test_obj._testMethodName

        excluded = False
        if self.configuration.exclude_tests:
            for pat in self.configuration.exclude_tests:
                if fnmatch.fnmatchcase(method_name, pat):
                    if self.verbosity > 3:
                        sys.stdout.write('Skipped test: %s (matched exclude_tests pattern: %s)\n' % (method_name, pat))
                    elif self.verbosity > 2:
                        sys.stdout.write('Skipped test: %s\n' % (method_name,))
                    excluded = True
                    break
        if excluded:
            continue

        if self.__match_tests(self.tests, test_obj, method_name):
            include = True
            if self.configuration.include_tests:
                include = any(
                    fnmatch.fnmatchcase(method_name, pat)
                    for pat in self.configuration.include_tests)
            if include:
                filtered.append(test_obj)
            elif self.verbosity > 3:
                sys.stdout.write('Skipped test: %s (did not match any include_tests pattern %s)\n' % (
                    method_name, self.configuration.include_tests,))
    return filtered
|
||||
|
||||
def iter_tests(self, test_objs):
    """Flatten nested suites into a plain list of TestCase instances.

    (Not a generator: yield is unavailable on Jython 2.1.)
    """
    import unittest
    flat = []
    for obj in test_objs:
        if isinstance(obj, unittest.TestSuite):
            flat.extend(self.iter_tests(obj._tests))
        elif isinstance(obj, unittest.TestCase):
            flat.append(obj)
    return flat
|
||||
|
||||
def list_test_names(self, test_objs):
    """Return the method name of every test case reachable from *test_objs*."""
    names = []
    for tc in self.iter_tests(test_objs):
        try:
            names.append(tc._TestCase__testMethodName)
        except AttributeError:
            # changed in python 2.5
            names.append(tc._testMethodName)
    return names
|
||||
|
||||
def __match_tests(self, tests, test_case, test_method_name):
    """Return 1 when the test is selected by *tests*, else 0.

    Entries may be 'ClassName' (accept the whole class) or
    'ClassName.methodName' (accept a single method). An empty selection
    accepts everything.
    """
    if not tests:
        return 1

    class_name = test_case.__class__.__name__
    for entry in tests:
        parts = entry.split('.')
        if len(parts) == 1:
            # only class name
            if parts[0] == class_name:
                return 1
        elif len(parts) == 2:
            if parts[0] == class_name and parts[1] == test_method_name:
                return 1

    return 0
|
||||
|
||||
def __match(self, filter_list, name):
    """Return 1 when *name* matches any regex in *filter_list* (None matches all)."""
    if filter_list is None:
        return 1
    for pattern in filter_list:
        if re.match(pattern, name):
            return 1
    return 0
|
||||
|
||||
def run_tests(self, handle_coverage=True):
    """Discover, filter and execute all the tests (optionally under coverage)."""
    sys.stdout.write("Finding files... ")
    files = self.find_import_files()
    if self.verbosity > 3:
        sys.stdout.write('%s ... done.\n' % (self.files_or_dirs))
    else:
        sys.stdout.write('done.\n')
    sys.stdout.write("Importing test modules ... ")

    if handle_coverage:
        coverage_files, coverage = start_coverage_support(self.configuration)

    file_and_modules_and_module_name = self.find_modules_from_files(files)
    sys.stdout.write("done.\n")

    all_tests = self.filter_tests(
        self.find_tests_from_modules(file_and_modules_and_module_name))

    from _pydev_runfiles import pydev_runfiles_unittest
    test_suite = pydev_runfiles_unittest.PydevTestSuite(all_tests)
    from _pydev_runfiles import pydev_runfiles_xml_rpc
    pydev_runfiles_xml_rpc.notifyTestsCollected(test_suite.countTestCases())

    start_time = time.time()

    def run_tests():
        executed_in_parallel = False
        if self.jobs > 1:
            from _pydev_runfiles import pydev_runfiles_parallel

            # What may happen is that the number of jobs needed is lower than the number of jobs requested
            # (e.g.: 2 jobs were requested for running 1 test) -- in which case execute_tests_in_parallel will
            # return False and won't run any tests.
            executed_in_parallel = pydev_runfiles_parallel.execute_tests_in_parallel(
                all_tests, self.jobs, self.split_jobs, self.verbosity, coverage_files, self.configuration.coverage_include)

        if not executed_in_parallel:
            # If in coverage, we don't need to pass anything here (coverage is already enabled for this execution).
            runner = pydev_runfiles_unittest.PydevTextTestRunner(stream=sys.stdout, descriptions=1, verbosity=self.verbosity)
            sys.stdout.write('\n')
            runner.run(test_suite)

    if self.configuration.django:
        get_django_test_suite_runner()(run_tests).run_tests([])
    else:
        run_tests()

    if handle_coverage:
        coverage.stop()
        coverage.save()

    total_time = 'Finished in: %.2f secs.' % (time.time() - start_time,)
    pydev_runfiles_xml_rpc.notifyTestRunFinished(total_time)
|
||||
|
||||
|
||||
DJANGO_TEST_SUITE_RUNNER = None


def get_django_test_suite_runner():
    """Return (and cache) the runner class used to wrap django test execution.

    Supports django >= 1.8 (DiscoverRunner) and falls back to the old
    django.test.simple.DjangoTestSuiteRunner; when neither can be imported,
    a stub that raises AssertionError on use is provided.
    """
    global DJANGO_TEST_SUITE_RUNNER
    if DJANGO_TEST_SUITE_RUNNER:
        return DJANGO_TEST_SUITE_RUNNER

    _needs_setup = False
    try:
        # django >= 1.8
        import django
        from django.test.runner import DiscoverRunner
        _base = DiscoverRunner
        _needs_setup = True
    except:
        # django < 1.8
        try:
            from django.test.simple import DjangoTestSuiteRunner
        except:

            class DjangoTestSuiteRunner:
                """Placeholder used when no django runner can be imported."""

                def __init__(self):
                    pass

                def run_tests(self, *args, **kwargs):
                    raise AssertionError("Unable to run suite with django.test.runner.DiscoverRunner nor django.test.simple.DjangoTestSuiteRunner because it couldn't be imported.")

        _base = DjangoTestSuiteRunner

    class MyDjangoTestSuiteRunner(_base):
        """Adapter: delegates the actual suite execution to a callable."""

        def __init__(self, on_run_suite):
            if _needs_setup:
                django.setup()
            _base.__init__(self)
            self.on_run_suite = on_run_suite

        def build_suite(self, *args, **kwargs):
            pass

        def suite_result(self, *args, **kwargs):
            pass

        def run_suite(self, *args, **kwargs):
            self.on_run_suite()

    DJANGO_TEST_SUITE_RUNNER = MyDjangoTestSuiteRunner
    return DJANGO_TEST_SUITE_RUNNER
|
||||
|
||||
|
||||
#=======================================================================================================================
|
||||
# main
|
||||
#=======================================================================================================================
|
||||
def main(configuration):
    """Entry point: build a PydevTestRunner for *configuration* and run it."""
    runner = PydevTestRunner(configuration)
    runner.run_tests()
|
|
@ -0,0 +1 @@
|
|||
/home/runner/.cache/pip/pool/b1/68/c0/446b75fb84995d80cd54c2f6d1b59ddc8046e6cb4f82ced0dc29b8b9bf
|
|
@ -0,0 +1,207 @@
|
|||
from nose.plugins.multiprocess import MultiProcessTestRunner # @UnresolvedImport
|
||||
from nose.plugins.base import Plugin # @UnresolvedImport
|
||||
import sys
|
||||
from _pydev_runfiles import pydev_runfiles_xml_rpc
|
||||
import time
|
||||
from _pydev_runfiles.pydev_runfiles_coverage import start_coverage_support
|
||||
from contextlib import contextmanager
|
||||
from io import StringIO
|
||||
import traceback
|
||||
|
||||
|
||||
#=======================================================================================================================
|
||||
# PydevPlugin
|
||||
#=======================================================================================================================
|
||||
class PydevPlugin(Plugin):
    """Nose plugin that reports test results back to pydev over xml-rpc."""

    def __init__(self, configuration):
        self.configuration = configuration
        Plugin.__init__(self)

    def begin(self):
        # Called before any test is run (it's always called, with multiprocess or not).
        self.start_time = time.time()
        self.coverage_files, self.coverage = start_coverage_support(self.configuration)

    def finalize(self, result):
        # Called after all tests are run (it's always called, with multiprocess or not).
        self.coverage.stop()
        self.coverage.save()

        pydev_runfiles_xml_rpc.notifyTestRunFinished('Finished in: %.2f secs.' % (time.time() - self.start_time,))

    #===================================================================================================================
    # Methods below are not called with multiprocess (so, we monkey-patch MultiProcessTestRunner.consolidate
    # so that they're called, but unfortunately we loose some info -- i.e.: the time for each test in this
    # process).
    #===================================================================================================================

    class Sentinel(object):
        # Unique marker meaning "attribute was not present".
        pass

    @contextmanager
    def _without_user_address(self, test):
        # #PyDev-1095: Conflict between address in test and test.address() in PydevPlugin().report_cond()
        instance = test.test
        saved_address = self.Sentinel
        saved_class_address = self.Sentinel
        try:
            if 'address' in instance.__dict__:
                saved_address = instance.__dict__.pop('address')
        except:
            # Just ignore anything here.
            pass
        try:
            saved_class_address = instance.__class__.address
            del instance.__class__.address
        except:
            # Just ignore anything here.
            pass

        try:
            yield
        finally:
            if saved_address is not self.Sentinel:
                instance.__dict__['address'] = saved_address
            if saved_class_address is not self.Sentinel:
                instance.__class__.address = saved_class_address

    def _get_test_address(self, test):
        """Return a (location, test_name) pair for *test*, best-effort."""
        try:
            if hasattr(test, 'address'):
                with self._without_user_address(test):
                    address = test.address()
                # test.address() is something as:
                # ('D:\\workspaces\\temp\\test_workspace\\pytesting1\\src\\mod1\\hello.py', 'mod1.hello', 'TestCase.testMet1')
                # and we must pass: location, test
                # E.g.: ['D:\\src\\mod1\\hello.py', 'TestCase.testMet1']
                address = address[0], address[2]
            else:
                # multiprocess
                try:
                    address = test[0], test[1]
                except TypeError:
                    # It may be an error at setup, in which case it's not really a test, but a Context object.
                    f = test.context.__file__
                    if f.endswith('.pyc'):
                        f = f[:-1]
                    elif f.endswith('$py.class'):
                        f = f[:-len('$py.class')] + '.py'
                    address = f, '?'
        except:
            sys.stderr.write("PyDev: Internal pydev error getting test address. Please report at the pydev bug tracker\n")
            traceback.print_exc()
            sys.stderr.write("\n\n\n")
            address = '?', '?'
        return address

    def report_cond(self, cond, test, captured_output, error=''):
        '''
        @param cond: fail, error, ok
        '''
        address = self._get_test_address(test)
        error_contents = self.get_io_from_error(error)
        try:
            time_str = '%.2f' % (time.time() - test._pydev_start_time)
        except:
            time_str = '?'

        pydev_runfiles_xml_rpc.notifyTest(cond, captured_output, error_contents, address[0], address[1], time_str)

    def startTest(self, test):
        test._pydev_start_time = time.time()
        file, test = self._get_test_address(test)
        pydev_runfiles_xml_rpc.notifyStartTest(file, test)

    def get_io_from_error(self, err):
        """Convert *err* (an exc_info tuple or a string) into printable text."""
        if type(err) == type(()):
            if len(err) != 3:
                if len(err) == 2:
                    return err[1]  # multiprocess
            buf = StringIO()
            etype, value, tb = err
            if isinstance(value, str):
                return value
            traceback.print_exception(etype, value, tb, file=buf)
            return buf.getvalue()
        return err

    def get_captured_output(self, test):
        if hasattr(test, 'capturedOutput') and test.capturedOutput:
            return test.capturedOutput
        return ''

    def addError(self, test, err):
        self.report_cond('error', test, self.get_captured_output(test), err)

    def addFailure(self, test, err):
        self.report_cond('fail', test, self.get_captured_output(test), err)

    def addSuccess(self, test):
        self.report_cond('ok', test, self.get_captured_output(test), '')
|
||||
|
||||
|
||||
PYDEV_NOSE_PLUGIN_SINGLETON = None


def start_pydev_nose_plugin_singleton(configuration):
    """Create (and store globally) the PydevPlugin used by this process."""
    global PYDEV_NOSE_PLUGIN_SINGLETON
    PYDEV_NOSE_PLUGIN_SINGLETON = PydevPlugin(configuration)
    return PYDEV_NOSE_PLUGIN_SINGLETON
|
||||
|
||||
|
||||
original = MultiProcessTestRunner.consolidate


#=======================================================================================================================
# new_consolidate
#=======================================================================================================================
def new_consolidate(self, result, batch_result):
    '''
    Used so that it can work with the multiprocess plugin.
    Monkeypatched because nose seems a bit unsupported at this time (ideally
    the plugin would have this support by default).
    '''
    ret = original(self, result, batch_result)

    # 'addr' lives in the caller's frame and is something as
    # D:\pytesting1\src\mod1\hello.py:TestCase.testMet4 -- convert it to the
    # (location, test) pair that report_cond expects.
    caller_locals = sys._getframe().f_back.f_locals
    addr = caller_locals['addr']
    split_at = addr.rindex(':')
    addr = [addr[:split_at], addr[split_at + 1:]]

    output, testsRun, failures, errors, errorClasses = batch_result
    if not failures and not errors:
        PYDEV_NOSE_PLUGIN_SINGLETON.report_cond('ok', addr, output)
    else:
        for failure in failures:
            PYDEV_NOSE_PLUGIN_SINGLETON.report_cond('fail', addr, output, failure)
        for error in errors:
            PYDEV_NOSE_PLUGIN_SINGLETON.report_cond('error', addr, output, error)

    return ret


MultiProcessTestRunner.consolidate = new_consolidate
|
|
@ -0,0 +1,267 @@
|
|||
import unittest
|
||||
from _pydev_bundle._pydev_saved_modules import thread
|
||||
import queue as Queue
|
||||
from _pydev_runfiles import pydev_runfiles_xml_rpc
|
||||
import time
|
||||
import os
|
||||
import threading
|
||||
import sys
|
||||
|
||||
|
||||
#=======================================================================================================================
|
||||
# flatten_test_suite
|
||||
#=======================================================================================================================
|
||||
def flatten_test_suite(test_suite, ret):
    """Recursively collect every TestCase inside *test_suite* into *ret*."""
    if isinstance(test_suite, unittest.TestSuite):
        for child in test_suite._tests:
            flatten_test_suite(child, ret)
    elif isinstance(test_suite, unittest.TestCase):
        ret.append(test_suite)
|
||||
|
||||
|
||||
#=======================================================================================================================
|
||||
# execute_tests_in_parallel
|
||||
#=======================================================================================================================
|
||||
def execute_tests_in_parallel(tests, jobs, split, verbosity, coverage_files, coverage_include):
|
||||
'''
|
||||
@param tests: list(PydevTestSuite)
|
||||
A list with the suites to be run
|
||||
|
||||
@param split: str
|
||||
Either 'module' or the number of tests that should be run in each batch
|
||||
|
||||
@param coverage_files: list(file)
|
||||
A list with the files that should be used for giving coverage information (if empty, coverage information
|
||||
should not be gathered).
|
||||
|
||||
@param coverage_include: str
|
||||
The pattern that should be included in the coverage.
|
||||
|
||||
@return: bool
|
||||
Returns True if the tests were actually executed in parallel. If the tests were not executed because only 1
|
||||
should be used (e.g.: 2 jobs were requested for running 1 test), False will be returned and no tests will be
|
||||
run.
|
||||
|
||||
It may also return False if in debug mode (in which case, multi-processes are not accepted)
|
||||
'''
|
||||
try:
|
||||
from _pydevd_bundle.pydevd_comm import get_global_debugger
|
||||
if get_global_debugger() is not None:
|
||||
return False
|
||||
except:
|
||||
pass # Ignore any error here.
|
||||
|
||||
# This queue will receive the tests to be run. Each entry in a queue is a list with the tests to be run together When
|
||||
# split == 'tests', each list will have a single element, when split == 'module', each list will have all the tests
|
||||
# from a given module.
|
||||
tests_queue = []
|
||||
|
||||
queue_elements = []
|
||||
if split == 'module':
|
||||
module_to_tests = {}
|
||||
for test in tests:
|
||||
lst = []
|
||||
flatten_test_suite(test, lst)
|
||||
for test in lst:
|
||||
key = (test.__pydev_pyfile__, test.__pydev_module_name__)
|
||||
module_to_tests.setdefault(key, []).append(test)
|
||||
|
||||
for key, tests in module_to_tests.items():
|
||||
queue_elements.append(tests)
|
||||
|
||||
if len(queue_elements) < jobs:
|
||||
# Don't create jobs we will never use.
|
||||
jobs = len(queue_elements)
|
||||
|
||||
elif split == 'tests':
|
||||
for test in tests:
|
||||
lst = []
|
||||
flatten_test_suite(test, lst)
|
||||
for test in lst:
|
||||
queue_elements.append([test])
|
||||
|
||||
if len(queue_elements) < jobs:
|
||||
# Don't create jobs we will never use.
|
||||
jobs = len(queue_elements)
|
||||
|
||||
else:
|
||||
raise AssertionError('Do not know how to handle: %s' % (split,))
|
||||
|
||||
for test_cases in queue_elements:
|
||||
test_queue_elements = []
|
||||
for test_case in test_cases:
|
||||
try:
|
||||
test_name = test_case.__class__.__name__ + "." + test_case._testMethodName
|
||||
except AttributeError:
|
||||
# Support for jython 2.1 (__testMethodName is pseudo-private in the test case)
|
||||
test_name = test_case.__class__.__name__ + "." + test_case._TestCase__testMethodName
|
||||
|
||||
test_queue_elements.append(test_case.__pydev_pyfile__ + '|' + test_name)
|
||||
|
||||
tests_queue.append(test_queue_elements)
|
||||
|
||||
if jobs < 2:
|
||||
return False
|
||||
|
||||
sys.stdout.write('Running tests in parallel with: %s jobs.\n' % (jobs,))
|
||||
|
||||
queue = Queue.Queue()
|
||||
for item in tests_queue:
|
||||
queue.put(item, block=False)
|
||||
|
||||
providers = []
|
||||
clients = []
|
||||
for i in range(jobs):
|
||||
test_cases_provider = CommunicationThread(queue)
|
||||
providers.append(test_cases_provider)
|
||||
|
||||
test_cases_provider.start()
|
||||
port = test_cases_provider.port
|
||||
|
||||
if coverage_files:
|
||||
clients.append(ClientThread(i, port, verbosity, coverage_files.pop(0), coverage_include))
|
||||
else:
|
||||
clients.append(ClientThread(i, port, verbosity))
|
||||
|
||||
for client in clients:
|
||||
client.start()
|
||||
|
||||
client_alive = True
|
||||
while client_alive:
|
||||
client_alive = False
|
||||
for client in clients:
|
||||
# Wait for all the clients to exit.
|
||||
if not client.finished:
|
||||
client_alive = True
|
||||
time.sleep(.2)
|
||||
break
|
||||
|
||||
for provider in providers:
|
||||
provider.shutdown()
|
||||
|
||||
return True
|
||||
|
||||
|
||||
#=======================================================================================================================
|
||||
# CommunicationThread
|
||||
#=======================================================================================================================
|
||||
class CommunicationThread(threading.Thread):
|
||||
|
||||
def __init__(self, tests_queue):
|
||||
threading.Thread.__init__(self)
|
||||
self.daemon = True
|
||||
self.queue = tests_queue
|
||||
self.finished = False
|
||||
from _pydev_bundle.pydev_imports import SimpleXMLRPCServer
|
||||
from _pydev_bundle import pydev_localhost
|
||||
|
||||
# Create server
|
||||
server = SimpleXMLRPCServer((pydev_localhost.get_localhost(), 0), logRequests=False)
|
||||
server.register_function(self.GetTestsToRun)
|
||||
server.register_function(self.notifyStartTest)
|
||||
server.register_function(self.notifyTest)
|
||||
server.register_function(self.notifyCommands)
|
||||
self.port = server.socket.getsockname()[1]
|
||||
self.server = server
|
||||
|
||||
def GetTestsToRun(self, job_id):
|
||||
'''
|
||||
@param job_id:
|
||||
|
||||
@return: list(str)
|
||||
Each entry is a string in the format: filename|Test.testName
|
||||
'''
|
||||
try:
|
||||
ret = self.queue.get(block=False)
|
||||
return ret
|
||||
except: # Any exception getting from the queue (empty or not) means we finished our work on providing the tests.
|
||||
self.finished = True
|
||||
return []
|
||||
|
||||
def notifyCommands(self, job_id, commands):
|
||||
# Batch notification.
|
||||
for command in commands:
|
||||
getattr(self, command[0])(job_id, *command[1], **command[2])
|
||||
|
||||
return True
|
||||
|
||||
def notifyStartTest(self, job_id, *args, **kwargs):
|
||||
pydev_runfiles_xml_rpc.notifyStartTest(*args, **kwargs)
|
||||
return True
|
||||
|
||||
def notifyTest(self, job_id, *args, **kwargs):
|
||||
pydev_runfiles_xml_rpc.notifyTest(*args, **kwargs)
|
||||
return True
|
||||
|
||||
def shutdown(self):
|
||||
if hasattr(self.server, 'shutdown'):
|
||||
self.server.shutdown()
|
||||
else:
|
||||
self._shutdown = True
|
||||
|
||||
def run(self):
|
||||
if hasattr(self.server, 'shutdown'):
|
||||
self.server.serve_forever()
|
||||
else:
|
||||
self._shutdown = False
|
||||
while not self._shutdown:
|
||||
self.server.handle_request()
|
||||
|
||||
|
||||
#=======================================================================================================================
|
||||
# Client
|
||||
#=======================================================================================================================
|
||||
class ClientThread(threading.Thread):
|
||||
|
||||
def __init__(self, job_id, port, verbosity, coverage_output_file=None, coverage_include=None):
|
||||
threading.Thread.__init__(self)
|
||||
self.daemon = True
|
||||
self.port = port
|
||||
self.job_id = job_id
|
||||
self.verbosity = verbosity
|
||||
self.finished = False
|
||||
self.coverage_output_file = coverage_output_file
|
||||
self.coverage_include = coverage_include
|
||||
|
||||
def _reader_thread(self, pipe, target):
|
||||
while True:
|
||||
target.write(pipe.read(1))
|
||||
|
||||
def run(self):
|
||||
try:
|
||||
from _pydev_runfiles import pydev_runfiles_parallel_client
|
||||
# TODO: Support Jython:
|
||||
#
|
||||
# For jython, instead of using sys.executable, we should use:
|
||||
# r'D:\bin\jdk_1_5_09\bin\java.exe',
|
||||
# '-classpath',
|
||||
# 'D:/bin/jython-2.2.1/jython.jar',
|
||||
# 'org.python.util.jython',
|
||||
|
||||
args = [
|
||||
sys.executable,
|
||||
pydev_runfiles_parallel_client.__file__,
|
||||
str(self.job_id),
|
||||
str(self.port),
|
||||
str(self.verbosity),
|
||||
]
|
||||
|
||||
if self.coverage_output_file and self.coverage_include:
|
||||
args.append(self.coverage_output_file)
|
||||
args.append(self.coverage_include)
|
||||
|
||||
import subprocess
|
||||
if False:
|
||||
proc = subprocess.Popen(args, env=os.environ, shell=False, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
|
||||
|
||||
thread.start_new_thread(self._reader_thread, (proc.stdout, sys.stdout))
|
||||
|
||||
thread.start_new_thread(target=self._reader_thread, args=(proc.stderr, sys.stderr))
|
||||
else:
|
||||
proc = subprocess.Popen(args, env=os.environ, shell=False)
|
||||
proc.wait()
|
||||
|
||||
finally:
|
||||
self.finished = True
|
||||
|
|
@ -0,0 +1 @@
|
|||
/home/runner/.cache/pip/pool/c3/09/7c/501a46452a93bf0f1509d9764bb1b43be4b25ed3a654520f2b3e2887d1
|
|
@ -0,0 +1,306 @@
|
|||
from _pydev_runfiles import pydev_runfiles_xml_rpc
|
||||
import pickle
|
||||
import zlib
|
||||
import base64
|
||||
import os
|
||||
from pydevd_file_utils import canonical_normalized_path
|
||||
import pytest
|
||||
import sys
|
||||
import time
|
||||
from pathlib import Path
|
||||
|
||||
#=========================================================================
|
||||
# Load filters with tests we should skip
|
||||
#=========================================================================
|
||||
py_test_accept_filter = None
|
||||
|
||||
|
||||
def _load_filters():
|
||||
global py_test_accept_filter
|
||||
if py_test_accept_filter is None:
|
||||
py_test_accept_filter = os.environ.get('PYDEV_PYTEST_SKIP')
|
||||
if py_test_accept_filter:
|
||||
py_test_accept_filter = pickle.loads(
|
||||
zlib.decompress(base64.b64decode(py_test_accept_filter)))
|
||||
|
||||
# Newer versions of pytest resolve symlinks, so, we
|
||||
# may need to filter with a resolved path too.
|
||||
new_dct = {}
|
||||
for filename, value in py_test_accept_filter.items():
|
||||
new_dct[canonical_normalized_path(str(Path(filename).resolve()))] = value
|
||||
|
||||
py_test_accept_filter.update(new_dct)
|
||||
|
||||
else:
|
||||
py_test_accept_filter = {}
|
||||
|
||||
|
||||
def is_in_xdist_node():
|
||||
main_pid = os.environ.get('PYDEV_MAIN_PID')
|
||||
if main_pid and main_pid != str(os.getpid()):
|
||||
return True
|
||||
return False
|
||||
|
||||
|
||||
connected = False
|
||||
|
||||
|
||||
def connect_to_server_for_communication_to_xml_rpc_on_xdist():
|
||||
global connected
|
||||
if connected:
|
||||
return
|
||||
connected = True
|
||||
if is_in_xdist_node():
|
||||
port = os.environ.get('PYDEV_PYTEST_SERVER')
|
||||
if not port:
|
||||
sys.stderr.write(
|
||||
'Error: no PYDEV_PYTEST_SERVER environment variable defined.\n')
|
||||
else:
|
||||
pydev_runfiles_xml_rpc.initialize_server(int(port), daemon=True)
|
||||
|
||||
|
||||
PY2 = sys.version_info[0] <= 2
|
||||
PY3 = not PY2
|
||||
|
||||
|
||||
class State:
|
||||
start_time = time.time()
|
||||
buf_err = None
|
||||
buf_out = None
|
||||
|
||||
|
||||
def start_redirect():
|
||||
if State.buf_out is not None:
|
||||
return
|
||||
from _pydevd_bundle import pydevd_io
|
||||
State.buf_err = pydevd_io.start_redirect(keep_original_redirection=True, std='stderr')
|
||||
State.buf_out = pydevd_io.start_redirect(keep_original_redirection=True, std='stdout')
|
||||
|
||||
|
||||
def get_curr_output():
|
||||
buf_out = State.buf_out
|
||||
buf_err = State.buf_err
|
||||
return buf_out.getvalue() if buf_out is not None else '', buf_err.getvalue() if buf_err is not None else ''
|
||||
|
||||
|
||||
def pytest_unconfigure():
|
||||
if is_in_xdist_node():
|
||||
return
|
||||
# Only report that it finished when on the main node (we don't want to report
|
||||
# the finish on each separate node).
|
||||
pydev_runfiles_xml_rpc.notifyTestRunFinished(
|
||||
'Finished in: %.2f secs.' % (time.time() - State.start_time,))
|
||||
|
||||
|
||||
def pytest_collection_modifyitems(session, config, items):
|
||||
# A note: in xdist, this is not called on the main process, only in the
|
||||
# secondary nodes, so, we'll actually make the filter and report it multiple
|
||||
# times.
|
||||
connect_to_server_for_communication_to_xml_rpc_on_xdist()
|
||||
|
||||
_load_filters()
|
||||
if not py_test_accept_filter:
|
||||
pydev_runfiles_xml_rpc.notifyTestsCollected(len(items))
|
||||
return # Keep on going (nothing to filter)
|
||||
|
||||
new_items = []
|
||||
for item in items:
|
||||
f = canonical_normalized_path(str(item.parent.fspath))
|
||||
name = item.name
|
||||
|
||||
if f not in py_test_accept_filter:
|
||||
# print('Skip file: %s' % (f,))
|
||||
continue # Skip the file
|
||||
|
||||
i = name.find('[')
|
||||
name_without_parametrize = None
|
||||
if i > 0:
|
||||
name_without_parametrize = name[:i]
|
||||
|
||||
accept_tests = py_test_accept_filter[f]
|
||||
|
||||
if item.cls is not None:
|
||||
class_name = item.cls.__name__
|
||||
else:
|
||||
class_name = None
|
||||
for test in accept_tests:
|
||||
if test == name:
|
||||
# Direct match of the test (just go on with the default
|
||||
# loading)
|
||||
new_items.append(item)
|
||||
break
|
||||
|
||||
if name_without_parametrize is not None and test == name_without_parametrize:
|
||||
# This happens when parameterizing pytest tests on older versions
|
||||
# of pytest where the test name doesn't include the fixture name
|
||||
# in it.
|
||||
new_items.append(item)
|
||||
break
|
||||
|
||||
if class_name is not None:
|
||||
if test == class_name + '.' + name:
|
||||
new_items.append(item)
|
||||
break
|
||||
|
||||
if name_without_parametrize is not None and test == class_name + '.' + name_without_parametrize:
|
||||
new_items.append(item)
|
||||
break
|
||||
|
||||
if class_name == test:
|
||||
new_items.append(item)
|
||||
break
|
||||
else:
|
||||
pass
|
||||
# print('Skip test: %s.%s. Accept: %s' % (class_name, name, accept_tests))
|
||||
|
||||
# Modify the original list
|
||||
items[:] = new_items
|
||||
pydev_runfiles_xml_rpc.notifyTestsCollected(len(items))
|
||||
|
||||
|
||||
try:
|
||||
"""
|
||||
pytest > 5.4 uses own version of TerminalWriter based on py.io.TerminalWriter
|
||||
and assumes there is a specific method TerminalWriter._write_source
|
||||
so try load pytest version first or fallback to default one
|
||||
"""
|
||||
from _pytest._io import TerminalWriter
|
||||
except ImportError:
|
||||
from py.io import TerminalWriter
|
||||
|
||||
|
||||
def _get_error_contents_from_report(report):
|
||||
if report.longrepr is not None:
|
||||
try:
|
||||
tw = TerminalWriter(stringio=True)
|
||||
stringio = tw.stringio
|
||||
except TypeError:
|
||||
import io
|
||||
stringio = io.StringIO()
|
||||
tw = TerminalWriter(file=stringio)
|
||||
tw.hasmarkup = False
|
||||
report.toterminal(tw)
|
||||
exc = stringio.getvalue()
|
||||
s = exc.strip()
|
||||
if s:
|
||||
return s
|
||||
|
||||
return ''
|
||||
|
||||
|
||||
def pytest_collectreport(report):
|
||||
error_contents = _get_error_contents_from_report(report)
|
||||
if error_contents:
|
||||
report_test('fail', '<collect errors>', '<collect errors>', '', error_contents, 0.0)
|
||||
|
||||
|
||||
def append_strings(s1, s2):
|
||||
if s1.__class__ == s2.__class__:
|
||||
return s1 + s2
|
||||
|
||||
# Prefer str
|
||||
if isinstance(s1, bytes):
|
||||
s1 = s1.decode('utf-8', 'replace')
|
||||
|
||||
if isinstance(s2, bytes):
|
||||
s2 = s2.decode('utf-8', 'replace')
|
||||
|
||||
return s1 + s2
|
||||
|
||||
|
||||
def pytest_runtest_logreport(report):
|
||||
if is_in_xdist_node():
|
||||
# When running with xdist, we don't want the report to be called from the node, only
|
||||
# from the main process.
|
||||
return
|
||||
report_duration = report.duration
|
||||
report_when = report.when
|
||||
report_outcome = report.outcome
|
||||
|
||||
if hasattr(report, 'wasxfail'):
|
||||
if report_outcome != 'skipped':
|
||||
report_outcome = 'passed'
|
||||
|
||||
if report_outcome == 'passed':
|
||||
# passed on setup/teardown: no need to report if in setup or teardown
|
||||
# (only on the actual test if it passed).
|
||||
if report_when in ('setup', 'teardown'):
|
||||
return
|
||||
|
||||
status = 'ok'
|
||||
|
||||
elif report_outcome == 'skipped':
|
||||
status = 'skip'
|
||||
|
||||
else:
|
||||
# It has only passed, skipped and failed (no error), so, let's consider
|
||||
# error if not on call.
|
||||
if report_when in ('setup', 'teardown'):
|
||||
status = 'error'
|
||||
|
||||
else:
|
||||
# any error in the call (not in setup or teardown) is considered a
|
||||
# regular failure.
|
||||
status = 'fail'
|
||||
|
||||
# This will work if pytest is not capturing it, if it is, nothing will
|
||||
# come from here...
|
||||
captured_output, error_contents = getattr(report, 'pydev_captured_output', ''), getattr(report, 'pydev_error_contents', '')
|
||||
for type_section, value in report.sections:
|
||||
if value:
|
||||
if type_section in ('err', 'stderr', 'Captured stderr call'):
|
||||
error_contents = append_strings(error_contents, value)
|
||||
else:
|
||||
captured_output = append_strings(error_contents, value)
|
||||
|
||||
filename = getattr(report, 'pydev_fspath_strpath', '<unable to get>')
|
||||
test = report.location[2]
|
||||
|
||||
if report_outcome != 'skipped':
|
||||
# On skipped, we'll have a traceback for the skip, which is not what we
|
||||
# want.
|
||||
exc = _get_error_contents_from_report(report)
|
||||
if exc:
|
||||
if error_contents:
|
||||
error_contents = append_strings(error_contents, '----------------------------- Exceptions -----------------------------\n')
|
||||
error_contents = append_strings(error_contents, exc)
|
||||
|
||||
report_test(status, filename, test, captured_output, error_contents, report_duration)
|
||||
|
||||
|
||||
def report_test(status, filename, test, captured_output, error_contents, duration):
|
||||
'''
|
||||
@param filename: 'D:\\src\\mod1\\hello.py'
|
||||
@param test: 'TestCase.testMet1'
|
||||
@param status: fail, error, ok
|
||||
'''
|
||||
time_str = '%.2f' % (duration,)
|
||||
pydev_runfiles_xml_rpc.notifyTest(
|
||||
status, captured_output, error_contents, filename, test, time_str)
|
||||
|
||||
|
||||
if not hasattr(pytest, 'hookimpl'):
|
||||
raise AssertionError('Please upgrade pytest (the current version of pytest: %s is unsupported)' % (pytest.__version__,))
|
||||
|
||||
|
||||
@pytest.hookimpl(hookwrapper=True)
|
||||
def pytest_runtest_makereport(item, call):
|
||||
outcome = yield
|
||||
report = outcome.get_result()
|
||||
report.pydev_fspath_strpath = item.fspath.strpath
|
||||
report.pydev_captured_output, report.pydev_error_contents = get_curr_output()
|
||||
|
||||
|
||||
@pytest.mark.tryfirst
|
||||
def pytest_runtest_setup(item):
|
||||
'''
|
||||
Note: with xdist will be on a secondary process.
|
||||
'''
|
||||
# We have our own redirection: if xdist does its redirection, we'll have
|
||||
# nothing in our contents (which is OK), but if it does, we'll get nothing
|
||||
# from pytest but will get our own here.
|
||||
start_redirect()
|
||||
filename = item.fspath.strpath
|
||||
test = item.location[2]
|
||||
|
||||
pydev_runfiles_xml_rpc.notifyStartTest(filename, test)
|
|
@ -0,0 +1,150 @@
|
|||
import unittest as python_unittest
|
||||
from _pydev_runfiles import pydev_runfiles_xml_rpc
|
||||
import time
|
||||
from _pydevd_bundle import pydevd_io
|
||||
import traceback
|
||||
from _pydevd_bundle.pydevd_constants import * # @UnusedWildImport
|
||||
from io import StringIO
|
||||
|
||||
|
||||
#=======================================================================================================================
|
||||
# PydevTextTestRunner
|
||||
#=======================================================================================================================
|
||||
class PydevTextTestRunner(python_unittest.TextTestRunner):
|
||||
|
||||
def _makeResult(self):
|
||||
return PydevTestResult(self.stream, self.descriptions, self.verbosity)
|
||||
|
||||
|
||||
_PythonTextTestResult = python_unittest.TextTestRunner()._makeResult().__class__
|
||||
|
||||
|
||||
#=======================================================================================================================
|
||||
# PydevTestResult
|
||||
#=======================================================================================================================
|
||||
class PydevTestResult(_PythonTextTestResult):
|
||||
|
||||
def addSubTest(self, test, subtest, err):
|
||||
"""Called at the end of a subtest.
|
||||
'err' is None if the subtest ended successfully, otherwise it's a
|
||||
tuple of values as returned by sys.exc_info().
|
||||
"""
|
||||
_PythonTextTestResult.addSubTest(self, test, subtest, err)
|
||||
if err is not None:
|
||||
subdesc = subtest._subDescription()
|
||||
error = (test, self._exc_info_to_string(err, test))
|
||||
self._reportErrors([error], [], '', '%s %s' % (self.get_test_name(test), subdesc))
|
||||
|
||||
def startTest(self, test):
|
||||
_PythonTextTestResult.startTest(self, test)
|
||||
self.buf = pydevd_io.start_redirect(keep_original_redirection=True, std='both')
|
||||
self.start_time = time.time()
|
||||
self._current_errors_stack = []
|
||||
self._current_failures_stack = []
|
||||
|
||||
try:
|
||||
test_name = test.__class__.__name__ + "." + test._testMethodName
|
||||
except AttributeError:
|
||||
# Support for jython 2.1 (__testMethodName is pseudo-private in the test case)
|
||||
test_name = test.__class__.__name__ + "." + test._TestCase__testMethodName
|
||||
|
||||
pydev_runfiles_xml_rpc.notifyStartTest(
|
||||
test.__pydev_pyfile__, test_name)
|
||||
|
||||
def get_test_name(self, test):
|
||||
try:
|
||||
try:
|
||||
test_name = test.__class__.__name__ + "." + test._testMethodName
|
||||
except AttributeError:
|
||||
# Support for jython 2.1 (__testMethodName is pseudo-private in the test case)
|
||||
try:
|
||||
test_name = test.__class__.__name__ + "." + test._TestCase__testMethodName
|
||||
# Support for class/module exceptions (test is instance of _ErrorHolder)
|
||||
except:
|
||||
test_name = test.description.split()[1][1:-1] + ' <' + test.description.split()[0] + '>'
|
||||
except:
|
||||
traceback.print_exc()
|
||||
return '<unable to get test name>'
|
||||
return test_name
|
||||
|
||||
def stopTest(self, test):
|
||||
end_time = time.time()
|
||||
pydevd_io.end_redirect(std='both')
|
||||
|
||||
_PythonTextTestResult.stopTest(self, test)
|
||||
|
||||
captured_output = self.buf.getvalue()
|
||||
del self.buf
|
||||
error_contents = ''
|
||||
test_name = self.get_test_name(test)
|
||||
|
||||
diff_time = '%.2f' % (end_time - self.start_time)
|
||||
|
||||
skipped = False
|
||||
outcome = getattr(test, '_outcome', None)
|
||||
if outcome is not None:
|
||||
skipped = bool(getattr(outcome, 'skipped', None))
|
||||
|
||||
if skipped:
|
||||
pydev_runfiles_xml_rpc.notifyTest(
|
||||
'skip', captured_output, error_contents, test.__pydev_pyfile__, test_name, diff_time)
|
||||
elif not self._current_errors_stack and not self._current_failures_stack:
|
||||
pydev_runfiles_xml_rpc.notifyTest(
|
||||
'ok', captured_output, error_contents, test.__pydev_pyfile__, test_name, diff_time)
|
||||
else:
|
||||
self._reportErrors(self._current_errors_stack, self._current_failures_stack, captured_output, test_name)
|
||||
|
||||
def _reportErrors(self, errors, failures, captured_output, test_name, diff_time=''):
|
||||
error_contents = []
|
||||
for test, s in errors + failures:
|
||||
if type(s) == type((1,)): # If it's a tuple (for jython 2.1)
|
||||
sio = StringIO()
|
||||
traceback.print_exception(s[0], s[1], s[2], file=sio)
|
||||
s = sio.getvalue()
|
||||
error_contents.append(s)
|
||||
|
||||
sep = '\n' + self.separator1
|
||||
error_contents = sep.join(error_contents)
|
||||
|
||||
if errors and not failures:
|
||||
try:
|
||||
pydev_runfiles_xml_rpc.notifyTest(
|
||||
'error', captured_output, error_contents, test.__pydev_pyfile__, test_name, diff_time)
|
||||
except:
|
||||
file_start = error_contents.find('File "')
|
||||
file_end = error_contents.find('", ', file_start)
|
||||
if file_start != -1 and file_end != -1:
|
||||
file = error_contents[file_start + 6:file_end]
|
||||
else:
|
||||
file = '<unable to get file>'
|
||||
pydev_runfiles_xml_rpc.notifyTest(
|
||||
'error', captured_output, error_contents, file, test_name, diff_time)
|
||||
|
||||
elif failures and not errors:
|
||||
pydev_runfiles_xml_rpc.notifyTest(
|
||||
'fail', captured_output, error_contents, test.__pydev_pyfile__, test_name, diff_time)
|
||||
|
||||
else: # Ok, we got both, errors and failures. Let's mark it as an error in the end.
|
||||
pydev_runfiles_xml_rpc.notifyTest(
|
||||
'error', captured_output, error_contents, test.__pydev_pyfile__, test_name, diff_time)
|
||||
|
||||
def addError(self, test, err):
|
||||
_PythonTextTestResult.addError(self, test, err)
|
||||
# Support for class/module exceptions (test is instance of _ErrorHolder)
|
||||
if not hasattr(self, '_current_errors_stack') or test.__class__.__name__ == '_ErrorHolder':
|
||||
# Not in start...end, so, report error now (i.e.: django pre/post-setup)
|
||||
self._reportErrors([self.errors[-1]], [], '', self.get_test_name(test))
|
||||
else:
|
||||
self._current_errors_stack.append(self.errors[-1])
|
||||
|
||||
def addFailure(self, test, err):
|
||||
_PythonTextTestResult.addFailure(self, test, err)
|
||||
if not hasattr(self, '_current_failures_stack'):
|
||||
# Not in start...end, so, report error now (i.e.: django pre/post-setup)
|
||||
self._reportErrors([], [self.failures[-1]], '', self.get_test_name(test))
|
||||
else:
|
||||
self._current_failures_stack.append(self.failures[-1])
|
||||
|
||||
|
||||
class PydevTestSuite(python_unittest.TestSuite):
|
||||
pass
|
|
@ -0,0 +1,257 @@
|
|||
import sys
|
||||
import threading
|
||||
import traceback
|
||||
import warnings
|
||||
|
||||
from _pydev_bundle._pydev_filesystem_encoding import getfilesystemencoding
|
||||
from _pydev_bundle.pydev_imports import xmlrpclib, _queue
|
||||
from _pydevd_bundle.pydevd_constants import Null
|
||||
|
||||
Queue = _queue.Queue
|
||||
|
||||
# This may happen in IronPython (in Python it shouldn't happen as there are
|
||||
# 'fast' replacements that are used in xmlrpclib.py)
|
||||
warnings.filterwarnings(
|
||||
'ignore', 'The xmllib module is obsolete.*', DeprecationWarning)
|
||||
|
||||
file_system_encoding = getfilesystemencoding()
|
||||
|
||||
|
||||
#=======================================================================================================================
|
||||
# _ServerHolder
|
||||
#=======================================================================================================================
|
||||
class _ServerHolder:
|
||||
'''
|
||||
Helper so that we don't have to use a global here.
|
||||
'''
|
||||
SERVER = None
|
||||
|
||||
|
||||
#=======================================================================================================================
|
||||
# set_server
|
||||
#=======================================================================================================================
|
||||
def set_server(server):
|
||||
_ServerHolder.SERVER = server
|
||||
|
||||
|
||||
#=======================================================================================================================
|
||||
# ParallelNotification
|
||||
#=======================================================================================================================
|
||||
class ParallelNotification(object):
|
||||
|
||||
def __init__(self, method, args):
|
||||
self.method = method
|
||||
self.args = args
|
||||
|
||||
def to_tuple(self):
|
||||
return self.method, self.args
|
||||
|
||||
|
||||
#=======================================================================================================================
|
||||
# KillServer
|
||||
#=======================================================================================================================
|
||||
class KillServer(object):
|
||||
pass
|
||||
|
||||
|
||||
#=======================================================================================================================
|
||||
# ServerFacade
|
||||
#=======================================================================================================================
|
||||
class ServerFacade(object):
|
||||
|
||||
def __init__(self, notifications_queue):
|
||||
self.notifications_queue = notifications_queue
|
||||
|
||||
def notifyTestsCollected(self, *args):
|
||||
self.notifications_queue.put_nowait(ParallelNotification('notifyTestsCollected', args))
|
||||
|
||||
def notifyConnected(self, *args):
|
||||
self.notifications_queue.put_nowait(ParallelNotification('notifyConnected', args))
|
||||
|
||||
def notifyTestRunFinished(self, *args):
|
||||
self.notifications_queue.put_nowait(ParallelNotification('notifyTestRunFinished', args))
|
||||
|
||||
def notifyStartTest(self, *args):
|
||||
self.notifications_queue.put_nowait(ParallelNotification('notifyStartTest', args))
|
||||
|
||||
def notifyTest(self, *args):
|
||||
new_args = []
|
||||
for arg in args:
|
||||
new_args.append(_encode_if_needed(arg))
|
||||
args = tuple(new_args)
|
||||
self.notifications_queue.put_nowait(ParallelNotification('notifyTest', args))
|
||||
|
||||
|
||||
#=======================================================================================================================
|
||||
# ServerComm
|
||||
#=======================================================================================================================
|
||||
class ServerComm(threading.Thread):
|
||||
|
||||
def __init__(self, notifications_queue, port, daemon=False):
|
||||
threading.Thread.__init__(self)
|
||||
self.setDaemon(daemon) # If False, wait for all the notifications to be passed before exiting!
|
||||
self.finished = False
|
||||
self.notifications_queue = notifications_queue
|
||||
|
||||
from _pydev_bundle import pydev_localhost
|
||||
|
||||
# It is necessary to specify an encoding, that matches
|
||||
# the encoding of all bytes-strings passed into an
|
||||
# XMLRPC call: "All 8-bit strings in the data structure are assumed to use the
|
||||
# packet encoding. Unicode strings are automatically converted,
|
||||
# where necessary."
|
||||
# Byte strings most likely come from file names.
|
||||
encoding = file_system_encoding
|
||||
if encoding == "mbcs":
|
||||
# Windos symbolic name for the system encoding CP_ACP.
|
||||
# We need to convert it into a encoding that is recognized by Java.
|
||||
# Unfortunately this is not always possible. You could use
|
||||
# GetCPInfoEx and get a name similar to "windows-1251". Then
|
||||
# you need a table to translate on a best effort basis. Much to complicated.
|
||||
# ISO-8859-1 is good enough.
|
||||
encoding = "ISO-8859-1"
|
||||
|
||||
self.server = xmlrpclib.Server('http://%s:%s' % (pydev_localhost.get_localhost(), port),
|
||||
encoding=encoding)
|
||||
|
||||
def run(self):
|
||||
while True:
|
||||
kill_found = False
|
||||
commands = []
|
||||
command = self.notifications_queue.get(block=True)
|
||||
if isinstance(command, KillServer):
|
||||
kill_found = True
|
||||
else:
|
||||
assert isinstance(command, ParallelNotification)
|
||||
commands.append(command.to_tuple())
|
||||
|
||||
try:
|
||||
while True:
|
||||
command = self.notifications_queue.get(block=False) # No block to create a batch.
|
||||
if isinstance(command, KillServer):
|
||||
kill_found = True
|
||||
else:
|
||||
assert isinstance(command, ParallelNotification)
|
||||
commands.append(command.to_tuple())
|
||||
except:
|
||||
pass # That's OK, we're getting it until it becomes empty so that we notify multiple at once.
|
||||
|
||||
if commands:
|
||||
try:
|
||||
self.server.notifyCommands(commands)
|
||||
except:
|
||||
traceback.print_exc()
|
||||
|
||||
if kill_found:
|
||||
self.finished = True
|
||||
return
|
||||
|
||||
|
||||
#=======================================================================================================================
|
||||
# initialize_server
|
||||
#=======================================================================================================================
|
||||
def initialize_server(port, daemon=False):
|
||||
if _ServerHolder.SERVER is None:
|
||||
if port is not None:
|
||||
notifications_queue = Queue()
|
||||
_ServerHolder.SERVER = ServerFacade(notifications_queue)
|
||||
_ServerHolder.SERVER_COMM = ServerComm(notifications_queue, port, daemon)
|
||||
_ServerHolder.SERVER_COMM.start()
|
||||
else:
|
||||
# Create a null server, so that we keep the interface even without any connection.
|
||||
_ServerHolder.SERVER = Null()
|
||||
_ServerHolder.SERVER_COMM = Null()
|
||||
|
||||
try:
|
||||
_ServerHolder.SERVER.notifyConnected()
|
||||
except:
|
||||
traceback.print_exc()
|
||||
|
||||
|
||||
#=======================================================================================================================
# notifyTestsCollected
#=======================================================================================================================
def notifyTestsCollected(tests_count):
    """Report to the connected server how many tests were collected."""
    assert tests_count is not None
    server = _ServerHolder.SERVER
    try:
        server.notifyTestsCollected(tests_count)
    except:
        traceback.print_exc()
|
||||
|
||||
|
||||
#=======================================================================================================================
# notifyStartTest
#=======================================================================================================================
def notifyStartTest(file, test):
    '''Notify the server that a test is about to be run.

    @param file: the tests file (c:/temp/test.py)
    @param test: the test ran (i.e.: TestCase.test1)
    '''
    assert file is not None
    # An import error while loading the module leaves us without a test name.
    test = test if test is not None else ''

    try:
        _ServerHolder.SERVER.notifyStartTest(file, test)
    except:
        traceback.print_exc()
|
||||
|
||||
|
||||
def _encode_if_needed(obj):
|
||||
# In the java side we expect strings to be ISO-8859-1 (org.python.pydev.debug.pyunit.PyUnitServer.initializeDispatches().new Dispatch() {...}.getAsStr(Object))
|
||||
if isinstance(obj, str): # Unicode in py3
|
||||
return xmlrpclib.Binary(obj.encode('ISO-8859-1', 'xmlcharrefreplace'))
|
||||
|
||||
elif isinstance(obj, bytes):
|
||||
try:
|
||||
return xmlrpclib.Binary(obj.decode(sys.stdin.encoding).encode('ISO-8859-1', 'xmlcharrefreplace'))
|
||||
except:
|
||||
return xmlrpclib.Binary(obj) # bytes already
|
||||
|
||||
return obj
|
||||
|
||||
|
||||
#=======================================================================================================================
# notifyTest
#=======================================================================================================================
def notifyTest(cond, captured_output, error_contents, file, test, time):
    '''Notify the server of the outcome of a single test.

    @param cond: ok, fail, error
    @param captured_output: output captured from stdout
    @param error_contents: output captured from stderr
    @param file: the tests file (c:/temp/test.py)
    @param test: the test ran (i.e.: TestCase.test1)
    @param time: float with the number of seconds elapsed
    '''
    assert cond is not None
    assert captured_output is not None
    assert error_contents is not None
    assert file is not None
    if test is None:
        # Could happen if we have an import error importing module.
        test = ''
    assert time is not None
    try:
        _ServerHolder.SERVER.notifyTest(
            cond,
            _encode_if_needed(captured_output),
            _encode_if_needed(error_contents),
            file,
            test,
            time,
        )
    except:
        traceback.print_exc()
|
||||
|
||||
|
||||
#=======================================================================================================================
# notifyTestRunFinished
#=======================================================================================================================
def notifyTestRunFinished(total_time):
    """Tell the server the whole test run ended.

    @param total_time: float with the number of seconds the run took.
    """
    assert total_time is not None
    server = _ServerHolder.SERVER
    try:
        server.notifyTestRunFinished(total_time)
    except:
        traceback.print_exc()
|
||||
|
||||
|
||||
#=======================================================================================================================
# force_server_kill
#=======================================================================================================================
def force_server_kill():
    """Queue a KillServer command so the communication thread shuts down."""
    queue = _ServerHolder.SERVER_COMM.notifications_queue
    queue.put_nowait(KillServer())
|
|
@ -0,0 +1 @@
|
|||
/home/runner/.cache/pip/pool/e3/b0/c4/4298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855
|
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Some files were not shown because too many files have changed in this diff Show more
Loading…
Add table
Add a link
Reference in a new issue