Add base vars and sudo check
This commit is contained in:
parent
c151fd6910
commit
054f5ad80c
8733 changed files with 137813 additions and 15 deletions
18
venv/lib/python3.8/site-packages/debugpy/common/__init__.py
Normal file
18
venv/lib/python3.8/site-packages/debugpy/common/__init__.py
Normal file
|
@ -0,0 +1,18 @@
|
|||
# Copyright (c) Microsoft Corporation. All rights reserved.
|
||||
# Licensed under the MIT License. See LICENSE in the project root
|
||||
# for license information.
|
||||
|
||||
from __future__ import annotations

import os
import typing

if typing.TYPE_CHECKING:
    __all__: list[str]

__all__ = []

# The lower time bound for assuming that the process hasn't spawned successfully.
# Overridable (in seconds) via the DEBUGPY_PROCESS_SPAWN_TIMEOUT environment variable.
PROCESS_SPAWN_TIMEOUT = float(os.getenv("DEBUGPY_PROCESS_SPAWN_TIMEOUT", 15))

# The lower time bound for assuming that the process hasn't exited gracefully.
# Overridable (in seconds) via the DEBUGPY_PROCESS_EXIT_TIMEOUT environment variable.
PROCESS_EXIT_TIMEOUT = float(os.getenv("DEBUGPY_PROCESS_EXIT_TIMEOUT", 5))
|
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
292
venv/lib/python3.8/site-packages/debugpy/common/json.py
Normal file
292
venv/lib/python3.8/site-packages/debugpy/common/json.py
Normal file
|
@ -0,0 +1,292 @@
|
|||
# Copyright (c) Microsoft Corporation. All rights reserved.
|
||||
# Licensed under the MIT License. See LICENSE in the project root
|
||||
# for license information.
|
||||
|
||||
"""Improved JSON serialization.
|
||||
"""
|
||||
|
||||
import builtins
|
||||
import json
|
||||
import numbers
|
||||
import operator
|
||||
|
||||
|
||||
JsonDecoder = json.JSONDecoder
|
||||
|
||||
|
||||
class JsonEncoder(json.JSONEncoder):
    """Customizable JSON encoder.

    If the object implements __getstate__, then that method is invoked, and its
    result is serialized instead of the object itself.
    """

    def default(self, value):
        # Prefer the object's pickling state, if it provides one; otherwise
        # defer to the base encoder (which raises TypeError for unknown types).
        get_state = getattr(value, "__getstate__", None)
        if get_state is not None:
            return get_state()
        return super().default(value)
|
||||
|
||||
|
||||
class JsonObject(object):
    """A wrapped Python object that formats itself as JSON when asked for a string
    representation via str() or format().
    """

    json_encoder_factory = JsonEncoder
    """Used by __format__ when format_spec is not empty."""

    json_encoder = json_encoder_factory(indent=4)
    """The default encoder used by __format__ when format_spec is empty."""

    def __init__(self, value):
        # Never double-wrap: the wrapped value must be the raw object.
        assert not isinstance(value, JsonObject)
        self.value = value

    def __getstate__(self):
        # Prevents JsonEncoder.default() from serializing the wrapper itself
        # instead of the wrapped value.
        raise NotImplementedError

    def __repr__(self):
        # builtins.repr is used explicitly because this module shadows repr()
        # with its own function further down.
        return builtins.repr(self.value)

    def __str__(self):
        return format(self)

    def __format__(self, format_spec):
        """If format_spec is empty, uses self.json_encoder to serialize self.value
        as a string. Otherwise, format_spec is treated as an argument list to be
        passed to self.json_encoder_factory - which defaults to JSONEncoder - and
        then the resulting formatter is used to serialize self.value as a string.

        Example::

            format("{0} {0:indent=4,sort_keys=True}", json.repr(x))
        """
        if format_spec:
            # At this point, format_spec is a string that looks something like
            # "indent=4,sort_keys=True". What we want is to build a function call
            # from that which looks like:
            #
            #   json_encoder_factory(indent=4,sort_keys=True)
            #
            # which we can then eval() to create our encoder instance.
            # NOTE(review): eval() is only safe here because format specs come
            # from in-process, trusted format strings - never pass an
            # externally-supplied format_spec.
            make_encoder = "json_encoder_factory(" + format_spec + ")"
            encoder = eval(
                make_encoder, {"json_encoder_factory": self.json_encoder_factory}
            )
        else:
            encoder = self.json_encoder
        return encoder.encode(self.value)
|
||||
|
||||
|
||||
# JSON property validators, for use with MessageDict.
|
||||
#
|
||||
# A validator is invoked with the actual value of the JSON property passed to it as
|
||||
# the sole argument; or if the property is missing in JSON, then () is passed. Note
|
||||
# that None represents an actual null in JSON, while () is a missing value.
|
||||
#
|
||||
# The validator must either raise TypeError or ValueError describing why the property
|
||||
# value is invalid, or else return the value of the property, possibly after performing
|
||||
# some substitutions - e.g. replacing () with some default value.
|
||||
|
||||
|
||||
def _converter(value, classinfo):
|
||||
"""Convert value (str) to number, otherwise return None if is not possible"""
|
||||
for one_info in classinfo:
|
||||
if issubclass(one_info, numbers.Number):
|
||||
try:
|
||||
return one_info(value)
|
||||
except ValueError:
|
||||
pass
|
||||
|
||||
|
||||
def of_type(*classinfo, **kwargs):
    """Returns a validator for a JSON property that requires it to have a value of
    the specified type. If optional=True, () is also allowed.

    The meaning of classinfo is the same as for isinstance().
    """

    assert len(classinfo)
    optional = kwargs.pop("optional", False)
    assert not len(kwargs)

    def validate(value):
        """Returns value (possibly coerced to a numeric type in classinfo), or
        raises ValueError/TypeError if it is missing or of the wrong type."""
        if (optional and value == ()) or isinstance(value, classinfo):
            return value

        # Strings may be coerced to a numeric type in classinfo, e.g. "5" -> 5.
        converted_value = _converter(value, classinfo)
        # BUG FIX: compare against None, not truthiness - a successful
        # conversion may legitimately yield a falsy value such as 0 or 0.0,
        # which the old `if converted_value:` check incorrectly rejected.
        if converted_value is not None:
            return converted_value

        if not optional and value == ():
            raise ValueError("must be specified")
        raise TypeError("must be " + " or ".join(t.__name__ for t in classinfo))

    return validate
|
||||
|
||||
|
||||
def default(default):
    """Returns a validator for a JSON property with a default value.

    The validator will only allow property values that have the same type as the
    specified default value.
    """
    expected_type = type(default)

    def validate(value):
        # () means the property was absent from the JSON - substitute the default.
        if value == ():
            return default
        if not isinstance(value, expected_type):
            raise TypeError("must be {0}".format(expected_type.__name__))
        return value

    return validate
|
||||
|
||||
|
||||
def enum(*values, **kwargs):
    """Returns a validator for a JSON enum.

    The validator will only allow the property to have one of the specified values.

    If optional=True, and the property is missing, the first value specified is used
    as the default.
    """
    assert len(values)
    optional = kwargs.pop("optional", False)
    assert not len(kwargs)
    default_value = values[0]

    def validate(value):
        # () means the property was absent from the JSON.
        if optional and value == ():
            return default_value
        if value not in values:
            raise ValueError("must be one of: {0!r}".format(list(values)))
        return value

    return validate
|
||||
|
||||
|
||||
def array(validate_item=False, vectorize=False, size=None):
    """Returns a validator for a JSON array.

    If the property is missing, it is treated as if it were []. Otherwise, it must
    be a list.

    If validate_item=False, it's treated as if it were (lambda x: x) - i.e. any item
    is considered valid, and is unchanged. If validate_item is a type or a tuple,
    it's treated as if it were json.of_type(validate_item).

    Every item in the list is replaced with validate_item(item) in-place, propagating
    any exceptions raised by the latter.

    If vectorize=True, and the value is neither a list nor a dict, it is treated as
    if it were a single-element list containing that single value - e.g. "foo" is
    then the same as ["foo"]; but {} is an error, and not [{}].

    If size is not None, it can be an int, a tuple of one int, a tuple of two ints,
    or a set. If it's an int, the array must have exactly that many elements. If it's
    a tuple of one int, it's the minimum length. If it's a tuple of two ints, they
    are the minimum and the maximum lengths. If it's a set, it's the set of sizes that
    are valid - e.g. for {2, 4}, the array can be either 2 or 4 elements long.
    """

    if not validate_item:
        validate_item = lambda x: x
    elif isinstance(validate_item, type) or isinstance(validate_item, tuple):
        validate_item = of_type(validate_item)

    # Each validate_size lambda returns True when the length is acceptable, or an
    # error message string to be raised as ValueError otherwise.
    if size is None:
        validate_size = lambda _: True
    elif isinstance(size, set):
        size = {operator.index(n) for n in size}
        validate_size = lambda value: (
            len(value) in size
            or "must have {0} elements".format(
                " or ".join(str(n) for n in sorted(size))
            )
        )
    elif isinstance(size, tuple):
        assert 1 <= len(size) <= 2
        size = tuple(operator.index(n) for n in size)
        min_len, max_len = (size + (None,))[0:2]
        validate_size = lambda value: (
            "must have at least {0} elements".format(min_len)
            if len(value) < min_len
            # BUG FIX: the upper bound must reject arrays LONGER than max_len;
            # the previous `len(value) < max_len` comparison rejected valid
            # in-range lengths and accepted over-long arrays.
            else "must have at most {0} elements".format(max_len)
            if max_len is not None and len(value) > max_len
            else True
        )
    else:
        size = operator.index(size)
        validate_size = lambda value: (
            len(value) == size or "must have {0} elements".format(size)
        )

    def validate(value):
        """Validates and normalizes a JSON array property value in-place."""
        if value == ():
            value = []
        elif vectorize and not isinstance(value, (list, dict)):
            value = [value]

        of_type(list)(value)

        size_err = validate_size(value)  # True if valid, str if error
        if size_err is not True:
            raise ValueError(size_err)

        for i, item in enumerate(value):
            try:
                value[i] = validate_item(item)
            except (TypeError, ValueError) as exc:
                # Prefix the error with the offending index for diagnostics.
                raise type(exc)(f"[{repr(i)}] {exc}")
        return value

    return validate
|
||||
|
||||
|
||||
def object(validate_value=False):
    """Returns a validator for a JSON object.

    If the property is missing, it is treated as if it were {}. Otherwise, it must
    be a dict.

    If validate_value=False, it's treated as if it were (lambda x: x) - i.e. any
    value is considered valid, and is unchanged. If validate_value is a type or a
    tuple, it's treated as if it were json.of_type(validate_value).

    Every value in the dict is replaced with validate_value(value) in-place,
    propagating any exceptions raised by the latter. Keys are not affected.
    """

    if isinstance(validate_value, (type, tuple)):
        validate_value = of_type(validate_value)

    def validate(value):
        # () means the property was absent from the JSON.
        if value == ():
            return {}

        of_type(dict)(value)
        if not validate_value:
            return value
        for k, v in value.items():
            try:
                value[k] = validate_value(v)
            except (TypeError, ValueError) as exc:
                # Prefix the error with the offending key for diagnostics.
                raise type(exc)(f"[{repr(k)}] {exc}")
        return value

    return validate
|
||||
|
||||
|
||||
def repr(value):
    """Returns a wrapper that renders value as JSON via str() or format().

    Deliberately shadows builtins.repr; intended usage is json.repr(x) inside
    log format arguments, so serialization is deferred until actually needed.
    """
    return JsonObject(value)
|
||||
|
||||
|
||||
# Re-exported so callers can use this module as a drop-in replacement for the
# stdlib json module.
dumps = json.dumps
loads = json.loads
|
384
venv/lib/python3.8/site-packages/debugpy/common/log.py
Normal file
384
venv/lib/python3.8/site-packages/debugpy/common/log.py
Normal file
|
@ -0,0 +1,384 @@
|
|||
# Copyright (c) Microsoft Corporation. All rights reserved.
|
||||
# Licensed under the MIT License. See LICENSE in the project root
|
||||
# for license information.
|
||||
|
||||
import atexit
|
||||
import contextlib
|
||||
import functools
|
||||
import inspect
|
||||
import io
|
||||
import os
|
||||
import platform
|
||||
import sys
|
||||
import threading
|
||||
import traceback
|
||||
|
||||
import debugpy
|
||||
from debugpy.common import json, timestamp, util
|
||||
|
||||
|
||||
LEVELS = ("debug", "info", "warning", "error")
"""Logging levels, lowest to highest importance.
"""

log_dir = os.getenv("DEBUGPY_LOG_DIR")
"""If not None, debugger logs its activity to a file named debugpy.*-<pid>.log
in the specified directory, where <pid> is the return value of os.getpid().
"""

timestamp_format = "09.3f"
"""Format spec used for timestamps. Can be changed to dial precision up or down.
"""

# Protects _files and _levels; re-entrant because logging calls can nest
# (e.g. LogFile.__init__ logs while holding the lock).
_lock = threading.RLock()
# Thread-local storage for the per-thread message prefix (see prefixed()).
_tls = threading.local()
_files = {}  # filename -> LogFile
_levels = set()  # combined for all log files
|
||||
|
||||
|
||||
def _update_levels():
    """Recomputes _levels as the union of the levels of all open log files.

    Callers must hold _lock (all call sites do).
    """
    global _levels
    combined = set()
    for log_file in _files.values():
        combined |= set(log_file.levels)
    _levels = frozenset(combined)
|
||||
|
||||
|
||||
class LogFile(object):
    """A single log destination. While open, it is registered in the module-level
    _files map and receives all messages written at its enabled levels.
    """

    def __init__(self, filename, file, levels=LEVELS, close_file=True):
        info("Also logging to {0}.", json.repr(filename))
        self.filename = filename
        self.file = file
        # Whether close() should also close the underlying file object
        # (False for externally-owned streams such as sys.stderr).
        self.close_file = close_file
        self._levels = frozenset(levels)

        with _lock:
            _files[self.filename] = self
            _update_levels()
            # Log the platform/version banner only to this new file.
            info(
                "{0} {1}\n{2} {3} ({4}-bit)\ndebugpy {5}",
                platform.platform(),
                platform.machine(),
                platform.python_implementation(),
                platform.python_version(),
                64 if sys.maxsize > 2 ** 32 else 32,
                debugpy.__version__,
                _to_files=[self],
            )

    @property
    def levels(self):
        # Levels enabled for this file; a frozenset drawn from LEVELS.
        return self._levels

    @levels.setter
    def levels(self, value):
        with _lock:
            # `all` is accepted as shorthand for every level.
            self._levels = frozenset(LEVELS if value is all else value)
            _update_levels()

    def write(self, level, output):
        if level in self.levels:
            try:
                self.file.write(output)
                self.file.flush()
            except Exception:
                # Logging must never crash the debuggee; drop output on I/O errors.
                pass

    def close(self):
        with _lock:
            del _files[self.filename]
            _update_levels()
            info("Not logging to {0} anymore.", json.repr(self.filename))

        if self.close_file:
            try:
                self.file.close()
            except Exception:
                pass

    def __enter__(self):
        return self

    def __exit__(self, exc_type, exc_val, exc_tb):
        self.close()
|
||||
|
||||
|
||||
class NoLog(object):
    """Null object returned by to_file() when no log file is to be created.

    Falsy, and supports the same close()/context-manager protocol as LogFile,
    so callers need not special-case the no-logging path.
    """

    file = filename = None

    def __bool__(self):
        return False

    # Python 2 spelling of __bool__, kept for parity with the original API.
    __nonzero__ = __bool__

    def close(self):
        pass

    def __enter__(self):
        return self

    def __exit__(self, exc_type, exc_val, exc_tb):
        pass
|
||||
|
||||
|
||||
# Used to inject a newline into stderr if logging there, to clean up the output
# when it's intermixed with regular prints from other sources.
def newline(level="info"):
    with _lock:
        stderr.write(level, "\n")
|
||||
|
||||
|
||||
def write(level, text, _to_files=all):
    """Writes text to every registered log file that has `level` enabled (or only
    to the files listed in _to_files), prefixed with the level initial and a
    timestamp.

    Returns the text including any thread-local prefix, so callers such as
    error() can reuse the fully-assembled message.
    """
    assert level in LEVELS

    t = timestamp.current()
    format_string = "{0}+{1:" + timestamp_format + "}: "
    prefix = format_string.format(level[0].upper(), t)

    text = getattr(_tls, "prefix", "") + text
    # Indent continuation lines so multi-line messages align under the prefix.
    indent = "\n" + (" " * len(prefix))
    output = indent.join(text.split("\n"))
    output = prefix + output + "\n\n"

    with _lock:
        if _to_files is all:
            _to_files = _files.values()
        for file in _to_files:
            file.write(level, output)

    return text
|
||||
|
||||
|
||||
def write_format(level, format_string, *args, **kwargs):
    """Formats the message with str.format(*args, **kwargs) and writes it at the
    given level.

    The special keyword argument _to_files restricts output to specific LogFile
    objects; str.format() ignores it (unused keyword arguments are allowed).
    """
    # Don't spend cycles doing expensive formatting if we don't have to. Errors are
    # always formatted, so that error() can return the text even if it's not logged.
    if level != "error" and level not in _levels:
        return

    try:
        text = format_string.format(*args, **kwargs)
    except Exception:
        # Re-raises, so `text` is always bound below.
        reraise_exception()

    return write(level, text, kwargs.pop("_to_files", all))
|
||||
|
||||
|
||||
# Level-specific shortcuts for write_format(); error() below is a function
# because it additionally wraps the message in an AssertionError.
debug = functools.partial(write_format, "debug")
info = functools.partial(write_format, "info")
warning = functools.partial(write_format, "warning")
|
||||
|
||||
|
||||
def error(*args, **kwargs):
    """Logs an error.

    Returns the output wrapped in AssertionError. Thus, the following::

        raise log.error(s, ...)

    has the same effect as::

        log.error(...)
        assert False, (s.format(...))
    """
    # write_format always formats "error"-level messages even when not logged,
    # so the message text is available for the AssertionError.
    return AssertionError(write_format("error", *args, **kwargs))
|
||||
|
||||
|
||||
def _exception(format_string="", *args, **kwargs):
    """Shared implementation for swallow_exception() and reraise_exception().

    Formats and logs the current (or kwargs-supplied exc_info) exception,
    together with the stack of the logging call site, at the level given by
    the `level` kwarg (default "error").
    """
    level = kwargs.pop("level", "error")
    exc_info = kwargs.pop("exc_info", sys.exc_info())

    if format_string:
        format_string += "\n\n"
    format_string += "{exception}\nStack where logged:\n{stack}"

    exception = "".join(traceback.format_exception(*exc_info))

    f = inspect.currentframe()
    f = f.f_back if f else f  # don't log this frame
    try:
        stack = "".join(traceback.format_stack(f))
    finally:
        del f  # avoid cycles

    write_format(
        level, format_string, *args, exception=exception, stack=stack, **kwargs
    )
|
||||
|
||||
|
||||
def swallow_exception(format_string="", *args, **kwargs):
    """Logs an exception with full traceback.

    If format_string is specified, it is formatted with format(*args, **kwargs), and
    prepended to the exception traceback on a separate line.

    If exc_info is specified, the exception it describes will be logged. Otherwise,
    sys.exc_info() - i.e. the exception being handled currently - will be logged.

    If level is specified, the exception will be logged as a message of that level.
    The default is "error".
    """

    _exception(format_string, *args, **kwargs)
|
||||
|
||||
|
||||
def reraise_exception(format_string="", *args, **kwargs):
    """Like swallow_exception(), but re-raises the current exception after logging it."""

    # exc_info would be meaningless here: the bare `raise` below can only
    # re-raise the exception currently being handled.
    assert "exc_info" not in kwargs
    _exception(format_string, *args, **kwargs)
    raise
|
||||
|
||||
|
||||
def to_file(filename=None, prefix=None, levels=LEVELS):
    """Starts logging all messages at the specified levels to the designated file.

    Either filename or prefix must be specified, but not both.

    If filename is specified, it designates the log file directly.

    If prefix is specified, the log file is automatically created in options.log_dir,
    with filename computed as prefix + os.getpid(). If log_dir is None, no log file
    is created, and the function returns immediately.

    If the file with the specified or computed name is already being used as a log
    file, it is not overwritten, but its levels are updated as specified.

    The function returns an object with a close() method. When the object is closed,
    logs are not written into that file anymore. Alternatively, the returned object
    can be used in a with-statement:

        with log.to_file("some.log"):
            # now also logging to some.log
        # not logging to some.log anymore
    """

    assert (filename is not None) ^ (prefix is not None)

    if filename is None:
        if log_dir is None:
            # NoLog supports the same close()/with protocol, so callers need
            # not special-case the no-logging configuration.
            return NoLog()
        try:
            os.makedirs(log_dir)
        except OSError:
            # Directory may already exist; any real problem (e.g. permissions)
            # will surface from io.open() below with a clearer error.
            pass
        filename = f"{log_dir}/{prefix}-{os.getpid()}.log"

    file = _files.get(filename)
    if file is None:
        file = LogFile(filename, io.open(filename, "w", encoding="utf-8"), levels)
    else:
        # Already logging to this file - just update its enabled levels.
        file.levels = levels
    return file
|
||||
|
||||
|
||||
@contextlib.contextmanager
def prefixed(format_string, *args, **kwargs):
    """Adds a prefix to all messages logged from the current thread for the duration
    of the context manager.

    Prefixes nest: the new prefix is prepended to whatever prefix is already in
    effect on this thread, and the previous prefix is restored on exit.
    """
    prefix = format_string.format(*args, **kwargs)
    old_prefix = getattr(_tls, "prefix", "")
    _tls.prefix = prefix + old_prefix
    try:
        yield
    finally:
        _tls.prefix = old_prefix
|
||||
|
||||
|
||||
def describe_environment(header):
    """Logs `header` followed by a description of the Python environment:
    interpreter prefixes, site-packages, sysconfig paths, and the locations of
    key modules - as a single info-level message.
    """
    import sysconfig
    import site  # noqa

    result = [header, "\n\n"]

    def report(s, *args, **kwargs):
        # Accumulates formatted text; everything is logged as one message below.
        result.append(s.format(*args, **kwargs))

    def report_paths(get_paths, label=None):
        # get_paths is either a callable returning path(s), or a string
        # expression to be evaluated via util.evaluate().
        prefix = f" {label or get_paths}: "

        expr = None
        if not callable(get_paths):
            expr = get_paths
            get_paths = lambda: util.evaluate(expr)
        try:
            paths = get_paths()
        except AttributeError:
            # e.g. sys.real_prefix only exists inside some virtualenvs.
            report("{0}<missing>\n", prefix)
            return
        except Exception:
            swallow_exception(
                "Error evaluating {0}",
                repr(expr) if expr else util.srcnameof(get_paths),
            )
            return

        if not isinstance(paths, (list, tuple)):
            paths = [paths]

        for p in sorted(paths):
            report("{0}{1}", prefix, p)
            rp = os.path.realpath(p)
            if p != rp:
                # Show the symlink target alongside the reported path.
                report("({0})", rp)
            report("\n")

            # Only the first path carries the label; align the rest under it.
            prefix = " " * len(prefix)

    report("System paths:\n")
    report_paths("sys.prefix")
    report_paths("sys.base_prefix")
    report_paths("sys.real_prefix")
    report_paths("site.getsitepackages()")
    report_paths("site.getusersitepackages()")

    site_packages = [
        p
        for p in sys.path
        if os.path.exists(p) and os.path.basename(p) == "site-packages"
    ]
    report_paths(lambda: site_packages, "sys.path (site-packages)")

    for name in sysconfig.get_path_names():
        expr = "sysconfig.get_path({0!r})".format(name)
        report_paths(expr)

    report_paths("os.__file__")
    report_paths("threading.__file__")
    report_paths("debugpy.__file__")

    result = "".join(result).rstrip("\n")
    info("{0}", result)
|
||||
|
||||
|
||||
# Default log destination: stderr, limited to warnings and errors unless
# overridden via DEBUGPY_LOG_STDERR (a space-separated list of level names).
# close_file=False: sys.stderr is not ours to close.
stderr = LogFile(
    "<stderr>",
    sys.stderr,
    levels=os.getenv("DEBUGPY_LOG_STDERR", "warning error").split(),
    close_file=False,
)
|
||||
|
||||
|
||||
@atexit.register
def _close_files():
    # Flush and close all log files on interpreter shutdown. Iterate over a
    # snapshot, since LogFile.close() removes entries from _files.
    for file in tuple(_files.values()):
        file.close()
|
||||
|
||||
|
||||
# The following are helper shortcuts for printf debugging. They must never be used
# in production code.


def _repr(value):  # pragma: no cover
    # Logs repr(value) at warning level, tagged for easy grepping.
    warning("$REPR {0!r}", value)


def _vars(*names):  # pragma: no cover
    # Logs the caller's locals, optionally restricted to the given names.
    locals = inspect.currentframe().f_back.f_locals
    if names:
        locals = {name: locals[name] for name in names if name in locals}
    warning("$VARS {0!r}", locals)


def _stack():  # pragma: no cover
    # Logs the current call stack.
    stack = "\n".join(traceback.format_stack())
    warning("$STACK:\n\n{0}", stack)


def _threads():  # pragma: no cover
    # Logs all currently live threads.
    output = "\n".join([str(t) for t in threading.enumerate()])
    warning("$THREADS:\n\n{0}", output)
|
1505
venv/lib/python3.8/site-packages/debugpy/common/messaging.py
Normal file
1505
venv/lib/python3.8/site-packages/debugpy/common/messaging.py
Normal file
File diff suppressed because it is too large
Load diff
185
venv/lib/python3.8/site-packages/debugpy/common/singleton.py
Normal file
185
venv/lib/python3.8/site-packages/debugpy/common/singleton.py
Normal file
|
@ -0,0 +1,185 @@
|
|||
# Copyright (c) Microsoft Corporation. All rights reserved.
|
||||
# Licensed under the MIT License. See LICENSE in the project root
|
||||
# for license information.
|
||||
|
||||
import functools
|
||||
import threading
|
||||
|
||||
|
||||
class Singleton(object):
    """A base class for a class of a singleton object.

    For any derived class T, the first invocation of T() will create the instance,
    and any future invocations of T() will return that instance.

    Concurrent invocations of T() from different threads are safe.
    """

    # A dual-lock scheme is necessary to be thread safe while avoiding deadlocks.
    # _lock_lock is shared by all singleton types, and is used to construct their
    # respective _lock instances when invoked for a new type. Then _lock is used
    # to synchronize all further access for that type, including __init__. This way,
    # __init__ for any given singleton can access another singleton, and not get
    # deadlocked if that other singleton is trying to access it.
    _lock_lock = threading.RLock()
    _lock = None

    # Specific subclasses will get their own _instance set in __new__.
    _instance = None

    _is_shared = None  # True if shared, False if exclusive

    def __new__(cls, *args, **kwargs):
        # Allow arbitrary args and kwargs if shared=False, because that is guaranteed
        # to construct a new singleton if it succeeds. Otherwise, this call might end
        # up returning an existing instance, which might have been constructed with
        # different arguments, so allowing them is misleading.
        assert not kwargs.get("shared", False) or (len(args) + len(kwargs)) == 0, (
            "Cannot use constructor arguments when accessing a Singleton without "
            "specifying shared=False."
        )

        # Avoid locking as much as possible with repeated double-checks - the most
        # common path is when everything is already allocated.
        if not cls._instance:
            # If there's no per-type lock, allocate it.
            if cls._lock is None:
                with cls._lock_lock:
                    if cls._lock is None:
                        cls._lock = threading.RLock()

            # Now that we have a per-type lock, we can synchronize construction.
            if not cls._instance:
                with cls._lock:
                    if not cls._instance:
                        cls._instance = object.__new__(cls)
                        # To prevent having __init__ invoked multiple times, call
                        # it here directly, and then replace it with a stub that
                        # does nothing - that stub will get auto-invoked on return,
                        # and on all future singleton accesses.
                        cls._instance.__init__()
                        cls.__init__ = lambda *args, **kwargs: None

        return cls._instance

    def __init__(self, *args, **kwargs):
        """Initializes the singleton instance. Guaranteed to only be invoked once for
        any given type derived from Singleton.

        If shared=False, the caller is requesting a singleton instance for their own
        exclusive use. This is only allowed if the singleton has not been created yet;
        if so, it is created and marked as being in exclusive use. While it is marked
        as such, all attempts to obtain an existing instance of it immediately raise
        an exception. The singleton can eventually be promoted to shared use by calling
        share() on it.
        """

        shared = kwargs.pop("shared", True)
        # `with self` acquires the per-type lock via __enter__ below.
        with self:
            if shared:
                assert (
                    type(self)._is_shared is not False
                ), "Cannot access a non-shared Singleton."
                type(self)._is_shared = True
            else:
                assert type(self)._is_shared is None, "Singleton is already created."

    def __enter__(self):
        """Lock this singleton to prevent concurrent access."""
        type(self)._lock.acquire()
        return self

    def __exit__(self, exc_type, exc_value, exc_tb):
        """Unlock this singleton to allow concurrent access."""
        type(self)._lock.release()

    def share(self):
        """Share this singleton, if it was originally created with shared=False."""
        type(self)._is_shared = True
|
||||
class ThreadSafeSingleton(Singleton):
    """A singleton that incorporates a lock for thread-safe access to its members.

    The lock can be acquired using the context manager protocol, and thus idiomatic
    use is in conjunction with a with-statement. For example, given derived class T::

        with T() as t:
            t.x = t.frob(t.y)

    All access to the singleton from the outside should follow this pattern for both
    attributes and method calls. Singleton members can assume that self is locked by
    the caller while they're executing, but recursive locking of the same singleton
    on the same thread is also permitted.
    """

    threadsafe_attrs = frozenset()
    """Names of attributes that are guaranteed to be used in a thread-safe manner.

    This is typically used in conjunction with share() to simplify synchronization.
    """

    readonly_attrs = frozenset()
    """Names of attributes that are readonly. These can be read without locking, but
    cannot be written at all.

    Every derived class gets its own separate set. Thus, for any given singleton type
    T, an attribute can be made readonly after setting it, with T.readonly_attrs.add().
    """

    def __init__(self, *args, **kwargs):
        super().__init__(*args, **kwargs)
        # Make sure each derived class gets a separate copy.
        type(self).readonly_attrs = set(type(self).readonly_attrs)

    # Prevent callers from reading or writing attributes without locking, except for
    # reading attributes listed in threadsafe_attrs, and methods specifically marked
    # with @threadsafe_method. Such methods should perform the necessary locking to
    # ensure thread safety for the callers.

    @staticmethod
    def assert_locked(self):
        # NOTE(review): declared @staticmethod yet takes `self` explicitly, so
        # the dunder overrides below can invoke it as
        # ThreadSafeSingleton.assert_locked(self) without triggering
        # __getattribute__ recursion.
        lock = type(self)._lock
        # Since _lock is an RLock, acquire(blocking=False) succeeds if the
        # current thread already holds it - i.e. the caller locked properly.
        assert lock.acquire(blocking=False), (
            "ThreadSafeSingleton accessed without locking. Either use with-statement, "
            "or if it is a method or property, mark it as @threadsafe_method or with "
            "@autolocked_method, as appropriate."
        )
        lock.release()

    def __getattribute__(self, name):
        value = object.__getattribute__(self, name)
        if name not in (type(self).threadsafe_attrs | type(self).readonly_attrs):
            if not getattr(value, "is_threadsafe_method", False):
                ThreadSafeSingleton.assert_locked(self)
        return value

    def __setattr__(self, name, value):
        assert name not in type(self).readonly_attrs, "This attribute is read-only."
        if name not in type(self).threadsafe_attrs:
            ThreadSafeSingleton.assert_locked(self)
        return object.__setattr__(self, name, value)
|
||||
|
||||
|
||||
def threadsafe_method(func):
    """Marks a method of a ThreadSafeSingleton-derived class as inherently thread-safe.

    A method so marked must either not use any singleton state, or lock it
    appropriately. The function object itself is returned unchanged, tagged
    with the is_threadsafe_method attribute checked by __getattribute__.
    """
    setattr(func, "is_threadsafe_method", True)
    return func
|
||||
|
||||
|
||||
def autolocked_method(func):
    """Automatically synchronizes all calls of a method of a ThreadSafeSingleton-derived
    class by locking the singleton for the duration of each call.
    """

    @functools.wraps(func)
    def lock_and_call(self, *args, **kwargs):
        # `with self` acquires the singleton's lock for the whole call.
        with self:
            return func(self, *args, **kwargs)

    # Tag the wrapper so __getattribute__ skips the lock assertion for it.
    lock_and_call.is_threadsafe_method = True
    return lock_and_call
|
122
venv/lib/python3.8/site-packages/debugpy/common/sockets.py
Normal file
122
venv/lib/python3.8/site-packages/debugpy/common/sockets.py
Normal file
|
@ -0,0 +1,122 @@
|
|||
# Copyright (c) Microsoft Corporation. All rights reserved.
|
||||
# Licensed under the MIT License. See LICENSE in the project root
|
||||
# for license information.
|
||||
|
||||
import socket
|
||||
import sys
|
||||
import threading
|
||||
|
||||
from debugpy.common import log
|
||||
from debugpy.common.util import hide_thread_from_debugger
|
||||
|
||||
|
||||
def create_server(host, port=0, backlog=socket.SOMAXCONN, timeout=None):
    """Return a local server socket listening on the given port.

    host defaults to "127.0.0.1" and port to 0 (OS-assigned ephemeral port)
    when passed as None. timeout, if not None, is applied to the listening
    socket. Raises on bind/listen failure, closing the socket first.
    """
    assert backlog > 0
    if host is None:
        host = "127.0.0.1"
    if port is None:
        port = 0

    # Create the socket *before* entering the try block: if _new_sock()
    # itself fails there is nothing to close, and the previous code would
    # have raised NameError on `server.close()`, masking the real error.
    server = _new_sock()
    try:
        server.bind((host, port))
        if timeout is not None:
            server.settimeout(timeout)
        server.listen(backlog)
    except Exception:
        server.close()
        raise
    return server
|
||||
|
||||
|
||||
def create_client():
    """Return a client socket that may be connected to a remote address."""
    # Same TCP socket configuration (keepalive etc.) as the server side.
    return _new_sock()
|
||||
|
||||
|
||||
def _new_sock():
|
||||
sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM, socket.IPPROTO_TCP)
|
||||
if sys.platform == "win32":
|
||||
sock.setsockopt(socket.SOL_SOCKET, socket.SO_EXCLUSIVEADDRUSE, 1)
|
||||
else:
|
||||
sock.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)
|
||||
|
||||
# Set TCP keepalive on an open socket.
|
||||
# It activates after 1 second (TCP_KEEPIDLE,) of idleness,
|
||||
# then sends a keepalive ping once every 3 seconds (TCP_KEEPINTVL),
|
||||
# and closes the connection after 5 failed ping (TCP_KEEPCNT), or 15 seconds
|
||||
try:
|
||||
sock.setsockopt(socket.SOL_SOCKET, socket.SO_KEEPALIVE, 1)
|
||||
except (AttributeError, OSError):
|
||||
pass # May not be available everywhere.
|
||||
try:
|
||||
sock.setsockopt(socket.IPPROTO_TCP, socket.TCP_KEEPIDLE, 1)
|
||||
except (AttributeError, OSError):
|
||||
pass # May not be available everywhere.
|
||||
try:
|
||||
sock.setsockopt(socket.IPPROTO_TCP, socket.TCP_KEEPINTVL, 3)
|
||||
except (AttributeError, OSError):
|
||||
pass # May not be available everywhere.
|
||||
try:
|
||||
sock.setsockopt(socket.IPPROTO_TCP, socket.TCP_KEEPCNT, 5)
|
||||
except (AttributeError, OSError):
|
||||
pass # May not be available everywhere.
|
||||
return sock
|
||||
|
||||
|
||||
def shut_down(sock, how=socket.SHUT_RDWR):
    """Shut down the given socket.

    how defaults to SHUT_RDWR (both directions); may raise OSError if the
    socket is not connected.
    """
    sock.shutdown(how)
|
||||
|
||||
|
||||
def close_socket(sock):
    """Shutdown and close the socket."""
    # Best-effort shutdown: the peer may already have disconnected, in which
    # case shutdown() raises and we proceed straight to close().
    try:
        shut_down(sock)
    except Exception:
        pass
    sock.close()
|
||||
|
||||
|
||||
def serve(name, handler, host, port=0, backlog=socket.SOMAXCONN, timeout=None):
    """Accepts TCP connections on the specified host and port, and invokes the
    provided handler function for every new connection.

    Returns the created server socket.
    """
    assert backlog > 0

    try:
        listener = create_server(host, port, backlog, timeout)
    except Exception:
        log.reraise_exception(
            "Error listening for incoming {0} connections on {1}:{2}:", name, host, port
        )
    # Report the actual bound address (port may have been OS-assigned).
    host, port = listener.getsockname()
    log.info("Listening for incoming {0} connections on {1}:{2}...", name, host, port)

    def accept_loop():
        while True:
            try:
                sock, (other_host, other_port) = listener.accept()
            except (OSError, socket.error):
                # Listener socket has been closed.
                break
            log.info(
                "Accepted incoming {0} connection from {1}:{2}.",
                name,
                other_host,
                other_port,
            )
            handler(sock)

    accepter = threading.Thread(target=accept_loop)
    accepter.daemon = True
    hide_thread_from_debugger(accepter)
    accepter.start()

    return listener
|
62
venv/lib/python3.8/site-packages/debugpy/common/stacks.py
Normal file
62
venv/lib/python3.8/site-packages/debugpy/common/stacks.py
Normal file
|
@ -0,0 +1,62 @@
|
|||
# Copyright (c) Microsoft Corporation. All rights reserved.
|
||||
# Licensed under the MIT License. See LICENSE in the project root
|
||||
# for license information.
|
||||
|
||||
"""Provides facilities to dump all stacks of all threads in the process.
|
||||
"""
|
||||
|
||||
import os
|
||||
import sys
|
||||
import time
|
||||
import threading
|
||||
import traceback
|
||||
|
||||
from debugpy.common import log
|
||||
|
||||
|
||||
def dump():
    """Dump stacks of all threads in this process, except for the current thread."""

    tid = threading.current_thread().ident
    pid = os.getpid()

    log.info("Dumping stacks for process {0}...", pid)

    for t_ident, frame in sys._current_frames().items():
        if t_ident == tid:
            # Skip the thread performing the dump.
            continue

        # Find the Thread object for this frame to report its name and daemon
        # status. BUG FIX: the original compared t.ident against tid (the
        # current thread, already skipped above), so every dumped stack was
        # attributed to the current thread's name; compare against t_ident.
        for t in threading.enumerate():
            if t.ident == t_ident:
                t_name = t.name
                t_daemon = t.daemon
                break
        else:
            # Thread not registered with threading (e.g. a raw C thread).
            t_name = t_daemon = "<unknown>"

        stack = "".join(traceback.format_stack(frame))
        log.info(
            "Stack of thread {0} (tid={1}, pid={2}, daemon={3}):\n\n{4}",
            t_name,
            t_ident,
            pid,
            t_daemon,
            stack,
        )

    log.info("Finished dumping stacks for process {0}.", pid)
|
||||
|
||||
|
||||
def dump_after(secs):
    """Invokes dump() on a background thread after waiting for the specified time."""

    def delayed_dump():
        time.sleep(secs)
        try:
            dump()
        except:
            # Best-effort: a failed dump must never take down the process.
            log.swallow_exception()

    worker = threading.Thread(target=delayed_dump)
    worker.daemon = True
    worker.start()
|
22
venv/lib/python3.8/site-packages/debugpy/common/timestamp.py
Normal file
22
venv/lib/python3.8/site-packages/debugpy/common/timestamp.py
Normal file
|
@ -0,0 +1,22 @@
|
|||
# Copyright (c) Microsoft Corporation. All rights reserved.
|
||||
# Licensed under the MIT License. See LICENSE in the project root
|
||||
# for license information.
|
||||
|
||||
"""Provides monotonic timestamps with a resetable zero.
|
||||
"""
|
||||
|
||||
import time
|
||||
|
||||
# Public API of this module.
__all__ = ["current", "reset"]
|
||||
|
||||
|
||||
def current():
    """Returns seconds elapsed since the zero point set by reset()."""
    now = time.monotonic()
    return now - timestamp_zero
|
||||
|
||||
|
||||
def reset():
    """Moves the zero point for current() to the present moment."""
    global timestamp_zero
    timestamp_zero = time.monotonic()
|
||||
|
||||
|
||||
# Establish the initial zero point at import time.
reset()
|
164
venv/lib/python3.8/site-packages/debugpy/common/util.py
Normal file
164
venv/lib/python3.8/site-packages/debugpy/common/util.py
Normal file
|
@ -0,0 +1,164 @@
|
|||
# Copyright (c) Microsoft Corporation. All rights reserved.
|
||||
# Licensed under the MIT License. See LICENSE in the project root
|
||||
# for license information.
|
||||
|
||||
import inspect
|
||||
import os
|
||||
import sys
|
||||
|
||||
|
||||
def evaluate(code, path=__file__, mode="eval"):
    """Compile and evaluate the given code string.

    path is recorded as the code's filename; mode is the compile() mode
    ("eval" or "exec"). Names in the code resolve against sys.modules, so
    e.g. "sys" evaluates to the sys module.
    """
    # Setting file path here to avoid breaking here if users have set
    # "break on exception raised" setting. This code can potentially run
    # in user process and is indistinguishable if the path is not set.
    # We use the path internally to skip exceptions inside the debugger.
    # BUG FIX: the mode parameter was accepted but ignored - compile() was
    # hard-coded to "eval", so mode="exec" raised SyntaxError on statements.
    expr = compile(code, path, mode)
    return eval(expr, {}, sys.modules)
|
||||
|
||||
|
||||
class Observable(object):
    """An object with change notifications.

    Callables appended to `observers` are invoked as observer(obj, name)
    after every attribute assignment on the instance.
    """

    # Class-level fallback, used when attributes are set before __init__ runs.
    observers = ()

    def __init__(self):
        self.observers = []

    def __setattr__(self, name, value):
        # Notify in `finally` so observers fire even if assignment raised.
        try:
            return super().__setattr__(name, value)
        finally:
            for observer in self.observers:
                observer(self, name)
|
||||
|
||||
|
||||
class Env(dict):
    """A dict for environment variables."""

    @staticmethod
    def snapshot():
        """Returns a snapshot of the current environment."""
        return Env(os.environ)

    def copy(self, updated_from=None):
        """Returns a copy of this Env, optionally updated from another mapping."""
        clone = Env(self)
        if updated_from is not None:
            clone.update(updated_from)
        return clone

    def prepend_to(self, key, entry):
        """Prepends a new entry to a PATH-style environment variable, creating
        it if it doesn't exist already.
        """
        if key in self:
            self[key] = entry + os.path.pathsep + self[key]
        else:
            self[key] = entry
|
||||
|
||||
|
||||
def force_str(s, encoding, errors="strict"):
    """Converts s to str, using the provided encoding. If s is already str,
    it is returned as is; other non-bytes objects are converted via str().
    """
    if isinstance(s, bytes):
        return s.decode(encoding, errors)
    return str(s)
|
||||
|
||||
|
||||
def force_bytes(s, encoding, errors="strict"):
    """Converts s to bytes, using the provided encoding. If s is already bytes,
    it is returned as is.

    If errors="strict" and s is bytes, its encoding is verified by decoding it;
    UnicodeError is raised if it cannot be decoded.
    """
    if not isinstance(s, str):
        result = bytes(s)
        if errors == "strict":
            # Return value ignored - decoded solely to verify the encoding.
            result.decode(encoding, errors)
        return result
    return s.encode(encoding, errors)
|
||||
|
||||
|
||||
def force_ascii(s, errors="strict"):
    """Same as force_bytes(s, "ascii", errors)"""
    # Thin convenience wrapper; see force_bytes for strict-mode verification.
    return force_bytes(s, "ascii", errors)
|
||||
|
||||
|
||||
def force_utf8(s, errors="strict"):
    """Same as force_bytes(s, "utf8", errors)"""
    # Thin convenience wrapper; see force_bytes for strict-mode verification.
    return force_bytes(s, "utf8", errors)
|
||||
|
||||
|
||||
def nameof(obj, quote=False):
    """Returns the most descriptive name of a Python module, class, or function,
    as a Unicode string

    If quote=True, name is quoted with repr().

    Best-effort, but guaranteed to not fail - always returns something.
    """

    try:
        # Prefer the qualified name (includes the enclosing class/function path).
        name = obj.__qualname__
    except Exception:
        try:
            name = obj.__name__
        except Exception:
            # Fall back to raw repr(), and skip quoting.
            try:
                name = repr(obj)
            except Exception:
                return "<unknown>"
            else:
                # repr() output is already delimited; don't quote it again.
                quote = False

    if quote:
        try:
            name = repr(name)
        except Exception:
            pass

    # Normalize to str, replacing any undecodable characters.
    return force_str(name, "utf-8", "replace")
|
||||
|
||||
|
||||
def srcnameof(obj):
    """Returns the most descriptive name of a Python module, class, or function,
    including source information (filename and linenumber), if available.

    Best-effort, but guaranteed to not fail - always returns something.
    """
    name = nameof(obj, quote=True)

    # Append source location when it can be determined; any introspection
    # failure simply leaves the bare name.
    try:
        src_file = inspect.getsourcefile(obj)
    except Exception:
        return name

    name += f" (file {src_file!r}"
    try:
        _, src_lineno = inspect.getsourcelines(obj)
        name += f", line {src_lineno}"
    except Exception:
        pass
    name += ")"

    return name
|
||||
|
||||
|
||||
def hide_debugpy_internals():
    """Returns True if the caller should hide something from debugpy."""
    # Hiding is the default; setting DEBUGPY_TRACE_DEBUGPY disables it so
    # that debugpy itself can be debugged with debugpy.
    return os.environ.get("DEBUGPY_TRACE_DEBUGPY") is None
|
||||
|
||||
|
||||
def hide_thread_from_debugger(thread):
    """Disables tracing for the given thread if DEBUGPY_TRACE_DEBUGPY is not set.
    DEBUGPY_TRACE_DEBUGPY is used to debug debugpy with debugpy
    """
    if not hide_debugpy_internals():
        return
    # These pydevd-recognized flags exclude the thread from tracing.
    thread.pydev_do_not_trace = True
    thread.is_pydev_daemon_thread = True
|
Loading…
Add table
Add a link
Reference in a new issue