# This is metamodule.py.
# Copyright (C) 2014-2015 Nathaniel J. Smith <njs@pobox.com>
# Released under a 2-clause BSD license; see the LICENSE file for details.

"A tiny Python module for taking control of your library's public API."

__version__ = "1.1+dev"

import sys
import warnings
from types import ModuleType

__all__ = ["install", "FancyModule"]

try:
    basestring
except NameError:
    basestring = str


class FancyModule(ModuleType):
    """A ModuleType subclass providing lazy imports and
    warn-on-attribute-access.

    If you add a module name to the __auto_import__ set, it will be
    automatically imported on first access. (Note, however, that it is
    generally recommended that you not use this feature unless you have a
    real need for it, because if something goes wrong with your import then
    this can cause the error to be reported at a strange and confusing
    place.)

    If you do::

        __warn_on_access__[NAME] = (VALUE, WARNING OBJECT)

    then the given NAME will be accessible as an attribute on the module,
    with the given VALUE, and the given WARNING OBJECT will be issued (via
    warnings.warn) every time it is accessed.

    A short usage sketch follows this class definition.
    """

    def __metamodule_init__(self):
        # This method is like __init__, except that the weird way metamodule
        # objects are constructed means that we end up using
        # ModuleType.__init__, and our subclass __init__ never gets called. So
        # our setup code calls __metamodule_init__ explicitly. Do *not* call
        # ModuleType.__init__ from this method!
        self.__auto_import__ = set()
        self.__warn_on_access__ = {}

    def __getattr__(self, name):
        if name in self.__auto_import__:
            assert "." not in name
            __import__(self.__name__ + "." + name)
            return getattr(self, name)
        if name in self.__warn_on_access__:
            value, warning = self.__warn_on_access__[name]
            warnings.warn(warning, stacklevel=2)
            return value
        raise AttributeError(name)

    def __dir__(self):
        result = set(self.__dict__)
        result.update(self.__auto_import__)
        # We intentionally don't return the "warn on access" items here,
        # because (a) you shouldn't be using them, and (b) it creates problems
        # for things like test discovery that want to iterate through all
        # attributes.
        return sorted(result)

    def __repr__(self):
        r = ModuleType.__repr__(self)
        # Older versions of ModuleType.__repr__ unconditionally say "<module
        # ...>" without taking the actual class into account.
        if r.startswith("<module "):
            r = "<%s%s" % (self.__class__.__name__,
                           r[len("<module"):])
        return r

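# A minimal usage sketch (not part of the library): how a hypothetical
# package's __init__.py might configure FancyModule after calling install()
# below. The package name "mypkg", the submodule "extras", and the constant
# names are illustrative assumptions.
#
#     # mypkg/__init__.py
#     import metamodule
#     metamodule.install(__name__)
#     del metamodule
#
#     # "mypkg.extras" is imported lazily on first attribute access:
#     __auto_import__.add("extras")
#
#     # Accessing mypkg.OLD_NAME returns 42 and emits a DeprecationWarning:
#     __warn_on_access__["OLD_NAME"] = (
#         42, DeprecationWarning("OLD_NAME is deprecated; use NEW_NAME"))
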
def install(name, class_=FancyModule):
    """Install a metamodule class into the module with name 'name'.

    Generally used via the idiom::

        import metamodule
        metamodule.install(__name__)
        del metamodule

    By default it will use metamodule's built-in FancyModule type, but you
    can also specify your own ModuleType subclass if you want. Your
    subclass's __init__ method will *not* be called, but if you define a
    __metamodule_init__ method then it *will* be called.
    """
    orig_module = sys.modules[name]
    if isinstance(orig_module, class_):
        return
    try:
        orig_module.__class__ = class_
        new_module = orig_module
    except TypeError:
        new_module = _hacky_make_metamodule(orig_module, class_)
    getattr(type(new_module), "__metamodule_init__", lambda self: None)(
        new_module)
    sys.modules[name] = new_module

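# A sketch of the custom-class path described in install()'s docstring
# (hypothetical names, not part of the library): pass your own ModuleType
# subclass, and install() will call its __metamodule_init__ instead of
# __init__.
#
#     # mypkg/__init__.py
#     import metamodule
#
#     class _MyModule(metamodule.FancyModule):
#         def __metamodule_init__(self):
#             super(_MyModule, self).__metamodule_init__()
#             self.__auto_import__.add("extras")
#
#     metamodule.install(__name__, _MyModule)
#     del metamodule, _MyModule
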
def _hacky_make_metamodule(orig_module, class_):
    # Construct the new module instance by hand, calling only ModuleType
    # methods, so as to simulate what happens in the __class__ assignment
    # path.
    new_module = ModuleType.__new__(class_)
    ModuleType.__init__(new_module, orig_module.__name__, orig_module.__doc__)
    # Now we jump through hoops to get at the module object guts...
    import ctypes
    # These are the only fields in the module object in CPython 1.0
    # through 2.7.
    fields = [
        ("PyObject_HEAD", ctypes.c_byte * object.__basicsize__),
        ("md_dict", ctypes.c_void_p),
    ]
    data_fields = ["md_dict"]
    # 3.0 adds PEP 3121 stuff:
    if (3,) <= sys.version_info:
        fields += [("md_def", ctypes.c_void_p),
                   ("md_state", ctypes.c_void_p),
                   ]
        data_fields += ["md_def", "md_state"]
    # 3.4 adds md_weaklist and md_name
    if (3, 4) <= sys.version_info:
        fields += [("md_weaklist", ctypes.c_void_p),
                   ("md_name", ctypes.c_void_p),
                   ]
        # don't try to mess with md_weaklist, that seems unlikely to end
        # well.
        data_fields += ["md_name"]
    if (3, 5) <= sys.version_info:
        raise RuntimeError("Sorry, I can't read the future!")

    class CModule(ctypes.Structure):
        _fields_ = fields

    corig_module = ctypes.cast(id(orig_module), ctypes.POINTER(CModule))
    cnew_module = ctypes.cast(id(new_module), ctypes.POINTER(CModule))
    # And now we swap the two modules' internal data fields. This makes
    # reference counting easier, plus prevents the destruction of orig_module
    # from cleaning up the objects we are still using.
    for data_field in data_fields:
        _swap_attr(corig_module.contents, cnew_module.contents, data_field)
    return new_module


def _swap_attr(obj1, obj2, attr):
    tmp1 = getattr(obj1, attr)
    tmp2 = getattr(obj2, attr)
    setattr(obj1, attr, tmp2)
    setattr(obj2, attr, tmp1)
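
# Illustrative sketch of the field swap performed by _swap_attr above, using
# a toy ctypes Structure (purely an example, not part of the library):
#
#     import ctypes
#
#     class _Toy(ctypes.Structure):
#         _fields_ = [("md_dict", ctypes.c_void_p)]
#
#     a, b = _Toy(1), _Toy(2)
#     _swap_attr(a, b, "md_dict")
#     assert (a.md_dict, b.md_dict) == (2, 1)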