Add files using upload-large-folder tool
Browse filesThis view is limited to 50 files because it contains too many changes.
See raw diff
- .gitattributes +1 -0
- evalkit_tf446/lib/python3.10/idlelib/HISTORY.txt +296 -0
- evalkit_tf446/lib/python3.10/idlelib/autocomplete.py +228 -0
- evalkit_tf446/lib/python3.10/idlelib/autoexpand.py +96 -0
- evalkit_tf446/lib/python3.10/idlelib/query.py +392 -0
- evalkit_tf446/lib/python3.10/idlelib/replace.py +307 -0
- evalkit_tf446/lib/python3.10/idlelib/rpc.py +635 -0
- evalkit_tf446/lib/python3.10/idlelib/run.py +642 -0
- evalkit_tf446/lib/python3.10/lib2to3/pgen2/__pycache__/__init__.cpython-310.pyc +0 -0
- evalkit_tf446/lib/python3.10/lib2to3/pgen2/__pycache__/token.cpython-310.pyc +0 -0
- evalkit_tf446/lib/python3.10/lib2to3/pgen2/__pycache__/tokenize.cpython-310.pyc +0 -0
- evalkit_tf446/lib/python3.10/site-packages/cv2/qt/fonts/DejaVuSans-Oblique.ttf +3 -0
- evalkit_tf446/lib/python3.10/site-packages/einops/__init__.py +15 -0
- evalkit_tf446/lib/python3.10/site-packages/einops/_backends.py +682 -0
- evalkit_tf446/lib/python3.10/site-packages/einops/einops.py +793 -0
- evalkit_tf446/lib/python3.10/site-packages/einops/packing.py +191 -0
- evalkit_tf446/lib/python3.10/site-packages/executing/__pycache__/__init__.cpython-310.pyc +0 -0
- evalkit_tf446/lib/python3.10/site-packages/executing/__pycache__/_exceptions.cpython-310.pyc +0 -0
- evalkit_tf446/lib/python3.10/site-packages/executing/__pycache__/_position_node_finder.cpython-310.pyc +0 -0
- evalkit_tf446/lib/python3.10/site-packages/executing/__pycache__/executing.cpython-310.pyc +0 -0
- evalkit_tf446/lib/python3.10/site-packages/executing/__pycache__/version.cpython-310.pyc +0 -0
- evalkit_tf446/lib/python3.10/site-packages/executing/_exceptions.py +22 -0
- evalkit_tf446/lib/python3.10/site-packages/executing/executing.py +1160 -0
- evalkit_tf446/lib/python3.10/site-packages/executing/py.typed +0 -0
- evalkit_tf446/lib/python3.10/site-packages/executing/version.py +1 -0
- evalkit_tf446/lib/python3.10/site-packages/oauthlib/__init__.py +34 -0
- evalkit_tf446/lib/python3.10/site-packages/oauthlib/__pycache__/__init__.cpython-310.pyc +0 -0
- evalkit_tf446/lib/python3.10/site-packages/oauthlib/__pycache__/common.cpython-310.pyc +0 -0
- evalkit_tf446/lib/python3.10/site-packages/oauthlib/__pycache__/signals.cpython-310.pyc +0 -0
- evalkit_tf446/lib/python3.10/site-packages/oauthlib/__pycache__/uri_validate.cpython-310.pyc +0 -0
- evalkit_tf446/lib/python3.10/site-packages/oauthlib/common.py +432 -0
- evalkit_tf446/lib/python3.10/site-packages/oauthlib/oauth1/__init__.py +23 -0
- evalkit_tf446/lib/python3.10/site-packages/oauthlib/oauth1/__pycache__/__init__.cpython-310.pyc +0 -0
- evalkit_tf446/lib/python3.10/site-packages/oauthlib/oauth1/rfc5849/__init__.py +365 -0
- evalkit_tf446/lib/python3.10/site-packages/oauthlib/oauth1/rfc5849/__pycache__/__init__.cpython-310.pyc +0 -0
- evalkit_tf446/lib/python3.10/site-packages/oauthlib/oauth1/rfc5849/__pycache__/errors.cpython-310.pyc +0 -0
- evalkit_tf446/lib/python3.10/site-packages/oauthlib/oauth1/rfc5849/__pycache__/parameters.cpython-310.pyc +0 -0
- evalkit_tf446/lib/python3.10/site-packages/oauthlib/oauth1/rfc5849/__pycache__/signature.cpython-310.pyc +0 -0
- evalkit_tf446/lib/python3.10/site-packages/oauthlib/oauth1/rfc5849/__pycache__/utils.cpython-310.pyc +0 -0
- evalkit_tf446/lib/python3.10/site-packages/oauthlib/oauth1/rfc5849/endpoints/__init__.py +8 -0
- evalkit_tf446/lib/python3.10/site-packages/oauthlib/oauth1/rfc5849/endpoints/access_token.py +215 -0
- evalkit_tf446/lib/python3.10/site-packages/oauthlib/oauth1/rfc5849/endpoints/authorization.py +158 -0
- evalkit_tf446/lib/python3.10/site-packages/oauthlib/oauth1/rfc5849/endpoints/base.py +244 -0
- evalkit_tf446/lib/python3.10/site-packages/oauthlib/oauth1/rfc5849/endpoints/pre_configured.py +14 -0
- evalkit_tf446/lib/python3.10/site-packages/oauthlib/oauth1/rfc5849/endpoints/resource.py +163 -0
- evalkit_tf446/lib/python3.10/site-packages/oauthlib/oauth1/rfc5849/endpoints/signature_only.py +82 -0
- evalkit_tf446/lib/python3.10/site-packages/oauthlib/oauth1/rfc5849/errors.py +76 -0
- evalkit_tf446/lib/python3.10/site-packages/oauthlib/oauth1/rfc5849/parameters.py +133 -0
- evalkit_tf446/lib/python3.10/site-packages/oauthlib/oauth1/rfc5849/request_validator.py +849 -0
- evalkit_tf446/lib/python3.10/site-packages/oauthlib/oauth1/rfc5849/signature.py +852 -0
.gitattributes
CHANGED
|
@@ -2634,3 +2634,4 @@ evalkit_tf446/lib/python3.10/site-packages/cv2/qt/fonts/DejaVuSans-ExtraLight.tt
|
|
| 2634 |
evalkit_tf446/lib/python3.10/site-packages/cv2/qt/fonts/DejaVuSansCondensed.ttf filter=lfs diff=lfs merge=lfs -text
|
| 2635 |
evalkit_tf446/lib/python3.10/site-packages/cv2/qt/fonts/DejaVuSansCondensed-Oblique.ttf filter=lfs diff=lfs merge=lfs -text
|
| 2636 |
evalkit_tf446/lib/python3.10/site-packages/cv2/qt/fonts/DejaVuSansCondensed-BoldOblique.ttf filter=lfs diff=lfs merge=lfs -text
|
|
|
|
|
|
| 2634 |
evalkit_tf446/lib/python3.10/site-packages/cv2/qt/fonts/DejaVuSansCondensed.ttf filter=lfs diff=lfs merge=lfs -text
|
| 2635 |
evalkit_tf446/lib/python3.10/site-packages/cv2/qt/fonts/DejaVuSansCondensed-Oblique.ttf filter=lfs diff=lfs merge=lfs -text
|
| 2636 |
evalkit_tf446/lib/python3.10/site-packages/cv2/qt/fonts/DejaVuSansCondensed-BoldOblique.ttf filter=lfs diff=lfs merge=lfs -text
|
| 2637 |
+
evalkit_tf446/lib/python3.10/site-packages/cv2/qt/fonts/DejaVuSans-Oblique.ttf filter=lfs diff=lfs merge=lfs -text
|
evalkit_tf446/lib/python3.10/idlelib/HISTORY.txt
ADDED
|
@@ -0,0 +1,296 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
IDLE History
|
| 2 |
+
============
|
| 3 |
+
|
| 4 |
+
This file contains the release messages for previous IDLE releases.
|
| 5 |
+
As you read on you go back to the dark ages of IDLE's history.
|
| 6 |
+
|
| 7 |
+
|
| 8 |
+
What's New in IDLEfork 0.8.1?
|
| 9 |
+
=============================
|
| 10 |
+
|
| 11 |
+
*Release date: 22-Jul-2001*
|
| 12 |
+
|
| 13 |
+
- New tarball released as a result of the 'revitalisation' of the IDLEfork
|
| 14 |
+
project.
|
| 15 |
+
|
| 16 |
+
- This release requires python 2.1 or better. Compatibility with earlier
|
| 17 |
+
versions of python (especially ancient ones like 1.5x) is no longer a
|
| 18 |
+
priority in IDLEfork development.
|
| 19 |
+
|
| 20 |
+
- This release is based on a merging of the earlier IDLE fork work with current
|
| 21 |
+
cvs IDLE (post IDLE version 0.8), with some minor additional coding by Kurt
|
| 22 |
+
B. Kaiser and Stephen M. Gava.
|
| 23 |
+
|
| 24 |
+
- This release is basically functional but also contains some known breakages,
|
| 25 |
+
particularly with running things from the shell window. Also the debugger is
|
| 26 |
+
not working, but I believe this was the case with the previous IDLE fork
|
| 27 |
+
release (0.7.1) as well.
|
| 28 |
+
|
| 29 |
+
- This release is being made now to mark the point at which IDLEfork is
|
| 30 |
+
launching into a new stage of development.
|
| 31 |
+
|
| 32 |
+
- IDLEfork CVS will now be branched to enable further development and
|
| 33 |
+
exploration of the two "execution in a remote process" patches submitted by
|
| 34 |
+
David Scherer (David's is currently in IDLEfork) and GvR, while stabilisation
|
| 35 |
+
and development of less heavyweight improvements (like user customisation)
|
| 36 |
+
can continue on the trunk.
|
| 37 |
+
|
| 38 |
+
|
| 39 |
+
What's New in IDLEfork 0.7.1?
|
| 40 |
+
==============================
|
| 41 |
+
|
| 42 |
+
*Release date: 15-Aug-2000*
|
| 43 |
+
|
| 44 |
+
- First project tarball released.
|
| 45 |
+
|
| 46 |
+
- This was the first release of IDLE fork, which at this stage was a
|
| 47 |
+
combination of IDLE 0.5 and the VPython idle fork, with additional changes
|
| 48 |
+
coded by David Scherer, Peter Schneider-Kamp and Nicholas Riley.
|
| 49 |
+
|
| 50 |
+
|
| 51 |
+
|
| 52 |
+
IDLEfork 0.7.1 - 29 May 2000
|
| 53 |
+
-----------------------------
|
| 54 |
+
|
| 55 |
+
David Scherer <[email protected]>
|
| 56 |
+
|
| 57 |
+
- This is a modification of the CVS version of IDLE 0.5, updated as of
|
| 58 |
+
2000-03-09. It is alpha software and might be unstable. If it breaks, you
|
| 59 |
+
get to keep both pieces.
|
| 60 |
+
|
| 61 |
+
- If you have problems or suggestions, you should either contact me or post to
|
| 62 |
+
the list at http://www.python.org/mailman/listinfo/idle-dev (making it clear
|
| 63 |
+
that you are using this modified version of IDLE).
|
| 64 |
+
|
| 65 |
+
- Changes:
|
| 66 |
+
|
| 67 |
+
- The ExecBinding module, a replacement for ScriptBinding, executes programs
|
| 68 |
+
in a separate process, piping standard I/O through an RPC mechanism to an
|
| 69 |
+
OnDemandOutputWindow in IDLE. It supports executing unnamed programs
|
| 70 |
+
(through a temporary file). It does not yet support debugging.
|
| 71 |
+
|
| 72 |
+
- When running programs with ExecBinding, tracebacks will be clipped to
|
| 73 |
+
exclude system modules. If, however, a system module calls back into the
|
| 74 |
+
user program, that part of the traceback will be shown.
|
| 75 |
+
|
| 76 |
+
- The OnDemandOutputWindow class has been improved. In particular, it now
|
| 77 |
+
supports a readline() function used to implement user input, and a
|
| 78 |
+
scroll_clear() operation which is used to hide the output of a previous run
|
| 79 |
+
by scrolling it out of the window.
|
| 80 |
+
|
| 81 |
+
- Startup behavior has been changed. By default IDLE starts up with just a
|
| 82 |
+
blank editor window, rather than an interactive window. Opening a file in
|
| 83 |
+
such a blank window replaces the (nonexistent) contents of that window
|
| 84 |
+
instead of creating another window. Because of the need to have a
|
| 85 |
+
well-known port for the ExecBinding protocol, only one copy of IDLE can be
|
| 86 |
+
running. Additional invocations use the RPC mechanism to report their
|
| 87 |
+
command line arguments to the copy already running.
|
| 88 |
+
|
| 89 |
+
- The menus have been reorganized. In particular, the excessively large
|
| 90 |
+
'edit' menu has been split up into 'edit', 'format', and 'run'.
|
| 91 |
+
|
| 92 |
+
- 'Python Documentation' now works on Windows, if the win32api module is
|
| 93 |
+
present.
|
| 94 |
+
|
| 95 |
+
- A few key bindings have been changed: F1 now loads Python Documentation
|
| 96 |
+
instead of the IDLE help; shift-TAB is now a synonym for unindent.
|
| 97 |
+
|
| 98 |
+
- New modules:
|
| 99 |
+
|
| 100 |
+
ExecBinding.py Executes program through loader
|
| 101 |
+
loader.py Bootstraps user program
|
| 102 |
+
protocol.py RPC protocol
|
| 103 |
+
Remote.py User-process interpreter
|
| 104 |
+
spawn.py OS-specific code to start programs
|
| 105 |
+
|
| 106 |
+
- Files modified:
|
| 107 |
+
|
| 108 |
+
autoindent.py ( bindings tweaked )
|
| 109 |
+
bindings.py ( menus reorganized )
|
| 110 |
+
config.txt ( execbinding enabled )
|
| 111 |
+
editorwindow.py ( new menus, fixed 'Python Documentation' )
|
| 112 |
+
filelist.py ( hook for "open in same window" )
|
| 113 |
+
formatparagraph.py ( bindings tweaked )
|
| 114 |
+
idle.bat ( removed absolute pathname )
|
| 115 |
+
idle.pyw ( weird bug due to import with same name? )
|
| 116 |
+
iobinding.py ( open in same window, EOL convention )
|
| 117 |
+
keydefs.py ( bindings tweaked )
|
| 118 |
+
outputwindow.py ( readline, scroll_clear, etc )
|
| 119 |
+
pyshell.py ( changed startup behavior )
|
| 120 |
+
readme.txt ( <Recursion on file with id=1234567> )
|
| 121 |
+
|
| 122 |
+
|
| 123 |
+
|
| 124 |
+
IDLE 0.5 - February 2000 - Release Notes
|
| 125 |
+
----------------------------------------
|
| 126 |
+
|
| 127 |
+
This is an early release of IDLE, my own attempt at a Tkinter-based
|
| 128 |
+
IDE for Python.
|
| 129 |
+
|
| 130 |
+
(For a more detailed change log, see the file ChangeLog.)
|
| 131 |
+
|
| 132 |
+
FEATURES
|
| 133 |
+
|
| 134 |
+
IDLE has the following features:
|
| 135 |
+
|
| 136 |
+
- coded in 100% pure Python, using the Tkinter GUI toolkit (i.e. Tcl/Tk)
|
| 137 |
+
|
| 138 |
+
- cross-platform: works on Windows and Unix (on the Mac, there are
|
| 139 |
+
currently problems with Tcl/Tk)
|
| 140 |
+
|
| 141 |
+
- multi-window text editor with multiple undo, Python colorizing
|
| 142 |
+
and many other features, e.g. smart indent and call tips
|
| 143 |
+
|
| 144 |
+
- Python shell window (a.k.a. interactive interpreter)
|
| 145 |
+
|
| 146 |
+
- debugger (not complete, but you can set breakpoints, view and step)
|
| 147 |
+
|
| 148 |
+
USAGE
|
| 149 |
+
|
| 150 |
+
The main program is in the file "idle.py"; on Unix, you should be able
|
| 151 |
+
to run it by typing "./idle.py" to your shell. On Windows, you can
|
| 152 |
+
run it by double-clicking it; you can use idle.pyw to avoid popping up
|
| 153 |
+
a DOS console. If you want to pass command line arguments on Windows,
|
| 154 |
+
use the batch file idle.bat.
|
| 155 |
+
|
| 156 |
+
Command line arguments: files passed on the command line are executed,
|
| 157 |
+
not opened for editing, unless you give the -e command line option.
|
| 158 |
+
Try "./idle.py -h" to see other command line options.
|
| 159 |
+
|
| 160 |
+
IDLE requires Python 1.5.2, so it is currently only usable with a
|
| 161 |
+
Python 1.5.2 distribution. (An older version of IDLE is distributed
|
| 162 |
+
with Python 1.5.2; you can drop this version on top of it.)
|
| 163 |
+
|
| 164 |
+
COPYRIGHT
|
| 165 |
+
|
| 166 |
+
IDLE is covered by the standard Python copyright notice
|
| 167 |
+
(http://www.python.org/doc/Copyright.html).
|
| 168 |
+
|
| 169 |
+
|
| 170 |
+
New in IDLE 0.5 (2/15/2000)
|
| 171 |
+
---------------------------
|
| 172 |
+
|
| 173 |
+
Tons of stuff, much of it contributed by Tim Peters and Mark Hammond:
|
| 174 |
+
|
| 175 |
+
- Status bar, displaying current line/column (Moshe Zadka).
|
| 176 |
+
|
| 177 |
+
- Better stack viewer, using tree widget. (XXX Only used by Stack
|
| 178 |
+
Viewer menu, not by the debugger.)
|
| 179 |
+
|
| 180 |
+
- Format paragraph now recognizes Python block comments and reformats
|
| 181 |
+
them correctly (MH)
|
| 182 |
+
|
| 183 |
+
- New version of pyclbr.py parses top-level functions and understands
|
| 184 |
+
much more of Python's syntax; this is reflected in the class and path
|
| 185 |
+
browsers (TP)
|
| 186 |
+
|
| 187 |
+
- Much better auto-indent; knows how to indent the insides of
|
| 188 |
+
multi-line statements (TP)
|
| 189 |
+
|
| 190 |
+
- Call tip window pops up when you type the name of a known function
|
| 191 |
+
followed by an open parenthesis. Hit ESC or click elsewhere in the
|
| 192 |
+
window to close the tip window (MH)
|
| 193 |
+
|
| 194 |
+
- Comment out region now inserts ## to make it stand out more (TP)
|
| 195 |
+
|
| 196 |
+
- New path and class browsers based on a tree widget that looks
|
| 197 |
+
familiar to Windows users
|
| 198 |
+
|
| 199 |
+
- Reworked script running commands to be more intuitive: I/O now
|
| 200 |
+
always goes to the *Python Shell* window, and raw_input() works
|
| 201 |
+
correctly. You use F5 to import/reload a module: this adds the module
|
| 202 |
+
name to the __main__ namespace. You use Control-F5 to run a script:
|
| 203 |
+
this runs the script *in* the __main__ namespace. The latter also
|
| 204 |
+
sets sys.argv[] to the script name
|
| 205 |
+
|
| 206 |
+
|
| 207 |
+
New in IDLE 0.4 (4/7/99)
|
| 208 |
+
------------------------
|
| 209 |
+
|
| 210 |
+
Most important change: a new menu entry "File -> Path browser", shows
|
| 211 |
+
a 4-column hierarchical browser which lets you browse sys.path,
|
| 212 |
+
directories, modules, and classes. Yes, it's a superset of the Class
|
| 213 |
+
browser menu entry. There's also a new internal module,
|
| 214 |
+
MultiScrolledLists.py, which provides the framework for this dialog.
|
| 215 |
+
|
| 216 |
+
|
| 217 |
+
New in IDLE 0.3 (2/17/99)
|
| 218 |
+
-------------------------
|
| 219 |
+
|
| 220 |
+
Most important changes:
|
| 221 |
+
|
| 222 |
+
- Enabled support for running a module, with or without the debugger.
|
| 223 |
+
Output goes to a new window. Pressing F5 in a module is effectively a
|
| 224 |
+
reload of that module; Control-F5 loads it under the debugger.
|
| 225 |
+
|
| 226 |
+
- Re-enable tearing off the Windows menu, and make a torn-off Windows
|
| 227 |
+
menu update itself whenever a window is opened or closed.
|
| 228 |
+
|
| 229 |
+
- Menu items can now be have a checkbox (when the menu label starts
|
| 230 |
+
with "!"); use this for the Debugger and "Auto-open stack viewer"
|
| 231 |
+
(was: JIT stack viewer) menu items.
|
| 232 |
+
|
| 233 |
+
- Added a Quit button to the Debugger API.
|
| 234 |
+
|
| 235 |
+
- The current directory is explicitly inserted into sys.path.
|
| 236 |
+
|
| 237 |
+
- Fix the debugger (when using Python 1.5.2b2) to use canonical
|
| 238 |
+
filenames for breakpoints, so these actually work. (There's still a
|
| 239 |
+
lot of work to be done to the management of breakpoints in the
|
| 240 |
+
debugger though.)
|
| 241 |
+
|
| 242 |
+
- Closing a window that is still colorizing now actually works.
|
| 243 |
+
|
| 244 |
+
- Allow dragging of the separator between the two list boxes in the
|
| 245 |
+
class browser.
|
| 246 |
+
|
| 247 |
+
- Bind ESC to "close window" of the debugger, stack viewer and class
|
| 248 |
+
browser. It removes the selection highlighting in regular text
|
| 249 |
+
windows. (These are standard Windows conventions.)
|
| 250 |
+
|
| 251 |
+
|
| 252 |
+
New in IDLE 0.2 (1/8/99)
|
| 253 |
+
------------------------
|
| 254 |
+
|
| 255 |
+
Lots of changes; here are the highlights:
|
| 256 |
+
|
| 257 |
+
General:
|
| 258 |
+
|
| 259 |
+
- You can now write and configure your own IDLE extension modules; see
|
| 260 |
+
extend.txt.
|
| 261 |
+
|
| 262 |
+
|
| 263 |
+
File menu:
|
| 264 |
+
|
| 265 |
+
The command to open the Python shell window is now in the File menu.
|
| 266 |
+
|
| 267 |
+
|
| 268 |
+
Edit menu:
|
| 269 |
+
|
| 270 |
+
New Find dialog with more options; replace dialog; find in files dialog.
|
| 271 |
+
|
| 272 |
+
Commands to tabify or untabify a region.
|
| 273 |
+
|
| 274 |
+
Command to format a paragraph.
|
| 275 |
+
|
| 276 |
+
|
| 277 |
+
Debug menu:
|
| 278 |
+
|
| 279 |
+
JIT (Just-In-Time) stack viewer toggle -- if set, the stack viewer
|
| 280 |
+
automaticall pops up when you get a traceback.
|
| 281 |
+
|
| 282 |
+
Windows menu:
|
| 283 |
+
|
| 284 |
+
Zoom height -- make the window full height.
|
| 285 |
+
|
| 286 |
+
|
| 287 |
+
Help menu:
|
| 288 |
+
|
| 289 |
+
The help text now show up in a regular window so you can search and
|
| 290 |
+
even edit it if you like.
|
| 291 |
+
|
| 292 |
+
|
| 293 |
+
|
| 294 |
+
IDLE 0.1 was distributed with the Python 1.5.2b1 release on 12/22/98.
|
| 295 |
+
|
| 296 |
+
======================================================================
|
evalkit_tf446/lib/python3.10/idlelib/autocomplete.py
ADDED
|
@@ -0,0 +1,228 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""Complete either attribute names or file names.
|
| 2 |
+
|
| 3 |
+
Either on demand or after a user-selected delay after a key character,
|
| 4 |
+
pop up a list of candidates.
|
| 5 |
+
"""
|
| 6 |
+
import __main__
|
| 7 |
+
import keyword
|
| 8 |
+
import os
|
| 9 |
+
import string
|
| 10 |
+
import sys
|
| 11 |
+
|
| 12 |
+
# Modified keyword list is used in fetch_completions.
|
| 13 |
+
completion_kwds = [s for s in keyword.kwlist
|
| 14 |
+
if s not in {'True', 'False', 'None'}] # In builtins.
|
| 15 |
+
completion_kwds.extend(('match', 'case')) # Context keywords.
|
| 16 |
+
completion_kwds.sort()
|
| 17 |
+
|
| 18 |
+
# Two types of completions; defined here for autocomplete_w import below.
|
| 19 |
+
ATTRS, FILES = 0, 1
|
| 20 |
+
from idlelib import autocomplete_w
|
| 21 |
+
from idlelib.config import idleConf
|
| 22 |
+
from idlelib.hyperparser import HyperParser
|
| 23 |
+
|
| 24 |
+
# Tuples passed to open_completions.
|
| 25 |
+
# EvalFunc, Complete, WantWin, Mode
|
| 26 |
+
FORCE = True, False, True, None # Control-Space.
|
| 27 |
+
TAB = False, True, True, None # Tab.
|
| 28 |
+
TRY_A = False, False, False, ATTRS # '.' for attributes.
|
| 29 |
+
TRY_F = False, False, False, FILES # '/' in quotes for file name.
|
| 30 |
+
|
| 31 |
+
# This string includes all chars that may be in an identifier.
|
| 32 |
+
# TODO Update this here and elsewhere.
|
| 33 |
+
ID_CHARS = string.ascii_letters + string.digits + "_"
|
| 34 |
+
|
| 35 |
+
SEPS = f"{os.sep}{os.altsep if os.altsep else ''}"
|
| 36 |
+
TRIGGERS = f".{SEPS}"
|
| 37 |
+
|
| 38 |
+
class AutoComplete:
|
| 39 |
+
|
| 40 |
+
def __init__(self, editwin=None, tags=None):
|
| 41 |
+
self.editwin = editwin
|
| 42 |
+
if editwin is not None: # not in subprocess or no-gui test
|
| 43 |
+
self.text = editwin.text
|
| 44 |
+
self.tags = tags
|
| 45 |
+
self.autocompletewindow = None
|
| 46 |
+
# id of delayed call, and the index of the text insert when
|
| 47 |
+
# the delayed call was issued. If _delayed_completion_id is
|
| 48 |
+
# None, there is no delayed call.
|
| 49 |
+
self._delayed_completion_id = None
|
| 50 |
+
self._delayed_completion_index = None
|
| 51 |
+
|
| 52 |
+
@classmethod
|
| 53 |
+
def reload(cls):
|
| 54 |
+
cls.popupwait = idleConf.GetOption(
|
| 55 |
+
"extensions", "AutoComplete", "popupwait", type="int", default=0)
|
| 56 |
+
|
| 57 |
+
def _make_autocomplete_window(self): # Makes mocking easier.
|
| 58 |
+
return autocomplete_w.AutoCompleteWindow(self.text, tags=self.tags)
|
| 59 |
+
|
| 60 |
+
def _remove_autocomplete_window(self, event=None):
|
| 61 |
+
if self.autocompletewindow:
|
| 62 |
+
self.autocompletewindow.hide_window()
|
| 63 |
+
self.autocompletewindow = None
|
| 64 |
+
|
| 65 |
+
def force_open_completions_event(self, event):
|
| 66 |
+
"(^space) Open completion list, even if a function call is needed."
|
| 67 |
+
self.open_completions(FORCE)
|
| 68 |
+
return "break"
|
| 69 |
+
|
| 70 |
+
def autocomplete_event(self, event):
|
| 71 |
+
"(tab) Complete word or open list if multiple options."
|
| 72 |
+
if hasattr(event, "mc_state") and event.mc_state or\
|
| 73 |
+
not self.text.get("insert linestart", "insert").strip():
|
| 74 |
+
# A modifier was pressed along with the tab or
|
| 75 |
+
# there is only previous whitespace on this line, so tab.
|
| 76 |
+
return None
|
| 77 |
+
if self.autocompletewindow and self.autocompletewindow.is_active():
|
| 78 |
+
self.autocompletewindow.complete()
|
| 79 |
+
return "break"
|
| 80 |
+
else:
|
| 81 |
+
opened = self.open_completions(TAB)
|
| 82 |
+
return "break" if opened else None
|
| 83 |
+
|
| 84 |
+
def try_open_completions_event(self, event=None):
|
| 85 |
+
"(./) Open completion list after pause with no movement."
|
| 86 |
+
lastchar = self.text.get("insert-1c")
|
| 87 |
+
if lastchar in TRIGGERS:
|
| 88 |
+
args = TRY_A if lastchar == "." else TRY_F
|
| 89 |
+
self._delayed_completion_index = self.text.index("insert")
|
| 90 |
+
if self._delayed_completion_id is not None:
|
| 91 |
+
self.text.after_cancel(self._delayed_completion_id)
|
| 92 |
+
self._delayed_completion_id = self.text.after(
|
| 93 |
+
self.popupwait, self._delayed_open_completions, args)
|
| 94 |
+
|
| 95 |
+
def _delayed_open_completions(self, args):
|
| 96 |
+
"Call open_completions if index unchanged."
|
| 97 |
+
self._delayed_completion_id = None
|
| 98 |
+
if self.text.index("insert") == self._delayed_completion_index:
|
| 99 |
+
self.open_completions(args)
|
| 100 |
+
|
| 101 |
+
def open_completions(self, args):
|
| 102 |
+
"""Find the completions and create the AutoCompleteWindow.
|
| 103 |
+
Return True if successful (no syntax error or so found).
|
| 104 |
+
If complete is True, then if there's nothing to complete and no
|
| 105 |
+
start of completion, won't open completions and return False.
|
| 106 |
+
If mode is given, will open a completion list only in this mode.
|
| 107 |
+
"""
|
| 108 |
+
evalfuncs, complete, wantwin, mode = args
|
| 109 |
+
# Cancel another delayed call, if it exists.
|
| 110 |
+
if self._delayed_completion_id is not None:
|
| 111 |
+
self.text.after_cancel(self._delayed_completion_id)
|
| 112 |
+
self._delayed_completion_id = None
|
| 113 |
+
|
| 114 |
+
hp = HyperParser(self.editwin, "insert")
|
| 115 |
+
curline = self.text.get("insert linestart", "insert")
|
| 116 |
+
i = j = len(curline)
|
| 117 |
+
if hp.is_in_string() and (not mode or mode==FILES):
|
| 118 |
+
# Find the beginning of the string.
|
| 119 |
+
# fetch_completions will look at the file system to determine
|
| 120 |
+
# whether the string value constitutes an actual file name
|
| 121 |
+
# XXX could consider raw strings here and unescape the string
|
| 122 |
+
# value if it's not raw.
|
| 123 |
+
self._remove_autocomplete_window()
|
| 124 |
+
mode = FILES
|
| 125 |
+
# Find last separator or string start
|
| 126 |
+
while i and curline[i-1] not in "'\"" + SEPS:
|
| 127 |
+
i -= 1
|
| 128 |
+
comp_start = curline[i:j]
|
| 129 |
+
j = i
|
| 130 |
+
# Find string start
|
| 131 |
+
while i and curline[i-1] not in "'\"":
|
| 132 |
+
i -= 1
|
| 133 |
+
comp_what = curline[i:j]
|
| 134 |
+
elif hp.is_in_code() and (not mode or mode==ATTRS):
|
| 135 |
+
self._remove_autocomplete_window()
|
| 136 |
+
mode = ATTRS
|
| 137 |
+
while i and (curline[i-1] in ID_CHARS or ord(curline[i-1]) > 127):
|
| 138 |
+
i -= 1
|
| 139 |
+
comp_start = curline[i:j]
|
| 140 |
+
if i and curline[i-1] == '.': # Need object with attributes.
|
| 141 |
+
hp.set_index("insert-%dc" % (len(curline)-(i-1)))
|
| 142 |
+
comp_what = hp.get_expression()
|
| 143 |
+
if (not comp_what or
|
| 144 |
+
(not evalfuncs and comp_what.find('(') != -1)):
|
| 145 |
+
return None
|
| 146 |
+
else:
|
| 147 |
+
comp_what = ""
|
| 148 |
+
else:
|
| 149 |
+
return None
|
| 150 |
+
|
| 151 |
+
if complete and not comp_what and not comp_start:
|
| 152 |
+
return None
|
| 153 |
+
comp_lists = self.fetch_completions(comp_what, mode)
|
| 154 |
+
if not comp_lists[0]:
|
| 155 |
+
return None
|
| 156 |
+
self.autocompletewindow = self._make_autocomplete_window()
|
| 157 |
+
return not self.autocompletewindow.show_window(
|
| 158 |
+
comp_lists, "insert-%dc" % len(comp_start),
|
| 159 |
+
complete, mode, wantwin)
|
| 160 |
+
|
| 161 |
+
def fetch_completions(self, what, mode):
|
| 162 |
+
"""Return a pair of lists of completions for something. The first list
|
| 163 |
+
is a sublist of the second. Both are sorted.
|
| 164 |
+
|
| 165 |
+
If there is a Python subprocess, get the comp. list there. Otherwise,
|
| 166 |
+
either fetch_completions() is running in the subprocess itself or it
|
| 167 |
+
was called in an IDLE EditorWindow before any script had been run.
|
| 168 |
+
|
| 169 |
+
The subprocess environment is that of the most recently run script. If
|
| 170 |
+
two unrelated modules are being edited some calltips in the current
|
| 171 |
+
module may be inoperative if the module was not the last to run.
|
| 172 |
+
"""
|
| 173 |
+
try:
|
| 174 |
+
rpcclt = self.editwin.flist.pyshell.interp.rpcclt
|
| 175 |
+
except:
|
| 176 |
+
rpcclt = None
|
| 177 |
+
if rpcclt:
|
| 178 |
+
return rpcclt.remotecall("exec", "get_the_completion_list",
|
| 179 |
+
(what, mode), {})
|
| 180 |
+
else:
|
| 181 |
+
if mode == ATTRS:
|
| 182 |
+
if what == "": # Main module names.
|
| 183 |
+
namespace = {**__main__.__builtins__.__dict__,
|
| 184 |
+
**__main__.__dict__}
|
| 185 |
+
bigl = eval("dir()", namespace)
|
| 186 |
+
bigl.extend(completion_kwds)
|
| 187 |
+
bigl.sort()
|
| 188 |
+
if "__all__" in bigl:
|
| 189 |
+
smalll = sorted(eval("__all__", namespace))
|
| 190 |
+
else:
|
| 191 |
+
smalll = [s for s in bigl if s[:1] != '_']
|
| 192 |
+
else:
|
| 193 |
+
try:
|
| 194 |
+
entity = self.get_entity(what)
|
| 195 |
+
bigl = dir(entity)
|
| 196 |
+
bigl.sort()
|
| 197 |
+
if "__all__" in bigl:
|
| 198 |
+
smalll = sorted(entity.__all__)
|
| 199 |
+
else:
|
| 200 |
+
smalll = [s for s in bigl if s[:1] != '_']
|
| 201 |
+
except:
|
| 202 |
+
return [], []
|
| 203 |
+
|
| 204 |
+
elif mode == FILES:
|
| 205 |
+
if what == "":
|
| 206 |
+
what = "."
|
| 207 |
+
try:
|
| 208 |
+
expandedpath = os.path.expanduser(what)
|
| 209 |
+
bigl = os.listdir(expandedpath)
|
| 210 |
+
bigl.sort()
|
| 211 |
+
smalll = [s for s in bigl if s[:1] != '.']
|
| 212 |
+
except OSError:
|
| 213 |
+
return [], []
|
| 214 |
+
|
| 215 |
+
if not smalll:
|
| 216 |
+
smalll = bigl
|
| 217 |
+
return smalll, bigl
|
| 218 |
+
|
| 219 |
+
def get_entity(self, name):
|
| 220 |
+
"Lookup name in a namespace spanning sys.modules and __main.dict__."
|
| 221 |
+
return eval(name, {**sys.modules, **__main__.__dict__})
|
| 222 |
+
|
| 223 |
+
|
| 224 |
+
AutoComplete.reload()
|
| 225 |
+
|
| 226 |
+
if __name__ == '__main__':
|
| 227 |
+
from unittest import main
|
| 228 |
+
main('idlelib.idle_test.test_autocomplete', verbosity=2)
|
evalkit_tf446/lib/python3.10/idlelib/autoexpand.py
ADDED
|
@@ -0,0 +1,96 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
'''Complete the current word before the cursor with words in the editor.
|
| 2 |
+
|
| 3 |
+
Each menu selection or shortcut key selection replaces the word with a
|
| 4 |
+
different word with the same prefix. The search for matches begins
|
| 5 |
+
before the target and moves toward the top of the editor. It then starts
|
| 6 |
+
after the cursor and moves down. It then returns to the original word and
|
| 7 |
+
the cycle starts again.
|
| 8 |
+
|
| 9 |
+
Changing the current text line or leaving the cursor in a different
|
| 10 |
+
place before requesting the next selection causes AutoExpand to reset
|
| 11 |
+
its state.
|
| 12 |
+
|
| 13 |
+
There is only one instance of Autoexpand.
|
| 14 |
+
'''
|
| 15 |
+
import re
|
| 16 |
+
import string
|
| 17 |
+
|
| 18 |
+
|
| 19 |
+
class AutoExpand:
    """Cycle the word before the cursor through same-prefix words in the text.

    Each invocation of expand_word_event replaces the current word with
    the next candidate sharing its prefix.  Candidates are gathered from
    before the cursor (nearest first) and then after it; the cycle ends
    back at the original prefix.  Moving the cursor or editing the line
    between invocations resets the cycle.
    """

    wordchars = string.ascii_letters + string.digits + "_"

    def __init__(self, editwin):
        self.text = editwin.text
        self.bell = self.text.bell  # Audible cue for "no match" / "wrapped".
        self.state = None  # (words, index, insert_index, line_text) or None.

    def expand_word_event(self, event):
        "Replace the current word with the next expansion."
        cursor = self.text.index("insert")
        line_now = self.text.get("insert linestart", "insert lineend")
        words = None
        index = 0
        if self.state:
            words, index, prev_cursor, prev_line = self.state
            if prev_cursor != cursor or prev_line != line_now:
                words = None  # Cursor moved or line edited: start over.
        if words is None:
            words = self.getwords()
            index = 0
        if not words:
            self.bell()
            return "break"
        prefix = self.getprevword()
        self.text.delete("insert - %d chars" % len(prefix), "insert")
        replacement = words[index]
        index = (index + 1) % len(words)
        if index == 0:
            self.bell()  # Signal that the cycle wrapped around.
        self.text.insert("insert", replacement)
        self.state = (words,
                      index,
                      self.text.index("insert"),
                      self.text.get("insert linestart", "insert lineend"))
        return "break"

    def getwords(self):
        "Return a list of words that match the prefix before the cursor."
        prefix = self.getprevword()
        if not prefix:
            return []
        # prefix consists only of wordchars, so it is regex-safe.
        pattern = re.compile(r"\b" + prefix + r"\w+\b")
        hits_before = pattern.findall(self.text.get("1.0", "insert wordstart"))
        hits_after = pattern.findall(self.text.get("insert wordend", "end"))
        if not hits_before and not hits_after:
            return []
        # Deduplicate while keeping nearest-first order: words before the
        # cursor scanned backwards, then words after it in document order.
        seen = set()
        ordered = []
        for candidate in reversed(hits_before):
            if candidate not in seen:
                seen.add(candidate)
                ordered.append(candidate)
        for candidate in hits_after:
            if candidate not in seen:
                seen.add(candidate)
                ordered.append(candidate)
        ordered.append(prefix)  # Cycle ends back at the typed prefix.
        return ordered

    def getprevword(self):
        "Return the word prefix before the cursor."
        line_text = self.text.get("insert linestart", "insert")
        run = 0
        for ch in reversed(line_text):
            if ch not in self.wordchars:
                break
            run += 1
        return line_text[len(line_text) - run:]
|
| 92 |
+
|
| 93 |
+
|
| 94 |
+
if __name__ == '__main__':
    # Run this module's unit tests when executed directly.
    from unittest import main
    main('idlelib.idle_test.test_autoexpand', verbosity=2)
|
evalkit_tf446/lib/python3.10/idlelib/query.py
ADDED
|
@@ -0,0 +1,392 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""
|
| 2 |
+
Dialogs that query users and verify the answer before accepting.
|
| 3 |
+
|
| 4 |
+
Query is the generic base class for a popup dialog.
|
| 5 |
+
The user must either enter a valid answer or close the dialog.
|
| 6 |
+
Entries are validated when <Return> is entered or [Ok] is clicked.
|
| 7 |
+
Entries are ignored when [Cancel] or [X] are clicked.
|
| 8 |
+
The 'return value' is .result set to either a valid answer or None.
|
| 9 |
+
|
| 10 |
+
Subclass SectionName gets a name for a new config file section.
|
| 11 |
+
Configdialog uses it for new highlight theme and keybinding set names.
|
| 12 |
+
Subclass ModuleName gets a name for File => Open Module.
|
| 13 |
+
Subclass HelpSource gets menu item and path for additions to Help menu.
|
| 14 |
+
"""
|
| 15 |
+
# Query and Section name result from splitting GetCfgSectionNameDialog
|
| 16 |
+
# of configSectionNameDialog.py (temporarily config_sec.py) into
|
| 17 |
+
# generic and specific parts. 3.6 only, July 2016.
|
| 18 |
+
# ModuleName.entry_ok came from editor.EditorWindow.load_module.
|
| 19 |
+
# HelpSource was extracted from configHelpSourceEdit.py (temporarily
|
| 20 |
+
# config_help.py), with darwin code moved from ok to path_ok.
|
| 21 |
+
|
| 22 |
+
import importlib.util, importlib.abc
|
| 23 |
+
import os
|
| 24 |
+
import shlex
|
| 25 |
+
from sys import executable, platform # Platform is set for one test.
|
| 26 |
+
|
| 27 |
+
from tkinter import Toplevel, StringVar, BooleanVar, W, E, S
|
| 28 |
+
from tkinter.ttk import Frame, Button, Entry, Label, Checkbutton
|
| 29 |
+
from tkinter import filedialog
|
| 30 |
+
from tkinter.font import Font
|
| 31 |
+
from tkinter.simpledialog import _setup_dialog
|
| 32 |
+
|
| 33 |
+
class Query(Toplevel):
    """Base class for getting verified answer from a user.

    For this base class, accept any non-blank string.
    """
    def __init__(self, parent, title, message, *, text0='', used_names={},
                 _htest=False, _utest=False):
        """Create modal popup, return when destroyed.

        Additional subclass init must be done before this unless
        _utest=True is passed to suppress wait_window().

        title - string, title of popup dialog
        message - string, informational message to display
        text0 - initial value for entry
        used_names - names already in use
        _htest - bool, change box location when running htest
        _utest - bool, leave window hidden and not modal
        """
        self.parent = parent  # Needed for Font call.
        self.message = message
        self.text0 = text0
        # NOTE: used_names has a shared mutable default; it is only read
        # (subclasses test membership), so the sharing is harmless.
        self.used_names = used_names

        Toplevel.__init__(self, parent)
        self.withdraw()  # Hide while configuring, especially geometry.
        self.title(title)
        self.transient(parent)
        if not _utest:  # Otherwise fail when directly run unittest.
            self.grab_set()

        _setup_dialog(self)
        if self._windowingsystem == 'aqua':
            self.bind("<Command-.>", self.cancel)
        self.bind('<Key-Escape>', self.cancel)
        self.protocol("WM_DELETE_WINDOW", self.cancel)
        self.bind('<Key-Return>', self.ok)
        self.bind("<KP_Enter>", self.ok)

        self.create_widgets()
        self.update_idletasks()  # Need here for winfo_reqwidth below.
        self.geometry(  # Center dialog over parent (or below htest box).
                "+%d+%d" % (
                    parent.winfo_rootx() +
                    (parent.winfo_width()/2 - self.winfo_reqwidth()/2),
                    parent.winfo_rooty() +
                    ((parent.winfo_height()/2 - self.winfo_reqheight()/2)
                     if not _htest else 150)
                ) )
        self.resizable(height=False, width=False)

        if not _utest:
            self.deiconify()  # Unhide now that geometry set.
            self.entry.focus_set()
            self.wait_window()  # Block until the dialog is destroyed.

    def create_widgets(self, ok_text='OK'):  # Do not replace.
        """Create entry (rows 0-2), extras, and buttons.

        Entry stuff on rows 0-2, spanning cols 0-2.
        Buttons on row 99, cols 1, 2.
        """
        # Bind to self the widgets needed for entry_ok or unittest.
        self.frame = frame = Frame(self, padding=10)
        frame.grid(column=0, row=0, sticky='news')
        frame.grid_columnconfigure(0, weight=1)

        entrylabel = Label(frame, anchor='w', justify='left',
                           text=self.message)
        self.entryvar = StringVar(self, self.text0)
        self.entry = Entry(frame, width=30, textvariable=self.entryvar)
        self.error_font = Font(name='TkCaptionFont',
                               exists=True, root=self.parent)
        self.entry_error = Label(frame, text=' ', foreground='red',
                                 font=self.error_font)
        # Display or blank error by setting ['text'] =.
        entrylabel.grid(column=0, row=0, columnspan=3, padx=5, sticky=W)
        self.entry.grid(column=0, row=1, columnspan=3, padx=5, sticky=W+E,
                        pady=[10,0])
        self.entry_error.grid(column=0, row=2, columnspan=3, padx=5,
                              sticky=W+E)

        self.create_extra()

        self.button_ok = Button(
                frame, text=ok_text, default='active', command=self.ok)
        self.button_cancel = Button(
                frame, text='Cancel', command=self.cancel)

        self.button_ok.grid(column=1, row=99, padx=5)
        self.button_cancel.grid(column=2, row=99, padx=5)

    def create_extra(self): pass  # Override to add widgets.

    def showerror(self, message, widget=None):
        "Display message in the given error label (default: entry_error)."
        #self.bell(displayof=self)
        (widget or self.entry_error)['text'] = 'ERROR: ' + message

    def entry_ok(self):  # Example: usually replace.
        "Return non-blank entry or None."
        entry = self.entry.get().strip()
        if not entry:
            self.showerror('blank line.')
            return None
        return entry

    def ok(self, event=None):  # Do not replace.
        '''If entry is valid, bind it to 'result' and destroy tk widget.

        Otherwise leave dialog open for user to correct entry or cancel.
        '''
        self.entry_error['text'] = ''
        entry = self.entry_ok()
        if entry is not None:
            self.result = entry
            self.destroy()
        else:
            # [Ok] moves focus.  (<Return> does not.)  Move it back.
            self.entry.focus_set()

    def cancel(self, event=None):  # Do not replace.
        "Set dialog result to None and destroy tk widget."
        self.result = None
        self.destroy()

    def destroy(self):
        "Release the modal grab (if any) before destroying the window."
        self.grab_release()
        super().destroy()
|
| 161 |
+
|
| 162 |
+
|
| 163 |
+
class SectionName(Query):
    "Ask the user for a new config-file section name."
    # Used in ConfigDialog.GetNewKeysName, .GetNewThemeName (837)

    def __init__(self, parent, title, message, used_names,
                 *, _htest=False, _utest=False):
        "Forward used_names to Query as a keyword argument."
        super().__init__(parent, title, message, used_names=used_names,
                         _htest=_htest, _utest=_utest)

    def entry_ok(self):
        "Return sensible ConfigParser section name or None."
        name = self.entry.get().strip()
        # Determine the single applicable error, if any, then report it.
        error = None
        if not name:
            error = 'no name specified.'
        elif len(name) > 30:
            error = 'name is longer than 30 characters.'
        elif name in self.used_names:
            error = 'name is already in use.'
        if error is not None:
            self.showerror(error)
            return None
        return name
|
| 185 |
+
|
| 186 |
+
|
| 187 |
+
class ModuleName(Query):
    "Get a module name for Open Module menu entry."
    # Used in open_module (editor.EditorWindow until move to iobinding).

    def __init__(self, parent, title, message, text0,
                 *, _htest=False, _utest=False):
        "Forward the initial entry text to Query as text0."
        super().__init__(parent, title, message, text0=text0,
                         _htest=_htest, _utest=_utest)

    def entry_ok(self):
        """Return entered module name as file path or None.

        The name is resolved with importlib.util.find_spec; each
        failure mode shows an error in the dialog and returns None so
        the dialog stays open for correction.
        """
        name = self.entry.get().strip()
        if not name:
            self.showerror('no name specified.')
            return None
        # XXX Ought to insert current file's directory in front of path.
        try:
            spec = importlib.util.find_spec(name)
        except (ValueError, ImportError) as msg:
            # find_spec raises for malformed or unresolvable names;
            # surface its message directly.
            self.showerror(str(msg))
            return None
        if spec is None:
            self.showerror("module not found.")
            return None
        if not isinstance(spec.loader, importlib.abc.SourceLoader):
            # Only source-backed modules have a file to open in an editor.
            self.showerror("not a source-based module.")
            return None
        try:
            file_path = spec.loader.get_filename(name)
        except AttributeError:
            self.showerror("loader does not support get_filename.")
            return None
        except ImportError:
            # Some special modules require this (e.g. os.path)
            try:
                file_path = spec.loader.get_filename()
            except TypeError:
                self.showerror("loader failed to get filename.")
                return None
        return file_path
|
| 227 |
+
|
| 228 |
+
|
| 229 |
+
class Goto(Query):
    "Ask for a positive target line number for the editor's Go To Line."
    # Used in editor.EditorWindow.goto_line_event.

    def entry_ok(self):
        "Return the entered line number as a positive int, or None."
        raw = self.entry.get()
        try:
            lineno = int(raw)
        except ValueError:
            self.showerror('not a base 10 integer.')
            return None
        if lineno <= 0:
            self.showerror('not a positive integer.')
            return None
        return lineno
|
| 243 |
+
|
| 244 |
+
|
| 245 |
+
class HelpSource(Query):
    "Get menu name and help source for Help menu."
    # Used in ConfigDialog.HelpListItemAdd/Edit, (941/9)

    def __init__(self, parent, title, *, menuitem='', filepath='',
                 used_names={}, _htest=False, _utest=False):
        """Get menu entry and url/local file for Additional Help.

        User enters a name for the Help resource and a web url or file
        name.  The user can browse for the file.
        """
        self.filepath = filepath
        message = 'Name for item on Help menu:'
        super().__init__(
                parent, title, message, text0=menuitem,
                used_names=used_names, _htest=_htest, _utest=_utest)

    def create_extra(self):
        "Add path widgets to rows 10-12."
        frame = self.frame
        pathlabel = Label(frame, anchor='w', justify='left',
                          text='Help File Path: Enter URL or browse for file')
        self.pathvar = StringVar(self, self.filepath)
        self.path = Entry(frame, textvariable=self.pathvar, width=40)
        browse = Button(frame, text='Browse', width=8,
                        command=self.browse_file)
        self.path_error = Label(frame, text=' ', foreground='red',
                                font=self.error_font)

        pathlabel.grid(column=0, row=10, columnspan=3, padx=5, pady=[10,0],
                       sticky=W)
        self.path.grid(column=0, row=11, columnspan=2, padx=5, sticky=W+E,
                       pady=[10,0])
        browse.grid(column=2, row=11, padx=5, sticky=W+S)
        self.path_error.grid(column=0, row=12, columnspan=3, padx=5,
                             sticky=W+E)

    def askfilename(self, filetypes, initdir, initfile):  # htest #
        "Open a file dialog and return the chosen path (or '')."
        # Extracted from browse_file so can mock for unittests.
        # Cannot unittest as cannot simulate button clicks.
        # Test by running htest, such as by running this file.
        return filedialog.Open(parent=self, filetypes=filetypes)\
               .show(initialdir=initdir, initialfile=initfile)

    def browse_file(self):
        "Fill the path entry from a file-open dialog."
        filetypes = [
            ("HTML Files", "*.htm *.html", "TEXT"),
            ("PDF Files", "*.pdf", "TEXT"),
            ("Windows Help Files", "*.chm"),
            ("Text Files", "*.txt", "TEXT"),
            ("All Files", "*")]
        path = self.pathvar.get()
        if path:
            dir, base = os.path.split(path)
        else:
            base = None
            # No path yet: pick a sensible starting directory.
            if platform[:3] == 'win':
                dir = os.path.join(os.path.dirname(executable), 'Doc')
                if not os.path.isdir(dir):
                    dir = os.getcwd()
            else:
                dir = os.getcwd()
        file = self.askfilename(filetypes, dir, base)
        if file:
            self.pathvar.set(file)

    item_ok = SectionName.entry_ok  # localize for test override

    def path_ok(self):
        "Simple validity check for menu file path"
        path = self.path.get().strip()
        if not path: #no path specified
            self.showerror('no help file path specified.', self.path_error)
            return None
        elif not path.startswith(('www.', 'http')):
            # Local file: strip any 'file:' prefix and require existence.
            if path[:5] == 'file:':
                path = path[5:]
            if not os.path.exists(path):
                self.showerror('help file path does not exist.',
                               self.path_error)
                return None
            if platform == 'darwin':  # for Mac Safari
                path = "file://" + path
        return path

    def entry_ok(self):
        "Return apparently valid (name, path) or None"
        self.path_error['text'] = ''
        name = self.item_ok()
        path = self.path_ok()
        return None if name is None or path is None else (name, path)
|
| 336 |
+
|
| 337 |
+
class CustomRun(Query):
    """Get settings for custom run of module.

    1. Command line arguments to extend sys.argv.
    2. Whether to restart Shell or not.
    """
    # Used in runscript.run_custom_event

    def __init__(self, parent, title, *, cli_args=[],
                 _htest=False, _utest=False):
        """cli_args is a list of strings.

        The list is assigned to the default Entry StringVar.
        The strings are displayed joined by ' ' for display.
        """
        message = 'Command Line Arguments for sys.argv:'
        super().__init__(
                parent, title, message, text0=cli_args,
                _htest=_htest, _utest=_utest)

    def create_extra(self):
        "Add run mode on rows 10-12."
        frame = self.frame
        self.restartvar = BooleanVar(self, value=True)
        restart_box = Checkbutton(frame, variable=self.restartvar, onvalue=True,
                              offvalue=False, text='Restart shell')
        self.args_error = Label(frame, text=' ', foreground='red',
                                font=self.error_font)
        restart_box.grid(column=0, row=10, columnspan=3, padx=5, sticky='w')
        self.args_error.grid(column=0, row=12, columnspan=3, padx=5,
                             sticky='we')

    def cli_args_ok(self):
        "Validity check and parsing for command line arguments."
        raw = self.entry.get().strip()
        try:
            parsed = shlex.split(raw, posix=True)
        except ValueError as err:
            # shlex reports problems such as an unterminated quote.
            self.showerror(str(err))
            return None
        return parsed

    def entry_ok(self):
        "Return apparently valid (cli_args, restart) or None."
        args = self.cli_args_ok()
        restart = self.restartvar.get()
        if args is None:
            return None
        return (args, restart)
|
| 385 |
+
|
| 386 |
+
|
| 387 |
+
if __name__ == '__main__':
    # Run the unit tests first (exit=False so the htest below still runs),
    # then the human-verified GUI tests.
    from unittest import main
    main('idlelib.idle_test.test_query', verbosity=2, exit=False)

    from idlelib.idle_test.htest import run
    run(Query, HelpSource, CustomRun)
|
evalkit_tf446/lib/python3.10/idlelib/replace.py
ADDED
|
@@ -0,0 +1,307 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""Replace dialog for IDLE. Inherits SearchDialogBase for GUI.
|
| 2 |
+
Uses idlelib.searchengine.SearchEngine for search capability.
|
| 3 |
+
Defines various replace related functions like replace, replace all,
|
| 4 |
+
and replace+find.
|
| 5 |
+
"""
|
| 6 |
+
import re
|
| 7 |
+
|
| 8 |
+
from tkinter import StringVar, TclError
|
| 9 |
+
|
| 10 |
+
from idlelib.searchbase import SearchDialogBase
|
| 11 |
+
from idlelib import searchengine
|
| 12 |
+
|
| 13 |
+
|
| 14 |
+
def replace(text, insert_tags=None):
    """Open the shared ReplaceDialog for *text*, creating it on first use.

    One dialog is cached per root (on the searchengine instance) so that
    user entries and preferences persist across invocations.

    Args:
        text: Text widget containing the text to be searched.
        insert_tags: Tags applied to text inserted as a replacement.
    """
    root = text._root()
    engine = searchengine.get(root)
    if not hasattr(engine, "_replacedialog"):
        engine._replacedialog = ReplaceDialog(root, engine)
    engine._replacedialog.open(text, insert_tags=insert_tags)
|
| 29 |
+
|
| 30 |
+
|
| 31 |
+
class ReplaceDialog(SearchDialogBase):
|
| 32 |
+
"Dialog for finding and replacing a pattern in text."
|
| 33 |
+
|
| 34 |
+
title = "Replace Dialog"
|
| 35 |
+
icon = "Replace"
|
| 36 |
+
|
| 37 |
+
def __init__(self, root, engine):
|
| 38 |
+
"""Create search dialog for finding and replacing text.
|
| 39 |
+
|
| 40 |
+
Uses SearchDialogBase as the basis for the GUI and a
|
| 41 |
+
searchengine instance to prepare the search.
|
| 42 |
+
|
| 43 |
+
Attributes:
|
| 44 |
+
replvar: StringVar containing 'Replace with:' value.
|
| 45 |
+
replent: Entry widget for replvar. Created in
|
| 46 |
+
create_entries().
|
| 47 |
+
ok: Boolean used in searchengine.search_text to indicate
|
| 48 |
+
whether the search includes the selection.
|
| 49 |
+
"""
|
| 50 |
+
super().__init__(root, engine)
|
| 51 |
+
self.replvar = StringVar(root)
|
| 52 |
+
self.insert_tags = None
|
| 53 |
+
|
| 54 |
+
def open(self, text, insert_tags=None):
|
| 55 |
+
"""Make dialog visible on top of others and ready to use.
|
| 56 |
+
|
| 57 |
+
Also, highlight the currently selected text and set the
|
| 58 |
+
search to include the current selection (self.ok).
|
| 59 |
+
|
| 60 |
+
Args:
|
| 61 |
+
text: Text widget being searched.
|
| 62 |
+
"""
|
| 63 |
+
SearchDialogBase.open(self, text)
|
| 64 |
+
try:
|
| 65 |
+
first = text.index("sel.first")
|
| 66 |
+
except TclError:
|
| 67 |
+
first = None
|
| 68 |
+
try:
|
| 69 |
+
last = text.index("sel.last")
|
| 70 |
+
except TclError:
|
| 71 |
+
last = None
|
| 72 |
+
first = first or text.index("insert")
|
| 73 |
+
last = last or first
|
| 74 |
+
self.show_hit(first, last)
|
| 75 |
+
self.ok = True
|
| 76 |
+
self.insert_tags = insert_tags
|
| 77 |
+
|
| 78 |
+
def create_entries(self):
|
| 79 |
+
"Create base and additional label and text entry widgets."
|
| 80 |
+
SearchDialogBase.create_entries(self)
|
| 81 |
+
self.replent = self.make_entry("Replace with:", self.replvar)[0]
|
| 82 |
+
|
| 83 |
+
def create_command_buttons(self):
|
| 84 |
+
"""Create base and additional command buttons.
|
| 85 |
+
|
| 86 |
+
The additional buttons are for Find, Replace,
|
| 87 |
+
Replace+Find, and Replace All.
|
| 88 |
+
"""
|
| 89 |
+
SearchDialogBase.create_command_buttons(self)
|
| 90 |
+
self.make_button("Find", self.find_it)
|
| 91 |
+
self.make_button("Replace", self.replace_it)
|
| 92 |
+
self.make_button("Replace+Find", self.default_command, isdef=True)
|
| 93 |
+
self.make_button("Replace All", self.replace_all)
|
| 94 |
+
|
| 95 |
+
def find_it(self, event=None):
|
| 96 |
+
"Handle the Find button."
|
| 97 |
+
self.do_find(False)
|
| 98 |
+
|
| 99 |
+
def replace_it(self, event=None):
|
| 100 |
+
"""Handle the Replace button.
|
| 101 |
+
|
| 102 |
+
If the find is successful, then perform replace.
|
| 103 |
+
"""
|
| 104 |
+
if self.do_find(self.ok):
|
| 105 |
+
self.do_replace()
|
| 106 |
+
|
| 107 |
+
def default_command(self, event=None):
|
| 108 |
+
"""Handle the Replace+Find button as the default command.
|
| 109 |
+
|
| 110 |
+
First performs a replace and then, if the replace was
|
| 111 |
+
successful, a find next.
|
| 112 |
+
"""
|
| 113 |
+
if self.do_find(self.ok):
|
| 114 |
+
if self.do_replace(): # Only find next match if replace succeeded.
|
| 115 |
+
# A bad re can cause it to fail.
|
| 116 |
+
self.do_find(False)
|
| 117 |
+
|
| 118 |
+
def _replace_expand(self, m, repl):
|
| 119 |
+
"Expand replacement text if regular expression."
|
| 120 |
+
if self.engine.isre():
|
| 121 |
+
try:
|
| 122 |
+
new = m.expand(repl)
|
| 123 |
+
except re.error:
|
| 124 |
+
self.engine.report_error(repl, 'Invalid Replace Expression')
|
| 125 |
+
new = None
|
| 126 |
+
else:
|
| 127 |
+
new = repl
|
| 128 |
+
|
| 129 |
+
return new
|
| 130 |
+
|
| 131 |
+
def replace_all(self, event=None):
|
| 132 |
+
"""Handle the Replace All button.
|
| 133 |
+
|
| 134 |
+
Search text for occurrences of the Find value and replace
|
| 135 |
+
each of them. The 'wrap around' value controls the start
|
| 136 |
+
point for searching. If wrap isn't set, then the searching
|
| 137 |
+
starts at the first occurrence after the current selection;
|
| 138 |
+
if wrap is set, the replacement starts at the first line.
|
| 139 |
+
The replacement is always done top-to-bottom in the text.
|
| 140 |
+
"""
|
| 141 |
+
prog = self.engine.getprog()
|
| 142 |
+
if not prog:
|
| 143 |
+
return
|
| 144 |
+
repl = self.replvar.get()
|
| 145 |
+
text = self.text
|
| 146 |
+
res = self.engine.search_text(text, prog)
|
| 147 |
+
if not res:
|
| 148 |
+
self.bell()
|
| 149 |
+
return
|
| 150 |
+
text.tag_remove("sel", "1.0", "end")
|
| 151 |
+
text.tag_remove("hit", "1.0", "end")
|
| 152 |
+
line = res[0]
|
| 153 |
+
col = res[1].start()
|
| 154 |
+
if self.engine.iswrap():
|
| 155 |
+
line = 1
|
| 156 |
+
col = 0
|
| 157 |
+
ok = True
|
| 158 |
+
first = last = None
|
| 159 |
+
# XXX ought to replace circular instead of top-to-bottom when wrapping
|
| 160 |
+
text.undo_block_start()
|
| 161 |
+
while res := self.engine.search_forward(
|
| 162 |
+
text, prog, line, col, wrap=False, ok=ok):
|
| 163 |
+
line, m = res
|
| 164 |
+
chars = text.get("%d.0" % line, "%d.0" % (line+1))
|
| 165 |
+
orig = m.group()
|
| 166 |
+
new = self._replace_expand(m, repl)
|
| 167 |
+
if new is None:
|
| 168 |
+
break
|
| 169 |
+
i, j = m.span()
|
| 170 |
+
first = "%d.%d" % (line, i)
|
| 171 |
+
last = "%d.%d" % (line, j)
|
| 172 |
+
if new == orig:
|
| 173 |
+
text.mark_set("insert", last)
|
| 174 |
+
else:
|
| 175 |
+
text.mark_set("insert", first)
|
| 176 |
+
if first != last:
|
| 177 |
+
text.delete(first, last)
|
| 178 |
+
if new:
|
| 179 |
+
text.insert(first, new, self.insert_tags)
|
| 180 |
+
col = i + len(new)
|
| 181 |
+
ok = False
|
| 182 |
+
text.undo_block_stop()
|
| 183 |
+
if first and last:
|
| 184 |
+
self.show_hit(first, last)
|
| 185 |
+
self.close()
|
| 186 |
+
|
| 187 |
+
def do_find(self, ok=False):
    """Find and highlight the next occurrence of the current pattern.

    No replacement is performed; the match is only selected and
    scrolled into view.  Returns True when a match was shown,
    False when the pattern is invalid or nothing matched.
    """
    if not self.engine.getprog():
        return False
    widget = self.text
    hit = self.engine.search_text(widget, None, ok)
    if not hit:
        self.bell()
        return False
    line, match = hit
    start, end = match.span()
    first = "%d.%d" % (line, start)
    last = "%d.%d" % (line, end)
    self.show_hit(first, last)
    self.ok = True
    return True
|
| 206 |
+
|
| 207 |
+
def do_replace(self):
    """Replace the current match of the search pattern, if any.

    The match is looked for starting at the current selection (or at
    the insert point when there is no selection) and is replaced by
    the expanded replacement string.  Returns True if a replacement
    was made, False otherwise.
    """
    prog = self.engine.getprog()
    if not prog:
        return False
    text = self.text
    try:
        first = pos = text.index("sel.first")
        last = text.index("sel.last")
    except TclError:
        pos = None
    if not pos:
        # No selection: start at the insertion cursor.
        first = last = pos = text.index("insert")
    line, col = searchengine.get_line_col(pos)
    chars = text.get("%d.0" % line, "%d.0" % (line+1))
    m = prog.match(chars, col)
    # Bug fix: this previously tested 'prog' (always truthy here, since it
    # was checked above) instead of the match result, so a failed match
    # fell through and crashed later with m == None.
    if not m:
        return False
    new = self._replace_expand(m, self.replvar.get())
    if new is None:
        return False
    text.mark_set("insert", first)
    text.undo_block_start()
    if m.group():
        # Non-empty match: delete the matched text.
        text.delete(first, last)
    if new:
        # Non-empty replacement: insert it with the configured tags.
        text.insert(first, new, self.insert_tags)
    text.undo_block_stop()
    self.show_hit(first, text.index("insert"))
    self.ok = False
    return True
|
| 238 |
+
|
| 239 |
+
def show_hit(self, first, last):
    """Highlight the text between the indices *first* and *last*.

    The span is tagged with both 'sel' and 'hit' and brought into
    view.  Because the 'sel' tag is added first, its colors are the
    ones actually shown instead of the 'hit' colors.
    """
    widget = self.text
    widget.mark_set("insert", first)
    widget.tag_remove("sel", "1.0", "end")
    widget.tag_add("sel", first, last)
    widget.tag_remove("hit", "1.0", "end")
    # A zero-width hit is tagged at a single index, a real span at both.
    span = (first,) if first == last else (first, last)
    widget.tag_add("hit", *span)
    widget.see("insert")
    widget.update_idletasks()
|
| 261 |
+
|
| 262 |
+
def close(self, event=None):
    """Dismiss the dialog, then clear 'hit' highlighting and tags."""
    SearchDialogBase.close(self, event)
    widget = self.text
    widget.tag_remove("hit", "1.0", "end")
    self.insert_tags = None
|
| 267 |
+
|
| 268 |
+
|
| 269 |
+
def _replace_dialog(parent):  # htest #
    """Build a manual (human-verified) test window for ReplaceDialog."""
    from tkinter import Toplevel, Text, END, SEL
    from tkinter.ttk import Frame, Button

    top = Toplevel(parent)
    top.title("Test ReplaceDialog")
    # Place the test window just below the htest control window.
    x, y = map(int, parent.geometry().split('+')[1:])
    top.geometry("+%d+%d" % (x, y + 175))

    # mock undo delegator methods
    def undo_block_start():
        pass

    def undo_block_stop():
        pass

    frame = Frame(top)
    frame.pack()
    text = Text(frame, inactiveselectbackground='gray')
    # Attach the no-op undo hooks so ReplaceDialog can call them safely.
    text.undo_block_start = undo_block_start
    text.undo_block_stop = undo_block_stop
    text.pack()
    text.insert("insert","This is a sample sTring\nPlus MORE.")
    text.focus_set()

    def show_replace():
        # Select everything, open the dialog, then clear the selection.
        text.tag_add(SEL, "1.0", END)
        replace(text)
        text.tag_remove(SEL, "1.0", END)

    button = Button(frame, text="Replace", command=show_replace)
    button.pack()
|
| 301 |
+
|
| 302 |
+
if __name__ == '__main__':
    from unittest import main
    # Run this module's unit tests without exiting the process,
    # so the human test below still runs afterwards.
    main('idlelib.idle_test.test_replace', verbosity=2, exit=False)

    from idlelib.idle_test.htest import run
    run(_replace_dialog)
|
evalkit_tf446/lib/python3.10/idlelib/rpc.py
ADDED
|
@@ -0,0 +1,635 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""RPC Implementation, originally written for the Python Idle IDE
|
| 2 |
+
|
| 3 |
+
For security reasons, GvR requested that Idle's Python execution server process
|
| 4 |
+
connect to the Idle process, which listens for the connection. Since Idle has
|
| 5 |
+
only one client per server, this was not a limitation.
|
| 6 |
+
|
| 7 |
+
+---------------------------------+ +-------------+
|
| 8 |
+
| socketserver.BaseRequestHandler | | SocketIO |
|
| 9 |
+
+---------------------------------+ +-------------+
|
| 10 |
+
^ | register() |
|
| 11 |
+
| | unregister()|
|
| 12 |
+
| +-------------+
|
| 13 |
+
| ^ ^
|
| 14 |
+
| | |
|
| 15 |
+
| + -------------------+ |
|
| 16 |
+
| | |
|
| 17 |
+
+-------------------------+ +-----------------+
|
| 18 |
+
| RPCHandler | | RPCClient |
|
| 19 |
+
| [attribute of RPCServer]| | |
|
| 20 |
+
+-------------------------+ +-----------------+
|
| 21 |
+
|
| 22 |
+
The RPCServer handler class is expected to provide register/unregister methods.
|
| 23 |
+
RPCHandler inherits the mix-in class SocketIO, which provides these methods.
|
| 24 |
+
|
| 25 |
+
See the Idle run.main() docstring for further information on how this was
|
| 26 |
+
accomplished in Idle.
|
| 27 |
+
|
| 28 |
+
"""
|
| 29 |
+
import builtins
|
| 30 |
+
import copyreg
|
| 31 |
+
import io
|
| 32 |
+
import marshal
|
| 33 |
+
import os
|
| 34 |
+
import pickle
|
| 35 |
+
import queue
|
| 36 |
+
import select
|
| 37 |
+
import socket
|
| 38 |
+
import socketserver
|
| 39 |
+
import struct
|
| 40 |
+
import sys
|
| 41 |
+
import threading
|
| 42 |
+
import traceback
|
| 43 |
+
import types
|
| 44 |
+
|
| 45 |
+
def unpickle_code(ms):
    """Reconstruct a code object from the marshalled bytes *ms*."""
    code_obj = marshal.loads(ms)
    assert isinstance(code_obj, types.CodeType)
    return code_obj


def pickle_code(co):
    """Reduce code object *co* to (unpickle_code, (marshalled_bytes,)).

    This is the reducer registered for types.CodeType so that code
    objects can travel through pickle.
    """
    assert isinstance(co, types.CodeType)
    return unpickle_code, (marshal.dumps(co),)
|
| 56 |
+
|
| 57 |
+
def dumps(obj, protocol=None):
    """Pickle *obj* to bytes, marshalling any embedded code objects.

    IDLE passes protocol=None to select pickle.DEFAULT_PROTOCOL.
    """
    buffer = io.BytesIO()
    CodePickler(buffer, protocol).dump(obj)
    return buffer.getvalue()
|
| 64 |
+
|
| 65 |
+
|
| 66 |
+
class CodePickler(pickle.Pickler):
    """Pickler that can also serialize code objects (via marshal).

    The per-pickler dispatch_table maps types.CodeType to the
    pickle_code reducer and then spreads in the standard copyreg
    table; the entry order of this dict literal is intentional.
    """
    dispatch_table = {types.CodeType: pickle_code, **copyreg.dispatch_table}
|
| 68 |
+
|
| 69 |
+
|
| 70 |
+
BUFSIZE = 8*1024
|
| 71 |
+
LOCALHOST = '127.0.0.1'
|
| 72 |
+
|
| 73 |
+
class RPCServer(socketserver.TCPServer):
    """TCPServer variant whose socket *connects out* instead of listening.

    IDLE reverses the usual roles: this "server" process initiates the
    TCP connection to the listening IDLE GUI process, so the normal
    bind/listen phases are overridden away.
    """

    def __init__(self, addr, handlerclass=None):
        # Default to RPCHandler, which mixes in SocketIO for messaging.
        if handlerclass is None:
            handlerclass = RPCHandler
        socketserver.TCPServer.__init__(self, addr, handlerclass)

    def server_bind(self):
        "Override TCPServer method, no bind() phase for connecting entity"
        pass

    def server_activate(self):
        """Override TCPServer method, connect() instead of listen()

        Due to the reversed connection, self.server_address is actually the
        address of the Idle Client to which we are connecting.

        """
        self.socket.connect(self.server_address)

    def get_request(self):
        "Override TCPServer method, return already connected socket"
        return self.socket, self.server_address

    def handle_error(self, request, client_address):
        """Override TCPServer method

        Error message goes to __stderr__.  No error message if exiting
        normally or socket raised EOF.  Other exceptions not handled in
        server code will cause os._exit.

        """
        try:
            # Re-raise the exception currently being handled so it can
            # be classified by the except clauses below.
            raise
        except SystemExit:
            raise
        except:
            erf = sys.__stderr__
            print('\n' + '-'*40, file=erf)
            print('Unhandled server exception!', file=erf)
            print('Thread: %s' % threading.current_thread().name, file=erf)
            print('Client Address: ', client_address, file=erf)
            print('Request: ', repr(request), file=erf)
            traceback.print_exc(file=erf)
            print('\n*** Unrecoverable, server exiting!', file=erf)
            print('-'*40, file=erf)
            # Hard exit: server state cannot be trusted after an
            # unexpected error.
            os._exit(0)
|
| 120 |
+
|
| 121 |
+
#----------------- end class RPCServer --------------------
|
| 122 |
+
|
| 123 |
+
objecttable = {}
|
| 124 |
+
request_queue = queue.Queue(0)
|
| 125 |
+
response_queue = queue.Queue(0)
|
| 126 |
+
|
| 127 |
+
|
| 128 |
+
class SocketIO:
    """Mix-in implementing message-based RPC over a socket.

    Provides sequence numbering, pickling of length-prefixed packets,
    dispatch of incoming requests, and delivery of responses to the
    threads that are waiting for them.  Exactly one thread (the one
    that created the instance) services the socket.
    """

    # Sequence counter; stepped by 2 so each side of the link keeps its
    # own parity (RPCClient overrides the start value to 1).
    nextseq = 0

    def __init__(self, sock, objtable=None, debugging=None):
        # Remember which thread owns the socket; only it may read it.
        self.sockthread = threading.current_thread()
        if debugging is not None:
            self.debugging = debugging
        self.sock = sock
        if objtable is None:
            objtable = objecttable
        self.objtable = objtable
        # responses/cvars map sequence numbers to results / condition
        # variables for calls issued from non-socket threads.
        self.responses = {}
        self.cvars = {}

    def close(self):
        # Clear self.sock first so concurrent senders see it gone.
        sock = self.sock
        self.sock = None
        if sock is not None:
            sock.close()

    def exithook(self):
        "override for specific exit action"
        os._exit(0)

    def debug(self, *args):
        # Writes to the real stderr; self.location ("#S"/"#C") is
        # supplied by subclasses.
        if not self.debugging:
            return
        s = self.location + " " + str(threading.current_thread().name)
        for a in args:
            s = s + " " + str(a)
        print(s, file=sys.__stderr__)

    def register(self, oid, object):
        # Make 'object' callable from the peer under id 'oid'.
        self.objtable[oid] = object

    def unregister(self, oid):
        try:
            del self.objtable[oid]
        except KeyError:
            pass

    def localcall(self, seq, request):
        """Execute a request received from the peer; return a reply tuple."""
        self.debug("localcall:", request)
        try:
            how, (oid, methodname, args, kwargs) = request
        except TypeError:
            return ("ERROR", "Bad request format")
        if oid not in self.objtable:
            return ("ERROR", "Unknown object id: %r" % (oid,))
        obj = self.objtable[oid]
        # The two pseudo-methods let the peer introspect the object.
        if methodname == "__methods__":
            methods = {}
            _getmethods(obj, methods)
            return ("OK", methods)
        if methodname == "__attributes__":
            attributes = {}
            _getattributes(obj, attributes)
            return ("OK", attributes)
        if not hasattr(obj, methodname):
            return ("ERROR", "Unsupported method name: %r" % (methodname,))
        method = getattr(obj, methodname)
        try:
            if how == 'CALL':
                ret = method(*args, **kwargs)
                if isinstance(ret, RemoteObject):
                    # Pass marked objects back by reference, not value.
                    ret = remoteref(ret)
                return ("OK", ret)
            elif how == 'QUEUE':
                # Hand off to a worker thread via the module queue.
                request_queue.put((seq, (method, args, kwargs)))
                return("QUEUED", None)
            else:
                return ("ERROR", "Unsupported message type: %s" % how)
        except SystemExit:
            raise
        except KeyboardInterrupt:
            raise
        except OSError:
            raise
        except Exception as ex:
            # User-code exception: ship it back to the remote caller.
            return ("CALLEXC", ex)
        except:
            msg = "*** Internal Error: rpc.py:SocketIO.localcall()\n\n"\
                  " Object: %s \n Method: %s \n Args: %s\n"
            print(msg % (oid, method, args), file=sys.__stderr__)
            traceback.print_exc(file=sys.__stderr__)
            return ("EXCEPTION", None)

    def remotecall(self, oid, methodname, args, kwargs):
        # Synchronous call: send the request, block until the reply.
        self.debug("remotecall:asynccall: ", oid, methodname)
        seq = self.asynccall(oid, methodname, args, kwargs)
        return self.asyncreturn(seq)

    def remotequeue(self, oid, methodname, args, kwargs):
        # Queue the request on the peer, then block for its reply.
        self.debug("remotequeue:asyncqueue: ", oid, methodname)
        seq = self.asyncqueue(oid, methodname, args, kwargs)
        return self.asyncreturn(seq)

    def asynccall(self, oid, methodname, args, kwargs):
        request = ("CALL", (oid, methodname, args, kwargs))
        seq = self.newseq()
        if threading.current_thread() != self.sockthread:
            # A non-socket thread will wait; give it a condition variable.
            cvar = threading.Condition()
            self.cvars[seq] = cvar
        self.debug(("asynccall:%d:" % seq), oid, methodname, args, kwargs)
        self.putmessage((seq, request))
        return seq

    def asyncqueue(self, oid, methodname, args, kwargs):
        request = ("QUEUE", (oid, methodname, args, kwargs))
        seq = self.newseq()
        if threading.current_thread() != self.sockthread:
            cvar = threading.Condition()
            self.cvars[seq] = cvar
        self.debug(("asyncqueue:%d:" % seq), oid, methodname, args, kwargs)
        self.putmessage((seq, request))
        return seq

    def asyncreturn(self, seq):
        # Block until the response for 'seq' arrives, then decode it.
        self.debug("asyncreturn:%d:call getresponse(): " % seq)
        response = self.getresponse(seq, wait=0.05)
        self.debug(("asyncreturn:%d:response: " % seq), response)
        return self.decoderesponse(response)

    def decoderesponse(self, response):
        # Translate a (how, what) wire tuple into a return value or an
        # exception raised in the caller's thread.
        how, what = response
        if how == "OK":
            return what
        if how == "QUEUED":
            return None
        if how == "EXCEPTION":
            self.debug("decoderesponse: EXCEPTION")
            return None
        if how == "EOF":
            self.debug("decoderesponse: EOF")
            self.decode_interrupthook()
            return None
        if how == "ERROR":
            self.debug("decoderesponse: Internal ERROR:", what)
            raise RuntimeError(what)
        if how == "CALLEXC":
            self.debug("decoderesponse: Call Exception:", what)
            raise what
        raise SystemError(how, what)

    def decode_interrupthook(self):
        ""
        raise EOFError

    def mainloop(self):
        """Listen on socket until I/O not ready or EOF

        pollresponse() will loop looking for seq number None, which
        never comes, and exit on EOFError.

        """
        try:
            self.getresponse(myseq=None, wait=0.05)
        except EOFError:
            self.debug("mainloop:return")
            return

    def getresponse(self, myseq, wait):
        response = self._getresponse(myseq, wait)
        if response is not None:
            how, what = response
            if how == "OK":
                # Wrap any RemoteProxy payloads in local RPCProxy objects.
                response = how, self._proxify(what)
        return response

    def _proxify(self, obj):
        if isinstance(obj, RemoteProxy):
            return RPCProxy(self, obj.oid)
        if isinstance(obj, list):
            return list(map(self._proxify, obj))
        # XXX Check for other types -- not currently needed
        return obj

    def _getresponse(self, myseq, wait):
        self.debug("_getresponse:myseq:", myseq)
        if threading.current_thread() is self.sockthread:
            # this thread does all reading of requests or responses
            while True:
                response = self.pollresponse(myseq, wait)
                if response is not None:
                    return response
        else:
            # wait for notification from socket handling thread
            cvar = self.cvars[myseq]
            cvar.acquire()
            while myseq not in self.responses:
                cvar.wait()
            response = self.responses[myseq]
            self.debug("_getresponse:%s: thread woke up: response: %s" %
                       (myseq, response))
            del self.responses[myseq]
            del self.cvars[myseq]
            cvar.release()
            return response

    def newseq(self):
        # Step by 2 so each side of the link keeps its own parity.
        self.nextseq = seq = self.nextseq + 2
        return seq

    def putmessage(self, message):
        self.debug("putmessage:%d:" % message[0])
        try:
            s = dumps(message)
        except pickle.PicklingError:
            print("Cannot pickle:", repr(message), file=sys.__stderr__)
            raise
        # Length-prefix the pickle with a little-endian 4-byte count.
        s = struct.pack("<i", len(s)) + s
        while len(s) > 0:
            try:
                r, w, x = select.select([], [self.sock], [])
                n = self.sock.send(s[:BUFSIZE])
            except (AttributeError, TypeError):
                # self.sock was closed/cleared under us.
                raise OSError("socket no longer exists")
            s = s[n:]

    # Receive-side buffer state (class defaults; rebound per instance
    # as bytes accumulate).
    buff = b''
    bufneed = 4
    bufstate = 0 # meaning: 0 => reading count; 1 => reading data

    def pollpacket(self, wait):
        """Return one complete packet, or None if the socket isn't ready."""
        self._stage0()
        if len(self.buff) < self.bufneed:
            r, w, x = select.select([self.sock.fileno()], [], [], wait)
            if len(r) == 0:
                return None
            try:
                s = self.sock.recv(BUFSIZE)
            except OSError:
                raise EOFError
            if len(s) == 0:
                raise EOFError
            self.buff += s
            self._stage0()
        return self._stage1()

    def _stage0(self):
        # Consume the 4-byte length prefix once it is fully buffered.
        if self.bufstate == 0 and len(self.buff) >= 4:
            s = self.buff[:4]
            self.buff = self.buff[4:]
            self.bufneed = struct.unpack("<i", s)[0]
            self.bufstate = 1

    def _stage1(self):
        # Slice off a complete packet once enough bytes have arrived.
        if self.bufstate == 1 and len(self.buff) >= self.bufneed:
            packet = self.buff[:self.bufneed]
            self.buff = self.buff[self.bufneed:]
            self.bufneed = 4
            self.bufstate = 0
            return packet

    def pollmessage(self, wait):
        packet = self.pollpacket(wait)
        if packet is None:
            return None
        try:
            message = pickle.loads(packet)
        except pickle.UnpicklingError:
            print("-----------------------", file=sys.__stderr__)
            print("cannot unpickle packet:", repr(packet), file=sys.__stderr__)
            traceback.print_stack(file=sys.__stderr__)
            print("-----------------------", file=sys.__stderr__)
            raise
        return message

    def pollresponse(self, myseq, wait):
        """Handle messages received on the socket.

        Some messages received may be asynchronous 'call' or 'queue' requests,
        and some may be responses for other threads.

        'call' requests are passed to self.localcall() with the expectation of
        immediate execution, during which time the socket is not serviced.

        'queue' requests are used for tasks (which may block or hang) to be
        processed in a different thread.  These requests are fed into
        request_queue by self.localcall().  Responses to queued requests are
        taken from response_queue and sent across the link with the associated
        sequence numbers.  Messages in the queues are (sequence_number,
        request/response) tuples and code using this module removing messages
        from the request_queue is responsible for returning the correct
        sequence number in the response_queue.

        pollresponse() will loop until a response message with the myseq
        sequence number is received, and will save other responses in
        self.responses and notify the owning thread.

        """
        while True:
            # send queued response if there is one available
            try:
                qmsg = response_queue.get(0)
            except queue.Empty:
                pass
            else:
                seq, response = qmsg
                message = (seq, ('OK', response))
                self.putmessage(message)
            # poll for message on link
            try:
                message = self.pollmessage(wait)
                if message is None:  # socket not ready
                    return None
            except EOFError:
                self.handle_EOF()
                return None
            except AttributeError:
                # socket was closed while we were polling
                return None
            seq, resq = message
            how = resq[0]
            self.debug("pollresponse:%d:myseq:%s" % (seq, myseq))
            # process or queue a request
            if how in ("CALL", "QUEUE"):
                self.debug("pollresponse:%d:localcall:call:" % seq)
                response = self.localcall(seq, resq)
                self.debug("pollresponse:%d:localcall:response:%s"
                           % (seq, response))
                if how == "CALL":
                    self.putmessage((seq, response))
                elif how == "QUEUE":
                    # don't acknowledge the 'queue' request!
                    pass
                continue
            # return if completed message transaction
            elif seq == myseq:
                return resq
            # must be a response for a different thread:
            else:
                cv = self.cvars.get(seq, None)
                # response involving unknown sequence number is discarded,
                # probably intended for prior incarnation of server
                if cv is not None:
                    cv.acquire()
                    self.responses[seq] = resq
                    cv.notify()
                    cv.release()
                continue

    def handle_EOF(self):
        "action taken upon link being closed by peer"
        self.EOFhook()
        self.debug("handle_EOF")
        # Wake every waiting thread with an EOF response.
        for key in self.cvars:
            cv = self.cvars[key]
            cv.acquire()
            self.responses[key] = ('EOF', None)
            cv.notify()
            cv.release()
        # call our (possibly overridden) exit function
        self.exithook()

    def EOFhook(self):
        "Classes using rpc client/server can override to augment EOF action"
        pass
|
| 486 |
+
|
| 487 |
+
#----------------- end class SocketIO --------------------
|
| 488 |
+
|
| 489 |
+
class RemoteObject:
    # Token mix-in class
    # Marks return values that localcall() should pass back by
    # reference (via remoteref) rather than pickling by value.
    pass
|
| 492 |
+
|
| 493 |
+
|
| 494 |
+
def remoteref(obj):
    """Register *obj* in the object table and return a proxy token for it."""
    key = id(obj)
    objecttable[key] = obj
    return RemoteProxy(key)
|
| 498 |
+
|
| 499 |
+
|
| 500 |
+
class RemoteProxy:
    """Picklable token carrying the id of an object in the peer process."""

    def __init__(self, oid):
        # oid: id() of the remote object in the peer's object table.
        self.oid = oid
|
| 504 |
+
|
| 505 |
+
|
| 506 |
+
class RPCHandler(socketserver.BaseRequestHandler, SocketIO):
    """Server-side request handler; one instance per connection."""

    debugging = False
    location = "#S" # Server

    def __init__(self, sock, addr, svr):
        svr.current_handler = self ## cgt xxx
        # Initialize the SocketIO messaging layer first, because
        # BaseRequestHandler.__init__ drives the request handling.
        SocketIO.__init__(self, sock)
        socketserver.BaseRequestHandler.__init__(self, sock, addr, svr)

    def handle(self):
        "handle() method required by socketserver"
        self.mainloop()

    def get_remote_proxy(self, oid):
        # Proxy that forwards calls for 'oid' over this link.
        return RPCProxy(self, oid)
|
| 522 |
+
|
| 523 |
+
|
| 524 |
+
class RPCClient(SocketIO):
    """Client side of the RPC link: listens for one inbound connection."""

    debugging = False
    location = "#C" # Client

    nextseq = 1 # Requests coming from the client are odd numbered

    def __init__(self, address, family=socket.AF_INET, type=socket.SOCK_STREAM):
        # Create the listening socket; the peer process connects in.
        listener = socket.socket(family, type)
        listener.bind(address)
        listener.listen(1)
        self.listening_sock = listener

    def accept(self):
        """Accept one connection, rejecting any non-local peer."""
        working_sock, address = self.listening_sock.accept()
        if self.debugging:
            print("****** Connection request from ", address, file=sys.__stderr__)
        if address[0] != LOCALHOST:
            print("** Invalid host: ", address, file=sys.__stderr__)
            raise OSError
        SocketIO.__init__(self, working_sock)

    def get_remote_proxy(self, oid):
        return RPCProxy(self, oid)
|
| 548 |
+
|
| 549 |
+
|
| 550 |
+
class RPCProxy:
    """Local stand-in for an object living in the peer process.

    Attribute access resolves lazily: names that the remote object
    reports as callable become MethodProxy objects; other reported
    attributes are fetched with a remote __getattribute__ call.  The
    caches below are class-private (name-mangled) attributes.
    """

    __methods = None
    __attributes = None

    def __init__(self, sockio, oid):
        self.sockio = sockio
        self.oid = oid

    def __getattr__(self, name):
        # Invoked only for names not found locally; consult the caches,
        # populating each one on first use.
        if self.__methods is None:
            self.__getmethods()
        if self.__methods.get(name):
            return MethodProxy(self.sockio, self.oid, name)
        if self.__attributes is None:
            self.__getattributes()
        if name in self.__attributes:
            value = self.sockio.remotecall(self.oid, '__getattribute__',
                                           (name,), {})
            return value
        else:
            raise AttributeError(name)

    def __getattributes(self):
        # Fetch and cache the remote object's non-callable attribute names.
        self.__attributes = self.sockio.remotecall(self.oid,
                                                "__attributes__", (), {})

    def __getmethods(self):
        # Fetch and cache the remote object's callable attribute names.
        self.__methods = self.sockio.remotecall(self.oid,
                                                "__methods__", (), {})
|
| 580 |
+
|
| 581 |
+
def _getmethods(obj, methods):
|
| 582 |
+
# Helper to get a list of methods from an object
|
| 583 |
+
# Adds names to dictionary argument 'methods'
|
| 584 |
+
for name in dir(obj):
|
| 585 |
+
attr = getattr(obj, name)
|
| 586 |
+
if callable(attr):
|
| 587 |
+
methods[name] = 1
|
| 588 |
+
if isinstance(obj, type):
|
| 589 |
+
for super in obj.__bases__:
|
| 590 |
+
_getmethods(super, methods)
|
| 591 |
+
|
| 592 |
+
def _getattributes(obj, attributes):
|
| 593 |
+
for name in dir(obj):
|
| 594 |
+
attr = getattr(obj, name)
|
| 595 |
+
if not callable(attr):
|
| 596 |
+
attributes[name] = 1
|
| 597 |
+
|
| 598 |
+
|
| 599 |
+
class MethodProxy:
    """Callable that forwards invocations of a single remote method."""

    def __init__(self, sockio, oid, name):
        self.sockio = sockio
        self.oid = oid
        self.name = name

    def __call__(self, /, *args, **kwargs):
        # Delegate to the link; the peer runs the method and replies.
        return self.sockio.remotecall(self.oid, self.name, args, kwargs)
|
| 609 |
+
|
| 610 |
+
|
| 611 |
+
# XXX KBK 09Sep03 We need a proper unit test for this module. Previously
|
| 612 |
+
# existing test code was removed at Rev 1.27 (r34098).
|
| 613 |
+
|
| 614 |
+
def displayhook(value):
    """Override standard display hook to use non-locale encoding.

    Writes repr(value) plus a newline to sys.stdout, falling back to
    an ASCII backslash-escaped rendering when the stream cannot
    encode it, and stores the value in builtins._ (skipping None to
    match the standard hook's behavior).
    """
    if value is None:
        return
    # Set '_' to None to avoid recursion
    builtins._ = None
    text = repr(value)
    try:
        sys.stdout.write(text)
    except UnicodeEncodeError:
        # let's use ascii while utf8-bmp codec doesn't present
        encoding = 'ascii'
        # Renamed local: the original used 'bytes', shadowing the builtin.
        encoded = text.encode(encoding, 'backslashreplace')
        text = encoded.decode(encoding, 'strict')
        sys.stdout.write(text)
    sys.stdout.write("\n")
    builtins._ = value
|
| 631 |
+
|
| 632 |
+
|
| 633 |
+
if __name__ == '__main__':
    from unittest import main
    # Run this module's unit tests when executed directly.
    main('idlelib.idle_test.test_rpc', verbosity=2,)
|
evalkit_tf446/lib/python3.10/idlelib/run.py
ADDED
|
@@ -0,0 +1,642 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
""" idlelib.run
|
| 2 |
+
|
| 3 |
+
Simplified, pyshell.ModifiedInterpreter spawns a subprocess with
|
| 4 |
+
f'''{sys.executable} -c "__import__('idlelib.run').run.main()"'''
|
| 5 |
+
'.run' is needed because __import__ returns idlelib, not idlelib.run.
|
| 6 |
+
"""
|
| 7 |
+
import contextlib
|
| 8 |
+
import functools
|
| 9 |
+
import io
|
| 10 |
+
import linecache
|
| 11 |
+
import queue
|
| 12 |
+
import sys
|
| 13 |
+
import textwrap
|
| 14 |
+
import time
|
| 15 |
+
import traceback
|
| 16 |
+
import _thread as thread
|
| 17 |
+
import threading
|
| 18 |
+
import warnings
|
| 19 |
+
|
| 20 |
+
import idlelib # testing
|
| 21 |
+
from idlelib import autocomplete # AutoComplete, fetch_encodings
|
| 22 |
+
from idlelib import calltip # Calltip
|
| 23 |
+
from idlelib import debugger_r # start_debugger
|
| 24 |
+
from idlelib import debugobj_r # remote_object_tree_item
|
| 25 |
+
from idlelib import iomenu # encoding
|
| 26 |
+
from idlelib import rpc # multiple objects
|
| 27 |
+
from idlelib import stackviewer # StackTreeItem
|
| 28 |
+
import __main__
|
| 29 |
+
|
| 30 |
+
import tkinter # Use tcl and, if startup fails, messagebox.
|
| 31 |
+
if not hasattr(sys.modules['idlelib.run'], 'firstrun'):
|
| 32 |
+
# Undo modifications of tkinter by idlelib imports; see bpo-25507.
|
| 33 |
+
for mod in ('simpledialog', 'messagebox', 'font',
|
| 34 |
+
'dialog', 'filedialog', 'commondialog',
|
| 35 |
+
'ttk'):
|
| 36 |
+
delattr(tkinter, mod)
|
| 37 |
+
del sys.modules['tkinter.' + mod]
|
| 38 |
+
# Avoid AttributeError if run again; see bpo-37038.
|
| 39 |
+
sys.modules['idlelib.run'].firstrun = False
|
| 40 |
+
|
| 41 |
+
LOCALHOST = '127.0.0.1'
|
| 42 |
+
|
| 43 |
+
try:
|
| 44 |
+
eof = 'Ctrl-D (end-of-file)'
|
| 45 |
+
exit.eof = eof
|
| 46 |
+
quit.eof = eof
|
| 47 |
+
except NameError: # In case subprocess started with -S (maybe in future).
|
| 48 |
+
pass
|
| 49 |
+
|
| 50 |
+
|
| 51 |
+
def idle_formatwarning(message, category, filename, lineno, line=None):
    """Format warnings the IDLE way."""
    parts = ["\nWarning (from warnings module):\n",
             f'  File "{filename}", line {lineno}\n']
    if line is None:
        line = linecache.getline(filename, lineno)
    line = line.strip()
    if line:
        parts.append(f"    {line}\n")
    parts.append(f"{category.__name__}: {message}\n")
    return "".join(parts)
|
| 63 |
+
|
| 64 |
+
def idle_showwarning_subproc(
        message, category, filename, lineno, file=None, line=None):
    """Show Idle-format warning after replacing warnings.showwarning.

    The only difference is the formatter called.
    """
    target = sys.stderr if file is None else file
    try:
        target.write(idle_formatwarning(
            message, category, filename, lineno, line))
    except OSError:
        # The stream (probably stderr) is invalid; this warning is lost.
        pass
|
| 77 |
+
|
| 78 |
+
_warnings_showwarning = None
|
| 79 |
+
|
| 80 |
+
def capture_warnings(capture):
    "Replace warning.showwarning with idle_showwarning_subproc, or reverse."
    global _warnings_showwarning
    if capture:
        # Install the IDLE hook once, remembering the previous hook.
        if _warnings_showwarning is None:
            _warnings_showwarning = warnings.showwarning
            warnings.showwarning = idle_showwarning_subproc
        return
    # Restore the saved hook, if we installed one earlier.
    if _warnings_showwarning is not None:
        warnings.showwarning = _warnings_showwarning
        _warnings_showwarning = None
|
| 92 |
+
|
| 93 |
+
capture_warnings(True)
|
| 94 |
+
tcl = tkinter.Tcl()
|
| 95 |
+
|
| 96 |
+
def handle_tk_events(tcl=tcl):
    """Process any tk events that are ready to be dispatched if tkinter
    has been imported, a tcl interpreter has been created and tk has been
    loaded."""
    # Tcl 'update' flushes pending events without entering a mainloop.
    tcl.eval("update")
|
| 101 |
+
|
| 102 |
+
# Thread shared globals: Establish a queue between a subthread (which handles
|
| 103 |
+
# the socket) and the main thread (which runs user code), plus global
|
| 104 |
+
# completion, exit and interruptable (the main thread) flags:
|
| 105 |
+
|
| 106 |
+
exit_now = False
|
| 107 |
+
quitting = False
|
| 108 |
+
interruptable = False
|
| 109 |
+
|
| 110 |
+
def main(del_exitfunc=False):
    """Start the Python execution server in a subprocess

    In the Python subprocess, RPCServer is instantiated with handlerclass
    MyHandler, which inherits register/unregister methods from RPCHandler via
    the mix-in class SocketIO.

    When the RPCServer 'server' is instantiated, the TCPServer initialization
    creates an instance of run.MyHandler and calls its handle() method.
    handle() instantiates a run.Executive object, passing it a reference to the
    MyHandler object.  That reference is saved as attribute rpchandler of the
    Executive instance.  The Executive methods have access to the reference and
    can pass it on to entities that they command
    (e.g. debugger_r.Debugger.start_debugger()).  The latter, in turn, can
    call MyHandler(SocketIO) register/unregister methods via the reference to
    register and unregister themselves.

    """
    global exit_now
    global quitting
    global no_exitfunc
    no_exitfunc = del_exitfunc
    #time.sleep(15) # test subprocess not responding
    try:
        # The GUI passes the port to connect back to as the last argv item.
        assert(len(sys.argv) > 1)
        port = int(sys.argv[-1])
    except:
        print("IDLE Subprocess: no IP port passed in sys.argv.",
              file=sys.__stderr__)
        return

    capture_warnings(True)
    # Hide the port from user code: user scripts see a bare argv.
    sys.argv[:] = [""]
    sockthread = threading.Thread(target=manage_socket,
                                  name='SockThread',
                                  args=((LOCALHOST, port),))
    sockthread.daemon = True
    sockthread.start()
    # Main loop: execute queued requests from the socket thread, or pump
    # tk events while idle.  Exits via the exit_now/quitting flags.
    while True:
        try:
            if exit_now:
                try:
                    exit()
                except KeyboardInterrupt:
                    # exiting but got an extra KBI? Try again!
                    continue
            try:
                request = rpc.request_queue.get(block=True, timeout=0.05)
            except queue.Empty:
                request = None
                # Issue 32207: calling handle_tk_events here adds spurious
                # queue.Empty traceback to event handling exceptions.
            if request:
                seq, (method, args, kwargs) = request
                ret = method(*args, **kwargs)
                rpc.response_queue.put((seq, ret))
            else:
                handle_tk_events()
        except KeyboardInterrupt:
            # KBI while quitting means "really stop now".
            if quitting:
                exit_now = True
            continue
        except SystemExit:
            capture_warnings(False)
            raise
        except:
            type, value, tb = sys.exc_info()
            try:
                print_exception()
                # NOTE: seq is from the request being processed; if the
                # exception fired before a request was unpacked, this can
                # itself raise and fall into the handler below.
                rpc.response_queue.put((seq, None))
            except:
                # Link didn't work, print same exception to __stderr__
                traceback.print_exception(type, value, tb, file=sys.__stderr__)
                exit()
            else:
                continue
|
| 186 |
+
|
| 187 |
+
def manage_socket(address):
    """Connect back to the IDLE GUI at *address*, retrying up to 3 times.

    On success, serve exactly one RPC request (the long-lived handler
    connection).  On repeated failure, report the error and signal the
    main loop to exit.
    """
    for i in range(3):
        # Back off 0, 1, then 2 seconds between attempts.
        time.sleep(i)
        try:
            server = MyRPCServer(address, MyHandler)
            break
        except OSError as err:
            print("IDLE Subprocess: OSError: " + err.args[1] +
                  ", retrying....", file=sys.__stderr__)
            socket_error = err
    else:
        # All retries failed; tell the user and flag shutdown.
        print("IDLE Subprocess: Connection to "
              "IDLE GUI failed, exiting.", file=sys.__stderr__)
        show_socket_error(socket_error, address)
        global exit_now
        exit_now = True
        return
    server.handle_request() # A single request only
|
| 205 |
+
|
| 206 |
+
def show_socket_error(err, address):
    "Display socket error from manage_socket."
    import tkinter
    from tkinter.messagebox import showerror
    # Create a hidden root only so the error dialog has a parent.
    root = tkinter.Tk()
    fix_scaling(root)
    root.withdraw()
    showerror(
            "Subprocess Connection Error",
            f"IDLE's subprocess can't connect to {address[0]}:{address[1]}.\n"
            f"Fatal OSError #{err.errno}: {err.strerror}.\n"
            "See the 'Startup failure' section of the IDLE doc, online at\n"
            "https://docs.python.org/3/library/idle.html#startup-failure",
            parent=root)
    root.destroy()
|
| 221 |
+
|
| 222 |
+
|
| 223 |
+
def get_message_lines(typ, exc, tb):
    "Return line composing the exception message."
    if typ not in (AttributeError, NameError):
        return traceback.format_exception_only(typ, exc)
    # 3.10+ hints are not directly accessible from python (#44026);
    # capture the default excepthook's output and take its message line.
    captured = io.StringIO()
    with contextlib.redirect_stderr(captured):
        sys.__excepthook__(typ, exc, tb)
    return [captured.getvalue().split("\n")[-2] + "\n"]
|
| 233 |
+
|
| 234 |
+
|
| 235 |
+
def print_exception():
    """Print the current exception to sys.stderr, IDLE style.

    Prunes IDLE-internal frames from the traceback and walks the
    __cause__/__context__ chain like the default excepthook.
    """
    import linecache
    linecache.checkcache()
    flush_stdout()
    efile = sys.stderr
    typ, val, tb = excinfo = sys.exc_info()
    # Mirror the interpreter's bookkeeping for tools like the stack viewer.
    sys.last_type, sys.last_value, sys.last_traceback = excinfo
    seen = set()

    def print_exc(typ, exc, tb):
        # 'seen' prevents infinite recursion on cyclic cause/context chains.
        seen.add(id(exc))
        context = exc.__context__
        cause = exc.__cause__
        if cause is not None and id(cause) not in seen:
            print_exc(type(cause), cause, cause.__traceback__)
            print("\nThe above exception was the direct cause "
                  "of the following exception:\n", file=efile)
        elif (context is not None and
              not exc.__suppress_context__ and
              id(context) not in seen):
            print_exc(type(context), context, context.__traceback__)
            print("\nDuring handling of the above exception, "
                  "another exception occurred:\n", file=efile)
        if tb:
            tbe = traceback.extract_tb(tb)
            print('Traceback (most recent call last):', file=efile)
            # Frames from these IDLE/runtime plumbing files are hidden.
            exclude = ("run.py", "rpc.py", "threading.py", "queue.py",
                       "debugger_r.py", "bdb.py")
            cleanup_traceback(tbe, exclude)
            traceback.print_list(tbe, file=efile)
        lines = get_message_lines(typ, exc, tb)
        for line in lines:
            print(line, end='', file=efile)

    print_exc(typ, val, tb)
|
| 270 |
+
|
| 271 |
+
def cleanup_traceback(tb, exclude):
    "Remove excluded traces from beginning/end of tb; get cached lines"
    orig_tb = tb[:]
    # Strip leading frames whose filename matches any exclude pattern.
    while tb:
        for rpcfile in exclude:
            if tb[0][0].count(rpcfile):
                break    # found an exclude, break for: and delete tb[0]
        else:
            break        # no excludes, have left RPC code, break while:
        del tb[0]
    # Strip trailing frames the same way.
    while tb:
        for rpcfile in exclude:
            if tb[-1][0].count(rpcfile):
                break
        else:
            break
        del tb[-1]
    if len(tb) == 0:
        # exception was in IDLE internals, don't prune!
        tb[:] = orig_tb[:]
        print("** IDLE Internal Exception: ", file=sys.stderr)
    rpchandler = rpc.objecttable['exec'].rpchandler
    for i in range(len(tb)):
        fn, ln, nm, line = tb[i]
        if nm == '?':
            nm = "-toplevel-"
        if not line and fn.startswith("<pyshell#"):
            # Shell input lines live in the GUI process; fetch via RPC.
            line = rpchandler.remotecall('linecache', 'getline',
                                         (fn, ln), {})
        tb[i] = fn, ln, nm, line
|
| 301 |
+
|
| 302 |
+
def flush_stdout():
    """XXX How to do this now?"""
    # Intentionally a no-op; kept so callers share a single flush hook.
|
| 304 |
+
|
| 305 |
+
def exit():
    """Exit subprocess, possibly after first clearing exit functions.

    If config-main.cfg/.def 'General' 'delete-exitfunc' is True, then any
    functions registered with atexit will be removed before exiting.
    (VPython support)

    """
    if no_exitfunc:
        import atexit
        # NOTE: _clear is a private atexit API; it drops all callbacks.
        atexit._clear()
    capture_warnings(False)
    sys.exit(0)
|
| 318 |
+
|
| 319 |
+
|
| 320 |
+
def fix_scaling(root):
    """Scale fonts on HiDPI displays."""
    import tkinter.font
    scaling = float(root.tk.call('tk', 'scaling'))
    if scaling > 1.4:
        for name in tkinter.font.names(root):
            font = tkinter.font.Font(root=root, name=name, exists=True)
            size = int(font['size'])
            if size < 0:
                # Only negative sizes are rescaled — presumably because
                # negative Tk font sizes are pixel-based; confirm in Tk docs.
                font['size'] = round(-0.75*size)
|
| 330 |
+
|
| 331 |
+
|
| 332 |
+
def fixdoc(fun, text):
    """Append dedented, line-wrapped *text* to fun's docstring.

    If the function has no docstring, *text* becomes the docstring.
    """
    prefix = '' if fun.__doc__ is None else fun.__doc__ + '\n\n'
    fun.__doc__ = prefix + textwrap.fill(textwrap.dedent(text))
|
| 335 |
+
|
| 336 |
+
RECURSIONLIMIT_DELTA = 30
|
| 337 |
+
|
| 338 |
+
def install_recursionlimit_wrappers():
    """Install wrappers to always add 30 to the recursion limit."""
    # see: bpo-26806

    @functools.wraps(sys.setrecursionlimit)
    def setrecursionlimit(*args, **kwargs):
        # mimic the original sys.setrecursionlimit()'s input handling
        if kwargs:
            raise TypeError(
                "setrecursionlimit() takes no keyword arguments")
        try:
            limit, = args
        except ValueError:
            raise TypeError(f"setrecursionlimit() takes exactly one "
                            f"argument ({len(args)} given)")
        if not limit > 0:
            raise ValueError(
                "recursion limit must be greater or equal than 1")

        # Extra headroom keeps IDLE's own frames from hitting the limit.
        return setrecursionlimit.__wrapped__(limit + RECURSIONLIMIT_DELTA)

    fixdoc(setrecursionlimit, f"""\
            This IDLE wrapper adds {RECURSIONLIMIT_DELTA} to prevent possible
            uninterruptible loops.""")

    @functools.wraps(sys.getrecursionlimit)
    def getrecursionlimit():
        # Report the limit as the user set it, hiding IDLE's headroom.
        return getrecursionlimit.__wrapped__() - RECURSIONLIMIT_DELTA

    fixdoc(getrecursionlimit, f"""\
            This IDLE wrapper subtracts {RECURSIONLIMIT_DELTA} to compensate
            for the {RECURSIONLIMIT_DELTA} IDLE adds when setting the limit.""")

    # add the delta to the default recursion limit, to compensate
    sys.setrecursionlimit(sys.getrecursionlimit() + RECURSIONLIMIT_DELTA)

    sys.setrecursionlimit = setrecursionlimit
    sys.getrecursionlimit = getrecursionlimit
|
| 376 |
+
|
| 377 |
+
|
| 378 |
+
def uninstall_recursionlimit_wrappers():
    """Uninstall the recursion limit wrappers from the sys module.

    IDLE only uses this for tests. Users can import run and call
    this to remove the wrapping.
    """
    # Only unwrap if both wrappers are in place (both carry __wrapped__).
    if (
            getattr(sys.setrecursionlimit, '__wrapped__', None) and
            getattr(sys.getrecursionlimit, '__wrapped__', None)
    ):
        sys.setrecursionlimit = sys.setrecursionlimit.__wrapped__
        sys.getrecursionlimit = sys.getrecursionlimit.__wrapped__
        # Remove the headroom added at install time.
        sys.setrecursionlimit(sys.getrecursionlimit() - RECURSIONLIMIT_DELTA)
|
| 391 |
+
|
| 392 |
+
|
| 393 |
+
class MyRPCServer(rpc.RPCServer):
    """RPCServer that shuts the subprocess down cleanly on link errors."""

    def handle_error(self, request, client_address):
        """Override RPCServer method for IDLE

        Interrupt the MainThread and exit server if link is dropped.

        """
        global quitting
        try:
            # Re-raise the exception being handled so we can dispatch on it.
            raise
        except SystemExit:
            raise
        except EOFError:
            # GUI went away: flag the main loop and wake it up.
            global exit_now
            exit_now = True
            thread.interrupt_main()
        except:
            erf = sys.__stderr__
            # Fixed: removed stray apostrophe after "server!" in the banner.
            print(textwrap.dedent(f"""
            {'-'*40}
            Unhandled exception in user code execution server!
            Thread: {threading.current_thread().name}
            IDLE Client Address: {client_address}
            Request: {request!r}
            """), file=erf)
            traceback.print_exc(limit=-20, file=erf)
            print(textwrap.dedent(f"""
            *** Unrecoverable, server exiting!

            Users should never see this message; it is likely transient.
            If this recurs, report this with a copy of the message
            and an explanation of how to make it repeat.
            {'-'*40}"""), file=erf)
            quitting = True
            thread.interrupt_main()
|
| 429 |
+
|
| 430 |
+
|
| 431 |
+
# Pseudofiles for shell-remote communication (also used in pyshell)
|
| 432 |
+
|
| 433 |
+
class StdioFile(io.TextIOBase):
    """Base pseudofile bridging a remote shell proxy and the io machinery."""

    def __init__(self, shell, tags, encoding='utf-8', errors='strict'):
        self.shell = shell
        self.tags = tags
        self._encoding = encoding
        self._errors = errors

    @property
    def encoding(self):
        return self._encoding

    @property
    def errors(self):
        return self._errors

    @property
    def name(self):
        return f'<{self.tags}>'

    def isatty(self):
        # The shell behaves like a terminal to user code.
        return True
|
| 455 |
+
|
| 456 |
+
|
| 457 |
+
class StdOutputFile(StdioFile):
    """Write-only pseudofile that forwards text to the shell widget."""

    def writable(self):
        return True

    def write(self, s):
        if self.closed:
            raise ValueError("write to closed file")
        # Round-trip through the configured codec so unencodable
        # characters are handled per self.errors before display.
        normalized = str.encode(s, self.encoding, self.errors)
        s = normalized.decode(self.encoding, self.errors)
        return self.shell.write(s, self.tags)
|
| 467 |
+
|
| 468 |
+
|
| 469 |
+
class StdInputFile(StdioFile):
    """Read-only pseudofile that pulls input lines from the shell widget."""

    # Holds input fetched from the shell but not yet returned to the caller.
    _line_buffer = ''

    def readable(self):
        return True

    def read(self, size=-1):
        """Read up to *size* characters (all available input if size < 0)."""
        if self.closed:
            raise ValueError("read from closed file")
        if size is None:
            size = -1
        elif not isinstance(size, int):
            raise TypeError('must be int, not ' + type(size).__name__)
        # Start from anything a previous call left buffered.
        result = self._line_buffer
        self._line_buffer = ''
        if size < 0:
            while line := self.shell.readline():
                result += line
        else:
            while len(result) < size:
                line = self.shell.readline()
                if not line: break
                result += line
            # Keep any excess beyond *size* for the next read.
            self._line_buffer = result[size:]
            result = result[:size]
        return result

    def readline(self, size=-1):
        """Read one line, limited to *size* characters when size >= 0."""
        if self.closed:
            raise ValueError("read from closed file")
        if size is None:
            size = -1
        elif not isinstance(size, int):
            raise TypeError('must be int, not ' + type(size).__name__)
        line = self._line_buffer or self.shell.readline()
        if size < 0:
            size = len(line)
        # Never return past the first newline within the size limit.
        eol = line.find('\n', 0, size)
        if eol >= 0:
            size = eol + 1
        # Stash the unconsumed tail for the next call.
        self._line_buffer = line[size:]
        return line[:size]

    def close(self):
        self.shell.close()
|
| 514 |
+
|
| 515 |
+
|
| 516 |
+
class MyHandler(rpc.RPCHandler):
    """RPC handler that wires the subprocess's std streams to the GUI shell."""

    def handle(self):
        """Override base method"""
        executive = Executive(self)
        self.register("exec", executive)
        self.console = self.get_remote_proxy("console")
        # Redirect the standard streams through the remote shell console.
        sys.stdin = StdInputFile(self.console, "stdin",
                                 iomenu.encoding, iomenu.errors)
        sys.stdout = StdOutputFile(self.console, "stdout",
                                   iomenu.encoding, iomenu.errors)
        sys.stderr = StdOutputFile(self.console, "stderr",
                                   iomenu.encoding, "backslashreplace")

        sys.displayhook = rpc.displayhook
        # page help() text to shell.
        import pydoc # import must be done here to capture i/o binding
        pydoc.pager = pydoc.plainpager

        # Keep a reference to stdin so that it won't try to exit IDLE if
        # sys.stdin gets changed from within IDLE's shell. See issue17838.
        self._keep_stdin = sys.stdin

        install_recursionlimit_wrappers()

        self.interp = self.get_remote_proxy("interp")
        rpc.RPCHandler.getresponse(self, myseq=None, wait=0.05)

    def exithook(self):
        "override SocketIO method - wait for MainThread to shut us down"
        time.sleep(10)

    def EOFhook(self):
        "Override SocketIO method - terminate wait on callback and exit thread"
        global quitting
        quitting = True
        thread.interrupt_main()

    def decode_interrupthook(self):
        "interrupt awakened thread"
        global quitting
        quitting = True
        thread.interrupt_main()
|
| 559 |
+
|
| 560 |
+
|
| 561 |
+
class Executive:
    """Object registered as 'exec'; runs user code on behalf of the GUI."""

    def __init__(self, rpchandler):
        self.rpchandler = rpchandler
        if idlelib.testing is False:
            # Real runs execute user code in __main__'s namespace.
            self.locals = __main__.__dict__
            self.calltip = calltip.Calltip()
            self.autocomplete = autocomplete.AutoComplete()
        else:
            self.locals = {}

    def runcode(self, code):
        """Execute *code* in the user namespace, reporting any exception."""
        global interruptable
        try:
            self.user_exc_info = None
            # Only user code may be interrupted by interrupt_the_server().
            interruptable = True
            try:
                exec(code, self.locals)
            finally:
                interruptable = False
        except SystemExit as e:
            if e.args:  # SystemExit called with an argument.
                ob = e.args[0]
                if not isinstance(ob, (type(None), int)):
                    print('SystemExit: ' + str(ob), file=sys.stderr)
            # Return to the interactive prompt.
        except:
            self.user_exc_info = sys.exc_info()  # For testing, hook, viewer.
            if quitting:
                exit()
            if sys.excepthook is sys.__excepthook__:
                print_exception()
            else:
                # Honor a user-installed excepthook, but fall back to the
                # IDLE printer if the hook itself raises.
                try:
                    sys.excepthook(*self.user_exc_info)
                except:
                    self.user_exc_info = sys.exc_info()  # For testing.
                    print_exception()
            jit = self.rpchandler.console.getvar("<<toggle-jit-stack-viewer>>")
            if jit:
                self.rpchandler.interp.open_remote_stack_viewer()
        else:
            flush_stdout()

    def interrupt_the_server(self):
        # Only interrupt while user code is running, not IDLE plumbing.
        if interruptable:
            thread.interrupt_main()

    def start_the_debugger(self, gui_adap_oid):
        return debugger_r.start_debugger(self.rpchandler, gui_adap_oid)

    def stop_the_debugger(self, idb_adap_oid):
        "Unregister the Idb Adapter.  Link objects and Idb then subject to GC"
        self.rpchandler.unregister(idb_adap_oid)

    def get_the_calltip(self, name):
        return self.calltip.fetch_tip(name)

    def get_the_completion_list(self, what, mode):
        return self.autocomplete.fetch_completions(what, mode)

    def stackviewer(self, flist_oid=None):
        """Return a remote tree item for the last user exception, or None."""
        if self.user_exc_info:
            typ, val, tb = self.user_exc_info
        else:
            return None
        flist = None
        if flist_oid is not None:
            flist = self.rpchandler.get_remote_proxy(flist_oid)
        # Skip IDLE's own rpc/run frames at the top of the traceback.
        while tb and tb.tb_frame.f_globals["__name__"] in ["rpc", "run"]:
            tb = tb.tb_next
        sys.last_type = typ
        sys.last_value = val
        item = stackviewer.StackTreeItem(flist, tb)
        return debugobj_r.remote_object_tree_item(item)
|
| 636 |
+
|
| 637 |
+
|
| 638 |
+
# Run the module's unit tests when executed directly.
if __name__ == '__main__':
    from unittest import main
    main('idlelib.idle_test.test_run', verbosity=2)

capture_warnings(False)  # Make sure turned off; see bpo-18081.
|
evalkit_tf446/lib/python3.10/lib2to3/pgen2/__pycache__/__init__.cpython-310.pyc
ADDED
|
Binary file (422 Bytes). View file
|
|
|
evalkit_tf446/lib/python3.10/lib2to3/pgen2/__pycache__/token.cpython-310.pyc
ADDED
|
Binary file (2.14 kB). View file
|
|
|
evalkit_tf446/lib/python3.10/lib2to3/pgen2/__pycache__/tokenize.cpython-310.pyc
ADDED
|
Binary file (15.4 kB). View file
|
|
|
evalkit_tf446/lib/python3.10/site-packages/cv2/qt/fonts/DejaVuSans-Oblique.ttf
ADDED
|
@@ -0,0 +1,3 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
version https://git-lfs.github.com/spec/v1
|
| 2 |
+
oid sha256:3bc9c02fefcadd517e5a158b2f34233dd354d67f4302486d88e84bca467d1d43
|
| 3 |
+
size 611556
|
evalkit_tf446/lib/python3.10/site-packages/einops/__init__.py
ADDED
|
@@ -0,0 +1,15 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
__author__ = 'Alex Rogozhnikov'
|
| 2 |
+
__version__ = '0.6.1'
|
| 3 |
+
|
| 4 |
+
|
| 5 |
+
class EinopsError(RuntimeError):
    """Runtime error thrown by einops."""
|
| 8 |
+
|
| 9 |
+
|
| 10 |
+
__all__ = ['rearrange', 'reduce', 'repeat', 'einsum',
|
| 11 |
+
'pack', 'unpack',
|
| 12 |
+
'parse_shape', 'asnumpy', 'EinopsError']
|
| 13 |
+
|
| 14 |
+
from .einops import rearrange, reduce, repeat, einsum, parse_shape, asnumpy
|
| 15 |
+
from .packing import pack, unpack
|
evalkit_tf446/lib/python3.10/site-packages/einops/_backends.py
ADDED
|
@@ -0,0 +1,682 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""
|
| 2 |
+
Backends in `einops` are organized to meet the following requirements
|
| 3 |
+
- backends are not imported unless those are actually needed, because
|
| 4 |
+
- backends may not be installed
|
| 5 |
+
- importing all available backends will drive to significant memory footprint
|
| 6 |
+
- backends may by present but installed with errors (but never used),
|
| 7 |
+
importing may drive to crashes
|
| 8 |
+
- backend should be either symbolic or imperative (tensorflow is for both, but that causes problems)
|
| 9 |
+
- this determines which methods (from_numpy/to_numpy or create_symbol/eval_symbol) should be defined
|
| 10 |
+
- if backend can't (temporarily) provide symbols for shape dimensions, UnknownSize objects are used
|
| 11 |
+
"""
|
| 12 |
+
|
| 13 |
+
import sys
|
| 14 |
+
import warnings
|
| 15 |
+
|
| 16 |
+
__author__ = 'Alex Rogozhnikov'
|
| 17 |
+
|
| 18 |
+
_backends: dict = {}
|
| 19 |
+
_debug_importing = False
|
| 20 |
+
|
| 21 |
+
|
| 22 |
+
def get_backend(tensor) -> 'AbstractBackend':
|
| 23 |
+
"""
|
| 24 |
+
Takes a correct backend (e.g. numpy backend if tensor is numpy.ndarray) for a tensor.
|
| 25 |
+
If needed, imports package and creates backend
|
| 26 |
+
"""
|
| 27 |
+
for framework_name, backend in _backends.items():
|
| 28 |
+
if backend.is_appropriate_type(tensor):
|
| 29 |
+
return backend
|
| 30 |
+
|
| 31 |
+
# Find backend subclasses recursively
|
| 32 |
+
backend_subclasses = []
|
| 33 |
+
backends = AbstractBackend.__subclasses__()
|
| 34 |
+
while backends:
|
| 35 |
+
backend = backends.pop()
|
| 36 |
+
backends += backend.__subclasses__()
|
| 37 |
+
backend_subclasses.append(backend)
|
| 38 |
+
|
| 39 |
+
for BackendSubclass in backend_subclasses:
|
| 40 |
+
if _debug_importing:
|
| 41 |
+
print('Testing for subclass of ', BackendSubclass)
|
| 42 |
+
if BackendSubclass.framework_name not in _backends:
|
| 43 |
+
# check that module was already imported. Otherwise it can't be imported
|
| 44 |
+
if BackendSubclass.framework_name in sys.modules:
|
| 45 |
+
if _debug_importing:
|
| 46 |
+
print('Imported backend for ', BackendSubclass.framework_name)
|
| 47 |
+
backend = BackendSubclass()
|
| 48 |
+
_backends[backend.framework_name] = backend
|
| 49 |
+
if backend.is_appropriate_type(tensor):
|
| 50 |
+
return backend
|
| 51 |
+
|
| 52 |
+
raise RuntimeError('Tensor type unknown to einops {}'.format(type(tensor)))
|
| 53 |
+
|
| 54 |
+
|
| 55 |
+
class AbstractBackend:
|
| 56 |
+
""" Base backend class, major part of methods are only for debugging purposes. """
|
| 57 |
+
framework_name: str
|
| 58 |
+
|
| 59 |
+
def is_appropriate_type(self, tensor):
|
| 60 |
+
""" helper method should recognize tensors it can handle """
|
| 61 |
+
raise NotImplementedError()
|
| 62 |
+
|
| 63 |
+
def from_numpy(self, x):
|
| 64 |
+
raise NotImplementedError("framework doesn't support imperative execution")
|
| 65 |
+
|
| 66 |
+
def to_numpy(self, x):
|
| 67 |
+
raise NotImplementedError("framework doesn't support imperative execution")
|
| 68 |
+
|
| 69 |
+
def create_symbol(self, shape):
|
| 70 |
+
raise NotImplementedError("framework doesn't support symbolic computations")
|
| 71 |
+
|
| 72 |
+
def eval_symbol(self, symbol, input_dict):
|
| 73 |
+
raise NotImplementedError("framework doesn't support symbolic computations")
|
| 74 |
+
|
| 75 |
+
def arange(self, start, stop):
|
| 76 |
+
# supplementary method used only in testing, so should implement CPU version
|
| 77 |
+
raise NotImplementedError("framework doesn't implement arange")
|
| 78 |
+
|
| 79 |
+
def shape(self, x):
|
| 80 |
+
"""shape should return a tuple with integers or "shape symbols" (which will evaluate to actual size)"""
|
| 81 |
+
return x.shape
|
| 82 |
+
|
| 83 |
+
def reshape(self, x, shape):
|
| 84 |
+
return x.reshape(shape)
|
| 85 |
+
|
| 86 |
+
def transpose(self, x, axes):
|
| 87 |
+
return x.transpose(axes)
|
| 88 |
+
|
| 89 |
+
def reduce(self, x, operation, axes):
|
| 90 |
+
return getattr(x, operation)(axis=axes)
|
| 91 |
+
|
| 92 |
+
def stack_on_zeroth_dimension(self, tensors: list):
|
| 93 |
+
raise NotImplementedError()
|
| 94 |
+
|
| 95 |
+
def add_axis(self, x, new_position):
|
| 96 |
+
raise NotImplementedError()
|
| 97 |
+
|
| 98 |
+
def add_axes(self, x, n_axes, pos2len):
|
| 99 |
+
repeats = [1] * n_axes
|
| 100 |
+
for axis_position, axis_length in pos2len.items():
|
| 101 |
+
x = self.add_axis(x, axis_position)
|
| 102 |
+
repeats[axis_position] = axis_length
|
| 103 |
+
return self.tile(x, tuple(repeats))
|
| 104 |
+
|
| 105 |
+
def tile(self, x, repeats):
|
| 106 |
+
"""repeats is a number of """
|
| 107 |
+
raise NotImplementedError()
|
| 108 |
+
|
| 109 |
+
def concat(self, tensors, axis: int):
|
| 110 |
+
""" concatenates tensors along axis.
|
| 111 |
+
Assume identical across tensors: devices, dtypes and shapes except selected axis."""
|
| 112 |
+
raise NotImplementedError()
|
| 113 |
+
|
| 114 |
+
def is_float_type(self, x):
|
| 115 |
+
# some backends (torch) can't compute average for non-floating types.
|
| 116 |
+
# Decided to drop average for all backends if type is not floating
|
| 117 |
+
raise NotImplementedError()
|
| 118 |
+
|
| 119 |
+
def layers(self):
|
| 120 |
+
raise NotImplementedError("backend does not provide layers")
|
| 121 |
+
|
| 122 |
+
def __repr__(self):
|
| 123 |
+
return "<einops backend for {}>".format(self.framework_name)
|
| 124 |
+
|
| 125 |
+
def einsum(self, pattern, *x):
|
| 126 |
+
raise NotImplementedError("backend does not support einsum")
|
| 127 |
+
|
| 128 |
+
|
| 129 |
+
class UnknownSize:
|
| 130 |
+
""" pseudo-symbol for symbolic frameworks which do not provide symbols for shape elements """
|
| 131 |
+
|
| 132 |
+
def __floordiv__(self, other):
|
| 133 |
+
return self
|
| 134 |
+
|
| 135 |
+
def __eq__(self, other):
|
| 136 |
+
return True # we don't know actual size
|
| 137 |
+
|
| 138 |
+
def __mul__(self, other):
|
| 139 |
+
return self
|
| 140 |
+
|
| 141 |
+
def __rmul__(self, other):
|
| 142 |
+
return self
|
| 143 |
+
|
| 144 |
+
def __hash__(self):
|
| 145 |
+
return hash(None)
|
| 146 |
+
|
| 147 |
+
|
| 148 |
+
class NumpyBackend(AbstractBackend):
|
| 149 |
+
framework_name = 'numpy'
|
| 150 |
+
|
| 151 |
+
def __init__(self):
|
| 152 |
+
import numpy
|
| 153 |
+
self.np = numpy
|
| 154 |
+
|
| 155 |
+
def is_appropriate_type(self, tensor):
|
| 156 |
+
return isinstance(tensor, self.np.ndarray)
|
| 157 |
+
|
| 158 |
+
def from_numpy(self, x):
|
| 159 |
+
return x
|
| 160 |
+
|
| 161 |
+
def to_numpy(self, x):
|
| 162 |
+
return x
|
| 163 |
+
|
| 164 |
+
def arange(self, start, stop):
|
| 165 |
+
return self.np.arange(start, stop)
|
| 166 |
+
|
| 167 |
+
def stack_on_zeroth_dimension(self, tensors: list):
|
| 168 |
+
return self.np.stack(tensors)
|
| 169 |
+
|
| 170 |
+
def tile(self, x, repeats):
|
| 171 |
+
return self.np.tile(x, repeats)
|
| 172 |
+
|
| 173 |
+
def concat(self, tensors, axis: int):
|
| 174 |
+
return self.np.concatenate(tensors, axis=axis)
|
| 175 |
+
|
| 176 |
+
def is_float_type(self, x):
|
| 177 |
+
return x.dtype in ('float16', 'float32', 'float64', 'float128', 'bfloat16')
|
| 178 |
+
|
| 179 |
+
def add_axis(self, x, new_position):
|
| 180 |
+
return self.np.expand_dims(x, new_position)
|
| 181 |
+
|
| 182 |
+
def einsum(self, pattern, *x):
|
| 183 |
+
return self.np.einsum(pattern, *x)
|
| 184 |
+
|
| 185 |
+
|
| 186 |
+
class JaxBackend(NumpyBackend):
|
| 187 |
+
framework_name = 'jax'
|
| 188 |
+
|
| 189 |
+
def __init__(self):
|
| 190 |
+
super(JaxBackend, self).__init__()
|
| 191 |
+
self.onp = self.np
|
| 192 |
+
|
| 193 |
+
import jax.numpy
|
| 194 |
+
self.np = jax.numpy
|
| 195 |
+
|
| 196 |
+
def from_numpy(self, x):
|
| 197 |
+
return self.np.asarray(x)
|
| 198 |
+
|
| 199 |
+
def to_numpy(self, x):
|
| 200 |
+
return self.onp.asarray(x)
|
| 201 |
+
|
| 202 |
+
|
| 203 |
+
class GluonBackend(AbstractBackend):
|
| 204 |
+
framework_name = 'mxnet.ndarray'
|
| 205 |
+
|
| 206 |
+
def __init__(self):
|
| 207 |
+
import mxnet
|
| 208 |
+
self.mx = mxnet
|
| 209 |
+
|
| 210 |
+
def is_appropriate_type(self, tensor):
|
| 211 |
+
return isinstance(tensor, self.mx.nd.NDArray)
|
| 212 |
+
|
| 213 |
+
def from_numpy(self, x):
|
| 214 |
+
if len(x.shape) == 0:
|
| 215 |
+
x = x[None] # poor support of scalars in mxnet, otherwise mxnet can't attach gradients
|
| 216 |
+
var = self.mx.nd.array(x, dtype=x.dtype)
|
| 217 |
+
var.attach_grad()
|
| 218 |
+
return var
|
| 219 |
+
|
| 220 |
+
def to_numpy(self, x):
|
| 221 |
+
return self.mx.nd.NDArray.asnumpy(x)
|
| 222 |
+
|
| 223 |
+
def reshape(self, x, shape):
|
| 224 |
+
if len(shape) == 0:
|
| 225 |
+
return x # poor support of scalars in mxnet
|
| 226 |
+
return x.reshape(shape)
|
| 227 |
+
|
| 228 |
+
def arange(self, start, stop):
|
| 229 |
+
return self.mx.nd.arange(start, stop)
|
| 230 |
+
|
| 231 |
+
def stack_on_zeroth_dimension(self, tensors: list):
|
| 232 |
+
return self.mx.nd.stack(*tensors)
|
| 233 |
+
|
| 234 |
+
def tile(self, x, repeats):
|
| 235 |
+
return self.mx.nd.tile(x, repeats)
|
| 236 |
+
|
| 237 |
+
def concat(self, tensors, axis: int):
|
| 238 |
+
return self.mx.nd.concat(*tensors, dim=axis)
|
| 239 |
+
|
| 240 |
+
def add_axis(self, x, new_position):
|
| 241 |
+
return self.mx.nd.expand_dims(x, new_position)
|
| 242 |
+
|
| 243 |
+
def is_float_type(self, x):
|
| 244 |
+
return 'float' in str(x.dtype)
|
| 245 |
+
|
| 246 |
+
def layers(self):
|
| 247 |
+
from .layers import gluon
|
| 248 |
+
return gluon
|
| 249 |
+
|
| 250 |
+
|
| 251 |
+
class TorchBackend(AbstractBackend):
|
| 252 |
+
framework_name = 'torch'
|
| 253 |
+
|
| 254 |
+
def __init__(self):
|
| 255 |
+
import torch
|
| 256 |
+
self.torch = torch
|
| 257 |
+
|
| 258 |
+
def is_appropriate_type(self, tensor):
|
| 259 |
+
return isinstance(tensor, self.torch.Tensor)
|
| 260 |
+
|
| 261 |
+
def from_numpy(self, x):
|
| 262 |
+
variable = self.torch.from_numpy(x)
|
| 263 |
+
if self.is_float_type(variable):
|
| 264 |
+
# attach grad only to floating types
|
| 265 |
+
variable.requires_grad = True
|
| 266 |
+
return variable
|
| 267 |
+
|
| 268 |
+
def to_numpy(self, x):
|
| 269 |
+
return x.detach().cpu().numpy()
|
| 270 |
+
|
| 271 |
+
def arange(self, start, stop):
|
| 272 |
+
return self.torch.arange(start, stop, dtype=self.torch.int64)
|
| 273 |
+
|
| 274 |
+
def reduce(self, x, operation, reduced_axes):
|
| 275 |
+
if operation == 'min':
|
| 276 |
+
return x.amin(dim=reduced_axes)
|
| 277 |
+
elif operation == 'max':
|
| 278 |
+
return x.amax(dim=reduced_axes)
|
| 279 |
+
elif operation == 'sum':
|
| 280 |
+
return x.sum(dim=reduced_axes)
|
| 281 |
+
elif operation == 'mean':
|
| 282 |
+
return x.mean(dim=reduced_axes)
|
| 283 |
+
elif operation == 'prod':
|
| 284 |
+
for i in list(sorted(reduced_axes))[::-1]:
|
| 285 |
+
x = x.prod(dim=i)
|
| 286 |
+
return x
|
| 287 |
+
else:
|
| 288 |
+
raise NotImplementedError('Unknown reduction ', operation)
|
| 289 |
+
|
| 290 |
+
def transpose(self, x, axes):
|
| 291 |
+
return x.permute(axes)
|
| 292 |
+
|
| 293 |
+
def stack_on_zeroth_dimension(self, tensors: list):
|
| 294 |
+
return self.torch.stack(tensors)
|
| 295 |
+
|
| 296 |
+
def add_axes(self, x, n_axes, pos2len):
|
| 297 |
+
repeats = [-1] * n_axes
|
| 298 |
+
for axis_position, axis_length in pos2len.items():
|
| 299 |
+
x = self.add_axis(x, axis_position)
|
| 300 |
+
repeats[axis_position] = axis_length
|
| 301 |
+
return x.expand(repeats)
|
| 302 |
+
|
| 303 |
+
def tile(self, x, repeats):
|
| 304 |
+
return x.repeat(repeats)
|
| 305 |
+
|
| 306 |
+
def concat(self, tensors, axis: int):
|
| 307 |
+
return self.torch.cat(tensors, dim=axis)
|
| 308 |
+
|
| 309 |
+
def add_axis(self, x, new_position):
|
| 310 |
+
return self.torch.unsqueeze(x, new_position)
|
| 311 |
+
|
| 312 |
+
def is_float_type(self, x):
|
| 313 |
+
return x.dtype in [self.torch.float16, self.torch.float32, self.torch.float64, self.torch.bfloat16]
|
| 314 |
+
|
| 315 |
+
def layers(self):
|
| 316 |
+
from .layers import torch
|
| 317 |
+
return torch
|
| 318 |
+
|
| 319 |
+
def einsum(self, pattern, *x):
|
| 320 |
+
return self.torch.einsum(pattern, *x)
|
| 321 |
+
|
| 322 |
+
|
| 323 |
+
class CupyBackend(AbstractBackend):
|
| 324 |
+
framework_name = 'cupy'
|
| 325 |
+
|
| 326 |
+
def __init__(self):
|
| 327 |
+
import cupy
|
| 328 |
+
self.cupy = cupy
|
| 329 |
+
|
| 330 |
+
def is_appropriate_type(self, tensor):
|
| 331 |
+
return isinstance(tensor, self.cupy.ndarray)
|
| 332 |
+
|
| 333 |
+
def from_numpy(self, x):
|
| 334 |
+
return self.cupy.asarray(x)
|
| 335 |
+
|
| 336 |
+
def to_numpy(self, x):
|
| 337 |
+
return self.cupy.asnumpy(x)
|
| 338 |
+
|
| 339 |
+
def arange(self, start, stop):
|
| 340 |
+
return self.cupy.arange(start, stop)
|
| 341 |
+
|
| 342 |
+
def stack_on_zeroth_dimension(self, tensors: list):
|
| 343 |
+
return self.cupy.stack(tensors)
|
| 344 |
+
|
| 345 |
+
def tile(self, x, repeats):
|
| 346 |
+
return self.cupy.tile(x, repeats)
|
| 347 |
+
|
| 348 |
+
def concat(self, tensors, axis: int):
|
| 349 |
+
return self.cupy.concatenate(tensors, axis=axis)
|
| 350 |
+
|
| 351 |
+
def add_axis(self, x, new_position):
|
| 352 |
+
return self.cupy.expand_dims(x, new_position)
|
| 353 |
+
|
| 354 |
+
def is_float_type(self, x):
|
| 355 |
+
return x.dtype in ('float16', 'float32', 'float64', 'float128', 'bfloat16')
|
| 356 |
+
|
| 357 |
+
def einsum(self, pattern, *x):
|
| 358 |
+
return self.cupy.einsum(pattern, *x)
|
| 359 |
+
|
| 360 |
+
|
| 361 |
+
class ChainerBackend(AbstractBackend):
|
| 362 |
+
framework_name = 'chainer'
|
| 363 |
+
|
| 364 |
+
def __init__(self):
|
| 365 |
+
import chainer
|
| 366 |
+
import numpy
|
| 367 |
+
self.numpy = numpy
|
| 368 |
+
self.chainer = chainer
|
| 369 |
+
|
| 370 |
+
def is_appropriate_type(self, tensor):
|
| 371 |
+
return isinstance(tensor, self.chainer.Variable)
|
| 372 |
+
|
| 373 |
+
def from_numpy(self, x):
|
| 374 |
+
return self.chainer.Variable(x.astype('float32'))
|
| 375 |
+
|
| 376 |
+
def to_numpy(self, x):
|
| 377 |
+
if isinstance(x, self.chainer.Variable):
|
| 378 |
+
x = x.data
|
| 379 |
+
return x
|
| 380 |
+
|
| 381 |
+
def arange(self, start, stop):
|
| 382 |
+
return self.numpy.arange(start, stop)
|
| 383 |
+
|
| 384 |
+
def reduce(self, x, operation, axes):
|
| 385 |
+
return getattr(self.chainer.functions, operation)(x, axis=axes)
|
| 386 |
+
|
| 387 |
+
def stack_on_zeroth_dimension(self, tensors: list):
|
| 388 |
+
return self.chainer.functions.stack(tensors)
|
| 389 |
+
|
| 390 |
+
def tile(self, x, repeats):
|
| 391 |
+
return self.chainer.functions.tile(x, repeats)
|
| 392 |
+
|
| 393 |
+
def concat(self, tensors, axis: int):
|
| 394 |
+
return self.chainer.functions.concat(tensors, axis=axis)
|
| 395 |
+
|
| 396 |
+
def add_axis(self, x, new_position):
|
| 397 |
+
return self.chainer.functions.expand_dims(x, new_position)
|
| 398 |
+
|
| 399 |
+
def is_float_type(self, x):
|
| 400 |
+
return x.dtype in ('float16', 'float32', 'float64', 'float128', 'bfloat16')
|
| 401 |
+
|
| 402 |
+
def layers(self):
|
| 403 |
+
from .layers import chainer
|
| 404 |
+
return chainer
|
| 405 |
+
|
| 406 |
+
def einsum(self, pattern, *x):
|
| 407 |
+
return self.chainer.functions.einsum(pattern, *x)
|
| 408 |
+
|
| 409 |
+
|
| 410 |
+
class HashableTuple:
|
| 411 |
+
"""Overcomes non-hashability of symbolic elements"""
|
| 412 |
+
|
| 413 |
+
def __init__(self, elements: tuple):
|
| 414 |
+
self.elements = elements
|
| 415 |
+
|
| 416 |
+
def __iter__(self):
|
| 417 |
+
for x in self.elements:
|
| 418 |
+
yield x
|
| 419 |
+
|
| 420 |
+
def __len__(self):
|
| 421 |
+
return len(self.elements)
|
| 422 |
+
|
| 423 |
+
def __getitem__(self, item):
|
| 424 |
+
return self.elements[item]
|
| 425 |
+
|
| 426 |
+
|
| 427 |
+
class TensorflowBackend(AbstractBackend):
|
| 428 |
+
framework_name = 'tensorflow'
|
| 429 |
+
|
| 430 |
+
def __init__(self):
|
| 431 |
+
import tensorflow
|
| 432 |
+
self.tf = tensorflow
|
| 433 |
+
|
| 434 |
+
def is_appropriate_type(self, tensor):
|
| 435 |
+
return isinstance(tensor, (self.tf.Tensor, self.tf.Variable))
|
| 436 |
+
|
| 437 |
+
def from_numpy(self, x):
|
| 438 |
+
assert self.tf.executing_eagerly()
|
| 439 |
+
return self.tf.convert_to_tensor(x)
|
| 440 |
+
|
| 441 |
+
def to_numpy(self, x):
|
| 442 |
+
assert self.tf.executing_eagerly()
|
| 443 |
+
return x.numpy()
|
| 444 |
+
|
| 445 |
+
def arange(self, start, stop):
|
| 446 |
+
return self.tf.range(start, stop)
|
| 447 |
+
|
| 448 |
+
def shape(self, x):
|
| 449 |
+
if self.tf.executing_eagerly():
|
| 450 |
+
return tuple(UnknownSize() if d is None else int(d) for d in x.shape)
|
| 451 |
+
else:
|
| 452 |
+
static_shape = x.shape.as_list()
|
| 453 |
+
tf_shape = self.tf.shape(x)
|
| 454 |
+
# use the static shape where known, otherwise use the TF shape components
|
| 455 |
+
shape = tuple([s or tf_shape[dim] for dim, s in enumerate(static_shape)])
|
| 456 |
+
try:
|
| 457 |
+
hash(shape)
|
| 458 |
+
return shape
|
| 459 |
+
except:
|
| 460 |
+
# unhashable symbols in shape. Wrap tuple to be hashable.
|
| 461 |
+
return HashableTuple(shape)
|
| 462 |
+
|
| 463 |
+
def reduce(self, x, operation, axes):
|
| 464 |
+
return getattr(self.tf, 'reduce_' + operation)(x, axis=axes)
|
| 465 |
+
|
| 466 |
+
def reshape(self, x, shape):
|
| 467 |
+
return self.tf.reshape(x, shape)
|
| 468 |
+
|
| 469 |
+
def transpose(self, x, axes):
|
| 470 |
+
return self.tf.transpose(x, axes)
|
| 471 |
+
|
| 472 |
+
def stack_on_zeroth_dimension(self, tensors: list):
|
| 473 |
+
return self.tf.stack(tensors)
|
| 474 |
+
|
| 475 |
+
def tile(self, x, repeats):
|
| 476 |
+
return self.tf.tile(x, repeats)
|
| 477 |
+
|
| 478 |
+
def concat(self, tensors, axis: int):
|
| 479 |
+
return self.tf.concat(tensors, axis=axis)
|
| 480 |
+
|
| 481 |
+
def add_axis(self, x, new_position):
|
| 482 |
+
return self.tf.expand_dims(x, new_position)
|
| 483 |
+
|
| 484 |
+
def is_float_type(self, x):
|
| 485 |
+
return x.dtype in ('float16', 'float32', 'float64', 'float128', 'bfloat16')
|
| 486 |
+
|
| 487 |
+
def layers(self):
|
| 488 |
+
from .layers import tensorflow
|
| 489 |
+
return tensorflow
|
| 490 |
+
|
| 491 |
+
def einsum(self, pattern, *x):
|
| 492 |
+
return self.tf.einsum(pattern, *x)
|
| 493 |
+
|
| 494 |
+
|
| 495 |
+
class KerasBackend(AbstractBackend):
|
| 496 |
+
framework_name = 'tensorflow.keras'
|
| 497 |
+
|
| 498 |
+
def __init__(self):
|
| 499 |
+
import tensorflow as tf
|
| 500 |
+
self.tf = tf
|
| 501 |
+
self.keras = tf.keras
|
| 502 |
+
self.K = tf.keras.backend
|
| 503 |
+
|
| 504 |
+
def is_appropriate_type(self, tensor):
|
| 505 |
+
return self.tf.is_tensor(tensor) and self.K.is_keras_tensor(tensor)
|
| 506 |
+
|
| 507 |
+
def create_symbol(self, shape):
|
| 508 |
+
return self.keras.Input(batch_shape=shape)
|
| 509 |
+
|
| 510 |
+
def eval_symbol(self, symbol, input_dict):
|
| 511 |
+
model = self.keras.models.Model([var for (var, _) in input_dict], symbol)
|
| 512 |
+
return model.predict_on_batch([val for (_, val) in input_dict])
|
| 513 |
+
|
| 514 |
+
def arange(self, start, stop):
|
| 515 |
+
return self.K.arange(start, stop)
|
| 516 |
+
|
| 517 |
+
def shape(self, x):
|
| 518 |
+
shape = self.K.shape(x) # tf tensor
|
| 519 |
+
return HashableTuple(tuple(shape))
|
| 520 |
+
|
| 521 |
+
def reduce(self, x, operation, axes):
|
| 522 |
+
return getattr(self.K, operation)(x, axis=axes)
|
| 523 |
+
|
| 524 |
+
def reshape(self, x, shape):
|
| 525 |
+
return self.K.reshape(x, shape)
|
| 526 |
+
|
| 527 |
+
def transpose(self, x, axes):
|
| 528 |
+
return self.K.permute_dimensions(x, axes)
|
| 529 |
+
|
| 530 |
+
def stack_on_zeroth_dimension(self, tensors: list):
|
| 531 |
+
return self.K.stack(tensors)
|
| 532 |
+
|
| 533 |
+
def tile(self, x, repeats):
|
| 534 |
+
return self.K.tile(x, repeats)
|
| 535 |
+
|
| 536 |
+
def concat(self, tensors, axis: int):
|
| 537 |
+
return self.K.concatenate(tensors, axis=axis)
|
| 538 |
+
|
| 539 |
+
def add_axis(self, x, new_position):
|
| 540 |
+
return self.K.expand_dims(x, new_position)
|
| 541 |
+
|
| 542 |
+
def is_float_type(self, x):
|
| 543 |
+
return 'float' in self.K.dtype(x)
|
| 544 |
+
|
| 545 |
+
def layers(self):
|
| 546 |
+
from .layers import keras
|
| 547 |
+
return keras
|
| 548 |
+
|
| 549 |
+
|
| 550 |
+
class OneFlowBackend(AbstractBackend):
|
| 551 |
+
framework_name = "oneflow"
|
| 552 |
+
|
| 553 |
+
def __init__(self):
|
| 554 |
+
import oneflow as flow
|
| 555 |
+
self.flow = flow
|
| 556 |
+
|
| 557 |
+
def is_appropriate_type(self, tensor):
|
| 558 |
+
return isinstance(tensor, self.flow.Tensor)
|
| 559 |
+
|
| 560 |
+
def from_numpy(self, x):
|
| 561 |
+
variable = self.flow.from_numpy(x)
|
| 562 |
+
if self.is_float_type(variable):
|
| 563 |
+
# attach grad only to floating types
|
| 564 |
+
variable.requires_grad = True
|
| 565 |
+
return variable
|
| 566 |
+
|
| 567 |
+
def to_numpy(self, x):
|
| 568 |
+
return x.detach().cpu().numpy()
|
| 569 |
+
|
| 570 |
+
def arange(self, start, stop):
|
| 571 |
+
return self.flow.arange(start, stop, dtype=self.flow.int64)
|
| 572 |
+
|
| 573 |
+
def reduce(self, x, operation, reduced_axes):
|
| 574 |
+
for axis in sorted(reduced_axes, reverse=True):
|
| 575 |
+
if operation == 'min':
|
| 576 |
+
x, _ = x.min(dim=axis)
|
| 577 |
+
elif operation == 'max':
|
| 578 |
+
x, _ = x.max(dim=axis)
|
| 579 |
+
elif operation in ['sum', 'mean', 'prod']:
|
| 580 |
+
x = getattr(x, operation)(dim=axis)
|
| 581 |
+
else:
|
| 582 |
+
raise NotImplementedError('Unknown reduction ', operation)
|
| 583 |
+
return x
|
| 584 |
+
|
| 585 |
+
def transpose(self, x, axes):
|
| 586 |
+
return x.permute(axes)
|
| 587 |
+
|
| 588 |
+
def stack_on_zeroth_dimension(self, tensors: list):
|
| 589 |
+
return self.flow.stack(tensors)
|
| 590 |
+
|
| 591 |
+
def add_axes(self, x, n_axes, pos2len):
|
| 592 |
+
repeats = [-1] * n_axes
|
| 593 |
+
for axis_position, axis_length in pos2len.items():
|
| 594 |
+
x = self.add_axis(x, axis_position)
|
| 595 |
+
repeats[axis_position] = axis_length
|
| 596 |
+
return x.expand(*repeats)
|
| 597 |
+
|
| 598 |
+
def tile(self, x, repeats):
|
| 599 |
+
return x.repeat(repeats)
|
| 600 |
+
|
| 601 |
+
def concat(self, tensors, axis: int):
|
| 602 |
+
return self.flow.concat(tensors, dim=axis)
|
| 603 |
+
|
| 604 |
+
def add_axis(self, x, new_position):
|
| 605 |
+
return self.flow.unsqueeze(x, new_position)
|
| 606 |
+
|
| 607 |
+
def is_float_type(self, x):
|
| 608 |
+
return x.dtype in [self.flow.float16, self.flow.float32, self.flow.float64]
|
| 609 |
+
|
| 610 |
+
def layers(self):
|
| 611 |
+
from .layers import oneflow
|
| 612 |
+
return oneflow
|
| 613 |
+
|
| 614 |
+
def einsum(self, pattern, *x):
|
| 615 |
+
return self.flow.einsum(pattern, *x)
|
| 616 |
+
|
| 617 |
+
|
| 618 |
+
class PaddleBackend(AbstractBackend):
|
| 619 |
+
framework_name = "paddle"
|
| 620 |
+
|
| 621 |
+
def __init__(self):
|
| 622 |
+
import paddle
|
| 623 |
+
self.paddle = paddle
|
| 624 |
+
|
| 625 |
+
def is_appropriate_type(self, tensor):
|
| 626 |
+
return isinstance(tensor, (self.paddle.Tensor, self.paddle.static.Variable))
|
| 627 |
+
|
| 628 |
+
def from_numpy(self, x):
|
| 629 |
+
tensor = self.paddle.to_tensor(x)
|
| 630 |
+
tensor.stop_gradient = False
|
| 631 |
+
return tensor
|
| 632 |
+
|
| 633 |
+
def to_numpy(self, x):
|
| 634 |
+
return x.detach().numpy()
|
| 635 |
+
|
| 636 |
+
def arange(self, start, stop):
|
| 637 |
+
return self.paddle.arange(start, stop, dtype=self.paddle.int64)
|
| 638 |
+
|
| 639 |
+
def reduce(self, x, operation, axes):
|
| 640 |
+
# TODO: Support the reduce operation to output a 0D Tensor
|
| 641 |
+
if len(axes) == x.ndim:
|
| 642 |
+
return super().reduce(x, operation, axes).squeeze(0)
|
| 643 |
+
else:
|
| 644 |
+
return super().reduce(x, operation, axes)
|
| 645 |
+
|
| 646 |
+
def transpose(self, x, axes):
|
| 647 |
+
return x.transpose(axes)
|
| 648 |
+
|
| 649 |
+
def add_axes(self, x, n_axes, pos2len):
|
| 650 |
+
repeats = [-1] * n_axes
|
| 651 |
+
for axis_position, axis_length in pos2len.items():
|
| 652 |
+
x = self.add_axis(x, axis_position)
|
| 653 |
+
repeats[axis_position] = axis_length
|
| 654 |
+
return x.expand(repeats)
|
| 655 |
+
|
| 656 |
+
def stack_on_zeroth_dimension(self, tensors: list):
|
| 657 |
+
return self.paddle.stack(tensors)
|
| 658 |
+
|
| 659 |
+
def reshape(self, x, shape):
|
| 660 |
+
return x.reshape(shape)
|
| 661 |
+
|
| 662 |
+
def tile(self, x, repeats):
|
| 663 |
+
return x.tile(repeats)
|
| 664 |
+
|
| 665 |
+
def concat(self, tensors, axis: int):
|
| 666 |
+
return self.paddle.concat(tensors, axis=axis)
|
| 667 |
+
|
| 668 |
+
def add_axis(self, x, new_position):
|
| 669 |
+
return x.unsqueeze(new_position)
|
| 670 |
+
|
| 671 |
+
def is_float_type(self, x):
|
| 672 |
+
return x.dtype in [self.paddle.float16, self.paddle.float32, self.paddle.float64]
|
| 673 |
+
|
| 674 |
+
def layers(self):
|
| 675 |
+
from .layers import paddle
|
| 676 |
+
return paddle
|
| 677 |
+
|
| 678 |
+
def einsum(self, pattern, *x):
|
| 679 |
+
return self.paddle.einsum(pattern, *x)
|
| 680 |
+
|
| 681 |
+
def shape(self, x):
|
| 682 |
+
return tuple(x.shape)
|
evalkit_tf446/lib/python3.10/site-packages/einops/einops.py
ADDED
|
@@ -0,0 +1,793 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
import functools
|
| 2 |
+
import itertools
|
| 3 |
+
import string
|
| 4 |
+
import typing
|
| 5 |
+
from collections import OrderedDict
|
| 6 |
+
from typing import Set, Tuple, List, Dict, Union, Callable, Optional, TypeVar, cast, Any
|
| 7 |
+
|
| 8 |
+
if typing.TYPE_CHECKING:
|
| 9 |
+
import numpy as np
|
| 10 |
+
|
| 11 |
+
from . import EinopsError
|
| 12 |
+
from ._backends import get_backend
|
| 13 |
+
from .parsing import ParsedExpression, _ellipsis, AnonymousAxis
|
| 14 |
+
|
| 15 |
+
# Type variable standing in for a tensor of any supported backend (numpy/torch/tf/...).
Tensor = TypeVar('Tensor')
# Signature of a user-supplied reduction: f(tensor, axes_to_reduce) -> tensor.
ReductionCallable = Callable[[Tensor, Tuple[int, ...]], Tensor]
# A reduction is either one of the built-in names below or a callable.
Reduction = Union[str, ReductionCallable]

# Built-in reduction names accepted by `reduce` (case-sensitive).
_reductions = ('min', 'max', 'sum', 'mean', 'prod')
# magic integers are required to stay within
# traceable subset of language
_ellipsis_not_in_parenthesis: List[int] = [-999]  # sentinel grouping that marks a bare '...' on the output side
_unknown_axis_length = -999999  # sentinel for an axis length that must be inferred from the input shape
|
| 24 |
+
|
| 25 |
+
|
| 26 |
+
def is_ellipsis_not_in_parenthesis(group: List[int]) -> bool:
    """Return True iff *group* is the sentinel grouping marking a bare (un-parenthesized) ellipsis."""
    # the sentinel is exactly the one-element group [-999]
    return len(group) == 1 and group[0] == -999
|
| 30 |
+
|
| 31 |
+
|
| 32 |
+
def _product(sequence: List[int]) -> int:
|
| 33 |
+
""" minimalistic product that works both with numbers and symbols. Supports empty lists """
|
| 34 |
+
result = 1
|
| 35 |
+
for element in sequence:
|
| 36 |
+
result *= element
|
| 37 |
+
return result
|
| 38 |
+
|
| 39 |
+
|
| 40 |
+
def _reduce_axes(tensor, reduction_type: Reduction, reduced_axes: List[int], backend):
    """Reduce *tensor* over *reduced_axes* with *backend*.

    reduction_type is either a callable f(tensor, axes) -> tensor or one of the
    built-in reduction names; reducing over zero axes is a no-op.
    """
    if callable(reduction_type):
        # user-supplied reduction (e.g. np.max, torch.var)
        return reduction_type(tensor, tuple(reduced_axes))
    # built-in reduction, identified by name
    if not reduced_axes:
        return tensor
    assert reduction_type in _reductions
    if reduction_type == 'mean' and not backend.is_float_type(tensor):
        raise NotImplementedError('reduce_mean is not available for non-floating tensors')
    return backend.reduce(tensor, reduction_type, tuple(reduced_axes))
|
| 53 |
+
|
| 54 |
+
|
| 55 |
+
def _optimize_transformation(init_shapes, reduced_axes, axes_reordering, final_shapes):
    """Fuse neighboring elementary axes that travel together through the transform.

    Returns an equivalent (init_shapes, reduced_axes, axes_reordering, final_shapes)
    with fewer elementary axes, so backends do less reshape/transpose work.
    """
    # 'collapses' neighboring axes if those participate in the result pattern in the same order
    # TODO add support for added_axes
    assert len(axes_reordering) + len(reduced_axes) == len(init_shapes)
    # joining consecutive axes that will be reduced
    # possibly we can skip this if all backends can optimize this (not sure)
    reduced_axes = tuple(sorted(reduced_axes))
    # walk right-to-left so indices to the left of the deletion point stay valid
    for i in range(len(reduced_axes) - 1)[::-1]:
        if reduced_axes[i] + 1 == reduced_axes[i + 1]:
            removed_axis = reduced_axes[i + 1]
            removed_length = init_shapes[removed_axis]
            init_shapes = init_shapes[:removed_axis] + init_shapes[removed_axis + 1:]
            # fold the removed axis's length into its left neighbor
            init_shapes[removed_axis - 1] *= removed_length
            reduced_axes = reduced_axes[:i + 1] + tuple(axis - 1 for axis in reduced_axes[i + 2:])

    # removing axes that are moved together during reshape
    def build_mapping():
        # maps each initial axis to its final position after reordering (None if reduced away)
        init_to_final = {}
        for axis in range(len(init_shapes)):
            if axis in reduced_axes:
                init_to_final[axis] = None
            else:
                after_reduction = sum(x is not None for x in init_to_final.values())
                init_to_final[axis] = list(axes_reordering).index(after_reduction)
        return init_to_final

    init_axis_to_final_axis = build_mapping()

    for init_axis in range(len(init_shapes) - 1)[::-1]:
        if init_axis_to_final_axis[init_axis] is None:
            continue
        if init_axis_to_final_axis[init_axis + 1] is None:
            continue
        # two axes adjacent both before and after the transform can be merged into one
        if init_axis_to_final_axis[init_axis] + 1 == init_axis_to_final_axis[init_axis + 1]:
            removed_axis = init_axis + 1
            removed_length = init_shapes[removed_axis]
            removed_axis_after_reduction = sum(x not in reduced_axes for x in range(removed_axis))

            reduced_axes = tuple(axis if axis < removed_axis else axis - 1 for axis in reduced_axes)
            init_shapes = init_shapes[:removed_axis] + init_shapes[removed_axis + 1:]
            init_shapes[removed_axis - 1] *= removed_length
            # renumber the permutation to account for the dropped axis
            old_reordering = axes_reordering
            axes_reordering = []
            for axis in old_reordering:
                if axis == removed_axis_after_reduction:
                    pass
                elif axis < removed_axis_after_reduction:
                    axes_reordering.append(axis)
                else:
                    axes_reordering.append(axis - 1)
            init_axis_to_final_axis = build_mapping()

    return init_shapes, reduced_axes, axes_reordering, final_shapes
|
| 108 |
+
|
| 109 |
+
|
| 110 |
+
# (init_shapes, reduced_axes, axes_reordering, added_axes, final_shapes) —
# a recipe fully resolved against a concrete input shape, as consumed by _apply_recipe.
CookedRecipe = Tuple[List[int], List[int], List[int], Dict[int, int], List[int]]
|
| 111 |
+
|
| 112 |
+
|
| 113 |
+
class TransformRecipe:
    """Describes the actual computation pathway for one parsed einops pattern.

    Instances are treated as immutable: a recipe is cached per pattern and later
    specialized to concrete tensor shapes by ``_reconstruct_from_shape``.
    """

    # structure is non-mutable. In future, this can be non-mutable dataclass (python 3.7+)

    def __init__(self,
                 elementary_axes_lengths: List[int],
                 input_composite_axes: List[Tuple[List[int], List[int]]],
                 reduced_elementary_axes: List[int],
                 axes_permutation: List[int],
                 added_axes: Dict[int, int],
                 output_composite_axes: List[List[int]],
                 ellipsis_position_in_lhs: Optional[int] = None,
                 ):
        # lengths (or sentinels to be inferred) of elementary axes, in lhs order;
        # an ellipsis, if present, occupies a single slot at the right position
        self.elementary_axes_lengths: List[int] = elementary_axes_lengths
        # per input dimension: (axes with known length, axes whose length is inferred),
        # each entry pointing into elementary_axes_lengths
        self.input_composite_axes: List[Tuple[List[int], List[int]]] = input_composite_axes
        # redundant with axes_permutation, but more convenient to use
        self.reduced_elementary_axes: List[int] = reduced_elementary_axes
        # order in which surviving axes are reshuffled after reduction
        self.axes_permutation: List[int] = axes_permutation
        # output position -> elementary axis id, for axes appearing only on the rhs
        self.added_axes: Dict[int, int] = added_axes
        # grouping of elementary axes forming each output dimension (used to infer result shape)
        self.output_composite_axes: List[List[int]] = output_composite_axes
        # 10000 is a "no ellipsis" sentinel — avoids None handling in reconstruct_from_shape
        self.ellipsis_position_in_lhs: int = \
            10000 if ellipsis_position_in_lhs is None else ellipsis_position_in_lhs
|
| 150 |
+
|
| 151 |
+
|
| 152 |
+
def _reconstruct_from_shape_uncached(self: TransformRecipe, shape: List[int]) -> CookedRecipe:
    """
    Reconstruct all actual parameters using shape.
    Shape is a tuple that may contain integers, shape symbols (tf, keras, theano) and UnknownSize (keras, mxnet)
    known axes can be integers or symbols, but not Nones.
    """
    axes_lengths: List[int] = list(self.elementary_axes_lengths)
    # 10000 is the "no ellipsis" sentinel set in TransformRecipe.__init__
    if self.ellipsis_position_in_lhs != 10000:
        # with an ellipsis the pattern matches any rank >= (declared dims - 1)
        if len(shape) < len(self.input_composite_axes) - 1:
            raise EinopsError('Expected at least {} dimensions, got {}'.format(
                len(self.input_composite_axes) - 1, len(shape)))
    else:
        if len(shape) != len(self.input_composite_axes):
            raise EinopsError('Expected {} dimensions, got {}'.format(len(self.input_composite_axes), len(shape)))

    ellipsis_shape: List[int] = []
    for input_axis, (known_axes, unknown_axes) in enumerate(self.input_composite_axes):
        before_ellipsis = input_axis
        after_ellipsis = input_axis + len(shape) - len(self.input_composite_axes)
        if input_axis == self.ellipsis_position_in_lhs:
            # the ellipsis swallows all dimensions not matched by named axes
            assert len(known_axes) == 0 and len(unknown_axes) == 1
            unknown_axis: int = unknown_axes[0]
            ellipsis_shape = shape[before_ellipsis:after_ellipsis + 1]
            for d in ellipsis_shape:
                if d is None:
                    raise EinopsError("Couldn't infer shape for one or more axes represented by ellipsis")
            total_dim_size: int = _product(ellipsis_shape)
            axes_lengths[unknown_axis] = total_dim_size
        else:
            if input_axis < self.ellipsis_position_in_lhs:
                length = shape[before_ellipsis]
            else:
                length = shape[after_ellipsis]
            known_product = 1
            for axis in known_axes:
                known_product *= axes_lengths[axis]

            if len(unknown_axes) == 0:
                # symbolic lengths skip the check — only plain ints can be compared here
                if isinstance(length, int) and isinstance(known_product, int) and length != known_product:
                    raise EinopsError('Shape mismatch, {} != {}'.format(length, known_product))
            # this is enforced when recipe is created
            # elif len(unknown_axes) > 1:
            #     raise EinopsError(
            #         "Lengths of two or more axes in parenthesis not provided (dim={}), can't infer dimensions".
            #         format(known_product)
            #     )
            else:
                if isinstance(length, int) and isinstance(known_product, int) and length % known_product != 0:
                    raise EinopsError("Shape mismatch, can't divide axis of length {} in chunks of {}".format(
                        length, known_product))

                # exactly one unknown axis in this group — its length is the quotient
                unknown_axis = unknown_axes[0]
                inferred_length: int = length // known_product
                axes_lengths[unknown_axis] = inferred_length

    # at this point all axes_lengths are computed (either have values or variables, but not Nones)

    # TODO more readable expression
    init_shapes = axes_lengths[:len(axes_lengths) - len(self.added_axes)]
    final_shapes: List[int] = []
    for output_axis, grouping in enumerate(self.output_composite_axes):
        if is_ellipsis_not_in_parenthesis(grouping):
            # a bare '...' on the output expands to all its captured dimensions
            final_shapes.extend(ellipsis_shape)
        else:
            lengths = [axes_lengths[elementary_axis] for elementary_axis in grouping]
            final_shapes.append(_product(lengths))
    reduced_axes = self.reduced_elementary_axes
    axes_reordering = self.axes_permutation
    added_axes: Dict[int, int] = {
        pos: axes_lengths[pos_in_elementary] for pos, pos_in_elementary in self.added_axes.items()}
    # if optimize:
    #     assert len(self.added_axes) == 0
    #     return _optimize_transformation(init_shapes, reduced_axes, axes_reordering, final_shapes)
    return init_shapes, reduced_axes, axes_reordering, added_axes, final_shapes
|
| 226 |
+
|
| 227 |
+
|
| 228 |
+
# Memoized specialization of a recipe to a concrete shape.
# NOTE(review): lru_cache requires hashable arguments — presumably backend.shape()
# returns a tuple (or hashable symbolic shape) here; verify against _backends.
_reconstruct_from_shape = functools.lru_cache(1024)(_reconstruct_from_shape_uncached)
|
| 229 |
+
|
| 230 |
+
|
| 231 |
+
def _apply_recipe(recipe: TransformRecipe, tensor: Tensor, reduction_type: Reduction) -> Tensor:
    """Execute *recipe* on *tensor*: reshape -> reduce -> transpose -> (add axes) -> reshape.

    Works for every registered backend (the backend is resolved from the tensor itself).
    """
    backend = get_backend(tensor)
    cooked = _reconstruct_from_shape(recipe, backend.shape(tensor))
    init_shapes, reduced_axes, axes_reordering, added_axes, final_shapes = cooked
    result = backend.reshape(tensor, init_shapes)
    result = _reduce_axes(result, reduction_type=reduction_type, reduced_axes=reduced_axes, backend=backend)
    result = backend.transpose(result, axes_reordering)
    if added_axes:
        # new axes (repeat) are broadcast in after the permutation
        total_axes = len(axes_reordering) + len(added_axes)
        result = backend.add_axes(result, n_axes=total_axes, pos2len=added_axes)
    return backend.reshape(result, final_shapes)
|
| 242 |
+
|
| 243 |
+
|
| 244 |
+
@functools.lru_cache(256)
def _prepare_transformation_recipe(pattern: str,
                                   operation: Reduction,
                                   axes_lengths: Tuple[Tuple, ...]) -> TransformRecipe:
    """ Perform initial parsing of pattern and provided supplementary info
    axes_lengths is a tuple of tuples (axis_name, axis_length)
    """
    left_str, rght_str = pattern.split('->')
    left = ParsedExpression(left_str)
    rght = ParsedExpression(rght_str)

    # checking that axes are in agreement - new axes appear only in repeat, while disappear only in reduction
    if not left.has_ellipsis and rght.has_ellipsis:
        raise EinopsError('Ellipsis found in right side, but not left side of a pattern {}'.format(pattern))
    if left.has_ellipsis and left.has_ellipsis_parenthesized:
        raise EinopsError('Ellipsis is parenthesis in the left side is not allowed: {}'.format(pattern))
    if operation == 'rearrange':
        # rearrange must use exactly the same identifiers on both sides
        difference = set.symmetric_difference(left.identifiers, rght.identifiers)
        if left.has_non_unitary_anonymous_axes or rght.has_non_unitary_anonymous_axes:
            raise EinopsError('Non-unitary anonymous axes are not supported in rearrange (exception is length 1)')
        if len(difference) > 0:
            raise EinopsError('Identifiers only on one side of expression (should be on both): {}'.format(difference))
    elif operation == 'repeat':
        # repeat may introduce new axes on the right, but each needs a known size
        difference = set.difference(left.identifiers, rght.identifiers)
        if len(difference) > 0:
            raise EinopsError('Unexpected identifiers on the left side of repeat: {}'.format(difference))
        axes_without_size = set.difference({ax for ax in rght.identifiers if not isinstance(ax, AnonymousAxis)},
                                           {*left.identifiers, *(ax for ax, _ in axes_lengths)})
        if len(axes_without_size) > 0:
            raise EinopsError('Specify sizes for new axes in repeat: {}'.format(axes_without_size))
    elif operation in _reductions or callable(operation):
        # reductions may only drop axes, never introduce them
        difference = set.difference(rght.identifiers, left.identifiers)
        if len(difference) > 0:
            raise EinopsError('Unexpected identifiers on the right side of reduce {}: {}'.format(operation, difference))
    else:
        raise EinopsError('Unknown reduction {}. Expect one of {}.'.format(operation, _reductions))

    # parsing all dimensions to find out lengths
    axis_name2known_length: Dict[Union[str, AnonymousAxis], int] = OrderedDict()
    for composite_axis in left.composition:
        for axis_name in composite_axis:
            if isinstance(axis_name, AnonymousAxis):
                axis_name2known_length[axis_name] = axis_name.value
            else:
                axis_name2known_length[axis_name] = _unknown_axis_length

    # axis_ids_after_first_reshape = range(len(axis_name2known_length)) at this point

    # axes that appear only on the right side (repeat) are appended after lhs axes
    repeat_axes_names = []
    for axis_name in rght.identifiers:
        if axis_name not in axis_name2known_length:
            if isinstance(axis_name, AnonymousAxis):
                axis_name2known_length[axis_name] = axis_name.value
            else:
                axis_name2known_length[axis_name] = _unknown_axis_length
            repeat_axes_names.append(axis_name)

    axis_name2position = {name: position for position, name in enumerate(axis_name2known_length)}
    reduced_axes: List[int] = [position for axis, position in axis_name2position.items() if
                               axis not in rght.identifiers]
    reduced_axes = list(sorted(reduced_axes))

    # apply user-provided axes_lengths, validating names against the pattern
    for elementary_axis, axis_length in axes_lengths:
        if not ParsedExpression.check_axis_name(elementary_axis):
            raise EinopsError('Invalid name for an axis', elementary_axis)
        if elementary_axis not in axis_name2known_length:
            raise EinopsError('Axis {} is not used in transform'.format(elementary_axis))
        axis_name2known_length[elementary_axis] = axis_length

    input_axes_known_unknown = []
    # some of shapes will be inferred later - all information is prepared for faster inference
    for composite_axis in left.composition:
        known: Set[str] = {axis for axis in composite_axis if axis_name2known_length[axis] != _unknown_axis_length}
        unknown: Set[str] = {axis for axis in composite_axis if axis_name2known_length[axis] == _unknown_axis_length}
        if len(unknown) > 1:
            # at most one axis per parenthesized group can be inferred from the shape
            raise EinopsError('Could not infer sizes for {}'.format(unknown))
        assert len(unknown) + len(known) == len(composite_axis)
        input_axes_known_unknown.append(
            ([axis_name2position[axis] for axis in known],
             [axis_name2position[axis] for axis in unknown])
        )

    # position each surviving lhs axis occupies after the reduction step
    axis_position_after_reduction: Dict[str, int] = {}
    for axis_name in itertools.chain(*left.composition):
        if axis_name in rght.identifiers:
            axis_position_after_reduction[axis_name] = len(axis_position_after_reduction)

    result_axes_grouping: List[List[int]] = []
    for composite_axis in rght.composition:
        if composite_axis == _ellipsis:
            result_axes_grouping.append(_ellipsis_not_in_parenthesis)
        else:
            result_axes_grouping.append([axis_name2position[axis] for axis in composite_axis])

    ordered_axis_right = list(itertools.chain(*rght.composition))
    axes_permutation = [
        axis_position_after_reduction[axis] for axis in ordered_axis_right if axis in left.identifiers]
    added_axes = {i: axis_name2position[axis_name] for i, axis_name in enumerate(ordered_axis_right)
                  if axis_name not in left.identifiers}

    ellipsis_left = None if _ellipsis not in left.composition else left.composition.index(_ellipsis)

    return TransformRecipe(
        elementary_axes_lengths=list(axis_name2known_length.values()),
        input_composite_axes=input_axes_known_unknown,
        reduced_elementary_axes=reduced_axes,
        axes_permutation=axes_permutation,
        added_axes=added_axes,
        output_composite_axes=result_axes_grouping,
        ellipsis_position_in_lhs=ellipsis_left,
    )
|
| 355 |
+
|
| 356 |
+
|
| 357 |
+
def reduce(tensor: Tensor, pattern: str, reduction: Reduction, **axes_lengths: int) -> Tensor:
    """
    einops.reduce provides combination of reordering and reduction using reader-friendly notation.

    Examples for reduce operation:

    ```python
    >>> x = np.random.randn(100, 32, 64)

    # perform max-reduction on the first axis
    >>> y = reduce(x, 't b c -> b c', 'max')

    # same as previous, but with clearer axes meaning
    >>> y = reduce(x, 'time batch channel -> batch channel', 'max')

    >>> x = np.random.randn(10, 20, 30, 40)

    # 2d max-pooling with kernel size = 2 * 2 for image processing
    >>> y1 = reduce(x, 'b c (h1 h2) (w1 w2) -> b c h1 w1', 'max', h2=2, w2=2)

    # if one wants to go back to the original height and width, depth-to-space trick can be applied
    >>> y2 = rearrange(y1, 'b (c h2 w2) h1 w1 -> b c (h1 h2) (w1 w2)', h2=2, w2=2)
    >>> assert parse_shape(x, 'b _ h w') == parse_shape(y2, 'b _ h w')

    # Adaptive 2d max-pooling to 3 * 4 grid
    >>> reduce(x, 'b c (h1 h2) (w1 w2) -> b c h1 w1', 'max', h1=3, w1=4).shape
    (10, 20, 3, 4)

    # Global average pooling
    >>> reduce(x, 'b c h w -> b c', 'mean').shape
    (10, 20)

    # Subtracting mean over batch for each channel
    >>> y = x - reduce(x, 'b c h w -> () c () ()', 'mean')

    # Subtracting per-image mean for each channel
    >>> y = x - reduce(x, 'b c h w -> b c () ()', 'mean')

    ```

    Parameters:
        tensor: tensor: tensor of any supported library (e.g. numpy.ndarray, tensorflow, pytorch).
            list of tensors is also accepted, those should be of the same type and shape
        pattern: string, reduction pattern
        reduction: one of available reductions ('min', 'max', 'sum', 'mean', 'prod'), case-sensitive
            alternatively, a callable f(tensor, reduced_axes) -> tensor can be provided.
            This allows using various reductions, examples: np.max, tf.reduce_logsumexp, torch.var, etc.
        axes_lengths: any additional specifications for dimensions

    Returns:
        tensor of the same type as input
    """
    try:
        # axes_lengths must be hashable (and order-independent) for the lru_cache on the recipe
        hashable_axes_lengths = tuple(sorted(axes_lengths.items()))
        recipe = _prepare_transformation_recipe(pattern, reduction, axes_lengths=hashable_axes_lengths)
        return _apply_recipe(recipe, tensor, reduction_type=reduction)
    except EinopsError as e:
        # re-raise with full context: pattern, input shape (or 'list'), and provided axes lengths
        message = ' Error while processing {}-reduction pattern "{}".'.format(reduction, pattern)
        if not isinstance(tensor, list):
            message += '\n Input tensor shape: {}. '.format(get_backend(tensor).shape(tensor))
        else:
            message += '\n Input is list. '
        message += 'Additional info: {}.'.format(axes_lengths)
        raise EinopsError(message + '\n {}'.format(e))
|
| 421 |
+
|
| 422 |
+
|
| 423 |
+
|
| 424 |
+
def rearrange(tensor: Union[Tensor, List[Tensor]], pattern: str, **axes_lengths) -> Tensor:
    """
    einops.rearrange is a reader-friendly smart element reordering for multidimensional tensors.
    This operation includes functionality of transpose (axes permutation), reshape (view), squeeze, unsqueeze,
    stack, concatenate and other operations.

    Examples for rearrange operation:

    ```python
    # suppose we have a set of 32 images in "h w c" format (height-width-channel)
    >>> images = [np.random.randn(30, 40, 3) for _ in range(32)]

    # stack along first (batch) axis, output is a single array
    >>> rearrange(images, 'b h w c -> b h w c').shape
    (32, 30, 40, 3)

    # concatenate images along height (vertical axis), 960 = 32 * 30
    >>> rearrange(images, 'b h w c -> (b h) w c').shape
    (960, 40, 3)

    # concatenated images along horizontal axis, 1280 = 32 * 40
    >>> rearrange(images, 'b h w c -> h (b w) c').shape
    (30, 1280, 3)

    # reordered axes to "b c h w" format for deep learning
    >>> rearrange(images, 'b h w c -> b c h w').shape
    (32, 3, 30, 40)

    # flattened each image into a vector, 3600 = 30 * 40 * 3
    >>> rearrange(images, 'b h w c -> b (c h w)').shape
    (32, 3600)

    # split each image into 4 smaller (top-left, top-right, bottom-left, bottom-right), 128 = 32 * 2 * 2
    >>> rearrange(images, 'b (h1 h) (w1 w) c -> (b h1 w1) h w c', h1=2, w1=2).shape
    (128, 15, 20, 3)

    # space-to-depth operation
    >>> rearrange(images, 'b (h h1) (w w1) c -> b h w (c h1 w1)', h1=2, w1=2).shape
    (32, 15, 20, 12)

    ```

    When composing axes, C-order enumeration used (consecutive elements have different last axis)
    Find more examples in einops tutorial.

    Parameters:
        tensor: tensor of any supported library (e.g. numpy.ndarray, tensorflow, pytorch).
            list of tensors is also accepted, those should be of the same type and shape
        pattern: string, rearrangement pattern
        axes_lengths: any additional specifications for dimensions

    Returns:
        tensor of the same type as input. If possible, a view to the original tensor is returned.

    """
    if isinstance(tensor, list):
        if not tensor:
            raise TypeError("Rearrange can't be applied to an empty list")
        # a list of same-shaped tensors is first stacked into a single tensor
        backend = get_backend(tensor[0])
        tensor = backend.stack_on_zeroth_dimension(tensor)
    # rearrange is reduce with the special no-op 'rearrange' reduction
    return reduce(cast(Tensor, tensor), pattern, reduction='rearrange', **axes_lengths)
|
| 484 |
+
|
| 485 |
+
|
| 486 |
+
def repeat(tensor: Tensor, pattern: str, **axes_lengths: int) -> Tensor:
    """
    einops.repeat allows reordering elements and repeating them in arbitrary combinations.
    This operation includes functionality of repeat, tile, broadcast functions.

    Examples for repeat operation:

    ```python
    # a grayscale image (of shape height x width)
    >>> image = np.random.randn(30, 40)

    # change it to RGB format by repeating in each channel
    >>> repeat(image, 'h w -> h w c', c=3).shape
    (30, 40, 3)

    # repeat image 2 times along height (vertical axis)
    >>> repeat(image, 'h w -> (repeat h) w', repeat=2).shape
    (60, 40)

    # repeat image 2 time along height and 3 times along width
    >>> repeat(image, 'h w -> (h2 h) (w3 w)', h2=2, w3=3).shape
    (60, 120)

    # convert each pixel to a small square 2x2. Upsample image by 2x
    >>> repeat(image, 'h w -> (h h2) (w w2)', h2=2, w2=2).shape
    (60, 80)

    # pixelate image first by downsampling by 2x, then upsampling
    >>> downsampled = reduce(image, '(h h2) (w w2) -> h w', 'mean', h2=2, w2=2)
    >>> repeat(downsampled, 'h w -> (h h2) (w w2)', h2=2, w2=2).shape
    (30, 40)

    ```

    When composing axes, C-order enumeration used (consecutive elements have different last axis)
    Find more examples in einops tutorial.

    Parameters:
        tensor: tensor of any supported library (e.g. numpy.ndarray, tensorflow, pytorch).
            list of tensors is also accepted, those should be of the same type and shape
        pattern: string, rearrangement pattern
        axes_lengths: any additional specifications for dimensions

    Returns:
        Tensor of the same type as input. If possible, a view to the original tensor is returned.

    """
    # repeat is reduce with the special 'repeat' operation, which may add axes
    return reduce(tensor, pattern, reduction='repeat', **axes_lengths)
|
| 534 |
+
|
| 535 |
+
|
| 536 |
+
def parse_shape(x, pattern: str) -> dict:
    """
    Parse a tensor shape to dictionary mapping axes names to their lengths.

    ```python
    # Use underscore to skip the dimension in parsing.
    >>> x = np.zeros([2, 3, 5, 7])
    >>> parse_shape(x, 'batch _ h w')
    {'batch': 2, 'h': 5, 'w': 7}

    # `parse_shape` output can be used to specify axes_lengths for other operations:
    >>> y = np.zeros([700])
    >>> rearrange(y, '(b c h w) -> b c h w', **parse_shape(x, 'b _ h w')).shape
    (2, 10, 5, 7)

    ```

    For symbolic frameworks may return symbols, not integers.

    Parameters:
        x: tensor of any of supported frameworks
        pattern: str, space separated names for axes, underscore means skip axis

    Returns:
        dict, maps axes names to their lengths
    """
    exp = ParsedExpression(pattern, allow_underscore=True)
    shape = get_backend(x).shape(x)
    # composite axes like '(h w)' are ambiguous when reading a shape — reject them
    if exp.has_composed_axes():
        raise RuntimeError("Can't parse shape with composite axes: {pattern} {shape}".format(
            pattern=pattern, shape=shape))
    if len(shape) != len(exp.composition):
        if exp.has_ellipsis:
            # with an ellipsis, any rank >= (named axes count) is acceptable
            if len(shape) < len(exp.composition) - 1:
                raise RuntimeError("Can't parse shape with this number of dimensions: {pattern} {shape}".format(
                    pattern=pattern, shape=shape))
        else:
            raise RuntimeError("Can't parse shape with different number of dimensions: {pattern} {shape}".format(
                pattern=pattern, shape=shape))
    if exp.has_ellipsis:
        # expand the ellipsis into the right number of skipped ('_') axes
        ellipsis_idx = exp.composition.index(_ellipsis)
        composition = (exp.composition[:ellipsis_idx] +
                       ['_'] * (len(shape) - len(exp.composition) + 1) +
                       exp.composition[ellipsis_idx + 1:])
    else:
        composition = exp.composition
    result = {}
    for (axis_name,), axis_length in zip(composition, shape):  # type: ignore
        if axis_name != '_':
            result[axis_name] = axis_length
    return result
|
| 587 |
+
|
| 588 |
+
|
| 589 |
+
# this one is probably not needed in the public API
|
| 590 |
+
def _enumerate_directions(x):
    """
    For an n-dimensional tensor, returns tensors to enumerate each axis.
    ```python
    x = np.zeros([2, 3, 4]) # or any other tensor
    i, j, k = _enumerate_directions(x)
    result = i + 2*j + 3*k
    ```

    `result[i, j, k] = i + 2j + 3k`, and also has the same shape as result
    Works very similarly to numpy.ogrid (open indexing grid)
    """
    backend = get_backend(x)
    full_shape = backend.shape(x)
    n_dims = len(full_shape)
    enumerators = []
    for axis_id, axis_length in enumerate(full_shape):
        # length `axis_length` along this axis, length 1 elsewhere — broadcasts against x
        broadcast_shape = [1] * n_dims
        broadcast_shape[axis_id] = axis_length
        enumerators.append(backend.reshape(backend.arange(0, axis_length), broadcast_shape))
    return enumerators
|
| 610 |
+
|
| 611 |
+
|
| 612 |
+
# to avoid importing numpy
# alias used in signatures instead of numpy.ndarray, so numpy stays an optional import
np_ndarray = Any
|
| 614 |
+
|
| 615 |
+
|
| 616 |
+
def asnumpy(tensor) -> np_ndarray:
    """
    Convert a tensor of an imperative framework (i.e. numpy/cupy/torch/jax/etc.) to `numpy.ndarray`

    Parameters:
        tensor: tensor of any of known imperative framework

    Returns:
        `numpy.ndarray`, converted to numpy
    """
    backend = get_backend(tensor)
    return backend.to_numpy(tensor)
|
| 627 |
+
|
| 628 |
+
|
| 629 |
+
def _validate_einsum_axis_name(axis_name):
|
| 630 |
+
if len(axis_name) == 0:
|
| 631 |
+
raise NotImplementedError("Singleton () axes are not yet supported in einsum.")
|
| 632 |
+
if len(axis_name) > 1:
|
| 633 |
+
raise NotImplementedError("Shape rearrangement is not yet supported in einsum.")
|
| 634 |
+
|
| 635 |
+
axis_name = axis_name[0]
|
| 636 |
+
|
| 637 |
+
if isinstance(axis_name, AnonymousAxis):
|
| 638 |
+
raise NotImplementedError("Anonymous axes are not yet supported in einsum.")
|
| 639 |
+
if len(axis_name) == 0:
|
| 640 |
+
raise RuntimeError("Encountered empty axis name in einsum.")
|
| 641 |
+
if not isinstance(axis_name, str):
|
| 642 |
+
raise RuntimeError("Axis name in einsum must be a string.")
|
| 643 |
+
|
| 644 |
+
|
| 645 |
+
@functools.lru_cache(256)
def _compactify_pattern_for_einsum(pattern: str) -> str:
    """Translate an einops-style einsum pattern (multi-letter, space-separated
    axis names) into a classic single-letter einsum pattern.

    Each distinct axis name is mapped to one ascii letter in first-seen order;
    ellipses (`...`) are passed through unchanged. Cached because the same
    pattern string is typically reused across many calls.
    """
    if "->" not in pattern:
        # numpy allows this, so make sure users
        # don't accidentally do something like this.
        raise ValueError("Einsum pattern must contain '->'.")
    lefts_str, right_str = pattern.split('->')

    # Parse every comma-separated input specification. Duplicates are legal on
    # the left side (e.g. "i i ->" expresses a trace).
    lefts = [
        ParsedExpression(left, allow_underscore=True, allow_duplicates=True)
        for left in lefts_str.split(',')
    ]

    right = ParsedExpression(right_str, allow_underscore=True)

    # Output letters are drawn from 'a'..'z' then 'A'..'Z'.
    output_axis_names = string.ascii_letters
    i = 0
    axis_name_mapping = {}

    left_patterns = []
    for left in lefts:
        left_pattern = ""
        for raw_axis_name in left.composition:

            if raw_axis_name == _ellipsis:
                left_pattern += '...'
                continue

            _validate_einsum_axis_name(raw_axis_name)
            axis_name = raw_axis_name[0]
            if axis_name not in axis_name_mapping:
                # Assign the next free single letter to this axis name.
                if i >= len(output_axis_names):
                    raise RuntimeError("Too many axes in einsum.")
                axis_name_mapping[axis_name] = output_axis_names[i]
                i += 1

            left_pattern += axis_name_mapping[axis_name]
        left_patterns.append(left_pattern)

    compact_pattern = ",".join(left_patterns) + "->"

    for raw_axis_name in right.composition:
        if raw_axis_name == _ellipsis:
            compact_pattern += '...'
            continue

        _validate_einsum_axis_name(raw_axis_name)
        axis_name = raw_axis_name[0]

        # Every axis on the output side must have appeared in some input.
        if axis_name not in axis_name_mapping:
            raise EinopsError(f"Unknown axis {axis_name} on right side of einsum {pattern}.")

        compact_pattern += axis_name_mapping[axis_name]

    return compact_pattern
|
| 701 |
+
|
| 702 |
+
|
| 703 |
+
# dunders in overloads turn arguments into positional-only.
|
| 704 |
+
# After python 3.7 EOL this should be replaced with '/' as the last argument.
|
| 705 |
+
|
| 706 |
+
@typing.overload
def einsum(__tensor: Tensor, __pattern: str) -> Tensor: ...
@typing.overload
def einsum(__tensor1: Tensor, __tensor2: Tensor, __pattern: str) -> Tensor: ...
@typing.overload
def einsum(__tensor1: Tensor, __tensor2: Tensor, __tensor3: Tensor, __pattern: str) -> Tensor: ...
@typing.overload
def einsum(__tensor1: Tensor, __tensor2: Tensor, __tensor3: Tensor, __tensor4: Tensor, __pattern: str) -> Tensor: ...


def einsum(*tensors_and_pattern: Union[Tensor, str]) -> Tensor:
    """
    einops.einsum calls einsum operations with einops-style named
    axes indexing, computing tensor products with an arbitrary
    number of tensors. Unlike typical einsum syntax, here you must
    pass tensors first, and then the pattern.

    Also, note that rearrange operations such as `"(batch chan) out"`,
    or singleton axes `()`, are not currently supported.

    Examples:

    For a given pattern such as:
    ```python
    >>> x, y, z = np.random.randn(3, 20, 20, 20)
    >>> output = einsum(x, y, z, "a b c, c b d, a g k -> a b k")

    ```
    the following formula is computed:
    ```tex
    output[a, b, k] =
        \sum_{c, d, g} x[a, b, c] * y[c, b, d] * z[a, g, k]
    ```
    where the summation over `c`, `d`, and `g` is performed
    because those axes names do not appear on the right-hand side.

    Let's see some additional examples:
    ```python
    # Filter a set of images:
    >>> batched_images = np.random.randn(128, 16, 16)
    >>> filters = np.random.randn(16, 16, 30)
    >>> result = einsum(batched_images, filters,
    ...                 "batch h w, h w channel -> batch channel")
    >>> result.shape
    (128, 30)

    # Matrix multiplication, with an unknown input shape:
    >>> batch_shape = (50, 30)
    >>> data = np.random.randn(*batch_shape, 20)
    >>> weights = np.random.randn(10, 20)
    >>> result = einsum(weights, data,
    ...                 "out_dim in_dim, ... in_dim -> ... out_dim")
    >>> result.shape
    (50, 30, 10)

    # Matrix trace on a single tensor:
    >>> matrix = np.random.randn(10, 10)
    >>> result = einsum(matrix, "i i ->")
    >>> result.shape
    ()

    ```

    Parameters:
        tensors_and_pattern:
            tensors: tensors of any supported library (numpy, tensorflow, pytorch, jax).
            pattern: string, einsum pattern, with commas
                separating specifications for each tensor.
                pattern should be provided after all tensors.

    Returns:
        Tensor of the same type as input, after processing with einsum.

    """
    # At least one tensor plus the trailing pattern string are required.
    if len(tensors_and_pattern) <= 1:
        raise ValueError(
            "`einops.einsum` takes at minimum two arguments: the tensors (at least one),"
            " followed by the pattern."
        )
    # The pattern is always the last positional argument.
    pattern = tensors_and_pattern[-1]
    if not isinstance(pattern, str):
        raise ValueError(
            "The last argument passed to `einops.einsum` must be a string,"
            " representing the einsum pattern."
        )
    tensors = tensors_and_pattern[:-1]
    # Translate multi-letter einops axis names into the single-letter pattern
    # understood by the backend's native einsum implementation.
    pattern = _compactify_pattern_for_einsum(pattern)
    return get_backend(tensors[0]).einsum(pattern, *tensors)
|
evalkit_tf446/lib/python3.10/site-packages/einops/packing.py
ADDED
|
@@ -0,0 +1,191 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
import math
from functools import lru_cache
from typing import List, Union, TypeVar, Tuple, Sequence

from einops import EinopsError

from einops._backends import get_backend
from einops.parsing import ParsedExpression
|
| 8 |
+
|
| 9 |
+
Tensor = TypeVar('Tensor')
|
| 10 |
+
|
| 11 |
+
Shape = Union[Tuple[int, ...], List[int]]
|
| 12 |
+
|
| 13 |
+
|
| 14 |
+
@lru_cache(maxsize=128)
def analyze_pattern(pattern: str, opname: str) -> Tuple[int, int, int]:
    """
    Validate a pack/unpack pattern and locate its '*' axis.

    Parameters:
        pattern: space-separated axis names containing exactly one '*',
            e.g. "i j * k" or "batch seq *"
        opname: operation name ('pack' or 'unpack'); used only in error messages

    Returns:
        (n_axes_before, n_axes_after, min_axes): number of named axes before
        and after the '*', and the minimum number of axes an input must have.

    Raises:
        EinopsError: for duplicate axis names, a missing '*', or an invalid name.
    """
    axes = pattern.split()
    axes_set = set(axes)
    if len(axes) != len(axes_set):
        raise EinopsError(f'Duplicates in axes names in {opname}(..., "{pattern}")')
    if '*' not in axes_set:
        raise EinopsError(f'No *-axis in {opname}(..., "{pattern}")')
    for axis in axes:
        if axis != '*':
            is_valid, reason = ParsedExpression.check_axis_name_return_reason(axis)
            if not is_valid:
                # Surface the parser's explanation instead of discarding it.
                raise EinopsError(f'Invalid axis name {axis} in {opname}(..., "{pattern}"): {reason}')
    n_axes_before = axes.index('*')
    n_axes_after = len(axes) - n_axes_before - 1
    min_axes = n_axes_before + n_axes_after
    return n_axes_before, n_axes_after, min_axes
|
| 32 |
+
|
| 33 |
+
|
| 34 |
+
def pack(tensors: Sequence[Tensor], pattern: str) -> Tuple[Tensor, List[Shape]]:
    """
    Packs several tensors into one.
    See einops tutorial for introduction into packing (and how it replaces stack and concatenation).

    Parameters:
        tensors: tensors to be packed, can be of different dimensionality
        pattern: pattern that is shared for all inputs and output, e.g. "i j * k" or "batch seq *"

    Returns:
        (packed_tensor, packed_shapes aka PS)

    Example:
    ```python
    >>> from numpy import zeros as Z
    >>> inputs = [Z([2, 3, 5]), Z([2, 3, 7, 5]), Z([2, 3, 7, 9, 5])]
    >>> packed, ps = pack(inputs, 'i j * k')
    >>> packed.shape, ps
    ((2, 3, 71, 5), [(), (7,), (7, 9)])
    ```

    In this example, axes were matched to: i=2, j=3, k=5 based on order (first, second, and last).
    All other axes were 'packed' and concatenated.
    PS (packed shapes) contains information about axes that were matched to '*' in every input.
    Resulting tensor has as many elements as all inputs in total.

    Packing can be reversed with unpack, which additionally needs PS (packed shapes) to reconstruct order.

    ```python
    >>> inputs_unpacked = unpack(packed, ps, 'i j * k')
    >>> [x.shape for x in inputs_unpacked]
    [(2, 3, 5), (2, 3, 7, 5), (2, 3, 7, 9, 5)]
    ```

    Read the tutorial for introduction and application scenarios.

    Raises:
        EinopsError: for an empty tensor list, or a tensor with too few axes.
    """
    n_axes_before, n_axes_after, min_axes = analyze_pattern(pattern, 'pack')

    # Packing zero tensors is illegal: fail with a clear message rather than
    # the bare IndexError that tensors[0] would otherwise raise.
    if len(tensors) == 0:
        raise EinopsError(f'pack(..., "{pattern}") received an empty list of tensors')
    backend = get_backend(tensors[0])

    reshaped_tensors: List[Tensor] = []
    packed_shapes: List[Shape] = []
    for i, tensor in enumerate(tensors):
        shape = backend.shape(tensor)
        if len(shape) < min_axes:
            raise EinopsError(f'packed tensor #{i} (enumeration starts with 0) has shape {shape}, '
                              f'while pattern {pattern} assumes at least {min_axes} axes')
        # The named axes before/after '*' are kept as-is; everything matched by
        # '*' is flattened into one axis (-1) and remembered in packed_shapes.
        axis_after_packed_axes = len(shape) - n_axes_after
        packed_shapes.append(shape[n_axes_before:axis_after_packed_axes])
        reshaped_tensors.append(
            backend.reshape(tensor, (*shape[:n_axes_before], -1, *shape[axis_after_packed_axes:]))
        )

    return backend.concat(reshaped_tensors, axis=n_axes_before), packed_shapes
|
| 89 |
+
|
| 90 |
+
|
| 91 |
+
def prod(x: Shape) -> int:
    """Return the product of all entries of *x*; 1 for an empty shape."""
    # math.prod is C-implemented and handles the empty case (start=1) itself.
    return math.prod(x)
|
| 96 |
+
|
| 97 |
+
|
| 98 |
+
def unpack(tensor: Tensor, packed_shapes: List[Shape], pattern: str) -> List[Tensor]:
    """
    Unpacks a single tensor into several by splitting over a selected axes.
    See einops tutorial for introduction into packing (and how it replaces stack and concatenation).

    Parameters:
        tensor: tensor to be unpacked
        packed_shapes: packed_shapes (aka PS) is a list of shapes that take place of '*' in each output.
            output will contain a single tensor for every provided shape
        pattern: pattern that is shared for input and all outputs, e.g. "i j * k" or "batch seq *",
            where * designates an axis to be unpacked

    Returns:
        list of tensors

    If framework supports views, results are views to the original tensor.

    Example:
    ```python
    >>> from numpy import zeros as Z
    >>> inputs = [Z([2, 3, 5]), Z([2, 3, 7, 5]), Z([2, 3, 7, 9, 5])]
    >>> packed, ps = pack(inputs, 'i j * k')
    >>> packed.shape, ps
    ((2, 3, 71, 5), [(), (7,), (7, 9)])
    ```

    In this example, axes were matched to: i=2, j=3, k=5 based on order (first, second, and last).
    All other axes were 'packed' and concatenated.
    PS (packed shapes) contains information about axes that were matched to '*' in every input.
    Resulting tensor has as many elements as all inputs in total.

    Packing can be reversed with unpack, which additionally needs PS (packed shapes) to reconstruct order.

    ```python
    >>> inputs_unpacked = unpack(packed, ps, 'i j * k')
    >>> [x.shape for x in inputs_unpacked]
    [(2, 3, 5), (2, 3, 7, 5), (2, 3, 7, 9, 5)]
    ```

    Read the tutorial for introduction and application scenarios.
    """
    n_axes_before, n_axes_after, min_axes = analyze_pattern(pattern, opname='unpack')

    backend = get_backend(tensor)
    input_shape = backend.shape(tensor)
    if len(input_shape) != n_axes_before + 1 + n_axes_after:
        raise EinopsError(f'unpack(..., {pattern}) received input of wrong dim with shape {input_shape}')

    unpacked_axis: int = n_axes_before

    # Total length each output contributes along the unpacked axis;
    # -1 marks the (at most one) output whose length must be inferred.
    lengths_of_composed_axes: List[int] = [
        -1 if -1 in p_shape else prod(p_shape)
        for p_shape in packed_shapes
    ]

    n_unknown_composed_axes = sum(x == -1 for x in lengths_of_composed_axes)
    if n_unknown_composed_axes > 1:
        raise EinopsError(
            f"unpack(..., {pattern}) received more than one -1 in {packed_shapes} and can't infer dimensions"
        )

    # following manipulations allow to skip some shape verifications
    # and leave it to backends

    # [[], [2, 3], [4], [-1, 5], [6]] < examples of packed_axis
    # split positions when computed should be
    # [0, 1, 7, 11, N-6 , N ], where N = length of axis
    split_positions = [0] * len(packed_shapes) + [input_shape[unpacked_axis]]
    if n_unknown_composed_axes == 0:
        for i, x in enumerate(lengths_of_composed_axes[:-1]):
            split_positions[i + 1] = split_positions[i] + x
    else:
        # Walk forward up to the unknown segment, then backward from the end;
        # the unknown segment absorbs whatever length remains.
        unknown_composed_axis: int = lengths_of_composed_axes.index(-1)
        for i in range(unknown_composed_axis):
            split_positions[i + 1] = split_positions[i] + lengths_of_composed_axes[i]
        for j in range(unknown_composed_axis + 1, len(lengths_of_composed_axes))[::-1]:
            split_positions[j] = split_positions[j + 1] - lengths_of_composed_axes[j]

    shape_start = input_shape[:unpacked_axis]
    shape_end = input_shape[unpacked_axis + 1:]
    slice_filler = (slice(None, None),) * unpacked_axis
    try:
        return [
            backend.reshape(
                # shortest way slice arbitrary axis
                tensor[(*slice_filler, slice(split_positions[i], split_positions[i + 1]))],
                (*shape_start, *element_shape, *shape_end)
            )
            for i, element_shape in enumerate(packed_shapes)
        ]
    except Exception as e:
        # An error during the reshapes means the passed shapes were incorrect.
        # Catch Exception (not BaseException) so KeyboardInterrupt/SystemExit
        # still propagate, and chain the cause so the backend error is visible.
        raise RuntimeError(f'Error during unpack(..., "{pattern}"): could not split axis of size {split_positions[-1]}'
                           f' into requested {packed_shapes}') from e
|
evalkit_tf446/lib/python3.10/site-packages/executing/__pycache__/__init__.cpython-310.pyc
ADDED
|
Binary file (845 Bytes). View file
|
|
|
evalkit_tf446/lib/python3.10/site-packages/executing/__pycache__/_exceptions.cpython-310.pyc
ADDED
|
Binary file (989 Bytes). View file
|
|
|
evalkit_tf446/lib/python3.10/site-packages/executing/__pycache__/_position_node_finder.cpython-310.pyc
ADDED
|
Binary file (17.5 kB). View file
|
|
|
evalkit_tf446/lib/python3.10/site-packages/executing/__pycache__/executing.cpython-310.pyc
ADDED
|
Binary file (31.6 kB). View file
|
|
|
evalkit_tf446/lib/python3.10/site-packages/executing/__pycache__/version.cpython-310.pyc
ADDED
|
Binary file (188 Bytes). View file
|
|
|
evalkit_tf446/lib/python3.10/site-packages/executing/_exceptions.py
ADDED
|
@@ -0,0 +1,22 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
|
| 2 |
+
class KnownIssue(Exception):
    """
    Signals a known, unrecoverable situation — mostly caused by cpython bugs.

    When raised internally, Executing.node gets set to None.
    """
|
| 9 |
+
|
| 10 |
+
|
| 11 |
+
class VerifierFailure(Exception):
    """
    Raised for an unexpected mapping from instruction to ast node.

    When raised internally, Executing.node gets set to None.
    """

    def __init__(self, title, node, instruction):
        # type: (object, object, object) -> None
        super().__init__(title)  # type: ignore[call-arg]

        # Keep the offending pair around for debugging.
        self.node = node
        self.instruction = instruction
|
evalkit_tf446/lib/python3.10/site-packages/executing/executing.py
ADDED
|
@@ -0,0 +1,1160 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""
|
| 2 |
+
MIT License
|
| 3 |
+
|
| 4 |
+
Copyright (c) 2021 Alex Hall
|
| 5 |
+
|
| 6 |
+
Permission is hereby granted, free of charge, to any person obtaining a copy
|
| 7 |
+
of this software and associated documentation files (the "Software"), to deal
|
| 8 |
+
in the Software without restriction, including without limitation the rights
|
| 9 |
+
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
|
| 10 |
+
copies of the Software, and to permit persons to whom the Software is
|
| 11 |
+
furnished to do so, subject to the following conditions:
|
| 12 |
+
|
| 13 |
+
The above copyright notice and this permission notice shall be included in all
|
| 14 |
+
copies or substantial portions of the Software.
|
| 15 |
+
|
| 16 |
+
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
|
| 17 |
+
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
|
| 18 |
+
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
|
| 19 |
+
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
|
| 20 |
+
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
|
| 21 |
+
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
|
| 22 |
+
SOFTWARE.
|
| 23 |
+
"""
|
| 24 |
+
|
| 25 |
+
import __future__
|
| 26 |
+
import ast
|
| 27 |
+
import dis
|
| 28 |
+
import inspect
|
| 29 |
+
import io
|
| 30 |
+
import linecache
|
| 31 |
+
import re
|
| 32 |
+
import sys
|
| 33 |
+
import types
|
| 34 |
+
from collections import defaultdict
|
| 35 |
+
from copy import deepcopy
|
| 36 |
+
from functools import lru_cache
|
| 37 |
+
from itertools import islice
|
| 38 |
+
from itertools import zip_longest
|
| 39 |
+
from operator import attrgetter
|
| 40 |
+
from pathlib import Path
|
| 41 |
+
from threading import RLock
|
| 42 |
+
from tokenize import detect_encoding
|
| 43 |
+
from typing import TYPE_CHECKING, Any, Callable, Dict, Iterable, Iterator, List, Optional, Sequence, Set, Sized, Tuple, \
|
| 44 |
+
Type, TypeVar, Union, cast
|
| 45 |
+
|
| 46 |
+
if TYPE_CHECKING: # pragma: no cover
|
| 47 |
+
from asttokens import ASTTokens, ASTText
|
| 48 |
+
from asttokens.asttokens import ASTTextBase
|
| 49 |
+
|
| 50 |
+
|
| 51 |
+
function_node_types = (ast.FunctionDef, ast.AsyncFunctionDef) # type: Tuple[Type, ...]
|
| 52 |
+
|
| 53 |
+
cache = lru_cache(maxsize=None)
|
| 54 |
+
|
| 55 |
+
# Type class used to expand out the definition of AST to include fields added by this library
|
| 56 |
+
# It's not actually used for anything other than type checking though!
|
| 57 |
+
class EnhancedAST(ast.AST):
    # Parent node in the syntax tree; assigned by Source.__init__ while walking
    # the parsed tree (plain ast nodes carry no such attribute).
    parent = None  # type: EnhancedAST
|
| 59 |
+
|
| 60 |
+
|
| 61 |
+
class Instruction(dis.Instruction):
    # Source line this instruction belongs to; filled in by get_instructions().
    lineno = None  # type: int
|
| 63 |
+
|
| 64 |
+
|
| 65 |
+
# Type class used to expand out the definition of AST to include fields added by this library
|
| 66 |
+
# It's not actually used for anything other than type checking though!
|
| 67 |
+
class EnhancedInstruction(Instruction):
    # Presumably set when this instruction object was copied/synthesized by the
    # library rather than produced directly by dis — not assigned in this part
    # of the file; TODO confirm against the rest of the module.
    _copied = None  # type: bool
|
| 69 |
+
|
| 70 |
+
|
| 71 |
+
|
| 72 |
+
def assert_(condition, message=""):
    # type: (Any, str) -> None
    """
    Like an assert statement, but unaffected by -O
    :param condition: value that is expected to be truthy
    :param message: converted to str for the AssertionError
    :type message: Any
    """
    if condition:
        return
    raise AssertionError(str(message))
|
| 81 |
+
|
| 82 |
+
|
| 83 |
+
def get_instructions(co):
    # type: (types.CodeType) -> Iterator[EnhancedInstruction]
    """
    Yield the bytecode instructions of *co*, each annotated with a `lineno`.

    dis only reports a line number on instructions that begin a new source
    line (`starts_line`); this generator carries the last seen line forward
    so that every yielded instruction has a usable `lineno` attribute.
    """
    lineno = co.co_firstlineno
    for inst in dis.get_instructions(co):
        inst = cast(EnhancedInstruction, inst)
        # Reuse the previous line number when this instruction doesn't start
        # a new line.
        lineno = inst.starts_line or lineno
        assert_(lineno)
        inst.lineno = lineno
        yield inst
|
| 92 |
+
|
| 93 |
+
|
| 94 |
+
TESTING = 0
|
| 95 |
+
|
| 96 |
+
|
| 97 |
+
class NotOneValueFound(Exception):
    """
    Raised by only() when an iterable does not contain exactly one value.

    `values` holds the offending values when they are known (empty otherwise).
    """

    def __init__(self, msg, values=()):
        # type: (str, Sequence) -> None
        # An immutable () default replaces the mutable-default-argument []
        # (shared across calls); it is never mutated here, and callers only
        # read / iterate it.
        self.values = values
        super(NotOneValueFound, self).__init__(msg)
|
| 102 |
+
|
| 103 |
+
T = TypeVar('T')
|
| 104 |
+
|
| 105 |
+
|
| 106 |
+
def only(it):
    # type: (Iterable[T]) -> T
    """
    Return the single element of *it*; raise NotOneValueFound otherwise.

    Sized containers are checked via len() without consuming them; other
    iterables are probed for at most two elements.
    """
    if isinstance(it, Sized):
        if len(it) != 1:
            raise NotOneValueFound('Expected one value, found %s' % len(it))
        # noinspection PyTypeChecker
        return list(it)[0]

    head = tuple(islice(it, 2))
    if not head:
        raise NotOneValueFound('Expected one value, found 0')
    if len(head) > 1:
        raise NotOneValueFound('Expected one value, found several', head)
    return head[0]
|
| 120 |
+
|
| 121 |
+
|
| 122 |
+
class Source(object):
|
| 123 |
+
"""
|
| 124 |
+
The source code of a single file and associated metadata.
|
| 125 |
+
|
| 126 |
+
The main method of interest is the classmethod `executing(frame)`.
|
| 127 |
+
|
| 128 |
+
If you want an instance of this class, don't construct it.
|
| 129 |
+
Ideally use the classmethod `for_frame(frame)`.
|
| 130 |
+
If you don't have a frame, use `for_filename(filename [, module_globals])`.
|
| 131 |
+
These methods cache instances by filename, so at most one instance exists per filename.
|
| 132 |
+
|
| 133 |
+
Attributes:
|
| 134 |
+
- filename
|
| 135 |
+
- text
|
| 136 |
+
- lines
|
| 137 |
+
- tree: AST parsed from text, or None if text is not valid Python
|
| 138 |
+
All nodes in the tree have an extra `parent` attribute
|
| 139 |
+
|
| 140 |
+
Other methods of interest:
|
| 141 |
+
- statements_at_line
|
| 142 |
+
- asttokens
|
| 143 |
+
- code_qualname
|
| 144 |
+
"""
|
| 145 |
+
|
| 146 |
+
def __init__(self, filename, lines):
|
| 147 |
+
# type: (str, Sequence[str]) -> None
|
| 148 |
+
"""
|
| 149 |
+
Don't call this constructor, see the class docstring.
|
| 150 |
+
"""
|
| 151 |
+
|
| 152 |
+
self.filename = filename
|
| 153 |
+
self.text = ''.join(lines)
|
| 154 |
+
self.lines = [line.rstrip('\r\n') for line in lines]
|
| 155 |
+
|
| 156 |
+
self._nodes_by_line = defaultdict(list)
|
| 157 |
+
self.tree = None
|
| 158 |
+
self._qualnames = {}
|
| 159 |
+
self._asttokens = None # type: Optional[ASTTokens]
|
| 160 |
+
self._asttext = None # type: Optional[ASTText]
|
| 161 |
+
|
| 162 |
+
try:
|
| 163 |
+
self.tree = ast.parse(self.text, filename=filename)
|
| 164 |
+
except (SyntaxError, ValueError):
|
| 165 |
+
pass
|
| 166 |
+
else:
|
| 167 |
+
for node in ast.walk(self.tree):
|
| 168 |
+
for child in ast.iter_child_nodes(node):
|
| 169 |
+
cast(EnhancedAST, child).parent = cast(EnhancedAST, node)
|
| 170 |
+
for lineno in node_linenos(node):
|
| 171 |
+
self._nodes_by_line[lineno].append(node)
|
| 172 |
+
|
| 173 |
+
visitor = QualnameVisitor()
|
| 174 |
+
visitor.visit(self.tree)
|
| 175 |
+
self._qualnames = visitor.qualnames
|
| 176 |
+
|
| 177 |
+
@classmethod
|
| 178 |
+
def for_frame(cls, frame, use_cache=True):
|
| 179 |
+
# type: (types.FrameType, bool) -> "Source"
|
| 180 |
+
"""
|
| 181 |
+
Returns the `Source` object corresponding to the file the frame is executing in.
|
| 182 |
+
"""
|
| 183 |
+
return cls.for_filename(frame.f_code.co_filename, frame.f_globals or {}, use_cache)
|
| 184 |
+
|
| 185 |
+
    @classmethod
    def for_filename(
        cls,
        filename,
        module_globals=None,
        use_cache=True, # noqa no longer used
    ):
        # type: (Union[str, Path], Optional[Dict[str, Any]], bool) -> "Source"
        """
        Returns the `Source` object for `filename`, using linecache to fetch
        the lines (so non-file sources registered there also work).
        Instances are cached per (filename, lines); see `_for_filename_and_lines`.
        """
        if isinstance(filename, Path):
            filename = str(filename)

        def get_lines():
            # type: () -> List[str]
            return linecache.getlines(cast(str, filename), module_globals)

        # Save the current linecache entry, then ensure the cache is up to date.
        entry = linecache.cache.get(filename) # type: ignore[attr-defined]
        linecache.checkcache(filename)
        lines = get_lines()
        if entry is not None and not lines:
            # There was an entry, checkcache removed it, and nothing replaced it.
            # This means the file wasn't simply changed (because the `lines` wouldn't be empty)
            # but rather the file was found not to exist, probably because `filename` was fake.
            # Restore the original entry so that we still have something.
            linecache.cache[filename] = entry # type: ignore[attr-defined]
            lines = get_lines()

        return cls._for_filename_and_lines(filename, tuple(lines))
@classmethod
|
| 215 |
+
def _for_filename_and_lines(cls, filename, lines):
|
| 216 |
+
# type: (str, Sequence[str]) -> "Source"
|
| 217 |
+
source_cache = cls._class_local('__source_cache_with_lines', {}) # type: Dict[Tuple[str, Sequence[str]], Source]
|
| 218 |
+
try:
|
| 219 |
+
return source_cache[(filename, lines)]
|
| 220 |
+
except KeyError:
|
| 221 |
+
pass
|
| 222 |
+
|
| 223 |
+
result = source_cache[(filename, lines)] = cls(filename, lines)
|
| 224 |
+
return result
|
| 225 |
+
|
| 226 |
+
@classmethod
|
| 227 |
+
def lazycache(cls, frame):
|
| 228 |
+
# type: (types.FrameType) -> None
|
| 229 |
+
linecache.lazycache(frame.f_code.co_filename, frame.f_globals)
|
| 230 |
+
|
| 231 |
+
    @classmethod
    def executing(cls, frame_or_tb):
        # type: (Union[types.TracebackType, types.FrameType]) -> "Executing"
        """
        Returns an `Executing` object representing the operation
        currently executing in the given frame or traceback object.
        """
        if isinstance(frame_or_tb, types.TracebackType):
            # https://docs.python.org/3/reference/datamodel.html#traceback-objects
            # "tb_lineno gives the line number where the exception occurred;
            # tb_lasti indicates the precise instruction.
            # The line number and last instruction in the traceback may differ
            # from the line number of its frame object
            # if the exception occurred in a try statement with no matching except clause
            # or with a finally clause."
            tb = frame_or_tb
            frame = tb.tb_frame
            lineno = tb.tb_lineno
            lasti = tb.tb_lasti
        else:
            frame = frame_or_tb
            lineno = frame.f_lineno
            lasti = frame.f_lasti

        code = frame.f_code
        # NOTE(review): id(code) in the key looks redundant given `code` itself
        # is included — presumably kept for hashing/lookup reasons; confirm.
        key = (code, id(code), lasti)
        executing_cache = cls._class_local('__executing_cache', {}) # type: Dict[Tuple[types.CodeType, int, int], Any]

        args = executing_cache.get(key)
        if not args:
            node = stmts = decorator = None
            source = cls.for_frame(frame)
            tree = source.tree
            if tree:
                try:
                    stmts = source.statements_at_line(lineno)
                    if stmts:
                        if is_ipython_cell_code(code):
                            decorator, node = find_node_ipython(frame, lasti, stmts, source)
                        else:
                            node_finder = NodeFinder(frame, stmts, tree, lasti, source)
                            node = node_finder.result
                            decorator = node_finder.decorator

                    if node:
                        new_stmts = {statement_containing_node(node)}
                        assert_(new_stmts <= stmts)
                        stmts = new_stmts
                except Exception:
                    # Identification is best effort: swallow failures and fall
                    # back to node=None, except when running the test suite.
                    if TESTING:
                        raise

            executing_cache[key] = args = source, node, stmts, decorator

        return Executing(frame, *args)
@classmethod
|
| 290 |
+
def _class_local(cls, name, default):
|
| 291 |
+
# type: (str, T) -> T
|
| 292 |
+
"""
|
| 293 |
+
Returns an attribute directly associated with this class
|
| 294 |
+
(as opposed to subclasses), setting default if necessary
|
| 295 |
+
"""
|
| 296 |
+
# classes have a mappingproxy preventing us from using setdefault
|
| 297 |
+
result = cls.__dict__.get(name, default)
|
| 298 |
+
setattr(cls, name, result)
|
| 299 |
+
return result
|
| 300 |
+
|
| 301 |
+
@cache
|
| 302 |
+
def statements_at_line(self, lineno):
|
| 303 |
+
# type: (int) -> Set[EnhancedAST]
|
| 304 |
+
"""
|
| 305 |
+
Returns the statement nodes overlapping the given line.
|
| 306 |
+
|
| 307 |
+
Returns at most one statement unless semicolons are present.
|
| 308 |
+
|
| 309 |
+
If the `text` attribute is not valid python, meaning
|
| 310 |
+
`tree` is None, returns an empty set.
|
| 311 |
+
|
| 312 |
+
Otherwise, `Source.for_frame(frame).statements_at_line(frame.f_lineno)`
|
| 313 |
+
should return at least one statement.
|
| 314 |
+
"""
|
| 315 |
+
|
| 316 |
+
return {
|
| 317 |
+
statement_containing_node(node)
|
| 318 |
+
for node in
|
| 319 |
+
self._nodes_by_line[lineno]
|
| 320 |
+
}
|
| 321 |
+
|
| 322 |
+
def asttext(self):
|
| 323 |
+
# type: () -> ASTText
|
| 324 |
+
"""
|
| 325 |
+
Returns an ASTText object for getting the source of specific AST nodes.
|
| 326 |
+
|
| 327 |
+
See http://asttokens.readthedocs.io/en/latest/api-index.html
|
| 328 |
+
"""
|
| 329 |
+
from asttokens import ASTText # must be installed separately
|
| 330 |
+
|
| 331 |
+
if self._asttext is None:
|
| 332 |
+
self._asttext = ASTText(self.text, tree=self.tree, filename=self.filename)
|
| 333 |
+
|
| 334 |
+
return self._asttext
|
| 335 |
+
|
| 336 |
+
    def asttokens(self):
        # type: () -> ASTTokens
        """
        Returns an ASTTokens object for getting the source of specific AST nodes.

        See http://asttokens.readthedocs.io/en/latest/api-index.html
        """
        import asttokens # must be installed separately

        if self._asttokens is None:
            if hasattr(asttokens, 'ASTText'):
                # Newer asttokens: derive the ASTTokens from the cached ASTText.
                self._asttokens = self.asttext().asttokens
            else: # pragma: no cover
                # Older asttokens without ASTText: construct directly.
                self._asttokens = asttokens.ASTTokens(self.text, tree=self.tree, filename=self.filename)
        return self._asttokens
def _asttext_base(self):
|
| 353 |
+
# type: () -> ASTTextBase
|
| 354 |
+
import asttokens # must be installed separately
|
| 355 |
+
|
| 356 |
+
if hasattr(asttokens, 'ASTText'):
|
| 357 |
+
return self.asttext()
|
| 358 |
+
else: # pragma: no cover
|
| 359 |
+
return self.asttokens()
|
| 360 |
+
|
| 361 |
+
@staticmethod
|
| 362 |
+
def decode_source(source):
|
| 363 |
+
# type: (Union[str, bytes]) -> str
|
| 364 |
+
if isinstance(source, bytes):
|
| 365 |
+
encoding = Source.detect_encoding(source)
|
| 366 |
+
return source.decode(encoding)
|
| 367 |
+
else:
|
| 368 |
+
return source
|
| 369 |
+
|
| 370 |
+
    @staticmethod
    def detect_encoding(source):
        # type: (bytes) -> str
        """Return the declared encoding of Python source bytes."""
        # `detect_encoding` here is the module-level import (presumably
        # tokenize.detect_encoding — confirm at file top), not recursion.
        return detect_encoding(io.BytesIO(source).readline)[0]
def code_qualname(self, code):
|
| 376 |
+
# type: (types.CodeType) -> str
|
| 377 |
+
"""
|
| 378 |
+
Imitates the __qualname__ attribute of functions for code objects.
|
| 379 |
+
Given:
|
| 380 |
+
|
| 381 |
+
- A function `func`
|
| 382 |
+
- A frame `frame` for an execution of `func`, meaning:
|
| 383 |
+
`frame.f_code is func.__code__`
|
| 384 |
+
|
| 385 |
+
`Source.for_frame(frame).code_qualname(frame.f_code)`
|
| 386 |
+
will be equal to `func.__qualname__`*. Works for Python 2 as well,
|
| 387 |
+
where of course no `__qualname__` attribute exists.
|
| 388 |
+
|
| 389 |
+
Falls back to `code.co_name` if there is no appropriate qualname.
|
| 390 |
+
|
| 391 |
+
Based on https://github.com/wbolster/qualname
|
| 392 |
+
|
| 393 |
+
(* unless `func` is a lambda
|
| 394 |
+
nested inside another lambda on the same line, in which case
|
| 395 |
+
the outer lambda's qualname will be returned for the codes
|
| 396 |
+
of both lambdas)
|
| 397 |
+
"""
|
| 398 |
+
assert_(code.co_filename == self.filename)
|
| 399 |
+
return self._qualnames.get((code.co_name, code.co_firstlineno), code.co_name)
|
| 400 |
+
|
| 401 |
+
|
| 402 |
+
class Executing(object):
    """
    Describes the operation a frame is currently performing.

    The main attribute is `node`: the AST node being executed, or None
    when it could not be identified.

    While a decorator application is in progress:
    - `node` is the decorated function or class definition
    - `decorator` is the entry of `node.decorator_list` being called
    - `statements == {node}`
    """

    def __init__(self, frame, source, node, stmts, decorator):
        # type: (types.FrameType, Source, EnhancedAST, Set[ast.stmt], Optional[EnhancedAST]) -> None
        self.frame = frame
        self.source = source
        self.node = node
        self.statements = stmts
        self.decorator = decorator

    def code_qualname(self):
        # type: () -> str
        """Qualified name of the code object this frame is running."""
        return self.source.code_qualname(self.frame.f_code)

    def text(self):
        # type: () -> str
        """Source text of `node`."""
        helper = self.source._asttext_base()
        return helper.get_text(self.node)

    def text_range(self):
        # type: () -> Tuple[int, int]
        """(start, end) offsets of `node` within the source text."""
        helper = self.source._asttext_base()
        return helper.get_text_range(self.node)
class QualnameVisitor(ast.NodeVisitor):
    """
    Records the qualified name of every function, lambda and class in a
    tree, keyed by (name, lineno) — lineno being the first decorator's
    line when decorators are present.
    """

    def __init__(self):
        # type: () -> None
        super(QualnameVisitor, self).__init__()
        self.stack = []  # type: List[str]
        self.qualnames = {}  # type: Dict[Tuple[str, int], str]

    def add_qualname(self, node, name=None):
        # type: (ast.AST, Optional[str]) -> None
        """Push `name` onto the scope stack and record the qualname."""
        name = name or node.name  # type: ignore[attr-defined]
        self.stack.append(name)
        decorators = getattr(node, 'decorator_list', ())
        lineno = decorators[0].lineno if decorators else node.lineno  # type: ignore[attr-defined]
        self.qualnames.setdefault((name, lineno), ".".join(self.stack))

    def visit_FunctionDef(self, node, name=None):
        # type: (ast.AST, Optional[str]) -> None
        assert isinstance(node, (ast.FunctionDef, ast.AsyncFunctionDef, ast.Lambda)), node
        self.add_qualname(node, name)
        self.stack.append('<locals>')
        body = [node.body] if isinstance(node, ast.Lambda) else node.body
        for child in body:
            self.visit(child)
        # Pop both the '<locals>' marker and the function name.
        del self.stack[-2:]

        # Visit everything outside the body (decorators, default arguments)
        # so lambdas hiding there also get qualnames. Based on iter_child_nodes.
        for field, value in ast.iter_fields(node):
            if field == 'body':
                continue
            if isinstance(value, ast.AST):
                self.visit(value)
            elif isinstance(value, list):
                for item in value:
                    if isinstance(item, ast.AST):
                        self.visit(item)

    visit_AsyncFunctionDef = visit_FunctionDef

    def visit_Lambda(self, node):
        # type: (ast.AST) -> None
        assert isinstance(node, ast.Lambda)
        self.visit_FunctionDef(node, '<lambda>')

    def visit_ClassDef(self, node):
        # type: (ast.AST) -> None
        assert isinstance(node, ast.ClassDef)
        self.add_qualname(node)
        self.generic_visit(node)
        self.stack.pop()
|
| 496 |
+
|
| 497 |
+
|
| 498 |
+
|
| 499 |
+
# Bitmask of every `__future__` compiler flag; used by compile_similar_to()
# to reproduce only the future flags the original code object was compiled with.
future_flags = sum(
    getattr(__future__, fname).compiler_flag for fname in __future__.all_feature_names
)
+
|
| 504 |
+
def compile_similar_to(source, matching_code):
    # type: (ast.Module, types.CodeType) -> Any
    """
    Compile `source` the way `matching_code` was compiled: same filename,
    and only the `__future__` flags present in the original code object
    (`dont_inherit=True` keeps this module's own flags from leaking in).
    """
    return compile(
        source,
        matching_code.co_filename,
        'exec',
        flags=future_flags & matching_code.co_flags,
        dont_inherit=True,
    )
|
| 515 |
+
# Improbable marker string injected into candidate expressions (as
# `expr ** sentinel`) so the resulting bytecode can be located when the
# module is recompiled; see SentinelNodeFinder.matching_nodes.
sentinel = 'io8urthglkjdghvljusketgIYRFYUVGHFRTBGVHKGF78678957647698'
+
def is_rewritten_by_pytest(code):
    # type: (types.CodeType) -> bool
    """
    Heuristic for pytest's assertion rewriting: it introduces "@py..."
    names into the bytecode via non-LOAD_CONST instructions.
    """
    for bc in get_instructions(code):
        if bc.opname == "LOAD_CONST":
            continue
        argval = bc.argval
        if isinstance(argval, str) and argval.startswith("@py"):
            return True
    return False
|
| 525 |
+
class SentinelNodeFinder(object):
    """
    Identifies the AST node being executed at a given bytecode offset.

    Strategy (see `matching_nodes`): for each candidate expression on the
    current line, temporarily replace it with `candidate ** sentinel`,
    recompile the module, and check whether the sentinel's bytecode lands
    at the same instruction index as the frame's current instruction.
    """

    # The identified node; left as the class default when not set.
    result = None # type: EnhancedAST

    def __init__(self, frame, stmts, tree, lasti, source):
        # type: (types.FrameType, Set[EnhancedAST], ast.Module, int, Source) -> None
        assert_(stmts)
        self.frame = frame
        self.tree = tree
        self.code = code = frame.f_code
        self.is_pytest = is_rewritten_by_pytest(code)

        if self.is_pytest:
            # pytest rewrites assert statements, so their lines can't be
            # compared between original and recompiled bytecode.
            self.ignore_linenos = frozenset(assert_linenos(tree))
        else:
            self.ignore_linenos = frozenset()

        self.decorator = None

        self.instruction = instruction = self.get_actual_current_instruction(lasti)
        op_name = instruction.opname
        extra_filter = lambda e: True # type: Callable[[Any], bool]
        ctx = type(None) # type: Type

        # Map the current opcode to the AST node type (plus expression
        # context and, where relevant, operator) that could produce it.
        typ = type(None) # type: Type
        if op_name.startswith('CALL_'):
            typ = ast.Call
        elif op_name.startswith(('BINARY_SUBSCR', 'SLICE+')):
            typ = ast.Subscript
            ctx = ast.Load
        elif op_name.startswith('BINARY_'):
            typ = ast.BinOp
            op_type = dict(
                BINARY_POWER=ast.Pow,
                BINARY_MULTIPLY=ast.Mult,
                BINARY_MATRIX_MULTIPLY=getattr(ast, "MatMult", ()),
                BINARY_FLOOR_DIVIDE=ast.FloorDiv,
                BINARY_TRUE_DIVIDE=ast.Div,
                BINARY_MODULO=ast.Mod,
                BINARY_ADD=ast.Add,
                BINARY_SUBTRACT=ast.Sub,
                BINARY_LSHIFT=ast.LShift,
                BINARY_RSHIFT=ast.RShift,
                BINARY_AND=ast.BitAnd,
                BINARY_XOR=ast.BitXor,
                BINARY_OR=ast.BitOr,
            )[op_name]
            extra_filter = lambda e: isinstance(e.op, op_type)
        elif op_name.startswith('UNARY_'):
            typ = ast.UnaryOp
            op_type = dict(
                UNARY_POSITIVE=ast.UAdd,
                UNARY_NEGATIVE=ast.USub,
                UNARY_NOT=ast.Not,
                UNARY_INVERT=ast.Invert,
            )[op_name]
            extra_filter = lambda e: isinstance(e.op, op_type)
        elif op_name in ('LOAD_ATTR', 'LOAD_METHOD', 'LOOKUP_METHOD'):
            typ = ast.Attribute
            ctx = ast.Load
            extra_filter = lambda e: attr_names_match(e.attr, instruction.argval)
        elif op_name in ('LOAD_NAME', 'LOAD_GLOBAL', 'LOAD_FAST', 'LOAD_DEREF', 'LOAD_CLASSDEREF'):
            typ = ast.Name
            ctx = ast.Load
            extra_filter = lambda e: e.id == instruction.argval
        elif op_name in ('COMPARE_OP', 'IS_OP', 'CONTAINS_OP'):
            typ = ast.Compare
            extra_filter = lambda e: len(e.ops) == 1
        elif op_name.startswith(('STORE_SLICE', 'STORE_SUBSCR')):
            ctx = ast.Store
            typ = ast.Subscript
        elif op_name.startswith('STORE_ATTR'):
            ctx = ast.Store
            typ = ast.Attribute
            extra_filter = lambda e: attr_names_match(e.attr, instruction.argval)
        else:
            raise RuntimeError(op_name)

        with lock:
            # Candidate expressions: nodes of the right type/context inside
            # the current statement(s).
            exprs = {
                cast(EnhancedAST, node)
                for stmt in stmts
                for node in ast.walk(stmt)
                if isinstance(node, typ)
                if isinstance(getattr(node, "ctx", None), ctx)
                if extra_filter(node)
                if statement_containing_node(node) == stmt
            }

            if ctx == ast.Store:
                # No special bytecode tricks here.
                # We can handle multiple assigned attributes with different names,
                # but only one assigned subscript.
                self.result = only(exprs)
                return

            matching = list(self.matching_nodes(exprs))
            if not matching and typ == ast.Call:
                # A CALL with no matching Call node in the source is likely a
                # decorator application.
                self.find_decorator(stmts)
            else:
                self.result = only(matching)

    def find_decorator(self, stmts):
        # type: (Union[List[EnhancedAST], Set[EnhancedAST]]) -> None
        # Work out which decorator of a function/class definition is being
        # applied, by the current instruction's position among the
        # CALL_FUNCTION instructions on the definition's line.
        stmt = only(stmts)
        assert_(isinstance(stmt, (ast.ClassDef, function_node_types)))
        decorators = stmt.decorator_list # type: ignore[attr-defined]
        assert_(decorators)
        line_instructions = [
            inst
            for inst in self.clean_instructions(self.code)
            if inst.lineno == self.frame.f_lineno
        ]
        last_decorator_instruction_index = [
            i
            for i, inst in enumerate(line_instructions)
            if inst.opname == "CALL_FUNCTION"
        ][-1]
        # The final decorator call is followed by storing the decorated object.
        assert_(
            line_instructions[last_decorator_instruction_index + 1].opname.startswith(
                "STORE_"
            )
        )
        decorator_instructions = line_instructions[
            last_decorator_instruction_index
            - len(decorators)
            + 1 : last_decorator_instruction_index
            + 1
        ]
        assert_({inst.opname for inst in decorator_instructions} == {"CALL_FUNCTION"})
        decorator_index = decorator_instructions.index(self.instruction)
        # Decorators are applied bottom-up, so index from the end of the list.
        decorator = decorators[::-1][decorator_index]
        self.decorator = decorator
        self.result = stmt

    def clean_instructions(self, code):
        # type: (types.CodeType) -> List[EnhancedInstruction]
        # Instructions with comparison noise removed: EXTENDED_ARG/NOP, and
        # (under pytest) lines belonging to rewritten asserts.
        return [
            inst
            for inst in get_instructions(code)
            if inst.opname not in ("EXTENDED_ARG", "NOP")
            if inst.lineno not in self.ignore_linenos
        ]

    def get_original_clean_instructions(self):
        # type: () -> List[EnhancedInstruction]
        result = self.clean_instructions(self.code)

        # pypy sometimes (when is not clear)
        # inserts JUMP_IF_NOT_DEBUG instructions in bytecode
        # If they're not present in our compiled instructions,
        # ignore them in the original bytecode
        if not any(
            inst.opname == "JUMP_IF_NOT_DEBUG"
            for inst in self.compile_instructions()
        ):
            result = [
                inst for inst in result
                if inst.opname != "JUMP_IF_NOT_DEBUG"
            ]

        return result

    def matching_nodes(self, exprs):
        # type: (Set[EnhancedAST]) -> Iterator[EnhancedAST]
        # Yields each candidate expression whose sentinel-marked bytecode
        # lines up with the frame's current instruction index.
        original_instructions = self.get_original_clean_instructions()
        original_index = only(
            i
            for i, inst in enumerate(original_instructions)
            if inst == self.instruction
        )
        for expr_index, expr in enumerate(exprs):
            setter = get_setter(expr)
            assert setter is not None
            # noinspection PyArgumentList
            replacement = ast.BinOp(
                left=expr,
                op=ast.Pow(),
                right=ast.Str(s=sentinel),
            )
            ast.fix_missing_locations(replacement)
            setter(replacement)
            try:
                instructions = self.compile_instructions()
            finally:
                # Always restore the tree, even if compilation fails.
                setter(expr)

            if sys.version_info >= (3, 10):
                try:
                    handle_jumps(instructions, original_instructions)
                except Exception:
                    # Give other candidates a chance
                    if TESTING or expr_index < len(exprs) - 1:
                        continue
                    raise

            indices = [
                i
                for i, instruction in enumerate(instructions)
                if instruction.argval == sentinel
            ]

            # There can be several indices when the bytecode is duplicated,
            # as happens in a finally block in 3.9+
            # First we remove the opcodes caused by our modifications
            for index_num, sentinel_index in enumerate(indices):
                # Adjustment for removing sentinel instructions below
                # in past iterations
                sentinel_index -= index_num * 2

                assert_(instructions.pop(sentinel_index).opname == 'LOAD_CONST')
                assert_(instructions.pop(sentinel_index).opname == 'BINARY_POWER')

            # Then we see if any of the instruction indices match
            for index_num, sentinel_index in enumerate(indices):
                sentinel_index -= index_num * 2
                new_index = sentinel_index - 1

                if new_index != original_index:
                    continue

                original_inst = original_instructions[original_index]
                new_inst = instructions[new_index]

                # In Python 3.9+, changing 'not x in y' to 'not sentinel_transformation(x in y)'
                # changes a CONTAINS_OP(invert=1) to CONTAINS_OP(invert=0),<sentinel stuff>,UNARY_NOT
                if (
                    original_inst.opname == new_inst.opname in ('CONTAINS_OP', 'IS_OP')
                    and original_inst.arg != new_inst.arg # type: ignore[attr-defined]
                    and (
                        original_instructions[original_index + 1].opname
                        != instructions[new_index + 1].opname == 'UNARY_NOT'
                )):
                    # Remove the difference for the upcoming assert
                    instructions.pop(new_index + 1)

                # Check that the modified instructions don't have anything unexpected
                # 3.10 is a bit too weird to assert this in all cases but things still work
                if sys.version_info < (3, 10):
                    for inst1, inst2 in zip_longest(
                        original_instructions, instructions
                    ):
                        assert_(inst1 and inst2 and opnames_match(inst1, inst2))

                yield expr

    def compile_instructions(self):
        # type: () -> List[EnhancedInstruction]
        # Recompile the (possibly modified) tree and extract the cleaned
        # instructions of the code object corresponding to self.code.
        module_code = compile_similar_to(self.tree, self.code)
        code = only(self.find_codes(module_code))
        return self.clean_instructions(code)

    def find_codes(self, root_code):
        # type: (types.CodeType) -> list
        # Find code objects nested in root_code whose metadata matches self.code.
        checks = [
            attrgetter('co_firstlineno'),
            attrgetter('co_freevars'),
            attrgetter('co_cellvars'),
            lambda c: is_ipython_cell_code_name(c.co_name) or c.co_name,
        ] # type: List[Callable]
        if not self.is_pytest:
            # pytest's rewriting alters names/varnames, so only compare them
            # when the code wasn't rewritten.
            checks += [
                attrgetter('co_names'),
                attrgetter('co_varnames'),
            ]

        def matches(c):
            # type: (types.CodeType) -> bool
            return all(
                f(c) == f(self.code)
                for f in checks
            )

        code_options = []
        if matches(root_code):
            code_options.append(root_code)

        def finder(code):
            # type: (types.CodeType) -> None
            # Recursively search nested code objects in co_consts.
            for const in code.co_consts:
                if not inspect.iscode(const):
                    continue

                if matches(const):
                    code_options.append(const)
                finder(const)

        finder(root_code)
        return code_options

    def get_actual_current_instruction(self, lasti):
        # type: (int) -> EnhancedInstruction
        """
        Get the instruction corresponding to the current
        frame offset, skipping EXTENDED_ARG instructions
        """
        # Don't use get_original_clean_instructions
        # because we need the actual instructions including
        # EXTENDED_ARG
        instructions = list(get_instructions(self.code))
        index = only(
            i
            for i, inst in enumerate(instructions)
            if inst.offset == lasti
        )

        while True:
            instruction = instructions[index]
            if instruction.opname != "EXTENDED_ARG":
                return instruction
            index += 1
| 838 |
+
def non_sentinel_instructions(instructions, start):
    # type: (List[EnhancedInstruction], int) -> Iterator[Tuple[int, EnhancedInstruction]]
    """
    Yields (index, instruction) pairs excluding the basic
    instructions introduced by the sentinel transformation
    """
    skip_power = False
    for i, inst in islice(enumerate(instructions), start, None):
        if inst.argval == sentinel:
            # LOAD_CONST of the sentinel string: skip it, and remember to
            # also skip the BINARY_POWER that must follow it.
            assert_(inst.opname == "LOAD_CONST")
            skip_power = True
            continue
        elif skip_power:
            assert_(inst.opname == "BINARY_POWER")
            skip_power = False
            continue
        yield i, inst
|
| 857 |
+
def walk_both_instructions(original_instructions, original_start, instructions, start):
    # type: (List[EnhancedInstruction], int, List[EnhancedInstruction], int) -> Iterator[Tuple[int, EnhancedInstruction, int, EnhancedInstruction]]
    """
    Yields matching indices and instructions from the new and original instructions,
    leaving out changes made by the sentinel transformation.
    """
    original_iter = islice(enumerate(original_instructions), original_start, None)
    new_iter = non_sentinel_instructions(instructions, start)
    inverted_comparison = False
    while True:
        try:
            original_i, original_inst = next(original_iter)
            new_i, new_inst = next(new_iter)
        except StopIteration:
            return
        if (
            inverted_comparison
            and original_inst.opname != new_inst.opname == "UNARY_NOT"
        ):
            # The sentinel transformation turned an inverted comparison
            # (e.g. CONTAINS_OP(invert=1)) into the plain op plus UNARY_NOT;
            # skip the extra UNARY_NOT to keep the streams aligned.
            new_i, new_inst = next(new_iter)
        inverted_comparison = (
            original_inst.opname == new_inst.opname in ("CONTAINS_OP", "IS_OP")
            and original_inst.arg != new_inst.arg # type: ignore[attr-defined]
        )
        yield original_i, original_inst, new_i, new_inst
|
| 884 |
+
def handle_jumps(instructions, original_instructions):
    # type: (List[EnhancedInstruction], List[EnhancedInstruction]) -> None
    """
    Transforms instructions in place until it looks more like original_instructions.
    This is only needed in 3.10+ where optimisations lead to more drastic changes
    after the sentinel transformation.
    Replaces JUMP instructions that aren't also present in original_instructions
    with the sections that they jump to until a raise or return.
    In some other cases duplication found in `original_instructions`
    is replicated in `instructions`.
    """
    while True:
        for original_i, original_inst, new_i, new_inst in walk_both_instructions(
            original_instructions, 0, instructions, 0
        ):
            if opnames_match(original_inst, new_inst):
                continue

            if "JUMP" in new_inst.opname and "JUMP" not in original_inst.opname:
                # Find where the new instruction is jumping to, ignoring
                # instructions which have been copied in previous iterations
                start = only(
                    i
                    for i, inst in enumerate(instructions)
                    if inst.offset == new_inst.argval
                    and not getattr(inst, "_copied", False)
                )
                # Replace the jump instruction with the jumped to section of instructions
                # That section may also be deleted if it's not similarly duplicated
                # in original_instructions
                new_instructions = handle_jump(
                    original_instructions, original_i, instructions, start
                )
                assert new_instructions is not None
                instructions[new_i : new_i + 1] = new_instructions
            else:
                # Extract a section of original_instructions from original_i to return/raise
                orig_section = []
                for section_inst in original_instructions[original_i:]:
                    orig_section.append(section_inst)
                    if section_inst.opname in ("RETURN_VALUE", "RAISE_VARARGS"):
                        break
                else:
                    # No return/raise - this is just a mismatch we can't handle
                    raise AssertionError

                instructions[new_i:new_i] = only(find_new_matching(orig_section, instructions))

            # instructions has been modified, the for loop can't sensibly continue
            # Restart it from the beginning, checking for other issues
            break

        else:  # No mismatched jumps found, we're done
            return
|
| 940 |
+
def find_new_matching(orig_section, instructions):
    # type: (List[EnhancedInstruction], List[EnhancedInstruction]) -> Iterator[List[EnhancedInstruction]]
    """
    Yields sections of `instructions` which match `orig_section`.
    The yielded sections include sentinel instructions, but these
    are ignored when checking for matches.
    """
    for start in range(len(instructions) - len(orig_section)):
        indices, dup_section = zip(
            *islice(
                non_sentinel_instructions(instructions, start),
                len(orig_section),
            )
        )
        if len(dup_section) < len(orig_section):
            # Ran out of instructions: no later start can match either.
            return
        if sections_match(orig_section, dup_section):
            # indices[-1] is the index of the last matched instruction, so the
            # slice keeps any interleaved sentinel instructions too.
            yield instructions[start:indices[-1] + 1]
|
| 960 |
+
def handle_jump(original_instructions, original_start, instructions, start):
    # type: (List[EnhancedInstruction], int, List[EnhancedInstruction], int) -> Optional[List[EnhancedInstruction]]
    """
    Returns the section of instructions starting at `start` and ending
    with a RETURN_VALUE or RAISE_VARARGS instruction.
    There should be a matching section in original_instructions starting at original_start.
    If that section doesn't appear elsewhere in original_instructions,
    then also delete the returned section of instructions.
    """
    for original_j, original_inst, new_j, new_inst in walk_both_instructions(
        original_instructions, original_start, instructions, start
    ):
        assert_(opnames_match(original_inst, new_inst))
        if original_inst.opname in ("RETURN_VALUE", "RAISE_VARARGS"):
            # End of the section found. Deep-copy it so the caller can
            # splice it elsewhere without sharing instruction objects,
            # and mark each copy so it can be recognized later.
            inlined = deepcopy(instructions[start : new_j + 1])
            for inl in inlined:
                inl._copied = True
            orig_section = original_instructions[original_start : original_j + 1]
            if not check_duplicates(
                original_start, orig_section, original_instructions
            ):
                # The section is unique in the original bytecode, so this
                # occurrence in `instructions` is a compiler duplicate:
                # remove it in place.
                instructions[start : new_j + 1] = []
            return inlined

    # Walked off the end without hitting a return/raise.
    return None
|
| 985 |
+
|
| 986 |
+
|
| 987 |
+
def check_duplicates(original_i, orig_section, original_instructions):
    # type: (int, List[EnhancedInstruction], List[EnhancedInstruction]) -> bool
    """
    Returns True if a section of original_instructions starting somewhere other
    than original_i and matching orig_section is found, i.e. orig_section is duplicated.
    """
    for dup_start in range(len(original_instructions)):
        if dup_start == original_i:
            continue
        dup_section = original_instructions[dup_start : dup_start + len(orig_section)]
        if len(dup_section) < len(orig_section):
            # Slice ran off the end of the list; every later start yields
            # an even shorter slice, so no match is possible from here on.
            return False
        if sections_match(orig_section, dup_section):
            return True

    return False
|
| 1003 |
+
|
| 1004 |
+
def sections_match(orig_section, dup_section):
    # type: (List[EnhancedInstruction], List[EnhancedInstruction]) -> bool
    """
    Returns True if the given lists of instructions have matching linenos and opnames.
    """
    for orig_inst, dup_inst in zip(orig_section, dup_section):
        linenos_ok = (
            orig_inst.lineno == dup_inst.lineno
            # POP_BLOCKs have been found to have differing linenos in innocent cases
            or orig_inst.opname == dup_inst.opname == "POP_BLOCK"
        )
        if not linenos_ok or not opnames_match(orig_inst, dup_inst):
            return False
    return True
|
| 1018 |
+
|
| 1019 |
+
|
| 1020 |
+
def opnames_match(inst1, inst2):
    # type: (Instruction, Instruction) -> bool
    """Return True if the two instructions' opnames should be treated as equivalent."""
    name1 = inst1.opname
    name2 = inst2.opname
    if name1 == name2:
        return True
    # All jump variants are considered interchangeable with one another.
    if "JUMP" in name1 and "JUMP" in name2:
        return True
    # The remaining pairs are one-directional compiler substitutions.
    if name1 == "PRINT_EXPR" and name2 == "POP_TOP":
        return True
    if name1 in ("LOAD_METHOD", "LOOKUP_METHOD") and name2 == "LOAD_ATTR":
        return True
    return name1 == "CALL_METHOD" and name2 == "CALL_FUNCTION"
|
| 1033 |
+
|
| 1034 |
+
|
| 1035 |
+
def get_setter(node):
    # type: (EnhancedAST) -> Optional[Callable[[ast.AST], None]]
    """Return a callable that replaces `node` inside its parent, or None
    if `node` cannot be located among the parent's fields."""
    parent = node.parent
    for field_name, value in ast.iter_fields(parent):
        if value is node:
            # Node is stored directly as an attribute of the parent.
            def assign_attr(new_node, _name=field_name):
                # type: (ast.AST, str) -> None
                setattr(parent, _name, new_node)
            return assign_attr
        if isinstance(value, list):
            # Node may be an element of a list-valued field (e.g. body).
            for index, element in enumerate(value):
                if element is node:
                    def assign_item(new_node, _lst=value, _idx=index):
                        # type: (ast.AST, list, int) -> None
                        _lst[_idx] = new_node

                    return assign_item
    return None
|
| 1053 |
+
|
| 1054 |
+
lock = RLock()
|
| 1055 |
+
|
| 1056 |
+
|
| 1057 |
+
@cache
def statement_containing_node(node):
    # type: (ast.AST) -> EnhancedAST
    """Walk up `.parent` links from `node` to the enclosing ast.stmt.

    Results are memoized, so repeated queries for the same node are cheap.
    """
    current = node
    while not isinstance(current, ast.stmt):
        current = cast(EnhancedAST, current).parent
    return cast(EnhancedAST, current)
|
| 1063 |
+
|
| 1064 |
+
|
| 1065 |
+
def assert_linenos(tree):
    # type: (ast.AST) -> Iterator[int]
    """Yield the line numbers of all nodes in `tree` that belong to an
    `assert` statement (only nodes carrying a `.parent` link are considered)."""
    for descendant in ast.walk(tree):
        if not hasattr(descendant, 'parent'):
            continue
        if not isinstance(statement_containing_node(descendant), ast.Assert):
            continue
        yield from node_linenos(descendant)
|
| 1074 |
+
|
| 1075 |
+
|
| 1076 |
+
def _extract_ipython_statement(stmt):
|
| 1077 |
+
# type: (EnhancedAST) -> ast.Module
|
| 1078 |
+
# IPython separates each statement in a cell to be executed separately
|
| 1079 |
+
# So NodeFinder should only compile one statement at a time or it
|
| 1080 |
+
# will find a code mismatch.
|
| 1081 |
+
while not isinstance(stmt.parent, ast.Module):
|
| 1082 |
+
stmt = stmt.parent
|
| 1083 |
+
# use `ast.parse` instead of `ast.Module` for better portability
|
| 1084 |
+
# python3.8 changes the signature of `ast.Module`
|
| 1085 |
+
# Inspired by https://github.com/pallets/werkzeug/pull/1552/files
|
| 1086 |
+
tree = ast.parse("")
|
| 1087 |
+
tree.body = [cast(ast.stmt, stmt)]
|
| 1088 |
+
ast.copy_location(tree, stmt)
|
| 1089 |
+
return tree
|
| 1090 |
+
|
| 1091 |
+
|
| 1092 |
+
def is_ipython_cell_code_name(code_name):
    # type: (str) -> bool
    """True if `code_name` looks like the co_name of an IPython cell's code."""
    return re.match(r"(<module>|<cell line: \d+>)$", code_name) is not None
|
| 1095 |
+
|
| 1096 |
+
|
| 1097 |
+
def is_ipython_cell_filename(filename):
    # type: (str) -> bool
    """True if `filename` looks like an IPython/Jupyter cell pseudo-filename."""
    return re.search(r"<ipython-input-|[/\\]ipykernel_\d+[/\\]", filename) is not None
|
| 1100 |
+
|
| 1101 |
+
|
| 1102 |
+
def is_ipython_cell_code(code_obj):
    # type: (types.CodeType) -> bool
    """True if `code_obj` was compiled from an IPython cell (both its
    filename and code name must match the IPython patterns)."""
    if not is_ipython_cell_filename(code_obj.co_filename):
        return False
    return is_ipython_cell_code_name(code_obj.co_name)
|
| 1108 |
+
|
| 1109 |
+
|
| 1110 |
+
def find_node_ipython(frame, lasti, stmts, source):
    # type: (types.FrameType, int, Set[EnhancedAST], Source) -> Tuple[Optional[Any], Optional[Any]]
    """Run NodeFinder on each candidate statement separately (IPython
    compiles cell statements one at a time) and return (decorator, node).

    Returns (None, None) if matches show up in more than one statement,
    since that ambiguity cannot be resolved.
    """
    node = None
    decorator = None
    for candidate in stmts:
        module = _extract_ipython_statement(candidate)
        try:
            finder = NodeFinder(frame, stmts, module, lasti, source)
            if (node or decorator) and (finder.result or finder.decorator):
                # Potential nodes found in two separate statements; give up.
                return None, None

            node = finder.result
            decorator = finder.decorator
        except Exception:
            # This statement didn't match; try the next one.
            pass
    return decorator, node
|
| 1127 |
+
|
| 1128 |
+
|
| 1129 |
+
def attr_names_match(attr, argval):
    # type: (str, str) -> bool
    """
    Checks that the user-visible attr (from ast) can correspond to
    the argval in the bytecode, i.e. the real attribute fetched internally,
    which may be mangled for private attributes.
    """
    if attr == argval:
        return True
    # Only "__"-prefixed names are subject to private-name mangling
    # (they become _ClassName__attr at runtime).
    if not attr.startswith("__"):
        return False
    return re.match(r"^_\w+%s$" % attr, argval) is not None
|
| 1141 |
+
|
| 1142 |
+
|
| 1143 |
+
def node_linenos(node):
    # type: (ast.AST) -> Iterator[int]
    """Yield every source line number spanned by `node`; yields nothing
    for nodes that carry no position information."""
    if not hasattr(node, "lineno"):
        return
    if hasattr(node, "end_lineno") and isinstance(node, ast.expr):
        # Multi-line expressions cover lineno..end_lineno inclusive.
        assert node.end_lineno is not None  # type: ignore[attr-defined]
        yield from range(node.lineno, node.end_lineno + 1)  # type: ignore[attr-defined]
    else:
        yield node.lineno  # type: ignore[attr-defined]
|
| 1154 |
+
|
| 1155 |
+
|
| 1156 |
+
if sys.version_info >= (3, 11):
|
| 1157 |
+
from ._position_node_finder import PositionNodeFinder as NodeFinder
|
| 1158 |
+
else:
|
| 1159 |
+
NodeFinder = SentinelNodeFinder
|
| 1160 |
+
|
evalkit_tf446/lib/python3.10/site-packages/executing/py.typed
ADDED
|
File without changes
|
evalkit_tf446/lib/python3.10/site-packages/executing/version.py
ADDED
|
@@ -0,0 +1 @@
|
|
|
|
|
|
|
| 1 |
+
__version__ = '2.1.0'
|
evalkit_tf446/lib/python3.10/site-packages/oauthlib/__init__.py
ADDED
|
@@ -0,0 +1,34 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""
|
| 2 |
+
oauthlib
|
| 3 |
+
~~~~~~~~
|
| 4 |
+
|
| 5 |
+
A generic, spec-compliant, thorough implementation of the OAuth
|
| 6 |
+
request-signing logic.
|
| 7 |
+
|
| 8 |
+
:copyright: (c) 2019 by The OAuthlib Community
|
| 9 |
+
:license: BSD, see LICENSE for details.
|
| 10 |
+
"""
|
| 11 |
+
import logging
|
| 12 |
+
from logging import NullHandler
|
| 13 |
+
|
| 14 |
+
__author__ = 'The OAuthlib Community'
|
| 15 |
+
__version__ = '3.2.2'
|
| 16 |
+
|
| 17 |
+
logging.getLogger('oauthlib').addHandler(NullHandler())
|
| 18 |
+
|
| 19 |
+
_DEBUG = False
|
| 20 |
+
|
| 21 |
+
def set_debug(debug_val):
    """Turn the module-wide debug flag on or off.

    :param debug_val: Value to set. Must be a bool value.
    """
    global _DEBUG
    _DEBUG = debug_val
|
| 28 |
+
|
| 29 |
+
def get_debug():
    """Get debug mode value.

    :return: `True` if debug mode is on, `False` otherwise
    """
    # Reads the module-level _DEBUG flag toggled by set_debug().
    return _DEBUG
|
evalkit_tf446/lib/python3.10/site-packages/oauthlib/__pycache__/__init__.cpython-310.pyc
ADDED
|
Binary file (984 Bytes). View file
|
|
|
evalkit_tf446/lib/python3.10/site-packages/oauthlib/__pycache__/common.cpython-310.pyc
ADDED
|
Binary file (13.4 kB). View file
|
|
|
evalkit_tf446/lib/python3.10/site-packages/oauthlib/__pycache__/signals.cpython-310.pyc
ADDED
|
Binary file (1.92 kB). View file
|
|
|
evalkit_tf446/lib/python3.10/site-packages/oauthlib/__pycache__/uri_validate.cpython-310.pyc
ADDED
|
Binary file (3.77 kB). View file
|
|
|
evalkit_tf446/lib/python3.10/site-packages/oauthlib/common.py
ADDED
|
@@ -0,0 +1,432 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""
|
| 2 |
+
oauthlib.common
|
| 3 |
+
~~~~~~~~~~~~~~
|
| 4 |
+
|
| 5 |
+
This module provides data structures and utilities common
|
| 6 |
+
to all implementations of OAuth.
|
| 7 |
+
"""
|
| 8 |
+
import collections
|
| 9 |
+
import datetime
|
| 10 |
+
import logging
|
| 11 |
+
import re
|
| 12 |
+
import time
|
| 13 |
+
import urllib.parse as urlparse
|
| 14 |
+
from urllib.parse import (
|
| 15 |
+
quote as _quote, unquote as _unquote, urlencode as _urlencode,
|
| 16 |
+
)
|
| 17 |
+
|
| 18 |
+
from . import get_debug
|
| 19 |
+
|
| 20 |
+
try:
|
| 21 |
+
from secrets import SystemRandom, randbits
|
| 22 |
+
except ImportError:
|
| 23 |
+
from random import SystemRandom, getrandbits as randbits
|
| 24 |
+
|
| 25 |
+
UNICODE_ASCII_CHARACTER_SET = ('abcdefghijklmnopqrstuvwxyz'
|
| 26 |
+
'ABCDEFGHIJKLMNOPQRSTUVWXYZ'
|
| 27 |
+
'0123456789')
|
| 28 |
+
|
| 29 |
+
CLIENT_ID_CHARACTER_SET = (r' !"#$%&\'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMN'
|
| 30 |
+
'OPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}')
|
| 31 |
+
|
| 32 |
+
SANITIZE_PATTERN = re.compile(r'([^&;]*(?:password|token)[^=]*=)[^&;]+', re.IGNORECASE)
|
| 33 |
+
INVALID_HEX_PATTERN = re.compile(r'%[^0-9A-Fa-f]|%[0-9A-Fa-f][^0-9A-Fa-f]')
|
| 34 |
+
|
| 35 |
+
always_safe = ('ABCDEFGHIJKLMNOPQRSTUVWXYZ'
|
| 36 |
+
'abcdefghijklmnopqrstuvwxyz'
|
| 37 |
+
'0123456789' '_.-')
|
| 38 |
+
|
| 39 |
+
log = logging.getLogger('oauthlib')
|
| 40 |
+
|
| 41 |
+
|
| 42 |
+
# 'safe' must be bytes (Python 2.6 requires bytes, other versions allow either)
|
| 43 |
+
def quote(s, safe=b'/'):
    """Percent-encode `s`, always returning text (str).

    str input is UTF-8 encoded before quoting; characters in `safe`
    are left unescaped (bytes for historical Python 2.6 compatibility).
    """
    if isinstance(s, str):
        s = s.encode('utf-8')
    quoted = _quote(s, safe)
    # Normalize to str: older interpreters could return bytes here.
    if isinstance(quoted, bytes):
        quoted = quoted.decode('utf-8')
    return quoted
|
| 51 |
+
|
| 52 |
+
|
| 53 |
+
def unquote(s):
    """Percent-decode `s`, guaranteeing a text (str) result."""
    decoded = _unquote(s)
    # _unquote can echo back whatever type it was given; force str.
    if isinstance(decoded, bytes):
        decoded = decoded.decode('utf-8')
    return decoded
|
| 61 |
+
|
| 62 |
+
|
| 63 |
+
def urlencode(params):
    """Form-encode a list of 2-tuples (or dict items), returning text."""
    utf8_pairs = encode_params_utf8(params)
    encoded = _urlencode(utf8_pairs)
    # Python 3's _urlencode returns str already; decode defensively if not.
    if isinstance(encoded, str):
        return encoded
    return encoded.decode("utf-8")
|
| 70 |
+
|
| 71 |
+
|
| 72 |
+
def encode_params_utf8(params):
    """Ensures that all parameters in a list of 2-element tuples are encoded to
    bytestrings using UTF-8
    """
    def as_bytes(value):
        # Only str values are converted; everything else passes through.
        return value.encode('utf-8') if isinstance(value, str) else value

    return [(as_bytes(k), as_bytes(v)) for k, v in params]
|
| 82 |
+
|
| 83 |
+
|
| 84 |
+
def decode_params_utf8(params):
    """Ensures that all parameters in a list of 2-element tuples are decoded to
    unicode using UTF-8.
    """
    def as_text(value):
        # Only bytes values are converted; everything else passes through.
        return value.decode('utf-8') if isinstance(value, bytes) else value

    return [(as_text(k), as_text(v)) for k, v in params]
|
| 94 |
+
|
| 95 |
+
|
| 96 |
+
urlencoded = set(always_safe) | set('=&;:%+~,*@!()/?\'$')
|
| 97 |
+
|
| 98 |
+
|
| 99 |
+
def urldecode(query):
    """Decode a query string in x-www-form-urlencoded format into a sequence
    of two-element tuples.

    Unlike urlparse.parse_qsl(..., strict_parsing=True) urldecode will enforce
    correct formatting of the query string by validation. If validation fails
    a ValueError will be raised. urllib.parse_qsl will only raise errors if
    any of name-value pairs omits the equals sign.
    """
    # Check if query contains invalid characters
    # (`urlencoded` is the module-level set of characters legal in a
    # form-encoded string).
    if query and not set(query) <= urlencoded:
        error = ("Error trying to decode a non urlencoded string. "
                 "Found invalid characters: %s "
                 "in the string: '%s'. "
                 "Please ensure the request/response body is "
                 "x-www-form-urlencoded.")
        raise ValueError(error % (set(query) - urlencoded, query))

    # Check for correctly hex encoded values using a regular expression
    # All encoded values begin with % followed by two hex characters
    # correct = %00, %A0, %0A, %FF
    # invalid = %G0, %5H, %PO
    if INVALID_HEX_PATTERN.search(query):
        raise ValueError('Invalid hex encoding in query string.')

    # We want to allow queries such as "c2" whereas urlparse.parse_qsl
    # with the strict_parsing flag will not.
    params = urlparse.parse_qsl(query, keep_blank_values=True)

    # unicode all the things
    return decode_params_utf8(params)
|
| 130 |
+
|
| 131 |
+
|
| 132 |
+
def extract_params(raw):
    """Extract parameters and return them as a list of 2-tuples.

    Will successfully extract parameters from urlencoded query strings,
    dicts, or lists of 2-tuples. Empty strings/dicts/lists will return an
    empty list of parameters. Any other input will result in a return
    value of None.
    """
    if isinstance(raw, (bytes, str)):
        # Strings must be valid x-www-form-urlencoded; otherwise reject.
        try:
            params = urldecode(raw)
        except ValueError:
            params = None
    elif hasattr(raw, '__iter__'):
        # Probe with dict(): a mapping or an iterable of 2-element items
        # succeeds; anything else raises and is rejected as None.
        try:
            dict(raw)
        except ValueError:
            params = None
        except TypeError:
            params = None
        else:
            params = list(raw.items() if isinstance(raw, dict) else raw)
            params = decode_params_utf8(params)
    else:
        params = None

    return params
|
| 159 |
+
|
| 160 |
+
|
| 161 |
+
def generate_nonce():
    """Generate pseudorandom nonce that is unlikely to repeat.

    Per `section 3.3`_ of the OAuth 1 RFC 5849 spec.
    Per `section 3.2.1`_ of the MAC Access Authentication spec.

    A random 64-bit number is appended to the epoch timestamp for both
    randomness and to decrease the likelihood of collisions.

    .. _`section 3.2.1`: https://tools.ietf.org/html/draft-ietf-oauth-v2-http-mac-01#section-3.2.1
    .. _`section 3.3`: https://tools.ietf.org/html/rfc5849#section-3.3
    """
    # Both halves are already decimal strings, so plain concatenation
    # gives the final nonce.
    return str(randbits(64)) + generate_timestamp()
|
| 174 |
+
|
| 175 |
+
|
| 176 |
+
def generate_timestamp():
    """Get seconds since epoch (UTC).

    Per `section 3.3`_ of the OAuth 1 RFC 5849 spec.
    Per `section 3.2.1`_ of the MAC Access Authentication spec.

    .. _`section 3.2.1`: https://tools.ietf.org/html/draft-ietf-oauth-v2-http-mac-01#section-3.2.1
    .. _`section 3.3`: https://tools.ietf.org/html/rfc5849#section-3.3
    """
    seconds = int(time.time())
    return str(seconds)
|
| 186 |
+
|
| 187 |
+
|
| 188 |
+
def generate_token(length=30, chars=UNICODE_ASCII_CHARACTER_SET):
    """Generates a non-guessable OAuth token

    OAuth (1 and 2) does not specify the format of tokens except that they
    should be strings of random characters. Tokens should not be guessable
    and entropy when generating the random characters is important. Which is
    why SystemRandom is used instead of the default random.choice method.
    """
    rng = SystemRandom()
    return ''.join(rng.choice(chars) for _ in range(length))
|
| 198 |
+
|
| 199 |
+
|
| 200 |
+
def generate_signed_token(private_pem, request):
    """Build an RS256-signed JWT from `request.scope`, `request.expires_in`
    and `request.claims`, returned as a text (str) token."""
    # Lazy import so the PyJWT dependency is only required when used.
    import jwt

    now = datetime.datetime.utcnow()

    claims = {
        'scope': request.scope,
        # Expiry is computed relative to "now" using the request's TTL.
        'exp': now + datetime.timedelta(seconds=request.expires_in)
    }

    # Caller-supplied claims override the defaults above on key collision.
    claims.update(request.claims)

    token = jwt.encode(claims, private_pem, 'RS256')
    token = to_unicode(token, "UTF-8")

    return token
|
| 216 |
+
|
| 217 |
+
|
| 218 |
+
def verify_signed_token(public_pem, token):
    """Decode `token` with PyJWT using the RS256 algorithm and `public_pem`."""
    # Lazy import so the PyJWT dependency is only required when used.
    import jwt

    return jwt.decode(token, public_pem, algorithms=['RS256'])
|
| 222 |
+
|
| 223 |
+
|
| 224 |
+
def generate_client_id(length=30, chars=CLIENT_ID_CHARACTER_SET):
    """Generates an OAuth client_id

    OAuth 2 specify the format of client_id in
    https://tools.ietf.org/html/rfc6749#appendix-A.
    """
    # Same generator as tokens, just a wider default character set.
    return generate_token(length, chars)
|
| 231 |
+
|
| 232 |
+
|
| 233 |
+
def add_params_to_qs(query, params):
    """Extend a query with a list of two-tuples."""
    pairs = params.items() if isinstance(params, dict) else params
    existing = urlparse.parse_qsl(query, keep_blank_values=True)
    existing.extend(pairs)
    return urlencode(existing)
|
| 240 |
+
|
| 241 |
+
|
| 242 |
+
def add_params_to_uri(uri, params, fragment=False):
    """Add a list of two-tuples to the uri query components."""
    scheme, netloc, path, uri_params, query, frag = urlparse.urlparse(uri)
    # Append to either the fragment or the query string, as requested.
    if fragment:
        frag = add_params_to_qs(frag, params)
    else:
        query = add_params_to_qs(query, params)
    return urlparse.urlunparse((scheme, netloc, path, uri_params, query, frag))
|
| 250 |
+
|
| 251 |
+
|
| 252 |
+
def safe_string_equals(a, b):
    """ Near-constant time string comparison.

    Used in order to avoid timing attacks on sensitive information such
    as secret keys during request verification (`rootLabs`_).

    .. _`rootLabs`: http://rdist.root.org/2010/01/07/timing-independent-array-comparison/

    """
    if len(a) != len(b):
        return False

    # Accumulate the XOR of every character pair; the loop always runs to
    # completion so timing does not reveal the first differing position.
    diff = 0
    for ch_a, ch_b in zip(a, b):
        diff |= ord(ch_a) ^ ord(ch_b)
    return diff == 0
|
| 268 |
+
|
| 269 |
+
|
| 270 |
+
def to_unicode(data, encoding='UTF-8'):
    """Convert a number of different types of objects to unicode."""
    if isinstance(data, str):
        return data

    if isinstance(data, bytes):
        return str(data, encoding=encoding)

    if hasattr(data, '__iter__'):
        # Probe with dict() to see whether `data` is mapping-like.
        try:
            dict(data)
        except TypeError:
            # NOTE(review): on TypeError the iterable falls through and is
            # returned unconverted below — confirm this is intended.
            pass
        except ValueError:
            # Assume it's a one dimensional data structure
            # (returned lazily as a generator of converted items).
            return (to_unicode(i, encoding) for i in data)
        else:
            # We support 2.6 which lacks dict comprehensions
            if hasattr(data, 'items'):
                data = data.items()
            return {to_unicode(k, encoding): to_unicode(v, encoding) for k, v in data}

    # Non-string, non-iterable values pass through unchanged.
    return data
|
| 293 |
+
|
| 294 |
+
|
| 295 |
+
class CaseInsensitiveDict(dict):

    """Basic case insensitive dict with strings only keys."""

    # Maps lowercased key -> the original-cased key as currently stored.
    # (Class-level default; __init__ always replaces it per instance.)
    proxy = {}

    def __init__(self, data):
        # Build the case-folding index, then store values via __setitem__.
        self.proxy = {k.lower(): k for k in data}
        for k in data:
            self[k] = data[k]

    def __contains__(self, k):
        return k.lower() in self.proxy

    def __delitem__(self, k):
        # Delete under the stored (original-cased) key, then drop the index entry.
        key = self.proxy[k.lower()]
        super().__delitem__(key)
        del self.proxy[k.lower()]

    def __getitem__(self, k):
        key = self.proxy[k.lower()]
        return super().__getitem__(key)

    def get(self, k, default=None):
        return self[k] if k in self else default

    def __setitem__(self, k, v):
        super().__setitem__(k, v)
        self.proxy[k.lower()] = k

    def update(self, *args, **kwargs):
        super().update(*args, **kwargs)
        # Keep the case-folding index in sync with the merged keys.
        for k in dict(*args, **kwargs):
            self.proxy[k.lower()] = k
|
| 329 |
+
|
| 330 |
+
|
| 331 |
+
class Request:

    """A malleable representation of a signable HTTP request.

    Body argument may contain any data, but parameters will only be decoded if
    they are one of:

    * urlencoded query string
    * dict
    * list of 2-tuples

    Anything else will be treated as raw body data to be passed through
    unmolested.
    """

    def __init__(self, uri, http_method='GET', body=None, headers=None,
                 encoding='utf-8'):
        # Convert to unicode using encoding if given, else assume unicode
        encode = lambda x: to_unicode(x, encoding) if encoding else x

        self.uri = encode(uri)
        self.http_method = encode(http_method)
        self.headers = CaseInsensitiveDict(encode(headers or {}))
        self.body = encode(body)
        self.decoded_body = extract_params(self.body)
        self.oauth_params = []
        self.validator_log = {}

        # Recognized OAuth / OpenID Connect parameters, exposed as
        # attributes via __getattr__.  Values parsed from the query string
        # and the decoded body (in that order) override the None defaults.
        self._params = {
            "access_token": None,
            "client": None,
            "client_id": None,
            "client_secret": None,
            "code": None,
            "code_challenge": None,
            "code_challenge_method": None,
            "code_verifier": None,
            "extra_credentials": None,
            "grant_type": None,
            "redirect_uri": None,
            "refresh_token": None,
            "request_token": None,
            "response_type": None,
            "scope": None,
            "scopes": None,
            "state": None,
            "token": None,
            "user": None,
            "token_type_hint": None,

            # OpenID Connect
            "response_mode": None,
            "nonce": None,
            "display": None,
            "prompt": None,
            "claims": None,
            "max_age": None,
            "ui_locales": None,
            "id_token_hint": None,
            "login_hint": None,
            "acr_values": None
        }
        self._params.update(dict(urldecode(self.uri_query)))
        self._params.update(dict(self.decoded_body or []))

    def __getattr__(self, name):
        # Only invoked for attributes not found normally; fall back to the
        # recognized OAuth parameter table.
        if name in self._params:
            return self._params[name]
        else:
            raise AttributeError(name)

    def __repr__(self):
        """Debug representation; secrets are redacted unless debug mode is on."""
        if not get_debug():
            return "<oauthlib.Request SANITIZED>"
        body = self.body
        headers = self.headers.copy()
        if body:
            # BUGFIX: the replacement must be a raw string so that \1 is a
            # backreference preserving the "password="/"token=" prefix.
            # The previous non-raw '\1' inserted the literal control
            # character '\x01' and dropped the parameter name.
            body = SANITIZE_PATTERN.sub(r'\1<SANITIZED>', str(body))
        if 'Authorization' in headers:
            headers['Authorization'] = '<SANITIZED>'
        return '<oauthlib.Request url="{}", http_method="{}", headers="{}", body="{}">'.format(
            self.uri, self.http_method, headers, body)

    @property
    def uri_query(self):
        # Raw query component of the request URI.
        return urlparse.urlparse(self.uri).query

    @property
    def uri_query_params(self):
        # Query parameters as a list of (name, value) tuples; strict
        # parsing here, since the URI was already validated on construction.
        if not self.uri_query:
            return []
        return urlparse.parse_qsl(self.uri_query, keep_blank_values=True,
                                  strict_parsing=True)

    @property
    def duplicate_params(self):
        # Parameter names appearing more than once across body and query.
        seen_keys = collections.defaultdict(int)
        all_keys = (p[0]
                    for p in (self.decoded_body or []) + self.uri_query_params)
        for k in all_keys:
            seen_keys[k] += 1
        return [k for k, c in seen_keys.items() if c > 1]
|
evalkit_tf446/lib/python3.10/site-packages/oauthlib/oauth1/__init__.py
ADDED
|
@@ -0,0 +1,23 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""
|
| 2 |
+
oauthlib.oauth1
|
| 3 |
+
~~~~~~~~~~~~~~
|
| 4 |
+
|
| 5 |
+
This module is a wrapper for the most recent implementation of OAuth 1.0 Client
|
| 6 |
+
and Server classes.
|
| 7 |
+
"""
|
| 8 |
+
from .rfc5849 import (
|
| 9 |
+
SIGNATURE_HMAC, SIGNATURE_HMAC_SHA1, SIGNATURE_HMAC_SHA256,
|
| 10 |
+
SIGNATURE_HMAC_SHA512, SIGNATURE_PLAINTEXT, SIGNATURE_RSA,
|
| 11 |
+
SIGNATURE_RSA_SHA1, SIGNATURE_RSA_SHA256, SIGNATURE_RSA_SHA512,
|
| 12 |
+
SIGNATURE_TYPE_AUTH_HEADER, SIGNATURE_TYPE_BODY, SIGNATURE_TYPE_QUERY,
|
| 13 |
+
Client,
|
| 14 |
+
)
|
| 15 |
+
from .rfc5849.endpoints import (
|
| 16 |
+
AccessTokenEndpoint, AuthorizationEndpoint, RequestTokenEndpoint,
|
| 17 |
+
ResourceEndpoint, SignatureOnlyEndpoint, WebApplicationServer,
|
| 18 |
+
)
|
| 19 |
+
from .rfc5849.errors import (
|
| 20 |
+
InsecureTransportError, InvalidClientError, InvalidRequestError,
|
| 21 |
+
InvalidSignatureMethodError, OAuth1Error,
|
| 22 |
+
)
|
| 23 |
+
from .rfc5849.request_validator import RequestValidator
|
evalkit_tf446/lib/python3.10/site-packages/oauthlib/oauth1/__pycache__/__init__.cpython-310.pyc
ADDED
|
Binary file (1.18 kB). View file
|
|
|
evalkit_tf446/lib/python3.10/site-packages/oauthlib/oauth1/rfc5849/__init__.py
ADDED
|
@@ -0,0 +1,365 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""
|
| 2 |
+
oauthlib.oauth1.rfc5849
|
| 3 |
+
~~~~~~~~~~~~~~
|
| 4 |
+
|
| 5 |
+
This module is an implementation of various logic needed
|
| 6 |
+
for signing and checking OAuth 1.0 RFC 5849 requests.
|
| 7 |
+
|
| 8 |
+
It supports all three standard signature methods defined in RFC 5849:
|
| 9 |
+
|
| 10 |
+
- HMAC-SHA1
|
| 11 |
+
- RSA-SHA1
|
| 12 |
+
- PLAINTEXT
|
| 13 |
+
|
| 14 |
+
It also supports signature methods that are not defined in RFC 5849. These are
|
| 15 |
+
based on the standard ones but replace SHA-1 with the more secure SHA-256:
|
| 16 |
+
|
| 17 |
+
- HMAC-SHA256
|
| 18 |
+
- RSA-SHA256
|
| 19 |
+
|
| 20 |
+
"""
|
| 21 |
+
import base64
|
| 22 |
+
import hashlib
|
| 23 |
+
import logging
|
| 24 |
+
import urllib.parse as urlparse
|
| 25 |
+
|
| 26 |
+
from oauthlib.common import (
|
| 27 |
+
Request, generate_nonce, generate_timestamp, to_unicode, urlencode,
|
| 28 |
+
)
|
| 29 |
+
|
| 30 |
+
from . import parameters, signature
|
| 31 |
+
|
| 32 |
+
# Module-level logger; handler/level configuration is left to the application.
log = logging.getLogger(__name__)

# Available signature methods
#
# Note: SIGNATURE_HMAC and SIGNATURE_RSA are kept for backward compatibility
# with previous versions of this library, when the only HMAC-based and
# RSA-based signature methods were HMAC-SHA1 and RSA-SHA1. But now that it
# supports other hashing algorithms besides SHA1, explicitly identifying which
# hashing algorithm is being used is recommended.
#
# Note: if additional values are defined here, don't forget to update the
# imports in "../__init__.py" so they are available outside this module.

SIGNATURE_HMAC_SHA1 = "HMAC-SHA1"
SIGNATURE_HMAC_SHA256 = "HMAC-SHA256"
SIGNATURE_HMAC_SHA512 = "HMAC-SHA512"
SIGNATURE_HMAC = SIGNATURE_HMAC_SHA1  # deprecated variable for HMAC-SHA1

SIGNATURE_RSA_SHA1 = "RSA-SHA1"
SIGNATURE_RSA_SHA256 = "RSA-SHA256"
SIGNATURE_RSA_SHA512 = "RSA-SHA512"
SIGNATURE_RSA = SIGNATURE_RSA_SHA1  # deprecated variable for RSA-SHA1

SIGNATURE_PLAINTEXT = "PLAINTEXT"

# Every signature method name this module recognizes.
SIGNATURE_METHODS = (
    SIGNATURE_HMAC_SHA1,
    SIGNATURE_HMAC_SHA256,
    SIGNATURE_HMAC_SHA512,
    SIGNATURE_RSA_SHA1,
    SIGNATURE_RSA_SHA256,
    SIGNATURE_RSA_SHA512,
    SIGNATURE_PLAINTEXT
)

# Where the OAuth protocol parameters are embedded in the outgoing request:
# the Authorization header, the URI query string, or the form-encoded body.
SIGNATURE_TYPE_AUTH_HEADER = 'AUTH_HEADER'
SIGNATURE_TYPE_QUERY = 'QUERY'
SIGNATURE_TYPE_BODY = 'BODY'

CONTENT_TYPE_FORM_URLENCODED = 'application/x-www-form-urlencoded'
|
| 72 |
+
|
| 73 |
+
|
| 74 |
+
class Client:

    """A client used to sign OAuth 1.0 RFC 5849 requests."""
    # Maps a signature method name to the callable that produces the
    # oauth_signature value from (base_string, client).
    SIGNATURE_METHODS = {
        SIGNATURE_HMAC_SHA1: signature.sign_hmac_sha1_with_client,
        SIGNATURE_HMAC_SHA256: signature.sign_hmac_sha256_with_client,
        SIGNATURE_HMAC_SHA512: signature.sign_hmac_sha512_with_client,
        SIGNATURE_RSA_SHA1: signature.sign_rsa_sha1_with_client,
        SIGNATURE_RSA_SHA256: signature.sign_rsa_sha256_with_client,
        SIGNATURE_RSA_SHA512: signature.sign_rsa_sha512_with_client,
        SIGNATURE_PLAINTEXT: signature.sign_plaintext_with_client
    }

    @classmethod
    def register_signature_method(cls, method_name, method_callback):
        """Register a custom signature method.

        Note: this mutates the class-level registry, so the new method is
        visible to every Client instance, not just one.
        """
        cls.SIGNATURE_METHODS[method_name] = method_callback

    def __init__(self, client_key,
                 client_secret=None,
                 resource_owner_key=None,
                 resource_owner_secret=None,
                 callback_uri=None,
                 signature_method=SIGNATURE_HMAC_SHA1,
                 signature_type=SIGNATURE_TYPE_AUTH_HEADER,
                 rsa_key=None, verifier=None, realm=None,
                 encoding='utf-8', decoding=None,
                 nonce=None, timestamp=None):
        """Create an OAuth 1 client.

        :param client_key: Client key (consumer key), mandatory.
        :param resource_owner_key: Resource owner key (oauth token).
        :param resource_owner_secret: Resource owner secret (oauth token secret).
        :param callback_uri: Callback used when obtaining request token.
        :param signature_method: SIGNATURE_HMAC, SIGNATURE_RSA or SIGNATURE_PLAINTEXT.
        :param signature_type: SIGNATURE_TYPE_AUTH_HEADER (default),
                               SIGNATURE_TYPE_QUERY or SIGNATURE_TYPE_BODY
                               depending on where you want to embed the oauth
                               credentials.
        :param rsa_key: RSA key used with SIGNATURE_RSA.
        :param verifier: Verifier used when obtaining an access token.
        :param realm: Realm (scope) to which access is being requested.
        :param encoding: If you provide non-unicode input you may use this
                         to have oauthlib automatically convert.
        :param decoding: If you wish that the returned uri, headers and body
                         from sign be encoded back from unicode, then set
                         decoding to your preferred encoding, i.e. utf-8.
        :param nonce: Use this nonce instead of generating one. (Mainly for testing)
        :param timestamp: Use this timestamp instead of using current. (Mainly for testing)
        """
        # Convert to unicode using encoding if given, else assume unicode
        encode = lambda x: to_unicode(x, encoding) if encoding else x

        self.client_key = encode(client_key)
        self.client_secret = encode(client_secret)
        self.resource_owner_key = encode(resource_owner_key)
        self.resource_owner_secret = encode(resource_owner_secret)
        self.signature_method = encode(signature_method)
        self.signature_type = encode(signature_type)
        self.callback_uri = encode(callback_uri)
        self.rsa_key = encode(rsa_key)
        self.verifier = encode(verifier)
        self.realm = encode(realm)
        self.encoding = encode(encoding)
        self.decoding = encode(decoding)
        self.nonce = encode(nonce)
        self.timestamp = encode(timestamp)

    def __repr__(self):
        # Mask every secret attribute so the repr is safe to log.
        attrs = vars(self).copy()
        attrs['client_secret'] = '****' if attrs['client_secret'] else None
        attrs['rsa_key'] = '****' if attrs['rsa_key'] else None
        attrs[
            'resource_owner_secret'] = '****' if attrs['resource_owner_secret'] else None
        attribute_str = ', '.join('{}={}'.format(k, v) for k, v in attrs.items())
        return '<{} {}>'.format(self.__class__.__name__, attribute_str)

    def get_oauth_signature(self, request):
        """Get an OAuth signature to be used in signing a request

        To satisfy `section 3.4.1.2`_ item 2, if the request argument's
        headers dict attribute contains a Host item, its value will
        replace any netloc part of the request argument's uri attribute
        value.

        .. _`section 3.4.1.2`: https://tools.ietf.org/html/rfc5849#section-3.4.1.2
        """
        if self.signature_method == SIGNATURE_PLAINTEXT:
            # fast-path: PLAINTEXT needs no base string, only the two secrets
            return signature.sign_plaintext(self.client_secret,
                                            self.resource_owner_secret)

        # Render first so the collected parameters reflect the request
        # exactly as it will be sent (header/query/body placement applied).
        uri, headers, body = self._render(request)

        collected_params = signature.collect_parameters(
            uri_query=urlparse.urlparse(uri).query,
            body=body,
            headers=headers)
        log.debug("Collected params: {}".format(collected_params))

        normalized_params = signature.normalize_parameters(collected_params)
        normalized_uri = signature.base_string_uri(uri, headers.get('Host', None))
        log.debug("Normalized params: {}".format(normalized_params))
        log.debug("Normalized URI: {}".format(normalized_uri))

        base_string = signature.signature_base_string(request.http_method,
                                                      normalized_uri, normalized_params)

        log.debug("Signing: signature base string: {}".format(base_string))

        if self.signature_method not in self.SIGNATURE_METHODS:
            raise ValueError('Invalid signature method.')

        sig = self.SIGNATURE_METHODS[self.signature_method](base_string, self)

        log.debug("Signature: {}".format(sig))
        return sig

    def get_oauth_params(self, request):
        """Get the basic OAuth parameters to be used in generating a signature.
        """
        # nonce/timestamp injected via the constructor take precedence
        # (mainly for deterministic testing).
        nonce = (generate_nonce()
                 if self.nonce is None else self.nonce)
        timestamp = (generate_timestamp()
                     if self.timestamp is None else self.timestamp)
        params = [
            ('oauth_nonce', nonce),
            ('oauth_timestamp', timestamp),
            ('oauth_version', '1.0'),
            ('oauth_signature_method', self.signature_method),
            ('oauth_consumer_key', self.client_key),
        ]
        if self.resource_owner_key:
            params.append(('oauth_token', self.resource_owner_key))
        if self.callback_uri:
            params.append(('oauth_callback', self.callback_uri))
        if self.verifier:
            params.append(('oauth_verifier', self.verifier))

        # providing body hash for requests other than x-www-form-urlencoded
        # as described in https://tools.ietf.org/html/draft-eaton-oauth-bodyhash-00#section-4.1.1
        # 4.1.1. When to include the body hash
        #    *  [...] MUST NOT include an oauth_body_hash parameter on requests with form-encoded request bodies
        #    *  [...] SHOULD include the oauth_body_hash parameter on all other requests.
        # Note that SHA-1 is vulnerable. The spec acknowledges that in https://tools.ietf.org/html/draft-eaton-oauth-bodyhash-00#section-6.2
        # At this time, no further effort has been made to replace SHA-1 for the OAuth Request Body Hash extension.
        content_type = request.headers.get('Content-Type', None)
        content_type_eligible = content_type and content_type.find('application/x-www-form-urlencoded') < 0
        if request.body is not None and content_type_eligible:
            params.append(('oauth_body_hash', base64.b64encode(hashlib.sha1(request.body.encode('utf-8')).digest()).decode('utf-8')))

        return params

    def _render(self, request, formencode=False, realm=None):
        """Render a signed request according to signature type

        Returns a 3-tuple containing the request URI, headers, and body.

        If the formencode argument is True and the body contains parameters, it
        is escaped and returned as a valid formencoded string.
        """
        # TODO what if there are body params on a header-type auth?
        # TODO what if there are query params on a body-type auth?

        uri, headers, body = request.uri, request.headers, request.body

        # TODO: right now these prepare_* methods are very narrow in scope--they
        # only affect their little thing. In some cases (for example, with
        # header auth) it might be advantageous to allow these methods to touch
        # other parts of the request, like the headers—so the prepare_headers
        # method could also set the Content-Type header to x-www-form-urlencoded
        # like the spec requires. This would be a fundamental change though, and
        # I'm not sure how I feel about it.
        if self.signature_type == SIGNATURE_TYPE_AUTH_HEADER:
            headers = parameters.prepare_headers(
                request.oauth_params, request.headers, realm=realm)
        elif self.signature_type == SIGNATURE_TYPE_BODY and request.decoded_body is not None:
            body = parameters.prepare_form_encoded_body(
                request.oauth_params, request.decoded_body)
            if formencode:
                body = urlencode(body)
            headers['Content-Type'] = 'application/x-www-form-urlencoded'
        elif self.signature_type == SIGNATURE_TYPE_QUERY:
            uri = parameters.prepare_request_uri_query(
                request.oauth_params, request.uri)
        else:
            raise ValueError('Unknown signature type specified.')

        return uri, headers, body

    def sign(self, uri, http_method='GET', body=None, headers=None, realm=None):
        """Sign a request

        Signs an HTTP request with the specified parts.

        Returns a 3-tuple of the signed request's URI, headers, and body.
        Note that http_method is not returned as it is unaffected by the OAuth
        signing process. Also worth noting is that duplicate parameters
        will be included in the signature, regardless of where they are
        specified (query, body).

        The body argument may be a dict, a list of 2-tuples, or a formencoded
        string. The Content-Type header must be 'application/x-www-form-urlencoded'
        if it is present.

        If the body argument is not one of the above, it will be returned
        verbatim as it is unaffected by the OAuth signing process. Attempting to
        sign a request with non-formencoded data using the OAuth body signature
        type is invalid and will raise an exception.

        If the body does contain parameters, it will be returned as a properly-
        formatted formencoded string.

        Body may not be included if the http_method is either GET or HEAD as
        this changes the semantic meaning of the request.

        All string data MUST be unicode or be encoded with the same encoding
        scheme supplied to the Client constructor, default utf-8. This includes
        strings inside body dicts, for example.
        """
        # normalize request data
        request = Request(uri, http_method, body, headers,
                          encoding=self.encoding)

        # sanity check
        content_type = request.headers.get('Content-Type', None)
        multipart = content_type and content_type.startswith('multipart/')
        should_have_params = content_type == CONTENT_TYPE_FORM_URLENCODED
        has_params = request.decoded_body is not None
        # 3.4.1.3.1.  Parameter Sources
        # [Parameters are collected from the HTTP request entity-body, but only
        # if [...]:
        #    *  The entity-body is single-part.
        if multipart and has_params:
            raise ValueError(
                "Headers indicate a multipart body but body contains parameters.")
        #    *  The entity-body follows the encoding requirements of the
        #       "application/x-www-form-urlencoded" content-type as defined by
        #       [W3C.REC-html40-19980424].
        elif should_have_params and not has_params:
            raise ValueError(
                "Headers indicate a formencoded body but body was not decodable.")
        #    *  The HTTP request entity-header includes the "Content-Type"
        #       header field set to "application/x-www-form-urlencoded".
        elif not should_have_params and has_params:
            raise ValueError(
                "Body contains parameters but Content-Type header was {} "
                "instead of {}".format(content_type or "not set",
                                       CONTENT_TYPE_FORM_URLENCODED))

        # 3.5.2.  Form-Encoded Body
        # Protocol parameters can be transmitted in the HTTP request entity-
        # body, but only if the following REQUIRED conditions are met:
        # o  The entity-body is single-part.
        # o  The entity-body follows the encoding requirements of the
        #    "application/x-www-form-urlencoded" content-type as defined by
        #    [W3C.REC-html40-19980424].
        # o  The HTTP request entity-header includes the "Content-Type" header
        #    field set to "application/x-www-form-urlencoded".
        elif self.signature_type == SIGNATURE_TYPE_BODY and not (
                should_have_params and has_params and not multipart):
            raise ValueError(
                'Body signatures may only be used with form-urlencoded content')

        # We amend https://tools.ietf.org/html/rfc5849#section-3.4.1.3.1
        # with the clause that parameters from body should only be included
        # in non GET or HEAD requests. Extracting the request body parameters
        # and including them in the signature base string would give semantic
        # meaning to the body, which it should not have according to the
        # HTTP 1.1 spec.
        elif http_method.upper() in ('GET', 'HEAD') and has_params:
            raise ValueError('GET/HEAD requests should not include body.')

        # generate the basic OAuth parameters
        request.oauth_params = self.get_oauth_params(request)

        # generate the signature
        request.oauth_params.append(
            ('oauth_signature', self.get_oauth_signature(request)))

        # render the signed request and return it
        uri, headers, body = self._render(request, formencode=True,
                                          realm=(realm or self.realm))

        if self.decoding:
            # Caller asked for byte strings back; encode uri, body and every
            # header key/value with the configured codec.
            log.debug('Encoding URI, headers and body to %s.', self.decoding)
            uri = uri.encode(self.decoding)
            body = body.encode(self.decoding) if body else body
            new_headers = {}
            for k, v in headers.items():
                new_headers[k.encode(self.decoding)] = v.encode(self.decoding)
            headers = new_headers
        return uri, headers, body
|
evalkit_tf446/lib/python3.10/site-packages/oauthlib/oauth1/rfc5849/__pycache__/__init__.cpython-310.pyc
ADDED
|
Binary file (10.5 kB). View file
|
|
|
evalkit_tf446/lib/python3.10/site-packages/oauthlib/oauth1/rfc5849/__pycache__/errors.cpython-310.pyc
ADDED
|
Binary file (3.23 kB). View file
|
|
|
evalkit_tf446/lib/python3.10/site-packages/oauthlib/oauth1/rfc5849/__pycache__/parameters.cpython-310.pyc
ADDED
|
Binary file (3.31 kB). View file
|
|
|
evalkit_tf446/lib/python3.10/site-packages/oauthlib/oauth1/rfc5849/__pycache__/signature.cpython-310.pyc
ADDED
|
Binary file (17.8 kB). View file
|
|
|
evalkit_tf446/lib/python3.10/site-packages/oauthlib/oauth1/rfc5849/__pycache__/utils.cpython-310.pyc
ADDED
|
Binary file (2.82 kB). View file
|
|
|
evalkit_tf446/lib/python3.10/site-packages/oauthlib/oauth1/rfc5849/endpoints/__init__.py
ADDED
|
@@ -0,0 +1,8 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
from .access_token import AccessTokenEndpoint
|
| 2 |
+
from .authorization import AuthorizationEndpoint
|
| 3 |
+
from .base import BaseEndpoint
|
| 4 |
+
from .request_token import RequestTokenEndpoint
|
| 5 |
+
from .resource import ResourceEndpoint
|
| 6 |
+
from .signature_only import SignatureOnlyEndpoint
|
| 7 |
+
|
| 8 |
+
from .pre_configured import WebApplicationServer # isort:skip
|
evalkit_tf446/lib/python3.10/site-packages/oauthlib/oauth1/rfc5849/endpoints/access_token.py
ADDED
|
@@ -0,0 +1,215 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
# -*- coding: utf-8 -*-
|
| 2 |
+
"""
|
| 3 |
+
oauthlib.oauth1.rfc5849.endpoints.access_token
|
| 4 |
+
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
|
| 5 |
+
|
| 6 |
+
This module is an implementation of the access token provider logic of
|
| 7 |
+
OAuth 1.0 RFC 5849. It validates the correctness of access token requests,
|
| 8 |
+
creates and persists tokens as well as create the proper response to be
|
| 9 |
+
returned to the client.
|
| 10 |
+
"""
|
| 11 |
+
import logging
|
| 12 |
+
|
| 13 |
+
from oauthlib.common import urlencode
|
| 14 |
+
|
| 15 |
+
from .. import errors
|
| 16 |
+
from .base import BaseEndpoint
|
| 17 |
+
|
| 18 |
+
log = logging.getLogger(__name__)
|
| 19 |
+
|
| 20 |
+
|
| 21 |
+
class AccessTokenEndpoint(BaseEndpoint):

    """An endpoint responsible for providing OAuth 1 access tokens.

    Typical use is to instantiate with a request validator and invoke the
    ``create_access_token_response`` from a view function. The tuple returned
    has all information necessary (body, status, headers) to quickly form
    and return a proper response. See :doc:`/oauth1/validator` for details on which
    validator methods to implement for this endpoint.
    """

    def create_access_token(self, request, credentials):
        """Create and save a new access token.

        Similar to OAuth 2, indication of granted scopes will be included as a
        space separated list in ``oauth_authorized_realms``.

        :param request: OAuthlib request.
        :type request: oauthlib.common.Request
        :param credentials: Extra key/value pairs merged into the token dict
                            (may overwrite the generated entries).
        :returns: The token as an urlencoded string.
        """
        request.realms = self.request_validator.get_realms(
            request.resource_owner_key, request)
        token = {
            'oauth_token': self.token_generator(),
            'oauth_token_secret': self.token_generator(),
            # Backport the authorized scopes indication used in OAuth2
            'oauth_authorized_realms': ' '.join(request.realms)
        }
        token.update(credentials)
        self.request_validator.save_access_token(token, request)
        return urlencode(token.items())

    def create_access_token_response(self, uri, http_method='GET', body=None,
                                     headers=None, credentials=None):
        """Create an access token response, with a new request token if valid.

        :param uri: The full URI of the token request.
        :param http_method: A valid HTTP verb, i.e. GET, POST, PUT, HEAD, etc.
        :param body: The request body as a string.
        :param headers: The request headers as a dict.
        :param credentials: A list of extra credentials to include in the token.
        :returns: A tuple of 3 elements.
                  1. A dict of headers to set on the response.
                  2. The response body as a string.
                  3. The response status code as an integer.

        An example of a valid request::

            >>> from your_validator import your_validator
            >>> from oauthlib.oauth1 import AccessTokenEndpoint
            >>> endpoint = AccessTokenEndpoint(your_validator)
            >>> h, b, s = endpoint.create_access_token_response(
            ...     'https://your.provider/access_token?foo=bar',
            ...     headers={
            ...         'Authorization': 'OAuth oauth_token=234lsdkf....'
            ...     },
            ...     credentials={
            ...         'my_specific': 'argument',
            ...     })
            >>> h
            {'Content-Type': 'application/x-www-form-urlencoded'}
            >>> b
            'oauth_token=lsdkfol23w54jlksdef&oauth_token_secret=qwe089234lkjsdf&oauth_authorized_realms=movies+pics&my_specific=argument'
            >>> s
            200

        An response to invalid request would have a different body and status::

            >>> b
            'error=invalid_request&description=missing+resource+owner+key'
            >>> s
            400

        The same goes for an an unauthorized request:

            >>> b
            ''
            >>> s
            401
        """
        resp_headers = {'Content-Type': 'application/x-www-form-urlencoded'}
        try:
            request = self._create_request(uri, http_method, body, headers)
            valid, processed_request = self.validate_access_token_request(
                request)
            if valid:
                token = self.create_access_token(request, credentials or {})
                # The temporary (request) token is single-use; revoke it once
                # exchanged for an access token.
                self.request_validator.invalidate_request_token(
                    request.client_key,
                    request.resource_owner_key,
                    request)
                return resp_headers, token, 200
            else:
                # Credentials failed verification: empty 401 response.
                return {}, None, 401
        except errors.OAuth1Error as e:
            # Malformed/invalid requests are reported as urlencoded error
            # bodies with the error's own status code (typically 400).
            return resp_headers, e.urlencoded, e.status_code

    def validate_access_token_request(self, request):
        """Validate an access token request.

        :param request: OAuthlib request.
        :type request: oauthlib.common.Request
        :raises: OAuth1Error if the request is invalid.
        :returns: A tuple of 2 elements.
                  1. The validation result (True or False).
                  2. The request object.
        """
        self._check_transport_security(request)
        self._check_mandatory_parameters(request)

        if not request.resource_owner_key:
            raise errors.InvalidRequestError(
                description='Missing resource owner.')

        if not self.request_validator.check_request_token(
                request.resource_owner_key):
            raise errors.InvalidRequestError(
                description='Invalid resource owner key format.')

        if not request.verifier:
            raise errors.InvalidRequestError(
                description='Missing verifier.')

        if not self.request_validator.check_verifier(request.verifier):
            raise errors.InvalidRequestError(
                description='Invalid verifier format.')

        if not self.request_validator.validate_timestamp_and_nonce(
                request.client_key, request.timestamp, request.nonce, request,
                request_token=request.resource_owner_key):
            return False, request

        # The server SHOULD return a 401 (Unauthorized) status code when
        # receiving a request with invalid client credentials.
        # Note: This is postponed in order to avoid timing attacks, instead
        # a dummy client is assigned and used to maintain near constant
        # time request verification.
        #
        # Note that early exit would enable client enumeration
        valid_client = self.request_validator.validate_client_key(
            request.client_key, request)
        if not valid_client:
            request.client_key = self.request_validator.dummy_client

        # The server SHOULD return a 401 (Unauthorized) status code when
        # receiving a request with invalid or expired token.
        # Note: This is postponed in order to avoid timing attacks, instead
        # a dummy token is assigned and used to maintain near constant
        # time request verification.
        #
        # Note that early exit would enable resource owner enumeration
        valid_resource_owner = self.request_validator.validate_request_token(
            request.client_key, request.resource_owner_key, request)
        if not valid_resource_owner:
            request.resource_owner_key = self.request_validator.dummy_request_token

        # The server MUST verify (Section 3.2) the validity of the request,
        # ensure that the resource owner has authorized the provisioning of
        # token credentials to the client, and ensure that the temporary
        # credentials have not expired or been used before.  The server MUST
        # also verify the verification code received from the client.
        # .. _`Section 3.2`: https://tools.ietf.org/html/rfc5849#section-3.2
        #
        # Note that early exit would enable resource owner authorization
        # verifier enumeration.
        valid_verifier = self.request_validator.validate_verifier(
            request.client_key,
            request.resource_owner_key,
            request.verifier,
            request)

        valid_signature = self._check_signature(request, is_token_request=True)

        # log the results to the validator_log
        # this lets us handle internal reporting and analysis
        request.validator_log['client'] = valid_client
        request.validator_log['resource_owner'] = valid_resource_owner
        request.validator_log['verifier'] = valid_verifier
        request.validator_log['signature'] = valid_signature

        # We delay checking validity until the very end, using dummy values for
        # calculations and fetching secrets/keys to ensure the flow of every
        # request remains almost identical regardless of whether valid values
        # have been supplied. This ensures near constant time execution and
        # prevents malicious users from guessing sensitive information
        v = all((valid_client, valid_resource_owner, valid_verifier,
                 valid_signature))
        if not v:
            log.info("[Failure] request verification failed.")
            log.info("Valid client:, %s", valid_client)
            log.info("Valid token:, %s", valid_resource_owner)
            log.info("Valid verifier:, %s", valid_verifier)
            log.info("Valid signature:, %s", valid_signature)
        return v, request
|
evalkit_tf446/lib/python3.10/site-packages/oauthlib/oauth1/rfc5849/endpoints/authorization.py
ADDED
|
@@ -0,0 +1,158 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
# -*- coding: utf-8 -*-
"""
oauthlib.oauth1.rfc5849.endpoints.authorization
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~

This module is an implementation of various logic needed
for signing and checking OAuth 1.0 RFC 5849 requests.
"""
from urllib.parse import urlencode

from oauthlib.common import add_params_to_uri

from .. import errors
from .base import BaseEndpoint


class AuthorizationEndpoint(BaseEndpoint):

    """An endpoint responsible for letting authenticated users authorize access
    to their protected resources to a client.

    Typical use would be to have two views, one for displaying the authorization
    form and one to process said form on submission.

    The first view will want to utilize ``get_realms_and_credentials`` to fetch
    requested realms and useful client credentials, such as name and
    description, to be used when creating the authorization form.

    During form processing you can use ``create_authorization_response`` to
    validate the request, create a verifier as well as prepare the final
    redirection URI used to send the user back to the client.

    See :doc:`/oauth1/validator` for details on which validator methods to implement
    for this endpoint.
    """

    def create_verifier(self, request, credentials):
        """Create and save a new request token.

        :param request: OAuthlib request.
        :type request: oauthlib.common.Request
        :param credentials: A dict of extra token credentials.
        :returns: The verifier as a dict.
        """
        # The verifier is bound to the request token it authorizes and is
        # produced by the (possibly user-supplied) token generator.
        verifier = {
            'oauth_token': request.resource_owner_key,
            'oauth_verifier': self.token_generator(),
        }
        # Extra credentials (e.g. display values) are merged in so they are
        # persisted alongside the verifier and echoed back to the client.
        verifier.update(credentials)
        self.request_validator.save_verifier(
            request.resource_owner_key, verifier, request)
        return verifier

    def create_authorization_response(self, uri, http_method='GET', body=None,
                                      headers=None, realms=None, credentials=None):
        """Create an authorization response, with a new request token if valid.

        :param uri: The full URI of the token request.
        :param http_method: A valid HTTP verb, i.e. GET, POST, PUT, HEAD, etc.
        :param body: The request body as a string.
        :param headers: The request headers as a dict.
        :param realms: A list of realms the user authorized access to.
        :param credentials: A list of credentials to include in the verifier.
        :returns: A tuple of 3 elements.
                  1. A dict of headers to set on the response.
                  2. The response body as a string.
                  3. The response status code as an integer.

        If the callback URI tied to the current token is "oob", a response with
        a 200 status code will be returned. In this case, it may be desirable to
        modify the response to better display the verifier to the client.

        An example of an authorization request::

            >>> from your_validator import your_validator
            >>> from oauthlib.oauth1 import AuthorizationEndpoint
            >>> endpoint = AuthorizationEndpoint(your_validator)
            >>> h, b, s = endpoint.create_authorization_response(
            ...     'https://your.provider/authorize?oauth_token=...',
            ...     credentials={
            ...         'extra': 'argument',
            ...     })
            >>> h
            {'Location': 'https://the.client/callback?oauth_verifier=...&extra=argument'}
            >>> b
            None
            >>> s
            302

        An example of a request with an "oob" callback::

            >>> from your_validator import your_validator
            >>> from oauthlib.oauth1 import AuthorizationEndpoint
            >>> endpoint = AuthorizationEndpoint(your_validator)
            >>> h, b, s = endpoint.create_authorization_response(
            ...     'https://your.provider/authorize?foo=bar',
            ...     credentials={
            ...         'extra': 'argument',
            ...     })
            >>> h
            {'Content-Type': 'application/x-www-form-urlencoded'}
            >>> b
            'oauth_verifier=...&extra=argument'
            >>> s
            200
        """
        request = self._create_request(uri, http_method=http_method, body=body,
                                       headers=headers)

        # The oauth_token identifies the request token being authorized; it
        # is mandatory in the authorization step.
        if not request.resource_owner_key:
            raise errors.InvalidRequestError(
                'Missing mandatory parameter oauth_token.')
        if not self.request_validator.verify_request_token(
                request.resource_owner_key, request):
            raise errors.InvalidClientError()

        # The user may only grant realms the client actually asked for.
        request.realms = realms
        if (request.realms and not self.request_validator.verify_realms(
                request.resource_owner_key, request.realms, request)):
            raise errors.InvalidRequestError(
                description=('User granted access to realms outside of '
                             'what the client may request.'))

        verifier = self.create_verifier(request, credentials or {})
        redirect_uri = self.request_validator.get_redirect_uri(
            request.resource_owner_key, request)
        if redirect_uri == 'oob':
            # Out-of-band callback: return the verifier in the body so the
            # provider can display it to the resource owner.
            response_headers = {
                'Content-Type': 'application/x-www-form-urlencoded'}
            response_body = urlencode(verifier)
            return response_headers, response_body, 200
        else:
            # Normal flow: redirect back to the client with the verifier
            # appended to its registered callback URI.
            populated_redirect = add_params_to_uri(
                redirect_uri, verifier.items())
            return {'Location': populated_redirect}, None, 302

    def get_realms_and_credentials(self, uri, http_method='GET', body=None,
                                   headers=None):
        """Fetch realms and credentials for the presented request token.

        :param uri: The full URI of the token request.
        :param http_method: A valid HTTP verb, i.e. GET, POST, PUT, HEAD, etc.
        :param body: The request body as a string.
        :param headers: The request headers as a dict.
        :returns: A tuple of 2 elements.
                  1. A list of request realms.
                  2. A dict of credentials which may be useful in creating the
                  authorization form.
        """
        request = self._create_request(uri, http_method=http_method, body=body,
                                       headers=headers)

        if not self.request_validator.verify_request_token(
                request.resource_owner_key, request):
            raise errors.InvalidClientError()

        realms = self.request_validator.get_realms(
            request.resource_owner_key, request)
        return realms, {'resource_owner_key': request.resource_owner_key}
|
evalkit_tf446/lib/python3.10/site-packages/oauthlib/oauth1/rfc5849/endpoints/base.py
ADDED
|
@@ -0,0 +1,244 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
# -*- coding: utf-8 -*-
"""
oauthlib.oauth1.rfc5849.endpoints.base
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~

This module is an implementation of various logic needed
for signing and checking OAuth 1.0 RFC 5849 requests.
"""
import time

from oauthlib.common import CaseInsensitiveDict, Request, generate_token

from .. import (
    CONTENT_TYPE_FORM_URLENCODED, SIGNATURE_HMAC_SHA1, SIGNATURE_HMAC_SHA256,
    SIGNATURE_HMAC_SHA512, SIGNATURE_PLAINTEXT, SIGNATURE_RSA_SHA1,
    SIGNATURE_RSA_SHA256, SIGNATURE_RSA_SHA512, SIGNATURE_TYPE_AUTH_HEADER,
    SIGNATURE_TYPE_BODY, SIGNATURE_TYPE_QUERY, errors, signature, utils,
)


class BaseEndpoint:
    """Shared parsing and verification logic for OAuth 1.0 provider endpoints.

    Subclasses (request token, access token, authorization, resource
    endpoints) use the helpers here to turn raw HTTP data into an
    ``oauthlib.common.Request`` and to verify its mandatory parameters and
    signature.
    """

    def __init__(self, request_validator, token_generator=None):
        """:param request_validator: A provider-specific validator object.
        :param token_generator: Zero-argument callable returning a new token
                                string; defaults to ``generate_token``.
        """
        self.request_validator = request_validator
        self.token_generator = token_generator or generate_token

    def _get_signature_type_and_params(self, request):
        """Extracts parameters from query, headers and body. Signature type
        is set to the source in which parameters were found.

        :raises errors.InvalidRequestError: if oauth_ parameters appear in
            more than one location, or in none at all.
        """
        # Per RFC5849, only the Authorization header may contain the 'realm'
        # optional parameter.
        header_params = signature.collect_parameters(headers=request.headers,
                                                     exclude_oauth_signature=False, with_realm=True)
        body_params = signature.collect_parameters(body=request.body,
                                                   exclude_oauth_signature=False)
        query_params = signature.collect_parameters(uri_query=request.uri_query,
                                                    exclude_oauth_signature=False)

        params = []
        params.extend(header_params)
        params.extend(body_params)
        params.extend(query_params)
        # Keep only the locations that actually carried oauth_ parameters;
        # RFC 5849 section 3.5 requires them to come from a single location.
        signature_types_with_oauth_params = list(filter(lambda s: s[2], (
            (SIGNATURE_TYPE_AUTH_HEADER, params,
                utils.filter_oauth_params(header_params)),
            (SIGNATURE_TYPE_BODY, params,
                utils.filter_oauth_params(body_params)),
            (SIGNATURE_TYPE_QUERY, params,
                utils.filter_oauth_params(query_params))
        )))

        if len(signature_types_with_oauth_params) > 1:
            found_types = [s[0] for s in signature_types_with_oauth_params]
            # Bug fix: previously a (format, args) tuple was passed as the
            # description and the two fragments joined without a space.
            raise errors.InvalidRequestError(
                description=('oauth_ params must come from only 1 signature '
                             'type but were found in %s'
                             % ', '.join(found_types)))

        try:
            signature_type, params, oauth_params = signature_types_with_oauth_params[
                0]
        except IndexError:
            raise errors.InvalidRequestError(
                description='Missing mandatory OAuth parameters.')

        return signature_type, params, oauth_params

    def _create_request(self, uri, http_method, body, headers):
        """Build an ``oauthlib.common.Request`` and populate it with the
        OAuth parameters extracted from the raw HTTP data.
        """
        # Only include body data from x-www-form-urlencoded requests
        headers = CaseInsensitiveDict(headers or {})
        if ("Content-Type" in headers and
                CONTENT_TYPE_FORM_URLENCODED in headers["Content-Type"]):
            request = Request(uri, http_method, body, headers)
        else:
            request = Request(uri, http_method, '', headers)

        signature_type, params, oauth_params = (
            self._get_signature_type_and_params(request))

        # The server SHOULD return a 400 (Bad Request) status code when
        # receiving a request with duplicated protocol parameters.
        if len(dict(oauth_params)) != len(oauth_params):
            raise errors.InvalidRequestError(
                description='Duplicate OAuth1 entries.')

        oauth_params = dict(oauth_params)
        request.signature = oauth_params.get('oauth_signature')
        request.client_key = oauth_params.get('oauth_consumer_key')
        request.resource_owner_key = oauth_params.get('oauth_token')
        request.nonce = oauth_params.get('oauth_nonce')
        request.timestamp = oauth_params.get('oauth_timestamp')
        request.redirect_uri = oauth_params.get('oauth_callback')
        request.verifier = oauth_params.get('oauth_verifier')
        request.signature_method = oauth_params.get('oauth_signature_method')
        request.realm = dict(params).get('realm')
        request.oauth_params = oauth_params

        # Parameters to Client depend on signature method which may vary
        # for each request. Note that HMAC-SHA1 and PLAINTEXT share parameters
        request.params = [(k, v) for k, v in params if k != "oauth_signature"]

        # 'realm' is never part of the signature base string, so drop it
        # when it was supplied via the Authorization header.
        if 'realm' in request.headers.get('Authorization', ''):
            request.params = [(k, v)
                              for k, v in request.params if k != "realm"]

        return request

    def _check_transport_security(self, request):
        """Reject plain-HTTP requests when the validator enforces SSL."""
        # TODO: move into oauthlib.common from oauth2.utils
        if (self.request_validator.enforce_ssl and
                not request.uri.lower().startswith("https://")):
            raise errors.InsecureTransportError()

    def _check_mandatory_parameters(self, request):
        """Validate presence and basic format of the required OAuth
        parameters (signature, client key, nonce, timestamp, method).

        :raises errors.InvalidRequestError: on missing/malformed parameters.
        :raises errors.InvalidSignatureMethodError: for unsupported methods.
        """
        # The server SHOULD return a 400 (Bad Request) status code when
        # receiving a request with missing parameters.
        if not all((request.signature, request.client_key,
                    request.nonce, request.timestamp,
                    request.signature_method)):
            raise errors.InvalidRequestError(
                description='Missing mandatory OAuth parameters.')

        # OAuth does not mandate a particular signature method, as each
        # implementation can have its own unique requirements. Servers are
        # free to implement and document their own custom methods.
        # Recommending any particular method is beyond the scope of this
        # specification. Implementers should review the Security
        # Considerations section (`Section 4`_) before deciding on which
        # method to support.
        # .. _`Section 4`: https://tools.ietf.org/html/rfc5849#section-4
        if (request.signature_method not in
                self.request_validator.allowed_signature_methods):
            raise errors.InvalidSignatureMethodError(
                description="Invalid signature, {} not in {!r}.".format(
                    request.signature_method,
                    self.request_validator.allowed_signature_methods))

        # Servers receiving an authenticated request MUST validate it by:
        # If the "oauth_version" parameter is present, ensuring its value is
        # "1.0".
        if ('oauth_version' in request.oauth_params and
                request.oauth_params['oauth_version'] != '1.0'):
            raise errors.InvalidRequestError(
                description='Invalid OAuth version.')

        # The timestamp value MUST be a positive integer. Unless otherwise
        # specified by the server's documentation, the timestamp is expressed
        # in the number of seconds since January 1, 1970 00:00:00 GMT.
        if len(request.timestamp) != 10:
            raise errors.InvalidRequestError(
                description='Invalid timestamp size')

        try:
            ts = int(request.timestamp)

        except ValueError:
            raise errors.InvalidRequestError(
                description='Timestamp must be an integer.')

        else:
            # To avoid the need to retain an infinite number of nonce values for
            # future checks, servers MAY choose to restrict the time period after
            # which a request with an old timestamp is rejected.
            if abs(time.time() - ts) > self.request_validator.timestamp_lifetime:
                raise errors.InvalidRequestError(
                    description=('Timestamp given is invalid, differ from '
                                 'allowed by over %s seconds.' % (
                                     self.request_validator.timestamp_lifetime)))

        # Provider specific validation of parameters, used to enforce
        # restrictions such as character set and length.
        if not self.request_validator.check_client_key(request.client_key):
            raise errors.InvalidRequestError(
                description='Invalid client key format.')

        if not self.request_validator.check_nonce(request.nonce):
            raise errors.InvalidRequestError(
                description='Invalid nonce format.')

    def _check_signature(self, request, is_token_request=False):
        """Recompute/verify the request signature.

        :param is_token_request: True when verifying a request-token step,
            which selects the request-token secret over the access-token one.
        :returns: True if the signature is valid, False otherwise.
        """
        # ---- RSA Signature verification ----
        if request.signature_method == SIGNATURE_RSA_SHA1 or \
           request.signature_method == SIGNATURE_RSA_SHA256 or \
           request.signature_method == SIGNATURE_RSA_SHA512:
            # RSA-based signature method

            # The server verifies the signature per `[RFC3447] section 8.2.2`_
            # .. _`[RFC3447] section 8.2.2`: https://tools.ietf.org/html/rfc3447#section-8.2.1

            rsa_key = self.request_validator.get_rsa_key(
                request.client_key, request)

            if request.signature_method == SIGNATURE_RSA_SHA1:
                valid_signature = signature.verify_rsa_sha1(request, rsa_key)
            elif request.signature_method == SIGNATURE_RSA_SHA256:
                valid_signature = signature.verify_rsa_sha256(request, rsa_key)
            elif request.signature_method == SIGNATURE_RSA_SHA512:
                valid_signature = signature.verify_rsa_sha512(request, rsa_key)
            else:
                valid_signature = False

        # ---- HMAC or Plaintext Signature verification ----
        else:
            # Non-RSA based signature method

            # Servers receiving an authenticated request MUST validate it by:
            # Recalculating the request signature independently as described in
            # `Section 3.4`_ and comparing it to the value received from the
            # client via the "oauth_signature" parameter.
            # .. _`Section 3.4`: https://tools.ietf.org/html/rfc5849#section-3.4

            client_secret = self.request_validator.get_client_secret(
                request.client_key, request)

            resource_owner_secret = None
            if request.resource_owner_key:
                if is_token_request:
                    resource_owner_secret = \
                        self.request_validator.get_request_token_secret(
                            request.client_key, request.resource_owner_key,
                            request)
                else:
                    resource_owner_secret = \
                        self.request_validator.get_access_token_secret(
                            request.client_key, request.resource_owner_key,
                            request)

            if request.signature_method == SIGNATURE_HMAC_SHA1:
                valid_signature = signature.verify_hmac_sha1(
                    request, client_secret, resource_owner_secret)
            elif request.signature_method == SIGNATURE_HMAC_SHA256:
                valid_signature = signature.verify_hmac_sha256(
                    request, client_secret, resource_owner_secret)
            elif request.signature_method == SIGNATURE_HMAC_SHA512:
                valid_signature = signature.verify_hmac_sha512(
                    request, client_secret, resource_owner_secret)
            elif request.signature_method == SIGNATURE_PLAINTEXT:
                valid_signature = signature.verify_plaintext(
                    request, client_secret, resource_owner_secret)
            else:
                valid_signature = False

        return valid_signature
|
evalkit_tf446/lib/python3.10/site-packages/oauthlib/oauth1/rfc5849/endpoints/pre_configured.py
ADDED
|
@@ -0,0 +1,14 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
from . import (
    AccessTokenEndpoint, AuthorizationEndpoint, RequestTokenEndpoint,
    ResourceEndpoint,
)


class WebApplicationServer(RequestTokenEndpoint, AuthorizationEndpoint,
                           AccessTokenEndpoint, ResourceEndpoint):
    """An all-in-one OAuth 1.0 provider combining the request token,
    authorization, access token and resource endpoints, all sharing a
    single request validator.
    """

    def __init__(self, request_validator):
        # Each endpoint base is initialized explicitly with the same
        # validator; iterating the bases keeps the four identical calls
        # in one place.
        for endpoint_class in (RequestTokenEndpoint, AuthorizationEndpoint,
                               AccessTokenEndpoint, ResourceEndpoint):
            endpoint_class.__init__(self, request_validator)
|
evalkit_tf446/lib/python3.10/site-packages/oauthlib/oauth1/rfc5849/endpoints/resource.py
ADDED
|
@@ -0,0 +1,163 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
# -*- coding: utf-8 -*-
"""
oauthlib.oauth1.rfc5849.endpoints.resource
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~

This module is an implementation of the resource protection provider logic of
OAuth 1.0 RFC 5849.
"""
import logging

from .. import errors
from .base import BaseEndpoint

log = logging.getLogger(__name__)


class ResourceEndpoint(BaseEndpoint):

    """An endpoint responsible for protecting resources.

    Typical use is to instantiate with a request validator and invoke the
    ``validate_protected_resource_request`` in a decorator around a view
    function. If the request is valid, invoke and return the response of the
    view. If invalid create and return an error response directly from the
    decorator.

    See :doc:`/oauth1/validator` for details on which validator methods to implement
    for this endpoint.

    An example decorator::

        from functools import wraps
        from your_validator import your_validator
        from oauthlib.oauth1 import ResourceEndpoint
        endpoint = ResourceEndpoint(your_validator)

        def require_oauth(realms=None):
            def decorator(f):
                @wraps(f)
                def wrapper(request, *args, **kwargs):
                    v, r = provider.validate_protected_resource_request(
                        request.url,
                        http_method=request.method,
                        body=request.data,
                        headers=request.headers,
                        realms=realms or [])
                    if v:
                        return f(*args, **kwargs)
                    else:
                        return abort(403)
    """

    def validate_protected_resource_request(self, uri, http_method='GET',
                                            body=None, headers=None, realms=None):
        """Create a request token response, with a new request token if valid.

        :param uri: The full URI of the token request.
        :param http_method: A valid HTTP verb, i.e. GET, POST, PUT, HEAD, etc.
        :param body: The request body as a string.
        :param headers: The request headers as a dict.
        :param realms: A list of realms the resource is protected under.
                       This will be supplied to the ``validate_realms``
                       method of the request validator.
        :returns: A tuple of 2 elements.
                  1. True if valid, False otherwise.
                  2. An oauthlib.common.Request object.
        """
        # Malformed requests cannot even be parsed; there is no request
        # object to hand back in that case.
        try:
            request = self._create_request(uri, http_method, body, headers)
        except errors.OAuth1Error:
            return False, None

        try:
            self._check_transport_security(request)
            self._check_mandatory_parameters(request)
        except errors.OAuth1Error:
            return False, request

        # Access to a protected resource always requires an access token.
        if not request.resource_owner_key:
            return False, request

        if not self.request_validator.check_access_token(
                request.resource_owner_key):
            return False, request

        # Replay protection: each (timestamp, nonce) pair may only be used
        # once per client/token combination.
        if not self.request_validator.validate_timestamp_and_nonce(
                request.client_key, request.timestamp, request.nonce, request,
                access_token=request.resource_owner_key):
            return False, request

        # The server SHOULD return a 401 (Unauthorized) status code when
        # receiving a request with invalid client credentials.
        # Note: This is postponed in order to avoid timing attacks, instead
        # a dummy client is assigned and used to maintain near constant
        # time request verification.
        #
        # Note that early exit would enable client enumeration
        valid_client = self.request_validator.validate_client_key(
            request.client_key, request)
        if not valid_client:
            request.client_key = self.request_validator.dummy_client

        # The server SHOULD return a 401 (Unauthorized) status code when
        # receiving a request with invalid or expired token.
        # Note: This is postponed in order to avoid timing attacks, instead
        # a dummy token is assigned and used to maintain near constant
        # time request verification.
        #
        # Note that early exit would enable resource owner enumeration
        valid_resource_owner = self.request_validator.validate_access_token(
            request.client_key, request.resource_owner_key, request)
        if not valid_resource_owner:
            request.resource_owner_key = self.request_validator.dummy_access_token

        # Note that `realm`_ is only used in authorization headers and how
        # it should be interpreted is not included in the OAuth spec.
        # However they could be seen as a scope or realm to which the
        # client has access and as such every client should be checked
        # to ensure it is authorized access to that scope or realm.
        # .. _`realm`: https://tools.ietf.org/html/rfc2617#section-1.2
        #
        # Note that early exit would enable client realm access enumeration.
        #
        # The require_realm indicates this is the first step in the OAuth
        # workflow where a client requests access to a specific realm.
        # This first step (obtaining request token) need not require a realm
        # and can then be identified by checking the require_resource_owner
        # flag and absence of realm.
        #
        # Clients obtaining an access token will not supply a realm and it will
        # not be checked. Instead the previously requested realm should be
        # transferred from the request token to the access token.
        #
        # Access to protected resources will always validate the realm but note
        # that the realm is now tied to the access token and not provided by
        # the client.
        valid_realm = self.request_validator.validate_realms(request.client_key,
                                                             request.resource_owner_key, request, uri=request.uri,
                                                             realms=realms)

        valid_signature = self._check_signature(request)

        # log the results to the validator_log
        # this lets us handle internal reporting and analysis
        request.validator_log['client'] = valid_client
        request.validator_log['resource_owner'] = valid_resource_owner
        request.validator_log['realm'] = valid_realm
        request.validator_log['signature'] = valid_signature

        # We delay checking validity until the very end, using dummy values for
        # calculations and fetching secrets/keys to ensure the flow of every
        # request remains almost identical regardless of whether valid values
        # have been supplied. This ensures near constant time execution and
        # prevents malicious users from guessing sensitive information
        v = all((valid_client, valid_resource_owner, valid_realm,
                 valid_signature))
        if not v:
            log.info("[Failure] request verification failed.")
            log.info("Valid client: %s", valid_client)
            log.info("Valid token: %s", valid_resource_owner)
            log.info("Valid realm: %s", valid_realm)
            log.info("Valid signature: %s", valid_signature)
        return v, request
|
evalkit_tf446/lib/python3.10/site-packages/oauthlib/oauth1/rfc5849/endpoints/signature_only.py
ADDED
|
@@ -0,0 +1,82 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
# -*- coding: utf-8 -*-
|
| 2 |
+
"""
|
| 3 |
+
oauthlib.oauth1.rfc5849.endpoints.signature_only
|
| 4 |
+
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
|
| 5 |
+
|
| 6 |
+
This module is an implementation of the signing logic of OAuth 1.0 RFC 5849.
|
| 7 |
+
"""
|
| 8 |
+
|
| 9 |
+
import logging
|
| 10 |
+
|
| 11 |
+
from .. import errors
|
| 12 |
+
from .base import BaseEndpoint
|
| 13 |
+
|
| 14 |
+
log = logging.getLogger(__name__)
|
| 15 |
+
|
| 16 |
+
|
| 17 |
+
class SignatureOnlyEndpoint(BaseEndpoint):

    """An endpoint only responsible for verifying an oauth signature."""

    def validate_request(self, uri, http_method='GET',
                         body=None, headers=None):
        """Validate a signed OAuth request.

        :param uri: The full URI of the token request.
        :param http_method: A valid HTTP verb, i.e. GET, POST, PUT, HEAD, etc.
        :param body: The request body as a string.
        :param headers: The request headers as a dict.
        :returns: A tuple of 2 elements.
                  1. True if valid, False otherwise.
                  2. An oauthlib.common.Request object, or None if the
                     request could not be parsed at all.
        """
        try:
            request = self._create_request(uri, http_method, body, headers)
        except errors.OAuth1Error as err:
            # Lazy %-args: the message is only interpolated if this record
            # is actually emitted by a handler.
            log.info('Exception caught while validating request, %s.', err)
            return False, None

        try:
            self._check_transport_security(request)
            self._check_mandatory_parameters(request)
        except errors.OAuth1Error as err:
            log.info('Exception caught while validating request, %s.', err)
            return False, request

        # Replay protection: each (client key, timestamp, nonce) triple may
        # only be used once.
        if not self.request_validator.validate_timestamp_and_nonce(
                request.client_key, request.timestamp, request.nonce, request):
            log.debug('[Failure] verification failed: timestamp/nonce')
            return False, request

        # The server SHOULD return a 401 (Unauthorized) status code when
        # receiving a request with invalid client credentials.
        # Note: This is postponed in order to avoid timing attacks, instead
        # a dummy client is assigned and used to maintain near constant
        # time request verification.
        #
        # Note that early exit would enable client enumeration
        valid_client = self.request_validator.validate_client_key(
            request.client_key, request)
        if not valid_client:
            request.client_key = self.request_validator.dummy_client

        valid_signature = self._check_signature(request)

        # log the results to the validator_log
        # this lets us handle internal reporting and analysis
        request.validator_log['client'] = valid_client
        request.validator_log['signature'] = valid_signature

        # We delay checking validity until the very end, using dummy values for
        # calculations and fetching secrets/keys to ensure the flow of every
        # request remains almost identical regardless of whether valid values
        # have been supplied. This ensures near constant time execution and
        # prevents malicious users from guessing sensitive information
        v = all((valid_client, valid_signature))
        if not v:
            log.info("[Failure] request verification failed.")
            log.info("Valid client: %s", valid_client)
            log.info("Valid signature: %s", valid_signature)
        return v, request
|
evalkit_tf446/lib/python3.10/site-packages/oauthlib/oauth1/rfc5849/errors.py
ADDED
|
@@ -0,0 +1,76 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""
|
| 2 |
+
oauthlib.oauth1.rfc5849.errors
|
| 3 |
+
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
|
| 4 |
+
|
| 5 |
+
Error used both by OAuth 1 clients and providers to represent the spec
|
| 6 |
+
defined error responses for all four core grant types.
|
| 7 |
+
"""
|
| 8 |
+
from oauthlib.common import add_params_to_uri, urlencode
|
| 9 |
+
|
| 10 |
+
|
| 11 |
+
class OAuth1Error(Exception):
    # Subclasses override these with a spec-defined error code and a
    # default human-readable description.
    error = None
    description = ''

    def __init__(self, description=None, uri=None, status_code=400,
                 request=None):
        """
        description: A human-readable ASCII [USASCII] text providing
                     additional information, used to assist the client
                     developer in understanding the error that occurred.
                     Values for the "error_description" parameter MUST NOT
                     include characters outside the set
                     x20-21 / x23-5B / x5D-7E.

        uri: A URI identifying a human-readable web page with information
             about the error, used to provide the client developer with
             additional information about the error. Values for the
             "error_uri" parameter MUST conform to the URI- Reference
             syntax, and thus MUST NOT include characters outside the set
             x21 / x23-5B / x5D-7E.

        state: A CSRF protection value received from the client.

        request: Oauthlib Request object
        """
        # A falsy description keeps the class-level default.
        if description:
            self.description = description
        msg = f'({self.error}) {self.description}'
        if request:
            msg = f'{msg} {request!r}'
        super().__init__(msg)

        self.uri = uri
        self.status_code = status_code

    def in_uri(self, uri):
        # Render this error as query parameters appended to *uri*.
        return add_params_to_uri(uri, self.twotuples)

    @property
    def twotuples(self):
        # The error code always leads; description and uri are only
        # included when they are truthy.
        optional = (('error_description', self.description),
                    ('error_uri', self.uri))
        return [('error', self.error)] + [pair for pair in optional if pair[1]]

    @property
    def urlencoded(self):
        # Form-encoded representation of the error parameters.
        return urlencode(self.twotuples)
|
| 60 |
+
|
| 61 |
+
|
| 62 |
+
class InsecureTransportError(OAuth1Error):
    # Signals that a non-HTTPS connection was used where only HTTPS is
    # permitted (see the description string below).
    error = 'insecure_transport_protocol'
    description = 'Only HTTPS connections are permitted.'


class InvalidSignatureMethodError(OAuth1Error):
    # Error code reported when the oauth_signature_method is rejected.
    error = 'invalid_signature_method'


class InvalidRequestError(OAuth1Error):
    # Generic error code for a request that fails validation.
    error = 'invalid_request'


class InvalidClientError(OAuth1Error):
    # Error code reported for failing client credentials.
    error = 'invalid_client'
|
evalkit_tf446/lib/python3.10/site-packages/oauthlib/oauth1/rfc5849/parameters.py
ADDED
|
@@ -0,0 +1,133 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""
|
| 2 |
+
oauthlib.parameters
|
| 3 |
+
~~~~~~~~~~~~~~~~~~~
|
| 4 |
+
|
| 5 |
+
This module contains methods related to `section 3.5`_ of the OAuth 1.0a spec.
|
| 6 |
+
|
| 7 |
+
.. _`section 3.5`: https://tools.ietf.org/html/rfc5849#section-3.5
|
| 8 |
+
"""
|
| 9 |
+
from urllib.parse import urlparse, urlunparse
|
| 10 |
+
|
| 11 |
+
from oauthlib.common import extract_params, urlencode
|
| 12 |
+
|
| 13 |
+
from . import utils
|
| 14 |
+
|
| 15 |
+
|
| 16 |
+
# TODO: do we need filter_params now that oauth_params are handled by Request?
|
| 17 |
+
# We can easily pass in just oauth protocol params.
|
| 18 |
+
@utils.filter_params
def prepare_headers(oauth_params, headers=None, realm=None):
    """**Prepare the Authorization header.**
    Per `section 3.5.1`_ of the spec.

    Protocol parameters can be transmitted using the HTTP "Authorization"
    header field as defined by `RFC2617`_ with the auth-scheme name set to
    "OAuth" (case insensitive).

    For example::

        Authorization: OAuth realm="Example",
            oauth_consumer_key="0685bd9184jfhq22",
            oauth_token="ad180jjd733klru7",
            oauth_signature_method="HMAC-SHA1",
            oauth_signature="wOJIO9A2W5mFwDgiDvZbTSMK%2FPY%3D",
            oauth_timestamp="137131200",
            oauth_nonce="4572616e48616d6d65724c61686176",
            oauth_version="1.0"


    .. _`section 3.5.1`: https://tools.ietf.org/html/rfc5849#section-3.5.1
    .. _`RFC2617`: https://tools.ietf.org/html/rfc2617
    """
    headers = headers or {}

    # 1-2. Each protocol parameter becomes name="value" with both halves
    # percent-encoded per Parameter Encoding (section 3.6); the value MAY
    # be empty.
    rendered = []
    for param_name, param_value in oauth_params:
        rendered.append(
            f'{utils.escape(param_name)}="{utils.escape(param_value)}"')

    # 3. Parameters are separated by a "," character (ASCII code 44) and
    # OPTIONAL linear whitespace per RFC2617.
    param_string = ', '.join(rendered)

    # 4. The OPTIONAL "realm" parameter MAY lead the list, interpreted per
    # RFC2617 section 1.2.  NOTE: realm should *not* be escaped.
    if realm:
        param_string = f'realm="{realm}", {param_string}'

    # The auth-scheme name is "OAuth" (case insensitive); contribute the
    # resulting Authorization header to a copy of the given headers.
    full_headers = dict(headers)
    full_headers['Authorization'] = f'OAuth {param_string}'
    return full_headers
|
| 86 |
+
|
| 87 |
+
|
| 88 |
+
def _append_params(oauth_params, params):
|
| 89 |
+
"""Append OAuth params to an existing set of parameters.
|
| 90 |
+
|
| 91 |
+
Both params and oauth_params is must be lists of 2-tuples.
|
| 92 |
+
|
| 93 |
+
Per `section 3.5.2`_ and `3.5.3`_ of the spec.
|
| 94 |
+
|
| 95 |
+
.. _`section 3.5.2`: https://tools.ietf.org/html/rfc5849#section-3.5.2
|
| 96 |
+
.. _`3.5.3`: https://tools.ietf.org/html/rfc5849#section-3.5.3
|
| 97 |
+
|
| 98 |
+
"""
|
| 99 |
+
merged = list(params)
|
| 100 |
+
merged.extend(oauth_params)
|
| 101 |
+
# The request URI / entity-body MAY include other request-specific
|
| 102 |
+
# parameters, in which case, the protocol parameters SHOULD be appended
|
| 103 |
+
# following the request-specific parameters, properly separated by an "&"
|
| 104 |
+
# character (ASCII code 38)
|
| 105 |
+
merged.sort(key=lambda i: i[0].startswith('oauth_'))
|
| 106 |
+
return merged
|
| 107 |
+
|
| 108 |
+
|
| 109 |
+
def prepare_form_encoded_body(oauth_params, body):
    """Prepare the Form-Encoded Body.

    Per `section 3.5.2`_ of the spec.

    :param oauth_params: OAuth protocol parameters as a list of 2-tuples.
    :param body: Existing request-body parameters as a list of 2-tuples.
    :returns: The combined parameter list with protocol params appended.

    .. _`section 3.5.2`: https://tools.ietf.org/html/rfc5849#section-3.5.2

    """
    # append OAuth params to the existing body
    return _append_params(oauth_params, body)
|
| 119 |
+
|
| 120 |
+
|
| 121 |
+
def prepare_request_uri_query(oauth_params, uri):
    """Prepare the Request URI Query.

    Per `section 3.5.3`_ of the spec.

    :param oauth_params: OAuth protocol parameters as a list of 2-tuples.
    :param uri: The request URI whose query string will be extended.
    :returns: The URI with the OAuth parameters merged into its query.

    .. _`section 3.5.3`: https://tools.ietf.org/html/rfc5849#section-3.5.3

    """
    # append OAuth params to the existing set of query components
    sch, net, path, par, query, fra = urlparse(uri)
    # extract_params may return None for an empty/unparseable query, hence
    # the `or []` fallback before appending the protocol parameters.
    query = urlencode(
        _append_params(oauth_params, extract_params(query) or []))
    return urlunparse((sch, net, path, par, query, fra))
|
evalkit_tf446/lib/python3.10/site-packages/oauthlib/oauth1/rfc5849/request_validator.py
ADDED
|
@@ -0,0 +1,849 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""
|
| 2 |
+
oauthlib.oauth1.rfc5849
|
| 3 |
+
~~~~~~~~~~~~~~
|
| 4 |
+
|
| 5 |
+
This module is an implementation of various logic needed
|
| 6 |
+
for signing and checking OAuth 1.0 RFC 5849 requests.
|
| 7 |
+
"""
|
| 8 |
+
from . import SIGNATURE_METHODS, utils
|
| 9 |
+
|
| 10 |
+
|
| 11 |
+
class RequestValidator:
|
| 12 |
+
|
| 13 |
+
"""A validator/datastore interaction base class for OAuth 1 providers.
|
| 14 |
+
|
| 15 |
+
OAuth providers should inherit from RequestValidator and implement the
|
| 16 |
+
methods and properties outlined below. Further details are provided in the
|
| 17 |
+
documentation for each method and property.
|
| 18 |
+
|
| 19 |
+
Methods used to check the format of input parameters. Common tests include
|
| 20 |
+
length, character set, membership, range or pattern. These tests are
|
| 21 |
+
referred to as `whitelisting or blacklisting`_. Whitelisting is better
|
| 22 |
+
but blacklisting can be useful to spot malicious activity.
|
| 23 |
+
    The following methods have a default implementation:
|
| 24 |
+
|
| 25 |
+
- check_client_key
|
| 26 |
+
- check_request_token
|
| 27 |
+
- check_access_token
|
| 28 |
+
- check_nonce
|
| 29 |
+
- check_verifier
|
| 30 |
+
- check_realms
|
| 31 |
+
|
| 32 |
+
The methods above default to whitelist input parameters, checking that they
|
| 33 |
+
are alphanumerical and between a minimum and maximum length. Rather than
|
| 34 |
+
overloading the methods a few properties can be used to configure these
|
| 35 |
+
methods.
|
| 36 |
+
|
| 37 |
+
* @safe_characters -> (character set)
|
| 38 |
+
* @client_key_length -> (min, max)
|
| 39 |
+
* @request_token_length -> (min, max)
|
| 40 |
+
* @access_token_length -> (min, max)
|
| 41 |
+
* @nonce_length -> (min, max)
|
| 42 |
+
* @verifier_length -> (min, max)
|
| 43 |
+
* @realms -> [list, of, realms]
|
| 44 |
+
|
| 45 |
+
Methods used to validate/invalidate input parameters. These checks usually
|
| 46 |
+
hit either persistent or temporary storage such as databases or the
|
| 47 |
+
filesystem. See each methods documentation for detailed usage.
|
| 48 |
+
The following methods must be implemented:
|
| 49 |
+
|
| 50 |
+
- validate_client_key
|
| 51 |
+
- validate_request_token
|
| 52 |
+
- validate_access_token
|
| 53 |
+
- validate_timestamp_and_nonce
|
| 54 |
+
- validate_redirect_uri
|
| 55 |
+
- validate_requested_realms
|
| 56 |
+
- validate_realms
|
| 57 |
+
- validate_verifier
|
| 58 |
+
- invalidate_request_token
|
| 59 |
+
|
| 60 |
+
Methods used to retrieve sensitive information from storage.
|
| 61 |
+
The following methods must be implemented:
|
| 62 |
+
|
| 63 |
+
- get_client_secret
|
| 64 |
+
- get_request_token_secret
|
| 65 |
+
- get_access_token_secret
|
| 66 |
+
- get_rsa_key
|
| 67 |
+
- get_realms
|
| 68 |
+
- get_default_realms
|
| 69 |
+
- get_redirect_uri
|
| 70 |
+
|
| 71 |
+
Methods used to save credentials.
|
| 72 |
+
The following methods must be implemented:
|
| 73 |
+
|
| 74 |
+
- save_request_token
|
| 75 |
+
- save_verifier
|
| 76 |
+
- save_access_token
|
| 77 |
+
|
| 78 |
+
    Methods used to verify input parameters. These methods are used during
|
| 79 |
+
authorizing request token by user (AuthorizationEndpoint), to check if
|
| 80 |
+
parameters are valid. During token authorization request is not signed,
|
| 81 |
+
thus 'validation' methods can not be used. The following methods must be
|
| 82 |
+
implemented:
|
| 83 |
+
|
| 84 |
+
- verify_realms
|
| 85 |
+
- verify_request_token
|
| 86 |
+
|
| 87 |
+
To prevent timing attacks it is necessary to not exit early even if the
|
| 88 |
+
client key or resource owner key is invalid. Instead dummy values should
|
| 89 |
+
be used during the remaining verification process. It is very important
|
| 90 |
+
that the dummy client and token are valid input parameters to the methods
|
| 91 |
+
get_client_secret, get_rsa_key and get_(access/request)_token_secret and
|
| 92 |
+
that the running time of those methods when given a dummy value remain
|
| 93 |
+
equivalent to the running time when given a valid client/resource owner.
|
| 94 |
+
The following properties must be implemented:
|
| 95 |
+
|
| 96 |
+
* @dummy_client
|
| 97 |
+
* @dummy_request_token
|
| 98 |
+
* @dummy_access_token
|
| 99 |
+
|
| 100 |
+
Example implementations have been provided, note that the database used is
|
| 101 |
+
a simple dictionary and serves only an illustrative purpose. Use whichever
|
| 102 |
+
database suits your project and how to access it is entirely up to you.
|
| 103 |
+
The methods are introduced in an order which should make understanding
|
| 104 |
+
their use more straightforward and as such it could be worth reading what
|
| 105 |
+
follows in chronological order.
|
| 106 |
+
|
| 107 |
+
.. _`whitelisting or blacklisting`: https://www.schneier.com/blog/archives/2011/01/whitelisting_vs.html
|
| 108 |
+
"""
|
| 109 |
+
|
| 110 |
+
def __init__(self):
|
| 111 |
+
pass
|
| 112 |
+
|
| 113 |
+
@property
|
| 114 |
+
def allowed_signature_methods(self):
|
| 115 |
+
return SIGNATURE_METHODS
|
| 116 |
+
|
| 117 |
+
@property
|
| 118 |
+
def safe_characters(self):
|
| 119 |
+
return set(utils.UNICODE_ASCII_CHARACTER_SET)
|
| 120 |
+
|
| 121 |
+
@property
|
| 122 |
+
def client_key_length(self):
|
| 123 |
+
return 20, 30
|
| 124 |
+
|
| 125 |
+
@property
|
| 126 |
+
def request_token_length(self):
|
| 127 |
+
return 20, 30
|
| 128 |
+
|
| 129 |
+
@property
|
| 130 |
+
def access_token_length(self):
|
| 131 |
+
return 20, 30
|
| 132 |
+
|
| 133 |
+
@property
|
| 134 |
+
def timestamp_lifetime(self):
|
| 135 |
+
return 600
|
| 136 |
+
|
| 137 |
+
@property
|
| 138 |
+
def nonce_length(self):
|
| 139 |
+
return 20, 30
|
| 140 |
+
|
| 141 |
+
@property
|
| 142 |
+
def verifier_length(self):
|
| 143 |
+
return 20, 30
|
| 144 |
+
|
| 145 |
+
@property
|
| 146 |
+
def realms(self):
|
| 147 |
+
return []
|
| 148 |
+
|
| 149 |
+
@property
|
| 150 |
+
def enforce_ssl(self):
|
| 151 |
+
return True
|
| 152 |
+
|
| 153 |
+
def check_client_key(self, client_key):
|
| 154 |
+
"""Check that the client key only contains safe characters
|
| 155 |
+
and is no shorter than lower and no longer than upper.
|
| 156 |
+
"""
|
| 157 |
+
lower, upper = self.client_key_length
|
| 158 |
+
return (set(client_key) <= self.safe_characters and
|
| 159 |
+
lower <= len(client_key) <= upper)
|
| 160 |
+
|
| 161 |
+
def check_request_token(self, request_token):
|
| 162 |
+
"""Checks that the request token contains only safe characters
|
| 163 |
+
and is no shorter than lower and no longer than upper.
|
| 164 |
+
"""
|
| 165 |
+
lower, upper = self.request_token_length
|
| 166 |
+
return (set(request_token) <= self.safe_characters and
|
| 167 |
+
lower <= len(request_token) <= upper)
|
| 168 |
+
|
| 169 |
+
def check_access_token(self, request_token):
|
| 170 |
+
"""Checks that the token contains only safe characters
|
| 171 |
+
and is no shorter than lower and no longer than upper.
|
| 172 |
+
"""
|
| 173 |
+
lower, upper = self.access_token_length
|
| 174 |
+
return (set(request_token) <= self.safe_characters and
|
| 175 |
+
lower <= len(request_token) <= upper)
|
| 176 |
+
|
| 177 |
+
def check_nonce(self, nonce):
|
| 178 |
+
"""Checks that the nonce only contains only safe characters
|
| 179 |
+
and is no shorter than lower and no longer than upper.
|
| 180 |
+
"""
|
| 181 |
+
lower, upper = self.nonce_length
|
| 182 |
+
return (set(nonce) <= self.safe_characters and
|
| 183 |
+
lower <= len(nonce) <= upper)
|
| 184 |
+
|
| 185 |
+
def check_verifier(self, verifier):
|
| 186 |
+
"""Checks that the verifier contains only safe characters
|
| 187 |
+
and is no shorter than lower and no longer than upper.
|
| 188 |
+
"""
|
| 189 |
+
lower, upper = self.verifier_length
|
| 190 |
+
return (set(verifier) <= self.safe_characters and
|
| 191 |
+
lower <= len(verifier) <= upper)
|
| 192 |
+
|
| 193 |
+
def check_realms(self, realms):
|
| 194 |
+
"""Check that the realm is one of a set allowed realms."""
|
| 195 |
+
return all(r in self.realms for r in realms)
|
| 196 |
+
|
| 197 |
+
def _subclass_must_implement(self, fn):
|
| 198 |
+
"""
|
| 199 |
+
Returns a NotImplementedError for a function that should be implemented.
|
| 200 |
+
:param fn: name of the function
|
| 201 |
+
"""
|
| 202 |
+
m = "Missing function implementation in {}: {}".format(type(self), fn)
|
| 203 |
+
return NotImplementedError(m)
|
| 204 |
+
|
| 205 |
+
@property
|
| 206 |
+
def dummy_client(self):
|
| 207 |
+
"""Dummy client used when an invalid client key is supplied.
|
| 208 |
+
|
| 209 |
+
:returns: The dummy client key string.
|
| 210 |
+
|
| 211 |
+
The dummy client should be associated with either a client secret,
|
| 212 |
+
a rsa key or both depending on which signature methods are supported.
|
| 213 |
+
Providers should make sure that
|
| 214 |
+
|
| 215 |
+
get_client_secret(dummy_client)
|
| 216 |
+
get_rsa_key(dummy_client)
|
| 217 |
+
|
| 218 |
+
return a valid secret or key for the dummy client.
|
| 219 |
+
|
| 220 |
+
This method is used by
|
| 221 |
+
|
| 222 |
+
* AccessTokenEndpoint
|
| 223 |
+
* RequestTokenEndpoint
|
| 224 |
+
* ResourceEndpoint
|
| 225 |
+
* SignatureOnlyEndpoint
|
| 226 |
+
"""
|
| 227 |
+
raise self._subclass_must_implement("dummy_client")
|
| 228 |
+
|
| 229 |
+
@property
|
| 230 |
+
def dummy_request_token(self):
|
| 231 |
+
"""Dummy request token used when an invalid token was supplied.
|
| 232 |
+
|
| 233 |
+
:returns: The dummy request token string.
|
| 234 |
+
|
| 235 |
+
The dummy request token should be associated with a request token
|
| 236 |
+
secret such that get_request_token_secret(.., dummy_request_token)
|
| 237 |
+
returns a valid secret.
|
| 238 |
+
|
| 239 |
+
This method is used by
|
| 240 |
+
|
| 241 |
+
* AccessTokenEndpoint
|
| 242 |
+
"""
|
| 243 |
+
raise self._subclass_must_implement("dummy_request_token")
|
| 244 |
+
|
| 245 |
+
@property
|
| 246 |
+
def dummy_access_token(self):
|
| 247 |
+
"""Dummy access token used when an invalid token was supplied.
|
| 248 |
+
|
| 249 |
+
:returns: The dummy access token string.
|
| 250 |
+
|
| 251 |
+
The dummy access token should be associated with an access token
|
| 252 |
+
secret such that get_access_token_secret(.., dummy_access_token)
|
| 253 |
+
returns a valid secret.
|
| 254 |
+
|
| 255 |
+
This method is used by
|
| 256 |
+
|
| 257 |
+
* ResourceEndpoint
|
| 258 |
+
"""
|
| 259 |
+
raise self._subclass_must_implement("dummy_access_token")
|
| 260 |
+
|
| 261 |
+
def get_client_secret(self, client_key, request):
|
| 262 |
+
"""Retrieves the client secret associated with the client key.
|
| 263 |
+
|
| 264 |
+
:param client_key: The client/consumer key.
|
| 265 |
+
:param request: OAuthlib request.
|
| 266 |
+
:type request: oauthlib.common.Request
|
| 267 |
+
:returns: The client secret as a string.
|
| 268 |
+
|
| 269 |
+
This method must allow the use of a dummy client_key value.
|
| 270 |
+
Fetching the secret using the dummy key must take the same amount of
|
| 271 |
+
time as fetching a secret for a valid client::
|
| 272 |
+
|
| 273 |
+
# Unlikely to be near constant time as it uses two database
|
| 274 |
+
# lookups for a valid client, and only one for an invalid.
|
| 275 |
+
from your_datastore import ClientSecret
|
| 276 |
+
if ClientSecret.has(client_key):
|
| 277 |
+
return ClientSecret.get(client_key)
|
| 278 |
+
else:
|
| 279 |
+
return 'dummy'
|
| 280 |
+
|
| 281 |
+
# Aim to mimic number of latency inducing operations no matter
|
| 282 |
+
# whether the client is valid or not.
|
| 283 |
+
from your_datastore import ClientSecret
|
| 284 |
+
return ClientSecret.get(client_key, 'dummy')
|
| 285 |
+
|
| 286 |
+
Note that the returned key must be in plaintext.
|
| 287 |
+
|
| 288 |
+
This method is used by
|
| 289 |
+
|
| 290 |
+
* AccessTokenEndpoint
|
| 291 |
+
* RequestTokenEndpoint
|
| 292 |
+
* ResourceEndpoint
|
| 293 |
+
* SignatureOnlyEndpoint
|
| 294 |
+
"""
|
| 295 |
+
raise self._subclass_must_implement('get_client_secret')
|
| 296 |
+
|
| 297 |
+
def get_request_token_secret(self, client_key, token, request):
|
| 298 |
+
"""Retrieves the shared secret associated with the request token.
|
| 299 |
+
|
| 300 |
+
:param client_key: The client/consumer key.
|
| 301 |
+
:param token: The request token string.
|
| 302 |
+
:param request: OAuthlib request.
|
| 303 |
+
:type request: oauthlib.common.Request
|
| 304 |
+
:returns: The token secret as a string.
|
| 305 |
+
|
| 306 |
+
This method must allow the use of a dummy values and the running time
|
| 307 |
+
must be roughly equivalent to that of the running time of valid values::
|
| 308 |
+
|
| 309 |
+
# Unlikely to be near constant time as it uses two database
|
| 310 |
+
# lookups for a valid client, and only one for an invalid.
|
| 311 |
+
from your_datastore import RequestTokenSecret
|
| 312 |
+
if RequestTokenSecret.has(client_key):
|
| 313 |
+
return RequestTokenSecret.get((client_key, request_token))
|
| 314 |
+
else:
|
| 315 |
+
return 'dummy'
|
| 316 |
+
|
| 317 |
+
# Aim to mimic number of latency inducing operations no matter
|
| 318 |
+
# whether the client is valid or not.
|
| 319 |
+
from your_datastore import RequestTokenSecret
|
| 320 |
+
return ClientSecret.get((client_key, request_token), 'dummy')
|
| 321 |
+
|
| 322 |
+
Note that the returned key must be in plaintext.
|
| 323 |
+
|
| 324 |
+
This method is used by
|
| 325 |
+
|
| 326 |
+
* AccessTokenEndpoint
|
| 327 |
+
"""
|
| 328 |
+
raise self._subclass_must_implement('get_request_token_secret')
|
| 329 |
+
|
| 330 |
+
def get_access_token_secret(self, client_key, token, request):
|
| 331 |
+
"""Retrieves the shared secret associated with the access token.
|
| 332 |
+
|
| 333 |
+
:param client_key: The client/consumer key.
|
| 334 |
+
:param token: The access token string.
|
| 335 |
+
:param request: OAuthlib request.
|
| 336 |
+
:type request: oauthlib.common.Request
|
| 337 |
+
:returns: The token secret as a string.
|
| 338 |
+
|
| 339 |
+
This method must allow the use of a dummy values and the running time
|
| 340 |
+
must be roughly equivalent to that of the running time of valid values::
|
| 341 |
+
|
| 342 |
+
# Unlikely to be near constant time as it uses two database
|
| 343 |
+
# lookups for a valid client, and only one for an invalid.
|
| 344 |
+
from your_datastore import AccessTokenSecret
|
| 345 |
+
if AccessTokenSecret.has(client_key):
|
| 346 |
+
return AccessTokenSecret.get((client_key, request_token))
|
| 347 |
+
else:
|
| 348 |
+
return 'dummy'
|
| 349 |
+
|
| 350 |
+
# Aim to mimic number of latency inducing operations no matter
|
| 351 |
+
# whether the client is valid or not.
|
| 352 |
+
from your_datastore import AccessTokenSecret
|
| 353 |
+
return AccessTokenSecret.get((client_key, request_token), 'dummy')
|
| 354 |
+
|
| 355 |
+
Note that the returned key must be in plaintext.
|
| 356 |
+
|
| 357 |
+
This method is used by
|
| 358 |
+
|
| 359 |
+
* ResourceEndpoint
|
| 360 |
+
"""
|
| 361 |
+
raise self._subclass_must_implement("get_access_token_secret")
|
| 362 |
+
|
| 363 |
+
def get_default_realms(self, client_key, request):
|
| 364 |
+
"""Get the default realms for a client.
|
| 365 |
+
|
| 366 |
+
:param client_key: The client/consumer key.
|
| 367 |
+
:param request: OAuthlib request.
|
| 368 |
+
:type request: oauthlib.common.Request
|
| 369 |
+
:returns: The list of default realms associated with the client.
|
| 370 |
+
|
| 371 |
+
The list of default realms will be set during client registration and
|
| 372 |
+
is outside the scope of OAuthLib.
|
| 373 |
+
|
| 374 |
+
This method is used by
|
| 375 |
+
|
| 376 |
+
* RequestTokenEndpoint
|
| 377 |
+
"""
|
| 378 |
+
raise self._subclass_must_implement("get_default_realms")
|
| 379 |
+
|
| 380 |
+
def get_realms(self, token, request):
|
| 381 |
+
"""Get realms associated with a request token.
|
| 382 |
+
|
| 383 |
+
:param token: The request token string.
|
| 384 |
+
:param request: OAuthlib request.
|
| 385 |
+
:type request: oauthlib.common.Request
|
| 386 |
+
:returns: The list of realms associated with the request token.
|
| 387 |
+
|
| 388 |
+
This method is used by
|
| 389 |
+
|
| 390 |
+
* AuthorizationEndpoint
|
| 391 |
+
* AccessTokenEndpoint
|
| 392 |
+
"""
|
| 393 |
+
raise self._subclass_must_implement("get_realms")
|
| 394 |
+
|
| 395 |
+
def get_redirect_uri(self, token, request):
|
| 396 |
+
"""Get the redirect URI associated with a request token.
|
| 397 |
+
|
| 398 |
+
:param token: The request token string.
|
| 399 |
+
:param request: OAuthlib request.
|
| 400 |
+
:type request: oauthlib.common.Request
|
| 401 |
+
:returns: The redirect URI associated with the request token.
|
| 402 |
+
|
| 403 |
+
It may be desirable to return a custom URI if the redirect is set to "oob".
|
| 404 |
+
In this case, the user will be redirected to the returned URI and at that
|
| 405 |
+
endpoint the verifier can be displayed.
|
| 406 |
+
|
| 407 |
+
This method is used by
|
| 408 |
+
|
| 409 |
+
* AuthorizationEndpoint
|
| 410 |
+
"""
|
| 411 |
+
raise self._subclass_must_implement("get_redirect_uri")
|
| 412 |
+
|
| 413 |
+
def get_rsa_key(self, client_key, request):
|
| 414 |
+
"""Retrieves a previously stored client provided RSA key.
|
| 415 |
+
|
| 416 |
+
:param client_key: The client/consumer key.
|
| 417 |
+
:param request: OAuthlib request.
|
| 418 |
+
:type request: oauthlib.common.Request
|
| 419 |
+
:returns: The rsa public key as a string.
|
| 420 |
+
|
| 421 |
+
This method must allow the use of a dummy client_key value. Fetching
|
| 422 |
+
the rsa key using the dummy key must take the same amount of time
|
| 423 |
+
as fetching a key for a valid client. The dummy key must also be of
|
| 424 |
+
the same bit length as client keys.
|
| 425 |
+
|
| 426 |
+
Note that the key must be returned in plaintext.
|
| 427 |
+
|
| 428 |
+
This method is used by
|
| 429 |
+
|
| 430 |
+
* AccessTokenEndpoint
|
| 431 |
+
* RequestTokenEndpoint
|
| 432 |
+
* ResourceEndpoint
|
| 433 |
+
* SignatureOnlyEndpoint
|
| 434 |
+
"""
|
| 435 |
+
raise self._subclass_must_implement("get_rsa_key")
|
| 436 |
+
|
| 437 |
+
def invalidate_request_token(self, client_key, request_token, request):
|
| 438 |
+
"""Invalidates a used request token.
|
| 439 |
+
|
| 440 |
+
:param client_key: The client/consumer key.
|
| 441 |
+
:param request_token: The request token string.
|
| 442 |
+
:param request: OAuthlib request.
|
| 443 |
+
:type request: oauthlib.common.Request
|
| 444 |
+
:returns: None
|
| 445 |
+
|
| 446 |
+
Per `Section 2.3`_ of the spec:
|
| 447 |
+
|
| 448 |
+
"The server MUST (...) ensure that the temporary
|
| 449 |
+
credentials have not expired or been used before."
|
| 450 |
+
|
| 451 |
+
.. _`Section 2.3`: https://tools.ietf.org/html/rfc5849#section-2.3
|
| 452 |
+
|
| 453 |
+
This method should ensure that provided token won't validate anymore.
|
| 454 |
+
It can be simply removing RequestToken from storage or setting
|
| 455 |
+
specific flag that makes it invalid (note that such flag should be
|
| 456 |
+
also validated during request token validation).
|
| 457 |
+
|
| 458 |
+
This method is used by
|
| 459 |
+
|
| 460 |
+
* AccessTokenEndpoint
|
| 461 |
+
"""
|
| 462 |
+
raise self._subclass_must_implement("invalidate_request_token")
|
| 463 |
+
|
| 464 |
+
def validate_client_key(self, client_key, request):
|
| 465 |
+
"""Validates that supplied client key is a registered and valid client.
|
| 466 |
+
|
| 467 |
+
:param client_key: The client/consumer key.
|
| 468 |
+
:param request: OAuthlib request.
|
| 469 |
+
:type request: oauthlib.common.Request
|
| 470 |
+
:returns: True or False
|
| 471 |
+
|
| 472 |
+
Note that if the dummy client is supplied it should validate in same
|
| 473 |
+
or nearly the same amount of time as a valid one.
|
| 474 |
+
|
| 475 |
+
Ensure latency inducing tasks are mimicked even for dummy clients.
|
| 476 |
+
For example, use::
|
| 477 |
+
|
| 478 |
+
from your_datastore import Client
|
| 479 |
+
try:
|
| 480 |
+
return Client.exists(client_key, access_token)
|
| 481 |
+
except DoesNotExist:
|
| 482 |
+
return False
|
| 483 |
+
|
| 484 |
+
Rather than::
|
| 485 |
+
|
| 486 |
+
from your_datastore import Client
|
| 487 |
+
if access_token == self.dummy_access_token:
|
| 488 |
+
return False
|
| 489 |
+
else:
|
| 490 |
+
return Client.exists(client_key, access_token)
|
| 491 |
+
|
| 492 |
+
This method is used by
|
| 493 |
+
|
| 494 |
+
* AccessTokenEndpoint
|
| 495 |
+
* RequestTokenEndpoint
|
| 496 |
+
* ResourceEndpoint
|
| 497 |
+
* SignatureOnlyEndpoint
|
| 498 |
+
"""
|
| 499 |
+
raise self._subclass_must_implement("validate_client_key")
|
| 500 |
+
|
| 501 |
+
def validate_request_token(self, client_key, token, request):
|
| 502 |
+
"""Validates that supplied request token is registered and valid.
|
| 503 |
+
|
| 504 |
+
:param client_key: The client/consumer key.
|
| 505 |
+
:param token: The request token string.
|
| 506 |
+
:param request: OAuthlib request.
|
| 507 |
+
:type request: oauthlib.common.Request
|
| 508 |
+
:returns: True or False
|
| 509 |
+
|
| 510 |
+
Note that if the dummy request_token is supplied it should validate in
|
| 511 |
+
the same or nearly the same amount of time as a valid one.
|
| 512 |
+
|
| 513 |
+
Ensure latency inducing tasks are mimicked even for dummy clients.
|
| 514 |
+
For example, use::
|
| 515 |
+
|
| 516 |
+
from your_datastore import RequestToken
|
| 517 |
+
try:
|
| 518 |
+
return RequestToken.exists(client_key, access_token)
|
| 519 |
+
except DoesNotExist:
|
| 520 |
+
return False
|
| 521 |
+
|
| 522 |
+
Rather than::
|
| 523 |
+
|
| 524 |
+
from your_datastore import RequestToken
|
| 525 |
+
if access_token == self.dummy_access_token:
|
| 526 |
+
return False
|
| 527 |
+
else:
|
| 528 |
+
return RequestToken.exists(client_key, access_token)
|
| 529 |
+
|
| 530 |
+
This method is used by
|
| 531 |
+
|
| 532 |
+
* AccessTokenEndpoint
|
| 533 |
+
"""
|
| 534 |
+
raise self._subclass_must_implement("validate_request_token")
|
| 535 |
+
|
| 536 |
+
def validate_access_token(self, client_key, token, request):
|
| 537 |
+
"""Validates that supplied access token is registered and valid.
|
| 538 |
+
|
| 539 |
+
:param client_key: The client/consumer key.
|
| 540 |
+
:param token: The access token string.
|
| 541 |
+
:param request: OAuthlib request.
|
| 542 |
+
:type request: oauthlib.common.Request
|
| 543 |
+
:returns: True or False
|
| 544 |
+
|
| 545 |
+
Note that if the dummy access token is supplied it should validate in
|
| 546 |
+
the same or nearly the same amount of time as a valid one.
|
| 547 |
+
|
| 548 |
+
Ensure latency inducing tasks are mimicked even for dummy clients.
|
| 549 |
+
For example, use::
|
| 550 |
+
|
| 551 |
+
from your_datastore import AccessToken
|
| 552 |
+
try:
|
| 553 |
+
return AccessToken.exists(client_key, access_token)
|
| 554 |
+
except DoesNotExist:
|
| 555 |
+
return False
|
| 556 |
+
|
| 557 |
+
Rather than::
|
| 558 |
+
|
| 559 |
+
from your_datastore import AccessToken
|
| 560 |
+
if access_token == self.dummy_access_token:
|
| 561 |
+
return False
|
| 562 |
+
else:
|
| 563 |
+
return AccessToken.exists(client_key, access_token)
|
| 564 |
+
|
| 565 |
+
This method is used by
|
| 566 |
+
|
| 567 |
+
* ResourceEndpoint
|
| 568 |
+
"""
|
| 569 |
+
raise self._subclass_must_implement("validate_access_token")
|
| 570 |
+
|
| 571 |
+
def validate_timestamp_and_nonce(self, client_key, timestamp, nonce,
|
| 572 |
+
request, request_token=None, access_token=None):
|
| 573 |
+
"""Validates that the nonce has not been used before.
|
| 574 |
+
|
| 575 |
+
:param client_key: The client/consumer key.
|
| 576 |
+
:param timestamp: The ``oauth_timestamp`` parameter.
|
| 577 |
+
:param nonce: The ``oauth_nonce`` parameter.
|
| 578 |
+
:param request_token: Request token string, if any.
|
| 579 |
+
:param access_token: Access token string, if any.
|
| 580 |
+
:param request: OAuthlib request.
|
| 581 |
+
:type request: oauthlib.common.Request
|
| 582 |
+
:returns: True or False
|
| 583 |
+
|
| 584 |
+
Per `Section 3.3`_ of the spec.
|
| 585 |
+
|
| 586 |
+
"A nonce is a random string, uniquely generated by the client to allow
|
| 587 |
+
the server to verify that a request has never been made before and
|
| 588 |
+
helps prevent replay attacks when requests are made over a non-secure
|
| 589 |
+
channel. The nonce value MUST be unique across all requests with the
|
| 590 |
+
same timestamp, client credentials, and token combinations."
|
| 591 |
+
|
| 592 |
+
.. _`Section 3.3`: https://tools.ietf.org/html/rfc5849#section-3.3
|
| 593 |
+
|
| 594 |
+
One of the first validation checks that will be made is for the validity
|
| 595 |
+
of the nonce and timestamp, which are associated with a client key and
|
| 596 |
+
possibly a token. If invalid then immediately fail the request
|
| 597 |
+
by returning False. If the nonce/timestamp pair has been used before and
|
| 598 |
+
you may just have detected a replay attack. Therefore it is an essential
|
| 599 |
+
part of OAuth security that you not allow nonce/timestamp reuse.
|
| 600 |
+
Note that this validation check is done before checking the validity of
|
| 601 |
+
the client and token.::
|
| 602 |
+
|
| 603 |
+
nonces_and_timestamps_database = [
|
| 604 |
+
(u'foo', 1234567890, u'rannoMstrInghere', u'bar')
|
| 605 |
+
]
|
| 606 |
+
|
| 607 |
+
def validate_timestamp_and_nonce(self, client_key, timestamp, nonce,
|
| 608 |
+
request_token=None, access_token=None):
|
| 609 |
+
|
| 610 |
+
return ((client_key, timestamp, nonce, request_token or access_token)
|
| 611 |
+
not in self.nonces_and_timestamps_database)
|
| 612 |
+
|
| 613 |
+
This method is used by
|
| 614 |
+
|
| 615 |
+
* AccessTokenEndpoint
|
| 616 |
+
* RequestTokenEndpoint
|
| 617 |
+
* ResourceEndpoint
|
| 618 |
+
* SignatureOnlyEndpoint
|
| 619 |
+
"""
|
| 620 |
+
raise self._subclass_must_implement("validate_timestamp_and_nonce")
|
| 621 |
+
|
| 622 |
+
def validate_redirect_uri(self, client_key, redirect_uri, request):
|
| 623 |
+
"""Validates the client supplied redirection URI.
|
| 624 |
+
|
| 625 |
+
:param client_key: The client/consumer key.
|
| 626 |
+
:param redirect_uri: The URI to which the client wishes to redirect back after
|
| 627 |
+
authorization is successful.
|
| 628 |
+
:param request: OAuthlib request.
|
| 629 |
+
:type request: oauthlib.common.Request
|
| 630 |
+
:returns: True or False
|
| 631 |
+
|
| 632 |
+
It is highly recommended that OAuth providers require their clients
|
| 633 |
+
to register all redirection URIs prior to using them in requests and
|
| 634 |
+
register them as absolute URIs. See `CWE-601`_ for more information
|
| 635 |
+
about open redirection attacks.
|
| 636 |
+
|
| 637 |
+
By requiring registration of all redirection URIs it should be
|
| 638 |
+
straightforward for the provider to verify whether the supplied
|
| 639 |
+
redirect_uri is valid or not.
|
| 640 |
+
|
| 641 |
+
Alternatively per `Section 2.1`_ of the spec:
|
| 642 |
+
|
| 643 |
+
"If the client is unable to receive callbacks or a callback URI has
|
| 644 |
+
been established via other means, the parameter value MUST be set to
|
| 645 |
+
"oob" (case sensitive), to indicate an out-of-band configuration."
|
| 646 |
+
|
| 647 |
+
.. _`CWE-601`: http://cwe.mitre.org/top25/index.html#CWE-601
|
| 648 |
+
.. _`Section 2.1`: https://tools.ietf.org/html/rfc5849#section-2.1
|
| 649 |
+
|
| 650 |
+
This method is used by
|
| 651 |
+
|
| 652 |
+
* RequestTokenEndpoint
|
| 653 |
+
"""
|
| 654 |
+
raise self._subclass_must_implement("validate_redirect_uri")
|
| 655 |
+
|
| 656 |
+
def validate_requested_realms(self, client_key, realms, request):
|
| 657 |
+
"""Validates that the client may request access to the realm.
|
| 658 |
+
|
| 659 |
+
:param client_key: The client/consumer key.
|
| 660 |
+
:param realms: The list of realms that client is requesting access to.
|
| 661 |
+
:param request: OAuthlib request.
|
| 662 |
+
:type request: oauthlib.common.Request
|
| 663 |
+
:returns: True or False
|
| 664 |
+
|
| 665 |
+
This method is invoked when obtaining a request token and should
|
| 666 |
+
tie a realm to the request token and after user authorization
|
| 667 |
+
this realm restriction should transfer to the access token.
|
| 668 |
+
|
| 669 |
+
This method is used by
|
| 670 |
+
|
| 671 |
+
* RequestTokenEndpoint
|
| 672 |
+
"""
|
| 673 |
+
raise self._subclass_must_implement("validate_requested_realms")
|
| 674 |
+
|
| 675 |
+
def validate_realms(self, client_key, token, request, uri=None,
|
| 676 |
+
realms=None):
|
| 677 |
+
"""Validates access to the request realm.
|
| 678 |
+
|
| 679 |
+
:param client_key: The client/consumer key.
|
| 680 |
+
:param token: A request token string.
|
| 681 |
+
:param request: OAuthlib request.
|
| 682 |
+
:type request: oauthlib.common.Request
|
| 683 |
+
:param uri: The URI the realms is protecting.
|
| 684 |
+
:param realms: A list of realms that must have been granted to
|
| 685 |
+
the access token.
|
| 686 |
+
:returns: True or False
|
| 687 |
+
|
| 688 |
+
How providers choose to use the realm parameter is outside the OAuth
|
| 689 |
+
specification but it is commonly used to restrict access to a subset
|
| 690 |
+
of protected resources such as "photos".
|
| 691 |
+
|
| 692 |
+
realms is a convenience parameter which can be used to provide
|
| 693 |
+
a per view method pre-defined list of allowed realms.
|
| 694 |
+
|
| 695 |
+
Can be as simple as::
|
| 696 |
+
|
| 697 |
+
from your_datastore import RequestToken
|
| 698 |
+
request_token = RequestToken.get(token, None)
|
| 699 |
+
|
| 700 |
+
if not request_token:
|
| 701 |
+
return False
|
| 702 |
+
return set(request_token.realms).issuperset(set(realms))
|
| 703 |
+
|
| 704 |
+
This method is used by
|
| 705 |
+
|
| 706 |
+
* ResourceEndpoint
|
| 707 |
+
"""
|
| 708 |
+
raise self._subclass_must_implement("validate_realms")
|
| 709 |
+
|
| 710 |
+
def validate_verifier(self, client_key, token, verifier, request):
|
| 711 |
+
"""Validates a verification code.
|
| 712 |
+
|
| 713 |
+
:param client_key: The client/consumer key.
|
| 714 |
+
:param token: A request token string.
|
| 715 |
+
:param verifier: The authorization verifier string.
|
| 716 |
+
:param request: OAuthlib request.
|
| 717 |
+
:type request: oauthlib.common.Request
|
| 718 |
+
:returns: True or False
|
| 719 |
+
|
| 720 |
+
OAuth providers issue a verification code to clients after the
|
| 721 |
+
resource owner authorizes access. This code is used by the client to
|
| 722 |
+
obtain token credentials and the provider must verify that the
|
| 723 |
+
verifier is valid and associated with the client as well as the
|
| 724 |
+
resource owner.
|
| 725 |
+
|
| 726 |
+
Verifier validation should be done in near constant time
|
| 727 |
+
(to avoid verifier enumeration). To achieve this we need a
|
| 728 |
+
constant time string comparison which is provided by OAuthLib
|
| 729 |
+
in ``oauthlib.common.safe_string_equals``::
|
| 730 |
+
|
| 731 |
+
from your_datastore import Verifier
|
| 732 |
+
correct_verifier = Verifier.get(client_key, request_token)
|
| 733 |
+
from oauthlib.common import safe_string_equals
|
| 734 |
+
return safe_string_equals(verifier, correct_verifier)
|
| 735 |
+
|
| 736 |
+
This method is used by
|
| 737 |
+
|
| 738 |
+
* AccessTokenEndpoint
|
| 739 |
+
"""
|
| 740 |
+
raise self._subclass_must_implement("validate_verifier")
|
| 741 |
+
|
| 742 |
+
def verify_request_token(self, token, request):
|
| 743 |
+
"""Verify that the given OAuth1 request token is valid.
|
| 744 |
+
|
| 745 |
+
:param token: A request token string.
|
| 746 |
+
:param request: OAuthlib request.
|
| 747 |
+
:type request: oauthlib.common.Request
|
| 748 |
+
:returns: True or False
|
| 749 |
+
|
| 750 |
+
This method is used only in AuthorizationEndpoint to check whether the
|
| 751 |
+
oauth_token given in the authorization URL is valid or not.
|
| 752 |
+
This request is not signed and thus similar ``validate_request_token``
|
| 753 |
+
method can not be used.
|
| 754 |
+
|
| 755 |
+
This method is used by
|
| 756 |
+
|
| 757 |
+
* AuthorizationEndpoint
|
| 758 |
+
"""
|
| 759 |
+
raise self._subclass_must_implement("verify_request_token")
|
| 760 |
+
|
| 761 |
+
def verify_realms(self, token, realms, request):
|
| 762 |
+
"""Verify authorized realms to see if they match those given to token.
|
| 763 |
+
|
| 764 |
+
:param token: An access token string.
|
| 765 |
+
:param realms: A list of realms the client attempts to access.
|
| 766 |
+
:param request: OAuthlib request.
|
| 767 |
+
:type request: oauthlib.common.Request
|
| 768 |
+
:returns: True or False
|
| 769 |
+
|
| 770 |
+
This prevents the list of authorized realms sent by the client during
|
| 771 |
+
the authorization step to be altered to include realms outside what
|
| 772 |
+
was bound with the request token.
|
| 773 |
+
|
| 774 |
+
Can be as simple as::
|
| 775 |
+
|
| 776 |
+
valid_realms = self.get_realms(token)
|
| 777 |
+
return all((r in valid_realms for r in realms))
|
| 778 |
+
|
| 779 |
+
This method is used by
|
| 780 |
+
|
| 781 |
+
* AuthorizationEndpoint
|
| 782 |
+
"""
|
| 783 |
+
raise self._subclass_must_implement("verify_realms")
|
| 784 |
+
|
| 785 |
+
def save_access_token(self, token, request):
|
| 786 |
+
"""Save an OAuth1 access token.
|
| 787 |
+
|
| 788 |
+
:param token: A dict with token credentials.
|
| 789 |
+
:param request: OAuthlib request.
|
| 790 |
+
:type request: oauthlib.common.Request
|
| 791 |
+
|
| 792 |
+
The token dictionary will at minimum include
|
| 793 |
+
|
| 794 |
+
* ``oauth_token`` the access token string.
|
| 795 |
+
* ``oauth_token_secret`` the token specific secret used in signing.
|
| 796 |
+
* ``oauth_authorized_realms`` a space separated list of realms.
|
| 797 |
+
|
| 798 |
+
Client key can be obtained from ``request.client_key``.
|
| 799 |
+
|
| 800 |
+
The list of realms (not joined string) can be obtained from
|
| 801 |
+
``request.realm``.
|
| 802 |
+
|
| 803 |
+
This method is used by
|
| 804 |
+
|
| 805 |
+
* AccessTokenEndpoint
|
| 806 |
+
"""
|
| 807 |
+
raise self._subclass_must_implement("save_access_token")
|
| 808 |
+
|
| 809 |
+
def save_request_token(self, token, request):
|
| 810 |
+
"""Save an OAuth1 request token.
|
| 811 |
+
|
| 812 |
+
:param token: A dict with token credentials.
|
| 813 |
+
:param request: OAuthlib request.
|
| 814 |
+
:type request: oauthlib.common.Request
|
| 815 |
+
|
| 816 |
+
The token dictionary will at minimum include
|
| 817 |
+
|
| 818 |
+
* ``oauth_token`` the request token string.
|
| 819 |
+
* ``oauth_token_secret`` the token specific secret used in signing.
|
| 820 |
+
* ``oauth_callback_confirmed`` the string ``true``.
|
| 821 |
+
|
| 822 |
+
Client key can be obtained from ``request.client_key``.
|
| 823 |
+
|
| 824 |
+
This method is used by
|
| 825 |
+
|
| 826 |
+
* RequestTokenEndpoint
|
| 827 |
+
"""
|
| 828 |
+
raise self._subclass_must_implement("save_request_token")
|
| 829 |
+
|
| 830 |
+
def save_verifier(self, token, verifier, request):
|
| 831 |
+
"""Associate an authorization verifier with a request token.
|
| 832 |
+
|
| 833 |
+
:param token: A request token string.
|
| 834 |
+
:param verifier: A dictionary containing the oauth_verifier and
|
| 835 |
+
oauth_token
|
| 836 |
+
:param request: OAuthlib request.
|
| 837 |
+
:type request: oauthlib.common.Request
|
| 838 |
+
|
| 839 |
+
We need to associate verifiers with tokens for validation during the
|
| 840 |
+
access token request.
|
| 841 |
+
|
| 842 |
+
Note that unlike save_x_token token here is the ``oauth_token`` token
|
| 843 |
+
string from the request token saved previously.
|
| 844 |
+
|
| 845 |
+
This method is used by
|
| 846 |
+
|
| 847 |
+
* AuthorizationEndpoint
|
| 848 |
+
"""
|
| 849 |
+
raise self._subclass_must_implement("save_verifier")
|
evalkit_tf446/lib/python3.10/site-packages/oauthlib/oauth1/rfc5849/signature.py
ADDED
|
@@ -0,0 +1,852 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""
|
| 2 |
+
This module is an implementation of `section 3.4`_ of RFC 5849.
|
| 3 |
+
|
| 4 |
+
**Usage**
|
| 5 |
+
|
| 6 |
+
Steps for signing a request:
|
| 7 |
+
|
| 8 |
+
1. Collect parameters from the request using ``collect_parameters``.
|
| 9 |
+
2. Normalize those parameters using ``normalize_parameters``.
|
| 10 |
+
3. Create the *base string URI* using ``base_string_uri``.
|
| 11 |
+
4. Create the *signature base string* from the above three components
|
| 12 |
+
using ``signature_base_string``.
|
| 13 |
+
5. Pass the *signature base string* and the client credentials to one of the
|
| 14 |
+
sign-with-client functions. The HMAC-based signing functions needs
|
| 15 |
+
client credentials with secrets. The RSA-based signing functions needs
|
| 16 |
+
client credentials with an RSA private key.
|
| 17 |
+
|
| 18 |
+
To verify a request, pass the request and credentials to one of the verify
|
| 19 |
+
functions. The HMAC-based signing functions needs the shared secrets. The
|
| 20 |
+
RSA-based verify functions needs the RSA public key.
|
| 21 |
+
|
| 22 |
+
**Scope**
|
| 23 |
+
|
| 24 |
+
All of the functions in this module should be considered internal to OAuthLib,
|
| 25 |
+
since they are not imported into the "oauthlib.oauth1" module. Programs using
|
| 26 |
+
OAuthLib should not directly invoke any of the functions in this module.
|
| 27 |
+
|
| 28 |
+
**Deprecated functions**
|
| 29 |
+
|
| 30 |
+
The "sign_" methods that are not "_with_client" have been deprecated. They may
|
| 31 |
+
be removed in a future release. Since they are all internal functions, this
|
| 32 |
+
should have no impact on properly behaving programs.
|
| 33 |
+
|
| 34 |
+
.. _`section 3.4`: https://tools.ietf.org/html/rfc5849#section-3.4
|
| 35 |
+
"""
|
| 36 |
+
|
| 37 |
+
import binascii
|
| 38 |
+
import hashlib
|
| 39 |
+
import hmac
|
| 40 |
+
import ipaddress
|
| 41 |
+
import logging
|
| 42 |
+
import urllib.parse as urlparse
|
| 43 |
+
import warnings
|
| 44 |
+
|
| 45 |
+
from oauthlib.common import extract_params, safe_string_equals, urldecode
|
| 46 |
+
|
| 47 |
+
from . import utils
|
| 48 |
+
|
| 49 |
+
log = logging.getLogger(__name__)
|
| 50 |
+
|
| 51 |
+
|
| 52 |
+
# ==== Common functions ==========================================
|
| 53 |
+
|
| 54 |
+
def signature_base_string(
|
| 55 |
+
http_method: str,
|
| 56 |
+
base_str_uri: str,
|
| 57 |
+
normalized_encoded_request_parameters: str) -> str:
|
| 58 |
+
"""
|
| 59 |
+
Construct the signature base string.
|
| 60 |
+
|
| 61 |
+
The *signature base string* is the value that is calculated and signed by
|
| 62 |
+
the client. It is also independently calculated by the server to verify
|
| 63 |
+
the signature, and therefore must produce the exact same value at both
|
| 64 |
+
ends or the signature won't verify.
|
| 65 |
+
|
| 66 |
+
The rules for calculating the *signature base string* are defined in
|
| 67 |
+
`section 3.4.1.1`_ of RFC 5849.
|
| 68 |
+
|
| 69 |
+
.. _`section 3.4.1.1`: https://tools.ietf.org/html/rfc5849#section-3.4.1.1
|
| 70 |
+
"""
|
| 71 |
+
|
| 72 |
+
# The signature base string is constructed by concatenating together,
|
| 73 |
+
# in order, the following HTTP request elements:
|
| 74 |
+
|
| 75 |
+
# 1. The HTTP request method in uppercase. For example: "HEAD",
|
| 76 |
+
# "GET", "POST", etc. If the request uses a custom HTTP method, it
|
| 77 |
+
# MUST be encoded (`Section 3.6`_).
|
| 78 |
+
#
|
| 79 |
+
# .. _`Section 3.6`: https://tools.ietf.org/html/rfc5849#section-3.6
|
| 80 |
+
base_string = utils.escape(http_method.upper())
|
| 81 |
+
|
| 82 |
+
# 2. An "&" character (ASCII code 38).
|
| 83 |
+
base_string += '&'
|
| 84 |
+
|
| 85 |
+
# 3. The base string URI from `Section 3.4.1.2`_, after being encoded
|
| 86 |
+
# (`Section 3.6`_).
|
| 87 |
+
#
|
| 88 |
+
# .. _`Section 3.4.1.2`: https://tools.ietf.org/html/rfc5849#section-3.4.1.2
|
| 89 |
+
# .. _`Section 3.6`: https://tools.ietf.org/html/rfc5849#section-3.6
|
| 90 |
+
base_string += utils.escape(base_str_uri)
|
| 91 |
+
|
| 92 |
+
# 4. An "&" character (ASCII code 38).
|
| 93 |
+
base_string += '&'
|
| 94 |
+
|
| 95 |
+
# 5. The request parameters as normalized in `Section 3.4.1.3.2`_, after
|
| 96 |
+
# being encoded (`Section 3.6`).
|
| 97 |
+
#
|
| 98 |
+
# .. _`Section 3.4.1.3.2`: https://tools.ietf.org/html/rfc5849#section-3.4.1.3.2
|
| 99 |
+
# .. _`Section 3.6`: https://tools.ietf.org/html/rfc5849#section-3.6
|
| 100 |
+
base_string += utils.escape(normalized_encoded_request_parameters)
|
| 101 |
+
|
| 102 |
+
return base_string
|
| 103 |
+
|
| 104 |
+
|
| 105 |
+
def base_string_uri(uri: str, host: str = None) -> str:
    """
    Calculate the *base string URI*.

    The *base string URI* is one of the components that make up the
    *signature base string*.

    The ``host`` is optional. If provided, it is used to override any host and
    port values in the ``uri``. The value for ``host`` is usually extracted
    from the "Host" request header from the HTTP request. Its value may be
    just the hostname, or the hostname followed by a colon and a TCP/IP port
    number (hostname:port). If a value for the ``host`` is provided but it
    does not contain a port number, the default port number is used (i.e. if
    the ``uri`` contained a port number, it will be discarded).

    The rules for calculating the *base string URI* are defined in
    `section 3.4.1.2`_ of RFC 5849.

    .. _`section 3.4.1.2`: https://tools.ietf.org/html/rfc5849#section-3.4.1.2

    :param uri: URI
    :param host: hostname with optional port number, separated by a colon
    :return: base string URI
    :raises ValueError: if ``uri`` is not a str, has no scheme, has no host,
        or carries an out-of-range port number
    """

    if not isinstance(uri, str):
        raise ValueError('uri must be a string.')

    # FIXME: urlparse does not support unicode
    output = urlparse.urlparse(uri)
    scheme = output.scheme
    hostname = output.hostname
    port = output.port
    path = output.path
    params = output.params

    # The scheme, authority, and path of the request resource URI `RFC3986`
    # are included by constructing an "http" or "https" URI representing
    # the request resource (without the query or fragment) as follows:
    #
    # .. _`RFC3986`: https://tools.ietf.org/html/rfc3986

    if not scheme:
        raise ValueError('missing scheme')

    # Per `RFC 2616 section 5.1.2`_:
    #
    # Note that the absolute path cannot be empty; if none is present in
    # the original URI, it MUST be given as "/" (the server root).
    #
    # .. _`RFC 2616 5.1.2`: https://tools.ietf.org/html/rfc2616#section-5.1.2
    if not path:
        path = '/'

    # 1. The scheme and host MUST be in lowercase.
    scheme = scheme.lower()
    # Note: if ``host`` is used, it will be converted to lowercase below
    if hostname is not None:
        hostname = hostname.lower()

    # 2. The host and port values MUST match the content of the HTTP
    #    request "Host" header field.
    if host is not None:
        # NOTE: override value in uri with provided host
        # Host argument is equal to netloc. It means it's missing scheme.
        # Add it back, before parsing.

        host = host.lower()
        host = f"{scheme}://{host}"
        output = urlparse.urlparse(host)
        hostname = output.hostname
        port = output.port

    # 3. The port MUST be included if it is not the default port for the
    #    scheme, and MUST be excluded if it is the default.  Specifically,
    #    the port MUST be excluded when making an HTTP request `RFC2616`_
    #    to port 80 or when making an HTTPS request `RFC2818`_ to port 443.
    #    All other non-default port numbers MUST be included.
    #
    # .. _`RFC2616`: https://tools.ietf.org/html/rfc2616
    # .. _`RFC2818`: https://tools.ietf.org/html/rfc2818

    if hostname is None:
        raise ValueError('missing host')

    # NOTE: Try guessing if we're dealing with IP or hostname
    try:
        hostname = ipaddress.ip_address(hostname)
    except ValueError:
        pass

    # An IPv6 literal must be bracketed when placed in the authority.
    if isinstance(hostname, ipaddress.IPv6Address):
        hostname = f"[{hostname}]"
    elif isinstance(hostname, ipaddress.IPv4Address):
        hostname = f"{hostname}"

    if port is not None and not (0 < port <= 65535):
        raise ValueError('port out of range')  # 16-bit unsigned ints
    if (scheme, port) in (('http', 80), ('https', 443)):
        netloc = hostname  # default port for scheme: exclude port num
    elif port:
        netloc = f"{hostname}:{port}"  # use hostname:port
    else:
        netloc = hostname

    v = urlparse.urlunparse((scheme, netloc, path, params, '', ''))

    # RFC 5849 does not specify which characters are encoded in the
    # "base string URI", nor how they are encoded - which is very bad, since
    # the signatures won't match if there are any differences. Fortunately,
    # most URIs only use characters that are clearly not encoded (e.g. digits
    # and A-Z, a-z), so have avoided any differences between implementations.
    #
    # The example from its section 3.4.1.2 illustrates that spaces in
    # the path are percent encoded. But it provides no guidance as to what
    # other characters (if any) must be encoded (nor how); nor if characters
    # in the other components are to be encoded or not.
    #
    # This implementation **assumes** that **only** the space is
    # percent-encoded and it is done to the entire value (not just to spaces
    # in the path).
    #
    # This code may need to be changed if it is discovered that other
    # characters are expected to be encoded.
    #
    # Note: the "base string URI" returned by this function will be encoded
    # again before being concatenated into the "signature base string". So any
    # spaces in the URI will actually appear in the "signature base string"
    # as "%2520" (the "%20" further encoded according to section 3.6).

    return v.replace(' ', '%20')
|
| 235 |
+
|
| 236 |
+
|
| 237 |
+
def collect_parameters(uri_query='', body=None, headers=None,
                       exclude_oauth_signature=True, with_realm=False):
    """
    Gather the request parameters from all the parameter sources.

    This function is used to extract all the parameters, which are then passed
    to ``normalize_parameters`` to produce one of the components that make up
    the *signature base string*.

    Parameters starting with `oauth_` will be unescaped.

    Body parameters must be supplied as a dict, a list of 2-tuples, or a
    form encoded query string.

    Headers must be supplied as a dict.

    The rules where the parameters must be sourced from are defined in
    `section 3.4.1.3.1`_ of RFC 5849.

    .. _`Sec 3.4.1.3.1`: https://tools.ietf.org/html/rfc5849#section-3.4.1.3.1
    """
    headers = headers or {}
    collected = []

    # Source 1: the query component of the request URI, parsed as an
    # "application/x-www-form-urlencoded" string (RFC 3986, section 3.4).
    if uri_query:
        collected.extend(urldecode(uri_query))

    # Source 2: the OAuth "Authorization" header, if present. The "realm"
    # parameter is excluded unless ``with_realm`` is set (RFC 5849,
    # section 3.5.1).
    if headers:
        lowered = {name.lower(): value for name, value in headers.items()}
        auth_header = lowered.get('authorization')
        if auth_header is not None:
            for pair in utils.parse_authorization_header(auth_header):
                if with_realm or pair[0] != 'realm':
                    collected.append(pair)

    # Source 3: the request entity-body, when it is a single-part
    # "application/x-www-form-urlencoded" payload.
    # TODO: enforce header param inclusion conditions
    collected.extend(extract_params([] if body is None else body) or [])

    # Protocol parameters ("oauth_" prefix) must be unescaped before signing.
    prepared = [
        (name, utils.unescape(value) if name.startswith('oauth_') else value)
        for name, value in collected
    ]

    # The "oauth_signature" parameter MUST be excluded from the signature
    # base string if present.
    if exclude_oauth_signature:
        prepared = [pair for pair in prepared if pair[0] != 'oauth_signature']

    return prepared
|
| 322 |
+
|
| 323 |
+
|
| 324 |
+
def normalize_parameters(params) -> str:
    """
    Calculate the normalized request parameters.

    The *normalized request parameters* is one of the components that make up
    the *signature base string*.

    The rules for parameter normalization are defined in
    `section 3.4.1.3.2`_ of RFC 5849:

    1. encode every name and value (section 3.6);
    2. sort by name, then by value, in ascending byte order;
    3. join each pair with "=" (even when the value is empty);
    4. join all pairs with "&".

    .. _`Sec 3.4.1.3.2`: https://tools.ietf.org/html/rfc5849#section-3.4.1.3.2
    """
    # Encoding first, then a plain tuple sort, gives the byte-value ordering
    # the spec requires (ties on name fall through to the value).
    encoded = sorted(
        (utils.escape(name), utils.escape(value)) for name, value in params)

    return '&'.join(f'{name}={value}' for name, value in encoded)
|
| 362 |
+
|
| 363 |
+
|
| 364 |
+
# ==== Common functions for HMAC-based signature methods =========
|
| 365 |
+
|
| 366 |
+
def _sign_hmac(hash_algorithm_name: str,
               sig_base_str: str,
               client_secret: str,
               resource_owner_secret: str):
    """
    Calculate the signature for an HMAC-based signature method.

    Supports the "HMAC-SHA1", "HMAC-SHA256" and "HMAC-SHA512" signature
    methods; ``hash_algorithm_name`` selects the digest. The signature is
    computed per `section 3.4.2`_ of RFC 5849::

        digest = HMAC-<hash> (key, text)

    where *text* is the signature base string (`Section 3.4.1.1`_) and *key*
    is the encoded client shared-secret and encoded token shared-secret
    joined by "&" (the "&" is mandatory even when either secret is empty).

    :param hash_algorithm_name: one of "SHA-1", "SHA-256" or "SHA-512"
    :param sig_base_str: the signature base string
    :param client_secret: client shared-secret (may be None or empty)
    :param resource_owner_secret: token shared-secret (may be None or empty)
    :return: base64-encoded digest as a str (per RFC 2045, section 6.8)
    :raises KeyError: if ``hash_algorithm_name`` is not recognized

    .. _`Section 3.4.1.1`: https://tools.ietf.org/html/rfc5849#section-3.4.1.1
    .. _`section 3.4.2`: https://tools.ietf.org/html/rfc5849#section-3.4.2
    """

    # text is set to the value of the signature base string from
    # Section 3.4.1.1.
    text = sig_base_str

    # key is the concatenation of the encoded client shared-secret, an "&"
    # (which MUST be included even when either secret is empty), and the
    # encoded token shared-secret (Section 3.6 encoding).
    key = utils.escape(client_secret or '')
    key += '&'
    key += utils.escape(resource_owner_secret or '')

    # Map the algorithm name onto the hashlib constructor.
    m = {
        'SHA-1': hashlib.sha1,
        'SHA-256': hashlib.sha256,
        'SHA-512': hashlib.sha512,
    }
    hash_alg = m[hash_algorithm_name]

    # HMAC operates on bytes, so both key and text are encoded as UTF-8.
    key_utf8 = key.encode('utf-8')
    text_utf8 = text.encode('utf-8')
    signature = hmac.new(key_utf8, text_utf8, hash_alg)

    # The digest is base64-encoded per RFC 2045, section 6.8; b2a_base64
    # appends a trailing "\n" which [:-1] removes, then decode to a str.
    #
    # .. _`RFC2045, Sec 6.8`: https://tools.ietf.org/html/rfc2045#section-6.8
    return binascii.b2a_base64(signature.digest())[:-1].decode('utf-8')
|
| 429 |
+
|
| 430 |
+
|
| 431 |
+
def _verify_hmac(hash_algorithm_name: str,
                 request,
                 client_secret=None,
                 resource_owner_secret=None):
    """Verify an HMAC signature (any of the supported hash algorithms).

    Per `section 3.4`_ of the spec.

    .. _`section 3.4`: https://tools.ietf.org/html/rfc5849#section-3.4

    To satisfy `RFC2616 section 5.2`_ item 1, the request argument's uri
    attribute MUST be an absolute URI whose netloc part identifies the
    origin server or gateway on which the resource resides. Any Host
    item of the request argument's headers dict attribute will be
    ignored.

    .. _`RFC2616 section 5.2`: https://tools.ietf.org/html/rfc2616#section-5.2
    """
    # Recompute the signature base string from the received request ...
    sig_base_str = signature_base_string(
        request.http_method,
        base_string_uri(request.uri),
        normalize_parameters(request.params))

    # ... sign it with the known secrets ...
    expected = _sign_hmac(hash_algorithm_name, sig_base_str,
                          client_secret, resource_owner_secret)

    # ... and compare in constant time against the signature in the request.
    match = safe_string_equals(expected, request.signature)
    if not match:
        log.debug('Verify HMAC failed: signature base string: %s', sig_base_str)
    return match
|
| 460 |
+
|
| 461 |
+
|
| 462 |
+
# ==== HMAC-SHA1 =================================================
|
| 463 |
+
|
| 464 |
+
def sign_hmac_sha1_with_client(sig_base_str, client):
    """Sign ``sig_base_str`` with HMAC-SHA1 using the client's secrets."""
    return _sign_hmac(
        'SHA-1', sig_base_str,
        client.client_secret, client.resource_owner_secret)
|
| 467 |
+
|
| 468 |
+
|
| 469 |
+
def verify_hmac_sha1(request, client_secret=None, resource_owner_secret=None):
    """Verify an HMAC-SHA1 signature on ``request``."""
    return _verify_hmac(
        'SHA-1', request, client_secret, resource_owner_secret)
|
| 471 |
+
|
| 472 |
+
|
| 473 |
+
def sign_hmac_sha1(base_string, client_secret, resource_owner_secret):
    """
    Deprecated function for calculating a HMAC-SHA1 signature.

    This function has been replaced by invoking ``sign_hmac`` with "SHA-1"
    as the hash algorithm name.

    This function was invoked by sign_hmac_sha1_with_client and
    test_signatures.py, but does any application invoke it directly? If not,
    it can be removed.
    """
    warnings.warn('use sign_hmac_sha1_with_client instead of sign_hmac_sha1',
                  DeprecationWarning)

    # The original implementation accepted both bytes and str for
    # base_string; the signature-base-string builder only ever produces str,
    # but the bytes path is kept for backwards compatibility.
    if isinstance(base_string, bytes):
        base_string = base_string.decode('ascii')

    return _sign_hmac('SHA-1', base_string,
                      client_secret, resource_owner_secret)
|
| 496 |
+
|
| 497 |
+
|
| 498 |
+
# ==== HMAC-SHA256 ===============================================
|
| 499 |
+
|
| 500 |
+
def sign_hmac_sha256_with_client(sig_base_str, client):
    """Sign ``sig_base_str`` with HMAC-SHA256 using the client's secrets."""
    return _sign_hmac(
        'SHA-256', sig_base_str,
        client.client_secret, client.resource_owner_secret)
|
| 503 |
+
|
| 504 |
+
|
| 505 |
+
def verify_hmac_sha256(request, client_secret=None, resource_owner_secret=None):
    """Verify an HMAC-SHA256 signature on ``request``."""
    return _verify_hmac(
        'SHA-256', request, client_secret, resource_owner_secret)
|
| 508 |
+
|
| 509 |
+
|
| 510 |
+
def sign_hmac_sha256(base_string, client_secret, resource_owner_secret):
    """
    Deprecated function for calculating a HMAC-SHA256 signature.

    This function has been replaced by invoking ``sign_hmac`` with "SHA-256"
    as the hash algorithm name.

    This function was invoked by sign_hmac_sha256_with_client and
    test_signatures.py, but does any application invoke it directly? If not,
    it can be removed.
    """
    warnings.warn(
        'use sign_hmac_sha256_with_client instead of sign_hmac_sha256',
        DeprecationWarning)

    # The original implementation accepted both bytes and str for
    # base_string; the signature-base-string builder only ever produces str,
    # but the bytes path is kept for backwards compatibility.
    if isinstance(base_string, bytes):
        base_string = base_string.decode('ascii')

    return _sign_hmac('SHA-256', base_string,
                      client_secret, resource_owner_secret)
|
| 534 |
+
|
| 535 |
+
|
| 536 |
+
# ==== HMAC-SHA512 ===============================================
|
| 537 |
+
|
| 538 |
+
def sign_hmac_sha512_with_client(sig_base_str: str,
                                 client):
    """Sign ``sig_base_str`` with HMAC-SHA512 using the client's secrets."""
    return _sign_hmac(
        'SHA-512', sig_base_str,
        client.client_secret, client.resource_owner_secret)
|
| 542 |
+
|
| 543 |
+
|
| 544 |
+
def verify_hmac_sha512(request,
                       client_secret: str = None,
                       resource_owner_secret: str = None):
    """Verify an HMAC-SHA512 signature on ``request``."""
    return _verify_hmac(
        'SHA-512', request, client_secret, resource_owner_secret)
|
| 549 |
+
|
| 550 |
+
|
| 551 |
+
# ==== Common functions for RSA-based signature methods ==========
|
| 552 |
+
|
| 553 |
+
_jwt_rsa = {}  # cache of PyJWT RSAAlgorithm objects, keyed by hash algorithm name; populated lazily by _get_jwt_rsa_algorithm
|
| 554 |
+
|
| 555 |
+
|
| 556 |
+
def _get_jwt_rsa_algorithm(hash_algorithm_name: str):
    """
    Obtains an RSAAlgorithm object that implements RSA with the hash algorithm.

    This method maintains the ``_jwt_rsa`` cache.

    Returns a jwt.algorithm.RSAAlgorithm.
    """
    if hash_algorithm_name not in _jwt_rsa:
        # Cache miss: instantiate a new RSAAlgorithm.
        # PyJWT has some nice pycrypto/cryptography abstractions.
        import jwt.algorithms as jwt_algorithms
        digests = {
            'SHA-1': jwt_algorithms.hashes.SHA1,
            'SHA-256': jwt_algorithms.hashes.SHA256,
            'SHA-512': jwt_algorithms.hashes.SHA512,
        }
        _jwt_rsa[hash_algorithm_name] = jwt_algorithms.RSAAlgorithm(
            digests[hash_algorithm_name])

    return _jwt_rsa[hash_algorithm_name]
|
| 582 |
+
|
| 583 |
+
|
| 584 |
+
def _prepare_key_plus(alg, keystr):
|
| 585 |
+
"""
|
| 586 |
+
Prepare a PEM encoded key (public or private), by invoking the `prepare_key`
|
| 587 |
+
method on alg with the keystr.
|
| 588 |
+
|
| 589 |
+
The keystr should be a string or bytes. If the keystr is bytes, it is
|
| 590 |
+
decoded as UTF-8 before being passed to prepare_key. Otherwise, it
|
| 591 |
+
is passed directly.
|
| 592 |
+
"""
|
| 593 |
+
if isinstance(keystr, bytes):
|
| 594 |
+
keystr = keystr.decode('utf-8')
|
| 595 |
+
return alg.prepare_key(keystr)
|
| 596 |
+
|
| 597 |
+
|
| 598 |
+
def _sign_rsa(hash_algorithm_name: str,
              sig_base_str: str,
              rsa_private_key: str):
    """
    Calculate the signature for an RSA-based signature method.

    The hash named by ``hash_algorithm_name`` is used to digest the signature
    base string. For the "RSA-SHA1" signature method that hash must be SHA-1.
    While OAuth 1.0a only defines RSA-SHA1, this function also serves
    non-standard signature methods that differ from RSA-SHA1 only by the
    digest algorithm.

    Signing for the RSA-SHA1 signature method is defined in `section 3.4.3`_
    of RFC 5849, using the RSASSA-PKCS1-v1_5 algorithm of
    `RFC3447, Section 8.2`_ (PKCS#1) with the chosen hash as the
    EMSA-PKCS1-v1_5 hash function. The client MUST have established client
    credentials with the server that included its RSA public key.

    .. _`section 3.4.3`: https://tools.ietf.org/html/rfc5849#section-3.4.3
    .. _`RFC3447, Section 8.2`: https://tools.ietf.org/html/rfc3447#section-8.2
    """
    # Look up the RSA-with-hash implementation.
    alg = _get_jwt_rsa_algorithm(hash_algorithm_name)

    # A private key is mandatory for signing.
    if not rsa_private_key:
        raise ValueError('rsa_private_key required for RSA with ' +
                         alg.hash_alg.name + ' signature method')

    # Convert the signature base string to bytes (M). It is defined to
    # contain only printable US-ASCII, so strict 'ascii' encoding always
    # succeeds for a correctly built base string and doubles as a sanity
    # check (raising UnicodeError otherwise).
    message = sig_base_str.encode('ascii')

    # S = RSASSA-PKCS1-V1_5-SIGN (K, M)
    signature = alg.sign(message, _prepare_key_plus(alg, rsa_private_key))

    # Base64-encode per RFC 2045 section 6.8 (b2a_base64's RFC 3548 output
    # is identical here). b2a_base64 appends "\n" — [:-1] strips it — and
    # returns bytes, so decode to a printable US-ASCII str.
    return binascii.b2a_base64(signature)[:-1].decode('ascii')
|
| 658 |
+
|
| 659 |
+
|
| 660 |
+
def _verify_rsa(hash_algorithm_name: str,
                request,
                rsa_public_key: str):
    """
    Verify a base64 encoded signature for a RSA-based signature method.

    The hash named by ``hash_algorithm_name`` is used to digest the signature
    base string. For the "RSA-SHA1" signature method that hash must be SHA-1.
    While OAuth 1.0a only defines the RSA-SHA1 signature method, this function
    can be used for other non-standard signature methods that only differ
    from RSA-SHA1 by the digest algorithm.

    Verification for the RSA-SHA1 signature method is defined in
    `section 3.4.3`_ of RFC 5849.

    .. _`section 3.4.3`: https://tools.ietf.org/html/rfc5849#section-3.4.3

    To satisfy `RFC2616 section 5.2`_ item 1, the request argument's uri
    attribute MUST be an absolute URI whose netloc part identifies the
    origin server or gateway on which the resource resides. Any Host
    item of the request argument's headers dict attribute will be
    ignored.

    .. _`RFC2616 Sec 5.2`: https://tools.ietf.org/html/rfc2616#section-5.2
    """

    try:
        # Calculate the *signature base string* of the actual received request

        norm_params = normalize_parameters(request.params)
        bs_uri = base_string_uri(request.uri)
        sig_base_str = signature_base_string(
            request.http_method, bs_uri, norm_params)

        # Obtain the signature that was received in the request

        sig = binascii.a2b_base64(request.signature.encode('ascii'))

        # Get the implementation of RSA-with-hash algorithm to use

        alg = _get_jwt_rsa_algorithm(hash_algorithm_name)

        # Verify the received signature was produced by the private key
        # corresponding to the `rsa_public_key`, signing exact same
        # *signature base string*.
        #
        #     RSASSA-PKCS1-V1_5-VERIFY ((n, e), M, S)

        key = _prepare_key_plus(alg, rsa_public_key)

        # The signature base string only contain printable US-ASCII
        # characters. The ``encode`` method with the default "strict" error
        # handling will raise a ``UnicodeError`` if it can't encode the
        # value. So using "ascii" will always work.

        verify_ok = alg.verify(sig_base_str.encode('ascii'), key, sig)

        if not verify_ok:
            # BUGFIX: the original concatenated the base string onto a '%s'
            # format string instead of passing it as a lazy argument, so the
            # placeholder was never substituted. Use %-style lazy args.
            log.debug('Verify failed: RSA with %s: signature base string=%s',
                      alg.hash_alg.name, sig_base_str)
        return verify_ok

    except UnicodeError:
        # A properly encoded signature will only contain printable US-ASCII
        # characters. The ``encode`` method with the default "strict" error
        # handling will raise a ``UnicodeError`` if it can't decode the value.
        # So using "ascii" will work with all valid signatures. But an
        # incorrectly or maliciously produced signature could contain other
        # bytes.
        #
        # This implementation treats that situation as equivalent to the
        # signature verification having failed.
        #
        # Note: simply changing the encode to use 'utf-8' will not remove this
        # case, since an incorrect or malicious request can contain bytes
        # which are invalid as UTF-8.
        return False
|
| 737 |
+
|
| 738 |
+
|
| 739 |
+
# ==== RSA-SHA1 ==================================================
|
| 740 |
+
|
| 741 |
+
def sign_rsa_sha1_with_client(sig_base_str, client):
    """Sign ``sig_base_str`` with RSA-SHA1 using the client's RSA key."""
    # Historically this function accepted both str and bytes; that behaviour
    # is preserved here, though the newer sign_rsa_sha256_with_client and
    # sign_rsa_sha512_with_client functions accept only str. The function
    # that builds a "signature base string" always produces a string, so it
    # is not clear why support for bytes would ever be needed.
    if isinstance(sig_base_str, bytes):
        sig_base_str = sig_base_str.decode('ascii')

    return _sign_rsa('SHA-1', sig_base_str, client.rsa_key)
|
| 752 |
+
|
| 753 |
+
|
| 754 |
+
def verify_rsa_sha1(request, rsa_public_key: str):
    """Verify an RSA-SHA1 signature on ``request``."""
    return _verify_rsa('SHA-1', request, rsa_public_key)
|
| 756 |
+
|
| 757 |
+
|
| 758 |
+
def sign_rsa_sha1(base_string, rsa_private_key):
    """
    Deprecated function for calculating a RSA-SHA1 signature.

    This function has been replaced by invoking ``sign_rsa`` with "SHA-1"
    as the hash algorithm name.

    This function was invoked by sign_rsa_sha1_with_client and
    test_signatures.py, but does any application invoke it directly? If not,
    it can be removed.
    """
    warnings.warn('use _sign_rsa("SHA-1", ...) instead of sign_rsa_sha1',
                  DeprecationWarning)

    # Accept bytes for backwards compatibility with the old interface.
    base_string = base_string.decode('ascii') \
        if isinstance(base_string, bytes) else base_string

    return _sign_rsa('SHA-1', base_string, rsa_private_key)
|
| 776 |
+
|
| 777 |
+
|
| 778 |
+
# ==== RSA-SHA256 ================================================
|
| 779 |
+
|
| 780 |
+
def sign_rsa_sha256_with_client(sig_base_str: str, client):
    """Sign ``sig_base_str`` with RSA-SHA256 using the client's RSA key."""
    return _sign_rsa('SHA-256', sig_base_str, client.rsa_key)
|
| 782 |
+
|
| 783 |
+
|
| 784 |
+
def verify_rsa_sha256(request, rsa_public_key: str):
    """Verify an RSA-SHA256 signature on ``request``."""
    return _verify_rsa('SHA-256', request, rsa_public_key)
|
| 786 |
+
|
| 787 |
+
|
| 788 |
+
# ==== RSA-SHA512 ================================================
|
| 789 |
+
|
| 790 |
+
def sign_rsa_sha512_with_client(sig_base_str: str, client):
    """Sign ``sig_base_str`` with RSA-SHA512 using the client's RSA key."""
    return _sign_rsa('SHA-512', sig_base_str, client.rsa_key)
|
| 792 |
+
|
| 793 |
+
|
| 794 |
+
def verify_rsa_sha512(request, rsa_public_key: str):
    """Verify an RSA-SHA512 signature on ``request``."""
    return _verify_rsa('SHA-512', request, rsa_public_key)
|
| 796 |
+
|
| 797 |
+
|
| 798 |
+
# ==== PLAINTEXT =================================================
|
| 799 |
+
|
| 800 |
+
def sign_plaintext_with_client(_signature_base_string, client):
    """Produce a PLAINTEXT "signature" from the client's secrets."""
    # The signature base string argument is deliberately unused: with
    # PLAINTEXT the "signature" is just the secrets, not a real signature.
    return sign_plaintext(client.client_secret, client.resource_owner_secret)
|
| 804 |
+
|
| 805 |
+
|
| 806 |
+
def sign_plaintext(client_secret, resource_owner_secret):
    """Sign a request using plaintext.

    Per `section 3.4.4`_ of the spec.

    The "PLAINTEXT" method does not employ a signature algorithm. It
    MUST be used with a transport-layer mechanism such as TLS or SSL (or
    sent over a secure channel with equivalent protections). It does not
    utilize the signature base string or the "oauth_timestamp" and
    "oauth_nonce" parameters.

    .. _`section 3.4.4`: https://tools.ietf.org/html/rfc5849#section-3.4.4

    """
    # The "oauth_signature" protocol parameter is the concatenation of:
    #   1. the client shared-secret, percent-encoded (`Section 3.6`_);
    #   2. a literal "&" (ASCII 38), present even if a secret is empty;
    #   3. the token shared-secret, percent-encoded (`Section 3.6`_).
    #
    # .. _`Section 3.6`: https://tools.ietf.org/html/rfc5849#section-3.6
    encoded_client = utils.escape(client_secret or '')
    encoded_owner = utils.escape(resource_owner_secret or '')

    return '&'.join((encoded_client, encoded_owner))
|
| 839 |
+
|
| 840 |
+
|
| 841 |
+
def verify_plaintext(request, client_secret=None, resource_owner_secret=None):
    """Verify a PLAINTEXT signature.

    Per `section 3.4`_ of the spec.

    .. _`section 3.4`: https://tools.ietf.org/html/rfc5849#section-3.4
    """
    # Recompute the expected value from the secrets and compare with a
    # constant-time equality check to avoid timing side channels.
    expected = sign_plaintext(client_secret, resource_owner_secret)
    ok = safe_string_equals(expected, request.signature)
    if not ok:
        log.debug('Verify PLAINTEXT failed')
    return ok
|