IP : 216.73.216.110
Hostname : 6.87.74.97.host.secureserver.net
Kernel : Linux 6.87.74.97.host.secureserver.net 4.18.0-553.83.1.el8_10.x86_64 #1 SMP Mon Nov 10 04:22:44 EST 2025 x86_64
Disabled Functions : None :)
OS : Linux
PATH: /home/emeraadmin/www/node_modules/jqGrid/../parse-filepath/../array-slice/../../4d695/lib2to3.zip/
[binary data: lib2to3.zip — ZIP archive of compiled CPython 3.6 bytecode from the standard-library lib2to3 package (__pycache__/ entries for refactor, pytree, btm_matcher, btm_utils, fixer_base, __init__ and __main__, each in .pyc, .opt-1.pyc and .opt-2.pyc variants); the dump is truncated mid-archive]
)� used_names�str�next�numbers�add)r�template�namerrr �new_nameis zBaseFix.new_namecCs.|jrd|_|jjd|j�|jj|�dS)NFz### In file %s ###)� first_logr �appendr)r�messagerrr �log_messagetszBaseFix.log_messagecCs>|j�}|j�}d|_d}|j|||f�|r:|j|�dS)aWarn the user that a given chunk of code is not valid Python 3, but that it cannot be converted automatically. First argument is the top-level node for the code in question. Optional second argument is why it can't be converted. �zLine %d: could not convert: %sN)� get_linenoZclone�prefixr&)rr�reason�linenoZ for_output�msgrrr �cannot_convertzszBaseFix.cannot_convertcCs|j�}|jd||f�dS)z�Used for warning the user about possible uncertainty in the translation. First argument is the top-level node for the code in question. Optional second argument is why it can't be converted. zLine %d: %sN)r(r&)rrr*r+rrr �warning�szBaseFix.warningcCs(|j|_|j|�tjd�|_d|_dS)z�Some fixers need to maintain tree-wide state. This method is called once, at the start of tree fix-up. tree - the root node of the tree to be processed. filename - the name of the file the tree came from. rTN)rr� itertools�countrr#)r�treerrrr � start_tree�s zBaseFix.start_treecCsdS)z�Some fixers need to maintain tree-wide state. This method is called once, at the conclusion of tree fix-up. tree - the root node of the tree to be processed. filename - the name of the file the tree came from. Nr)rr1rrrr �finish_tree�szBaseFix.finish_tree)r)N)"�__name__� __module__�__qualname__�__doc__rrrrrr/r0r�setr�orderZexplicitZ run_orderZ_accept_typeZkeep_line_orderZ BM_compatiblerZpython_symbolsZsymsrr rrrr"r&r-r.r2r3rrrr rs4 rcs,eZdZdZdZ�fdd�Zdd�Z�ZS)�ConditionalFixz@ Base class for fixers which not execute if an import is found. Ncstt|�j|�d|_dS)N)�superr:r2�_should_skip)r�args)� __class__rr r2�szConditionalFix.start_treecCsJ|jdk r|jS|jjd�}|d}dj|dd��}t|||�|_|jS)N�.r���r@)r<�skip_on�split�joinr)rrZpkgr!rrr �should_skip�s zConditionalFix.should_skip)r4r5r6r7rAr2rD� __classcell__rr)r>r r:�sr:)r7r/Zpatcomprr'rZ fixer_utilr�objectrr:rrrr �<module>sPK{��\�7M:� � +__pycache__/fixer_base.cpython-36.opt-2.pycnu�[���3 \"�@sPddlZddlmZddlmZddlmZGdd�de�ZGdd �d e�Z dS) �N�)�PatternCompiler)�pygram)�does_tree_importc@s�eZdZdZdZdZdZdZej d�Z e�ZdZ dZdZdZdZdZejZdd�Zdd �Zd d�Zdd �Zdd�Zddd�Zdd�Zddd�Zdd�Zdd�Zdd�Z dS)�BaseFixNrZpostF�cCs||_||_|j�dS)N)�options�log�compile_pattern)�selfrr �r�*/usr/lib64/python3.6/lib2to3/fixer_base.py�__init__/szBaseFix.__init__cCs,|jdk r(t�}|j|jdd�\|_|_dS)NT)Z with_tree)�PATTERNrr �pattern�pattern_tree)r�PCrrr r ;s zBaseFix.compile_patterncCs ||_dS)N)�filename)rrrrr �set_filenameFszBaseFix.set_filenamecCsd|i}|jj||�o|S)N�node)r�match)rr�resultsrrr rMs z BaseFix.matchcCs t��dS)N)�NotImplementedError)rrrrrr � transformYszBaseFix.transform�xxx_todo_changemecCs6|}x ||jkr$|tt|j��}qW|jj|�|S)N)� used_names�str�next�numbers�add)r�template�namerrr �new_nameis zBaseFix.new_namecCs.|jrd|_|jjd|j�|jj|�dS)NFz### In file %s ###)� first_logr �appendr)r�messagerrr �log_messagetszBaseFix.log_messagecCs>|j�}|j�}d|_d}|j|||f�|r:|j|�dS)N�zLine %d: could not convert: %s)� get_linenoZclone�prefixr&)rr�reason�linenoZ for_output�msgrrr �cannot_convertzszBaseFix.cannot_convertcCs|j�}|jd||f�dS)NzLine %d: %s)r(r&)rrr*r+rrr �warning�szBaseFix.warningcCs(|j|_|j|�tjd�|_d|_dS)NrT)rr� itertools�countrr#)r�treerrrr � start_tree�s zBaseFix.start_treecCsdS)Nr)rr1rrrr �finish_tree�szBaseFix.finish_tree)r)N)!�__name__� __module__�__qualname__rrrrrr/r0r�setr�orderZexplicitZ run_orderZ_accept_typeZkeep_line_orderZ BM_compatiblerZpython_symbolsZsymsrr 
rrrr"r&r-r.r2r3rrrr rs2 rcs(eZdZdZ�fdd�Zdd�Z�ZS)�ConditionalFixNcstt|�j|�d|_dS)N)�superr9r2�_should_skip)r�args)� __class__rr r2�szConditionalFix.start_treecCsJ|jdk r|jS|jjd�}|d}dj|dd��}t|||�|_|jS)N�.r���r?)r;�skip_on�split�joinr)rrZpkgr!rrr �should_skip�s zConditionalFix.should_skip)r4r5r6r@r2rC� __classcell__rr)r=r r9�sr9) r/Zpatcomprr'rZ fixer_utilr�objectrr9rrrr �<module>sPK{��\))]QQ%__pycache__/fixer_base.cpython-36.pycnu�[���3 \"�@sTdZddlZddlmZddlmZddlmZGdd�de�Z Gd d �d e �Z dS)z2Base class for fixers (optional, but recommended).�N�)�PatternCompiler)�pygram)�does_tree_importc@s�eZdZdZdZdZdZdZdZe j d�Ze�Z dZdZdZdZdZdZejZdd�Zd d �Zdd�Zd d�Zdd�Zddd�Zdd�Zddd�Zdd�Zdd�Z dd�Z!dS) �BaseFixaOptional base class for fixers. The subclass name must be FixFooBar where FooBar is the result of removing underscores and capitalizing the words of the fix name. For example, the class name for a fixer named 'has_key' should be FixHasKey. NrZpostF�cCs||_||_|j�dS)aInitializer. Subclass may override. Args: options: a dict containing the options passed to RefactoringTool that could be used to customize the fixer through the command line. log: a list to append warnings and other messages to. N)�options�log�compile_pattern)�selfrr �r�*/usr/lib64/python3.6/lib2to3/fixer_base.py�__init__/szBaseFix.__init__cCs,|jdk r(t�}|j|jdd�\|_|_dS)z�Compiles self.PATTERN into self.pattern. Subclass may override if it doesn't want to use self.{pattern,PATTERN} in .match(). NT)Z with_tree)�PATTERNrr �pattern�pattern_tree)r�PCrrr r ;s zBaseFix.compile_patterncCs ||_dS)zOSet the filename. The main refactoring tool should call this. N)�filename)rrrrr �set_filenameFszBaseFix.set_filenamecCsd|i}|jj||�o|S)aReturns match for a given parse tree node. Should return a true or false object (not necessarily a bool). It may return a non-empty dict of matching sub-nodes as returned by a matching pattern. Subclass may override. �node)r�match)rr�resultsrrr rMs z BaseFix.matchcCs t��dS)a�Returns the transformation for a given parse tree node. Args: node: the root of the parse tree that matched the fixer. results: a dict mapping symbolic names to part of the match. Returns: None, or a node that is a modified copy of the argument node. The node argument may also be modified in-place to effect the same change. Subclass *must* override. N)�NotImplementedError)rrrrrr � transformYszBaseFix.transform�xxx_todo_changemecCs6|}x ||jkr$|tt|j��}qW|jj|�|S)z�Return a string suitable for use as an identifier The new name is guaranteed not to conflict with other identifiers. )� used_names�str�next�numbers�add)r�template�namerrr �new_nameis zBaseFix.new_namecCs.|jrd|_|jjd|j�|jj|�dS)NFz### In file %s ###)� first_logr �appendr)r�messagerrr �log_messagetszBaseFix.log_messagecCs>|j�}|j�}d|_d}|j|||f�|r:|j|�dS)aWarn the user that a given chunk of code is not valid Python 3, but that it cannot be converted automatically. First argument is the top-level node for the code in question. Optional second argument is why it can't be converted. �zLine %d: could not convert: %sN)� get_linenoZclone�prefixr&)rr�reason�linenoZ for_output�msgrrr �cannot_convertzszBaseFix.cannot_convertcCs|j�}|jd||f�dS)z�Used for warning the user about possible uncertainty in the translation. First argument is the top-level node for the code in question. Optional second argument is why it can't be converted. zLine %d: %sN)r(r&)rrr*r+rrr �warning�szBaseFix.warningcCs(|j|_|j|�tjd�|_d|_dS)z�Some fixers need to maintain tree-wide state. This method is called once, at the start of tree fix-up. 
tree - the root node of the tree to be processed. filename - the name of the file the tree came from. rTN)rr� itertools�countrr#)r�treerrrr � start_tree�s zBaseFix.start_treecCsdS)z�Some fixers need to maintain tree-wide state. This method is called once, at the conclusion of tree fix-up. tree - the root node of the tree to be processed. filename - the name of the file the tree came from. Nr)rr1rrrr �finish_tree�szBaseFix.finish_tree)r)N)"�__name__� __module__�__qualname__�__doc__rrrrrr/r0r�setr�orderZexplicitZ run_orderZ_accept_typeZkeep_line_orderZ BM_compatiblerZpython_symbolsZsymsrr rrrr"r&r-r.r2r3rrrr rs4 rcs,eZdZdZdZ�fdd�Zdd�Z�ZS)�ConditionalFixz@ Base class for fixers which not execute if an import is found. Ncstt|�j|�d|_dS)N)�superr:r2�_should_skip)r�args)� __class__rr r2�szConditionalFix.start_treecCsJ|jdk r|jS|jjd�}|d}dj|dd��}t|||�|_|jS)N�.r���r@)r<�skip_on�split�joinr)rrZpkgr!rrr �should_skip�s zConditionalFix.should_skip)r4r5r6r7rAr2rD� __classcell__rr)r>r r:�sr:)r7r/Zpatcomprr'rZ fixer_utilr�objectrr:rrrr �<module>sPK{��\/?d�//+__pycache__/fixer_util.cpython-36.opt-1.pycnu�[���3 \g;� @s�dZddlmZddlmZmZddlmZddl m Z dd�Zdd �Zd d�Z dd �ZdWdd�Zdd�Zdd�Zdd�Ze�e �fdd�ZdXdd�Zdd�Zdd�ZdYdd �Zd!d"�ZdZd#d$�Zd[d%d&�Zd'd(�Zd)d*�Zd+d,�Zd-d.�Zd/d0�Zd1d2d3d4d5d6d7d8d9d:h Z d;d<�Z!d=a"d>a#d?a$d@a%dAdB�Z&dCdD�Z'dEdF�Z(dGdH�Z)dIdJ�Z*dKdL�Z+dMdN�Z,dOdP�Z-ej.ej/hZ0d\dQdR�Z1ej/ej.ej2hZ3dSdT�Z4d]dUdV�Z5dS)^z1Utility functions, node construction macros, etc.�)�token)�Leaf�Node)�python_symbols)�patcompcCsttj|ttjd�|g�S)N�=)r�symsZargumentrr�EQUAL)�keyword�value�r�*/usr/lib64/python3.6/lib2to3/fixer_util.py� KeywordArgsrcCsttjd�S)N�()rr�LPARrrrr �LParensrcCsttjd�S)N�))rr�RPARrrrr �RParensrcCsHt|t�s|g}t|t�s&d|_|g}ttj|ttjddd�g|�S)zBuild an assignment statement� r)�prefix) � isinstance�listrrr�atomrrr )�target�sourcerrr �Assigns rNcCsttj||d�S)zReturn a NAME leaf)r)rr�NAME)�namerrrr �Name$srcCs|ttjt�|g�gS)zA node tuple for obj.attr)rr�trailer�Dot)�obj�attrrrr �Attr(sr$cCsttjd�S)zA comma leaf�,)rr�COMMArrrr �Comma,sr'cCsttjd�S)zA period (.) leaf�.)rr�DOTrrrr r!0sr!cCs4ttj|j�|j�g�}|r0|jdttj|��|S)z-A parenthesised argument list, used by Call()r)rrr �clone�insert_child�arglist)�argsZlparenZrparen�noderrr �ArgList4sr/cCs&ttj|t|�g�}|dk r"||_|S)zA function callN)rr�powerr/r)Z func_namer-rr.rrr �Call;sr1cCsttjd�S)zA newline literal� )rr�NEWLINErrrr �NewlineBsr4cCsttjd�S)zA blank line�)rrr3rrrr � BlankLineFsr6cCsttj||d�S)N)r)rr�NUMBER)�nrrrr �NumberJsr9cCs"ttjttjd�|ttjd�g�S)zA numeric or string subscript�[�])rrr rr�LBRACE�RBRACE)Z index_noderrr � SubscriptMsr>cCsttj||d�S)z A string leaf)r)rr�STRING)�stringrrrr �StringSsrAc Cs�d|_d|_d|_ttjd�}d|_ttjd�}d|_||||g}|rtd|_ttjd�}d|_|jttj||g��ttj|ttj |�g�}ttj ttjd�|ttjd�g�S)zuA list comprehension of the form [xp for fp in it if test]. If test is None, the "if test" part is omitted. 
r5r�for�in�ifr:r;) rrrr�appendrrZcomp_ifZ listmakerZcomp_forrr<r=) Zxp�fp�itZtestZfor_leafZin_leafZ inner_argsZif_leaf�innerrrr �ListCompWs$ rIcCsZx|D]}|j�qWttjd�ttj|dd�ttjddd�ttj|�g}ttj|�}|S)zO Return an import statement in the form: from package import name_leafs�fromr)r�import)�removerrrrr�import_as_names�import_from)Zpackage_nameZ name_leafsZleaf�children�imprrr � FromImportos rQc Cs�|dj�}|jtjkr"|j�}nttj|j�g�}|d}|rNdd�|D�}ttjtt|d�t|d��ttj|dj�||dj�g�g|�}|j |_ |S) zfReturns an import statement and calls a method of the module: import module module.name()r"�aftercSsg|]}|j��qSr)r*)�.0r8rrr � <listcomp>�sz!ImportAndCall.<locals>.<listcomp>�rZlparZrpar) r*�typerr,rr0r$rr r)r.�results�namesr"Z newarglistrR�newrrr � ImportAndCall�s DrZcCs�t|t�r |jt�t�gkr dSt|t�o�t|j�dko�t|jdt�o�t|jdt�o�t|jdt�o�|jdjdko�|jdjdkS)z(Does the node represent a tuple literal?T�rUr�rr)rrrOrr�lenrr)r.rrr �is_tuple�s r^cCsXt|t�oVt|j�dkoVt|jdt�oVt|jdt�oV|jdjdkoV|jdjdkS)z'Does the node represent a list literal?rrUr:r;���r_)rrr]rOrr)r.rrr �is_list�s r`cCsttjt�|t�g�S)N)rrrrr)r.rrr �parenthesize�sra�sortedr�set�any�all�tuple�sum�min�max� enumerateccs(t||�}x|r"|Vt||�}qWdS)alFollow an attribute chain. If you have a chain of objects where a.foo -> b, b.foo-> c, etc, use this to iterate over all objects in the chain. Iteration is terminated by getattr(x, attr) is None. Args: obj: the starting object attr: the name of the chaining attribute Yields: Each successive object in the chain. N)�getattr)r"r#�nextrrr � attr_chain�s rmzefor_stmt< 'for' any 'in' node=any ':' any* > | comp_for< 'for' any 'in' node=any any* > z� power< ( 'iter' | 'list' | 'tuple' | 'sorted' | 'set' | 'sum' | 'any' | 'all' | 'enumerate' | (any* trailer< '.' 'join' >) ) trailer< '(' node=any ')' > any* > z` power< ( 'sorted' | 'enumerate' ) trailer< '(' arglist<node=any any*> ')' > any* > FcCsrts&tjt�atjt�atjt�adatttg}x<t|t|d��D](\}}i}|j||�rB|d|krBdSqBWdS)a Returns true if node is in an environment where all that is required of it is being iterable (ie, it doesn't matter if it returns a list or an iterator). See test_map_nochange in test_fixers.py for some examples and tests. T�parentr.F) � pats_builtrZcompile_pattern�p0�p1�p2�ziprm�match)r.Zpatterns�patternrnrWrrr �in_special_context�s rvcCs�|j}|dk r|jtjkrdS|j}|jtjtjfkr:dS|jtjkrX|j d|krXdS|jtj ks�|jtjkr�|dk r�|jtjks�|j d|kr�dSdS)zG Check that something isn't an attribute or function name etc. NFrUT) Zprev_siblingrVrr)rnr�funcdef�classdef� expr_stmtrOZ parametersZ typedargslistr&)r.�prevrnrrr �is_probably_builtin�sr{cCsNxH|dk rH|jtjkr@t|j�dkr@|jd}|jtjkr@|jS|j}qWdS)zFind the indentation of *node*.Nr\rr5) rVr�suiter]rOr�INDENTrrn)r.�indentrrr �find_indentations rcCs>|jtjkr|S|j�}|jd}|_ttj|g�}||_|S)N)rVrr|r*rnr)r.rnr|rrr � make_suitesr�cCs(x"|jtjkr"|j}|std��qW|S)zFind the top level namespace.z,root found before file_input node was found.)rVrZ file_inputrn� ValueError)r.rrr � find_root&s r�cCst|t|�|�}t|�S)z� Returns true if name is imported from package at the top level of the tree which node belongs to. To cover the case of an import like 'import foo', use None for the package and 'foo' for the name. 
)�find_bindingr��bool)�packagerr.Zbindingrrr �does_tree_import/sr�cCs|jtjtjfkS)z0Returns true if the node is an import statement.)rVr�import_namerN)r.rrr � is_import7sr�cCs4dd�}t|�}t|||�r dSd}}xTt|j�D]F\}}||�sFq4x(t|j|d��D]\}}||�sZPqZW||}Pq4W|dkr�xDt|j�D]6\}}|jtjkr�|jr�|jdjtjkr�|d}Pq�W|dkr�t tj ttjd�ttj|dd�g�} nt |ttj|dd�g�} | t�g} |j|t tj| ��dS) z\ Works like `does_tree_import` but adds an import statement if it was not imported. cSs |jtjko|jot|jd�S)NrU)rVr�simple_stmtrOr�)r.rrr �is_import_stmt>sz$touch_import.<locals>.is_import_stmtNrUrrKr)r)r�r�rjrOrVrr�rr?rr�rrrQr4r+)r�rr.r��rootZ insert_pos�offset�idxZnode2�import_rOrrr �touch_import;s4 r�cCs��x�|jD�]�}d}|jtjkrVt||jd�r4|St|t|jd�|�}|rR|}�n4|jtjtjfkr�t|t|jd �|�}|r�|}�n|jtj k�rt|t|jd�|�}|r�|}nXx�t |jdd��D]@\}}|jtjko�|j dkr�t|t|j|d�|�}|r�|}q�Wnx|jtk�r6|jdj |k�r6|}nTt|||��rJ|}n@|jtjk�rft|||�}n$|jtjk�r�t||jd��r�|}|r |�s�|St|�r |Sq WdS) z� Returns the node which binds variable name, otherwise None. If optional argument package is supplied, only imports will be returned. See test cases for examples.Nrr\r[�:�rUr_r_)rOrVrZfor_stmt�_findr�r�Zif_stmtZ while_stmtZtry_stmtrjr�COLONr� _def_syms�_is_import_bindingr�ryr�)rr.r��childZretr8�iZkidrrr r�isH r�cCsX|g}xL|rR|j�}|jdkr6|jtkr6|j|j�q|jtjkr|j|kr|SqWdS)N�)�poprV�_block_syms�extendrOrrr)rr.Znodesrrr r��sr�cCs�|jtjkr�|r�|jd}|jtjkrvx�|jD]@}|jtjkrV|jdj|krp|Sq0|jtjkr0|j|kr0|Sq0WnL|jtjkr�|jd}|jtjkr�|j|kr�|Sn|jtjkr�|j|kr�|Sn�|jtj k�r�|r�t |jd�j�|kr�dS|jd}|�rtd|��rdS|jtj k�r.t||��r.|S|jtjk�rf|jd}|jtjk�r�|j|k�r�|Sn6|jtjk�r�|j|k�r�|S|�r�|jtjk�r�|SdS)z� Will reuturn node if node will import name, or node will import * from package. None is returned otherwise. See test cases for examples. rr\Nr[�asr_)rVrr�rOZdotted_as_namesZdotted_as_namerrrrN�str�stripr�rMZimport_as_name�STAR)r.rr�rPr�Zlastr8rrr r��s@ r�)N)NN)N)N)N)N)N)6�__doc__Zpgen2rZpytreerrZpygramrrr5rrrrrrr$r'r!r/r1r4r6r9r>rArIrQrZr^r`raZconsuming_callsrmrprqrrrorvr{rr�r�r�r�r�rxrwr�r�r r�r�r�rrrr �<module>sZ - * PK{��\�"��&�&+__pycache__/fixer_util.cpython-36.opt-2.pycnu�[���3 \g;� @s�ddlmZddlmZmZddlmZddlm Z dd�Z dd�Zd d �Zdd�Z dVdd�Zdd�Zdd�Zdd�Ze�e�fdd�ZdWdd�Zdd�Zdd�ZdXdd�Zd d!�ZdYd"d#�ZdZd$d%�Zd&d'�Zd(d)�Zd*d+�Zd,d-�Zd.d/�Zd0d1d2d3d4d5d6d7d8d9h Zd:d;�Z d<a!d=a"d>a#d?a$d@dA�Z%dBdC�Z&dDdE�Z'dFdG�Z(dHdI�Z)dJdK�Z*dLdM�Z+dNdO�Z,ej-ej.hZ/d[dPdQ�Z0ej.ej-ej1hZ2dRdS�Z3d\dTdU�Z4d S)]�)�token)�Leaf�Node)�python_symbols)�patcompcCsttj|ttjd�|g�S)N�=)r�symsZargumentrr�EQUAL)�keyword�value�r�*/usr/lib64/python3.6/lib2to3/fixer_util.py� KeywordArgsrcCsttjd�S)N�()rr�LPARrrrr �LParensrcCsttjd�S)N�))rr�RPARrrrr �RParensrcCsHt|t�s|g}t|t�s&d|_|g}ttj|ttjddd�g|�S)N� r)�prefix) � isinstance�listrrr�atomrrr )�target�sourcerrr �Assigns rNcCsttj||d�S)N)r)rr�NAME)�namerrrr �Name$srcCs|ttjt�|g�gS)N)rr�trailer�Dot)�obj�attrrrr �Attr(sr$cCsttjd�S)N�,)rr�COMMArrrr �Comma,sr'cCsttjd�S)N�.)rr�DOTrrrr r!0sr!cCs4ttj|j�|j�g�}|r0|jdttj|��|S)Nr)rrr �clone�insert_child�arglist)�argsZlparenZrparen�noderrr �ArgList4sr/cCs&ttj|t|�g�}|dk r"||_|S)N)rr�powerr/r)Z func_namer-rr.rrr �Call;sr1cCsttjd�S)N� )rr�NEWLINErrrr �NewlineBsr4cCsttjd�S)N�)rrr3rrrr � BlankLineFsr6cCsttj||d�S)N)r)rr�NUMBER)�nrrrr �NumberJsr9cCs"ttjttjd�|ttjd�g�S)N�[�])rrr rr�LBRACE�RBRACE)Z index_noderrr � SubscriptMsr>cCsttj||d�S)N)r)rr�STRING)�stringrrrr �StringSsrAc Cs�d|_d|_d|_ttjd�}d|_ttjd�}d|_||||g}|rtd|_ttjd�}d|_|jttj||g��ttj|ttj |�g�}ttj ttjd�|ttjd�g�S)Nr5r�for�in�ifr:r;) rrrr�appendrrZcomp_ifZ listmakerZcomp_forrr<r=) Zxp�fp�itZtestZfor_leafZin_leafZ inner_argsZif_leaf�innerrrr �ListCompWs$ 
rIcCsZx|D]}|j�qWttjd�ttj|dd�ttjddd�ttj|�g}ttj|�}|S)N�fromr)r�import)�removerrrrr�import_as_names�import_from)Zpackage_nameZ name_leafsZleaf�children�imprrr � FromImportos rQc Cs�|dj�}|jtjkr"|j�}nttj|j�g�}|d}|rNdd�|D�}ttjtt|d�t|d��ttj|dj�||dj�g�g|�}|j |_ |S) Nr"�aftercSsg|]}|j��qSr)r*)�.0r8rrr � <listcomp>�sz!ImportAndCall.<locals>.<listcomp>�rZlparZrpar) r*�typerr,rr0r$rr r)r.�results�namesr"Z newarglistrR�newrrr � ImportAndCall�s DrZcCs�t|t�r |jt�t�gkr dSt|t�o�t|j�dko�t|jdt�o�t|jdt�o�t|jdt�o�|jdjdko�|jdjdkS)NT�rUr�rr)rrrOrr�lenrr)r.rrr �is_tuple�s r^cCsXt|t�oVt|j�dkoVt|jdt�oVt|jdt�oV|jdjdkoV|jdjdkS)NrrUr:r;���r_)rrr]rOrr)r.rrr �is_list�s r`cCsttjt�|t�g�S)N)rrrrr)r.rrr �parenthesize�sra�sortedr�set�any�all�tuple�sum�min�max� enumerateccs(t||�}x|r"|Vt||�}qWdS)N)�getattr)r"r#�nextrrr � attr_chain�s rmzefor_stmt< 'for' any 'in' node=any ':' any* > | comp_for< 'for' any 'in' node=any any* > z� power< ( 'iter' | 'list' | 'tuple' | 'sorted' | 'set' | 'sum' | 'any' | 'all' | 'enumerate' | (any* trailer< '.' 'join' >) ) trailer< '(' node=any ')' > any* > z` power< ( 'sorted' | 'enumerate' ) trailer< '(' arglist<node=any any*> ')' > any* > FcCsrts&tjt�atjt�atjt�adatttg}x<t|t|d��D](\}}i}|j||�rB|d|krBdSqBWdS)NT�parentr.F) � pats_builtrZcompile_pattern�p0�p1�p2�ziprm�match)r.Zpatterns�patternrnrWrrr �in_special_context�s rvcCs�|j}|dk r|jtjkrdS|j}|jtjtjfkr:dS|jtjkrX|j d|krXdS|jtj ks�|jtjkr�|dk r�|jtjks�|j d|kr�dSdS)NFrUT) Zprev_siblingrVrr)rnr�funcdef�classdef� expr_stmtrOZ parametersZ typedargslistr&)r.�prevrnrrr �is_probably_builtin�sr{cCsNxH|dk rH|jtjkr@t|j�dkr@|jd}|jtjkr@|jS|j}qWdS)Nr\rr5) rVr�suiter]rOr�INDENTrrn)r.�indentrrr �find_indentations rcCs>|jtjkr|S|j�}|jd}|_ttj|g�}||_|S)N)rVrr|r*rnr)r.rnr|rrr � make_suitesr�cCs(x"|jtjkr"|j}|std��qW|S)Nz,root found before file_input node was found.)rVrZ file_inputrn� ValueError)r.rrr � find_root&s r�cCst|t|�|�}t|�S)N)�find_bindingr��bool)�packagerr.Zbindingrrr �does_tree_import/sr�cCs|jtjtjfkS)N)rVr�import_namerN)r.rrr � is_import7sr�cCs4dd�}t|�}t|||�r dSd}}xTt|j�D]F\}}||�sFq4x(t|j|d��D]\}}||�sZPqZW||}Pq4W|dkr�xDt|j�D]6\}}|jtjkr�|jr�|jdjtjkr�|d}Pq�W|dkr�t tj ttjd�ttj|dd�g�} nt |ttj|dd�g�} | t�g} |j|t tj| ��dS)NcSs |jtjko|jot|jd�S)NrU)rVr�simple_stmtrOr�)r.rrr �is_import_stmt>sz$touch_import.<locals>.is_import_stmtrUrrKr)r)r�r�rjrOrVrr�rr?rr�rrrQr4r+)r�rr.r��rootZ insert_pos�offset�idxZnode2�import_rOrrr �touch_import;s4 r�cCs��x�|jD�]�}d}|jtjkrVt||jd�r4|St|t|jd�|�}|rR|}�n4|jtjtjfkr�t|t|jd�|�}|r�|}�n|jtj k�rt|t|jd�|�}|r�|}nXx�t |jdd��D]@\}}|jtjko�|j dkr�t|t|j|d�|�}|r�|}q�Wnx|jtk�r6|jdj |k�r6|}nTt|||��rJ|}n@|jtjk�rft|||�}n$|jtjk�r�t||jd��r�|}|r |�s�|St|�r |Sq WdS) Nrr\r[�:�rUr_r_)rOrVrZfor_stmt�_findr�r�Zif_stmtZ while_stmtZtry_stmtrjr�COLONr� _def_syms�_is_import_bindingr�ryr�)rr.r��childZretr8�iZkidrrr r�isH r�cCsX|g}xL|rR|j�}|jdkr6|jtkr6|j|j�q|jtjkr|j|kr|SqWdS)N�)�poprV�_block_syms�extendrOrrr)rr.Znodesrrr r��sr�cCs�|jtjkr�|r�|jd}|jtjkrvx�|jD]@}|jtjkrV|jdj|krp|Sq0|jtjkr0|j|kr0|Sq0WnL|jtjkr�|jd}|jtjkr�|j|kr�|Sn|jtjkr�|j|kr�|Sn�|jtj k�r�|r�t |jd�j�|kr�dS|jd}|�rtd|��rdS|jtj k�r.t||��r.|S|jtjk�rf|jd}|jtjk�r�|j|k�r�|Sn6|jtjk�r�|j|k�r�|S|�r�|jtjk�r�|SdS)Nrr\r[�asr_)rVrr�rOZdotted_as_namesZdotted_as_namerrrrN�str�stripr�rMZimport_as_name�STAR)r.rr�rPr�Zlastr8rrr r��s@ r�)N)NN)N)N)N)N)N)5Zpgen2rZpytreerrZpygramrrr5rrrrrrr$r'r!r/r1r4r6r9r>rArIrQrZr^r`raZconsuming_callsrmrprqrrrorvr{rr�r�r�r�r�rxrwr�r�r r�r�r�rrrr �<module>sX - * PK{��\/?d�//%__pycache__/fixer_util.cpython-36.pycnu�[���3 \g;� @s�dZddlmZddlmZmZddlmZddl m Z dd�Zdd �Zd d�Z dd �ZdWdd�Zdd�Zdd�Zdd�Ze�e 
�fdd�ZdXdd�Zdd�Zdd�ZdYdd �Zd!d"�ZdZd#d$�Zd[d%d&�Zd'd(�Zd)d*�Zd+d,�Zd-d.�Zd/d0�Zd1d2d3d4d5d6d7d8d9d:h Z d;d<�Z!d=a"d>a#d?a$d@a%dAdB�Z&dCdD�Z'dEdF�Z(dGdH�Z)dIdJ�Z*dKdL�Z+dMdN�Z,dOdP�Z-ej.ej/hZ0d\dQdR�Z1ej/ej.ej2hZ3dSdT�Z4d]dUdV�Z5dS)^z1Utility functions, node construction macros, etc.�)�token)�Leaf�Node)�python_symbols)�patcompcCsttj|ttjd�|g�S)N�=)r�symsZargumentrr�EQUAL)�keyword�value�r�*/usr/lib64/python3.6/lib2to3/fixer_util.py� KeywordArgsrcCsttjd�S)N�()rr�LPARrrrr �LParensrcCsttjd�S)N�))rr�RPARrrrr �RParensrcCsHt|t�s|g}t|t�s&d|_|g}ttj|ttjddd�g|�S)zBuild an assignment statement� r)�prefix) � isinstance�listrrr�atomrrr )�target�sourcerrr �Assigns rNcCsttj||d�S)zReturn a NAME leaf)r)rr�NAME)�namerrrr �Name$srcCs|ttjt�|g�gS)zA node tuple for obj.attr)rr�trailer�Dot)�obj�attrrrr �Attr(sr$cCsttjd�S)zA comma leaf�,)rr�COMMArrrr �Comma,sr'cCsttjd�S)zA period (.) leaf�.)rr�DOTrrrr r!0sr!cCs4ttj|j�|j�g�}|r0|jdttj|��|S)z-A parenthesised argument list, used by Call()r)rrr �clone�insert_child�arglist)�argsZlparenZrparen�noderrr �ArgList4sr/cCs&ttj|t|�g�}|dk r"||_|S)zA function callN)rr�powerr/r)Z func_namer-rr.rrr �Call;sr1cCsttjd�S)zA newline literal� )rr�NEWLINErrrr �NewlineBsr4cCsttjd�S)zA blank line�)rrr3rrrr � BlankLineFsr6cCsttj||d�S)N)r)rr�NUMBER)�nrrrr �NumberJsr9cCs"ttjttjd�|ttjd�g�S)zA numeric or string subscript�[�])rrr rr�LBRACE�RBRACE)Z index_noderrr � SubscriptMsr>cCsttj||d�S)z A string leaf)r)rr�STRING)�stringrrrr �StringSsrAc Cs�d|_d|_d|_ttjd�}d|_ttjd�}d|_||||g}|rtd|_ttjd�}d|_|jttj||g��ttj|ttj |�g�}ttj ttjd�|ttjd�g�S)zuA list comprehension of the form [xp for fp in it if test]. If test is None, the "if test" part is omitted. r5r�for�in�ifr:r;) rrrr�appendrrZcomp_ifZ listmakerZcomp_forrr<r=) Zxp�fp�itZtestZfor_leafZin_leafZ inner_argsZif_leaf�innerrrr �ListCompWs$ rIcCsZx|D]}|j�qWttjd�ttj|dd�ttjddd�ttj|�g}ttj|�}|S)zO Return an import statement in the form: from package import name_leafs�fromr)r�import)�removerrrrr�import_as_names�import_from)Zpackage_nameZ name_leafsZleaf�children�imprrr � FromImportos rQc Cs�|dj�}|jtjkr"|j�}nttj|j�g�}|d}|rNdd�|D�}ttjtt|d�t|d��ttj|dj�||dj�g�g|�}|j |_ |S) zfReturns an import statement and calls a method of the module: import module module.name()r"�aftercSsg|]}|j��qSr)r*)�.0r8rrr � <listcomp>�sz!ImportAndCall.<locals>.<listcomp>�rZlparZrpar) r*�typerr,rr0r$rr r)r.�results�namesr"Z newarglistrR�newrrr � ImportAndCall�s DrZcCs�t|t�r |jt�t�gkr dSt|t�o�t|j�dko�t|jdt�o�t|jdt�o�t|jdt�o�|jdjdko�|jdjdkS)z(Does the node represent a tuple literal?T�rUr�rr)rrrOrr�lenrr)r.rrr �is_tuple�s r^cCsXt|t�oVt|j�dkoVt|jdt�oVt|jdt�oV|jdjdkoV|jdjdkS)z'Does the node represent a list literal?rrUr:r;���r_)rrr]rOrr)r.rrr �is_list�s r`cCsttjt�|t�g�S)N)rrrrr)r.rrr �parenthesize�sra�sortedr�set�any�all�tuple�sum�min�max� enumerateccs(t||�}x|r"|Vt||�}qWdS)alFollow an attribute chain. If you have a chain of objects where a.foo -> b, b.foo-> c, etc, use this to iterate over all objects in the chain. Iteration is terminated by getattr(x, attr) is None. Args: obj: the starting object attr: the name of the chaining attribute Yields: Each successive object in the chain. N)�getattr)r"r#�nextrrr � attr_chain�s rmzefor_stmt< 'for' any 'in' node=any ':' any* > | comp_for< 'for' any 'in' node=any any* > z� power< ( 'iter' | 'list' | 'tuple' | 'sorted' | 'set' | 'sum' | 'any' | 'all' | 'enumerate' | (any* trailer< '.' 
'join' >) ) trailer< '(' node=any ')' > any* > z` power< ( 'sorted' | 'enumerate' ) trailer< '(' arglist<node=any any*> ')' > any* > FcCsrts&tjt�atjt�atjt�adatttg}x<t|t|d��D](\}}i}|j||�rB|d|krBdSqBWdS)a Returns true if node is in an environment where all that is required of it is being iterable (ie, it doesn't matter if it returns a list or an iterator). See test_map_nochange in test_fixers.py for some examples and tests. T�parentr.F) � pats_builtrZcompile_pattern�p0�p1�p2�ziprm�match)r.Zpatterns�patternrnrWrrr �in_special_context�s rvcCs�|j}|dk r|jtjkrdS|j}|jtjtjfkr:dS|jtjkrX|j d|krXdS|jtj ks�|jtjkr�|dk r�|jtjks�|j d|kr�dSdS)zG Check that something isn't an attribute or function name etc. NFrUT) Zprev_siblingrVrr)rnr�funcdef�classdef� expr_stmtrOZ parametersZ typedargslistr&)r.�prevrnrrr �is_probably_builtin�sr{cCsNxH|dk rH|jtjkr@t|j�dkr@|jd}|jtjkr@|jS|j}qWdS)zFind the indentation of *node*.Nr\rr5) rVr�suiter]rOr�INDENTrrn)r.�indentrrr �find_indentations rcCs>|jtjkr|S|j�}|jd}|_ttj|g�}||_|S)N)rVrr|r*rnr)r.rnr|rrr � make_suitesr�cCs(x"|jtjkr"|j}|std��qW|S)zFind the top level namespace.z,root found before file_input node was found.)rVrZ file_inputrn� ValueError)r.rrr � find_root&s r�cCst|t|�|�}t|�S)z� Returns true if name is imported from package at the top level of the tree which node belongs to. To cover the case of an import like 'import foo', use None for the package and 'foo' for the name. )�find_bindingr��bool)�packagerr.Zbindingrrr �does_tree_import/sr�cCs|jtjtjfkS)z0Returns true if the node is an import statement.)rVr�import_namerN)r.rrr � is_import7sr�cCs4dd�}t|�}t|||�r dSd}}xTt|j�D]F\}}||�sFq4x(t|j|d��D]\}}||�sZPqZW||}Pq4W|dkr�xDt|j�D]6\}}|jtjkr�|jr�|jdjtjkr�|d}Pq�W|dkr�t tj ttjd�ttj|dd�g�} nt |ttj|dd�g�} | t�g} |j|t tj| ��dS) z\ Works like `does_tree_import` but adds an import statement if it was not imported. cSs |jtjko|jot|jd�S)NrU)rVr�simple_stmtrOr�)r.rrr �is_import_stmt>sz$touch_import.<locals>.is_import_stmtNrUrrKr)r)r�r�rjrOrVrr�rr?rr�rrrQr4r+)r�rr.r��rootZ insert_pos�offset�idxZnode2�import_rOrrr �touch_import;s4 r�cCs��x�|jD�]�}d}|jtjkrVt||jd�r4|St|t|jd�|�}|rR|}�n4|jtjtjfkr�t|t|jd �|�}|r�|}�n|jtj k�rt|t|jd�|�}|r�|}nXx�t |jdd��D]@\}}|jtjko�|j dkr�t|t|j|d�|�}|r�|}q�Wnx|jtk�r6|jdj |k�r6|}nTt|||��rJ|}n@|jtjk�rft|||�}n$|jtjk�r�t||jd��r�|}|r |�s�|St|�r |Sq WdS) z� Returns the node which binds variable name, otherwise None. If optional argument package is supplied, only imports will be returned. See test cases for examples.Nrr\r[�:�rUr_r_)rOrVrZfor_stmt�_findr�r�Zif_stmtZ while_stmtZtry_stmtrjr�COLONr� _def_syms�_is_import_bindingr�ryr�)rr.r��childZretr8�iZkidrrr r�isH r�cCsX|g}xL|rR|j�}|jdkr6|jtkr6|j|j�q|jtjkr|j|kr|SqWdS)N�)�poprV�_block_syms�extendrOrrr)rr.Znodesrrr r��sr�cCs�|jtjkr�|r�|jd}|jtjkrvx�|jD]@}|jtjkrV|jdj|krp|Sq0|jtjkr0|j|kr0|Sq0WnL|jtjkr�|jd}|jtjkr�|j|kr�|Sn|jtjkr�|j|kr�|Sn�|jtj k�r�|r�t |jd�j�|kr�dS|jd}|�rtd|��rdS|jtj k�r.t||��r.|S|jtjk�rf|jd}|jtjk�r�|j|k�r�|Sn6|jtjk�r�|j|k�r�|S|�r�|jtjk�r�|SdS)z� Will reuturn node if node will import name, or node will import * from package. None is returned otherwise. See test cases for examples. 
rr\Nr[�asr_)rVrr�rOZdotted_as_namesZdotted_as_namerrrrN�str�stripr�rMZimport_as_name�STAR)r.rr�rPr�Zlastr8rrr r��s@ r�)N)NN)N)N)N)N)N)6�__doc__Zpgen2rZpytreerrZpygramrrr5rrrrrrr$r'r!r/r1r4r6r9r>rArIrQrZr^r`raZconsuming_callsrmrprqrrrorvr{rr�r�r�r�r�rxrwr�r�r r�r�r�rrrr �<module>sZ - * PK{��\��2!2!%__pycache__/main.cpython-36.opt-1.pycnu�[���3 \�-�@s�dZddlmZmZddlZddlZddlZddlZddlZddl Z ddl mZdd�ZGdd �d ej �Zd d�Zddd �ZdS)z Main program for 2to3. �)�with_statement�print_functionN�)�refactorc Cs(|j�}|j�}tj||||dddd�S)z%Return a unified diff of two strings.z (original)z(refactored)�)Zlineterm)� splitlines�difflibZunified_diff)�a�b�filename�r�$/usr/lib64/python3.6/lib2to3/main.py� diff_textss rcs>eZdZdZd�fdd� Zdd�Z�fdd�Zd d �Z�ZS)�StdoutRefactoringToola2 A refactoring tool that can avoid overwriting its input files. Prints output to stdout. Output files can optionally be written to a different directory and or have an extra file suffix appended to their name for use in situations where you do not want to replace the input files. rc sR||_||_|r(|jtj�r(|tj7}||_||_||_tt |�j |||�dS)aF Args: fixers: A list of fixers to import. options: A dict with RefactoringTool configuration. explicit: A list of fixers to run even if they are explicit. nobackups: If true no backup '.bak' files will be created for those files that are being refactored. show_diffs: Should diffs of the refactoring be printed to stdout? input_base_dir: The base directory for all input files. This class will strip this path prefix off of filenames before substituting it with output_dir. Only meaningful if output_dir is supplied. All files processed by refactor() must start with this path. output_dir: If supplied, all converted files will be written into this directory tree instead of input_base_dir. append_suffix: If supplied, all files output by this tool will have this appended to their filename. Useful for changing .py to .py3 for example by passing append_suffix='3'. 
N)� nobackups� show_diffs�endswith�os�sep�_input_base_dir�_output_dir�_append_suffix�superr�__init__) �selfZfixers�options�explicitrr�input_base_dir� output_dir� append_suffix)� __class__rr r$s zStdoutRefactoringTool.__init__cOs*|jj|||f�|jj|f|�|�dS)N)�errors�append�logger�error)r�msg�args�kwargsrrr � log_errorAszStdoutRefactoringTool.log_errorc !s||}|jrH|j|j�r6tjj|j|t|j�d��}ntd||jf��|jrX||j7}||kr�tjj |�}tjj |�r�|r�tj|�|jd||�|j �s4|d}tjj|�r�ytj|�Wn.tk r�}z|jd|�WYdd}~XnXytj||�Wn2tk �r2}z|jd||�WYdd}~XnXtt|�j} | ||||�|j �sbtj||�||k�rxtj||�dS)Nz5filename %s does not start with the input_base_dir %szWriting converted %s to %s.z.bakzCan't remove backup %szCan't rename %s to %s)r� startswithrr�path�join�len� ValueErrorr�dirname�isdir�makedirs�log_messager�lexists�remove�OSError�renamerr� write_file�shutilZcopymode) rZnew_textrZold_text�encodingZ orig_filenamerZbackup�err�write)r rr r6Es@ z StdoutRefactoringTool.write_filecCs�|r|jd|�n�|jd|�|jr�t|||�}yX|jdk rp|j�&x|D]}t|�qJWtjj�WdQRXnx|D]}t|�qvWWn"tk r�t d|f�dSXdS)NzNo changes to %sz Refactored %sz+couldn't encode %s's diff for your terminal) r1rrZoutput_lock�print�sys�stdout�flush�UnicodeEncodeError�warn)r�old�newrZequalZ diff_lines�linerrr �print_outputls" z"StdoutRefactoringTool.print_output)rrr) �__name__� __module__�__qualname__�__doc__rr(r6rD� __classcell__rr)r r rs 'rcCstd|ftjd�dS)NzWARNING: %s)�file)r;r<�stderr)r%rrr r@�sr@cstjdd�}|jddddd�|jdd d gdd�|jd dddddd�|jddd gdd�|jddddd�|jddddd�|jddddd�|jd dd!d�|jd"d#dd$d�|jd%d&dd'd(d�|jd)d*dd+d,d-d.�|jd/d0dd1d�|jd2dd+d,d3d.�d'}i}|j|�\}}|j�r@d4|d5<|j�s:td6�d4|_|j�r\|j�r\|j d7�|j �rx|j�rx|j d8�|j�r�|j�r�td9�|j�r�|j�r�|j d:�|j�r�t d;�xtj��D]}t |��q�W|�s�d<S|�st d=tjd>�t d?tjd>�d@SdA|k�r4d4}|j�r4t dBtjd>�d@S|j�rDd4|dC<|j�rRtjntj}tjdD|dE�tjdF�}ttj���} t�fdGdH�|jD��} t�}|j�r�d'}x2|jD](} | dIk�r�d4}n|j�dJ| ��q�W|�r�| j|�n|}n | j|�}|j| �}t j!j"|�}|�rD|j#t j$��rDt j!j%|��rDt j!j&|�}|j�rh|j't j$�}|j(dK|j|�t)t*|�|t*|�|j|j||j|j dL�}|j+�s�|�r�|j,�nBy|j||j|j-|j.�Wn&tj/k �r�t dMtjd>�dSX|j0�t1t2|j+��S)Nz�Main program. Args: fixer_pkg: the name of a package where the fixers are located. args: optional; a list of command line arguments. If omitted, sys.argv[1:] is used. Returns a suggested exit status (0, 1, 2). z2to3 [options] file|dir ...)Zusagez-dz--doctests_only� store_truezFix up doctests only)�action�helpz-fz--fixr"z1Each FIX specifies a transformation; default: all)rM�defaultrNz-jz--processesZstorer�intzRun 2to3 concurrently)rMrO�typerNz-xz--nofixz'Prevent a transformation from being runz-lz--list-fixeszList available transformationsz-pz--print-functionz0Modify the grammar so that print() is a functionz-vz --verbosezMore verbose loggingz --no-diffsz#Don't show diffs of the refactoringz-wz--writezWrite back modified filesz-nz--nobackupsFz&Don't write backups for modified filesz-oz--output-dir�strrzXPut output files in this directory instead of overwriting the input files. Requires -n.)rMrQrOrNz-Wz--write-unchanged-fileszYAlso write files even if no changes were required (useful with --output-dir); implies -w.z--add-suffixzuAppend this string to all output filenames. Requires -n if non-empty. 
ex: --add-suffix='3' will generate .py3 files.T�write_unchanged_filesz&--write-unchanged-files/-W implies -w.z%Can't use --output-dir/-o without -n.z"Can't use --add-suffix without -n.z@not writing files and not printing diffs; that's not very usefulzCan't use -n without -wz2Available transformations for the -f/--fix option:rz1At least one file or directory argument required.)rJzUse --help to show usage.��-zCan't write to stdin.rz%(name)s: %(message)s)�format�levelzlib2to3.mainc3s|]}�d|VqdS)z.fix_Nr)�.0�fix)� fixer_pkgrr � <genexpr>�szmain.<locals>.<genexpr>�allz.fix_z7Output in %r will mirror the input directory %r layout.)rrrz+Sorry, -j isn't supported on this platform.)3�optparseZOptionParserZ add_option� parse_argsrSr:r@rrr$Z add_suffixZno_diffsZ list_fixesr;rZget_all_fix_namesr<rKr�verbose�logging�DEBUG�INFOZbasicConfigZ getLogger�setZget_fixers_from_packageZnofixrY�add�union� differencerr*�commonprefixrrr/r.�rstrip�infor�sortedr!�refactor_stdinZ doctests_onlyZ processesZMultiprocessingUnsupportedZ summarizerP�bool)rZr&�parserrk�flagsrZfixnamerWr#Zavail_fixesZunwanted_fixesrZall_presentrYZ requestedZfixer_namesrZrtr)rZr �main�s� ro)N)rHZ __future__rrr<rrr`r7r]rrrZMultiprocessRefactoringToolrr@rorrrr �<module>s gPK{��\��WWW%__pycache__/main.cpython-36.opt-2.pycnu�[���3 \�-�@s|ddlmZmZddlZddlZddlZddlZddlZddlZddl m Z dd�ZGdd�de j�Z d d �Zd dd�ZdS)�)�with_statement�print_functionN�)�refactorc Cs(|j�}|j�}tj||||dddd�S)Nz (original)z(refactored)�)Zlineterm)� splitlines�difflibZunified_diff)�a�b�filename�r�$/usr/lib64/python3.6/lib2to3/main.py� diff_textss rcs:eZdZd �fdd� Zdd�Z�fdd�Zdd �Z�ZS)�StdoutRefactoringToolrc sR||_||_|r(|jtj�r(|tj7}||_||_||_tt |�j |||�dS)N)� nobackups� show_diffs�endswith�os�sep�_input_base_dir�_output_dir�_append_suffix�superr�__init__) �selfZfixers�options�explicitrr�input_base_dir� output_dir� append_suffix)� __class__rr r$s zStdoutRefactoringTool.__init__cOs*|jj|||f�|jj|f|�|�dS)N)�errors�append�logger�error)r�msg�args�kwargsrrr � log_errorAszStdoutRefactoringTool.log_errorc !s||}|jrH|j|j�r6tjj|j|t|j�d��}ntd||jf��|jrX||j7}||kr�tjj |�}tjj |�r�|r�tj|�|jd||�|j �s4|d}tjj|�r�ytj|�Wn.tk r�}z|jd|�WYdd}~XnXytj||�Wn2tk �r2}z|jd||�WYdd}~XnXtt|�j} | ||||�|j �sbtj||�||k�rxtj||�dS)Nz5filename %s does not start with the input_base_dir %szWriting converted %s to %s.z.bakzCan't remove backup %szCan't rename %s to %s)r� startswithrr�path�join�len� ValueErrorr�dirname�isdir�makedirs�log_messager�lexists�remove�OSError�renamerr� write_file�shutilZcopymode) rZnew_textrZold_text�encodingZ orig_filenamerZbackup�err�write)r rr r6Es@ z StdoutRefactoringTool.write_filecCs�|r|jd|�n�|jd|�|jr�t|||�}yX|jdk rp|j�&x|D]}t|�qJWtjj�WdQRXnx|D]}t|�qvWWn"tk r�t d|f�dSXdS)NzNo changes to %sz Refactored %sz+couldn't encode %s's diff for your terminal) r1rrZoutput_lock�print�sys�stdout�flush�UnicodeEncodeError�warn)r�old�newrZequalZ diff_lines�linerrr �print_outputls" z"StdoutRefactoringTool.print_output)rrr)�__name__� __module__�__qualname__rr(r6rD� __classcell__rr)r r rs'rcCstd|ftjd�dS)NzWARNING: %s)�file)r;r<�stderr)r%rrr r@�sr@cstjdd�}|jddddd�|jdd d gdd�|jd dddddd�|jddd gdd�|jddddd�|jddddd�|jddddd�|jd dd!d�|jd"d#dd$d�|jd%d&dd'd(d�|jd)d*dd+d,d-d.�|jd/d0dd1d�|jd2dd+d,d3d.�d'}i}|j|�\}}|j�r@d4|d5<|j�s:td6�d4|_|j�r\|j�r\|j d7�|j �rx|j�rx|j d8�|j�r�|j�r�td9�|j�r�|j�r�|j d:�|j�r�t d;�xtj��D]}t |��q�W|�s�d<S|�st d=tjd>�t d?tjd>�d@SdA|k�r4d4}|j�r4t dBtjd>�d@S|j�rDd4|dC<|j�rRtjntj}tjdD|dE�tjdF�}ttj���} t�fdGdH�|jD��} t�}|j�r�d'}x2|jD](} | dIk�r�d4}n|j�dJ| ��q�W|�r�| j|�n|}n | j|�}|j| �}t j!j"|�}|�rD|j#t j$��rDt 
j!j%|��rDt j!j&|�}|j�rh|j't j$�}|j(dK|j|�t)t*|�|t*|�|j|j||j|j dL�}|j+�s�|�r�|j,�nBy|j||j|j-|j.�Wn&tj/k �r�t dMtjd>�dSX|j0�t1t2|j+��S)NNz2to3 [options] file|dir ...)Zusagez-dz--doctests_only� store_truezFix up doctests only)�action�helpz-fz--fixr"z1Each FIX specifies a transformation; default: all)rL�defaultrMz-jz--processesZstorer�intzRun 2to3 concurrently)rLrN�typerMz-xz--nofixz'Prevent a transformation from being runz-lz--list-fixeszList available transformationsz-pz--print-functionz0Modify the grammar so that print() is a functionz-vz --verbosezMore verbose loggingz --no-diffsz#Don't show diffs of the refactoringz-wz--writezWrite back modified filesz-nz--nobackupsFz&Don't write backups for modified filesz-oz--output-dir�strrzXPut output files in this directory instead of overwriting the input files. Requires -n.)rLrPrNrMz-Wz--write-unchanged-fileszYAlso write files even if no changes were required (useful with --output-dir); implies -w.z--add-suffixzuAppend this string to all output filenames. Requires -n if non-empty. ex: --add-suffix='3' will generate .py3 files.T�write_unchanged_filesz&--write-unchanged-files/-W implies -w.z%Can't use --output-dir/-o without -n.z"Can't use --add-suffix without -n.z@not writing files and not printing diffs; that's not very usefulzCan't use -n without -wz2Available transformations for the -f/--fix option:rz1At least one file or directory argument required.)rIzUse --help to show usage.��-zCan't write to stdin.rz%(name)s: %(message)s)�format�levelzlib2to3.mainc3s|]}�d|VqdS)z.fix_Nr)�.0�fix)� fixer_pkgrr � <genexpr>�szmain.<locals>.<genexpr>�allz.fix_z7Output in %r will mirror the input directory %r layout.)rrrz+Sorry, -j isn't supported on this platform.)3�optparseZOptionParserZ add_option� parse_argsrRr:r@rrr$Z add_suffixZno_diffsZ list_fixesr;rZget_all_fix_namesr<rJr�verbose�logging�DEBUG�INFOZbasicConfigZ getLogger�setZget_fixers_from_packageZnofixrX�add�union� differencerr*�commonprefixrrr/r.�rstrip�infor�sortedr!�refactor_stdinZ doctests_onlyZ processesZMultiprocessingUnsupportedZ summarizerO�bool)rYr&�parserrj�flagsrZfixnamerVr#Zavail_fixesZunwanted_fixesrZall_presentrXZ requestedZfixer_namesrZrtr)rYr �main�s� rn)N)Z __future__rrr<rrr_r7r\rrrZMultiprocessRefactoringToolrr@rnrrrr �<module>s gPK{��\4�P�T!T!__pycache__/main.cpython-36.pycnu�[���3 \�-�@s�dZddlmZmZddlZddlZddlZddlZddlZddl Z ddl mZdd�ZGdd �d ej �Zd d�Zddd �ZdS)z Main program for 2to3. �)�with_statement�print_functionN�)�refactorc Cs(|j�}|j�}tj||||dddd�S)z%Return a unified diff of two strings.z (original)z(refactored)�)Zlineterm)� splitlines�difflibZunified_diff)�a�b�filename�r�$/usr/lib64/python3.6/lib2to3/main.py� diff_textss rcs>eZdZdZd�fdd� Zdd�Z�fdd�Zd d �Z�ZS)�StdoutRefactoringToola2 A refactoring tool that can avoid overwriting its input files. Prints output to stdout. Output files can optionally be written to a different directory and or have an extra file suffix appended to their name for use in situations where you do not want to replace the input files. rc sR||_||_|r(|jtj�r(|tj7}||_||_||_tt |�j |||�dS)aF Args: fixers: A list of fixers to import. options: A dict with RefactoringTool configuration. explicit: A list of fixers to run even if they are explicit. nobackups: If true no backup '.bak' files will be created for those files that are being refactored. show_diffs: Should diffs of the refactoring be printed to stdout? input_base_dir: The base directory for all input files. This class will strip this path prefix off of filenames before substituting it with output_dir. 
Only meaningful if output_dir is supplied. All files processed by refactor() must start with this path. output_dir: If supplied, all converted files will be written into this directory tree instead of input_base_dir. append_suffix: If supplied, all files output by this tool will have this appended to their filename. Useful for changing .py to .py3 for example by passing append_suffix='3'. N)� nobackups� show_diffs�endswith�os�sep�_input_base_dir�_output_dir�_append_suffix�superr�__init__) �selfZfixers�options�explicitrr�input_base_dir� output_dir� append_suffix)� __class__rr r$s zStdoutRefactoringTool.__init__cOs*|jj|||f�|jj|f|�|�dS)N)�errors�append�logger�error)r�msg�args�kwargsrrr � log_errorAszStdoutRefactoringTool.log_errorc !s||}|jrH|j|j�r6tjj|j|t|j�d��}ntd||jf��|jrX||j7}||kr�tjj |�}tjj |�r�|r�tj|�|jd||�|j �s4|d}tjj|�r�ytj|�Wn.tk r�}z|jd|�WYdd}~XnXytj||�Wn2tk �r2}z|jd||�WYdd}~XnXtt|�j} | ||||�|j �sbtj||�||k�rxtj||�dS)Nz5filename %s does not start with the input_base_dir %szWriting converted %s to %s.z.bakzCan't remove backup %szCan't rename %s to %s)r� startswithrr�path�join�len� ValueErrorr�dirname�isdir�makedirs�log_messager�lexists�remove�OSError�renamerr� write_file�shutilZcopymode) rZnew_textrZold_text�encodingZ orig_filenamerZbackup�err�write)r rr r6Es@ z StdoutRefactoringTool.write_filecCs�|r|jd|�n�|jd|�|jr�t|||�}yX|jdk rp|j�&x|D]}t|�qJWtjj�WdQRXnx|D]}t|�qvWWn"tk r�t d|f�dSXdS)NzNo changes to %sz Refactored %sz+couldn't encode %s's diff for your terminal) r1rrZoutput_lock�print�sys�stdout�flush�UnicodeEncodeError�warn)r�old�newrZequalZ diff_lines�linerrr �print_outputls" z"StdoutRefactoringTool.print_output)rrr) �__name__� __module__�__qualname__�__doc__rr(r6rD� __classcell__rr)r r rs 'rcCstd|ftjd�dS)NzWARNING: %s)�file)r;r<�stderr)r%rrr r@�sr@cstjdd�}|jddddd�|jdd d gdd�|jd dddddd�|jddd gdd�|jddddd�|jddddd�|jddddd�|jd dd!d�|jd"d#dd$d�|jd%d&dd'd(d�|jd)d*dd+d,d-d.�|jd/d0dd1d�|jd2dd+d,d3d.�d'}i}|j|�\}}|j�r@d4|d5<|j�s:td6�d4|_|j�r\|j�r\|j d7�|j �rx|j�rx|j d8�|j�r�|j�r�td9�|j�r�|j�r�|j d:�|j�r�t d;�xtj��D]}t |��q�W|�s�d<S|�st d=tjd>�t d?tjd>�d@SdA|k�r4d4}|j�r4t dBtjd>�d@S|j�rDd4|dC<|j�rRtjntj}tjdD|dE�tjdF�}ttj���} t�fdGdH�|jD��} t�}|j�r�d'}x2|jD](} | dIk�r�d4}n|j�dJ| ��q�W|�r�| j|�n|}n | j|�}|j| �}t j!j"|�}|�rD|j#t j$��rDt j!j%|��rDt j!j&|�}|j�rh|j't j$�}|j(dK|j|�t)t*|�|t*|�|j|j||j|j dL�}|j+�s|�r�|j,�nRy|j||j|j-|j.�Wn6tj/k �r�|j.dk�s�t0�t dMtjd>�dSX|j1�t2t3|j+��S)Nz�Main program. Args: fixer_pkg: the name of a package where the fixers are located. args: optional; a list of command line arguments. If omitted, sys.argv[1:] is used. Returns a suggested exit status (0, 1, 2). z2to3 [options] file|dir ...)Zusagez-dz--doctests_only� store_truezFix up doctests only)�action�helpz-fz--fixr"z1Each FIX specifies a transformation; default: all)rM�defaultrNz-jz--processesZstorer�intzRun 2to3 concurrently)rMrO�typerNz-xz--nofixz'Prevent a transformation from being runz-lz--list-fixeszList available transformationsz-pz--print-functionz0Modify the grammar so that print() is a functionz-vz --verbosezMore verbose loggingz --no-diffsz#Don't show diffs of the refactoringz-wz--writezWrite back modified filesz-nz--nobackupsFz&Don't write backups for modified filesz-oz--output-dir�strrzXPut output files in this directory instead of overwriting the input files. Requires -n.)rMrQrOrNz-Wz--write-unchanged-fileszYAlso write files even if no changes were required (useful with --output-dir); implies -w.z--add-suffixzuAppend this string to all output filenames. Requires -n if non-empty. 
ex: --add-suffix='3' will generate .py3 files.T�write_unchanged_filesz&--write-unchanged-files/-W implies -w.z%Can't use --output-dir/-o without -n.z"Can't use --add-suffix without -n.z@not writing files and not printing diffs; that's not very usefulzCan't use -n without -wz2Available transformations for the -f/--fix option:rz1At least one file or directory argument required.)rJzUse --help to show usage.��-zCan't write to stdin.rz%(name)s: %(message)s)�format�levelzlib2to3.mainc3s|]}�d|VqdS)z.fix_Nr)�.0�fix)� fixer_pkgrr � <genexpr>�szmain.<locals>.<genexpr>�allz.fix_z7Output in %r will mirror the input directory %r layout.)rrrz+Sorry, -j isn't supported on this platform.)4�optparseZOptionParserZ add_option� parse_argsrSr:r@rrr$Z add_suffixZno_diffsZ list_fixesr;rZget_all_fix_namesr<rKr�verbose�logging�DEBUG�INFOZbasicConfigZ getLogger�setZget_fixers_from_packageZnofixrY�add�union� differencerr*�commonprefixrrr/r.�rstrip�infor�sortedr!�refactor_stdinZ doctests_onlyZ processesZMultiprocessingUnsupported�AssertionErrorZ summarizerP�bool)rZr&�parserrk�flagsrZfixnamerWr#Zavail_fixesZunwanted_fixesrZall_presentrYZ requestedZfixer_namesrZrtr)rZr �main�s� rp)N)rHZ __future__rrr<rrr`r7r]rrrZMultiprocessRefactoringToolrr@rprrrr �<module>s gPK{��\\�!Y��(__pycache__/patcomp.cpython-36.opt-1.pycnu�[���3 \��@s�dZdZddlZddlmZmZmZmZmZm Z ddl mZddl mZGdd �d e �Zd d�ZGdd �d e�Zejejejdd�Zdd�Zdd�Zdd�ZdS)z�Pattern compiler. The grammar is taken from PatternGrammar.txt. The compiler compiles a pattern to a pytree.*Pattern instance. z#Guido van Rossum <guido@python.org>�N�)�driver�literals�token�tokenize�parse�grammar)�pytree)�pygramc@seZdZdS)�PatternSyntaxErrorN)�__name__� __module__�__qualname__�rr�'/usr/lib64/python3.6/lib2to3/patcomp.pyrsrc csPtjtjtjh}tjtj|�j�}x(|D] }|\}}}}}||kr(|Vq(WdS)z6Tokenizes a string suppressing significant whitespace.N) r�NEWLINE�INDENT�DEDENTr�generate_tokens�io�StringIO�readline) �input�skip�tokensZ quintuple�type�value�start�endZ line_textrrr�tokenize_wrappers rc@s:eZdZd dd�Zddd�Zdd�Zdd d �Zdd�ZdS)�PatternCompilerNcCsZ|dkrtj|_tj|_ntj|�|_tj|j�|_tj|_ tj |_tj|jt d�|_dS)z^Initializer. Takes an optional alternative filename for the pattern grammar. N)Zconvert)r Zpattern_grammarrZpattern_symbols�symsrZload_grammarZSymbolsZpython_grammarZ pygrammarZpython_symbols�pysymsZDriver�pattern_convert)�selfZgrammar_filerrr�__init__(s zPatternCompiler.__init__FcCsnt|�}y|jj||d�}Wn0tjk rL}ztt|���WYdd}~XnX|r`|j|�|fS|j|�SdS)z=Compiles a pattern string to a nested pytree.*Pattern object.)�debugN)rrZparse_tokensrZ ParseErrorr�str�compile_node)r$rr&Z with_treer�root�errr�compile_pattern7szPatternCompiler.compile_patternc sV|j�jjkr|jd}|j�jjkrz�fdd�|jddd�D�}t|�dkrX|dStjdd�|D�ddd�}|j�S|j�jj krʇfd d�|jD�}t|�dkr�|dStj|gddd�}|j�S|j�jj kr��j|jdd��}tj|�}|j�Sd}|j}t|�d k�r>|djt jk�r>|dj}|dd�}d}t|�dk�rx|dj�jjk�rx|d }|dd�}�j||�}|dk �r>|j} | d} | jt jk�r�d}tj}nX| jt jk�r�d}tj}n>| jt jk�r�j| d�}}t| �dk�r�j| d �}n|dk�s"|dk�r>|j�}tj|gg||d�}|dk �rN||_|j�S)zXCompiles a node, recursively. This is one big switch on the node type. 
[unrecoverable binary data omitted: this segment of lib2to3.zip is raw compiled
CPython 3.6 bytecode (.pyc members, plus .opt-1/.opt-2 optimized variants). The
segment opens mid-member, continuing bytecode begun above. Only the ZIP member
names and the docstrings/identifiers embedded in the bytecode are recoverable:]

__pycache__/patcomp.cpython-36.opt-2.pyc
__pycache__/patcomp.cpython-36.pyc
    "Pattern compiler. The grammar is taken from PatternGrammar.txt. The
    compiler compiles a pattern to a pytree.*Pattern instance."
    Recovered identifiers: PatternSyntaxError, tokenize_wrapper ("tokenizes a
    string suppressing significant whitespace"), PatternCompiler with
    compile_pattern / compile_node / compile_basic, pattern_convert.

__pycache__/pygram.cpython-36.opt-1.pyc
__pycache__/pygram.cpython-36.opt-2.pyc
__pycache__/pygram.cpython-36.pyc
    "Export the Python grammar and symbols."
    Recovered identifiers: Symbols ("creates an attribute for each grammar
    symbol (nonterminal), whose value is the symbol's type (an int >= 256)"),
    python_grammar, python_grammar_no_print_statement, pattern_grammar,
    pattern_symbols.
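How these two members fit together, as a minimal hedged sketch (assuming an
interpreter that still ships lib2to3, which CPython removed in 3.13): patcomp
compiles a pattern string into a pytree.*Pattern object, and pygram exports the
grammar used to parse the source that the pattern is matched against. The
pattern string below is a simplified form of the one recovered from
fix_apply.pyc further down in this archive.

    # Sketch only: compile a lib2to3 pattern and match it against a parse tree.
    from lib2to3 import patcomp, pygram, pytree
    from lib2to3.pgen2 import driver

    d = driver.Driver(pygram.python_grammar, convert=pytree.convert)
    tree = d.parse_string("apply(f, args)\n")

    # 'args=any' captures the node between the parentheses under that key.
    pat = patcomp.compile_pattern("power< 'apply' trailer< '(' args=any ')' > >")
    for node in tree.pre_order():
        results = {}
        if pat.match(node, results):
            print("matched, args =", str(results["args"]))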
N)Z symbol2number�items�setattr)�selfZgrammar�nameZsymbol�r�&/usr/lib64/python3.6/lib2to3/pygram.py�__init__szSymbols.__init__N)�__name__� __module__�__qualname__r rrrrrsrZlib2to3�print)�__doc__�osZpgen2rr�r�path�join�dirname�__file__Z _GRAMMAR_FILEZ_PATTERN_GRAMMAR_FILE�objectrZload_packaged_grammarZpython_grammarZpython_symbols�copyZ!python_grammar_no_print_statement�keywordsZpattern_grammarZpattern_symbolsrrrr�<module>sPK{��\Q��_�_'__pycache__/pytree.cpython-36.opt-1.pycnu�[���3 \�m�@s�dZdZddlZddlZddlmZdZiadd�ZGdd �d e �Z Gd d�de �ZGdd �d e �Zdd�Z Gdd�de �ZGdd�de�ZGdd�de�ZGdd�de�ZGdd�de�Zdd�ZdS)z� Python parse tree definitions. This is a very concrete parse tree; we need to keep every token and even the comments and whitespace between tokens. There's also a pattern matching implementation here. z#Guido van Rossum <guido@python.org>�N)�StringIOi���cCsHts<ddlm}x*|jj�D]\}}t|�tkr|t|<qWtj||�S)N�)�python_symbols)�_type_reprsZpygramr�__dict__�items�type�int� setdefault)Ztype_numr�name�val�r �&/usr/lib64/python3.6/lib2to3/pytree.py� type_reprsrc@s�eZdZdZdZdZfZdZdZdd�Z dd�Z dZdd �Zd d�Z dd �Zdd�Zdd�Zdd�Zdd�Zdd�Zedd��Zedd��Zdd�Zdd�Zd d!�Zejd&kr�d$d%�ZdS)'�Basez� Abstract base class for Node and Leaf. This provides some default functionality and boilerplate using the template pattern. A node may be a subnode of at most one parent. NFcOs tj|�S)z7Constructor that prevents Base from being instantiated.)�object�__new__)�cls�args�kwdsr r rr2szBase.__new__cCs|j|jk rtS|j|�S)zW Compare two nodes for equality. This calls the method _eq(). )� __class__�NotImplemented�_eq)�self�otherr r r�__eq__7szBase.__eq__cCst�dS)a_ Compare two nodes for equality. This is called by __eq__ and __ne__. It is only called if the two nodes have the same type. This must be implemented by the concrete subclass. Nodes should be considered equal if they have the same structure, ignoring the prefix string and other context information. N)�NotImplementedError)rrr r rrCs zBase._eqcCst�dS)zr Return a cloned (deep) copy of self. This must be implemented by the concrete subclass. N)r)rr r r�cloneNsz Base.clonecCst�dS)zx Return a post-order iterator for the tree. This must be implemented by the concrete subclass. N)r)rr r r� post_orderVszBase.post_ordercCst�dS)zw Return a pre-order iterator for the tree. This must be implemented by the concrete subclass. N)r)rr r r� pre_order^szBase.pre_ordercCs�t|t�s|g}g}d}x:|jjD].}||krF|dk r@|j|�d}q"|j|�q"W|jj�||j_x|D]}|j|_qlWd|_dS)z/Replace this node with a new one in the parent.FNT)� isinstance�list�parent�children�extend�append�changed)r�newZ l_children�found�ch�xr r r�replacefs zBase.replacecCs.|}x"t|t�s&|jsdS|jd}qW|jS)z9Return the line number which generated the invocant node.Nr)r �Leafr#�lineno)r�noder r r� get_lineno}szBase.get_linenocCs|jr|jj�d|_dS)NT)r"r&�was_changed)rr r rr&�s zBase.changedcCsJ|jrFx>t|jj�D].\}}||kr|jj�|jj|=d|_|SqWdS)z� Remove the node from the tree. Returns the position of the node in its parent's children before it was removed. N)r"� enumerater#r&)r�ir.r r r�remove�s zBase.removecCsZ|jdkrdSxFt|jj�D]6\}}||kry|jj|dStk rPdSXqWdS)z� The node immediately following the invocant in their parent's children list. If the invocant does not have a next sibling, it is None Nr)r"r1r#� IndexError)rr2�childr r r�next_sibling�s zBase.next_siblingcCsP|jdkrdSx<t|jj�D],\}}||kr|dkr8dS|jj|dSqWdS)z� The node immediately preceding the invocant in their parent's children list. If the invocant does not have a previous sibling, it is None. 
Nrr)r"r1r#)rr2r5r r r�prev_sibling�s zBase.prev_siblingccs"x|jD]}|j�EdHqWdS)N)r#�leaves)rr5r r rr8�szBase.leavescCs|jdkrdSd|jj�S)Nrr)r"�depth)rr r rr9�s z Base.depthcCs|j}|dkrdS|jS)z� Return the string immediately following the invocant node. This is effectively equivalent to node.next_sibling.prefix N�)r6�prefix)rZnext_sibr r r� get_suffix�szBase.get_suffix�rcCst|�jd�S)N�ascii)�str�encode)rr r r�__str__�szBase.__str__)r=r)�__name__� __module__�__qualname__�__doc__rr"r#r0Zwas_checkedrr�__hash__rrrrr+r/r&r3�propertyr6r7r8r9r<�sys�version_inforAr r r rr s0 rc@s�eZdZdZddd�Zdd�Zdd�Zejdkr4eZ dd�Z d d�Zdd�Zdd�Z dd�Zdd�Zeee�Zdd�Zdd�Zdd�ZdS)�Nodez+Concrete implementation for interior nodes.NcCsR||_t|�|_x|jD] }||_qW|dk r4||_|rH|dd�|_nd|_dS)z� Initializer. Takes a type constant (a symbol number >= 256), a sequence of child nodes, and an optional context keyword argument. As a side effect, the parent pointers of the children are updated. N)rr!r#r"r;�fixers_applied)rrr#�contextr;rKr)r r r�__init__�s z Node.__init__cCsd|jjt|j�|jfS)z)Return a canonical string representation.z %s(%s, %r))rrBrrr#)rr r r�__repr__�sz Node.__repr__cCsdjtt|j��S)zk Return a pretty string representation. This reproduces the input source exactly. r:)�join�mapr?r#)rr r r�__unicode__�szNode.__unicode__r=rcCs|j|jf|j|jfkS)zCompare two nodes for equality.)rr#)rrr r rr�szNode._eqcCst|jdd�|jD�|jd�S)z$Return a cloned (deep) copy of self.cSsg|]}|j��qSr )r)�.0r)r r r� <listcomp>szNode.clone.<locals>.<listcomp>)rK)rJrr#rK)rr r rrsz Node.cloneccs(x|jD]}|j�EdHqW|VdS)z*Return a post-order iterator for the tree.N)r#r)rr5r r rrszNode.post_orderccs(|Vx|jD]}|j�EdHqWdS)z)Return a pre-order iterator for the tree.N)r#r)rr5r r rr szNode.pre_ordercCs|js dS|jdjS)zO The whitespace and comments preceding this node in the input. r:r)r#r;)rr r r�_prefix_getterszNode._prefix_gettercCs|jr||jd_dS)Nr)r#r;)rr;r r r�_prefix_setterszNode._prefix_settercCs(||_d|j|_||j|<|j�dS)z� Equivalent to 'node.children[i] = child'. This method also sets the child's parent attribute appropriately. N)r"r#r&)rr2r5r r r� set_child!s zNode.set_childcCs ||_|jj||�|j�dS)z� Equivalent to 'node.children.insert(i, child)'. This method also sets the child's parent attribute appropriately. N)r"r#�insertr&)rr2r5r r r�insert_child+szNode.insert_childcCs||_|jj|�|j�dS)z� Equivalent to 'node.children.append(child)'. This method also sets the child's parent attribute appropriately. N)r"r#r%r&)rr5r r r�append_child4szNode.append_child)NNN)r=r)rBrCrDrErMrNrQrHrIrArrrrrTrUrGr;rVrXrYr r r rrJ�s$ rJc@s�eZdZdZdZdZdZddgfdd�Zdd�Zd d �Z e jdkrFe Zdd �Z dd�Zdd�Zdd�Zdd�Zdd�Zdd�Zeee�ZdS)r,z'Concrete implementation for leaf nodes.r:rNcCsF|dk r|\|_\|_|_||_||_|dk r4||_|dd�|_dS)z� Initializer. Takes a type constant (a token number < 256), a string value, and an optional context keyword argument. N)�_prefixr-�columnr�valuerK)rrr\rLr;rKr r rrMGsz Leaf.__init__cCsd|jj|j|jfS)z)Return a canonical string representation.z %s(%r, %r))rrBrr\)rr r rrNZsz Leaf.__repr__cCs|jt|j�S)zk Return a pretty string representation. This reproduces the input source exactly. 
)r;r?r\)rr r rrQ`szLeaf.__unicode__r=cCs|j|jf|j|jfkS)zCompare two nodes for equality.)rr\)rrr r rrkszLeaf._eqcCs$t|j|j|j|j|jff|jd�S)z$Return a cloned (deep) copy of self.)rK)r,rr\r;r-r[rK)rr r rros z Leaf.cloneccs |VdS)Nr )rr r rr8uszLeaf.leavesccs |VdS)z*Return a post-order iterator for the tree.Nr )rr r rrxszLeaf.post_orderccs |VdS)z)Return a pre-order iterator for the tree.Nr )rr r rr|szLeaf.pre_ordercCs|jS)zP The whitespace and comments preceding this token in the input. )rZ)rr r rrT�szLeaf._prefix_gettercCs|j�||_dS)N)r&rZ)rr;r r rrU�szLeaf._prefix_setter)r=r)rBrCrDrErZr-r[rMrNrQrHrIrArrr8rrrTrUrGr;r r r rr,>s& r,cCsN|\}}}}|s||jkr<t|�dkr.|dSt|||d�St|||d�SdS)z� Convert raw node information to a Node or Leaf instance. This is passed to the parser driver which calls it whenever a reduction of a grammar rule produces a new complete node, so that the tree is build strictly bottom-up. rr)rLN)Z number2symbol�lenrJr,)ZgrZraw_noderr\rLr#r r r�convert�sr^c@sPeZdZdZdZdZdZdd�Zdd�Zdd�Z dd d �Z ddd�Zd d�ZdS)�BasePatterna� A pattern is a tree matching pattern. It looks for a specific node type (token or symbol), and optionally for a specific content. This is an abstract base class. There are three concrete subclasses: - LeafPattern matches a single leaf node; - NodePattern matches a single node (usually non-leaf); - WildcardPattern matches a sequence of nodes of variable length. NcOs tj|�S)z>Constructor that prevents BasePattern from being instantiated.)rr)rrrr r rr�szBasePattern.__new__cCsLt|j�|j|jg}x|r.|ddkr.|d=qWd|jjdjtt|��fS)Nrz%s(%s)z, ���r`) rr�contentrrrBrOrP�repr)rrr r rrN�s zBasePattern.__repr__cCs|S)z� A subclass can define this as a hook for optimizations. Returns either self or another node with the same effect. r )rr r r�optimize�szBasePattern.optimizecCsn|jdk r|j|jkrdS|jdk rRd}|dk r4i}|j||�sDdS|rR|j|�|dk rj|jrj|||j<dS)a# Does this pattern exactly match a node? Returns True if it matches, False if not. If results is not None, it must be a dict which will be updated with the nodes matching named subpatterns. Default implementation for non-wildcard patterns. NFT)rra� _submatch�updater)rr.�results�rr r r�match�s zBasePattern.matchcCs t|�dkrdS|j|d|�S)z� Does this pattern exactly match a sequence of nodes? Default implementation for non-wildcard patterns. rFr)r]rh)r�nodesrfr r r� match_seq�szBasePattern.match_seqccs&i}|r"|j|d|�r"d|fVdS)z} Generator yielding all matches for this pattern. Default implementation for non-wildcard patterns. rrN)rh)rrirgr r r�generate_matches�szBasePattern.generate_matches)N)N) rBrCrDrErrarrrNrcrhrjrkr r r rr_�s r_c@s*eZdZddd�Zd dd�Zd dd�ZdS)�LeafPatternNcCs&|dk r|dk r||_||_||_dS)ap Initializer. Takes optional type, content, and name. The type, if given must be a token type (< 256). If not given, this matches any *leaf* node; the content may still be required. The content, if given, must be a string. If a name is given, the matching node is stored in the results dict under that key. N)rrar)rrrarr r rrM�s zLeafPattern.__init__cCst|t�sdStj|||�S)z*Override match() to insist on a leaf node.F)r r,r_rh)rr.rfr r rrhs zLeafPattern.matchcCs|j|jkS)a� Match the pattern's content to the node's children. This assumes the node type matches and self.content is not None. Returns True if it matches, False if not. If results is not None, it must be a dict which will be updated with the nodes matching named subpatterns. When returning False, the results dict may still be updated. 
)rar\)rr.rfr r rrds zLeafPattern._submatch)NNN)N)N)rBrCrDrMrhrdr r r rrl�s rlc@s$eZdZdZddd�Zddd�ZdS) �NodePatternFNcCsT|dk r|dk r>t|�}x$t|�D]\}}t|t�r"d|_q"W||_||_||_dS)ad Initializer. Takes optional type, content, and name. The type, if given, must be a symbol type (>= 256). If the type is None this matches *any* single node (leaf or not), except if content is not None, in which it only matches non-leaf nodes that also match the content pattern. The content, if not None, must be a sequence of Patterns that must match the node's children exactly. If the content is given, the type must not be None. If a name is given, the matching node is stored in the results dict under that key. NT)r!r1r �WildcardPattern� wildcardsrrar)rrrarr2�itemr r rrM%s zNodePattern.__init__cCs�|jrJx>t|j|j�D],\}}|t|j�kr|dk r>|j|�dSqWdSt|j�t|j�krbdSx*t|j|j�D]\}}|j||�srdSqrWdS)a� Match the pattern's content to the node's children. This assumes the node type matches and self.content is not None. Returns True if it matches, False if not. If results is not None, it must be a dict which will be updated with the nodes matching named subpatterns. When returning False, the results dict may still be updated. NTF)rorkrar#r]re�ziprh)rr.rf�crg� subpatternr5r r rrdBs zNodePattern._submatch)NNN)N)rBrCrDrorMrdr r r rrm!s rmc@s^eZdZdZddedfdd�Zdd�Zddd �Zdd d�Zdd �Z dd�Z dd�Zdd�ZdS)rna A wildcard pattern can match zero or more nodes. This has all the flexibility needed to implement patterns like: .* .+ .? .{m,n} (a b c | d e | f) (...)* (...)+ (...)? (...){m,n} except it always uses non-greedy matching. NrcCs@|dk r$ttt|��}x|D]}qW||_||_||_||_dS)a� Initializer. Args: content: optional sequence of subsequences of patterns; if absent, matches one node; if present, each subsequence is an alternative [*] min: optional minimum number of times to match, default 0 max: optional maximum number of times to match, default HUGE name: optional name assigned to this match [*] Thus, if content is [[a, b, c], [d, e], [f, g, h]] this is equivalent to (a b c | d e | f g h); if content is None, this is equivalent to '.' in regular expression terms. The min and max parameters work as follows: min=0, max=maxint: .* min=1, max=maxint: .+ min=0, max=1: .? min=1, max=1: . If content is not None, replace the dot with the parenthesized list of alternatives, e.g. (a b c | d e | f g h)* N)�tuplerPra�min�maxr)rrarurvr�altr r rrMls zWildcardPattern.__init__cCs�d}|jdk r<t|j�dkr<t|jd�dkr<|jdd}|jdkr�|jdkr�|jdkrft|jd�S|dk r�|j|jkr�|j�S|jdkr�t|t�r�|jdkr�|j|jkr�t|j|j|j|j|j|j�S|S)z+Optimize certain stacked wildcard patterns.Nrr)r) rar]rurvrmrrcr rn)rrsr r rrc�s zWildcardPattern.optimizecCs|j|g|�S)z'Does this pattern exactly match a node?)rj)rr.rfr r rrh�szWildcardPattern.matchcCsRxL|j|�D]>\}}|t|�kr|dk rF|j|�|jrFt|�||j<dSqWdS)z4Does this pattern exactly match a sequence of nodes?NTF)rkr]rerr!)rrirfrrrgr r rrj�s zWildcardPattern.match_seqccs:|jdkrXxJt|jdtt|�|j��D]*}i}|jrH|d|�||j<||fVq(Wn�|jdkrp|j|�Vn�ttd�r�tj }t �t_ z�y@x:|j|d�D]*\}}|jr�|d|�||j<||fVq�WWnRtk �rx:|j |�D],\}}|j�r |d|�||j<||fVq�WYnXWdttd��r4|t_ XdS)a" Generator yielding matches for a sequence of nodes. Args: nodes: sequence of nodes Yields: (count, results) tuples where: count: the match comprises nodes[:count]; results: dict containing named submatches. NrZ bare_name�getrefcountr)ra�rangerur]rvr�_bare_name_matches�hasattrrH�stderrr�_recursive_matches�RuntimeError�_iterative_matches)rri�countrgZsave_stderrr r rrk�s. 
" z WildcardPattern.generate_matchesccst|�}d|jkrdifVg}x>|jD]4}x.t||�D] \}}||fV|j||f�q8Wq(Wx�|�rg}x�|D]�\}} ||krr||jkrrxn|jD]d}x^t|||d��D]H\} }| dkr�i}|j| �|j|�|| |fV|j|| |f�q�Wq�WqrW|}qbWdS)z(Helper to iteratively yield the matches.rN)r]rurarkr%rvre)rriZnodelenrfrwrrrgZnew_results�c0�r0�c1�r1r r rr�s* z"WildcardPattern._iterative_matchescCsxd}i}d}t|�}xH|r\||kr\d}x0|jD]&}|dj|||�r0|d7}d}Pq0WqW|d|�||j<||fS)z(Special optimized matcher for bare_name.rFTrN)r]rarhr)rrir�rg�donervZleafr r rrz�s z"WildcardPattern._bare_name_matchesc cs�||jkrdifV||jkr�xr|jD]h}xbt||�D]T\}}xJ|j||d�|d�D].\}}i}|j|�|j|�|||fVqXWq6Wq&WdS)z(Helper to recursively yield the matches.rNr)rurvrarkr}re) rrir�rwr�r�r�r�rgr r rr}s " z"WildcardPattern._recursive_matches)N)N) rBrCrDrE�HUGErMrcrhrjrkrrzr}r r r rrn^s# -rnc@s.eZdZd dd�Zdd�Zdd�Zdd �ZdS)�NegatedPatternNcCs|dk r||_dS)a Initializer. The argument is either a pattern or None. If it is None, this only matches an empty sequence (effectively '$' in regex lingo). If it is not None, this matches whenever the argument pattern doesn't have any matches. N)ra)rrar r rrMs zNegatedPattern.__init__cCsdS)NFr )rr.r r rrh)szNegatedPattern.matchcCst|�dkS)Nr)r])rrir r rrj-szNegatedPattern.match_seqccsL|jdkr"t|�dkrHdifVn&x|jj|�D] \}}dSWdifVdS)Nr)rar]rk)rrirrrgr r rrk1s zNegatedPattern.generate_matches)N)rBrCrDrMrhrjrkr r r rr�s r�c cs�|sdifVn�|d|dd�}}xl|j|�D]^\}}|sJ||fVq2xDt|||d��D].\}}i}|j|�|j|�|||fVq^Wq2WdS)aR Generator yielding matches for a sequence of patterns and nodes. Args: patterns: a sequence of patterns nodes: a sequence of nodes Yields: (count, results) tuples where: count: the entire sequence of patterns matches nodes[:count]; results: dict containing named submatches. rrN)rkre) Zpatternsri�p�restr�r�r�r�rgr r rrk=s rk)rE� __author__rH�warnings�iorr�rrrrrJr,r^r_rlrmrnr�rkr r r r�<module>s& 1nNV,==#PK{��\�sdr0b0b!__pycache__/pytree.cpython-36.pycnu�[���3 \�m�@s�dZdZddlZddlZddlmZdZiadd�ZGdd �d e �Z Gd d�de �ZGdd �d e �Zdd�Z Gdd�de �ZGdd�de�ZGdd�de�ZGdd�de�ZGdd�de�Zdd�ZdS)z� Python parse tree definitions. This is a very concrete parse tree; we need to keep every token and even the comments and whitespace between tokens. There's also a pattern matching implementation here. z#Guido van Rossum <guido@python.org>�N)�StringIOi���cCsHts<ddlm}x*|jj�D]\}}t|�tkr|t|<qWtj||�S)N�)�python_symbols)�_type_reprsZpygramr�__dict__�items�type�int� setdefault)Ztype_numr�name�val�r �&/usr/lib64/python3.6/lib2to3/pytree.py� type_reprsrc@s�eZdZdZdZdZfZdZdZdd�Z dd�Z dZdd �Zd d�Z dd �Zdd�Zdd�Zdd�Zdd�Zdd�Zedd��Zedd��Zdd�Zdd�Zd d!�Zejd&kr�d$d%�ZdS)'�Basez� Abstract base class for Node and Leaf. This provides some default functionality and boilerplate using the template pattern. A node may be a subnode of at most one parent. NFcOs|tk std��tj|�S)z7Constructor that prevents Base from being instantiated.zCannot instantiate Base)r�AssertionError�object�__new__)�cls�args�kwdsr r rr2szBase.__new__cCs|j|jk rtS|j|�S)zW Compare two nodes for equality. This calls the method _eq(). )� __class__�NotImplemented�_eq)�self�otherr r r�__eq__7szBase.__eq__cCst�dS)a_ Compare two nodes for equality. This is called by __eq__ and __ne__. It is only called if the two nodes have the same type. This must be implemented by the concrete subclass. Nodes should be considered equal if they have the same structure, ignoring the prefix string and other context information. N)�NotImplementedError)rrr r rrCs zBase._eqcCst�dS)zr Return a cloned (deep) copy of self. This must be implemented by the concrete subclass. 
N)r)rr r r�cloneNsz Base.clonecCst�dS)zx Return a post-order iterator for the tree. This must be implemented by the concrete subclass. N)r)rr r r� post_orderVszBase.post_ordercCst�dS)zw Return a pre-order iterator for the tree. This must be implemented by the concrete subclass. N)r)rr r r� pre_order^szBase.pre_ordercCs�|jdk stt|���|dk s"t�t|t�s2|g}g}d}xR|jjD]F}||kr�|sht|jj||f��|dk rz|j|�d}qD|j|�qDW|s�t|j||f��|jj�||j_x|D]}|j|_q�Wd|_dS)z/Replace this node with a new one in the parent.NFT) �parentr�str� isinstance�list�children�extend�append�changed)r�newZ l_children�found�ch�xr r r�replacefs& zBase.replacecCs.|}x"t|t�s&|jsdS|jd}qW|jS)z9Return the line number which generated the invocant node.Nr)r#�Leafr%�lineno)r�noder r r� get_lineno}szBase.get_linenocCs|jr|jj�d|_dS)NT)r!r(�was_changed)rr r rr(�s zBase.changedcCsJ|jrFx>t|jj�D].\}}||kr|jj�|jj|=d|_|SqWdS)z� Remove the node from the tree. Returns the position of the node in its parent's children before it was removed. N)r!� enumerater%r()r�ir0r r r�remove�s zBase.removecCsZ|jdkrdSxFt|jj�D]6\}}||kry|jj|dStk rPdSXqWdS)z� The node immediately following the invocant in their parent's children list. If the invocant does not have a next sibling, it is None Nr)r!r3r%� IndexError)rr4�childr r r�next_sibling�s zBase.next_siblingcCsP|jdkrdSx<t|jj�D],\}}||kr|dkr8dS|jj|dSqWdS)z� The node immediately preceding the invocant in their parent's children list. If the invocant does not have a previous sibling, it is None. Nrr)r!r3r%)rr4r7r r r�prev_sibling�s zBase.prev_siblingccs"x|jD]}|j�EdHqWdS)N)r%�leaves)rr7r r rr:�szBase.leavescCs|jdkrdSd|jj�S)Nrr)r!�depth)rr r rr;�s z Base.depthcCs|j}|dkrdS|jS)z� Return the string immediately following the invocant node. This is effectively equivalent to node.next_sibling.prefix N�)r8�prefix)rZnext_sibr r r� get_suffix�szBase.get_suffix�rcCst|�jd�S)N�ascii)r"�encode)rr r r�__str__�szBase.__str__)r?r)�__name__� __module__�__qualname__�__doc__rr!r%r2Zwas_checkedrr�__hash__rrrr r-r1r(r5�propertyr8r9r:r;r>�sys�version_inforBr r r rr s0 rc@s�eZdZdZddd�Zdd�Zdd�Zejdkr4eZ dd�Z d d�Zdd�Zdd�Z dd�Zdd�Zeee�Zdd�Zdd�Zdd�ZdS)�Nodez+Concrete implementation for interior nodes.NcCsx|dkst|��||_t|�|_x*|jD] }|jdksBtt|���||_q(W|dk rZ||_|rn|dd�|_nd|_dS)z� Initializer. Takes a type constant (a symbol number >= 256), a sequence of child nodes, and an optional context keyword argument. As a side effect, the parent pointers of the children are updated. �N)rrr$r%r!�reprr=�fixers_applied)rrr%�contextr=rNr+r r r�__init__�s z Node.__init__cCsd|jjt|j�|jfS)z)Return a canonical string representation.z %s(%s, %r))rrCrrr%)rr r r�__repr__�sz Node.__repr__cCsdjtt|j��S)zk Return a pretty string representation. This reproduces the input source exactly. r<)�join�mapr"r%)rr r r�__unicode__�szNode.__unicode__r?rcCs|j|jf|j|jfkS)zCompare two nodes for equality.)rr%)rrr r rr�szNode._eqcCst|jdd�|jD�|jd�S)z$Return a cloned (deep) copy of self.cSsg|]}|j��qSr )r)�.0r+r r r� <listcomp>szNode.clone.<locals>.<listcomp>)rN)rKrr%rN)rr r rrsz Node.cloneccs(x|jD]}|j�EdHqW|VdS)z*Return a post-order iterator for the tree.N)r%r)rr7r r rrszNode.post_orderccs(|Vx|jD]}|j�EdHqWdS)z)Return a pre-order iterator for the tree.N)r%r )rr7r r rr szNode.pre_ordercCs|js dS|jdjS)zO The whitespace and comments preceding this node in the input. r<r)r%r=)rr r r�_prefix_getterszNode._prefix_gettercCs|jr||jd_dS)Nr)r%r=)rr=r r r�_prefix_setterszNode._prefix_settercCs(||_d|j|_||j|<|j�dS)z� Equivalent to 'node.children[i] = child'. This method also sets the child's parent attribute appropriately. 
N)r!r%r()rr4r7r r r� set_child!s zNode.set_childcCs ||_|jj||�|j�dS)z� Equivalent to 'node.children.insert(i, child)'. This method also sets the child's parent attribute appropriately. N)r!r%�insertr()rr4r7r r r�insert_child+szNode.insert_childcCs||_|jj|�|j�dS)z� Equivalent to 'node.children.append(child)'. This method also sets the child's parent attribute appropriately. N)r!r%r'r()rr7r r r�append_child4szNode.append_child)NNN)r?r)rCrDrErFrPrQrTrIrJrBrrrr rWrXrHr=rYr[r\r r r rrK�s$ rKc@s�eZdZdZdZdZdZddgfdd�Zdd�Zd d �Z e jdkrFe Zdd �Z dd�Zdd�Zdd�Zdd�Zdd�Zdd�Zeee�ZdS)r.z'Concrete implementation for leaf nodes.r<rNcCsfd|kodkns t|��|dk r:|\|_\|_|_||_||_|dk rT||_|dd�|_dS)z� Initializer. Takes a type constant (a token number < 256), a string value, and an optional context keyword argument. rrLN)r�_prefixr/�columnr�valuerN)rrr_rOr=rNr r rrPGs z Leaf.__init__cCsd|jj|j|jfS)z)Return a canonical string representation.z %s(%r, %r))rrCrr_)rr r rrQZsz Leaf.__repr__cCs|jt|j�S)zk Return a pretty string representation. This reproduces the input source exactly. )r=r"r_)rr r rrT`szLeaf.__unicode__r?cCs|j|jf|j|jfkS)zCompare two nodes for equality.)rr_)rrr r rrkszLeaf._eqcCs$t|j|j|j|j|jff|jd�S)z$Return a cloned (deep) copy of self.)rN)r.rr_r=r/r^rN)rr r rros z Leaf.cloneccs |VdS)Nr )rr r rr:uszLeaf.leavesccs |VdS)z*Return a post-order iterator for the tree.Nr )rr r rrxszLeaf.post_orderccs |VdS)z)Return a pre-order iterator for the tree.Nr )rr r rr |szLeaf.pre_ordercCs|jS)zP The whitespace and comments preceding this token in the input. )r])rr r rrW�szLeaf._prefix_gettercCs|j�||_dS)N)r(r])rr=r r rrX�szLeaf._prefix_setter)r?r)rCrDrErFr]r/r^rPrQrTrIrJrBrrr:rr rWrXrHr=r r r rr.>s& r.cCsN|\}}}}|s||jkr<t|�dkr.|dSt|||d�St|||d�SdS)z� Convert raw node information to a Node or Leaf instance. This is passed to the parser driver which calls it whenever a reduction of a grammar rule produces a new complete node, so that the tree is build strictly bottom-up. rr)rON)Z number2symbol�lenrKr.)ZgrZraw_noderr_rOr%r r r�convert�srac@sPeZdZdZdZdZdZdd�Zdd�Zdd�Z dd d �Z ddd�Zd d�ZdS)�BasePatterna� A pattern is a tree matching pattern. It looks for a specific node type (token or symbol), and optionally for a specific content. This is an abstract base class. There are three concrete subclasses: - LeafPattern matches a single leaf node; - NodePattern matches a single node (usually non-leaf); - WildcardPattern matches a sequence of nodes of variable length. NcOs|tk std��tj|�S)z>Constructor that prevents BasePattern from being instantiated.zCannot instantiate BasePattern)rbrrr)rrrr r rr�szBasePattern.__new__cCsLt|j�|j|jg}x|r.|ddkr.|d=qWd|jjdjtt|��fS)Nrz%s(%s)z, ���rc) rr�contentrrrCrRrSrM)rrr r rrQ�s zBasePattern.__repr__cCs|S)z� A subclass can define this as a hook for optimizations. Returns either self or another node with the same effect. r )rr r r�optimize�szBasePattern.optimizecCsn|jdk r|j|jkrdS|jdk rRd}|dk r4i}|j||�sDdS|rR|j|�|dk rj|jrj|||j<dS)a# Does this pattern exactly match a node? Returns True if it matches, False if not. If results is not None, it must be a dict which will be updated with the nodes matching named subpatterns. Default implementation for non-wildcard patterns. NFT)rrd� _submatch�updater)rr0�results�rr r r�match�s zBasePattern.matchcCs t|�dkrdS|j|d|�S)z� Does this pattern exactly match a sequence of nodes? Default implementation for non-wildcard patterns. rFr)r`rj)r�nodesrhr r r� match_seq�szBasePattern.match_seqccs&i}|r"|j|d|�r"d|fVdS)z} Generator yielding all matches for this pattern. 
Default implementation for non-wildcard patterns. rrN)rj)rrkrir r r�generate_matches�szBasePattern.generate_matches)N)N) rCrDrErFrrdrrrQrerjrlrmr r r rrb�s rbc@s*eZdZddd�Zd dd�Zd dd�ZdS)�LeafPatternNcCs\|dk r(d|kodkns(t|��|dk rFt|t�sFtt|���||_||_||_dS)ap Initializer. Takes optional type, content, and name. The type, if given must be a token type (< 256). If not given, this matches any *leaf* node; the content may still be required. The content, if given, must be a string. If a name is given, the matching node is stored in the results dict under that key. NrrL)rr#r"rMrrdr)rrrdrr r rrP�s zLeafPattern.__init__cCst|t�sdStj|||�S)z*Override match() to insist on a leaf node.F)r#r.rbrj)rr0rhr r rrjs zLeafPattern.matchcCs|j|jkS)a� Match the pattern's content to the node's children. This assumes the node type matches and self.content is not None. Returns True if it matches, False if not. If results is not None, it must be a dict which will be updated with the nodes matching named subpatterns. When returning False, the results dict may still be updated. )rdr_)rr0rhr r rrfs zLeafPattern._submatch)NNN)N)N)rCrDrErPrjrfr r r rrn�s rnc@s$eZdZdZddd�Zddd�ZdS) �NodePatternFNcCs�|dk r|dkst|��|dk r|t|t�s8tt|���t|�}x:t|�D].\}}t|t�sht||f��t|t�rJd|_qJW||_ ||_ ||_dS)ad Initializer. Takes optional type, content, and name. The type, if given, must be a symbol type (>= 256). If the type is None this matches *any* single node (leaf or not), except if content is not None, in which it only matches non-leaf nodes that also match the content pattern. The content, if not None, must be a sequence of Patterns that must match the node's children exactly. If the content is given, the type must not be None. If a name is given, the matching node is stored in the results dict under that key. NrLT)rr#r"rMr$r3rb�WildcardPattern� wildcardsrrdr)rrrdrr4�itemr r rrP%s zNodePattern.__init__cCs�|jrJx>t|j|j�D],\}}|t|j�kr|dk r>|j|�dSqWdSt|j�t|j�krbdSx*t|j|j�D]\}}|j||�srdSqrWdS)a� Match the pattern's content to the node's children. This assumes the node type matches and self.content is not None. Returns True if it matches, False if not. If results is not None, it must be a dict which will be updated with the nodes matching named subpatterns. When returning False, the results dict may still be updated. NTF)rqrmrdr%r`rg�ziprj)rr0rh�cri� subpatternr7r r rrfBs zNodePattern._submatch)NNN)N)rCrDrErqrPrfr r r rro!s roc@s^eZdZdZddedfdd�Zdd�Zddd �Zdd d�Zdd �Z dd�Z dd�Zdd�ZdS)rpa A wildcard pattern can match zero or more nodes. This has all the flexibility needed to implement patterns like: .* .+ .? .{m,n} (a b c | d e | f) (...)* (...)+ (...)? (...){m,n} except it always uses non-greedy matching. NrcCs�d|ko|kotkns.t||f��|dk rzttt|��}t|�sXtt|���x |D]}t|�s^tt|���q^W||_||_||_||_ dS)a� Initializer. Args: content: optional sequence of subsequences of patterns; if absent, matches one node; if present, each subsequence is an alternative [*] min: optional minimum number of times to match, default 0 max: optional maximum number of times to match, default HUGE name: optional name assigned to this match [*] Thus, if content is [[a, b, c], [d, e], [f, g, h]] this is equivalent to (a b c | d e | f g h); if content is None, this is equivalent to '.' in regular expression terms. The min and max parameters work as follows: min=0, max=maxint: .* min=1, max=maxint: .+ min=0, max=1: .? min=1, max=1: . If content is not None, replace the dot with the parenthesized list of alternatives, e.g. (a b c | d e | f g h)* rN) �HUGEr�tuplerSr`rMrd�min�maxr)rrdrxryr�altr r rrPls. 
zWildcardPattern.__init__cCs�d}|jdk r<t|j�dkr<t|jd�dkr<|jdd}|jdkr�|jdkr�|jdkrft|jd�S|dk r�|j|jkr�|j�S|jdkr�t|t�r�|jdkr�|j|jkr�t|j|j|j|j|j|j�S|S)z+Optimize certain stacked wildcard patterns.Nrr)r) rdr`rxryrorrer#rp)rrur r rre�s zWildcardPattern.optimizecCs|j|g|�S)z'Does this pattern exactly match a node?)rl)rr0rhr r rrj�szWildcardPattern.matchcCsRxL|j|�D]>\}}|t|�kr|dk rF|j|�|jrFt|�||j<dSqWdS)z4Does this pattern exactly match a sequence of nodes?NTF)rmr`rgrr$)rrkrhrtrir r rrl�s zWildcardPattern.match_seqccs:|jdkrXxJt|jdtt|�|j��D]*}i}|jrH|d|�||j<||fVq(Wn�|jdkrp|j|�Vn�ttd�r�tj }t �t_ z�y@x:|j|d�D]*\}}|jr�|d|�||j<||fVq�WWnRtk �rx:|j |�D],\}}|j�r |d|�||j<||fVq�WYnXWdttd��r4|t_ XdS)a" Generator yielding matches for a sequence of nodes. Args: nodes: sequence of nodes Yields: (count, results) tuples where: count: the match comprises nodes[:count]; results: dict containing named submatches. NrZ bare_name�getrefcountr)rd�rangerxr`ryr�_bare_name_matches�hasattrrI�stderrr�_recursive_matches�RuntimeError�_iterative_matches)rrk�countriZsave_stderrr r rrm�s. " z WildcardPattern.generate_matchesccst|�}d|jkrdifVg}x>|jD]4}x.t||�D] \}}||fV|j||f�q8Wq(Wx�|�rg}x�|D]�\}} ||krr||jkrrxn|jD]d}x^t|||d��D]H\} }| dkr�i}|j| �|j|�|| |fV|j|| |f�q�Wq�WqrW|}qbWdS)z(Helper to iteratively yield the matches.rN)r`rxrdrmr'ryrg)rrkZnodelenrhrzrtriZnew_results�c0�r0�c1�r1r r rr��s* z"WildcardPattern._iterative_matchescCsxd}i}d}t|�}xH|r\||kr\d}x0|jD]&}|dj|||�r0|d7}d}Pq0WqW|d|�||j<||fS)z(Special optimized matcher for bare_name.rFTrN)r`rdrjr)rrkr�ri�doneryZleafr r rr}�s z"WildcardPattern._bare_name_matchesc cs�|jdk st�||jkr"difV||jkr�xr|jD]h}xbt||�D]T\}}xJ|j||d�|d�D].\}}i}|j|�|j|�|||fVqfWqDWq4WdS)z(Helper to recursively yield the matches.Nrr)rdrrxryrmr�rg) rrkr�rzr�r�r�r�rir r rr�s " z"WildcardPattern._recursive_matches)N)N) rCrDrErFrvrPrerjrlrmr�r}r�r r r rrp^s# -rpc@s.eZdZd dd�Zdd�Zdd�Zdd �ZdS)�NegatedPatternNcCs(|dk rt|t�stt|���||_dS)a Initializer. The argument is either a pattern or None. If it is None, this only matches an empty sequence (effectively '$' in regex lingo). If it is not None, this matches whenever the argument pattern doesn't have any matches. N)r#rbrrMrd)rrdr r rrPs zNegatedPattern.__init__cCsdS)NFr )rr0r r rrj)szNegatedPattern.matchcCst|�dkS)Nr)r`)rrkr r rrl-szNegatedPattern.match_seqccsL|jdkr"t|�dkrHdifVn&x|jj|�D] \}}dSWdifVdS)Nr)rdr`rm)rrkrtrir r rrm1s zNegatedPattern.generate_matches)N)rCrDrErPrjrlrmr r r rr�s r�c cs�|sdifVn�|d|dd�}}xl|j|�D]^\}}|sJ||fVq2xDt|||d��D].\}}i}|j|�|j|�|||fVq^Wq2WdS)aR Generator yielding matches for a sequence of patterns and nodes. Args: patterns: a sequence of patterns nodes: a sequence of nodes Yields: (count, results) tuples where: count: the entire sequence of patterns matches nodes[:count]; results: dict containing named submatches. rrN)rmrg) Zpatternsrk�p�restr�r�r�r�rir r rrm=s rm)rF� __author__rI�warnings�iorrvrrrrrKr.rarbrnrorpr�rmr r r r�<module>s& 1nNV,==#PK{��\Z�ļ0fixes/__pycache__/fix_apply.cpython-36.opt-2.pycnu�[���3 \~ �@sNddlmZddlmZddlmZddlmZmZmZGdd�dej �Z dS)�)�pytree)�token)� fixer_base)�Call�Comma�parenthesizec@seZdZdZdZdd�ZdS)�FixApplyTa. 
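A small hedged sketch (same lib2to3 availability caveat) of the pytree layer
recovered above: Leaf wraps one token, Node wraps a grammar symbol plus its
children, and str() on either reproduces the source exactly, including each
node's prefix (the whitespace and comments preceding it).

    # Sketch only: build "x = 1" by hand and round-trip it back to text.
    from lib2to3 import pytree, pygram
    from lib2to3.pgen2 import token

    syms = pygram.python_symbols
    lhs = pytree.Leaf(token.NAME, "x")
    eq = pytree.Leaf(token.EQUAL, "=", prefix=" ")   # prefix = leading space
    rhs = pytree.Leaf(token.NUMBER, "1", prefix=" ")
    stmt = pytree.Node(syms.expr_stmt, [lhs, eq, rhs])
    assert str(stmt) == "x = 1"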
fixes/__pycache__/fix_apply.cpython-36.opt-2.pyc
fixes/__pycache__/fix_apply.cpython-36.pyc
    "Fixer for apply(). This converts apply(func, v, k) into (func)(*v, **k)."
fixes/__pycache__/fix_asserts.cpython-36.opt-1.pyc
fixes/__pycache__/fix_asserts.cpython-36.pyc
    "Fixer that replaces deprecated unittest method names" (assertEquals ->
    assertEqual, failUnless -> assertTrue, assertRegexpMatches -> assertRegex,
    and similar).
fixes/__pycache__/fix_basestring.cpython-36.pyc
    "Fixer for basestring -> str."
fixes/__pycache__/fix_buffer.cpython-36.opt-1.pyc
fixes/__pycache__/fix_buffer.cpython-36.pyc
    "Fixer that changes buffer(...) into memoryview(...)."
fixes/__pycache__/fix_dict.cpython-36.opt-2.pyc
fixes/__pycache__/fix_dict.cpython-36.pyc
    "Fixer for dict methods": d.keys() -> list(d.keys()), d.iterkeys() ->
    iter(d.keys()), d.viewkeys() -> d.keys(), etc., except in iterating
    contexts such as for...in, list(), and sorted(), where the wrapper is
    dropped.
fixes/__pycache__/fix_except.cpython-36.opt-1.pyc
fixes/__pycache__/fix_except.cpython-36.pyc
    "Fixer for except statements with named exceptions": "except E, T:" ->
    "except E as T:" when T is a name; otherwise a temporary name is
    introduced, because the target of an "except" clause must be a name.
fixes/__pycache__/fix_exec.cpython-36.opt-1.pyc
fixes/__pycache__/fix_exec.cpython-36.opt-2.pyc
fixes/__pycache__/fix_exec.cpython-36.pyc
    "Fixer for exec. This converts usages of the exec statement into calls to
    a built-in exec() function: exec code in ns1, ns2 -> exec(code, ns1, ns2)."
fixes/__pycache__/fix_execfile.cpython-36.opt-1.pyc
fixes/__pycache__/fix_execfile.cpython-36.opt-2.pyc
fixes/__pycache__/fix_execfile.cpython-36.pyc
    "Fixer for execfile. This converts usages of the execfile function into
    calls to the built-in exec() function."
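A short hedged usage sketch of how these fixers are driven in practice: the
refactoring entry point is lib2to3.refactor.RefactoringTool, which takes the
dotted names of stock fixer modules (the names below are the ones shipped with
CPython and recovered in this archive).

    # Sketch only: run two of the recovered fixers over a Python 2 snippet.
    from lib2to3.refactor import RefactoringTool

    rt = RefactoringTool(["lib2to3.fixes.fix_apply", "lib2to3.fixes.fix_exec"])
    old = "exec code in ns\napply(f, args)\n"
    print(rt.refactor_string(old, "<example>"))
    # Expected result, per the docstrings above:
    #   exec(code, ns)
    #   f(*args)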
�)� fixer_base) �Comma�Name�Call�LParen�RParen�Dot�Node�ArgList�String�symsc@seZdZdZdZdd�ZdS)�FixExecfileTz� power< 'execfile' trailer< '(' arglist< filename=any [',' globals=any [',' locals=any ] ] > ')' > > | power< 'execfile' trailer< '(' filename=any ')' > > cCs&|d}|jd�}|jd�}|jdjdj�}t|j�t�tdd�g|d�}ttjt d�|g�}ttj t�t d �g�ttj t�t �g�g} |g| } |j�}d|_td d�}| t�|t�|g} tt d�| d�}|g}|dk r�|jt�|j�g�|dk �r|jt�|j�g�tt d �||jd�S)N�filename�globals�locals�z"rb"� )Zrparen�open�readz'exec'�compile��exec)�prefix���r)�getZchildrenZcloner rrr rZpowerrZtrailerrrrrr�extend)�selfZnodeZresultsrrrZexecfile_parenZ open_argsZ open_callrZ open_exprZfilename_argZexec_strZcompile_argsZcompile_call�args�r�2/usr/lib64/python3.6/lib2to3/fixes/fix_execfile.py� transforms* zFixExecfile.transformN)�__name__� __module__�__qualname__Z BM_compatibleZPATTERNr rrrrr sr N)�__doc__rrZ fixer_utilrrrrrrr r rrZBaseFixr rrrr�<module>s0PK{��\m2���3fixes/__pycache__/fix_execfile.cpython-36.opt-2.pycnu�[���3 \�@sRddlmZddlmZmZmZmZmZmZm Z m Z mZmZGdd�dej �ZdS)�)� fixer_base) �Comma�Name�Call�LParen�RParen�Dot�Node�ArgList�String�symsc@seZdZdZdZdd�ZdS)�FixExecfileTz� power< 'execfile' trailer< '(' arglist< filename=any [',' globals=any [',' locals=any ] ] > ')' > > | power< 'execfile' trailer< '(' filename=any ')' > > cCs&|d}|jd�}|jd�}|jdjdj�}t|j�t�tdd�g|d�}ttjt d�|g�}ttj t�t d �g�ttj t�t �g�g} |g| } |j�}d|_td d�}| t�|t�|g} tt d�| d�}|g}|dk r�|jt�|j�g�|dk �r|jt�|j�g�tt d �||jd�S)N�filename�globals�locals�z"rb"� )Zrparen�open�readz'exec'�compile��exec)�prefix���r)�getZchildrenZcloner rrr rZpowerrZtrailerrrrrr�extend)�selfZnodeZresultsrrrZexecfile_parenZ open_argsZ open_callrZ open_exprZfilename_argZexec_strZcompile_argsZcompile_call�args�r�2/usr/lib64/python3.6/lib2to3/fixes/fix_execfile.py� transforms* zFixExecfile.transformN)�__name__� __module__�__qualname__Z BM_compatibleZPATTERNr rrrrr sr N)rrZ fixer_utilrrrrrrr r rrZBaseFixr rrrr�<module> s0PK{��\E�3��-fixes/__pycache__/fix_execfile.cpython-36.pycnu�[���3 \�@sVdZddlmZddlmZmZmZmZmZm Z m Z mZmZm Z Gdd�dej�ZdS)zoFixer for execfile. This converts usages of the execfile function into calls to the built-in exec() function. �)� fixer_base) �Comma�Name�Call�LParen�RParen�Dot�Node�ArgList�String�symsc@seZdZdZdZdd�ZdS)�FixExecfileTz� power< 'execfile' trailer< '(' arglist< filename=any [',' globals=any [',' locals=any ] ] > ')' > > | power< 'execfile' trailer< '(' filename=any ')' > > cCs0|st�|d}|jd�}|jd�}|jdjdj�}t|j�t�tdd�g|d�}ttj t d�|g�}ttjt�t d �g�ttjt �t�g�g} |g| } |j�}d|_td d�}| t�|t�|g} tt d�| d�}|g}|dk �r�|jt�|j�g�|dk �r|jt�|j�g�tt d �||jd�S)N�filename�globals�locals�z"rb"� )Zrparen�open�readz'exec'�compile��exec)�prefix���r)�AssertionError�getZchildrenZcloner rrr rZpowerrZtrailerrrrrr�extend)�selfZnodeZresultsrrrZexecfile_parenZ open_argsZ open_callrZ open_exprZfilename_argZexec_strZcompile_argsZcompile_call�args�r�2/usr/lib64/python3.6/lib2to3/fixes/fix_execfile.py� transforms, zFixExecfile.transformN)�__name__� __module__�__qualname__Z BM_compatibleZPATTERNr!rrrr r sr N)�__doc__rrZ fixer_utilrrrrrrr r rrZBaseFixr rrrr �<module>s0PK{��\I��4��3fixes/__pycache__/fix_exitfunc.cpython-36.opt-1.pycnu�[���3 \� �@sJdZddlmZmZddlmZmZmZmZm Z m Z Gdd�dej�ZdS)z7 Convert use of sys.exitfunc to use the atexit module. �)�pytree� fixer_base)�Name�Attr�Call�Comma�Newline�symscs<eZdZdZdZdZ�fdd�Z�fdd�Zdd�Z�Z S) �FixExitfuncTa� ( sys_import=import_name<'import' ('sys' | dotted_as_names< (any ',')* 'sys' (',' any)* > ) > | expr_stmt< power< 'sys' trailer< '.' 
'exitfunc' > > '=' func=any > ) cstt|�j|�dS)N)�superr �__init__)�self�args)� __class__��2/usr/lib64/python3.6/lib2to3/fixes/fix_exitfunc.pyrszFixExitfunc.__init__cstt|�j||�d|_dS)N)rr � start_tree� sys_import)r Ztree�filename)rrrr!szFixExitfunc.start_treecCs&d|kr |jdkr|d|_dS|dj�}d|_tjtjttd�td���}t ||g|j�}|j |�|jdkr�|j|d�dS|jjd}|j tjkr�|jt��|jtdd��nj|jj}|jj|j�}|j} tjtjtd �tdd�g�} tjtj| g�}|j|dt��|j|d |�dS)Nr�func��atexit�registerzKCan't find sys import; Please add an atexit import at the top of your file.�� �import�)rZclone�prefixrZNoder Zpowerrrr�replaceZwarningZchildren�typeZdotted_as_namesZappend_childr�parent�indexZimport_nameZsimple_stmtZinsert_childr)r ZnodeZresultsrrZcall�namesZcontaining_stmtZpositionZstmt_containerZ new_import�newrrr� transform%s2 zFixExitfunc.transform) �__name__� __module__�__qualname__Zkeep_line_orderZ BM_compatibleZPATTERNrrr$� __classcell__rr)rrr sr N) �__doc__Zlib2to3rrZlib2to3.fixer_utilrrrrrr ZBaseFixr rrrr�<module>s PK{��\U���3fixes/__pycache__/fix_exitfunc.cpython-36.opt-2.pycnu�[���3 \� �@sFddlmZmZddlmZmZmZmZmZm Z Gdd�dej �ZdS)�)�pytree� fixer_base)�Name�Attr�Call�Comma�Newline�symscs<eZdZdZdZdZ�fdd�Z�fdd�Zdd�Z�Z S) �FixExitfuncTa� ( sys_import=import_name<'import' ('sys' | dotted_as_names< (any ',')* 'sys' (',' any)* > ) > | expr_stmt< power< 'sys' trailer< '.' 'exitfunc' > > '=' func=any > ) cstt|�j|�dS)N)�superr �__init__)�self�args)� __class__��2/usr/lib64/python3.6/lib2to3/fixes/fix_exitfunc.pyrszFixExitfunc.__init__cstt|�j||�d|_dS)N)rr � start_tree� sys_import)r Ztree�filename)rrrr!szFixExitfunc.start_treecCs&d|kr |jdkr|d|_dS|dj�}d|_tjtjttd�td���}t ||g|j�}|j |�|jdkr�|j|d�dS|jjd}|j tjkr�|jt��|jtdd��nj|jj}|jj|j�}|j} tjtjtd �tdd�g�} tjtj| g�}|j|dt��|j|d |�dS)Nr�func��atexit�registerzKCan't find sys import; Please add an atexit import at the top of your file.�� �import�)rZclone�prefixrZNoder Zpowerrrr�replaceZwarningZchildren�typeZdotted_as_namesZappend_childr�parent�indexZimport_nameZsimple_stmtZinsert_childr)r ZnodeZresultsrrZcall�namesZcontaining_stmtZpositionZstmt_containerZ new_import�newrrr� transform%s2 zFixExitfunc.transform) �__name__� __module__�__qualname__Zkeep_line_orderZ BM_compatibleZPATTERNrrr$� __classcell__rr)rrr sr N)Zlib2to3rrZlib2to3.fixer_utilrrrrrr ZBaseFixr rrrr�<module>s PK{��\I��4��-fixes/__pycache__/fix_exitfunc.cpython-36.pycnu�[���3 \� �@sJdZddlmZmZddlmZmZmZmZm Z m Z Gdd�dej�ZdS)z7 Convert use of sys.exitfunc to use the atexit module. �)�pytree� fixer_base)�Name�Attr�Call�Comma�Newline�symscs<eZdZdZdZdZ�fdd�Z�fdd�Zdd�Z�Z S) �FixExitfuncTa� ( sys_import=import_name<'import' ('sys' | dotted_as_names< (any ',')* 'sys' (',' any)* > ) > | expr_stmt< power< 'sys' trailer< '.' 
'exitfunc' > > '=' func=any > ) cstt|�j|�dS)N)�superr �__init__)�self�args)� __class__��2/usr/lib64/python3.6/lib2to3/fixes/fix_exitfunc.pyrszFixExitfunc.__init__cstt|�j||�d|_dS)N)rr � start_tree� sys_import)r Ztree�filename)rrrr!szFixExitfunc.start_treecCs&d|kr |jdkr|d|_dS|dj�}d|_tjtjttd�td���}t ||g|j�}|j |�|jdkr�|j|d�dS|jjd}|j tjkr�|jt��|jtdd��nj|jj}|jj|j�}|j} tjtjtd �tdd�g�} tjtj| g�}|j|dt��|j|d |�dS)Nr�func��atexit�registerzKCan't find sys import; Please add an atexit import at the top of your file.�� �import�)rZclone�prefixrZNoder Zpowerrrr�replaceZwarningZchildren�typeZdotted_as_namesZappend_childr�parent�indexZimport_nameZsimple_stmtZinsert_childr)r ZnodeZresultsrrZcall�namesZcontaining_stmtZpositionZstmt_containerZ new_import�newrrr� transform%s2 zFixExitfunc.transform) �__name__� __module__�__qualname__Zkeep_line_orderZ BM_compatibleZPATTERNrrr$� __classcell__rr)rrr sr N) �__doc__Zlib2to3rrZlib2to3.fixer_utilrrrrrr ZBaseFixr rrrr�<module>s PK{��\��G$ $ +fixes/__pycache__/fix_filter.cpython-36.pycnu�[���3 \[ �@sVdZddlmZddlmZddlmZddlm Z m Z mZmZGdd�dej �ZdS) a�Fixer that changes filter(F, X) into list(filter(F, X)). We avoid the transformation if the filter() call is directly contained in iter(<>), list(<>), tuple(<>), sorted(<>), ...join(<>), or for V in <>:. NOTE: This is still not correct if the original code was depending on filter(F, X) to return a string if X is a string and a tuple if X is a tuple. That would require type inference, which we don't do. Let Python 2.6 figure it out. �)� fixer_base)�Node)�python_symbols)�Name�ArgList�ListComp�in_special_contextc@s eZdZdZdZdZdd�ZdS)� FixFilterTaV filter_lambda=power< 'filter' trailer< '(' arglist< lambdef< 'lambda' (fp=NAME | vfpdef< '(' fp=NAME ')'> ) ':' xp=any > ',' it=any > ')' > [extra_trailers=trailer*] > | power< 'filter' trailer< '(' arglist< none='None' ',' seq=any > ')' > [extra_trailers=trailer*] > | power< 'filter' args=trailer< '(' [any] ')' > [extra_trailers=trailer*] > zfuture_builtins.filtercCs2|j|�rdSg}d|kr:x|dD]}|j|j��q$Wd|kr�t|jd�j�|jd�j�|jd�j�|jd�j��}ttj|g|dd�}n�d|kr�ttd �td �|d j�td ��}ttj|g|dd�}nTt |�r�dS|dj�}ttjtd�|gdd�}ttjtd �t |g�g|�}d|_|j|_|S)NZextra_trailersZ filter_lambda�fp�itZxp�)�prefixZnoneZ_f�seq�args�filter�list)Zshould_skip�appendZcloner�getr�symsZpowerrrrr )�selfZnodeZresultsZtrailers�t�newr�r�0/usr/lib64/python3.6/lib2to3/fixes/fix_filter.py� transform:s4 zFixFilter.transformN)�__name__� __module__�__qualname__Z BM_compatibleZPATTERNZskip_onrrrrrr sr N)�__doc__rrZpytreerZpygramrrZ fixer_utilrrrrZConditionalFixr rrrr�<module>s PK{��\ 'O!��.fixes/__pycache__/fix_funcattrs.cpython-36.pycnu�[���3 \��@s2dZddlmZddlmZGdd�dej�ZdS)z3Fix function attribute names (f.func_x -> f.__x__).�)� fixer_base)�Namec@seZdZdZdZdd�ZdS)�FixFuncattrsTz� power< any+ trailer< '.' attr=('func_closure' | 'func_doc' | 'func_globals' | 'func_name' | 'func_defaults' | 'func_code' | 'func_dict') > any* > cCs2|dd}|jtd|jdd�|jd��dS)N�attr�z__%s__�)�prefix)�replacer�valuer)�selfZnodeZresultsr�r�3/usr/lib64/python3.6/lib2to3/fixes/fix_funcattrs.py� transformszFixFuncattrs.transformN)�__name__� __module__�__qualname__Z BM_compatibleZPATTERNrrrrr r srN)�__doc__�rZ fixer_utilrZBaseFixrrrrr �<module>sPK{��\>���1fixes/__pycache__/fix_future.cpython-36.opt-1.pycnu�[���3 \#�@s2dZddlmZddlmZGdd�dej�ZdS)zVRemove __future__ imports from __future__ import foo is replaced with an empty line. 
fixes/__pycache__/fix_future.cpython-36.pyc   (same module and docstring as the
    opt-1 build)
fixes/__pycache__/fix_getcwdu.cpython-36.pyc
    "Fixer that changes os.getcwdu() to os.getcwd()."
fixes/__pycache__/fix_has_key.cpython-36.opt-2.pyc   (FixHasKey; docstrings stripped)
fixes/__pycache__/fix_has_key.cpython-36.pyc
    "Fixer for has_key(). Calls to .has_key() methods are expressed in terms of
    the 'in' operator: d.has_key(k) -> k in d. Caveats: the fixer changes any
    has_key() method call regardless of its class, and only *calls* to has_key()
    are converted, not bare references to the method."
fixes/__pycache__/fix_idioms.cpython-36.opt-1.pyc
    "Adjust some old Python 2 idioms to their modern counterparts: type(x) == T
    and type(x) is T -> isinstance(x, T) (negated forms become 'not isinstance');
    'while 1:' -> 'while True:'; and v = list(EXPR); v.sort(); foo(v) ->
    v = sorted(EXPR); foo(v)."
fixes/__pycache__/fix_idioms.cpython-36.opt-2.pyc   (FixIdioms; docstrings stripped)
fixes/__pycache__/fix_idioms.cpython-36.pyc   (same module and docstring as the
    opt-1 build)
fixes/__pycache__/fix_import.cpython-36.opt-1.pyc
    "Fixer for import statements. If spam is being imported from the local
    directory, 'from spam import eggs' becomes 'from .spam import eggs' and
    'import spam' becomes 'from . import spam'."
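The modern forms those fixers produce are ordinary Python 3; a tiny runnable
illustration (the values are made up):

    d = {"spam": 1}

    # fix_has_key:  d.has_key("spam")         ->  "spam" in d
    print("spam" in d)

    # fix_idioms:   type(x) == type("")       ->  isinstance(x, str)
    x = "eggs"
    print(isinstance(x, str))

    # fix_idioms:   v = list(seq); v.sort()   ->  v = sorted(seq)
    v = sorted([3, 1, 2])
    print(v)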
fixes/__pycache__/fix_import.cpython-36.opt-2.pyc   (FixImport; docstrings stripped)
fixes/__pycache__/fix_import.cpython-36.pyc   (same module and docstring as the
    opt-1 build)
fixes/__pycache__/fix_imports.cpython-36.opt-1.pyc
    "Fix incompatible imports and module references." The bytecode embeds the
    rename table used to rewrite Python 2 stdlib names, including: StringIO and
    cStringIO -> io, cPickle -> pickle, __builtin__ -> builtins, copy_reg ->
    copyreg, Queue -> queue, SocketServer -> socketserver, ConfigParser ->
    configparser, repr -> reprlib, Tkinter -> tkinter, _winreg -> winreg,
    thread -> _thread, httplib -> http.client, HTMLParser -> html.parser,
    urlparse -> urllib.parse, robotparser -> urllib.robotparser, plus the
    tkinter, dbm, xmlrpc and http submodule renames.
fixes/__pycache__/fix_imports.cpython-36.pyc   (same module and docstring as the
    opt-1 build)
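The renamed modules are all importable under their Python 3 names; a quick runnable
check (selection only):

    # Renames applied by fix_imports, spelled directly in Python 3:
    import io             # was: import StringIO
    import pickle         # was: import cPickle
    import configparser   # was: import ConfigParser

    buf = io.StringIO()
    buf.write("hello")
    print(buf.getvalue())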
fixes/__pycache__/fix_imports2.cpython-36.pyc
    "Fix incompatible imports and module references that must be fixed after
    fix_imports." (Maps whichdb and anydbm to dbm.)
fixes/__pycache__/fix_input.cpython-36.pyc
    "Fixer that changes input(...) into eval(input(...))."
fixes/__pycache__/fix_intern.cpython-36.opt-1.pyc
    "Fixer for intern(): intern(s) -> sys.intern(s)."
fixes/__pycache__/fix_intern.cpython-36.pyc   (same module and docstring as the
    opt-1 build)
fixes/__pycache__/fix_isinstance.cpython-36.pyc
    "Fixer that cleans up a tuple argument to isinstance after the tokens in it
    were fixed. This is mainly used to remove double occurrences of tokens as a
    leftover of the long -> int / unicode -> str conversion, e.g.
    isinstance(x, (int, long)) -> isinstance(x, (int, int)) -> isinstance(x, int)."
fixes/__pycache__/fix_itertools.cpython-36.pyc
    "Fixer for itertools.(imap|ifilter|izip) -> (map|filter|zip) and
    itertools.ifilterfalse -> itertools.filterfalse (bugs 2360-2363). Imports
    from itertools are fixed in fix_itertools_import.py. If itertools is imported
    as something else (ie: import itertools as it; it.izip(spam, eggs)) method
    calls will not get fixed."
fixes/__pycache__/fix_itertools_imports.cpython-36.opt-2.pyc   (FixItertoolsImports;
    docstrings stripped)
fixes/__pycache__/fix_itertools_imports.cpython-36.pyc
    "Fixer for imports of itertools.(imap|ifilter|izip|ifilterfalse)."
fixes/__pycache__/fix_long.cpython-36.opt-1.pyc
    "Fixer that turns 'long' into 'int' everywhere."
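In Python 3 the interning helper lives in sys, and the bare builtins already behave
like the old itertools variants; a small runnable sketch of both rewrites:

    import sys

    # fix_intern:     intern(s)                        ->  sys.intern(s)
    s = sys.intern("interned")

    # fix_itertools:  itertools.izip / imap / ifilter  ->  zip / map / filter,
    # which return lazy iterators in Python 3:
    pairs = zip([1, 2, 3], "abc")
    evens = filter(lambda n: n % 2 == 0, range(6))
    print(s, list(pairs), list(evens))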
fixes/__pycache__/fix_long.cpython-36.opt-2.pyc   (FixLong; docstrings stripped)
fixes/__pycache__/fix_long.cpython-36.pyc   (same module and docstring as the
    opt-1 build)
fixes/__pycache__/fix_map.cpython-36.opt-1.pyc
    "Fixer that changes map(F, ...) into list(map(F, ...)) unless there exists a
    'from future_builtins import map' statement in the top-level namespace. As a
    special case, map(None, X) is changed into list(X). The transformation is
    skipped when the map() call is already consumed by iter(), list(), tuple(),
    sorted(), ...join(), or a for loop. The fixer warns that map(None, ...) with
    multiple arguments cannot be converted, because map() now truncates to the
    shortest sequence."
fixes/__pycache__/fix_map.cpython-36.pyc   (same module and docstring as the
    opt-1 build)
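What that looks like on the Python 3 side, runnable as-is:

    xs = [1, 2, 3]

    # fix_map wraps the call because map() now returns an iterator:
    #     map(str, xs)   ->  list(map(str, xs))
    print(list(map(str, xs)))

    # and the old identity special case:
    #     map(None, xs)  ->  list(xs)
    print(list(xs))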
fixes/__pycache__/fix_metaclass.cpython-36.opt-2.pyc   (FixMetaclass; docstrings
    stripped)
fixes/__pycache__/fix_metaclass.cpython-36.pyc
    "Fixer for __metaclass__ = X -> (metaclass=X) methods. The various forms of
    classdef (inherits nothing, inherits once, inherits many) don't parse the
    same in the CST, so we look at ALL classes for a __metaclass__ and, if we
    find one, normalize the inherits to all be an arglist. For one-liner classes
    ('class X: pass') there is no indent/dedent, so we normalize those into
    having a suite. Moving the __metaclass__ into the classdef can also cause
    the class body to be empty, so there is some special casing for that as
    well. This fixer also tries very hard to keep original indenting and
    spacing in all those corner cases."
fixes/__pycache__/fix_methodattrs.cpython-36.opt-1.pyc
    "Fix bound method attributes (method.im_? -> method.__?__)." (im_func ->
    __func__, im_self -> __self__, im_class -> __self__.__class__.)
fixes/__pycache__/fix_methodattrs.cpython-36.opt-2.pyc   (FixMethodattrs;
    docstrings stripped)
fixes/__pycache__/fix_methodattrs.cpython-36.pyc   (same module and docstring as
    the opt-1 build)
fixes/__pycache__/fix_ne.cpython-36.pyc
    "Fixer that turns <> into !=."
fixes/__pycache__/fix_next.cpython-36.opt-2.pyc   (FixNext; docstrings stripped;
    warns that calls to builtin next() may be shadowed by a global binding)
fixes/__pycache__/fix_next.cpython-36.pyc
    "Fixer for it.next() -> next(it), per PEP 3114."
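The two headline rewrites in this group, in their Python 3 spelling; the class and
variable names here are invented for the example:

    class Meta(type):
        pass

    # fix_metaclass: a '__metaclass__ = Meta' line in the class body becomes
    # a keyword argument in the class header:
    class C(metaclass=Meta):
        pass

    # fix_next (PEP 3114):  it.next()  ->  next(it)
    it = iter([10, 20])
    print(next(it), type(C))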
fixes/__pycache__/fix_nonzero.cpython-36.opt-1.pyc
    "Fixer for __nonzero__ -> __bool__ methods."
fixes/__pycache__/fix_nonzero.cpython-36.opt-2.pyc   (FixNonzero; docstrings stripped)
fixes/__pycache__/fix_nonzero.cpython-36.pyc   (same module and docstring as the
    opt-1 build)
fixes/__pycache__/fix_numliterals.cpython-36.pyc
    "Fixer that turns 1L into 1, 0755 into 0o755."
fixes/__pycache__/fix_operator.cpython-36.opt-1.pyc
    "Fixer for operator functions: operator.isCallable(obj) -> hasattr(obj,
    '__call__'); operator.sequenceIncludes(obj) -> operator.contains(obj);
    operator.isSequenceType(obj) -> isinstance(obj, collections.Sequence);
    operator.isMappingType(obj) -> isinstance(obj, collections.Mapping);
    operator.isNumberType(obj) -> isinstance(obj, numbers.Number);
    operator.repeat(obj, n) -> operator.mul(obj, n); operator.irepeat(obj, n) ->
    operator.imul(obj, n)."
fixes/__pycache__/fix_operator.cpython-36.pyc   (same module and docstring as the
    opt-1 build)
fixes/__pycache__/fix_paren.cpython-36.pyc
    "Fixer that adds parentheses where they are required. This converts
    [x for x in 1, 2] to [x for x in (1, 2)]."
fixes/__pycache__/fix_print.cpython-36.opt-2.pyc   (FixPrint; docstrings stripped)
fixes/__pycache__/fix_print.cpython-36.pyc
    "Fixer for print. Change: 'print' into 'print()'; 'print ...' into
    'print(...)'; 'print ... ,' into 'print(..., end=\" \")'; 'print >>x, ...'
    into 'print(..., file=x)'. No changes are applied if print_function is
    imported from __future__."
fixes/__pycache__/fix_raise.cpython-36.opt-1.pyc
    "Fixer for 'raise E, V, T': raise -> raise; raise E -> raise E; raise E, V ->
    raise E(V); raise E, V, T -> raise E(V).with_traceback(T); raise E, None, T ->
    raise E.with_traceback(T); a string exception such as raise \"foo\", V, T
    produces a warning. Caveat: 'raise E, V' is incorrectly translated if V is an
    exception instance; the correct Python 3 idiom is 'raise E from V', but
    instance-hood can't be detected by syntax alone, so this isn't automated."
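The print rewrites in particular map one-to-one onto keyword arguments of the Python 3
print() function; runnable:

    import sys

    # print "a", "b"            ->  print("a", "b")
    print("a", "b")
    # print "x",                ->  print("x", end=" ")
    print("x", end=" ")
    print()
    # print >>sys.stderr, msg   ->  print(msg, file=sys.stderr)
    print("message", file=sys.stderr)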
fixes/__pycache__/fix_raise.cpython-36.opt-2.pyc
fixes/__pycache__/fix_raise.cpython-36.pyc
    Same FixRaise module in its -OO and unoptimized builds.

fixes/__pycache__/fix_raw_input.cpython-36.pyc
    FixRawInput — changes raw_input(...) into input(...).

fixes/__pycache__/fix_reduce.cpython-36.pyc
    FixReduce — fixer for reduce(): makes sure reduce() is imported from the
    functools module if reduce is used in that module.

fixes/__pycache__/fix_reload.cpython-36.opt-1.pyc
    FixReload — fixer for reload(): reload(s) -> imp.reload(s).
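fix_raw_input shows the smallest shape a pattern-based fixer takes: one pattern, one leaf replacement. Reconstructed from the visible constant table (a sketch matching the stock module, not byte-verified against this archive):

"""Fixer that changes raw_input(...) into input(...)."""
from lib2to3 import fixer_base
from lib2to3.fixer_util import Name

class FixRawInput(fixer_base.BaseFix):
    BM_compatible = True
    PATTERN = """
              power< name='raw_input' trailer< '(' [any] ')' > any* >
              """

    def transform(self, node, results):
        name = results["name"]
        # Swap the callee leaf, preserving whatever whitespace preceded it.
        name.replace(Name("input", prefix=name.prefix))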
fixes/__pycache__/fix_reload.cpython-36.pyc
    Same FixReload module, unoptimized build.

fixes/__pycache__/fix_renames.cpython-36.pyc
    FixRenames — fixes incompatible renames: sys.maxint -> sys.maxsize.

fixes/__pycache__/fix_repr.cpython-36.pyc
    FixRepr — transforms backtick repr syntax: `xyzzy` -> repr(xyzzy).

fixes/__pycache__/fix_set_literal.cpython-36.opt-1.pyc
    FixSetLiteral — optional fixer to transform set() calls to set literals.
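fix_repr is the same one-pattern shape with one wrinkle: a backtick expression that is itself a comma-separated list must be parenthesized before wrapping, so `a, b` becomes repr((a, b)) rather than repr(a, b). Sketch, same caveats as above:

"""Fixer that transforms `xyzzy` into repr(xyzzy)."""
from lib2to3 import fixer_base
from lib2to3.fixer_util import Call, Name, parenthesize

class FixRepr(fixer_base.BaseFix):
    BM_compatible = True
    PATTERN = """
              atom < '`' expr=any '`' >
              """

    def transform(self, node, results):
        expr = results["expr"].clone()
        if expr.type == self.syms.testlist1:
            expr = parenthesize(expr)   # `a, b` -> repr((a, b))
        return Call(Name("repr"), [expr], prefix=node.prefix)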
fixes/__pycache__/fix_set_literal.cpython-36.pyc
    Same FixSetLiteral module, unoptimized build; explicit (opt-in) fixer.

fixes/__pycache__/fix_standarderror.cpython-36.pyc
    FixStandarderror — StandardError -> Exception.

fixes/__pycache__/fix_sys_exc.cpython-36.pyc
    FixSysExc — fixer for sys.exc_{type, value, traceback}:
        sys.exc_type      -> sys.exc_info()[0]
        sys.exc_value     -> sys.exc_info()[1]
        sys.exc_traceback -> sys.exc_info()[2]

fixes/__pycache__/fix_throw.cpython-36.opt-1.pyc
    FixThrow — fixer for generator.throw(E, V, T):
        g.throw(E)       -> g.throw(E)
        g.throw(E, V)    -> g.throw(E(V))
        g.throw(E, V, T) -> g.throw(E(V).with_traceback(T))
        g.throw("foo"[, V[, T]]) warns about string exceptions.

fixes/__pycache__/fix_tuple_params.cpython-36.pyc
    FixTupleParams — fixer for function definitions with tuple parameters:
        def func(((a, b), c), d): ...
    becomes
        def func(x, d):
            ((a, b), c) = x
            ...
    Lambdas are also supported (the parens are a syntax error in Python 3):
        lambda (x, y): x + y  ->  lambda t: t[0] + t[1]
        lambda (x): x + y     ->  lambda x: x + y

fixes/__pycache__/fix_types.cpython-36.pyc
    FixTypes — removes uses of the types module for the known names
    (types.IntType -> int, types.NoneType -> type(None), ...). Import
    statements themselves are not modified.
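fix_types is table-driven: the PATTERN string is generated from the rename map, one alternative per name. Sketch with the mapping abridged (the full ~20-entry table is visible in the dump above; entries marked here are only a subset):

"""Fixer for removing uses of the types module (types.IntType -> int, ...)."""
from lib2to3 import fixer_base
from lib2to3.fixer_util import Name

_TYPE_MAPPING = {
    "BooleanType": "bool", "BufferType": "memoryview", "ComplexType": "complex",
    "DictType": "dict", "EllipsisType": "type(Ellipsis)", "FloatType": "float",
    "IntType": "int", "ListType": "list", "NoneType": "type(None)",
    "SliceType": "slice", "StringType": "bytes", "TupleType": "tuple",
    "TypeType": "type", "UnicodeType": "str", "XRangeType": "range",
    # ... the table in the dump also covers ClassType, DictionaryType,
    # LongType, ObjectType, NotImplementedType, StringTypes, etc.
}

# One pattern alternative per known name in the types module.
_pats = ["power< 'types' trailer< '.' name='%s' > >" % t for t in _TYPE_MAPPING]

class FixTypes(fixer_base.BaseFix):
    BM_compatible = True
    PATTERN = '|'.join(_pats)

    def transform(self, node, results):
        new_value = _TYPE_MAPPING.get(results["name"].value)
        if new_value:
            return Name(new_value, prefix=node.prefix)
        return None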
fixes/__pycache__/fix_unicode.cpython-36.opt-1.pyc
fixes/__pycache__/fix_unicode.cpython-36.pyc
    FixUnicode — fixer for unicode:
        * changes unicode to str and unichr to chr;
        * if "...\u..." is not a unicode literal, changes it into "...\\u...";
        * changes u"..." into "...".

fixes/__pycache__/fix_urllib.cpython-36.pyc
    FixUrllib — fixes imports of urllib that are now incompatible. Similar to
    fix_imports, but because the urllib split is more complex (members moved
    into urllib.request, urllib.parse and urllib.error), it has its own fixer.
    Star imports ("from urllib import *") cannot be handled and are reported.

fixes/__pycache__/fix_ws_comma.cpython-36.pyc
    FixWsComma — changes 'a ,b' into 'a, b' and '{a :b}' into '{a: b}'; it
    does not touch other uses of colons or whitespace. Explicit (opt-in)
    fixer.

fixes/__pycache__/fix_xrange.cpython-36.opt-1.pyc
    FixXrange — changes xrange(...) into range(...).
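fix_ws_comma works purely on whitespace prefixes rather than tree structure: it clones the matched node and normalizes the prefix on each separator leaf and on the token that follows it. Sketch (slightly condensed from the logic visible in the bytecode; same caveats as the reconstructions above):

"""Fixer that changes 'a ,b' into 'a, b' and '{a :b}' into '{a: b}'."""
from lib2to3 import fixer_base, pytree
from lib2to3.pgen2 import token

class FixWsComma(fixer_base.BaseFix):
    explicit = True   # opt-in: only runs when requested by name
    PATTERN = """
    any<(not(',') any)+ ',' ((not(',') any)+ ',')* [not(',') any]>
    """
    COMMA = pytree.Leaf(token.COMMA, ",")
    COLON = pytree.Leaf(token.COLON, ":")
    SEPS = (COMMA, COLON)

    def transform(self, node, results):
        new = node.clone()
        comma = False
        for child in new.children:
            if child in self.SEPS:
                prefix = child.prefix
                if prefix.isspace() and "\n" not in prefix:
                    child.prefix = ""      # drop the space before ',' or ':'
                comma = True
            else:
                if comma and not child.prefix:
                    child.prefix = " "     # ensure one space after the separator
                comma = False
        return new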
fixes/__pycache__/fix_xrange.cpython-36.pyc
    Same FixXrange module, unoptimized build. The resulting range(...) is
    additionally wrapped in list(...) unless the call already sits in a
    consuming context (iter(), list(), tuple(), sorted(), a for loop, etc.).

fixes/__pycache__/fix_xreadlines.cpython-36.pyc
    FixXreadlines — "for x in f.xreadlines()" -> "for x in f"; also converts
    g(f.xreadlines) into g(f.__iter__).

fixes/__pycache__/fix_zip.cpython-36.opt-1.pyc
    FixZip — changes zip(seq0, seq1, ...) into list(zip(seq0, seq1, ...))
    unless "from future_builtins import zip" appears in the top-level
    namespace, or the call is already directly contained in iter(<>),
    list(<>), tuple(<>), sorted(<>), ...join(<>), or "for V in <>:".

fixes/__pycache__/__init__.cpython-36.opt-2.pyc
fixes/__pycache__/__init__.cpython-36.pyc
    Empty package marker for lib2to3.fixes.

fixes/__pycache__/fix_apply.cpython-36.opt-1.pyc
    FixApply — fixer for apply(): converts apply(func, v, k) into
    (func)(*v, **k).

fixes/__pycache__/fix_asserts.cpython-36.opt-2.pyc
    FixAsserts — renames deprecated unittest methods:
        assert_ -> assertTrue, assertEquals -> assertEqual,
        assertNotEquals -> assertNotEqual, failUnlessEqual -> assertEqual,
        failIfEqual -> assertNotEqual, assertRegexpMatches -> assertRegex,
        assertRaisesRegexp -> assertRaisesRegex, failUnless -> assertTrue,
        failUnlessRaises -> assertRaises, failIf -> assertFalse, and the
        assertAlmostEqual(s) family.
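fix_xreadlines demonstrates a two-alternative pattern where the transform branches on which alternative matched. Sketch from the visible tables (not byte-verified against this archive):

"""Fix "for x in f.xreadlines()" -> "for x in f";
also converts g(f.xreadlines) into g(f.__iter__)."""
from lib2to3 import fixer_base
from lib2to3.fixer_util import Name

class FixXreadlines(fixer_base.BaseFix):
    BM_compatible = True
    PATTERN = """
    power< call=any+ trailer< '.' 'xreadlines' > trailer< '(' ')' > >
    | power< any+ trailer< '.' no_call='xreadlines' > >
    """

    def transform(self, node, results):
        no_call = results.get("no_call")
        if no_call:
            # Bare attribute reference: f.xreadlines -> f.__iter__
            no_call.replace(Name("__iter__", prefix=no_call.prefix))
        else:
            # Full call: drop '.xreadlines()' and keep only the receiver.
            node.replace([x.clone() for x in results["call"]])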
fixes/__pycache__/fix_basestring.cpython-36.opt-1.pyc
fixes/__pycache__/fix_basestring.cpython-36.opt-2.pyc
    FixBasestring — basestring -> str.

fixes/__pycache__/fix_buffer.cpython-36.opt-2.pyc
    FixBuffer — changes buffer(...) into memoryview(...). Explicit fixer.

fixes/__pycache__/fix_dict.cpython-36.opt-1.pyc
    FixDict — fixer for dict methods:
        d.keys()/items()/values()          -> list(d.keys()/items()/values())
        d.iterkeys()/iteritems()/itervalues() -> iter(d.keys()/items()/values())
        d.viewkeys()/viewitems()/viewvalues() -> d.keys()/items()/values()
    The iter()/list() wrapper is dropped in contexts that already consume an
    iterator or sequence: list(), sorted(), iter(), for...in, plus tuple(),
    set(), any(), all(), sum().

fixes/__pycache__/fix_except.cpython-36.opt-2.pyc
    FixExcept — rewrites "except E, e:" into "except E as e:", introducing a
    temporary name and an unpacking assignment when the target is a tuple or
    list.

fixes/__pycache__/fix_filter.cpython-36.opt-1.pyc
    FixFilter — changes filter(F, X) into list(filter(F, X)), and
    filter(None, X) or a lambda filter into a list comprehension, unless the
    call is already in a consuming context. Still not correct if the original
    code relied on filter(F, X) returning a string or tuple — that would
    require type inference, which 2to3 does not do.
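fix_basestring is the degenerate case: the pattern is a single name and the transform is one line. Sketch:

"""Fixer for basestring -> str."""
from lib2to3 import fixer_base
from lib2to3.fixer_util import Name

class FixBasestring(fixer_base.BaseFix):
    BM_compatible = True
    PATTERN = "'basestring'"

    def transform(self, node, results):
        return Name("str", prefix=node.prefix)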
fixes/__pycache__/fix_filter.cpython-36.opt-2.pyc
    Same FixFilter module, -OO build.

fixes/__pycache__/fix_funcattrs.cpython-36.opt-1.pyc
fixes/__pycache__/fix_funcattrs.cpython-36.opt-2.pyc
    FixFuncattrs — fixes function attribute names (f.func_x -> f.__x__):
    func_closure, func_doc, func_globals, func_name, func_defaults,
    func_code, func_dict.

fixes/__pycache__/fix_future.cpython-36.opt-2.pyc
    FixFuture — removes "from __future__ import ..." statements, replacing
    each with a blank line.

fixes/__pycache__/fix_getcwdu.cpython-36.opt-1.pyc
fixes/__pycache__/fix_getcwdu.cpython-36.opt-2.pyc
    FixGetcwdu — changes os.getcwdu() to os.getcwd().

fixes/__pycache__/fix_has_key.cpython-36.opt-1.pyc
    FixHasKey — fixer for has_key(): d.has_key(k) -> k in d. Any .has_key()
    method call is changed, regardless of its class; bound references such as
    "m = d.has_key" are not converted.
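fix_funcattrs maps the old func_* attribute family onto dunder names by string surgery on the matched leaf. Sketch:

"""Fix function attribute names (f.func_x -> f.__x__)."""
from lib2to3 import fixer_base
from lib2to3.fixer_util import Name

class FixFuncattrs(fixer_base.BaseFix):
    BM_compatible = True
    PATTERN = """
    power< any+ trailer< '.' attr=('func_closure' | 'func_doc' | 'func_globals'
                                  | 'func_name' | 'func_defaults' | 'func_code'
                                  | 'func_dict') > any* >
    """

    def transform(self, node, results):
        attr = results["attr"][0]
        # 'func_name'[5:] == 'name'  ->  '__name__'
        attr.replace(Name("__%s__" % attr.value[5:], prefix=attr.prefix))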
fixes/__pycache__/fix_imports.cpython-36.opt-2.pyc
    FixImports — fixes incompatible module imports and references using a
    rename table of roughly 50 entries: StringIO/cStringIO -> io,
    cPickle -> pickle, __builtin__ -> builtins, copy_reg -> copyreg,
    Queue -> queue, SocketServer -> socketserver,
    ConfigParser -> configparser, repr -> reprlib, Tkinter -> tkinter (and
    the tk* dialog modules), _winreg -> winreg, thread -> _thread,
    the dbm family -> dbm.*, xmlrpclib -> xmlrpc.client,
    httplib -> http.client, htmlentitydefs -> html.entities,
    HTMLParser -> html.parser, Cookie -> http.cookies,
    cookielib -> http.cookiejar, the *HTTPServer modules -> http.server,
    commands -> subprocess, UserString/UserList -> collections,
    urlparse -> urllib.parse, robotparser -> urllib.robotparser.

fixes/__pycache__/fix_imports2.cpython-36.opt-1.pyc
fixes/__pycache__/fix_imports2.cpython-36.opt-2.pyc
    FixImports2 — fixes imports that must be handled after fix_imports:
    whichdb -> dbm, anydbm -> dbm.

fixes/__pycache__/fix_input.cpython-36.opt-1.pyc
    FixInput — changes input(...) into eval(input(...)).
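fix_imports2 is nearly empty on purpose: it subclasses FixImports, swaps in a two-entry mapping, and bumps run_order so it runs after the main import fixer. Sketch:

"""Fix module references that must be handled after fix_imports."""
from lib2to3.fixes import fix_imports

MAPPING = {"whichdb": "dbm", "anydbm": "dbm"}

class FixImports2(fix_imports.FixImports):
    run_order = 7        # later than fix_imports, which runs at 6
    mapping = MAPPING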
fixes/__pycache__/fix_input.cpython-36.opt-2.pyc
    Same FixInput module, -OO build; a call already wrapped in eval(...) is
    left alone.

fixes/__pycache__/fix_intern.cpython-36.opt-2.pyc
    FixIntern — intern(s) -> sys.intern(s), adding the sys import if needed.

fixes/__pycache__/fix_isinstance.cpython-36.opt-1.pyc
fixes/__pycache__/fix_isinstance.cpython-36.opt-2.pyc
    FixIsinstance — cleans up a tuple argument to isinstance after the tokens
    in it were fixed; mainly removes duplicates left over from the
    long -> int / unicode -> str conversion, e.g.
        isinstance(x, (int, long)) -> isinstance(x, (int, int))
                                   -> isinstance(x, int).

fixes/__pycache__/fix_itertools.cpython-36.opt-1.pyc
fixes/__pycache__/fix_itertools.cpython-36.opt-2.pyc
    FixItertools — itertools.(imap|ifilter|izip) -> (map|filter|zip) and
    itertools.ifilterfalse -> itertools.filterfalse (bugs 2360-2363). The
    imports themselves are fixed by fix_itertools_imports; if itertools is
    imported under another name (import itertools as it; it.izip(...)), the
    method calls are not fixed.
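fix_input guards against double-wrapping by pattern-matching the grandparent node before transforming. Sketch (same caveats as the reconstructions above):

"""Fixer that changes input(...) into eval(input(...))."""
from lib2to3 import fixer_base, patcomp
from lib2to3.fixer_util import Call, Name

# Recognizes a call that is already wrapped: eval(<anything>)
context = patcomp.compile_pattern("power< 'eval' trailer< '(' any ')' > >")

class FixInput(fixer_base.BaseFix):
    BM_compatible = True
    PATTERN = """
              power< 'input' args=trailer< '(' [any] ')' > >
              """

    def transform(self, node, results):
        # If we're already wrapped in an eval() call, leave the node alone.
        if context.match(node.parent.parent):
            return
        new = node.clone()
        new.prefix = ""
        return Call(Name("eval"), [new], prefix=node.prefix)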
fixes/__pycache__/fix_itertools_imports.cpython-36.opt-1.pyc
    FixItertoolsImports — fixer for imports of
    itertools.(imap|ifilter|izip|ifilterfalse): drops the renamed builtins
    from the import list, rewrites ifilterfalse/izip_longest to
    filterfalse/zip_longest, and replaces an emptied import with a blank
    line.

fixes/__pycache__/fix_map.cpython-36.opt-2.pyc
    FixMap — changes map(F, ...) into list(map(F, ...)) unless the call is in
    a consuming context; a lambda becomes a list comprehension, map(None, x)
    with a single argument becomes list(x), and map(None, ...) with multiple
    arguments cannot be converted automatically ("map() now truncates to the
    shortest sequence").

fixes/__pycache__/fix_metaclass.cpython-36.opt-1.pyc
    FixMetaclass — fixer for __metaclass__ = X -> class C(..., metaclass=X).
    The classdef forms (no bases, one base, many bases) parse differently in
    the CST, so every class is checked for a __metaclass__ and the bases are
    normalized into an arglist; one-liner classes ('class X: pass') are
    normalized into having a suite. Moving the __metaclass__ assignment into
    the classdef can leave the class body empty, which is special-cased, and
    the fixer tries hard to preserve the original indenting and spacing.

fixes/__pycache__/fix_ne.cpython-36.opt-1.pyc
    FixNe — turns <> into !=.
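fix_ne and the other token-level fixers skip PATTERN entirely: they declare _accept_type and implement match() by hand, which is cheaper than pattern matching for single tokens ('!=' and '<>' both tokenize as NOTEQUAL). Sketch of fix_ne:

"""Fixer that turns <> into !=."""
from lib2to3 import fixer_base, pytree
from lib2to3.pgen2 import token

class FixNe(fixer_base.BaseFix):
    # Token-level fixer: accept raw NOTEQUAL tokens, filter in match().
    _accept_type = token.NOTEQUAL

    def match(self, node):
        return node.value == "<>"

    def transform(self, node, results):
        return pytree.Leaf(token.NOTEQUAL, "!=", prefix=node.prefix)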
�,/usr/lib64/python3.6/lib2to3/fixes/fix_ne.py�matchszFixNe.matchcCstjtjd|jd�}|S)Nz!=)�prefix)rZLeafr�NOTEQUALr)rrZresults�newr r r � transformszFixNe.transformN)�__name__� __module__�__qualname__rr Z_accept_typerrr r r r rsrN)�rZpgen2rrZBaseFixrr r r r �<module>sPK{��\� ��/fixes/__pycache__/fix_next.cpython-36.opt-1.pycnu�[���3 \f�@sndZddlmZddlmZddlmZddlm Z m Z mZdZGdd�dej �Zd d �Zdd�Zd d�ZdS)z.Fixer for it.next() -> next(it), per PEP 3114.�)�token)�python_symbols)� fixer_base)�Name�Call�find_bindingz;Calls to builtin next() possibly shadowed by global bindingcs0eZdZdZdZdZ�fdd�Zdd�Z�ZS)�FixNextTa� power< base=any+ trailer< '.' attr='next' > trailer< '(' ')' > > | power< head=any+ trailer< '.' attr='next' > not trailer< '(' ')' > > | classdef< 'class' any+ ':' suite< any* funcdef< 'def' name='next' parameters< '(' NAME ')' > any+ > any* > > | global=global_stmt< 'global' any* 'next' any* > Zprecs>tt|�j||�td|�}|r4|j|t�d|_nd|_dS)N�nextTF)�superr� start_treer�warning�bind_warning� shadowed_next)�selfZtree�filename�n)� __class__��./usr/lib64/python3.6/lib2to3/fixes/fix_next.pyr$s zFixNext.start_treecCs�|jd�}|jd�}|jd�}|rr|jr>|jtd|jd��q�dd�|D�}d|d _|jttd |jd�|��n�|r�td|jd�}|j|�nj|r�t|�r�|d}djdd�|D��j�d kr�|j |t �dS|jtd��nd|kr�|j |t �d|_dS)N�base�attr�name�__next__)�prefixcSsg|]}|j��qSr)Zclone)�.0rrrr� <listcomp>9sz%FixNext.transform.<locals>.<listcomp>��r �headcSsg|]}t|��qSr)�str)rrrrrrEsZ__builtin__�globalT)�getr�replacerrr�is_assign_target�join�striprr )r�nodeZresultsrrrrrrrr� transform.s, zFixNext.transform) �__name__� __module__�__qualname__Z BM_compatibleZPATTERN�orderrr'� __classcell__rr)rrrs rcCsFt|�}|dkrdSx,|jD]"}|jtjkr0dSt||�rdSqWdS)NFT)�find_assign�children�typer�EQUAL� is_subtree)r&ZassignZchildrrrr#Qs r#cCs4|jtjkr|S|jtjks&|jdkr*dSt|j�S)N)r/�symsZ expr_stmtZsimple_stmt�parentr-)r&rrrr-]s r-cs$|�krdSt�fdd�|jD��S)NTc3s|]}t|��VqdS)N)r1)r�c)r&rr� <genexpr>gszis_subtree.<locals>.<genexpr>)�anyr.)�rootr&r)r&rr1dsr1N)�__doc__Zpgen2rZpygramrr2rrZ fixer_utilrrrr ZBaseFixrr#r-r1rrrr�<module>s@PK{��\�#���6fixes/__pycache__/fix_numliterals.cpython-36.opt-1.pycnu�[���3 \�@s>dZddlmZddlmZddlmZGdd�dej�ZdS)z-Fixer that turns 1L into 1, 0755 into 0o755. 
�)�token)� fixer_base)�Numberc@s"eZdZejZdd�Zdd�ZdS)�FixNumliteralscCs|jjd�p|jddkS)N�0��Ll���)�value� startswith)�self�node�r�5/usr/lib64/python3.6/lib2to3/fixes/fix_numliterals.py�matchszFixNumliterals.matchcCs`|j}|ddkr |dd�}n2|jd�rR|j�rRtt|��dkrRd|dd�}t||jd�S)NrrrZ0o)�prefixr r )r r�isdigit�len�setrr)rr Zresults�valrrr� transforms"zFixNumliterals.transformN)�__name__� __module__�__qualname__r�NUMBERZ_accept_typerrrrrrrsrN) �__doc__Zpgen2r�rZ fixer_utilrZBaseFixrrrrr�<module>sPK{��\�T��6fixes/__pycache__/fix_numliterals.cpython-36.opt-2.pycnu�[���3 \�@s:ddlmZddlmZddlmZGdd�dej�ZdS)�)�token)� fixer_base)�Numberc@s"eZdZejZdd�Zdd�ZdS)�FixNumliteralscCs|jjd�p|jddkS)N�0��Ll���)�value� startswith)�self�node�r�5/usr/lib64/python3.6/lib2to3/fixes/fix_numliterals.py�matchszFixNumliterals.matchcCs`|j}|ddkr |dd�}n2|jd�rR|j�rRtt|��dkrRd|dd�}t||jd�S)NrrrZ0o)�prefixr r )r r�isdigit�len�setrr)rr Zresults�valrrr� transforms"zFixNumliterals.transformN)�__name__� __module__�__qualname__r�NUMBERZ_accept_typerrrrrrrsrN)Zpgen2r�rZ fixer_utilrZBaseFixrrrrr�<module>sPK{��\�����3fixes/__pycache__/fix_operator.cpython-36.opt-2.pycnu�[���3 \� �@sJddlZddlmZddlmZmZmZmZdd�ZGdd�dej �Z dS)�N)� fixer_base)�Call�Name�String�touch_importcs�fdd�}|S)Ncs �|_|S)N)� invocation)�f)�s��2/usr/lib64/python3.6/lib2to3/fixes/fix_operator.py�decszinvocation.<locals>.decr )r rr )r rrsrc@s�eZdZdZdZdZdZdeeed�Zdd�Z e d �d d��Ze d�d d��Ze d�dd��Z e d�dd��Ze d�dd��Ze d�dd��Ze d�dd��Zdd�Zd d!�Zd"d#�Zd$S)%�FixOperatorTZprez� method=('isCallable'|'sequenceIncludes' |'isSequenceType'|'isMappingType'|'isNumberType' |'repeat'|'irepeat') z'(' obj=any ')'z� power< module='operator' trailer< '.' %(methods)s > trailer< %(obj)s > > | power< %(methods)s trailer< %(obj)s > > )�methods�objcCs"|j||�}|dk r|||�SdS)N)� _check_method)�self�node�results�methodr r r� transform+szFixOperator.transformzoperator.contains(%s)cCs|j||d�S)N�contains)�_handle_rename)rrrr r r�_sequenceIncludes0szFixOperator._sequenceIncludeszhasattr(%s, '__call__')cCs2|d}|j�td�td�g}ttd�||jd�S)Nrz, z '__call__'�hasattr)�prefix)�clonerrrr)rrrr�argsr r r�_isCallable4szFixOperator._isCallablezoperator.mul(%s)cCs|j||d�S)N�mul)r)rrrr r r�_repeat:szFixOperator._repeatzoperator.imul(%s)cCs|j||d�S)N�imul)r)rrrr r r�_irepeat>szFixOperator._irepeatz$isinstance(%s, collections.Sequence)cCs|j||dd�S)N�collections�Sequence)�_handle_type2abc)rrrr r r�_isSequenceTypeBszFixOperator._isSequenceTypez#isinstance(%s, collections.Mapping)cCs|j||dd�S)Nr"�Mapping)r$)rrrr r r�_isMappingTypeFszFixOperator._isMappingTypezisinstance(%s, numbers.Number)cCs|j||dd�S)NZnumbers�Number)r$)rrrr r r� _isNumberTypeJszFixOperator._isNumberTypecCs|dd}||_|j�dS)Nrr)�valueZchanged)rrr�namerr r rrNszFixOperator._handle_renamecCsFtd||�|d}|j�tddj||g��g}ttd�||jd�S)Nrz, �.� isinstance)r)rrr�joinrrr)rrr�module�abcrrr r rr$SszFixOperator._handle_type2abccCs\t|d|ddj�}t|tj�rXd|kr0|St|d�f}|j|}|j|d|�dS)N�_rrr/rzYou should use '%s' here.)�getattrr*r-r"�Callable�strrZwarning)rrrr�subZinvocation_strr r rrYs zFixOperator._check_methodN)�__name__� __module__�__qualname__Z BM_compatible�orderrr�dictZPATTERNrrrrrr!r%r'r)rr$rr r r rr s r )r"Zlib2to3rZlib2to3.fixer_utilrrrrrZBaseFixr r r r r�<module>sPK{��\���YY0fixes/__pycache__/fix_paren.cpython-36.opt-1.pycnu�[���3 \��@s6dZddlmZddlmZmZGdd�dej�ZdS)zuFixer that addes parentheses where they are required This converts ``[x for x in 1, 2]`` to ``[x for x in (1, 2)]``.�)� fixer_base)�LParen�RParenc@seZdZdZdZdd�ZdS)�FixParenTa atom< ('[' | '(') (listmaker< any comp_for< 'for' NAME 'in' 
target=testlist_safe< any (',' any)+ [','] > [any] > > | testlist_gexp< any comp_for< 'for' NAME 'in' target=testlist_safe< any (',' any)+ [','] > [any] > >) (']' | ')') > cCs8|d}t�}|j|_d|_|jd|�|jt��dS)N�target��)r�prefixZinsert_childZappend_childr)�selfZnodeZresultsrZlparen�r�//usr/lib64/python3.6/lib2to3/fixes/fix_paren.py� transform%szFixParen.transformN)�__name__� __module__�__qualname__Z BM_compatibleZPATTERNr rrrrrsrN)�__doc__rrZ fixer_utilrrZBaseFixrrrrr�<module>sPK{��\Cy�F��0fixes/__pycache__/fix_paren.cpython-36.opt-2.pycnu�[���3 \��@s2ddlmZddlmZmZGdd�dej�ZdS)�)� fixer_base)�LParen�RParenc@seZdZdZdZdd�ZdS)�FixParenTa atom< ('[' | '(') (listmaker< any comp_for< 'for' NAME 'in' target=testlist_safe< any (',' any)+ [','] > [any] > > | testlist_gexp< any comp_for< 'for' NAME 'in' target=testlist_safe< any (',' any)+ [','] > [any] > >) (']' | ')') > cCs8|d}t�}|j|_d|_|jd|�|jt��dS)N�target��)r�prefixZinsert_childZappend_childr)�selfZnodeZresultsrZlparen�r�//usr/lib64/python3.6/lib2to3/fixes/fix_paren.py� transform%szFixParen.transformN)�__name__� __module__�__qualname__Z BM_compatibleZPATTERNr rrrrrsrN)rrZ fixer_utilrrZBaseFixrrrrr�<module>sPK{��\\<�ֶ�0fixes/__pycache__/fix_print.cpython-36.opt-1.pycnu�[���3 \�@sldZddlmZddlmZddlmZddlmZddlmZm Z m Z mZejd�Z Gdd �d ej�Zd S)aFixer for print. Change: 'print' into 'print()' 'print ...' into 'print(...)' 'print ... ,' into 'print(..., end=" ")' 'print >>x, ...' into 'print(..., file=x)' No changes are applied if print_function is imported from __future__ �)�patcomp)�pytree)�token)� fixer_base)�Name�Call�Comma�Stringz"atom< '(' [atom|STRING|NAME] ')' >c@s$eZdZdZdZdd�Zdd�ZdS)�FixPrintTzP simple_stmt< any* bare='print' any* > | print_stmt c Cs`|jd�}|r,|jttd�g|jd��dS|jdd�}t|�dkrXtj|d�rXdSd}}}|r�|dt �kr�|dd�}d}|r�|dt jtj d�kr�|dj�}|dd�}d d �|D�}|r�d|d_|dk s�|dk s�|dk �rF|dk �r|j|dtt|���|dk �r.|j|d tt|���|dk �rF|j|d|�ttd�|�} |j| _| S)NZbare�print)�prefix��� z>>�cSsg|]}|j��qS�)�clone)�.0�argrr�//usr/lib64/python3.6/lib2to3/fixes/fix_print.py� <listcomp>?sz&FixPrint.transform.<locals>.<listcomp>��sep�end�file���r)�get�replacerrrZchildren�len�parend_expr�matchrr�Leafr� RIGHTSHIFTr� add_kwargr �repr) �selfZnodeZresultsZ bare_print�argsrrrZl_argsZn_stmtrrr� transform%s8 zFixPrint.transformcCsNd|_tj|jjt|�tjtjd�|f�}|r@|j t ��d|_|j |�dS)Nr�=r)rrZNodeZsymsZargumentrr!r�EQUAL�appendr)r%Zl_nodesZs_kwdZn_exprZ n_argumentrrrr#Ms zFixPrint.add_kwargN)�__name__� __module__�__qualname__Z BM_compatibleZPATTERNr'r#rrrrr s(r N)�__doc__rrrZpgen2rrZ fixer_utilrrrr Zcompile_patternrZBaseFixr rrrr�<module>sPK{��\��4fixes/__pycache__/fix_raw_input.cpython-36.opt-1.pycnu�[���3 \��@s2dZddlmZddlmZGdd�dej�ZdS)z2Fixer that changes raw_input(...) 
into input(...).�)� fixer_base)�Namec@seZdZdZdZdd�ZdS)�FixRawInputTzU power< name='raw_input' trailer< '(' [any] ')' > any* > cCs |d}|jtd|jd��dS)N�name�input)�prefix)�replacerr)�selfZnodeZresultsr�r �3/usr/lib64/python3.6/lib2to3/fixes/fix_raw_input.py� transformszFixRawInput.transformN)�__name__� __module__�__qualname__Z BM_compatibleZPATTERNrr r r rrsrN)�__doc__�rZ fixer_utilrZBaseFixrr r r r�<module>sPK{��\�^\���4fixes/__pycache__/fix_raw_input.cpython-36.opt-2.pycnu�[���3 \��@s.ddlmZddlmZGdd�dej�ZdS)�)� fixer_base)�Namec@seZdZdZdZdd�ZdS)�FixRawInputTzU power< name='raw_input' trailer< '(' [any] ')' > any* > cCs |d}|jtd|jd��dS)N�name�input)�prefix)�replacerr)�selfZnodeZresultsr�r �3/usr/lib64/python3.6/lib2to3/fixes/fix_raw_input.py� transformszFixRawInput.transformN)�__name__� __module__�__qualname__Z BM_compatibleZPATTERNrr r r rrsrN)�rZ fixer_utilrZBaseFixrr r r r�<module>sPK{��\� ��UU1fixes/__pycache__/fix_reduce.cpython-36.opt-1.pycnu�[���3 \E�@s2dZddlmZddlmZGdd�dej�ZdS)zqFixer for reduce(). Makes sure reduce() is imported from the functools module if reduce is used in that module. �)� fixer_base)�touch_importc@s eZdZdZdZdZdd�ZdS)� FixReduceTZpreai power< 'reduce' trailer< '(' arglist< ( (not(argument<any '=' any>) any ',' not(argument<any '=' any>) any) | (not(argument<any '=' any>) any ',' not(argument<any '=' any>) any ',' not(argument<any '=' any>) any) ) > ')' > > cCstdd|�dS)N� functools�reduce)r)�selfZnodeZresults�r�0/usr/lib64/python3.6/lib2to3/fixes/fix_reduce.py� transform"szFixReduce.transformN)�__name__� __module__�__qualname__Z BM_compatible�orderZPATTERNr rrrr rsrN)�__doc__Zlib2to3rZlib2to3.fixer_utilrZBaseFixrrrrr �<module>sPK{��\� `��1fixes/__pycache__/fix_reduce.cpython-36.opt-2.pycnu�[���3 \E�@s.ddlmZddlmZGdd�dej�ZdS)�)� fixer_base)�touch_importc@s eZdZdZdZdZdd�ZdS)� FixReduceTZpreai power< 'reduce' trailer< '(' arglist< ( (not(argument<any '=' any>) any ',' not(argument<any '=' any>) any) | (not(argument<any '=' any>) any ',' not(argument<any '=' any>) any ',' not(argument<any '=' any>) any) ) > ')' > > cCstdd|�dS)N� functools�reduce)r)�selfZnodeZresults�r�0/usr/lib64/python3.6/lib2to3/fixes/fix_reduce.py� transform"szFixReduce.transformN)�__name__� __module__�__qualname__Z BM_compatible�orderZPATTERNr rrrr rsrN)Zlib2to3rZlib2to3.fixer_utilrZBaseFixrrrrr �<module> sPK{��\t�u551fixes/__pycache__/fix_reload.cpython-36.opt-2.pycnu�[���3 \��@s2ddlmZddlmZmZGdd�dej�ZdS)�)� fixer_base)� ImportAndCall�touch_importc@s eZdZdZdZdZdd�ZdS)� FixReloadTZprez� power< 'reload' trailer< lpar='(' ( not(arglist | argument<any '=' any>) obj=any | obj=arglist<(not argument<any '=' any>) any ','> ) rpar=')' > after=any* > cCsd|rD|d}|rD|j|jjkr"dS|j|jjkrD|jdjdkrDdSd}t|||�}tdd|�|S)N�obj�z**�imp�reload)rr )�typeZsymsZ star_exprZargumentZchildren�valuerr)�selfZnodeZresultsr�names�new�r�0/usr/lib64/python3.6/lib2to3/fixes/fix_reload.py� transformszFixReload.transformN)�__name__� __module__�__qualname__Z BM_compatible�orderZPATTERNrrrrrr s rN)�rZ fixer_utilrrZBaseFixrrrrr�<module>sPK{��\q�XF��2fixes/__pycache__/fix_renames.cpython-36.opt-1.pycnu�[���3 \��@sVdZddlmZddlmZmZdddiiZiZdd�Zd d �Z Gdd�dej �Zd S)z?Fix incompatible renames Fixes: * sys.maxint -> sys.maxsize �)� fixer_base)�Name� attr_chain�sysZmaxint�maxsizecCsddjtt|��dS)N�(�|�))�join�map�repr)�members�r�1/usr/lib64/python3.6/lib2to3/fixes/fix_renames.py� alternatessrccsbx\ttj��D]L\}}xBt|j��D]2\}}|t||f<d|||fVd||fVq$WqWdS)Nz� import_from< 'from' module_name=%r 'import' ( attr_name=%r | import_as_name< attr_name=%r 'as' any >) > z^ power< 
module_name=%r trailer< '.' attr_name=%r > any* > )�list�MAPPING�items�LOOKUP)�module�replaceZold_attr�new_attrrrr� build_patterns rcs8eZdZdZdje��ZdZ�fdd�Zdd�Z �Z S)� FixRenamesTrZprecs@tt|�j��|�}|r<t�fdd�t|d�D��r8dS|SdS)Nc3s|]}�|�VqdS)Nr)�.0�obj)�matchrr� <genexpr>5sz#FixRenames.match.<locals>.<genexpr>�parentF)�superrr�anyr)�self�node�results)� __class__)rrr1szFixRenames.matchcCsD|jd�}|jd�}|r@|r@t|j|jf}|jt||jd��dS)NZmodule_name� attr_name)�prefix)�getr�valuerrr&)r!r"r#Zmod_namer%rrrr� transform>s zFixRenames.transform)�__name__� __module__�__qualname__Z BM_compatibler rZPATTERN�orderrr)� __classcell__rr)r$rr*s rN)�__doc__�rZ fixer_utilrrrrrrZBaseFixrrrrr�<module>sPK{��\�?�dd2fixes/__pycache__/fix_renames.cpython-36.opt-2.pycnu�[���3 \��@sRddlmZddlmZmZdddiiZiZdd�Zdd �ZGd d�dej �Z dS) �)� fixer_base)�Name� attr_chain�sysZmaxint�maxsizecCsddjtt|��dS)N�(�|�))�join�map�repr)�members�r�1/usr/lib64/python3.6/lib2to3/fixes/fix_renames.py� alternatessrccsbx\ttj��D]L\}}xBt|j��D]2\}}|t||f<d|||fVd||fVq$WqWdS)Nz� import_from< 'from' module_name=%r 'import' ( attr_name=%r | import_as_name< attr_name=%r 'as' any >) > z^ power< module_name=%r trailer< '.' attr_name=%r > any* > )�list�MAPPING�items�LOOKUP)�module�replaceZold_attr�new_attrrrr� build_patterns rcs8eZdZdZdje��ZdZ�fdd�Zdd�Z �Z S)� FixRenamesTrZprecs@tt|�j��|�}|r<t�fdd�t|d�D��r8dS|SdS)Nc3s|]}�|�VqdS)Nr)�.0�obj)�matchrr� <genexpr>5sz#FixRenames.match.<locals>.<genexpr>�parentF)�superrr�anyr)�self�node�results)� __class__)rrr1szFixRenames.matchcCsD|jd�}|jd�}|r@|r@t|j|jf}|jt||jd��dS)NZmodule_name� attr_name)�prefix)�getr�valuerrr&)r!r"r#Zmod_namer%rrrr� transform>s zFixRenames.transform)�__name__� __module__�__qualname__Z BM_compatibler rZPATTERN�orderrr)� __classcell__rr)r$rr*s rN)�rZ fixer_utilrrrrrrZBaseFixrrrrr�<module> sPK{��\a�;888/fixes/__pycache__/fix_repr.cpython-36.opt-1.pycnu�[���3 \e�@s:dZddlmZddlmZmZmZGdd�dej�ZdS)z/Fixer that transforms `xyzzy` into repr(xyzzy).�)� fixer_base)�Call�Name�parenthesizec@seZdZdZdZdd�ZdS)�FixReprTz7 atom < '`' expr=any '`' > cCs8|dj�}|j|jjkr"t|�}ttd�|g|jd�S)N�expr�repr)�prefix)Zclone�typeZsymsZ testlist1rrrr )�selfZnodeZresultsr�r�./usr/lib64/python3.6/lib2to3/fixes/fix_repr.py� transformszFixRepr.transformN)�__name__� __module__�__qualname__Z BM_compatibleZPATTERNrrrrr rsrN) �__doc__�rZ fixer_utilrrrZBaseFixrrrrr �<module>sPK{��\�/Za��/fixes/__pycache__/fix_repr.cpython-36.opt-2.pycnu�[���3 \e�@s6ddlmZddlmZmZmZGdd�dej�ZdS)�)� fixer_base)�Call�Name�parenthesizec@seZdZdZdZdd�ZdS)�FixReprTz7 atom < '`' expr=any '`' > cCs8|dj�}|j|jjkr"t|�}ttd�|g|jd�S)N�expr�repr)�prefix)Zclone�typeZsymsZ testlist1rrrr )�selfZnodeZresultsr�r�./usr/lib64/python3.6/lib2to3/fixes/fix_repr.py� transformszFixRepr.transformN)�__name__� __module__�__qualname__Z BM_compatibleZPATTERNrrrrr rsrN)�rZ fixer_utilrrrZBaseFixrrrrr �<module>sPK{��\6�H�776fixes/__pycache__/fix_set_literal.cpython-36.opt-2.pycnu�[���3 \��@s6ddlmZmZddlmZmZGdd�dej�ZdS)�)� fixer_base�pytree)�token�symsc@s eZdZdZdZdZdd�ZdS)� FixSetLiteralTajpower< 'set' trailer< '(' (atom=atom< '[' (items=listmaker< any ((',' any)* [',']) > | single=any) ']' > | atom< '(' items=testlist_gexp< any ((',' any)* [',']) > ')' > ) ')' > > c Cs�|jd�}|r2tjtj|j�g�}|j|�|}n|d}tjtj d�g}|j dd�|jD��|jtjtj d��|jj|d _tjtj|�}|j|_t|j�dkr�|jd }|j�|j|jd_|S)N�single�items�{css|]}|j�VqdS)N)�clone)�.0�n�r �5/usr/lib64/python3.6/lib2to3/fixes/fix_set_literal.py� <genexpr>'sz*FixSetLiteral.transform.<locals>.<genexpr>�}������r)�getrZNoderZ listmakerr 
�replaceZLeafr�LBRACE�extendZchildren�append�RBRACEZnext_sibling�prefixZdictsetmaker�len�remove) �selfZnodeZresultsrZfaker�literalZmakerrr r r� transforms" zFixSetLiteral.transformN)�__name__� __module__�__qualname__Z BM_compatibleZexplicitZPATTERNr r r r rrs rN)Zlib2to3rrZlib2to3.fixer_utilrrZBaseFixrr r r r�<module>sPK{��\Ѐ���8fixes/__pycache__/fix_standarderror.cpython-36.opt-1.pycnu�[���3 \��@s2dZddlmZddlmZGdd�dej�ZdS)z%Fixer for StandardError -> Exception.�)� fixer_base)�Namec@seZdZdZdZdd�ZdS)�FixStandarderrorTz- 'StandardError' cCstd|jd�S)N� Exception)�prefix)rr)�selfZnodeZresults�r�7/usr/lib64/python3.6/lib2to3/fixes/fix_standarderror.py� transformszFixStandarderror.transformN)�__name__� __module__�__qualname__Z BM_compatibleZPATTERNr rrrr rsrN)�__doc__�rZ fixer_utilrZBaseFixrrrrr �<module>sPK{��\�����8fixes/__pycache__/fix_standarderror.cpython-36.opt-2.pycnu�[���3 \��@s.ddlmZddlmZGdd�dej�ZdS)�)� fixer_base)�Namec@seZdZdZdZdd�ZdS)�FixStandarderrorTz- 'StandardError' cCstd|jd�S)N� Exception)�prefix)rr)�selfZnodeZresults�r�7/usr/lib64/python3.6/lib2to3/fixes/fix_standarderror.py� transformszFixStandarderror.transformN)�__name__� __module__�__qualname__Z BM_compatibleZPATTERNr rrrr rsrN)�rZ fixer_utilrZBaseFixrrrrr �<module>sPK{��\N���dd2fixes/__pycache__/fix_sys_exc.cpython-36.opt-1.pycnu�[���3 \ �@sJdZddlmZddlmZmZmZmZmZm Z m Z Gdd�dej�ZdS)z�Fixer for sys.exc_{type, value, traceback} sys.exc_type -> sys.exc_info()[0] sys.exc_value -> sys.exc_info()[1] sys.exc_traceback -> sys.exc_info()[2] �)� fixer_base)�Attr�Call�Name�Number� Subscript�Node�symsc@s:eZdZdddgZdZddjdd�eD��Zd d �ZdS)� FixSysExc�exc_type� exc_value� exc_tracebackTzN power< 'sys' trailer< dot='.' attribute=(%s) > > �|ccs|]}d|VqdS)z'%s'N�)�.0�err�1/usr/lib64/python3.6/lib2to3/fixes/fix_sys_exc.py� <genexpr>szFixSysExc.<genexpr>cCst|dd}t|jj|j��}ttd�|jd�}ttd�|�}|dj|djd_|j t |��ttj ||jd�S)NZ attribute��exc_info)�prefix�sys�dot�)rr�index�valuerrrrZchildren�appendrrr Zpower)�selfZnodeZresultsZsys_attrrZcall�attrrrr� transformszFixSysExc.transformN)�__name__� __module__�__qualname__rZ BM_compatible�joinZPATTERNrrrrrr s r N) �__doc__�rZ fixer_utilrrrrrrr ZBaseFixr rrrr�<module>s$PK{��\��C���2fixes/__pycache__/fix_sys_exc.cpython-36.opt-2.pycnu�[���3 \ �@sFddlmZddlmZmZmZmZmZmZm Z Gdd�dej �ZdS)�)� fixer_base)�Attr�Call�Name�Number� Subscript�Node�symsc@s:eZdZdddgZdZddjdd�eD��Zd d �ZdS)� FixSysExc�exc_type� exc_value� exc_tracebackTzN power< 'sys' trailer< dot='.' attribute=(%s) > > �|ccs|]}d|VqdS)z'%s'N�)�.0�err�1/usr/lib64/python3.6/lib2to3/fixes/fix_sys_exc.py� <genexpr>szFixSysExc.<genexpr>cCst|dd}t|jj|j��}ttd�|jd�}ttd�|�}|dj|djd_|j t |��ttj ||jd�S)NZ attribute��exc_info)�prefix�sys�dot�)rr�index�valuerrrrZchildren�appendrrr Zpower)�selfZnodeZresultsZsys_attrrZcall�attrrrr� transformszFixSysExc.transformN)�__name__� __module__�__qualname__rZ BM_compatible�joinZPATTERNrrrrrr s r N)�rZ fixer_utilrrrrrrr ZBaseFixr rrrr�<module>s$PK{��\��0fixes/__pycache__/fix_throw.cpython-36.opt-2.pycnu�[���3 \.�@sVddlmZddlmZddlmZddlmZmZmZm Z m Z Gdd�dej�ZdS)�)�pytree)�token)� fixer_base)�Name�Call�ArgList�Attr�is_tuplec@seZdZdZdZdd�ZdS)�FixThrowTz� power< any trailer< '.' 'throw' > trailer< '(' args=arglist< exc=any ',' val=any [',' tb=any] > ')' > > | power< any trailer< '.' 
'throw' > trailer< '(' exc=any ')' > > cCs�|j}|dj�}|jtjkr.|j|d�dS|jd�}|dkrDdS|j�}t|�rndd�|jdd�D�}nd|_ |g}|d}d |kr�|d j�}d|_ t ||�} t| td ��t |g�g} |jtj|j| ��n|jt ||��dS)N�excz+Python 3 does not support string exceptions�valcSsg|]}|j��qS�)�clone)�.0�cr r �//usr/lib64/python3.6/lib2to3/fixes/fix_throw.py� <listcomp>)sz&FixThrow.transform.<locals>.<listcomp>���args�tb�with_traceback���)�symsr�typer�STRINGZcannot_convert�getr Zchildren�prefixrrrr�replacerZNodeZpower)�selfZnodeZresultsrrrrZ throw_argsr�eZwith_tbr r r� transforms* zFixThrow.transformN)�__name__� __module__�__qualname__Z BM_compatibleZPATTERNr!r r r rr sr N) rrZpgen2rrZ fixer_utilrrrrr ZBaseFixr r r r r�<module>sPK{��\T�|���7fixes/__pycache__/fix_tuple_params.cpython-36.opt-1.pycnu�[���3 \��@s�dZddlmZddlmZddlmZddlmZmZm Z m Z mZmZdd�Z Gdd �d ej�Zd d�Zdd �Zgdfdd�Zdd�ZdS)a:Fixer for function definitions with tuple parameters. def func(((a, b), c), d): ... -> def func(x, d): ((a, b), c) = x ... It will also support lambdas: lambda (x, y): x + y -> lambda t: t[0] + t[1] # The parens are a syntax error in Python 3 lambda (x): x + y -> lambda x: x + y �)�pytree)�token)� fixer_base)�Assign�Name�Newline�Number� Subscript�symscCst|tj�o|jdjtjkS)N�)� isinstancer�Node�children�typer�STRING)�stmt�r�6/usr/lib64/python3.6/lib2to3/fixes/fix_tuple_params.py�is_docstringsrc@s(eZdZdZdZdZdd�Zdd�ZdS) �FixTupleParams�Ta funcdef< 'def' any parameters< '(' args=any ')' > ['->' any] ':' suite=any+ > | lambda= lambdef< 'lambda' args=vfpdef< '(' inner=any ')' > ':' body=any > cs�d|kr�j||�Sg�|d}|d}|djdjtjkrZd}|djdj}t��nd}d}tjtjd��d���fd d� }|jt j kr�||�n@|jt jkr�x2t|j�D]$\}} | jt j kr�|| |dkd�q�W�s�dSx�D]} |d| _ q�W|}|dk�rd �d_n&t|dj|��r8|�d_|d}x�D]} |d| _ �q>W�|dj||�<x4t|d|t��d�D]}||dj|_�q�W|dj�dS)N�lambda�suite�argsr�rz; �Fcs\t�j��}|j�}d|_t||j��}|r2d|_|j|��jtjt j |�j�g��dS)Nr� )r�new_name�clone�prefixr�replace�appendrr r Zsimple_stmt)Z tuple_arg� add_prefix�n�argr)�end� new_lines�selfrr�handle_tupleCs z.FixTupleParams.transform.<locals>.handle_tuple)r"r)F)�transform_lambdarrr�INDENT�valuerrZLeafr ZtfpdefZ typedargslist� enumerate�parentrr�range�lenZchanged)r'�node�resultsrr�start�indentr(�ir$�line�afterr)r%r&r'r� transform.sF zFixTupleParams.transformc Cs�|d}|d}t|d�}|jtjkrD|j�}d|_|j|�dSt|�}t|�}|j t |��}t|dd�} |j| j��xd|j�D]X} | jtjkr�| j |kr�dd�|| j D�}tjtj| j�g|�}| j|_| j|�q�WdS)Nr�body�innerr)rcSsg|]}|j��qSr)r)�.0�crrr� <listcomp>�sz3FixTupleParams.transform_lambda.<locals>.<listcomp>)� simplify_argsrr�NAMErrr �find_params�map_to_indexr� tuple_namerZ post_orderr+rr r Zpower) r'r0r1rr8r9ZparamsZto_indexZtup_nameZ new_paramr#Z subscripts�newrrrr)ns( zFixTupleParams.transform_lambdaN)�__name__� __module__�__qualname__Z run_orderZ BM_compatibleZPATTERNr7r)rrrrrs @rcCsR|jtjtjfkr|S|jtjkrBx|jtjkr<|jd}q$W|Std|��dS)NrzReceived unexpected node %s)rr Zvfplistrr>�vfpdefr�RuntimeError)r0rrrr=�sr=cCs<|jtjkrt|jd�S|jtjkr,|jSdd�|jD�S)NrcSs g|]}|jtjkrt|��qSr)rr�COMMAr?)r:r;rrrr<�szfind_params.<locals>.<listcomp>)rr rFr?rrr>r+)r0rrrr?�s r?NcCs^|dkri}xLt|�D]@\}}ttt|���g}t|t�rJt|||d�q||||<qW|S)N)�d)r,r r�strr�listr@)� param_listrrIr4�objZtrailerrrrr@�s r@cCs@g}x0|D](}t|t�r(|jt|��q |j|�q Wdj|�S)N�_)rrKr!rA�join)rL�lrMrrrrA�s rA)�__doc__rrZpgen2rrZ fixer_utilrrrrr r rZBaseFixrr=r?r@rArrrr�<module>s lPK{��\������7fixes/__pycache__/fix_tuple_params.cpython-36.opt-2.pycnu�[���3 \��@s�ddlmZddlmZddlmZddlmZmZmZm Z m Z mZdd�ZGdd�dej �Zd d �Zdd�Zgd fdd�Zdd�Zd S)�)�pytree)�token)� fixer_base)�Assign�Name�Newline�Number� Subscript�symscCst|tj�o|jdjtjkS)N�)� 
isinstancer�Node�children�typer�STRING)�stmt�r�6/usr/lib64/python3.6/lib2to3/fixes/fix_tuple_params.py�is_docstringsrc@s(eZdZdZdZdZdd�Zdd�ZdS) �FixTupleParams�Ta funcdef< 'def' any parameters< '(' args=any ')' > ['->' any] ':' suite=any+ > | lambda= lambdef< 'lambda' args=vfpdef< '(' inner=any ')' > ':' body=any > cs�d|kr�j||�Sg�|d}|d}|djdjtjkrZd}|djdj}t��nd}d}tjtjd��d���fd d� }|jt j kr�||�n@|jt jkr�x2t|j�D]$\}} | jt j kr�|| |dkd�q�W�s�dSx�D]} |d| _ q�W|}|dk�rd �d_n&t|dj|��r8|�d_|d}x�D]} |d| _ �q>W�|dj||�<x4t|d|t��d�D]}||dj|_�q�W|dj�dS)N�lambda�suite�argsr�rz; �Fcs\t�j��}|j�}d|_t||j��}|r2d|_|j|��jtjt j |�j�g��dS)Nr� )r�new_name�clone�prefixr�replace�appendrr r Zsimple_stmt)Z tuple_arg� add_prefix�n�argr)�end� new_lines�selfrr�handle_tupleCs z.FixTupleParams.transform.<locals>.handle_tuple)r"r)F)�transform_lambdarrr�INDENT�valuerrZLeafr ZtfpdefZ typedargslist� enumerate�parentrr�range�lenZchanged)r'�node�resultsrr�start�indentr(�ir$�line�afterr)r%r&r'r� transform.sF zFixTupleParams.transformc Cs�|d}|d}t|d�}|jtjkrD|j�}d|_|j|�dSt|�}t|�}|j t |��}t|dd�} |j| j��xd|j�D]X} | jtjkr�| j |kr�dd�|| j D�}tjtj| j�g|�}| j|_| j|�q�WdS)Nr�body�innerr)rcSsg|]}|j��qSr)r)�.0�crrr� <listcomp>�sz3FixTupleParams.transform_lambda.<locals>.<listcomp>)� simplify_argsrr�NAMErrr �find_params�map_to_indexr� tuple_namerZ post_orderr+rr r Zpower) r'r0r1rr8r9ZparamsZto_indexZtup_nameZ new_paramr#Z subscripts�newrrrr)ns( zFixTupleParams.transform_lambdaN)�__name__� __module__�__qualname__Z run_orderZ BM_compatibleZPATTERNr7r)rrrrrs @rcCsR|jtjtjfkr|S|jtjkrBx|jtjkr<|jd}q$W|Std|��dS)NrzReceived unexpected node %s)rr Zvfplistrr>�vfpdefr�RuntimeError)r0rrrr=�sr=cCs<|jtjkrt|jd�S|jtjkr,|jSdd�|jD�S)NrcSs g|]}|jtjkrt|��qSr)rr�COMMAr?)r:r;rrrr<�szfind_params.<locals>.<listcomp>)rr rFr?rrr>r+)r0rrrr?�s r?NcCs^|dkri}xLt|�D]@\}}ttt|���g}t|t�rJt|||d�q||||<qW|S)N)�d)r,r r�strr�listr@)� param_listrrIr4�objZtrailerrrrr@�s r@cCs@g}x0|D](}t|t�r(|jt|��q |j|�q Wdj|�S)N�_)rrKr!rA�join)rL�lrMrrrrA�s rA)rrZpgen2rrZ fixer_utilrrrrr r rZBaseFixrr=r?r@rArrrr�<module>s lPK{��\��k/0fixes/__pycache__/fix_types.cpython-36.opt-1.pycnu�[���3 \��@spdZddlmZddlmZddddddd d dddd dddddddddd�Zdd�eD�ZGdd�dej�ZdS)a�Fixer for removing uses of the types module. These work for only the known names in the types module. The forms above can include types. or not. ie, It is assumed the module is imported either as: import types from types import ... # either * or specific types The import statements are not modified. There should be another fixer that handles at least the following constants: type([]) -> list type(()) -> tuple type('') -> str �)� fixer_base)�Name�bool� memoryview�type�complex�dictztype(Ellipsis)�float�int�list�objectz type(None)ztype(NotImplemented)�slice�bytesz(str,)�tuple�str�range)ZBooleanTypeZ BufferTypeZ ClassTypeZComplexTypeZDictTypeZDictionaryTypeZEllipsisTypeZ FloatTypeZIntTypeZListTypeZLongTypeZ ObjectTypeZNoneTypeZNotImplementedTypeZ SliceTypeZ StringTypeZStringTypesZ TupleTypeZTypeTypeZUnicodeTypeZ XRangeTypecCsg|]}d|�qS)z)power< 'types' trailer< '.' 
name='%s' > >�)�.0�trr�//usr/lib64/python3.6/lib2to3/fixes/fix_types.py� <listcomp>3src@s"eZdZdZdje�Zdd�ZdS)�FixTypesT�|cCs&tj|dj�}|r"t||jd�SdS)N�name)�prefix)� _TYPE_MAPPING�get�valuerr)�selfZnodeZresultsZ new_valuerrr� transform9szFixTypes.transformN)�__name__� __module__�__qualname__Z BM_compatible�join�_patsZPATTERNrrrrrr5s rN) �__doc__�rZ fixer_utilrrr$ZBaseFixrrrrr�<module>s2PK{��\��~660fixes/__pycache__/fix_types.cpython-36.opt-2.pycnu�[���3 \��@slddlmZddlmZdddddddd d dd dd ddddddddd�Zdd�eD�ZGdd�dej�ZdS)�)� fixer_base)�Name�bool� memoryview�type�complex�dictztype(Ellipsis)�float�int�list�objectz type(None)ztype(NotImplemented)�slice�bytesz(str,)�tuple�str�range)ZBooleanTypeZ BufferTypeZ ClassTypeZComplexTypeZDictTypeZDictionaryTypeZEllipsisTypeZ FloatTypeZIntTypeZListTypeZLongTypeZ ObjectTypeZNoneTypeZNotImplementedTypeZ SliceTypeZ StringTypeZStringTypesZ TupleTypeZTypeTypeZUnicodeTypeZ XRangeTypecCsg|]}d|�qS)z)power< 'types' trailer< '.' name='%s' > >�)�.0�trr�//usr/lib64/python3.6/lib2to3/fixes/fix_types.py� <listcomp>3src@s"eZdZdZdje�Zdd�ZdS)�FixTypesT�|cCs&tj|dj�}|r"t||jd�SdS)N�name)�prefix)� _TYPE_MAPPING�get�valuerr)�selfZnodeZresultsZ new_valuerrr� transform9szFixTypes.transformN)�__name__� __module__�__qualname__Z BM_compatible�join�_patsZPATTERNrrrrrr5s rN)�rZ fixer_utilrrr$ZBaseFixrrrrr�<module>s0PK{��\+��??2fixes/__pycache__/fix_unicode.cpython-36.opt-2.pycnu�[���3 \��@s8ddlmZddlmZddd�ZGdd�dej�ZdS) �)�token)� fixer_base�chr�str)ZunichrZunicodecs,eZdZdZdZ�fdd�Zdd�Z�ZS)� FixUnicodeTzSTRING | 'unicode' | 'unichr'cs"tt|�j||�d|jk|_dS)N�unicode_literals)�superr� start_treeZfuture_featuresr)�selfZtree�filename)� __class__��1/usr/lib64/python3.6/lib2to3/fixes/fix_unicode.pyr szFixUnicode.start_treecCs�|jtjkr$|j�}t|j|_|S|jtjkr�|j}|jrl|ddkrld|krldjdd�|j d�D��}|ddkr�|dd�}||jkr�|S|j�}||_|SdS) N�z'"�\z\\cSs g|]}|jdd�jdd��qS)z\uz\\uz\Uz\\U)�replace)�.0�vr r r� <listcomp>!sz(FixUnicode.transform.<locals>.<listcomp>ZuU�) �typer�NAMEZclone�_mapping�value�STRINGr�join�split)r ZnodeZresults�new�valr r r� transforms" zFixUnicode.transform)�__name__� __module__�__qualname__Z BM_compatibleZPATTERNr r� __classcell__r r )rrrsrN)Zpgen2r�rrZBaseFixrr r r r�<module>s PK{��\�� �OO1fixes/__pycache__/fix_urllib.cpython-36.opt-1.pycnu�[���3 \� �@s�dZddlmZmZddlmZmZmZmZm Z m Z mZdddddd d ddgfd dddddddddddddddgfddgfgdd dd d!d"d#d$d%d&d'd(d)d*d+d,d-d.d/d0d1d2d3d4d5gfdd6d7gfgd8�Zed9j ed:d;�d<d=�ZGd>d?�d?e�Zd@S)Az�Fix changes imports of urllib which are now incompatible. This is rather similar to fix_imports, but because of the more complex nature of the fixing for urllib, it has its own fixer. 
�)� alternates� FixImports)�Name�Comma� FromImport�Newline�find_indentation�Node�symszurllib.requestZ URLopenerZFancyURLopenerZurlretrieveZ _urlopenerZurlopenZ urlcleanupZpathname2urlZurl2pathnamezurllib.parseZquoteZ quote_plusZunquoteZunquote_plusZ urlencodeZ splitattrZ splithostZ splitnportZsplitpasswdZ splitportZ splitqueryZsplittagZ splittypeZ splituserZ splitvaluezurllib.errorZContentTooShortErrorZinstall_openerZbuild_openerZRequestZOpenerDirectorZBaseHandlerZHTTPDefaultErrorHandlerZHTTPRedirectHandlerZHTTPCookieProcessorZProxyHandlerZHTTPPasswordMgrZHTTPPasswordMgrWithDefaultRealmZAbstractBasicAuthHandlerZHTTPBasicAuthHandlerZProxyBasicAuthHandlerZAbstractDigestAuthHandlerZHTTPDigestAuthHandlerZProxyDigestAuthHandlerZHTTPHandlerZHTTPSHandlerZFileHandlerZ FTPHandlerZCacheFTPHandlerZUnknownHandlerZURLErrorZ HTTPError)�urllib�urllib2rr�ccs~t�}xrtj�D]f\}}x\|D]T}|\}}t|�}d||fVd|||fVd|Vd|Vd||fVqWqWdS)Nz�import_name< 'import' (module=%r | dotted_as_names< any* module=%r any* >) > z�import_from< 'from' mod_member=%r 'import' ( member=%s | import_as_name< member=%s 'as' any > | import_as_names< members=any* >) > zIimport_from< 'from' module_star=%r 'import' star='*' > ztimport_name< 'import' dotted_as_name< module_as=%r 'as' any > > zKpower< bare_with_attr=%r trailer< '.' member=%s > any* > )�set�MAPPING�itemsr)ZbareZ old_moduleZchanges�changeZ new_module�members�r�0/usr/lib64/python3.6/lib2to3/fixes/fix_urllib.py� build_pattern0s rc@s4eZdZdd�Zdd�Zdd�Zdd�Zd d �ZdS)� FixUrllibcCsdjt��S)N�|)�joinr)�selfrrrrIszFixUrllib.build_patterncCsz|jd�}|j}g}x6t|jdd�D] }|jt|d|d�t�g�q(W|jtt|jdd|d��|j|�dS)z�Transform for the basic import case. Replaces the old import name with a comma separated list of its replacements. �moduleNr r)�prefix���r) �getrr�value�extendrr�append�replace)r�node�resultsZ import_mod�pref�names�namerrr�transform_importLs zFixUrllib.transform_importcCs>|jd�}|j}|jd�}|r�t|t�r0|d}d}x*t|jD]}|j|dkr@|d}Pq@W|rx|jt||d��n|j|d��n�g}i} |d} x�| D]�}|j t jkr�|jd j}|jdj}n |j}d}|d kr�xPt|jD]B}||dkr�|d| k�r|j |d�| j|dg�j |�q�Wq�Wg} t|�}d}dd �}x�|D]�}| |}g}x2|dd�D]"}|j|||��|j t���qlW|j||d|��t||�}|�s�|jjj|��r�||_| j |�d}�qNW| �r.g}x&| dd�D]}|j|t�g��q�W|j | d�|j|�n|j|d�dS)z�Transform for imports of specific module elements. Replaces the module to be imported from with the appropriate new module. 
� mod_member�memberrNr )rz!This is an invalid module elementr��,TcSsX|jtjkrHt|jdj|d�|jdj�|jdj�g}ttj|�gSt|j|d�gS)Nr)rr r*)�typer �import_as_namer�childrenrZcloner )r&rZkidsrrr�handle_name�sz/FixUrllib.transform_member.<locals>.handle_nameFzAll module elements are invalidrrrr)rr� isinstance�listrrr!r�cannot_convertr,r r-r.r � setdefaultrrrr�parent�endswithr)rr"r#r(r$r)�new_namer�modulesZmod_dictrZas_name�member_nameZ new_nodesZindentation�firstr/rZeltsr%Zelt�newZnodesZnew_noderrr�transform_member\sh zFixUrllib.transform_membercCs�|jd�}|jd�}d}t|t�r*|d}x*t|jD]}|j|dkr6|d}Pq6W|rp|jt||jd��n|j|d�dS)z.Transform for calls to module members in code.�bare_with_attrr)Nrr )rz!This is an invalid module element) rr0r1rrr!rrr2)rr"r#Z module_dotr)r6rrrr� transform_dot�s zFixUrllib.transform_dotcCsz|jd�r|j||�n^|jd�r0|j||�nF|jd�rH|j||�n.|jd�r`|j|d�n|jd�rv|j|d�dS)Nrr(r<Zmodule_starzCannot handle star imports.Z module_asz#This module is now multiple modules)rr'r;r=r2)rr"r#rrr� transform�s zFixUrllib.transformN)�__name__� __module__�__qualname__rr'r;r=r>rrrrrGs LrN)�__doc__Zlib2to3.fixes.fix_importsrrZlib2to3.fixer_utilrrrrrr r rr rrrrrr�<module>s@$ PK{��\K�t�!!1fixes/__pycache__/fix_urllib.cpython-36.opt-2.pycnu�[���3 \� �@s�ddlmZmZddlmZmZmZmZmZm Z m Z ddddddd d dgfdd ddddddddddddddgfddgfgddddd d!d"d#d$d%d&d'd(d)d*d+d,d-d.d/d0d1d2d3d4gfdd5d6gfgd7�Zed8jed9d:�d;d<�Z Gd=d>�d>e�Zd?S)@�)� alternates� FixImports)�Name�Comma� FromImport�Newline�find_indentation�Node�symszurllib.requestZ URLopenerZFancyURLopenerZurlretrieveZ _urlopenerZurlopenZ urlcleanupZpathname2urlZurl2pathnamezurllib.parseZquoteZ quote_plusZunquoteZunquote_plusZ urlencodeZ splitattrZ splithostZ splitnportZsplitpasswdZ splitportZ splitqueryZsplittagZ splittypeZ splituserZ splitvaluezurllib.errorZContentTooShortErrorZinstall_openerZbuild_openerZRequestZOpenerDirectorZBaseHandlerZHTTPDefaultErrorHandlerZHTTPRedirectHandlerZHTTPCookieProcessorZProxyHandlerZHTTPPasswordMgrZHTTPPasswordMgrWithDefaultRealmZAbstractBasicAuthHandlerZHTTPBasicAuthHandlerZProxyBasicAuthHandlerZAbstractDigestAuthHandlerZHTTPDigestAuthHandlerZProxyDigestAuthHandlerZHTTPHandlerZHTTPSHandlerZFileHandlerZ FTPHandlerZCacheFTPHandlerZUnknownHandlerZURLErrorZ HTTPError)�urllib�urllib2rr�ccs~t�}xrtj�D]f\}}x\|D]T}|\}}t|�}d||fVd|||fVd|Vd|Vd||fVqWqWdS)Nz�import_name< 'import' (module=%r | dotted_as_names< any* module=%r any* >) > z�import_from< 'from' mod_member=%r 'import' ( member=%s | import_as_name< member=%s 'as' any > | import_as_names< members=any* >) > zIimport_from< 'from' module_star=%r 'import' star='*' > ztimport_name< 'import' dotted_as_name< module_as=%r 'as' any > > zKpower< bare_with_attr=%r trailer< '.' 
member=%s > any* > )�set�MAPPING�itemsr)ZbareZ old_moduleZchanges�changeZ new_module�members�r�0/usr/lib64/python3.6/lib2to3/fixes/fix_urllib.py� build_pattern0s rc@s4eZdZdd�Zdd�Zdd�Zdd�Zd d �ZdS)� FixUrllibcCsdjt��S)N�|)�joinr)�selfrrrrIszFixUrllib.build_patterncCsz|jd�}|j}g}x6t|jdd�D] }|jt|d|d�t�g�q(W|jtt|jdd|d��|j|�dS)N�moduler r)�prefix���r) �getrr�value�extendrr�append�replace)r�node�resultsZ import_mod�pref�names�namerrr�transform_importLs zFixUrllib.transform_importcCs>|jd�}|j}|jd�}|r�t|t�r0|d}d}x*t|jD]}|j|dkr@|d}Pq@W|rx|jt||d��n|j|d��n�g}i} |d} x�| D]�}|j t jkr�|jdj}|jdj}n |j}d}|d kr�xPt|jD]B}||dkr�|d| k�r|j |d�| j|dg�j |�q�Wq�Wg} t|�}d }dd�}x�|D]�}| |}g}x2|dd�D]"}|j|||��|j t���qlW|j||d|��t||�}|�s�|jjj|��r�||_| j |�d }�qNW| �r.g}x&| dd�D]}|j|t�g��q�W|j | d�|j|�n|j|d�dS)N� mod_member�memberrr )rz!This is an invalid module elementr��,TcSsX|jtjkrHt|jdj|d�|jdj�|jdj�g}ttj|�gSt|j|d�gS)Nr)rr r*)�typer �import_as_namer�childrenrZcloner )r&rZkidsrrr�handle_name�sz/FixUrllib.transform_member.<locals>.handle_nameFzAll module elements are invalidrrrr)rr� isinstance�listrrr!r�cannot_convertr,r r-r.r � setdefaultrrrr�parent�endswithr)rr"r#r(r$r)�new_namer�modulesZmod_dictrZas_name�member_nameZ new_nodesZindentation�firstr/rZeltsr%Zelt�newZnodesZnew_noderrr�transform_member\sh zFixUrllib.transform_membercCs�|jd�}|jd�}d}t|t�r*|d}x*t|jD]}|j|dkr6|d}Pq6W|rp|jt||jd��n|j|d�dS)N�bare_with_attrr)rr )rz!This is an invalid module element) rr0r1rrr!rrr2)rr"r#Z module_dotr)r6rrrr� transform_dot�s zFixUrllib.transform_dotcCsz|jd�r|j||�n^|jd�r0|j||�nF|jd�rH|j||�n.|jd�r`|j|d�n|jd�rv|j|d�dS)Nrr(r<Zmodule_starzCannot handle star imports.Z module_asz#This module is now multiple modules)rr'r;r=r2)rr"r#rrr� transform�s zFixUrllib.transformN)�__name__� __module__�__qualname__rr'r;r=r>rrrrrGs LrN)Zlib2to3.fixes.fix_importsrrZlib2to3.fixer_utilrrrrrr r rr rrrrrr�<module>s>$ PK{��\~aNN3fixes/__pycache__/fix_ws_comma.cpython-36.opt-1.pycnu�[���3 \B�@s>dZddlmZddlmZddlmZGdd�dej�ZdS)z�Fixer that changes 'a ,b' into 'a, b'. This also changes '{a :b}' into '{a: b}', but does not touch other uses of colons. It does not touch other uses of whitespace. 
�)�pytree)�token)� fixer_basec@s@eZdZdZdZejejd�Zejej d�Z ee fZ dd�ZdS)� FixWsCommaTzH any<(not(',') any)+ ',' ((not(',') any)+ ',')* [not(',') any]> �,�:cCsd|j�}d}xR|jD]H}||jkrD|j}|j�r>d|kr>d|_d}q|rX|j}|sXd|_d}qW|S)NF� �T� )ZcloneZchildren�SEPS�prefix�isspace)�selfZnodeZresults�newZcommaZchildr�r�2/usr/lib64/python3.6/lib2to3/fixes/fix_ws_comma.py� transforms zFixWsComma.transformN)�__name__� __module__�__qualname__ZexplicitZPATTERNrZLeafr�COMMA�COLONrrrrrrrsrN)�__doc__r rZpgen2rrZBaseFixrrrrr�<module>sPK{��\���a��3fixes/__pycache__/fix_ws_comma.cpython-36.opt-2.pycnu�[���3 \B�@s:ddlmZddlmZddlmZGdd�dej�ZdS)�)�pytree)�token)� fixer_basec@s@eZdZdZdZejejd�Zejej d�Z ee fZ dd�ZdS)� FixWsCommaTzH any<(not(',') any)+ ',' ((not(',') any)+ ',')* [not(',') any]> �,�:cCsd|j�}d}xR|jD]H}||jkrD|j}|j�r>d|kr>d|_d}q|rX|j}|sXd|_d}qW|S)NF� �T� )ZcloneZchildren�SEPS�prefix�isspace)�selfZnodeZresults�newZcommaZchildr�r�2/usr/lib64/python3.6/lib2to3/fixes/fix_ws_comma.py� transforms zFixWsComma.transformN)�__name__� __module__�__qualname__ZexplicitZPATTERNrZLeafr�COMMA�COLONrrrrrrrsrN)r rZpgen2rrZBaseFixrrrrr�<module>sPK{��\��C� � 1fixes/__pycache__/fix_xrange.cpython-36.opt-2.pycnu�[���3 \� �@sBddlmZddlmZmZmZddlmZGdd�dej�ZdS)�)� fixer_base)�Name�Call�consuming_calls)�patcompcsheZdZdZdZ�fdd�Zdd�Zdd�Zd d �Zdd�Z d Z eje �Z dZeje�Zdd�Z�ZS)� FixXrangeTz� power< (name='range'|name='xrange') trailer< '(' args=any ')' > rest=any* > cstt|�j||�t�|_dS)N)�superr� start_tree�set�transformed_xranges)�self�tree�filename)� __class__��0/usr/lib64/python3.6/lib2to3/fixes/fix_xrange.pyr szFixXrange.start_treecCs d|_dS)N)r)rr rrrr�finish_treeszFixXrange.finish_treecCsD|d}|jdkr|j||�S|jdkr4|j||�Stt|���dS)N�nameZxrange�range)�value�transform_xrange�transform_range� ValueError�repr)r�node�resultsrrrr� transforms zFixXrange.transformcCs0|d}|jtd|jd��|jjt|��dS)Nrr)�prefix)�replacerrr�add�id)rrrrrrrr$szFixXrange.transform_xrangecCslt|�|jkrh|j|�rhttd�|dj�g�}ttd�|g|jd�}x|dD]}|j|�qRW|SdS)Nr�args�list)r�rest)r r�in_special_contextrrZclonerZappend_child)rrrZ range_callZ list_call�nrrrr*s zFixXrange.transform_rangez3power< func=NAME trailer< '(' node=any ')' > any* >z�for_stmt< 'for' any 'in' node=any ':' any* > | comp_for< 'for' any 'in' node=any any* > | comparison< any 'in' node=any any*> cCsf|jdkrdSi}|jjdk rJ|jj|jj|�rJ|d|krJ|djtkS|jj|j|�od|d|kS)NFr�func)�parent�p1�matchrr�p2)rrrrrrr$?s zFixXrange.in_special_context)�__name__� __module__�__qualname__Z BM_compatibleZPATTERNr rrrrZP1rZcompile_patternr(ZP2r*r$� __classcell__rr)rrrs rN) �rZ fixer_utilrrrrZBaseFixrrrrr�<module>sPK{��\��ȽHH5fixes/__pycache__/fix_xreadlines.cpython-36.opt-1.pycnu�[���3 \��@s2dZddlmZddlmZGdd�dej�ZdS)zpFix "for x in f.xreadlines()" -> "for x in f". This fixer will also convert g(f.xreadlines) into g(f.__iter__).�)� fixer_base)�Namec@seZdZdZdZdd�ZdS)� FixXreadlinesTz� power< call=any+ trailer< '.' 'xreadlines' > trailer< '(' ')' > > | power< any+ trailer< '.' no_call='xreadlines' > > cCs@|jd�}|r$|jtd|jd��n|jdd�|dD��dS)N�no_call�__iter__)�prefixcSsg|]}|j��qS�)Zclone)�.0�xrr�4/usr/lib64/python3.6/lib2to3/fixes/fix_xreadlines.py� <listcomp>sz+FixXreadlines.transform.<locals>.<listcomp>Zcall)�get�replacerr)�selfZnodeZresultsrrrr� transforms zFixXreadlines.transformN)�__name__� __module__�__qualname__Z BM_compatibleZPATTERNrrrrrrsrN)�__doc__�rZ fixer_utilrZBaseFixrrrrr�<module>sPK{��\7�N���5fixes/__pycache__/fix_xreadlines.cpython-36.opt-2.pycnu�[���3 \��@s.ddlmZddlmZGdd�dej�ZdS)�)� fixer_base)�Namec@seZdZdZdZdd�ZdS)� FixXreadlinesTz� power< call=any+ trailer< '.' 
'xreadlines' > trailer< '(' ')' > > | power< any+ trailer< '.' no_call='xreadlines' > > cCs@|jd�}|r$|jtd|jd��n|jdd�|dD��dS)N�no_call�__iter__)�prefixcSsg|]}|j��qS�)Zclone)�.0�xrr�4/usr/lib64/python3.6/lib2to3/fixes/fix_xreadlines.py� <listcomp>sz+FixXreadlines.transform.<locals>.<listcomp>Zcall)�get�replacerr)�selfZnodeZresultsrrrr� transforms zFixXreadlines.transformN)�__name__� __module__�__qualname__Z BM_compatibleZPATTERNrrrrrrsrN)�rZ fixer_utilrZBaseFixrrrrr�<module>sPK{��\�:�~��.fixes/__pycache__/fix_zip.cpython-36.opt-2.pycnu�[���3 \ �@sNddlmZddlmZddlmZddlmZm Z m Z Gdd�dej�ZdS)�)� fixer_base)�Node)�python_symbols)�Name�ArgList�in_special_contextc@s eZdZdZdZdZdd�ZdS)�FixZipTzN power< 'zip' args=trailer< '(' [any] ')' > [trailers=trailer*] > zfuture_builtins.zipcCs�|j|�rdSt|�rdS|dj�}d|_g}d|kr^dd�|dD�}x|D] }d|_qPWttjtd�|gdd�}ttjtd�t|g�g|�}|j|_|S) N�args��trailerscSsg|]}|j��qS�)�clone)�.0�nrr�-/usr/lib64/python3.6/lib2to3/fixes/fix_zip.py� <listcomp>'sz$FixZip.transform.<locals>.<listcomp>�zip)�prefix�list) Zshould_skiprr rr�symsZpowerrr)�selfZnodeZresultsr rr�newrrr� transforms zFixZip.transformN)�__name__� __module__�__qualname__Z BM_compatibleZPATTERNZskip_onrrrrrrsrN) r rZpytreerZpygramrrZ fixer_utilrrrZConditionalFixrrrrr�<module>sPK{��\�w�(fixes/__pycache__/fix_zip.cpython-36.pycnu�[���3 \ �@sRdZddlmZddlmZddlmZddlm Z m Z mZGdd�dej�Z dS) a7 Fixer that changes zip(seq0, seq1, ...) into list(zip(seq0, seq1, ...) unless there exists a 'from future_builtins import zip' statement in the top-level namespace. We avoid the transformation if the zip() call is directly contained in iter(<>), list(<>), tuple(<>), sorted(<>), ...join(<>), or for V in <>:. �)� fixer_base)�Node)�python_symbols)�Name�ArgList�in_special_contextc@s eZdZdZdZdZdd�ZdS)�FixZipTzN power< 'zip' args=trailer< '(' [any] ')' > [trailers=trailer*] > zfuture_builtins.zipcCs�|j|�rdSt|�rdS|dj�}d|_g}d|kr^dd�|dD�}x|D] }d|_qPWttjtd�|gdd�}ttjtd�t|g�g|�}|j|_|S) N�args��trailerscSsg|]}|j��qS�)�clone)�.0�nrr�-/usr/lib64/python3.6/lib2to3/fixes/fix_zip.py� <listcomp>'sz$FixZip.transform.<locals>.<listcomp>�zip)�prefix�list) Zshould_skiprr rr�symsZpowerrr)�selfZnodeZresultsr rr�newrrr� transforms zFixZip.transformN)�__name__� __module__�__qualname__Z BM_compatibleZPATTERNZskip_onrrrrrrsrN)�__doc__r rZpytreerZpygramrrZ fixer_utilrrrZConditionalFixrrrrr�<module>s PK{��\�폏��/fixes/__pycache__/__init__.cpython-36.opt-1.pycnu�[���3 \/�@sdS)N�rrr�./usr/lib64/python3.6/lib2to3/fixes/__init__.py�<module>sPK{��\Gg��fixes/fix_itertools.pynu�[���""" Fixer for itertools.(imap|ifilter|izip) --> (map|filter|zip) and itertools.ifilterfalse --> itertools.filterfalse (bugs 2360-2363) imports from itertools are fixed in fix_itertools_import.py If itertools is imported as something else (ie: import itertools as it; it.izip(spam, eggs)) method calls will not get fixed. """ # Local imports from .. import fixer_base from ..fixer_util import Name class FixItertools(fixer_base.BaseFix): BM_compatible = True it_funcs = "('imap'|'ifilter'|'izip'|'izip_longest'|'ifilterfalse')" PATTERN = """ power< it='itertools' trailer< dot='.' 
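A quick way to see the rewrite this fixer performs is to run it through
lib2to3's refactoring driver. This is a minimal sketch, not part of the
archive; it assumes lib2to3 is importable, and the input string is purely
illustrative (lib2to3 only needs it to parse, the names need not exist):

    from lib2to3.refactor import RefactoringTool

    tool = RefactoringTool(["lib2to3.fixes.fix_itertools"])
    # imap/ifilter/izip lose the 'itertools.' prefix and the leading 'i';
    # ifilterfalse/izip_longest keep the prefix and are only renamed.
    print(tool.refactor_string("itertools.imap(f, xs)\n", "<demo>"))
    # -> map(f, xs)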
[archive entry: fixes/fix_itertools_imports.py]

""" Fixer for imports of itertools.(imap|ifilter|izip|ifilterfalse) """

# Local imports
from lib2to3 import fixer_base
from lib2to3.fixer_util import BlankLine, syms, token


class FixItertoolsImports(fixer_base.BaseFix):
    BM_compatible = True
    PATTERN = """
              import_from< 'from' 'itertools' 'import' imports=any >
              """ % (locals())

    def transform(self, node, results):
        imports = results['imports']
        if imports.type == syms.import_as_name or not imports.children:
            children = [imports]
        else:
            children = imports.children
        for child in children[::2]:
            if child.type == token.NAME:
                member = child.value
                name_node = child
            elif child.type == token.STAR:
                # Just leave the import as is.
                return
            else:
                assert child.type == syms.import_as_name
                name_node = child.children[0]
            member_name = name_node.value
            if member_name in ('imap', 'izip', 'ifilter'):
                child.value = None
                child.remove()
            elif member_name in ('ifilterfalse', 'izip_longest'):
                node.changed()
                name_node.value = ('filterfalse' if member_name[1] == 'f'
                                   else 'zip_longest')

        # Make sure the import statement is still sane
        children = imports.children[:] or [imports]
        remove_comma = True
        for child in children:
            if remove_comma and child.type == token.COMMA:
                child.remove()
            else:
                remove_comma ^= True

        while children and children[-1].type == token.COMMA:
            children.pop().remove()

        # If there are no imports left, just get rid of the entire statement
        if (not (imports.children or getattr(imports, 'value', None)) or
            imports.parent is None):
            p = node.prefix
            node = BlankLine()
            node.prefix = p
            return node

[archive entry: fixes/fix_long.py]

# Copyright 2006 Google, Inc. All Rights Reserved.
# Licensed to PSF under a Contributor Agreement.

"""Fixer that turns 'long' into 'int' everywhere.
"""

# Local imports
from lib2to3 import fixer_base
from lib2to3.fixer_util import is_probably_builtin


class FixLong(fixer_base.BaseFix):
    BM_compatible = True
    PATTERN = "'long'"

    def transform(self, node, results):
        if is_probably_builtin(node):
            node.value = "int"
            node.changed()
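The same driver exercises the two fixers above. Again a hedged sketch with
illustrative input; the fixer module names are the ones shipped in this tree:

    from lib2to3.refactor import RefactoringTool

    tool = RefactoringTool(["lib2to3.fixes.fix_itertools_imports",
                            "lib2to3.fixes.fix_long"])
    print(tool.refactor_string("from itertools import imap, izip_longest\n", "<demo>"))
    # -> from itertools import zip_longest   (imap dropped, izip_longest renamed)
    print(tool.refactor_string("x = long(n)\n", "<demo>"))
    # -> x = int(n)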
[archive entry: fixes/fix_map.py]

# Copyright 2007 Google, Inc. All Rights Reserved.
# Licensed to PSF under a Contributor Agreement.

"""Fixer that changes map(F, ...) into list(map(F, ...)) unless there
exists a 'from future_builtins import map' statement in the top-level
namespace.

As a special case, map(None, X) is changed into list(X).  (This is
necessary because the semantics are changed in this case -- the new
map(None, X) is equivalent to [(x,) for x in X].)

We avoid the transformation (except for the special case mentioned
above) if the map() call is directly contained in iter(<>), list(<>),
tuple(<>), sorted(<>), ...join(<>), or for V in <>:.

NOTE: This is still not correct if the original code was depending on
map(F, X, Y, ...) to go on until the longest argument is exhausted,
substituting None for missing values -- like zip(), it now stops as
soon as the shortest argument is exhausted.
"""

# Local imports
from ..pgen2 import token
from .. import fixer_base
from ..fixer_util import Name, ArgList, Call, ListComp, in_special_context
from ..pygram import python_symbols as syms
from ..pytree import Node


class FixMap(fixer_base.ConditionalFix):
    BM_compatible = True

    PATTERN = """
    map_none=power<
        'map'
        trailer< '(' arglist< 'None' ',' arg=any [','] > ')' >
        [extra_trailers=trailer*]
    >
    |
    map_lambda=power<
        'map'
        trailer<
            '('
            arglist<
                lambdef< 'lambda'
                         (fp=NAME | vfpdef< '(' fp=NAME ')'> ) ':' xp=any
                >
                ','
                it=any
            >
            ')'
        >
        [extra_trailers=trailer*]
    >
    |
    power<
        'map' args=trailer< '(' [any] ')' >
        [extra_trailers=trailer*]
    >
    """

    skip_on = 'future_builtins.map'

    def transform(self, node, results):
        if self.should_skip(node):
            return

        trailers = []
        if 'extra_trailers' in results:
            for t in results['extra_trailers']:
                trailers.append(t.clone())

        if node.parent.type == syms.simple_stmt:
            self.warning(node, "You should use a for loop here")
            new = node.clone()
            new.prefix = ""
            new = Call(Name("list"), [new])
        elif "map_lambda" in results:
            new = ListComp(results["xp"].clone(),
                           results["fp"].clone(),
                           results["it"].clone())
            new = Node(syms.power, [new] + trailers, prefix="")

        else:
            if "map_none" in results:
                new = results["arg"].clone()
                new.prefix = ""
            else:
                if "args" in results:
                    args = results["args"]
                    if args.type == syms.trailer and \
                       args.children[1].type == syms.arglist and \
                       args.children[1].children[0].type == token.NAME and \
                       args.children[1].children[0].value == "None":
                        self.warning(node, "cannot convert map(None, ...) "
                                     "with multiple arguments because map() "
                                     "now truncates to the shortest sequence")
                        return

                    new = Node(syms.power, [Name("map"), args.clone()])
                    new.prefix = ""

                if in_special_context(node):
                    return None

            new = Node(syms.power, [Name("list"), ArgList([new])] + trailers)
            new.prefix = ""

        new.prefix = node.prefix
        return new
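Both branches of the docstring can be checked the same way; a minimal
sketch under the same assumptions (importable lib2to3, throwaway input):

    from lib2to3.refactor import RefactoringTool

    tool = RefactoringTool(["lib2to3.fixes.fix_map"])
    print(tool.refactor_string("x = map(f, seq)\n", "<demo>"))
    # -> x = list(map(f, seq))
    print(tool.refactor_string("y = map(None, seq)\n", "<demo>"))
    # -> y = list(seq)          (the special case described above)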
[archive entry: fixes/fix_metaclass.py]

"""Fixer for __metaclass__ = X -> (metaclass=X) methods.

   The various forms of classdef (inherits nothing, inherits once, inherits
   many) don't parse the same in the CST so we look at ALL classes for
   a __metaclass__ and if we find one normalize the inherits to all be
   an arglist.

   For one-liner classes ('class X: pass') there is no indent/dedent so
   we normalize those into having a suite.

   Moving the __metaclass__ into the classdef can also cause the class
   body to be empty so there is some special casing for that as well.

   This fixer also tries very hard to keep original indenting and spacing
   in all those corner cases.
"""
# Author: Jack Diederich

# Local imports
from .. import fixer_base
from ..pygram import token
from ..fixer_util import syms, Node, Leaf


def has_metaclass(parent):
    """ we have to check the cls_node without changing it.
        There are two possibilities:
           1)  clsdef => suite => simple_stmt => expr_stmt => Leaf('__meta')
           2)  clsdef => simple_stmt => expr_stmt => Leaf('__meta')
    """
    for node in parent.children:
        if node.type == syms.suite:
            return has_metaclass(node)
        elif node.type == syms.simple_stmt and node.children:
            expr_node = node.children[0]
            if expr_node.type == syms.expr_stmt and expr_node.children:
                left_side = expr_node.children[0]
                if isinstance(left_side, Leaf) and \
                        left_side.value == '__metaclass__':
                    return True
    return False


def fixup_parse_tree(cls_node):
    """ one-line classes don't get a suite in the parse tree so we add
        one to normalize the tree
    """
    for node in cls_node.children:
        if node.type == syms.suite:
            # already in the preferred format, do nothing
            return

    # !%@#! oneliners have no suite node, we have to fake one up
    for i, node in enumerate(cls_node.children):
        if node.type == token.COLON:
            break
    else:
        raise ValueError("No class suite and no ':'!")

    # move everything into a suite node
    suite = Node(syms.suite, [])
    while cls_node.children[i+1:]:
        move_node = cls_node.children[i+1]
        suite.append_child(move_node.clone())
        move_node.remove()
    cls_node.append_child(suite)
    node = suite


def fixup_simple_stmt(parent, i, stmt_node):
    """ if there is a semi-colon all the parts count as part of the same
        simple_stmt.  We just want the __metaclass__ part so we move
        everything after the semi-colon into its own simple_stmt node
    """
    for semi_ind, node in enumerate(stmt_node.children):
        if node.type == token.SEMI: # *sigh*
            break
    else:
        return

    node.remove() # kill the semicolon
    new_expr = Node(syms.expr_stmt, [])
    new_stmt = Node(syms.simple_stmt, [new_expr])
    while stmt_node.children[semi_ind:]:
        move_node = stmt_node.children[semi_ind]
        new_expr.append_child(move_node.clone())
        move_node.remove()
    parent.insert_child(i, new_stmt)
    new_leaf1 = new_stmt.children[0].children[0]
    old_leaf1 = stmt_node.children[0].children[0]
    new_leaf1.prefix = old_leaf1.prefix


def remove_trailing_newline(node):
    if node.children and node.children[-1].type == token.NEWLINE:
        node.children[-1].remove()


def find_metas(cls_node):
    # find the suite node (Mmm, sweet nodes)
    for node in cls_node.children:
        if node.type == syms.suite:
            break
    else:
        raise ValueError("No class suite!")

    # look for simple_stmt[ expr_stmt[ Leaf('__metaclass__') ] ]
    for i, simple_node in list(enumerate(node.children)):
        if simple_node.type == syms.simple_stmt and simple_node.children:
            expr_node = simple_node.children[0]
            if expr_node.type == syms.expr_stmt and expr_node.children:
                # Check if the expr_node is a simple assignment.
                left_node = expr_node.children[0]
                if isinstance(left_node, Leaf) and \
                        left_node.value == '__metaclass__':
                    # We found an assignment to __metaclass__.
                    fixup_simple_stmt(node, i, simple_node)
                    remove_trailing_newline(simple_node)
                    yield (node, i, simple_node)


def fixup_indent(suite):
    """ If an INDENT is followed by a thing with a prefix then nuke the prefix
        Otherwise we get in trouble when removing __metaclass__ at suite start
    """
    kids = suite.children[::-1]
    # find the first indent
    while kids:
        node = kids.pop()
        if node.type == token.INDENT:
            break

    # find the first Leaf
    while kids:
        node = kids.pop()
        if isinstance(node, Leaf) and node.type != token.DEDENT:
            if node.prefix:
                node.prefix = ''
            return
        else:
            kids.extend(node.children[::-1])


class FixMetaclass(fixer_base.BaseFix):
    BM_compatible = True

    PATTERN = """
    classdef<any*>
    """

    def transform(self, node, results):
        if not has_metaclass(node):
            return

        fixup_parse_tree(node)

        # find metaclasses, keep the last one
        last_metaclass = None
        for suite, i, stmt in find_metas(node):
            last_metaclass = stmt
            stmt.remove()

        text_type = node.children[0].type # always Leaf(nnn, 'class')

        # figure out what kind of classdef we have
        if len(node.children) == 7:
            # Node(classdef, ['class', 'name', '(', arglist, ')', ':', suite])
            #                 0        1       2    3        4    5    6
            if node.children[3].type == syms.arglist:
                arglist = node.children[3]
            # Node(classdef, ['class', 'name', '(', 'Parent', ')', ':', suite])
            else:
                parent = node.children[3].clone()
                arglist = Node(syms.arglist, [parent])
                node.set_child(3, arglist)
        elif len(node.children) == 6:
            # Node(classdef, ['class', 'name', '(',  ')', ':', suite])
            #                 0        1       2     3    4    5
            arglist = Node(syms.arglist, [])
            node.insert_child(3, arglist)
        elif len(node.children) == 4:
            # Node(classdef, ['class', 'name', ':', suite])
            #                 0        1       2    3
            arglist = Node(syms.arglist, [])
            node.insert_child(2, Leaf(token.RPAR, ')'))
            node.insert_child(2, arglist)
            node.insert_child(2, Leaf(token.LPAR, '('))
        else:
            raise ValueError("Unexpected class definition")

        # now stick the metaclass in the arglist
        meta_txt = last_metaclass.children[0].children[0]
        meta_txt.value = 'metaclass'
        orig_meta_prefix = meta_txt.prefix

        if arglist.children:
            arglist.append_child(Leaf(token.COMMA, ','))
            meta_txt.prefix = ' '
        else:
            meta_txt.prefix = ''

        # compact the expression "metaclass = Meta" -> "metaclass=Meta"
        expr_stmt = last_metaclass.children[0]
        assert expr_stmt.type == syms.expr_stmt
        expr_stmt.children[1].prefix = ''
        expr_stmt.children[2].prefix = ''

        arglist.append_child(last_metaclass)

        fixup_indent(suite)

        # check for empty suite
        if not suite.children:
            # one-liner that was just __metaclass_
            suite.remove()
            pass_leaf = Leaf(text_type, 'pass')
            pass_leaf.prefix = orig_meta_prefix
            node.append_child(pass_leaf)
            node.append_child(Leaf(token.NEWLINE, '\n'))

        elif len(suite.children) > 1 and \
                 (suite.children[-2].type == token.INDENT and
                  suite.children[-1].type == token.DEDENT):
            # there was only one line in the class body and it was __metaclass__
            pass_leaf = Leaf(text_type, 'pass')
            suite.insert_child(-1, pass_leaf)
            suite.insert_child(-1, Leaf(token.NEWLINE, '\n'))
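A sketch of the corner case the docstring describes (a class body that is
only a __metaclass__ assignment); the expected output shown is inferred
from the code above, not taken from the archive:

    from lib2to3.refactor import RefactoringTool

    tool = RefactoringTool(["lib2to3.fixes.fix_metaclass"])
    src = "class X:\n    __metaclass__ = Meta\n"
    print(tool.refactor_string(src, "<demo>"))
    # -> class X(metaclass=Meta):
    #        pass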
[archive entry: fixes/fix_methodattrs.py]

"""Fix bound method attributes (method.im_? -> method.__?__).
"""
# Author: Christian Heimes

# Local imports
from .. import fixer_base
from ..fixer_util import Name

MAP = {
    "im_func" : "__func__",
    "im_self" : "__self__",
    "im_class" : "__self__.__class__"
    }

class FixMethodattrs(fixer_base.BaseFix):
    BM_compatible = True
    PATTERN = """
    power< any+ trailer< '.' attr=('im_func' | 'im_self' | 'im_class') > any* >
    """

    def transform(self, node, results):
        attr = results["attr"][0]
        new = MAP[attr.value]
        attr.replace(Name(new, prefix=attr.prefix))

[archive entry: fixes/fix_ne.py]

# Copyright 2006 Google, Inc. All Rights Reserved.
# Licensed to PSF under a Contributor Agreement.

"""Fixer that turns <> into !=."""

# Local imports
from .. import pytree
from ..pgen2 import token
from .. import fixer_base


class FixNe(fixer_base.BaseFix):
    # This is so simple that we don't need the pattern compiler.

    _accept_type = token.NOTEQUAL

    def match(self, node):
        # Override
        return node.value == "<>"

    def transform(self, node, results):
        new = pytree.Leaf(token.NOTEQUAL, "!=", prefix=node.prefix)
        return new

[archive entry: fixes/fix_next.py]

"""Fixer for it.next() -> next(it), per PEP 3114."""
# Author: Collin Winter

# Things that currently aren't covered:
#   - listcomp "next" names aren't warned
#   - "with" statement targets aren't checked

# Local imports
from ..pgen2 import token
from ..pygram import python_symbols as syms
from .. import fixer_base
from ..fixer_util import Name, Call, find_binding

bind_warning = "Calls to builtin next() possibly shadowed by global binding"


class FixNext(fixer_base.BaseFix):
    BM_compatible = True
    PATTERN = """
    power< base=any+ trailer< '.' attr='next' > trailer< '(' ')' > >
    |
    power< head=any+ trailer< '.' attr='next' > not trailer< '(' ')' > >
    |
    classdef< 'class' any+ ':'
              suite< any*
                     funcdef< 'def' name='next'
                              parameters< '(' NAME ')' > any+ >
                     any* > >
    |
    global=global_stmt< 'global' any* 'next' any* >
    """

    order = "pre" # Pre-order tree traversal

    def start_tree(self, tree, filename):
        super(FixNext, self).start_tree(tree, filename)

        n = find_binding('next', tree)
        if n:
            self.warning(n, bind_warning)
            self.shadowed_next = True
        else:
            self.shadowed_next = False

    def transform(self, node, results):
        assert results

        base = results.get("base")
        attr = results.get("attr")
        name = results.get("name")

        if base:
            if self.shadowed_next:
                attr.replace(Name("__next__", prefix=attr.prefix))
            else:
                base = [n.clone() for n in base]
                base[0].prefix = ""
                node.replace(Call(Name("next", prefix=node.prefix), base))
        elif name:
            n = Name("__next__", prefix=name.prefix)
            name.replace(n)
        elif attr:
            # We don't do this transformation if we're assigning to "x.next".
            # Unfortunately, it doesn't seem possible to do this in PATTERN,
            # so it's being done here.
            if is_assign_target(node):
                head = results["head"]
                if "".join([str(n) for n in head]).strip() == '__builtin__':
                    self.warning(node, bind_warning)
                return
            attr.replace(Name("__next__"))
        elif "global" in results:
            self.warning(node, bind_warning)
            self.shadowed_next = True


### The following functions help test if node is part of an assignment
###   target.

def is_assign_target(node):
    assign = find_assign(node)
    if assign is None:
        return False

    for child in assign.children:
        if child.type == token.EQUAL:
            return False
        elif is_subtree(child, node):
            return True
    return False

def find_assign(node):
    if node.type == syms.expr_stmt:
        return node
    if node.type == syms.simple_stmt or node.parent is None:
        return None
    return find_assign(node.parent)

def is_subtree(root, node):
    if root == node:
        return True
    return any(is_subtree(c, node) for c in root.children)
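For fix_next, note the shadow detection: if the module binds its own global
'next', the fixer renames the attribute instead of calling the builtin.
Minimal sketch, same assumptions as the earlier ones:

    from lib2to3.refactor import RefactoringTool

    tool = RefactoringTool(["lib2to3.fixes.fix_next"])
    print(tool.refactor_string("x = it.next()\n", "<demo>"))
    # -> x = next(it)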
# ===== fixes/fix_nonzero.py =====

"""Fixer for __nonzero__ -> __bool__ methods."""
# Author: Collin Winter

# Local imports
from .. import fixer_base
from ..fixer_util import Name

class FixNonzero(fixer_base.BaseFix):
    BM_compatible = True
    PATTERN = """
    classdef< 'class' any+ ':'
              suite< any*
                     funcdef< 'def' name='__nonzero__'
                              parameters< '(' NAME ')' > any+ >
                     any* > >
    """

    def transform(self, node, results):
        name = results["name"]
        new = Name("__bool__", prefix=name.prefix)
        name.replace(new)


# ===== fixes/fix_numliterals.py =====

"""Fixer that turns 1L into 1, 0755 into 0o755.
"""
# Copyright 2007 Georg Brandl.
# Licensed to PSF under a Contributor Agreement.

# Local imports
from ..pgen2 import token
from .. import fixer_base
from ..fixer_util import Number


class FixNumliterals(fixer_base.BaseFix):
    # This is so simple that we don't need the pattern compiler.

    _accept_type = token.NUMBER

    def match(self, node):
        # Override
        return (node.value.startswith("0") or node.value[-1] in "Ll")

    def transform(self, node, results):
        val = node.value
        if val[-1] in 'Ll':
            val = val[:-1]
        elif val.startswith('0') and val.isdigit() and len(set(val)) > 1:
            val = "0o" + val[1:]

        return Number(val, prefix=node.prefix)
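# ---- editor's example (not from the archive) ----
# Sketch of the numeric-literal rewrite; both sample statements are assumed.
from lib2to3.refactor import RefactoringTool

rt = RefactoringTool(["lib2to3.fixes.fix_numliterals"])
print(rt.refactor_string("x = 0755\ny = 1L\n", "<demo>"))
# -> x = 0o755 and y = 1
# ---- end example ----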
# ===== fixes/fix_operator.py =====

"""Fixer for operator functions.

operator.isCallable(obj)       -> hasattr(obj, '__call__')
operator.sequenceIncludes(obj) -> operator.contains(obj)
operator.isSequenceType(obj)   -> isinstance(obj, collections.Sequence)
operator.isMappingType(obj)    -> isinstance(obj, collections.Mapping)
operator.isNumberType(obj)     -> isinstance(obj, numbers.Number)
operator.repeat(obj, n)        -> operator.mul(obj, n)
operator.irepeat(obj, n)       -> operator.imul(obj, n)
"""

import collections

# Local imports
from lib2to3 import fixer_base
from lib2to3.fixer_util import Call, Name, String, touch_import


def invocation(s):
    def dec(f):
        f.invocation = s
        return f
    return dec


class FixOperator(fixer_base.BaseFix):
    BM_compatible = True
    order = "pre"

    methods = """
              method=('isCallable'|'sequenceIncludes'
                     |'isSequenceType'|'isMappingType'|'isNumberType'
                     |'repeat'|'irepeat')
              """
    obj = "'(' obj=any ')'"
    PATTERN = """
              power< module='operator'
                trailer< '.' %(methods)s > trailer< %(obj)s > >
              |
              power< %(methods)s trailer< %(obj)s > >
              """ % dict(methods=methods, obj=obj)

    def transform(self, node, results):
        method = self._check_method(node, results)
        if method is not None:
            return method(node, results)

    @invocation("operator.contains(%s)")
    def _sequenceIncludes(self, node, results):
        return self._handle_rename(node, results, "contains")

    @invocation("hasattr(%s, '__call__')")
    def _isCallable(self, node, results):
        obj = results["obj"]
        args = [obj.clone(), String(", "), String("'__call__'")]
        return Call(Name("hasattr"), args, prefix=node.prefix)

    @invocation("operator.mul(%s)")
    def _repeat(self, node, results):
        return self._handle_rename(node, results, "mul")

    @invocation("operator.imul(%s)")
    def _irepeat(self, node, results):
        return self._handle_rename(node, results, "imul")

    @invocation("isinstance(%s, collections.Sequence)")
    def _isSequenceType(self, node, results):
        return self._handle_type2abc(node, results, "collections", "Sequence")

    @invocation("isinstance(%s, collections.Mapping)")
    def _isMappingType(self, node, results):
        return self._handle_type2abc(node, results, "collections", "Mapping")

    @invocation("isinstance(%s, numbers.Number)")
    def _isNumberType(self, node, results):
        return self._handle_type2abc(node, results, "numbers", "Number")

    def _handle_rename(self, node, results, name):
        method = results["method"][0]
        method.value = name
        method.changed()

    def _handle_type2abc(self, node, results, module, abc):
        touch_import(None, module, node)
        obj = results["obj"]
        args = [obj.clone(), String(", " + ".".join([module, abc]))]
        return Call(Name("isinstance"), args, prefix=node.prefix)

    def _check_method(self, node, results):
        method = getattr(self, "_" + results["method"][0].value)
        if isinstance(method, collections.Callable):
            if "module" in results:
                return method
            else:
                sub = (str(results["obj"]),)
                invocation_str = method.invocation % sub
                self.warning(node, "You should use '%s' here." % invocation_str)
        return None


# ===== fixes/fix_paren.py =====

"""Fixer that adds parentheses where they are required

This converts ``[x for x in 1, 2]`` to ``[x for x in (1, 2)]``."""

# By Taek Joo Kim and Benjamin Peterson

# Local imports
from .. import fixer_base
from ..fixer_util import LParen, RParen

# XXX This doesn't support nested for loops like [x for x in 1, 2 for x in 1, 2]
class FixParen(fixer_base.BaseFix):
    BM_compatible = True

    PATTERN = """
        atom< ('[' | '(')
            (listmaker< any
                comp_for<
                    'for' NAME 'in'
                    target=testlist_safe< any (',' any)+ [','] >
                    [any]
                >
            >
            |
            testlist_gexp< any
                comp_for<
                    'for' NAME 'in'
                    target=testlist_safe< any (',' any)+ [','] >
                    [any]
                >
            >)
        (']' | ')') >
    """

    def transform(self, node, results):
        target = results["target"]

        lparen = LParen()
        lparen.prefix = target.prefix
        target.prefix = ""  # Make it hug the parentheses
        target.insert_child(0, lparen)
        target.append_child(RParen())
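# ---- editor's example (not from the archive) ----
# Sketch of the required-parentheses fix on a Python 2 comprehension;
# the sample source is assumed.
from lib2to3.refactor import RefactoringTool

rt = RefactoringTool(["lib2to3.fixes.fix_paren"])
print(rt.refactor_string("y = [x for x in 1, 2]\n", "<demo>"))
# -> y = [x for x in (1, 2)]
# ---- end example ----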
# ===== fixes/fix_print.py =====

# Copyright 2006 Google, Inc. All Rights Reserved.
# Licensed to PSF under a Contributor Agreement.

"""Fixer for print.

Change:
    'print'          into 'print()'
    'print ...'      into 'print(...)'
    'print ... ,'    into 'print(..., end=" ")'
    'print >>x, ...' into 'print(..., file=x)'

No changes are applied if print_function is imported from __future__
"""

# Local imports
from .. import patcomp
from .. import pytree
from ..pgen2 import token
from .. import fixer_base
from ..fixer_util import Name, Call, Comma, String


parend_expr = patcomp.compile_pattern(
              """atom< '(' [atom|STRING|NAME] ')' >"""
              )


class FixPrint(fixer_base.BaseFix):

    BM_compatible = True

    PATTERN = """
              simple_stmt< any* bare='print' any* > | print_stmt
              """

    def transform(self, node, results):
        assert results

        bare_print = results.get("bare")

        if bare_print:
            # Special-case print all by itself
            bare_print.replace(Call(Name("print"), [],
                               prefix=bare_print.prefix))
            return
        assert node.children[0] == Name("print")
        args = node.children[1:]
        if len(args) == 1 and parend_expr.match(args[0]):
            # We don't want to keep sticking parens around an
            # already-parenthesised expression.
            return

        sep = end = file = None
        if args and args[-1] == Comma():
            args = args[:-1]
            end = " "
        if args and args[0] == pytree.Leaf(token.RIGHTSHIFT, ">>"):
            assert len(args) >= 2
            file = args[1].clone()
            args = args[3:]  # Strip a possible comma after the file expression
        # Now synthesize a print(args, sep=..., end=..., file=...) node.
        l_args = [arg.clone() for arg in args]
        if l_args:
            l_args[0].prefix = ""
        if sep is not None or end is not None or file is not None:
            if sep is not None:
                self.add_kwarg(l_args, "sep", String(repr(sep)))
            if end is not None:
                self.add_kwarg(l_args, "end", String(repr(end)))
            if file is not None:
                self.add_kwarg(l_args, "file", file)
        n_stmt = Call(Name("print"), l_args)
        n_stmt.prefix = node.prefix
        return n_stmt

    def add_kwarg(self, l_nodes, s_kwd, n_expr):
        # XXX All this prefix-setting may lose comments (though rarely)
        n_expr.prefix = ""
        n_argument = pytree.Node(self.syms.argument,
                                 (Name(s_kwd),
                                  pytree.Leaf(token.EQUAL, "="),
                                  n_expr))
        if l_nodes:
            l_nodes.append(Comma())
            n_argument.prefix = " "
        l_nodes.append(n_argument)
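# ---- editor's example (not from the archive) ----
# Sketch of the print-statement rewrite; the sample statement is assumed.
from lib2to3.refactor import RefactoringTool

rt = RefactoringTool(["lib2to3.fixes.fix_print"])
print(rt.refactor_string('print >>sys.stderr, "err"\n', "<demo>"))
# -> print("err", file=sys.stderr)
# ---- end example ----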
# ===== fixes/fix_raise.py =====

"""Fixer for 'raise E, V, T'

raise         -> raise
raise E       -> raise E
raise E, V    -> raise E(V)
raise E, V, T -> raise E(V).with_traceback(T)
raise E, None, T -> raise E.with_traceback(T)

raise (((E, E'), E''), E'''), V -> raise E(V)
raise "foo", V, T               -> warns about string exceptions

CAVEATS:
1) "raise E, V" will be incorrectly translated if V is an exception
   instance. The correct Python 3 idiom is

        raise E from V

   but since we can't detect instance-hood by syntax alone and since
   any client code would have to be changed as well, we don't automate
   this.
"""
# Author: Collin Winter

# Local imports
from .. import pytree
from ..pgen2 import token
from .. import fixer_base
from ..fixer_util import Name, Call, Attr, ArgList, is_tuple

class FixRaise(fixer_base.BaseFix):

    BM_compatible = True
    PATTERN = """
    raise_stmt< 'raise' exc=any [',' val=any [',' tb=any]] >
    """

    def transform(self, node, results):
        syms = self.syms

        exc = results["exc"].clone()
        if exc.type == token.STRING:
            msg = "Python 3 does not support string exceptions"
            self.cannot_convert(node, msg)
            return

        # Python 2 supports
        #  raise ((((E1, E2), E3), E4), E5), V
        # as a synonym for
        #  raise E1, V
        # Since Python 3 will not support this, we recurse down any tuple
        # literals, always taking the first element.
        if is_tuple(exc):
            while is_tuple(exc):
                # exc.children[1:-1] is the unparenthesized tuple
                # exc.children[1].children[0] is the first element of the tuple
                exc = exc.children[1].children[0].clone()
            exc.prefix = " "

        if "val" not in results:
            # One-argument raise
            new = pytree.Node(syms.raise_stmt, [Name("raise"), exc])
            new.prefix = node.prefix
            return new

        val = results["val"].clone()
        if is_tuple(val):
            args = [c.clone() for c in val.children[1:-1]]
        else:
            val.prefix = ""
            args = [val]

        if "tb" in results:
            tb = results["tb"].clone()
            tb.prefix = ""

            e = exc
            # If there's a traceback and None is passed as the value, then don't
            # add a call, since the user probably just wants to add a
            # traceback. See issue #9661.
            if val.type != token.NAME or val.value != "None":
                e = Call(exc, args)
            with_tb = Attr(e, Name('with_traceback')) + [ArgList([tb])]
            new = pytree.Node(syms.simple_stmt, [Name("raise")] + with_tb)
            new.prefix = node.prefix
            return new
        else:
            return pytree.Node(syms.raise_stmt,
                               [Name("raise"), Call(exc, args)],
                               prefix=node.prefix)


# ===== fixes/fix_raw_input.py =====

"""Fixer that changes raw_input(...) into input(...)."""
# Author: Andre Roberge

# Local imports
from .. import fixer_base
from ..fixer_util import Name

class FixRawInput(fixer_base.BaseFix):

    BM_compatible = True
    PATTERN = """
              power< name='raw_input' trailer< '(' [any] ')' > any* >
              """

    def transform(self, node, results):
        name = results["name"]
        name.replace(Name("input", prefix=name.prefix))


# ===== fixes/fix_reduce.py =====

# Copyright 2008 Armin Ronacher.
# Licensed to PSF under a Contributor Agreement.

"""Fixer for reduce().

Makes sure reduce() is imported from the functools module if reduce is
used in that module.
"""

from lib2to3 import fixer_base
from lib2to3.fixer_util import touch_import


class FixReduce(fixer_base.BaseFix):

    BM_compatible = True
    order = "pre"

    PATTERN = """
    power< 'reduce'
        trailer< '('
            arglist< (
                (not(argument<any '=' any>) any ','
                 not(argument<any '=' any>) any) |
                (not(argument<any '=' any>) any ','
                 not(argument<any '=' any>) any ','
                 not(argument<any '=' any>) any)
            ) >
        ')' >
    >
    """

    def transform(self, node, results):
        touch_import('functools', 'reduce', node)


# ===== fixes/fix_reload.py =====

"""Fixer for reload().

reload(s) -> imp.reload(s)"""

# Local imports
from .. import fixer_base
from ..fixer_util import ImportAndCall, touch_import


class FixReload(fixer_base.BaseFix):
    BM_compatible = True
    order = "pre"

    PATTERN = """
    power< 'reload'
           trailer< lpar='('
                    ( not(arglist | argument<any '=' any>) obj=any
                      | obj=arglist<(not argument<any '=' any>) any ','> )
                    rpar=')' >
           after=any*
    >
    """

    def transform(self, node, results):
        if results:
            # I feel like we should be able to express this logic in the
            # PATTERN above but I don't know how to do it so...
            obj = results['obj']
            if obj:
                if obj.type == self.syms.star_expr:
                    return  # Make no change.
                if (obj.type == self.syms.argument and
                        obj.children[0].value == '**'):
                    return  # Make no change.
        names = ('imp', 'reload')
        new = ImportAndCall(node, results, names)
        touch_import(None, 'imp', node)
        return new
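# ---- editor's example (not from the archive) ----
# Sketch of the raise-statement rewrite; the sample statement is assumed.
from lib2to3.refactor import RefactoringTool

rt = RefactoringTool(["lib2to3.fixes.fix_raise"])
print(rt.refactor_string('raise ValueError, "bad"\n', "<demo>"))
# -> raise ValueError("bad")
# ---- end example ----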
# ===== fixes/fix_renames.py =====

"""Fix incompatible renames

Fixes:
  * sys.maxint -> sys.maxsize
"""
# Author: Christian Heimes
# based on Collin Winter's fix_import

# Local imports
from .. import fixer_base
from ..fixer_util import Name, attr_chain

MAPPING = {"sys": {"maxint" : "maxsize"},
          }
LOOKUP = {}

def alternates(members):
    return "(" + "|".join(map(repr, members)) + ")"


def build_pattern():
    #bare = set()
    for module, replace in list(MAPPING.items()):
        for old_attr, new_attr in list(replace.items()):
            LOOKUP[(module, old_attr)] = new_attr
            #bare.add(module)
            #bare.add(old_attr)
            #yield """
            #      import_name< 'import' (module=%r
            #          | dotted_as_names< any* module=%r any* >) >
            #      """ % (module, module)
            yield """
                  import_from< 'from' module_name=%r 'import'
                      ( attr_name=%r | import_as_name< attr_name=%r 'as' any >) >
                  """ % (module, old_attr, old_attr)
            yield """
                  power< module_name=%r trailer< '.' attr_name=%r > any* >
                  """ % (module, old_attr)
    #yield """bare_name=%s""" % alternates(bare)


class FixRenames(fixer_base.BaseFix):
    BM_compatible = True
    PATTERN = "|".join(build_pattern())

    order = "pre"  # Pre-order tree traversal

    # Don't match the node if it's within another match
    def match(self, node):
        match = super(FixRenames, self).match
        results = match(node)
        if results:
            if any(match(obj) for obj in attr_chain(node, "parent")):
                return False
            return results
        return False

    #def start_tree(self, tree, filename):
    #    super(FixRenames, self).start_tree(tree, filename)
    #    self.replace = {}

    def transform(self, node, results):
        mod_name = results.get("module_name")
        attr_name = results.get("attr_name")
        #bare_name = results.get("bare_name")
        #import_mod = results.get("module")

        if mod_name and attr_name:
            new_attr = LOOKUP[(mod_name.value, attr_name.value)]
            attr_name.replace(Name(new_attr, prefix=attr_name.prefix))


# ===== fixes/fix_repr.py =====

# Copyright 2006 Google, Inc. All Rights Reserved.
# Licensed to PSF under a Contributor Agreement.

"""Fixer that transforms `xyzzy` into repr(xyzzy)."""

# Local imports
from .. import fixer_base
from ..fixer_util import Call, Name, parenthesize


class FixRepr(fixer_base.BaseFix):

    BM_compatible = True
    PATTERN = """
              atom < '`' expr=any '`' >
              """

    def transform(self, node, results):
        expr = results["expr"].clone()

        if expr.type == self.syms.testlist1:
            expr = parenthesize(expr)
        return Call(Name("repr"), [expr], prefix=node.prefix)


# ===== fixes/fix_set_literal.py =====

"""
Optional fixer to transform set() calls to set literals.
"""

# Author: Benjamin Peterson

from lib2to3 import fixer_base, pytree
from lib2to3.fixer_util import token, syms


class FixSetLiteral(fixer_base.BaseFix):

    BM_compatible = True
    explicit = True

    PATTERN = """power< 'set' trailer< '('
                     (atom=atom< '[' (items=listmaker< any ((',' any)* [',']) >
                                |
                                single=any) ']' >
                     |
                     atom< '(' items=testlist_gexp< any ((',' any)* [',']) > ')' >
                     )
                     ')' > >
              """

    def transform(self, node, results):
        single = results.get("single")
        if single:
            # Make a fake listmaker
            fake = pytree.Node(syms.listmaker, [single.clone()])
            single.replace(fake)
            items = fake
        else:
            items = results["items"]

        # Build the contents of the literal
        literal = [pytree.Leaf(token.LBRACE, "{")]
        literal.extend(n.clone() for n in items.children)
        literal.append(pytree.Leaf(token.RBRACE, "}"))
        # Set the prefix of the right brace to that of the ')' or ']'
        literal[-1].prefix = items.next_sibling.prefix
        maker = pytree.Node(syms.dictsetmaker, literal)
        maker.prefix = node.prefix

        # If the original was a one tuple, we need to remove the extra comma.
        if len(maker.children) == 4:
            n = maker.children[2]
            n.remove()
            maker.children[-1].prefix = n.prefix

        # Finally, replace the set call with our shiny new literal.
        return maker
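# ---- editor's example (not from the archive) ----
# fix_set_literal is marked explicit, so it only runs when requested by name;
# passing the dotted fixer path through the `explicit` argument is assumed to
# be the opt-in mechanism (as 2to3's -f option does). The input is a sample.
from lib2to3.refactor import RefactoringTool

fixer = "lib2to3.fixes.fix_set_literal"
rt = RefactoringTool([fixer], explicit=[fixer])
print(rt.refactor_string("s = set([1, 2, 3])\n", "<demo>"))  # -> s = {1, 2, 3}
# ---- end example ----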
# ===== fixes/fix_standarderror.py =====

# Copyright 2007 Google, Inc. All Rights Reserved.
# Licensed to PSF under a Contributor Agreement.

"""Fixer for StandardError -> Exception."""

# Local imports
from .. import fixer_base
from ..fixer_util import Name


class FixStandarderror(fixer_base.BaseFix):
    BM_compatible = True
    PATTERN = """
              'StandardError'
              """

    def transform(self, node, results):
        return Name("Exception", prefix=node.prefix)


# ===== fixes/fix_sys_exc.py =====

"""Fixer for sys.exc_{type, value, traceback}

sys.exc_type -> sys.exc_info()[0]
sys.exc_value -> sys.exc_info()[1]
sys.exc_traceback -> sys.exc_info()[2]
"""

# By Jeff Balogh and Benjamin Peterson

# Local imports
from .. import fixer_base
from ..fixer_util import Attr, Call, Name, Number, Subscript, Node, syms

class FixSysExc(fixer_base.BaseFix):
    # This order matches the ordering of sys.exc_info().
    exc_info = ["exc_type", "exc_value", "exc_traceback"]
    BM_compatible = True
    PATTERN = """
              power< 'sys' trailer< dot='.' attribute=(%s) > >
              """ % '|'.join("'%s'" % e for e in exc_info)

    def transform(self, node, results):
        sys_attr = results["attribute"][0]
        index = Number(self.exc_info.index(sys_attr.value))

        call = Call(Name("exc_info"), prefix=sys_attr.prefix)
        attr = Attr(Name("sys"), call)
        attr[1].children[0].prefix = results["dot"].prefix
        attr.append(Subscript(index))
        return Node(syms.power, attr, prefix=node.prefix)


# ===== fixes/fix_throw.py =====

"""Fixer for generator.throw(E, V, T).

g.throw(E)       -> g.throw(E)
g.throw(E, V)    -> g.throw(E(V))
g.throw(E, V, T) -> g.throw(E(V).with_traceback(T))

g.throw("foo"[, V[, T]]) will warn about string exceptions."""
# Author: Collin Winter

# Local imports
from .. import pytree
from ..pgen2 import token
from .. import fixer_base
from ..fixer_util import Name, Call, ArgList, Attr, is_tuple

class FixThrow(fixer_base.BaseFix):
    BM_compatible = True
    PATTERN = """
    power< any trailer< '.' 'throw' >
           trailer< '(' args=arglist< exc=any ',' val=any [',' tb=any] > ')' > >
    |
    power< any trailer< '.' 'throw' > trailer< '(' exc=any ')' > >
    """

    def transform(self, node, results):
        syms = self.syms

        exc = results["exc"].clone()
        if exc.type is token.STRING:
            self.cannot_convert(node, "Python 3 does not support string exceptions")
            return

        # Leave "g.throw(E)" alone
        val = results.get("val")
        if val is None:
            return

        val = val.clone()
        if is_tuple(val):
            args = [c.clone() for c in val.children[1:-1]]
        else:
            val.prefix = ""
            args = [val]

        throw_args = results["args"]

        if "tb" in results:
            tb = results["tb"].clone()
            tb.prefix = ""

            e = Call(exc, args)
            with_tb = Attr(e, Name('with_traceback')) + [ArgList([tb])]
            throw_args.replace(pytree.Node(syms.power, with_tb))
        else:
            throw_args.replace(Call(exc, args))
# ===== fixes/fix_tuple_params.py =====

"""Fixer for function definitions with tuple parameters.

def func(((a, b), c), d):
    ...

    ->

def func(x, d):
    ((a, b), c) = x
    ...

It will also support lambdas:

    lambda (x, y): x + y -> lambda t: t[0] + t[1]

    # The parens are a syntax error in Python 3
    lambda (x): x + y -> lambda x: x + y
"""
# Author: Collin Winter

# Local imports
from .. import pytree
from ..pgen2 import token
from .. import fixer_base
from ..fixer_util import Assign, Name, Newline, Number, Subscript, syms

def is_docstring(stmt):
    return isinstance(stmt, pytree.Node) and \
           stmt.children[0].type == token.STRING

class FixTupleParams(fixer_base.BaseFix):
    run_order = 4  # use a lower order since lambda is part of other
                   # patterns
    BM_compatible = True

    PATTERN = """
              funcdef< 'def' any parameters< '(' args=any ')' >
                       ['->' any] ':' suite=any+ >
              |
              lambda=
              lambdef< 'lambda' args=vfpdef< '(' inner=any ')' >
                       ':' body=any
              >
              """

    def transform(self, node, results):
        if "lambda" in results:
            return self.transform_lambda(node, results)

        new_lines = []
        suite = results["suite"]
        args = results["args"]
        # This crap is so "def foo(...): x = 5; y = 7" is handled correctly.
        # TODO(cwinter): suite-cleanup
        if suite[0].children[1].type == token.INDENT:
            start = 2
            indent = suite[0].children[1].value
            end = Newline()
        else:
            start = 0
            indent = "; "
            end = pytree.Leaf(token.INDENT, "")

        # We need access to self for new_name(), and making this a method
        # doesn't feel right. Closing over self and new_lines makes the
        # code below cleaner.
        def handle_tuple(tuple_arg, add_prefix=False):
            n = Name(self.new_name())
            arg = tuple_arg.clone()
            arg.prefix = ""
            stmt = Assign(arg, n.clone())
            if add_prefix:
                n.prefix = " "
            tuple_arg.replace(n)
            new_lines.append(pytree.Node(syms.simple_stmt,
                                         [stmt, end.clone()]))

        if args.type == syms.tfpdef:
            handle_tuple(args)
        elif args.type == syms.typedargslist:
            for i, arg in enumerate(args.children):
                if arg.type == syms.tfpdef:
                    # Without add_prefix, the emitted code is correct,
                    # just ugly.
                    handle_tuple(arg, add_prefix=(i > 0))

        if not new_lines:
            return

        # This isn't strictly necessary, but it plays nicely with other fixers.
        # TODO(cwinter) get rid of this when children becomes a smart list
        for line in new_lines:
            line.parent = suite[0]

        # TODO(cwinter) suite-cleanup
        after = start
        if start == 0:
            new_lines[0].prefix = " "
        elif is_docstring(suite[0].children[start]):
            new_lines[0].prefix = indent
            after = start + 1

        for line in new_lines:
            line.parent = suite[0]
        suite[0].children[after:after] = new_lines
        for i in range(after+1, after+len(new_lines)+1):
            suite[0].children[i].prefix = indent
        suite[0].changed()

    def transform_lambda(self, node, results):
        args = results["args"]
        body = results["body"]
        inner = simplify_args(results["inner"])

        # Replace lambda ((((x)))): x  with lambda x: x
        if inner.type == token.NAME:
            inner = inner.clone()
            inner.prefix = " "
            args.replace(inner)
            return

        params = find_params(args)
        to_index = map_to_index(params)
        tup_name = self.new_name(tuple_name(params))

        new_param = Name(tup_name, prefix=" ")
        args.replace(new_param.clone())
        for n in body.post_order():
            if n.type == token.NAME and n.value in to_index:
                subscripts = [c.clone() for c in to_index[n.value]]
                new = pytree.Node(syms.power,
                                  [new_param.clone()] + subscripts)
                new.prefix = n.prefix
                n.replace(new)


### Helper functions for transform_lambda()

def simplify_args(node):
    if node.type in (syms.vfplist, token.NAME):
        return node
    elif node.type == syms.vfpdef:
        # These look like vfpdef< '(' x ')' > where x is NAME
        # or another vfpdef instance (leading to recursion).
        while node.type == syms.vfpdef:
            node = node.children[1]
        return node
    raise RuntimeError("Received unexpected node %s" % node)

def find_params(node):
    if node.type == syms.vfpdef:
        return find_params(node.children[1])
    elif node.type == token.NAME:
        return node.value
    return [find_params(c) for c in node.children if c.type != token.COMMA]

def map_to_index(param_list, prefix=[], d=None):
    if d is None:
        d = {}
    for i, obj in enumerate(param_list):
        trailer = [Subscript(Number(str(i)))]
        if isinstance(obj, list):
            map_to_index(obj, trailer, d=d)
        else:
            d[obj] = prefix + trailer
    return d

def tuple_name(param_list):
    l = []
    for obj in param_list:
        if isinstance(obj, list):
            l.append(tuple_name(obj))
        else:
            l.append(obj)
    return "_".join(l)
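# ---- editor's example (not from the archive) ----
# Sketch of the tuple-parameter rewrite; the sample def is assumed. The tuple
# parameter is replaced by a generated name (new_name() defaults to something
# like xxx_todo_changeme) that is unpacked on the first line of the body.
from lib2to3.refactor import RefactoringTool

rt = RefactoringTool(["lib2to3.fixes.fix_tuple_params"])
print(rt.refactor_string("def f((a, b)): return a\n", "<demo>"))
# ---- end example ----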
# ===== fixes/fix_types.py =====

# Copyright 2007 Google, Inc. All Rights Reserved.
# Licensed to PSF under a Contributor Agreement.

"""Fixer for removing uses of the types module.

These work for only the known names in the types module. The forms can
include the 'types.' prefix or not; i.e. it is assumed the module is
imported either as:

    import types
    from types import ...  # either * or specific types

The import statements are not modified.

There should be another fixer that handles at least the following constants:

   type([]) -> list
   type(()) -> tuple
   type('') -> str
"""

# Local imports
from .. import fixer_base
from ..fixer_util import Name

_TYPE_MAPPING = {
        'BooleanType' : 'bool',
        'BufferType' : 'memoryview',
        'ClassType' : 'type',
        'ComplexType' : 'complex',
        'DictType': 'dict',
        'DictionaryType' : 'dict',
        'EllipsisType' : 'type(Ellipsis)',
        #'FileType' : 'io.IOBase',
        'FloatType': 'float',
        'IntType': 'int',
        'ListType': 'list',
        'LongType': 'int',
        'ObjectType' : 'object',
        'NoneType': 'type(None)',
        'NotImplementedType' : 'type(NotImplemented)',
        'SliceType' : 'slice',
        'StringType': 'bytes',  # XXX ?
        'StringTypes' : '(str,)',  # XXX ?
        'TupleType': 'tuple',
        'TypeType' : 'type',
        'UnicodeType': 'str',
        'XRangeType' : 'range',
    }

_pats = ["power< 'types' trailer< '.' name='%s' > >" % t for t in _TYPE_MAPPING]

class FixTypes(fixer_base.BaseFix):
    BM_compatible = True
    PATTERN = '|'.join(_pats)

    def transform(self, node, results):
        new_value = _TYPE_MAPPING.get(results["name"].value)
        if new_value:
            return Name(new_value, prefix=node.prefix)
        return None


# ===== fixes/fix_unicode.py =====

r"""Fixer for unicode.

* Changes unicode to str and unichr to chr.

* If "...\u..." is not unicode literal change it into "...\\u...".

* Change u"..." into "...".

"""

from ..pgen2 import token
from .. import fixer_base

_mapping = {"unichr" : "chr", "unicode" : "str"}

class FixUnicode(fixer_base.BaseFix):
    BM_compatible = True
    PATTERN = "STRING | 'unicode' | 'unichr'"

    def start_tree(self, tree, filename):
        super(FixUnicode, self).start_tree(tree, filename)
        self.unicode_literals = 'unicode_literals' in tree.future_features

    def transform(self, node, results):
        if node.type == token.NAME:
            new = node.clone()
            new.value = _mapping[node.value]
            return new
        elif node.type == token.STRING:
            val = node.value
            if not self.unicode_literals and val[0] in '\'"' and '\\' in val:
                val = r'\\'.join([
                    v.replace('\\u', r'\\u').replace('\\U', r'\\U')
                    for v in val.split(r'\\')
                ])
            if val[0] in 'uU':
                val = val[1:]
            if val == node.value:
                return node
            new = node.clone()
            new.value = val
            return new
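# ---- editor's example (not from the archive) ----
# Sketch combining the two fixers above; the sample statements are assumed.
from lib2to3.refactor import RefactoringTool

rt = RefactoringTool(["lib2to3.fixes.fix_types", "lib2to3.fixes.fix_unicode"])
print(rt.refactor_string("t = types.StringType\nu = unicode(s)\n", "<demo>"))
# -> t = bytes and u = str(s)
# ---- end example ----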
""" # Author: Nick Edds # Local imports from lib2to3.fixes.fix_imports import alternates, FixImports from lib2to3.fixer_util import (Name, Comma, FromImport, Newline, find_indentation, Node, syms) MAPPING = {"urllib": [ ("urllib.request", ["URLopener", "FancyURLopener", "urlretrieve", "_urlopener", "urlopen", "urlcleanup", "pathname2url", "url2pathname"]), ("urllib.parse", ["quote", "quote_plus", "unquote", "unquote_plus", "urlencode", "splitattr", "splithost", "splitnport", "splitpasswd", "splitport", "splitquery", "splittag", "splittype", "splituser", "splitvalue", ]), ("urllib.error", ["ContentTooShortError"])], "urllib2" : [ ("urllib.request", ["urlopen", "install_opener", "build_opener", "Request", "OpenerDirector", "BaseHandler", "HTTPDefaultErrorHandler", "HTTPRedirectHandler", "HTTPCookieProcessor", "ProxyHandler", "HTTPPasswordMgr", "HTTPPasswordMgrWithDefaultRealm", "AbstractBasicAuthHandler", "HTTPBasicAuthHandler", "ProxyBasicAuthHandler", "AbstractDigestAuthHandler", "HTTPDigestAuthHandler", "ProxyDigestAuthHandler", "HTTPHandler", "HTTPSHandler", "FileHandler", "FTPHandler", "CacheFTPHandler", "UnknownHandler"]), ("urllib.error", ["URLError", "HTTPError"]), ] } # Duplicate the url parsing functions for urllib2. MAPPING["urllib2"].append(MAPPING["urllib"][1]) def build_pattern(): bare = set() for old_module, changes in MAPPING.items(): for change in changes: new_module, members = change members = alternates(members) yield """import_name< 'import' (module=%r | dotted_as_names< any* module=%r any* >) > """ % (old_module, old_module) yield """import_from< 'from' mod_member=%r 'import' ( member=%s | import_as_name< member=%s 'as' any > | import_as_names< members=any* >) > """ % (old_module, members, members) yield """import_from< 'from' module_star=%r 'import' star='*' > """ % old_module yield """import_name< 'import' dotted_as_name< module_as=%r 'as' any > > """ % old_module # bare_with_attr has a special significance for FixImports.match(). yield """power< bare_with_attr=%r trailer< '.' member=%s > any* > """ % (old_module, members) class FixUrllib(FixImports): def build_pattern(self): return "|".join(build_pattern()) def transform_import(self, node, results): """Transform for the basic import case. Replaces the old import name with a comma separated list of its replacements. """ import_mod = results.get("module") pref = import_mod.prefix names = [] # create a Node list of the replacement modules for name in MAPPING[import_mod.value][:-1]: names.extend([Name(name[0], prefix=pref), Comma()]) names.append(Name(MAPPING[import_mod.value][-1][0], prefix=pref)) import_mod.replace(names) def transform_member(self, node, results): """Transform for imports of specific module elements. Replaces the module to be imported from with the appropriate new module. 
""" mod_member = results.get("mod_member") pref = mod_member.prefix member = results.get("member") # Simple case with only a single member being imported if member: # this may be a list of length one, or just a node if isinstance(member, list): member = member[0] new_name = None for change in MAPPING[mod_member.value]: if member.value in change[1]: new_name = change[0] break if new_name: mod_member.replace(Name(new_name, prefix=pref)) else: self.cannot_convert(node, "This is an invalid module element") # Multiple members being imported else: # a dictionary for replacements, order matters modules = [] mod_dict = {} members = results["members"] for member in members: # we only care about the actual members if member.type == syms.import_as_name: as_name = member.children[2].value member_name = member.children[0].value else: member_name = member.value as_name = None if member_name != ",": for change in MAPPING[mod_member.value]: if member_name in change[1]: if change[0] not in mod_dict: modules.append(change[0]) mod_dict.setdefault(change[0], []).append(member) new_nodes = [] indentation = find_indentation(node) first = True def handle_name(name, prefix): if name.type == syms.import_as_name: kids = [Name(name.children[0].value, prefix=prefix), name.children[1].clone(), name.children[2].clone()] return [Node(syms.import_as_name, kids)] return [Name(name.value, prefix=prefix)] for module in modules: elts = mod_dict[module] names = [] for elt in elts[:-1]: names.extend(handle_name(elt, pref)) names.append(Comma()) names.extend(handle_name(elts[-1], pref)) new = FromImport(module, names) if not first or node.parent.prefix.endswith(indentation): new.prefix = indentation new_nodes.append(new) first = False if new_nodes: nodes = [] for new_node in new_nodes[:-1]: nodes.extend([new_node, Newline()]) nodes.append(new_nodes[-1]) node.replace(nodes) else: self.cannot_convert(node, "All module elements are invalid") def transform_dot(self, node, results): """Transform for calls to module members in code.""" module_dot = results.get("bare_with_attr") member = results.get("member") new_name = None if isinstance(member, list): member = member[0] for change in MAPPING[module_dot.value]: if member.value in change[1]: new_name = change[0] break if new_name: module_dot.replace(Name(new_name, prefix=module_dot.prefix)) else: self.cannot_convert(node, "This is an invalid module element") def transform(self, node, results): if results.get("module"): self.transform_import(node, results) elif results.get("mod_member"): self.transform_member(node, results) elif results.get("bare_with_attr"): self.transform_dot(node, results) # Renaming and star imports are not supported for these modules. elif results.get("module_star"): self.cannot_convert(node, "Cannot handle star imports.") elif results.get("module_as"): self.cannot_convert(node, "This module is now multiple modules") PK{��\J�b�BBfixes/fix_ws_comma.pynu�[���"""Fixer that changes 'a ,b' into 'a, b'. This also changes '{a :b}' into '{a: b}', but does not touch other uses of colons. It does not touch other uses of whitespace. """ from .. import pytree from ..pgen2 import token from .. 
# ===== fixes/fix_xrange.py =====

# Copyright 2007 Google, Inc. All Rights Reserved.
# Licensed to PSF under a Contributor Agreement.

"""Fixer that changes xrange(...) into range(...)."""

# Local imports
from .. import fixer_base
from ..fixer_util import Name, Call, consuming_calls
from .. import patcomp


class FixXrange(fixer_base.BaseFix):
    BM_compatible = True
    PATTERN = """
              power<
                 (name='range'|name='xrange') trailer< '(' args=any ')' >
              rest=any* >
              """

    def start_tree(self, tree, filename):
        super(FixXrange, self).start_tree(tree, filename)
        self.transformed_xranges = set()

    def finish_tree(self, tree, filename):
        self.transformed_xranges = None

    def transform(self, node, results):
        name = results["name"]
        if name.value == "xrange":
            return self.transform_xrange(node, results)
        elif name.value == "range":
            return self.transform_range(node, results)
        else:
            raise ValueError(repr(name))

    def transform_xrange(self, node, results):
        name = results["name"]
        name.replace(Name("range", prefix=name.prefix))
        # This prevents the new range call from being wrapped in a list later.
        self.transformed_xranges.add(id(node))

    def transform_range(self, node, results):
        if (id(node) not in self.transformed_xranges and
            not self.in_special_context(node)):
            range_call = Call(Name("range"), [results["args"].clone()])
            # Encase the range call in list().
            list_call = Call(Name("list"), [range_call],
                             prefix=node.prefix)
            # Put things that were after the range() call after the list call.
            for n in results["rest"]:
                list_call.append_child(n)
            return list_call

    P1 = "power< func=NAME trailer< '(' node=any ')' > any* >"
    p1 = patcomp.compile_pattern(P1)

    P2 = """for_stmt< 'for' any 'in' node=any ':' any* >
            | comp_for< 'for' any 'in' node=any any* >
            | comparison< any 'in' node=any any*>
         """
    p2 = patcomp.compile_pattern(P2)

    def in_special_context(self, node):
        if node.parent is None:
            return False
        results = {}
        if (node.parent.parent is not None and
               self.p1.match(node.parent.parent, results) and
               results["node"] is node):
            # list(d.keys()) -> list(d.keys()), etc.
            return results["func"].value in consuming_calls
        # for ... in d.iterkeys() -> for ... in d.keys(), etc.
        return self.p2.match(node.parent, results) and results["node"] is node


# ===== fixes/fix_xreadlines.py =====

"""Fix "for x in f.xreadlines()" -> "for x in f".

This fixer will also convert g(f.xreadlines) into g(f.__iter__)."""
# Author: Collin Winter

# Local imports
from .. import fixer_base
from ..fixer_util import Name


class FixXreadlines(fixer_base.BaseFix):
    BM_compatible = True
    PATTERN = """
    power< call=any+ trailer< '.' 'xreadlines' > trailer< '(' ')' > >
    |
    power< any+ trailer< '.' no_call='xreadlines' > >
    """

    def transform(self, node, results):
        no_call = results.get("no_call")

        if no_call:
            no_call.replace(Name("__iter__", prefix=no_call.prefix))
        else:
            node.replace([x.clone() for x in results["call"]])
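# ---- editor's example (not from the archive) ----
# Sketch of the xrange rewrite; the sample loop is assumed. In an iteration
# context no list() wrapper is added.
from lib2to3.refactor import RefactoringTool

rt = RefactoringTool(["lib2to3.fixes.fix_xrange"])
print(rt.refactor_string("for i in xrange(3):\n    pass\n", "<demo>"))
# -> for i in range(3):
# ---- end example ----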
# ===== fixes/fix_zip.py =====

"""
Fixer that changes zip(seq0, seq1, ...) into list(zip(seq0, seq1, ...))
unless there exists a 'from future_builtins import zip' statement in the
top-level namespace.

We avoid the transformation if the zip() call is directly contained in
iter(<>), list(<>), tuple(<>), sorted(<>), ...join(<>), or for V in <>:.
"""

# Local imports
from .. import fixer_base
from ..pytree import Node
from ..pygram import python_symbols as syms
from ..fixer_util import Name, ArgList, in_special_context


class FixZip(fixer_base.ConditionalFix):

    BM_compatible = True
    PATTERN = """
    power< 'zip' args=trailer< '(' [any] ')' > [trailers=trailer*]
    >
    """

    skip_on = "future_builtins.zip"

    def transform(self, node, results):
        if self.should_skip(node):
            return

        if in_special_context(node):
            return None

        args = results['args'].clone()
        args.prefix = ""

        trailers = []
        if 'trailers' in results:
            trailers = [n.clone() for n in results['trailers']]
            for n in trailers:
                n.prefix = ""

        new = Node(syms.power, [Name("zip"), args], prefix="")
        new = Node(syms.power, [Name("list"), ArgList([new])] + trailers)
        new.prefix = node.prefix
        return new


# ===== fixes/__init__.py =====

# Dummy file to make this directory a package.


# ===== fixes/fix_apply.py =====

# Copyright 2006 Google, Inc. All Rights Reserved.
# Licensed to PSF under a Contributor Agreement.

"""Fixer for apply().

This converts apply(func, v, k) into (func)(*v, **k)."""

# Local imports
from .. import pytree
from ..pgen2 import token
from .. import fixer_base
from ..fixer_util import Call, Comma, parenthesize

class FixApply(fixer_base.BaseFix):
    BM_compatible = True

    PATTERN = """
    power< 'apply'
        trailer<
            '('
            arglist<
                (not argument<NAME '=' any>) func=any ','
                (not argument<NAME '=' any>) args=any [','
                (not argument<NAME '=' any>) kwds=any] [',']
            >
            ')'
        >
    >
    """

    def transform(self, node, results):
        syms = self.syms
        assert results
        func = results["func"]
        args = results["args"]
        kwds = results.get("kwds")
        # I feel like we should be able to express this logic in the
        # PATTERN above but I don't know how to do it so...
        if args:
            if args.type == self.syms.star_expr:
                return  # Make no change.
            if (args.type == self.syms.argument and
                    args.children[0].value == '**'):
                return  # Make no change.
        if kwds and (kwds.type == self.syms.argument and
                     kwds.children[0].value == '**'):
            return  # Make no change.
        prefix = node.prefix
        func = func.clone()
        if (func.type not in (token.NAME, syms.atom) and
            (func.type != syms.power or
             func.children[-2].type == token.DOUBLESTAR)):
            # Need to parenthesize
            func = parenthesize(func)
        func.prefix = ""
        args = args.clone()
        args.prefix = ""
        if kwds is not None:
            kwds = kwds.clone()
            kwds.prefix = ""
        l_newargs = [pytree.Leaf(token.STAR, "*"), args]
        if kwds is not None:
            l_newargs.extend([Comma(),
                              pytree.Leaf(token.DOUBLESTAR, "**"),
                              kwds])
            l_newargs[-2].prefix = " "  # that's the ** token
        # XXX Sometimes we could be cleverer, e.g. apply(f, (x, y) + t)
        # can be translated into f(x, y, *t) instead of f(*(x, y) + t)
        #new = pytree.Node(syms.power, (func, ArgList(l_newargs)))
        return Call(func, l_newargs, prefix=prefix)
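# ---- editor's example (not from the archive) ----
# Sketch of the apply() rewrite; the sample call is assumed.
from lib2to3.refactor import RefactoringTool

rt = RefactoringTool(["lib2to3.fixes.fix_apply"])
print(rt.refactor_string("apply(f, args, kwds)\n", "<demo>"))
# -> f(*args, **kwds)
# ---- end example ----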
# ===== fixes/fix_asserts.py =====

"""Fixer that replaces deprecated unittest method names."""

# Author: Ezio Melotti

from ..fixer_base import BaseFix
from ..fixer_util import Name

NAMES = dict(
    assert_="assertTrue",
    assertEquals="assertEqual",
    assertNotEquals="assertNotEqual",
    assertAlmostEquals="assertAlmostEqual",
    assertNotAlmostEquals="assertNotAlmostEqual",
    assertRegexpMatches="assertRegex",
    assertRaisesRegexp="assertRaisesRegex",
    failUnlessEqual="assertEqual",
    failIfEqual="assertNotEqual",
    failUnlessAlmostEqual="assertAlmostEqual",
    failIfAlmostEqual="assertNotAlmostEqual",
    failUnless="assertTrue",
    failUnlessRaises="assertRaises",
    failIf="assertFalse",
)


class FixAsserts(BaseFix):

    PATTERN = """
              power< any+ trailer< '.' meth=(%s)> any* >
              """ % '|'.join(map(repr, NAMES))

    def transform(self, node, results):
        name = results["meth"][0]
        name.replace(Name(NAMES[str(name)], prefix=name.prefix))


# ===== fixes/fix_basestring.py =====

"""Fixer for basestring -> str."""
# Author: Christian Heimes

# Local imports
from .. import fixer_base
from ..fixer_util import Name

class FixBasestring(fixer_base.BaseFix):
    BM_compatible = True

    PATTERN = "'basestring'"

    def transform(self, node, results):
        return Name("str", prefix=node.prefix)


# ===== fixes/fix_buffer.py =====

# Copyright 2007 Google, Inc. All Rights Reserved.
# Licensed to PSF under a Contributor Agreement.

"""Fixer that changes buffer(...) into memoryview(...)."""

# Local imports
from .. import fixer_base
from ..fixer_util import Name


class FixBuffer(fixer_base.BaseFix):
    BM_compatible = True

    explicit = True  # The user must ask for this fixer

    PATTERN = """
              power< name='buffer' trailer< '(' [any] ')' > any* >
              """

    def transform(self, node, results):
        name = results["name"]
        name.replace(Name("memoryview", prefix=name.prefix))
# ===== fixes/fix_dict.py =====

# Copyright 2007 Google, Inc. All Rights Reserved.
# Licensed to PSF under a Contributor Agreement.

"""Fixer for dict methods.

d.keys() -> list(d.keys())
d.items() -> list(d.items())
d.values() -> list(d.values())

d.iterkeys() -> iter(d.keys())
d.iteritems() -> iter(d.items())
d.itervalues() -> iter(d.values())

d.viewkeys() -> d.keys()
d.viewitems() -> d.items()
d.viewvalues() -> d.values()

Except in certain very specific contexts: the iter() can be dropped
when the context is list(), sorted(), iter() or for...in; the list()
can be dropped when the context is list() or sorted() (but not iter()
or for...in!). Special contexts that apply to both: list(), sorted(), tuple()
set(), any(), all(), sum().

Note: iter(d.keys()) could be written as iter(d) but since the
original d.iterkeys() was also redundant we don't fix this.  And there
are (rare) contexts where it makes a difference (e.g. when passing it
as an argument to a function that introspects the argument).
"""

# Local imports
from .. import pytree
from .. import patcomp
from .. import fixer_base
from ..fixer_util import Name, Call, Dot
from .. import fixer_util


iter_exempt = fixer_util.consuming_calls | {"iter"}


class FixDict(fixer_base.BaseFix):
    BM_compatible = True

    PATTERN = """
    power< head=any+
         trailer< '.' method=('keys'|'items'|'values'|
                              'iterkeys'|'iteritems'|'itervalues'|
                              'viewkeys'|'viewitems'|'viewvalues') >
          parens=trailer< '(' ')' >
         tail=any*
    >
    """

    def transform(self, node, results):
        head = results["head"]
        method = results["method"][0]  # Extract node for method name
        tail = results["tail"]
        syms = self.syms
        method_name = method.value
        isiter = method_name.startswith("iter")
        isview = method_name.startswith("view")
        if isiter or isview:
            method_name = method_name[4:]
        assert method_name in ("keys", "items", "values"), repr(method)
        head = [n.clone() for n in head]
        tail = [n.clone() for n in tail]
        special = not tail and self.in_special_context(node, isiter)
        args = head + [pytree.Node(syms.trailer,
                                   [Dot(),
                                    Name(method_name,
                                         prefix=method.prefix)]),
                       results["parens"].clone()]
        new = pytree.Node(syms.power, args)
        if not (special or isview):
            new.prefix = ""
            new = Call(Name("iter" if isiter else "list"), [new])
        if tail:
            new = pytree.Node(syms.power, [new] + tail)
        new.prefix = node.prefix
        return new

    P1 = "power< func=NAME trailer< '(' node=any ')' > any* >"
    p1 = patcomp.compile_pattern(P1)

    P2 = """for_stmt< 'for' any 'in' node=any ':' any* >
            | comp_for< 'for' any 'in' node=any any* >
         """
    p2 = patcomp.compile_pattern(P2)

    def in_special_context(self, node, isiter):
        if node.parent is None:
            return False
        results = {}
        if (node.parent.parent is not None and
               self.p1.match(node.parent.parent, results) and
               results["node"] is node):
            if isiter:
                # iter(d.iterkeys()) -> iter(d.keys()), etc.
                return results["func"].value in iter_exempt
            else:
                # list(d.keys()) -> list(d.keys()), etc.
                return results["func"].value in fixer_util.consuming_calls
        if not isiter:
            return False
        # for ... in d.iterkeys() -> for ... in d.keys(), etc.
        return self.p2.match(node.parent, results) and results["node"] is node
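# ---- editor's example (not from the archive) ----
# Sketch of the dict-method rewrites, including the special for-loop context;
# the sample source is assumed.
from lib2to3.refactor import RefactoringTool

rt = RefactoringTool(["lib2to3.fixes.fix_dict"])
src = "ks = d.keys()\nfor k in d.iterkeys():\n    pass\n"
print(rt.refactor_string(src, "<demo>"))
# -> ks = list(d.keys()) and for k in d.keys():
# ---- end example ----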
# ===== fixes/fix_except.py =====

"""Fixer for except statements with named exceptions.

The following cases will be converted:

- "except E, T:" where T is a name:

    except E as T:

- "except E, T:" where T is not a name, tuple or list:

        except E as t:
            T = t

    This is done because the target of an "except" clause must be a
    name.

- "except E, T:" where T is a tuple or list literal:

        except E as t:
            T = t.args
"""
# Author: Collin Winter

# Local imports
from .. import pytree
from ..pgen2 import token
from .. import fixer_base
from ..fixer_util import Assign, Attr, Name, is_tuple, is_list, syms

def find_excepts(nodes):
    for i, n in enumerate(nodes):
        if n.type == syms.except_clause:
            if n.children[0].value == 'except':
                yield (n, nodes[i+2])

class FixExcept(fixer_base.BaseFix):
    BM_compatible = True

    PATTERN = """
    try_stmt< 'try' ':' (simple_stmt | suite)
                  cleanup=(except_clause ':' (simple_stmt | suite))+
                  tail=(['except' ':' (simple_stmt | suite)]
                        ['else' ':' (simple_stmt | suite)]
                        ['finally' ':' (simple_stmt | suite)]) >
    """

    def transform(self, node, results):
        syms = self.syms

        tail = [n.clone() for n in results["tail"]]

        try_cleanup = [ch.clone() for ch in results["cleanup"]]
        for except_clause, e_suite in find_excepts(try_cleanup):
            if len(except_clause.children) == 4:
                (E, comma, N) = except_clause.children[1:4]
                comma.replace(Name("as", prefix=" "))

                if N.type != token.NAME:
                    # Generate a new N for the except clause
                    new_N = Name(self.new_name(), prefix=" ")
                    target = N.clone()
                    target.prefix = ""
                    N.replace(new_N)
                    new_N = new_N.clone()

                    # Insert "old_N = new_N" as the first statement in
                    # the except body. This loop skips leading whitespace
                    # and indents
                    #TODO(cwinter) suite-cleanup
                    suite_stmts = e_suite.children
                    for i, stmt in enumerate(suite_stmts):
                        if isinstance(stmt, pytree.Node):
                            break

                    # The assignment is different if old_N is a tuple or list
                    # In that case, the assignment is old_N = new_N.args
                    if is_tuple(N) or is_list(N):
                        assign = Assign(target, Attr(new_N, Name('args')))
                    else:
                        assign = Assign(target, new_N)

                    #TODO(cwinter) stopgap until children becomes a smart list
                    for child in reversed(suite_stmts[:i]):
                        e_suite.insert_child(0, child)
                    e_suite.insert_child(i, assign)
                elif N.prefix == "":
                    # No space after a comma is legal; no space after "as",
                    # not so much.
                    N.prefix = " "

        #TODO(cwinter) fix this when children becomes a smart list
        children = [c.clone() for c in node.children[:3]] + try_cleanup + tail
        return pytree.Node(node.type, children)


# ===== fixes/fix_exec.py =====

# Copyright 2006 Google, Inc. All Rights Reserved.
# Licensed to PSF under a Contributor Agreement.

"""Fixer for exec.

This converts usages of the exec statement into calls to a built-in
exec() function.

exec code in ns1, ns2 -> exec(code, ns1, ns2)
"""

# Local imports
from .. import fixer_base
from ..fixer_util import Comma, Name, Call


class FixExec(fixer_base.BaseFix):
    BM_compatible = True

    PATTERN = """
    exec_stmt< 'exec' a=any 'in' b=any [',' c=any] >
    |
    exec_stmt< 'exec' (not atom<'(' [any] ')'>) a=any >
    """

    def transform(self, node, results):
        assert results
        syms = self.syms
        a = results["a"]
        b = results.get("b")
        c = results.get("c")
        args = [a.clone()]
        args[0].prefix = ""
        if b is not None:
            args.extend([Comma(), b.clone()])
        if c is not None:
            args.extend([Comma(), c.clone()])

        return Call(Name("exec"), args, prefix=node.prefix)
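# ---- editor's example (not from the archive) ----
# Sketch of the except-clause rewrite; the sample try/except is assumed.
from lib2to3.refactor import RefactoringTool

rt = RefactoringTool(["lib2to3.fixes.fix_except"])
src = "try:\n    pass\nexcept ValueError, e:\n    pass\n"
print(rt.refactor_string(src, "<demo>"))
# -> except ValueError as e:
# ---- end example ----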
# ===== fixes/fix_execfile.py =====

# Copyright 2006 Google, Inc. All Rights Reserved.
# Licensed to PSF under a Contributor Agreement.

"""Fixer for execfile.

This converts usages of the execfile function into calls to the built-in
exec() function.
"""

from .. import fixer_base
from ..fixer_util import (Comma, Name, Call, LParen, RParen, Dot, Node,
                          ArgList, String, syms)


class FixExecfile(fixer_base.BaseFix):
    BM_compatible = True

    PATTERN = """
    power< 'execfile' trailer< '(' arglist< filename=any [',' globals=any [',' locals=any ] ] > ')' > >
    |
    power< 'execfile' trailer< '(' filename=any ')' > >
    """

    def transform(self, node, results):
        assert results
        filename = results["filename"]
        globals = results.get("globals")
        locals = results.get("locals")

        # Copy over the prefix from the right parentheses end of the execfile
        # call.
        execfile_paren = node.children[-1].children[-1].clone()
        # Construct open().read().
        open_args = ArgList([filename.clone(), Comma(), String('"rb"', ' ')],
                            rparen=execfile_paren)
        open_call = Node(syms.power, [Name("open"), open_args])
        read = [Node(syms.trailer, [Dot(), Name('read')]),
                Node(syms.trailer, [LParen(), RParen()])]
        open_expr = [open_call] + read
        # Wrap the open call in a compile call. This is so the filename will be
        # preserved in the execed code.
        filename_arg = filename.clone()
        filename_arg.prefix = " "
        exec_str = String("'exec'", " ")
        compile_args = open_expr + [Comma(), filename_arg, Comma(), exec_str]
        compile_call = Call(Name("compile"), compile_args, "")
        # Finally, replace the execfile call with an exec call.
        args = [compile_call]
        if globals is not None:
            args.extend([Comma(), globals.clone()])
        if locals is not None:
            args.extend([Comma(), locals.clone()])
        return Call(Name("exec"), args, prefix=node.prefix)


# ===== fixes/fix_exitfunc.py =====

"""
Convert use of sys.exitfunc to use the atexit module.
"""

# Author: Benjamin Peterson

from lib2to3 import pytree, fixer_base
from lib2to3.fixer_util import Name, Attr, Call, Comma, Newline, syms


class FixExitfunc(fixer_base.BaseFix):
    keep_line_order = True
    BM_compatible = True

    PATTERN = """
              (
                  sys_import=import_name<'import'
                      ('sys'
                      |
                      dotted_as_names< (any ',')* 'sys' (',' any)* >
                      )
                  >
              |
                  expr_stmt<
                      power< 'sys' trailer< '.' 'exitfunc' > >
                  '=' func=any >
              )
              """

    def __init__(self, *args):
        super(FixExitfunc, self).__init__(*args)

    def start_tree(self, tree, filename):
        super(FixExitfunc, self).start_tree(tree, filename)
        self.sys_import = None

    def transform(self, node, results):
        # First, find the sys import. We'll just hope it's global scope.
        if "sys_import" in results:
            if self.sys_import is None:
                self.sys_import = results["sys_import"]
            return

        func = results["func"].clone()
        func.prefix = ""
        register = pytree.Node(syms.power,
                               Attr(Name("atexit"), Name("register"))
                               )
        call = Call(register, [func], node.prefix)
        node.replace(call)

        if self.sys_import is None:
            # That's interesting.
            self.warning(node, "Can't find sys import; Please add an atexit "
                               "import at the top of your file.")
            return

        # Now add an atexit import after the sys import.
        names = self.sys_import.children[1]
        if names.type == syms.dotted_as_names:
            names.append_child(Comma())
            names.append_child(Name("atexit", " "))
        else:
            containing_stmt = self.sys_import.parent
            position = containing_stmt.children.index(self.sys_import)
            stmt_container = containing_stmt.parent
            new_import = pytree.Node(syms.import_name,
                              [Name("import"), Name("atexit", " ")]
                              )
            new = pytree.Node(syms.simple_stmt, [new_import])
            containing_stmt.insert_child(position + 1, Newline())
            containing_stmt.insert_child(position + 2, new)
# ===== fixes/fix_filter.py =====

# Copyright 2007 Google, Inc. All Rights Reserved.
# Licensed to PSF under a Contributor Agreement.

"""Fixer that changes filter(F, X) into list(filter(F, X)).

We avoid the transformation if the filter() call is directly contained
in iter(<>), list(<>), tuple(<>), sorted(<>), ...join(<>), or
for V in <>:.

NOTE: This is still not correct if the original code was depending on
filter(F, X) to return a string if X is a string and a tuple if X is a
tuple. That would require type inference, which we don't do. Let
Python 2.6 figure it out.
"""

# Local imports
from .. import fixer_base
from ..pytree import Node
from ..pygram import python_symbols as syms
from ..fixer_util import Name, ArgList, ListComp, in_special_context


class FixFilter(fixer_base.ConditionalFix):
    BM_compatible = True

    PATTERN = """
    filter_lambda=power<
        'filter'
        trailer<
            '('
            arglist<
                lambdef< 'lambda'
                         (fp=NAME | vfpdef< '(' fp=NAME ')'> ) ':' xp=any
                >
                ','
                it=any
            >
            ')'
        >
        [extra_trailers=trailer*]
    >
    |
    power<
        'filter'
        trailer< '(' arglist< none='None' ',' seq=any > ')' >
        [extra_trailers=trailer*]
    >
    |
    power<
        'filter'
        args=trailer< '(' [any] ')' >
        [extra_trailers=trailer*]
    >
    """

    skip_on = "future_builtins.filter"

    def transform(self, node, results):
        if self.should_skip(node):
            return

        trailers = []
        if 'extra_trailers' in results:
            for t in results['extra_trailers']:
                trailers.append(t.clone())

        if "filter_lambda" in results:
            new = ListComp(results.get("fp").clone(),
                           results.get("fp").clone(),
                           results.get("it").clone(),
                           results.get("xp").clone())
            new = Node(syms.power, [new] + trailers, prefix="")

        elif "none" in results:
            new = ListComp(Name("_f"),
                           Name("_f"),
                           results["seq"].clone(),
                           Name("_f"))
            new = Node(syms.power, [new] + trailers, prefix="")

        else:
            if in_special_context(node):
                return None

            args = results['args'].clone()
            new = Node(syms.power, [Name("filter"), args], prefix="")
            new = Node(syms.power, [Name("list"), ArgList([new])] + trailers)
            new.prefix = ""
        new.prefix = node.prefix
        return new


# ===== fixes/fix_funcattrs.py =====

"""Fix function attribute names (f.func_x -> f.__x__)."""
# Author: Collin Winter

# Local imports
from .. import fixer_base
from ..fixer_util import Name


class FixFuncattrs(fixer_base.BaseFix):
    BM_compatible = True

    PATTERN = """
    power< any+ trailer< '.' attr=('func_closure' | 'func_doc' | 'func_globals'
                                  | 'func_name' | 'func_defaults' | 'func_code'
                                  | 'func_dict') > any* >
    """

    def transform(self, node, results):
        attr = results["attr"][0]
        attr.replace(Name(("__%s__" % attr.value[5:]),
                          prefix=attr.prefix))


# ===== fixes/fix_future.py =====

"""Remove __future__ imports

from __future__ import foo is replaced with an empty line.
"""
# Author: Christian Heimes

# Local imports
from .. import fixer_base
from ..fixer_util import BlankLine

class FixFuture(fixer_base.BaseFix):
    BM_compatible = True

    PATTERN = """import_from< 'from' module_name="__future__" 'import' any >"""

    # This should be run last -- some things check for the import
    run_order = 10

    def transform(self, node, results):
        new = BlankLine()
        new.prefix = node.prefix
        return new


# ===== fixes/fix_getcwdu.py =====

"""
Fixer that changes os.getcwdu() to os.getcwd().
"""
# Author: Victor Stinner

# Local imports
from .. import fixer_base
from ..fixer_util import Name

class FixGetcwdu(fixer_base.BaseFix):
    BM_compatible = True

    PATTERN = """
              power< 'os' trailer< dot='.' name='getcwdu' > any* >
              """

    def transform(self, node, results):
        name = results["name"]
        name.replace(Name("getcwd", prefix=name.prefix))
""" # Local imports from .. import pytree from .. import fixer_base from ..fixer_util import Name, parenthesize class FixHasKey(fixer_base.BaseFix): BM_compatible = True PATTERN = """ anchor=power< before=any+ trailer< '.' 'has_key' > trailer< '(' ( not(arglist | argument<any '=' any>) arg=any | arglist<(not argument<any '=' any>) arg=any ','> ) ')' > after=any* > | negation=not_test< 'not' anchor=power< before=any+ trailer< '.' 'has_key' > trailer< '(' ( not(arglist | argument<any '=' any>) arg=any | arglist<(not argument<any '=' any>) arg=any ','> ) ')' > > > """ def transform(self, node, results): assert results syms = self.syms if (node.parent.type == syms.not_test and self.pattern.match(node.parent)): # Don't transform a node matching the first alternative of the # pattern when its parent matches the second alternative return None negation = results.get("negation") anchor = results["anchor"] prefix = node.prefix before = [n.clone() for n in results["before"]] arg = results["arg"].clone() after = results.get("after") if after: after = [n.clone() for n in after] if arg.type in (syms.comparison, syms.not_test, syms.and_test, syms.or_test, syms.test, syms.lambdef, syms.argument): arg = parenthesize(arg) if len(before) == 1: before = before[0] else: before = pytree.Node(syms.power, before) before.prefix = " " n_op = Name("in", prefix=" ") if negation: n_not = Name("not", prefix=" ") n_op = pytree.Node(syms.comp_op, (n_not, n_op)) new = pytree.Node(syms.comparison, (arg, n_op, before)) if after: new = parenthesize(new) new = pytree.Node(syms.power, (new,) + tuple(after)) if node.parent.type in (syms.comparison, syms.expr, syms.xor_expr, syms.and_expr, syms.shift_expr, syms.arith_expr, syms.term, syms.factor, syms.power): new = parenthesize(new) new.prefix = prefix return new PK{��\_� \fixes/fix_idioms.pynu�[���"""Adjust some old Python 2 idioms to their modern counterparts. * Change some type comparisons to isinstance() calls: type(x) == T -> isinstance(x, T) type(x) is T -> isinstance(x, T) type(x) != T -> not isinstance(x, T) type(x) is not T -> not isinstance(x, T) * Change "while 1:" into "while True:". * Change both v = list(EXPR) v.sort() foo(v) and the more general v = EXPR v.sort() foo(v) into v = sorted(EXPR) foo(v) """ # Author: Jacques Frechet, Collin Winter # Local imports from .. import fixer_base from ..fixer_util import Call, Comma, Name, Node, BlankLine, syms CMP = "(n='!=' | '==' | 'is' | n=comp_op< 'is' 'not' >)" TYPE = "power< 'type' trailer< '(' x=any ')' > >" class FixIdioms(fixer_base.BaseFix): explicit = True # The user must ask for this fixer PATTERN = r""" isinstance=comparison< %s %s T=any > | isinstance=comparison< T=any %s %s > | while_stmt< 'while' while='1' ':' any+ > | sorted=any< any* simple_stmt< expr_stmt< id1=any '=' power< list='list' trailer< '(' (not arglist<any+>) any ')' > > > '\n' > sort= simple_stmt< power< id2=any trailer< '.' 'sort' > trailer< '(' ')' > > '\n' > next=any* > | sorted=any< any* simple_stmt< expr_stmt< id1=any '=' expr=any > '\n' > sort= simple_stmt< power< id2=any trailer< '.' 'sort' > trailer< '(' ')' > > '\n' > next=any* > """ % (TYPE, CMP, CMP, TYPE) def match(self, node): r = super(FixIdioms, self).match(node) # If we've matched one of the sort/sorted subpatterns above, we # want to reject matches where the initial assignment and the # subsequent .sort() call involve different identifiers. 
# ---- fixes/fix_idioms.py ----

"""Adjust some old Python 2 idioms to their modern counterparts.

* Change some type comparisons to isinstance() calls:
    type(x) == T -> isinstance(x, T)
    type(x) is T -> isinstance(x, T)
    type(x) != T -> not isinstance(x, T)
    type(x) is not T -> not isinstance(x, T)

* Change "while 1:" into "while True:".

* Change both

    v = list(EXPR)
    v.sort()
    foo(v)

and the more general

    v = EXPR
    v.sort()
    foo(v)

into

    v = sorted(EXPR)
    foo(v)
"""
# Author: Jacques Frechet, Collin Winter

# Local imports
from .. import fixer_base
from ..fixer_util import Call, Comma, Name, Node, BlankLine, syms

CMP = "(n='!=' | '==' | 'is' | n=comp_op< 'is' 'not' >)"
TYPE = "power< 'type' trailer< '(' x=any ')' > >"


class FixIdioms(fixer_base.BaseFix):
    explicit = True  # The user must ask for this fixer

    PATTERN = r"""
        isinstance=comparison< %s %s T=any >
        |
        isinstance=comparison< T=any %s %s >
        |
        while_stmt< 'while' while='1' ':' any+ >
        |
        sorted=any<
            any*
            simple_stmt<
              expr_stmt< id1=any '='
                         power< list='list' trailer< '('
                                (not arglist<any+>) any ')' > >
              >
              '\n'
            >
            sort=
            simple_stmt<
              power< id2=any
                     trailer< '.' 'sort' > trailer< '(' ')' >
              >
              '\n'
            >
            next=any*
        >
        |
        sorted=any<
            any*
            simple_stmt< expr_stmt< id1=any '=' expr=any > '\n' >
            sort=
            simple_stmt<
              power< id2=any
                     trailer< '.' 'sort' > trailer< '(' ')' >
              >
              '\n'
            >
            next=any*
        >
    """ % (TYPE, CMP, CMP, TYPE)

    def match(self, node):
        r = super(FixIdioms, self).match(node)
        # If we've matched one of the sort/sorted subpatterns above, we
        # want to reject matches where the initial assignment and the
        # subsequent .sort() call involve different identifiers.
        if r and "sorted" in r:
            if r["id1"] == r["id2"]:
                return r
            return None
        return r

    def transform(self, node, results):
        if "isinstance" in results:
            return self.transform_isinstance(node, results)
        elif "while" in results:
            return self.transform_while(node, results)
        elif "sorted" in results:
            return self.transform_sort(node, results)
        else:
            raise RuntimeError("Invalid match")

    def transform_isinstance(self, node, results):
        x = results["x"].clone()  # The thing inside of type()
        T = results["T"].clone()  # The type being compared against
        x.prefix = ""
        T.prefix = " "
        test = Call(Name("isinstance"), [x, Comma(), T])
        if "n" in results:
            test.prefix = " "
            test = Node(syms.not_test, [Name("not"), test])
        test.prefix = node.prefix
        return test

    def transform_while(self, node, results):
        one = results["while"]
        one.replace(Name("True", prefix=one.prefix))

    def transform_sort(self, node, results):
        sort_stmt = results["sort"]
        next_stmt = results["next"]
        list_call = results.get("list")
        simple_expr = results.get("expr")

        if list_call:
            list_call.replace(Name("sorted", prefix=list_call.prefix))
        elif simple_expr:
            new = simple_expr.clone()
            new.prefix = ""
            simple_expr.replace(Call(Name("sorted"), [new],
                                     prefix=simple_expr.prefix))
        else:
            raise RuntimeError("should not have reached here")
        sort_stmt.remove()

        btwn = sort_stmt.prefix
        # Keep any prefix lines between the sort_stmt and the list_call and
        # shove them right after the sorted() call.
        if "\n" in btwn:
            if next_stmt:
                # The new prefix should be everything from the sort_stmt's
                # prefix up to the last newline, then the old prefix after a
                # new line.
                prefix_lines = (btwn.rpartition("\n")[0], next_stmt[0].prefix)
                next_stmt[0].prefix = "\n".join(prefix_lines)
            else:
                assert list_call.parent
                assert list_call.next_sibling is None
                # Put a blank line after list_call and set its prefix.
                end_line = BlankLine()
                list_call.parent.append_child(end_line)
                assert list_call.next_sibling is end_line
                # The new prefix should be everything up to the first new
                # line of sort_stmt's prefix.
                end_line.prefix = btwn.rpartition("\n")[0]
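The three idiom rewrites side by side, on hypothetical input (remember this
fixer is explicit, so 2to3 only runs it when asked for by name):

# ---- example (editor sketch, not part of the archive) ----
# type comparisons:   type(x) == int    ->  isinstance(x, int)
#                     type(x) is not T  ->  not isinstance(x, T)
# infinite loop:      while 1:          ->  while True:
# sort idiom, before (items and foo are made-up names):
v = list(items)
v.sort()
foo(v)
# sort idiom, after:
v = sorted(items)
foo(v)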
""" pending = [names] while pending: node = pending.pop() if node.type == token.NAME: yield node.value elif node.type == syms.dotted_name: yield "".join([ch.value for ch in node.children]) elif node.type == syms.dotted_as_name: pending.append(node.children[0]) elif node.type == syms.dotted_as_names: pending.extend(node.children[::-2]) else: raise AssertionError("unknown node type") class FixImport(fixer_base.BaseFix): BM_compatible = True PATTERN = """ import_from< 'from' imp=any 'import' ['('] any [')'] > | import_name< 'import' imp=any > """ def start_tree(self, tree, name): super(FixImport, self).start_tree(tree, name) self.skip = "absolute_import" in tree.future_features def transform(self, node, results): if self.skip: return imp = results['imp'] if node.type == syms.import_from: # Some imps are top-level (eg: 'import ham') # some are first level (eg: 'import ham.eggs') # some are third level (eg: 'import ham.eggs as spam') # Hence, the loop while not hasattr(imp, 'value'): imp = imp.children[0] if self.probably_a_local_import(imp.value): imp.value = "." + imp.value imp.changed() else: have_local = False have_absolute = False for mod_name in traverse_imports(imp): if self.probably_a_local_import(mod_name): have_local = True else: have_absolute = True if have_absolute: if have_local: # We won't handle both sibling and absolute imports in the # same statement at the moment. self.warning(node, "absolute and local imports together") return new = FromImport(".", [imp]) new.prefix = node.prefix return new def probably_a_local_import(self, imp_name): if imp_name.startswith("."): # Relative imports are certainly not local imports. return False imp_name = imp_name.split(".", 1)[0] base_path = dirname(self.filename) base_path = join(base_path, imp_name) # If there is no __init__.py next to the file its not in a package # so can't be a relative import. if not exists(join(dirname(base_path), "__init__.py")): return False for ext in [".py", sep, ".pyc", ".so", ".sl", ".pyd"]: if exists(base_path + ext): return True return False PK{��\�|�44fixes/fix_imports.pynu�[���"""Fix incompatible imports and module references.""" # Authors: Collin Winter, Nick Edds # Local imports from .. 
# ---- fixes/fix_imports.py ----

"""Fix incompatible imports and module references."""
# Authors: Collin Winter, Nick Edds

# Local imports
from .. import fixer_base
from ..fixer_util import Name, attr_chain

MAPPING = {'StringIO': 'io',
           'cStringIO': 'io',
           'cPickle': 'pickle',
           '__builtin__': 'builtins',
           'copy_reg': 'copyreg',
           'Queue': 'queue',
           'SocketServer': 'socketserver',
           'ConfigParser': 'configparser',
           'repr': 'reprlib',
           'FileDialog': 'tkinter.filedialog',
           'tkFileDialog': 'tkinter.filedialog',
           'SimpleDialog': 'tkinter.simpledialog',
           'tkSimpleDialog': 'tkinter.simpledialog',
           'tkColorChooser': 'tkinter.colorchooser',
           'tkCommonDialog': 'tkinter.commondialog',
           'Dialog': 'tkinter.dialog',
           'Tkdnd': 'tkinter.dnd',
           'tkFont': 'tkinter.font',
           'tkMessageBox': 'tkinter.messagebox',
           'ScrolledText': 'tkinter.scrolledtext',
           'Tkconstants': 'tkinter.constants',
           'Tix': 'tkinter.tix',
           'ttk': 'tkinter.ttk',
           'Tkinter': 'tkinter',
           'markupbase': '_markupbase',
           '_winreg': 'winreg',
           'thread': '_thread',
           'dummy_thread': '_dummy_thread',
           # anydbm and whichdb are handled by fix_imports2
           'dbhash': 'dbm.bsd',
           'dumbdbm': 'dbm.dumb',
           'dbm': 'dbm.ndbm',
           'gdbm': 'dbm.gnu',
           'xmlrpclib': 'xmlrpc.client',
           'DocXMLRPCServer': 'xmlrpc.server',
           'SimpleXMLRPCServer': 'xmlrpc.server',
           'httplib': 'http.client',
           'htmlentitydefs': 'html.entities',
           'HTMLParser': 'html.parser',
           'Cookie': 'http.cookies',
           'cookielib': 'http.cookiejar',
           'BaseHTTPServer': 'http.server',
           'SimpleHTTPServer': 'http.server',
           'CGIHTTPServer': 'http.server',
           #'test.test_support': 'test.support',
           'commands': 'subprocess',
           'UserString': 'collections',
           'UserList': 'collections',
           'urlparse': 'urllib.parse',
           'robotparser': 'urllib.robotparser',
}


def alternates(members):
    return "(" + "|".join(map(repr, members)) + ")"


def build_pattern(mapping=MAPPING):
    mod_list = ' | '.join(["module_name='%s'" % key for key in mapping])
    bare_names = alternates(mapping.keys())

    yield """name_import=import_name< 'import' ((%s) |
               multiple_imports=dotted_as_names< any* (%s) any* >) >
          """ % (mod_list, mod_list)
    yield """import_from< 'from' (%s) 'import' ['(']
              ( any | import_as_name< any 'as' any > |
                import_as_names< any* >)  [')'] >
          """ % mod_list
    yield """import_name< 'import' (dotted_as_name< (%s) 'as' any > |
               multiple_imports=dotted_as_names<
                 any* dotted_as_name< (%s) 'as' any > any* >) >
          """ % (mod_list, mod_list)

    # Find usages of module members in code e.g. thread.foo(bar)
    yield "power< bare_with_attr=(%s) trailer<'.' any > any* >" % bare_names


class FixImports(fixer_base.BaseFix):

    BM_compatible = True
    keep_line_order = True
    # This is overridden in fix_imports2.
    mapping = MAPPING

    # We want to run this fixer late, so fix_import doesn't try to make
    # stdlib renames into relative imports.
    run_order = 6

    def build_pattern(self):
        return "|".join(build_pattern(self.mapping))

    def compile_pattern(self):
        # We override this, so MAPPING can be programmatically altered and
        # the changes will be reflected in PATTERN.
        self.PATTERN = self.build_pattern()
        super(FixImports, self).compile_pattern()

    # Don't match the node if it's within another match.
    def match(self, node):
        match = super(FixImports, self).match
        results = match(node)
        if results:
            # Module usage could be in the trailer of an attribute lookup,
            # so we might have nested matches when "bare_with_attr" is
            # present.
            if "bare_with_attr" not in results and \
                    any(match(obj) for obj in attr_chain(node, "parent")):
                return False
            return results
        return False

    def start_tree(self, tree, filename):
        super(FixImports, self).start_tree(tree, filename)
        self.replace = {}

    def transform(self, node, results):
        import_mod = results.get("module_name")
        if import_mod:
            mod_name = import_mod.value
            new_name = self.mapping[mod_name]
            import_mod.replace(Name(new_name, prefix=import_mod.prefix))
            if "name_import" in results:
                # If it's not a "from x import x, y" or "import x as y"
                # import, mark its usage to be replaced.
                self.replace[mod_name] = new_name
            if "multiple_imports" in results:
                # This is a nasty hack to fix multiple imports on a line
                # (e.g., "import StringIO, urlparse"). The problem is that I
                # can't figure out an easy way to make a pattern recognize
                # the keys of MAPPING randomly sprinkled in an import
                # statement.
                results = self.match(node)
                if results:
                    self.transform(node, results)
        else:
            # Replace usage of the module.
            bare_name = results["bare_with_attr"][0]
            new_name = self.replace.get(bare_name.value)
            if new_name:
                bare_name.replace(Name(new_name, prefix=bare_name.prefix))
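Because a bare "import StringIO" is recorded in self.replace by transform()
above, later attribute uses of the module are rewritten as well. A minimal
sketch, with a made-up source string and assuming this tree's lib2to3 is
importable:

# ---- example (editor sketch, not part of the archive) ----
from lib2to3.refactor import RefactoringTool

tool = RefactoringTool(["lib2to3.fixes.fix_imports"])
src = "import StringIO\nbuf = StringIO.StringIO()\n"
print(str(tool.refactor_string(src, "<demo>")))
# expected: import io / buf = io.StringIO()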
if "bare_with_attr" not in results and \ any(match(obj) for obj in attr_chain(node, "parent")): return False return results return False def start_tree(self, tree, filename): super(FixImports, self).start_tree(tree, filename) self.replace = {} def transform(self, node, results): import_mod = results.get("module_name") if import_mod: mod_name = import_mod.value new_name = self.mapping[mod_name] import_mod.replace(Name(new_name, prefix=import_mod.prefix)) if "name_import" in results: # If it's not a "from x import x, y" or "import x as y" import, # marked its usage to be replaced. self.replace[mod_name] = new_name if "multiple_imports" in results: # This is a nasty hack to fix multiple imports on a line (e.g., # "import StringIO, urlparse"). The problem is that I can't # figure out an easy way to make a pattern recognize the keys of # MAPPING randomly sprinkled in an import statement. results = self.match(node) if results: self.transform(node, results) else: # Replace usage of the module. bare_name = results["bare_with_attr"][0] new_name = self.replace.get(bare_name.value) if new_name: bare_name.replace(Name(new_name, prefix=bare_name.prefix)) PK{��\ܬ'!!fixes/fix_imports2.pynu�[���"""Fix incompatible imports and module references that must be fixed after fix_imports.""" from . import fix_imports MAPPING = { 'whichdb': 'dbm', 'anydbm': 'dbm', } class FixImports2(fix_imports.FixImports): run_order = 7 mapping = MAPPING PK{��\q�|��fixes/fix_input.pynu�[���"""Fixer that changes input(...) into eval(input(...)).""" # Author: Andre Roberge # Local imports from .. import fixer_base from ..fixer_util import Call, Name from .. import patcomp context = patcomp.compile_pattern("power< 'eval' trailer< '(' any ')' > >") class FixInput(fixer_base.BaseFix): BM_compatible = True PATTERN = """ power< 'input' args=trailer< '(' [any] ')' > > """ def transform(self, node, results): # If we're already wrapped in an eval() call, we're done. if context.match(node.parent.parent): return new = node.clone() new.prefix = "" return Call(Name("eval"), [new], prefix=node.prefix) PK{��\�����fixes/fix_intern.pynu�[���# Copyright 2006 Georg Brandl. # Licensed to PSF under a Contributor Agreement. """Fixer for intern(). intern(s) -> sys.intern(s)""" # Local imports from .. import fixer_base from ..fixer_util import ImportAndCall, touch_import class FixIntern(fixer_base.BaseFix): BM_compatible = True order = "pre" PATTERN = """ power< 'intern' trailer< lpar='(' ( not(arglist | argument<any '=' any>) obj=any | obj=arglist<(not argument<any '=' any>) any ','> ) rpar=')' > after=any* > """ def transform(self, node, results): if results: # I feel like we should be able to express this logic in the # PATTERN above but I don't know how to do it so... obj = results['obj'] if obj: if obj.type == self.syms.star_expr: return # Make no change. if (obj.type == self.syms.argument and obj.children[0].value == '**'): return # Make no change. names = ('sys', 'intern') new = ImportAndCall(node, results, names) touch_import(None, 'sys', node) return new PK{��\H��gHHfixes/fix_isinstance.pynu�[���# Copyright 2008 Armin Ronacher. # Licensed to PSF under a Contributor Agreement. """Fixer that cleans up a tuple argument to isinstance after the tokens in it were fixed. This is mainly used to remove double occurrences of tokens as a leftover of the long -> int / unicode -> str conversion. eg. isinstance(x, (int, long)) -> isinstance(x, (int, int)) -> isinstance(x, int) """ from .. 
# ---- fixes/fix_isinstance.py ----

# Copyright 2008 Armin Ronacher.
# Licensed to PSF under a Contributor Agreement.

"""Fixer that cleans up a tuple argument to isinstance after the tokens
in it were fixed.  This is mainly used to remove double occurrences of
tokens as a leftover of the long -> int / unicode -> str conversion.

eg.  isinstance(x, (int, long)) -> isinstance(x, (int, int))
       -> isinstance(x, int)
"""

from .. import fixer_base
from ..fixer_util import token


class FixIsinstance(fixer_base.BaseFix):
    BM_compatible = True
    PATTERN = """
    power<
        'isinstance'
        trailer< '(' arglist< any ',' atom< '('
            args=testlist_gexp< any+ >
        ')' > > ')' >
    >
    """

    run_order = 6

    def transform(self, node, results):
        names_inserted = set()
        testlist = results["args"]
        args = testlist.children
        new_args = []
        iterator = enumerate(args)
        for idx, arg in iterator:
            if arg.type == token.NAME and arg.value in names_inserted:
                if idx < len(args) - 1 and args[idx + 1].type == token.COMMA:
                    next(iterator)
                    continue
            else:
                new_args.append(arg)
                if arg.type == token.NAME:
                    names_inserted.add(arg.value)
        if new_args and new_args[-1].type == token.COMMA:
            del new_args[-1]
        if len(new_args) == 1:
            atom = testlist.parent
            new_args[0].prefix = atom.prefix
            atom.replace(new_args[0])
        else:
            args[:] = new_args
            node.changed()
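The duplicate-removal pass in action, on a hypothetical post-conversion
leftover:

# ---- example (editor sketch, not part of the archive) ----
from lib2to3.refactor import RefactoringTool

tool = RefactoringTool(["lib2to3.fixes.fix_isinstance"])
print(str(tool.refactor_string("isinstance(x, (int, int))\n", "<demo>")))
# expected: isinstance(x, int)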
# ---- pgen2/__pycache__/*.pyc ----
# The archive continues with compiled CPython 3.6 bytecode caches for the
# lib2to3.pgen2 package (__init__, conv, driver, grammar, literals, parse,
# pgen; plain, .opt-1 and .opt-2 variants). Only module docstrings survive
# in that binary payload, which is not reproducible as text and is omitted
# here.
There's also a table here mapping operators to their names in the token module; the Python tokenize module reports all operators as the fallback token code OP, but the parser needs the actual token code. �N�)�token�tokenizec@s@eZdZdZdd�Zdd�Zdd�Zdd �Zd d�Zdd �Z dS)�Grammara� Pgen parsing tables conversion class. Once initialized, this class supplies the grammar tables for the parsing engine implemented by parse.py. The parsing engine accesses the instance variables directly. The class here does not provide initialization of the tables; several subclasses exist to do this (see the conv and pgen modules). The load() method reads the tables from a pickle file, which is much faster than the other ways offered by subclasses. The pickle file is written by calling dump() (after loading the grammar tables using a subclass). The report() method prints a readable representation of the tables to stdout, for debugging. The instance variables are as follows: symbol2number -- a dict mapping symbol names to numbers. Symbol numbers are always 256 or higher, to distinguish them from token numbers, which are between 0 and 255 (inclusive). number2symbol -- a dict mapping numbers to symbol names; these two are each other's inverse. states -- a list of DFAs, where each DFA is a list of states, each state is a list of arcs, and each arc is a (i, j) pair where i is a label and j is a state number. The DFA number is the index into this list. (This name is slightly confusing.) Final states are represented by a special arc of the form (0, j) where j is its own state number. dfas -- a dict mapping symbol numbers to (DFA, first) pairs, where DFA is an item from the states list above, and first is a set of tokens that can begin this grammar rule (represented by a dict whose values are always 1). labels -- a list of (x, y) pairs where x is either a token number or a symbol number, and y is either None or a string; the strings are keywords. The label number is the index in this list; label numbers are used to mark state transitions (arcs) in the DFAs. start -- the number of the grammar's start symbol. keywords -- a dict mapping keyword strings to arc labels. tokens -- a dict mapping token numbers to arc labels. cCs<i|_i|_g|_i|_dg|_i|_i|_i|_d|_dS)Nr�EMPTY�)rr) � symbol2number� number2symbol�states�dfas�labels�keywords�tokens�symbol2label�start)�self�r�-/usr/lib64/python3.6/lib2to3/pgen2/grammar.py�__init__MszGrammar.__init__cCs2t|d��}t|j�}tj||d�WdQRXdS)a�Dump the grammar tables to a pickle file. dump() recursively changes all dict to OrderedDict, so the pickled file is not exactly the same as what was passed in to dump(). load() uses the pickled file to create the tables, but only changes OrderedDict to dict at the top level; it does not recursively change OrderedDict to dict. So, the loaded tables are different from the original tables that were passed to load() in that some of the OrderedDict (from the pickled file) are not changed back to dict. For parsing, this has no effect on performance because OrderedDict uses dict's __getitem__ with nothing in between. �wb�N)�open�_make_deterministic�__dict__�pickle�dump)r�filename�f�drrrrXs zGrammar.dumpc Cs0t|d��}tj|�}WdQRX|jj|�dS)z+Load the grammar tables from a pickle file.�rbN)rr�loadr�update)rrrrrrrr iszGrammar.loadcCs|jjtj|��dS)z3Load the grammar tables from a pickle bytes object.N)rr!r�loads)rZpklrrrr"osz Grammar.loadscCsX|j�}x"dD]}t||t||�j��qW|jdd�|_|jdd�|_|j|_|S) z# Copy the grammar. 
rr rr rrN)rr rr rr)� __class__�setattr�getattr�copyrr r)r�newZ dict_attrrrrr&sszGrammar.copycCsvddlm}td�||j�td�||j�td�||j�td�||j�td�||j�td|j�d S) z:Dump the grammar tables to standard output, for debugging.r)�pprintZs2nZn2sr rrrN)r(�printrr r rrr)rr(rrr�report�s zGrammar.reportN) �__name__� __module__�__qualname__�__doc__rrr r"r&r*rrrrrs4 rcCs^t|t�r&tjtdd�|j�D���St|t�r>dd�|D�St|t�rZtdd�|D��S|S)Ncss|]\}}|t|�fVqdS)N)r)�.0�k�vrrr� <genexpr>�sz&_make_deterministic.<locals>.<genexpr>cSsg|]}t|��qSr)r)r/�errr� <listcomp>�sz'_make_deterministic.<locals>.<listcomp>css|]}t|�VqdS)N)r)r/r3rrrr2�s)� isinstance�dict�collections�OrderedDict�sorted�items�list�tuple)�toprrrr�s ra ( LPAR ) RPAR [ LSQB ] RSQB : COLON , COMMA ; SEMI + PLUS - MINUS * STAR / SLASH | VBAR & AMPER < LESS > GREATER = EQUAL . DOT % PERCENT ` BACKQUOTE { LBRACE } RBRACE @ AT @= ATEQUAL == EQEQUAL != NOTEQUAL <> NOTEQUAL <= LESSEQUAL >= GREATEREQUAL ~ TILDE ^ CIRCUMFLEX << LEFTSHIFT >> RIGHTSHIFT ** DOUBLESTAR += PLUSEQUAL -= MINEQUAL *= STAREQUAL /= SLASHEQUAL %= PERCENTEQUAL &= AMPEREQUAL |= VBAREQUAL ^= CIRCUMFLEXEQUAL <<= LEFTSHIFTEQUAL >>= RIGHTSHIFTEQUAL **= DOUBLESTAREQUAL // DOUBLESLASH //= DOUBLESLASHEQUAL -> RARROW )r.r7r�rr�objectrrZ opmap_rawZopmap� splitlines�line�split�op�namer%rrrr�<module> sy=PK{��\qM���.pgen2/__pycache__/grammar.cpython-36.opt-2.pycnu�[���3 \��@stddlZddlZddlmZmZGdd�de�Zdd�ZdZiZ x.ej �D]"ZerJej�\Z Zeee�e e <qJWdS) �N�)�token�tokenizec@s<eZdZdd�Zdd�Zdd�Zdd�Zd d �Zdd�Zd S)�GrammarcCs<i|_i|_g|_i|_dg|_i|_i|_i|_d|_dS)Nr�EMPTY�)rr) � symbol2number� number2symbol�states�dfas�labels�keywords�tokens�symbol2label�start)�self�r�-/usr/lib64/python3.6/lib2to3/pgen2/grammar.py�__init__MszGrammar.__init__cCs2t|d��}t|j�}tj||d�WdQRXdS)N�wb�)�open�_make_deterministic�__dict__�pickle�dump)r�filename�f�drrrrXs zGrammar.dumpc Cs0t|d��}tj|�}WdQRX|jj|�dS)N�rb)rr�loadr�update)rrrrrrrr iszGrammar.loadcCs|jjtj|��dS)N)rr!r�loads)rZpklrrrr"osz Grammar.loadscCsX|j�}x"dD]}t||t||�j��qW|jdd�|_|jdd�|_|j|_|S)Nrr rr rr)rr rr rr)� __class__�setattr�getattr�copyrr r)r�newZ dict_attrrrrr&sszGrammar.copycCsvddlm}td�||j�td�||j�td�||j�td�||j�td�||j�td|j�dS) Nr)�pprintZs2nZn2sr rrr)r(�printrr r rrr)rr(rrr�report�s zGrammar.reportN) �__name__� __module__�__qualname__rrr r"r&r*rrrrrs6 rcCs^t|t�r&tjtdd�|j�D���St|t�r>dd�|D�St|t�rZtdd�|D��S|S)Ncss|]\}}|t|�fVqdS)N)r)�.0�k�vrrr� <genexpr>�sz&_make_deterministic.<locals>.<genexpr>cSsg|]}t|��qSr)r)r.�errr� <listcomp>�sz'_make_deterministic.<locals>.<listcomp>css|]}t|�VqdS)N)r)r.r2rrrr1�s)� isinstance�dict�collections�OrderedDict�sorted�items�list�tuple)�toprrrr�s ra ( LPAR ) RPAR [ LSQB ] RSQB : COLON , COMMA ; SEMI + PLUS - MINUS * STAR / SLASH | VBAR & AMPER < LESS > GREATER = EQUAL . DOT % PERCENT ` BACKQUOTE { LBRACE } RBRACE @ AT @= ATEQUAL == EQEQUAL != NOTEQUAL <> NOTEQUAL <= LESSEQUAL >= GREATEREQUAL ~ TILDE ^ CIRCUMFLEX << LEFTSHIFT >> RIGHTSHIFT ** DOUBLESTAR += PLUSEQUAL -= MINEQUAL *= STAREQUAL /= SLASHEQUAL %= PERCENTEQUAL &= AMPEREQUAL |= VBAREQUAL ^= CIRCUMFLEXEQUAL <<= LEFTSHIFTEQUAL >>= RIGHTSHIFTEQUAL **= DOUBLESTAREQUAL // DOUBLESLASH //= DOUBLESLASHEQUAL -> RARROW )r6r�rr�objectrrZ opmap_rawZopmap� splitlines�line�split�op�namer%rrrr�<module>sy=PK{��\�����(pgen2/__pycache__/grammar.cpython-36.pycnu�[���3 \��@sxdZddlZddlZddlmZmZGdd�de�Zdd�Zd Z iZ x.e j�D]"ZerNej �\ZZeee�e e<qNWdS) a�This module defines the data structures used to represent a grammar. These are a bit arcane because they are derived from the data structures used by Python's 'pgen' parser generator. 
There's also a table here mapping operators to their names in the token module; the Python tokenize module reports all operators as the fallback token code OP, but the parser needs the actual token code. �N�)�token�tokenizec@s@eZdZdZdd�Zdd�Zdd�Zdd �Zd d�Zdd �Z dS)�Grammara� Pgen parsing tables conversion class. Once initialized, this class supplies the grammar tables for the parsing engine implemented by parse.py. The parsing engine accesses the instance variables directly. The class here does not provide initialization of the tables; several subclasses exist to do this (see the conv and pgen modules). The load() method reads the tables from a pickle file, which is much faster than the other ways offered by subclasses. The pickle file is written by calling dump() (after loading the grammar tables using a subclass). The report() method prints a readable representation of the tables to stdout, for debugging. The instance variables are as follows: symbol2number -- a dict mapping symbol names to numbers. Symbol numbers are always 256 or higher, to distinguish them from token numbers, which are between 0 and 255 (inclusive). number2symbol -- a dict mapping numbers to symbol names; these two are each other's inverse. states -- a list of DFAs, where each DFA is a list of states, each state is a list of arcs, and each arc is a (i, j) pair where i is a label and j is a state number. The DFA number is the index into this list. (This name is slightly confusing.) Final states are represented by a special arc of the form (0, j) where j is its own state number. dfas -- a dict mapping symbol numbers to (DFA, first) pairs, where DFA is an item from the states list above, and first is a set of tokens that can begin this grammar rule (represented by a dict whose values are always 1). labels -- a list of (x, y) pairs where x is either a token number or a symbol number, and y is either None or a string; the strings are keywords. The label number is the index in this list; label numbers are used to mark state transitions (arcs) in the DFAs. start -- the number of the grammar's start symbol. keywords -- a dict mapping keyword strings to arc labels. tokens -- a dict mapping token numbers to arc labels. cCs<i|_i|_g|_i|_dg|_i|_i|_i|_d|_dS)Nr�EMPTY�)rr) � symbol2number� number2symbol�states�dfas�labels�keywords�tokens�symbol2label�start)�self�r�-/usr/lib64/python3.6/lib2to3/pgen2/grammar.py�__init__MszGrammar.__init__cCs2t|d��}t|j�}tj||d�WdQRXdS)a�Dump the grammar tables to a pickle file. dump() recursively changes all dict to OrderedDict, so the pickled file is not exactly the same as what was passed in to dump(). load() uses the pickled file to create the tables, but only changes OrderedDict to dict at the top level; it does not recursively change OrderedDict to dict. So, the loaded tables are different from the original tables that were passed to load() in that some of the OrderedDict (from the pickled file) are not changed back to dict. For parsing, this has no effect on performance because OrderedDict uses dict's __getitem__ with nothing in between. �wb�N)�open�_make_deterministic�__dict__�pickle�dump)r�filename�f�drrrrXs zGrammar.dumpc Cs0t|d��}tj|�}WdQRX|jj|�dS)z+Load the grammar tables from a pickle file.�rbN)rr�loadr�update)rrrrrrrr iszGrammar.loadcCs|jjtj|��dS)z3Load the grammar tables from a pickle bytes object.N)rr!r�loads)rZpklrrrr"osz Grammar.loadscCsX|j�}x"dD]}t||t||�j��qW|jdd�|_|jdd�|_|j|_|S) z# Copy the grammar. 
rr rr rrN)rr rr rr)� __class__�setattr�getattr�copyrr r)r�newZ dict_attrrrrr&sszGrammar.copycCsvddlm}td�||j�td�||j�td�||j�td�||j�td�||j�td|j�d S) z:Dump the grammar tables to standard output, for debugging.r)�pprintZs2nZn2sr rrrN)r(�printrr r rrr)rr(rrr�report�s zGrammar.reportN) �__name__� __module__�__qualname__�__doc__rrr r"r&r*rrrrrs4 rcCs^t|t�r&tjtdd�|j�D���St|t�r>dd�|D�St|t�rZtdd�|D��S|S)Ncss|]\}}|t|�fVqdS)N)r)�.0�k�vrrr� <genexpr>�sz&_make_deterministic.<locals>.<genexpr>cSsg|]}t|��qSr)r)r/�errr� <listcomp>�sz'_make_deterministic.<locals>.<listcomp>css|]}t|�VqdS)N)r)r/r3rrrr2�s)� isinstance�dict�collections�OrderedDict�sorted�items�list�tuple)�toprrrr�s ra ( LPAR ) RPAR [ LSQB ] RSQB : COLON , COMMA ; SEMI + PLUS - MINUS * STAR / SLASH | VBAR & AMPER < LESS > GREATER = EQUAL . DOT % PERCENT ` BACKQUOTE { LBRACE } RBRACE @ AT @= ATEQUAL == EQEQUAL != NOTEQUAL <> NOTEQUAL <= LESSEQUAL >= GREATEREQUAL ~ TILDE ^ CIRCUMFLEX << LEFTSHIFT >> RIGHTSHIFT ** DOUBLESTAR += PLUSEQUAL -= MINEQUAL *= STAREQUAL /= SLASHEQUAL %= PERCENTEQUAL &= AMPEREQUAL |= VBAREQUAL ^= CIRCUMFLEXEQUAL <<= LEFTSHIFTEQUAL >>= RIGHTSHIFTEQUAL **= DOUBLESTAREQUAL // DOUBLESLASH //= DOUBLESLASHEQUAL -> RARROW )r.r7r�rr�objectrrZ opmap_rawZopmap� splitlines�line�split�op�namer%rrrr�<module> sy=PK{��\��8BB/pgen2/__pycache__/literals.cpython-36.opt-1.pycnu�[���3 \O�@sPdZddlZddddddd d ddd � Zdd�Zdd�Zdd�ZedkrLe�dS)z<Safely evaluate Python string literals without using eval().�N���� � � ��'�"�\) �a�b�f�n�r�t�vr r rcCs�|jdd�\}}tj|�}|dk r&|S|jd�r�|dd�}t|�dkrTtd|��yt|d�}Wq�tk r�td|��Yq�Xn0yt|d�}Wn tk r�td|��YnXt|�S) Nr��x�z!invalid hex string escape ('\%s')��z#invalid octal string escape ('\%s'))�group�simple_escapes�get� startswith�len� ValueError�int�chr)�m�all�tail�escZhexes�i�r%�./usr/lib64/python3.6/lib2to3/pgen2/literals.py�escapes" r'cCsH|d}|dd�|dkr$|d}|t|�t|��}tjdt|�S)Nr�z)\\(\'|\"|\\|[abfnrtv]|x.{0,2}|[0-7]{1,3}))r�re�subr')�s�qr%r%r&� evalString(s r-cCsDx>td�D]2}t|�}t|�}t|�}||kr t||||�q WdS)N�)�ranger�reprr-�print)r$�cr+�er%r%r&�test2sr4�__main__)�__doc__r)rr'r-r4�__name__r%r%r%r&�<module>s PK{��\&�{��/pgen2/__pycache__/literals.cpython-36.opt-2.pycnu�[���3 \O�@sLddlZdddddddd d dd� Zd d�Zdd�Zdd�ZedkrHe�dS)�N���� � � ��'�"�\) �a�b�f�n�r�t�vr r rcCs�|jdd�\}}tj|�}|dk r&|S|jd�r�|dd�}t|�dkrTtd|��yt|d�}Wq�tk r�td|��Yq�Xn0yt|d�}Wn tk r�td|��YnXt|�S) Nr��x�z!invalid hex string escape ('\%s')��z#invalid octal string escape ('\%s'))�group�simple_escapes�get� startswith�len� ValueError�int�chr)�m�all�tail�escZhexes�i�r%�./usr/lib64/python3.6/lib2to3/pgen2/literals.py�escapes" r'cCsH|d}|dd�|dkr$|d}|t|�t|��}tjdt|�S)Nr�z)\\(\'|\"|\\|[abfnrtv]|x.{0,2}|[0-7]{1,3}))r�re�subr')�s�qr%r%r&� evalString(s r-cCsDx>td�D]2}t|�}t|�}t|�}||kr t||||�q WdS)N�)�ranger�reprr-�print)r$�cr+�er%r%r&�test2sr4�__main__)r)rr'r-r4�__name__r%r%r%r&�<module>s PK{��\�1���)pgen2/__pycache__/literals.cpython-36.pycnu�[���3 \O�@sPdZddlZddddddd d ddd � Zdd�Zdd�Zdd�ZedkrLe�dS)z<Safely evaluate Python string literals without using eval().�N���� � � ��'�"�\) �a�b�f�n�r�t�vr r rcCs�|jdd�\}}|jd�st�tj|�}|dk r4|S|jd�r�|dd�}t|�dkrbtd|��yt|d�}Wq�tk r�td|��Yq�Xn0yt|d�}Wn tk r�td |��YnXt|�S) Nr�r�x�z!invalid hex string escape ('\%s')��z#invalid octal string escape ('\%s')) �group� startswith�AssertionError�simple_escapes�get�len� ValueError�int�chr)�m�all�tail�escZhexes�i�r&�./usr/lib64/python3.6/lib2to3/pgen2/literals.py�escapes$ r(cCs�|jd�s(|jd�s(tt|dd����|d}|dd�|dkrL|d}|j|�sptt|t|�d����t|�dt|�ks�t�|t|�t|��}tjdt|�S)Nr r 
rr�rz)\\(\'|\"|\\|[abfnrtv]|x.{0,2}|[0-7]{1,3}))rr�repr�endswithr�re�subr()�s�qr&r&r'� evalString(s($r0cCsDx>td�D]2}t|�}t|�}t|�}||kr t||||�q WdS)N�)�ranger r*r0�print)r%�cr.�er&r&r'�test2sr6�__main__)�__doc__r,rr(r0r6�__name__r&r&r&r'�<module>s PK{��\5�f��,pgen2/__pycache__/parse.cpython-36.opt-1.pycnu�[���3 \u�@s4dZddlmZGdd�de�ZGdd�de�ZdS)z�Parser engine for the grammar tables generated by pgen. The grammar table must be loaded first. See Parser/parser.c in the Python distribution for additional info on how this parsing engine works. �)�tokenc@seZdZdZdd�ZdS)� ParseErrorz(Exception to signal the parser is stuck.cCs4tj|d||||f�||_||_||_||_dS)Nz!%s: type=%r, value=%r, context=%r)� Exception�__init__�msg�type�value�context)�selfrrrr �r�+/usr/lib64/python3.6/lib2to3/pgen2/parse.pyrszParseError.__init__N)�__name__� __module__�__qualname__�__doc__rrrrrrsrc@sLeZdZdZddd�Zddd�Zdd�Zd d �Zdd�Zd d�Z dd�Z dS)�Parsera5Parser engine. The proper usage sequence is: p = Parser(grammar, [converter]) # create instance p.setup([start]) # prepare for parsing <for each input token>: if p.addtoken(...): # parse a token; may raise ParseError break root = p.rootnode # root of abstract syntax tree A Parser instance may be reused by calling setup() repeatedly. A Parser instance contains state pertaining to the current token sequence, and should not be used concurrently by different threads to parse separate token sequences. See driver.py for how to get input tokens by tokenizing a file or string. Parsing is complete when addtoken() returns True; the root of the abstract syntax tree can then be retrieved from the rootnode instance variable. When a syntax error occurs, addtoken() raises the ParseError exception. There is no error recovery; the parser cannot be used after a syntax error was reported (but it can be reinitialized by calling setup()). NcCs||_|pdd�|_dS)a�Constructor. The grammar argument is a grammar.Grammar instance; see the grammar module for more information. The parser is not ready yet for parsing; you must call the setup() method to get it started. The optional convert argument is a function mapping concrete syntax tree nodes to abstract syntax tree nodes. If not given, no conversion is done and the syntax tree produced is the concrete syntax tree. If given, it must be a function of two arguments, the first being the grammar (a grammar.Grammar instance), and the second being the concrete syntax tree node to be converted. The syntax tree is converted from the bottom up. A concrete syntax tree node is a (type, value, context, nodes) tuple, where type is the node type (a token or symbol number), value is None for symbols and a string for tokens, context is None or an opaque value used for error reporting (typically a (lineno, offset) pair), and nodes is a list of children for symbols, and None for tokens. An abstract syntax tree node may be anything; this is entirely up to the converter function. cSs|S)Nr)�grammar�noderrr�<lambda>Wsz!Parser.__init__.<locals>.<lambda>N)r�convert)r rrrrrr9szParser.__init__cCsH|dkr|jj}|ddgf}|jj|d|f}|g|_d|_t�|_dS)a�Prepare for parsing. This *must* be called before starting to parse. The optional argument is an alternative start symbol; it defaults to the grammar's start symbol. You can use a Parser instance to parse any number of programs; each time you call setup() the parser is reset to an initial state determined by the (implicit or explicit) start symbol. 
[... binary data omitted: the remainder of the preceding bytecode entry, followed by the
marshalled CPython 3.6 bytecode of the archive entries listed below. Only the entry names
and fragments of the embedded docstrings are recoverable from the dump; the archive
resumes with plain sources at pgen2/__init__.py.]

pgen2/__pycache__/parse.cpython-36.opt-2.pyc
pgen2/__pycache__/parse.cpython-36.pyc
    ("Parser engine for the grammar tables generated by pgen. ... See Parser/parser.c
    in the Python distribution for additional info on how this parsing engine works.")
pgen2/__pycache__/pgen.cpython-36.opt-1.pyc
pgen2/__pycache__/pgen.cpython-36.opt-2.pyc
pgen2/__pycache__/pgen.cpython-36.pyc
pgen2/__pycache__/token.cpython-36.opt-1.pyc
pgen2/__pycache__/token.cpython-36.opt-2.pyc
pgen2/__pycache__/token.cpython-36.pyc
    ("Token constants (from \"token.h\").")
pgen2/__pycache__/tokenize.cpython-36.opt-1.pyc
pgen2/__pycache__/tokenize.cpython-36.opt-2.pyc
pgen2/__pycache__/tokenize.cpython-36.pyc
    ("Tokenization help for Python programs. generate_tokens(readline) is a generator
    that breaks a stream of text into Python tokens.")
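The three tokenize entries above hold the compiled form of lib2to3's tokenizer; its
docstring (partially recoverable from the bytecode) describes generate_tokens(readline)
as a generator of 5-tuples: token type, token string, start (row, col), end (row, col),
and the source line. A minimal sketch of driving that interface — the sample input
string is an illustrative assumption, not taken from the archive:

# Minimal sketch: iterate lib2to3's tokenizer over an illustrative string.
import io
from lib2to3.pgen2 import token, tokenize

source = "x = 1\n"  # hypothetical sample input, not from the archive
for tok_type, value, start, end, line in tokenize.generate_tokens(
        io.StringIO(source).readline):
    # tok_name (defined in the token module listed above) maps codes to names.
    print(token.tok_name[tok_type], repr(value), start, end)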
pgen2/__init__.py

# Copyright 2004-2005 Elemental Security, Inc. All Rights Reserved.
# Licensed to PSF under a Contributor Agreement.

"""The pgen2 package."""

pgen2/conv.py

# Copyright 2004-2005 Elemental Security, Inc. All Rights Reserved.
# Licensed to PSF under a Contributor Agreement.

"""Convert graminit.[ch] spit out by pgen to Python code.

Pgen is the Python parser generator.  It is useful to quickly create a
parser from a grammar file in Python's grammar notation.  But I don't
want my parsers to be written in C (yet), so I'm translating the
parsing tables to Python data structures and writing a Python parse
engine.

Note that the token numbers are constants determined by the standard
Python tokenizer.  The standard token module defines these numbers and
their names (the names are not used much).  The token numbers are
hardcoded into the Python tokenizer and into pgen.  A Python
implementation of the Python tokenizer is also available, in the
standard tokenize module.

On the other hand, symbol numbers (representing the grammar's
non-terminals) are assigned by pgen based on the actual grammar
input.

Note: this module is pretty much obsolete; the pgen module generates
equivalent grammar tables directly from the Grammar.txt input file
without having to invoke the Python pgen C program.

"""

# Python imports
import re

# Local imports
from pgen2 import grammar, token


class Converter(grammar.Grammar):
    """Grammar subclass that reads classic pgen output files.

    The run() method reads the tables as produced by the pgen parser
    generator, typically contained in two C files, graminit.h and
    graminit.c.  The other methods are for internal use only.

    See the base class for more documentation.

    """

    def run(self, graminit_h, graminit_c):
        """Load the grammar tables from the text files written by pgen."""
        self.parse_graminit_h(graminit_h)
        self.parse_graminit_c(graminit_c)
        self.finish_off()

    def parse_graminit_h(self, filename):
        """Parse the .h file written by pgen.  (Internal)

        This file is a sequence of #define statements defining the
        nonterminals of the grammar as numbers.  We build two tables
        mapping the numbers to names and back.

        """
        try:
            f = open(filename)
        except OSError as err:
            print("Can't open %s: %s" % (filename, err))
            return False
        self.symbol2number = {}
        self.number2symbol = {}
        lineno = 0
        for line in f:
            lineno += 1
            mo = re.match(r"^#define\s+(\w+)\s+(\d+)$", line)
            if not mo and line.strip():
                print("%s(%s): can't parse %s" % (filename, lineno,
                                                  line.strip()))
            else:
                symbol, number = mo.groups()
                number = int(number)
                assert symbol not in self.symbol2number
                assert number not in self.number2symbol
                self.symbol2number[symbol] = number
                self.number2symbol[number] = symbol
        return True

    def parse_graminit_c(self, filename):
        """Parse the .c file written by pgen.  (Internal)

        The file looks as follows.  The first two lines are always this:

        #include "pgenheaders.h"
        #include "grammar.h"

        After that come four blocks:

        1) one or more state definitions
        2) a table defining dfas
        3) a table defining labels
        4) a struct defining the grammar

        A state definition has the following form:
        - one or more arc arrays, each of the form:
          static arc arcs_<n>_<m>[<k>] = {
                  {<i>, <j>},
                  ...
          };
        - followed by a state array, of the form:
          static state states_<s>[<t>] = {
                  {<k>, arcs_<n>_<m>},
                  ...
          };

        """
        try:
            f = open(filename)
        except OSError as err:
            print("Can't open %s: %s" % (filename, err))
            return False
        # The code below essentially uses f's iterator-ness!
        lineno = 0

        # Expect the two #include lines
        lineno, line = lineno+1, next(f)
        assert line == '#include "pgenheaders.h"\n', (lineno, line)
        lineno, line = lineno+1, next(f)
        assert line == '#include "grammar.h"\n', (lineno, line)

        # Parse the state definitions
        lineno, line = lineno+1, next(f)
        allarcs = {}
        states = []
        while line.startswith("static arc "):
            while line.startswith("static arc "):
                mo = re.match(r"static arc arcs_(\d+)_(\d+)\[(\d+)\] = {$",
                              line)
                assert mo, (lineno, line)
                n, m, k = list(map(int, mo.groups()))
                arcs = []
                for _ in range(k):
                    lineno, line = lineno+1, next(f)
                    mo = re.match(r"\s+{(\d+), (\d+)},$", line)
                    assert mo, (lineno, line)
                    i, j = list(map(int, mo.groups()))
                    arcs.append((i, j))
                lineno, line = lineno+1, next(f)
                assert line == "};\n", (lineno, line)
                allarcs[(n, m)] = arcs
                lineno, line = lineno+1, next(f)
            mo = re.match(r"static state states_(\d+)\[(\d+)\] = {$", line)
            assert mo, (lineno, line)
            s, t = list(map(int, mo.groups()))
            assert s == len(states), (lineno, line)
            state = []
            for _ in range(t):
                lineno, line = lineno+1, next(f)
                mo = re.match(r"\s+{(\d+), arcs_(\d+)_(\d+)},$", line)
                assert mo, (lineno, line)
                k, n, m = list(map(int, mo.groups()))
                arcs = allarcs[n, m]
                assert k == len(arcs), (lineno, line)
                state.append(arcs)
            states.append(state)
            lineno, line = lineno+1, next(f)
            assert line == "};\n", (lineno, line)
            lineno, line = lineno+1, next(f)
        self.states = states

        # Parse the dfas
        dfas = {}
        mo = re.match(r"static dfa dfas\[(\d+)\] = {$", line)
        assert mo, (lineno, line)
        ndfas = int(mo.group(1))
        for i in range(ndfas):
            lineno, line = lineno+1, next(f)
            mo = re.match(r'\s+{(\d+), "(\w+)", (\d+), (\d+), states_(\d+),$',
                          line)
            assert mo, (lineno, line)
            symbol = mo.group(2)
            number, x, y, z = list(map(int, mo.group(1, 3, 4, 5)))
            assert self.symbol2number[symbol] == number, (lineno, line)
            assert self.number2symbol[number] == symbol, (lineno, line)
            assert x == 0, (lineno, line)
            state = states[z]
            assert y == len(state), (lineno, line)
            lineno, line = lineno+1, next(f)
            mo = re.match(r'\s+("(?:\\\d\d\d)*")},$', line)
            assert mo, (lineno, line)
            first = {}
            rawbitset = eval(mo.group(1))
            for i, c in enumerate(rawbitset):
                byte = ord(c)
                for j in range(8):
                    if byte & (1<<j):
                        first[i*8 + j] = 1
            dfas[number] = (state, first)
        lineno, line = lineno+1, next(f)
        assert line == "};\n", (lineno, line)
        self.dfas = dfas

        # Parse the labels
        labels = []
        lineno, line = lineno+1, next(f)
        mo = re.match(r"static label labels\[(\d+)\] = {$", line)
        assert mo, (lineno, line)
        nlabels = int(mo.group(1))
        for i in range(nlabels):
            lineno, line = lineno+1, next(f)
            mo = re.match(r'\s+{(\d+), (0|"\w+")},$', line)
            assert mo, (lineno, line)
            x, y = mo.groups()
            x = int(x)
            if y == "0":
                y = None
            else:
                y = eval(y)
            labels.append((x, y))
        lineno, line = lineno+1, next(f)
        assert line == "};\n", (lineno, line)
        self.labels = labels

        # Parse the grammar struct
        lineno, line = lineno+1, next(f)
        assert line == "grammar _PyParser_Grammar = {\n", (lineno, line)
        lineno, line = lineno+1, next(f)
        mo = re.match(r"\s+(\d+),$", line)
        assert mo, (lineno, line)
        ndfas = int(mo.group(1))
        assert ndfas == len(self.dfas)
        lineno, line = lineno+1, next(f)
        assert line == "\tdfas,\n", (lineno, line)
        lineno, line = lineno+1, next(f)
        mo = re.match(r"\s+{(\d+), labels},$", line)
        assert mo, (lineno, line)
        nlabels = int(mo.group(1))
        assert nlabels == len(self.labels), (lineno, line)
        lineno, line = lineno+1, next(f)
        mo = re.match(r"\s+(\d+)$", line)
        assert mo, (lineno, line)
        start = int(mo.group(1))
        assert start in self.number2symbol, (lineno, line)
        self.start = start
        lineno, line = lineno+1, next(f)
        assert line == "};\n", (lineno, line)
        try:
            lineno, line = lineno+1, next(f)
        except StopIteration:
            pass
        else:
            assert 0, (lineno, line)

    def finish_off(self):
        """Create additional useful structures.  (Internal)."""
        self.keywords = {}  # map from keyword strings to arc labels
        self.tokens = {}    # map from numeric token values to arc labels
        for ilabel, (type, value) in enumerate(self.labels):
            if type == token.NAME and value is not None:
                self.keywords[value] = ilabel
            elif value is None:
                self.tokens[type] = ilabel

pgen2/driver.py

# Copyright 2004-2005 Elemental Security, Inc. All Rights Reserved.
# Licensed to PSF under a Contributor Agreement.

# Modifications:
# Copyright 2006 Google, Inc. All Rights Reserved.
# Licensed to PSF under a Contributor Agreement.

"""Parser driver.

This provides a high-level interface to parse a file into a syntax tree.

"""

__author__ = "Guido van Rossum <guido@python.org>"

__all__ = ["Driver", "load_grammar"]

# Python imports
import codecs
import io
import os
import logging
import pkgutil
import sys

# Pgen imports
from . import grammar, parse, token, tokenize, pgen


class Driver(object):

    def __init__(self, grammar, convert=None, logger=None):
        self.grammar = grammar
        if logger is None:
            logger = logging.getLogger()
        self.logger = logger
        self.convert = convert

    def parse_tokens(self, tokens, debug=False):
        """Parse a series of tokens and return the syntax tree."""
        # XXX Move the prefix computation into a wrapper around tokenize.
        p = parse.Parser(self.grammar, self.convert)
        p.setup()
        lineno = 1
        column = 0
        type = value = start = end = line_text = None
        prefix = ""
        for quintuple in tokens:
            type, value, start, end, line_text = quintuple
            if start != (lineno, column):
                assert (lineno, column) <= start, ((lineno, column), start)
                s_lineno, s_column = start
                if lineno < s_lineno:
                    prefix += "\n" * (s_lineno - lineno)
                    lineno = s_lineno
                    column = 0
                if column < s_column:
                    prefix += line_text[column:s_column]
                    column = s_column
            if type in (tokenize.COMMENT, tokenize.NL):
                prefix += value
                lineno, column = end
                if value.endswith("\n"):
                    lineno += 1
                    column = 0
                continue
            if type == token.OP:
                type = grammar.opmap[value]
            if debug:
                self.logger.debug("%s %r (prefix=%r)",
                                  token.tok_name[type], value, prefix)
            if p.addtoken(type, value, (prefix, start)):
                if debug:
                    self.logger.debug("Stop.")
                break
            prefix = ""
            lineno, column = end
            if value.endswith("\n"):
                lineno += 1
                column = 0
        else:
            # We never broke out -- EOF is too soon (how can this happen???)
            raise parse.ParseError("incomplete input",
                                   type, value, (prefix, start))
        return p.rootnode

    def parse_stream_raw(self, stream, debug=False):
        """Parse a stream and return the syntax tree."""
        tokens = tokenize.generate_tokens(stream.readline)
        return self.parse_tokens(tokens, debug)

    def parse_stream(self, stream, debug=False):
        """Parse a stream and return the syntax tree."""
        return self.parse_stream_raw(stream, debug)

    def parse_file(self, filename, encoding=None, debug=False):
        """Parse a file and return the syntax tree."""
        stream = codecs.open(filename, "r", encoding)
        try:
            return self.parse_stream(stream, debug)
        finally:
            stream.close()

    def parse_string(self, text, debug=False):
        """Parse a string and return the syntax tree."""
        tokens = tokenize.generate_tokens(io.StringIO(text).readline)
        return self.parse_tokens(tokens, debug)


def _generate_pickle_name(gt):
    head, tail = os.path.splitext(gt)
    if tail == ".txt":
        tail = ""
    return head + tail + ".".join(map(str, sys.version_info)) + ".pickle"


def load_grammar(gt="Grammar.txt", gp=None,
                 save=True, force=False, logger=None):
    """Load the grammar (maybe from a pickle)."""
    if logger is None:
        logger = logging.getLogger()
    gp = _generate_pickle_name(gt) if gp is None else gp
    if force or not _newer(gp, gt):
        logger.info("Generating grammar tables from %s", gt)
        g = pgen.generate_grammar(gt)
        if save:
            logger.info("Writing grammar tables to %s", gp)
            try:
                g.dump(gp)
            except OSError as e:
                logger.info("Writing failed: %s", e)
    else:
        g = grammar.Grammar()
        g.load(gp)
    return g


def _newer(a, b):
    """Inquire whether file a was written since file b."""
    if not os.path.exists(a):
        return False
    if not os.path.exists(b):
        return True
    return os.path.getmtime(a) >= os.path.getmtime(b)


def load_packaged_grammar(package, grammar_source):
    """Normally, loads a pickled grammar by doing
        pkgutil.get_data(package, pickled_grammar)
    where *pickled_grammar* is computed from *grammar_source* by adding the
    Python version and using a ``.pickle`` extension.

    However, if *grammar_source* is an extant file, load_grammar(grammar_source)
    is called instead. This facilitates using a packaged grammar file when needed
    but preserves load_grammar's automatic regeneration behavior when possible.

    """
    if os.path.isfile(grammar_source):
        return load_grammar(grammar_source)
    pickled_name = _generate_pickle_name(os.path.basename(grammar_source))
    data = pkgutil.get_data(package, pickled_name)
    g = grammar.Grammar()
    g.loads(data)
    return g


def main(*args):
    """Main program, when run as a script: produce grammar pickle files.
# ==== pgen2/grammar.py ====
# Copyright 2004-2005 Elemental Security, Inc. All Rights Reserved.
# Licensed to PSF under a Contributor Agreement.

"""This module defines the data structures used to represent a grammar.

These are a bit arcane because they are derived from the data
structures used by Python's 'pgen' parser generator.

There's also a table here mapping operators to their names in the
token module; the Python tokenize module reports all operators as the
fallback token code OP, but the parser needs the actual token code.

"""

# Python imports
import collections
import pickle

# Local imports
from . import token, tokenize


class Grammar(object):
    """Pgen parsing tables conversion class.

    Once initialized, this class supplies the grammar tables for the
    parsing engine implemented by parse.py.  The parsing engine
    accesses the instance variables directly.  The class here does not
    provide initialization of the tables; several subclasses exist to
    do this (see the conv and pgen modules).

    The load() method reads the tables from a pickle file, which is
    much faster than the other ways offered by subclasses.  The pickle
    file is written by calling dump() (after loading the grammar
    tables using a subclass).  The report() method prints a readable
    representation of the tables to stdout, for debugging.

    The instance variables are as follows:

    symbol2number -- a dict mapping symbol names to numbers.  Symbol
                     numbers are always 256 or higher, to distinguish
                     them from token numbers, which are between 0 and
                     255 (inclusive).

    number2symbol -- a dict mapping numbers to symbol names;
                     these two are each other's inverse.

    states -- a list of DFAs, where each DFA is a list of states, each
              state is a list of arcs, and each arc is a (i, j) pair
              where i is a label and j is a state number.  The DFA number
              is the index into this list.  (This name is slightly
              confusing.)  Final states are represented by a special arc
              of the form (0, j) where j is its own state number.

    dfas -- a dict mapping symbol numbers to (DFA, first)
            pairs, where DFA is an item from the states list
            above, and first is a set of tokens that can
            begin this grammar rule (represented by a dict
            whose values are always 1).

    labels -- a list of (x, y) pairs where x is either a token
              number or a symbol number, and y is either None
              or a string; the strings are keywords.  The label
              number is the index in this list; label numbers
              are used to mark state transitions (arcs) in the
              DFAs.

    start -- the number of the grammar's start symbol.

    keywords -- a dict mapping keyword strings to arc labels.

    tokens -- a dict mapping token numbers to arc labels.

    """

    def __init__(self):
        self.symbol2number = {}
        self.number2symbol = {}
        self.states = []
        self.dfas = {}
        self.labels = [(0, "EMPTY")]
        self.keywords = {}
        self.tokens = {}
        self.symbol2label = {}
        self.start = 256
    def dump(self, filename):
        """Dump the grammar tables to a pickle file.

        dump() recursively changes all dict to OrderedDict, so the pickled
        file is not exactly the same as what was passed in to dump(). load()
        uses the pickled file to create the tables, but only changes
        OrderedDict to dict at the top level; it does not recursively change
        OrderedDict to dict. So, the loaded tables are different from the
        original tables that were passed to dump() in that some of the
        OrderedDict (from the pickled file) are not changed back to dict.
        For parsing, this has no effect on performance because OrderedDict
        uses dict's __getitem__ with nothing in between.
        """
        with open(filename, "wb") as f:
            d = _make_deterministic(self.__dict__)
            pickle.dump(d, f, 2)

    def load(self, filename):
        """Load the grammar tables from a pickle file."""
        with open(filename, "rb") as f:
            d = pickle.load(f)
        self.__dict__.update(d)

    def loads(self, pkl):
        """Load the grammar tables from a pickle bytes object."""
        self.__dict__.update(pickle.loads(pkl))

    def copy(self):
        """
        Copy the grammar.
        """
        new = self.__class__()
        for dict_attr in ("symbol2number", "number2symbol", "dfas", "keywords",
                          "tokens", "symbol2label"):
            setattr(new, dict_attr, getattr(self, dict_attr).copy())
        new.labels = self.labels[:]
        new.states = self.states[:]
        new.start = self.start
        return new

    def report(self):
        """Dump the grammar tables to standard output, for debugging."""
        from pprint import pprint
        print("s2n")
        pprint(self.symbol2number)
        print("n2s")
        pprint(self.number2symbol)
        print("states")
        pprint(self.states)
        print("dfas")
        pprint(self.dfas)
        print("labels")
        pprint(self.labels)
        print("start", self.start)


def _make_deterministic(top):
    if isinstance(top, dict):
        return collections.OrderedDict(
            sorted(((k, _make_deterministic(v)) for k, v in top.items())))
    if isinstance(top, list):
        return [_make_deterministic(e) for e in top]
    if isinstance(top, tuple):
        return tuple(_make_deterministic(e) for e in top)
    return top


# Map from operator to number (since tokenize doesn't do this)

opmap_raw = """
( LPAR
) RPAR
[ LSQB
] RSQB
: COLON
, COMMA
; SEMI
+ PLUS
- MINUS
* STAR
/ SLASH
| VBAR
& AMPER
< LESS
> GREATER
= EQUAL
. DOT
% PERCENT
` BACKQUOTE
{ LBRACE
} RBRACE
@ AT
@= ATEQUAL
== EQEQUAL
!= NOTEQUAL
<> NOTEQUAL
<= LESSEQUAL
>= GREATEREQUAL
~ TILDE
^ CIRCUMFLEX
<< LEFTSHIFT
>> RIGHTSHIFT
** DOUBLESTAR
+= PLUSEQUAL
-= MINEQUAL
*= STAREQUAL
/= SLASHEQUAL
%= PERCENTEQUAL
&= AMPEREQUAL
|= VBAREQUAL
^= CIRCUMFLEXEQUAL
<<= LEFTSHIFTEQUAL
>>= RIGHTSHIFTEQUAL
**= DOUBLESTAREQUAL
// DOUBLESLASH
//= DOUBLESLASHEQUAL
-> RARROW
"""

opmap = {}
for line in opmap_raw.splitlines():
    if line:
        op, name = line.split()
        opmap[op] = getattr(token, name)
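One concrete consequence of the opmap table above: the tokenizer labels every operator with the generic OP code, and the driver uses opmap to restore the specific code that the grammar's labels refer to. A quick check of what the table provides once this module is imported:

from lib2to3.pgen2 import grammar, token

assert grammar.opmap["**="] == token.DOUBLESTAREQUAL
assert grammar.opmap["->"] == token.RARROW
assert grammar.opmap["<>"] == grammar.opmap["!="] == token.NOTEQUAL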
"""Safely evaluate Python string literals without using eval().""" import re simple_escapes = {"a": "\a", "b": "\b", "f": "\f", "n": "\n", "r": "\r", "t": "\t", "v": "\v", "'": "'", '"': '"', "\\": "\\"} def escape(m): all, tail = m.group(0, 1) assert all.startswith("\\") esc = simple_escapes.get(tail) if esc is not None: return esc if tail.startswith("x"): hexes = tail[1:] if len(hexes) < 2: raise ValueError("invalid hex string escape ('\\%s')" % tail) try: i = int(hexes, 16) except ValueError: raise ValueError("invalid hex string escape ('\\%s')" % tail) else: try: i = int(tail, 8) except ValueError: raise ValueError("invalid octal string escape ('\\%s')" % tail) return chr(i) def evalString(s): assert s.startswith("'") or s.startswith('"'), repr(s[:1]) q = s[0] if s[:3] == q*3: q = q*3 assert s.endswith(q), repr(s[-len(q):]) assert len(s) >= 2*len(q) s = s[len(q):-len(q)] return re.sub(r"\\(\'|\"|\\|[abfnrtv]|x.{0,2}|[0-7]{1,3})", escape, s) def test(): for i in range(256): c = chr(i) s = repr(c) e = evalString(s) if e != c: print(i, c, s, e) if __name__ == "__main__": test() PK{��\sWP�uupgen2/parse.pynu�[���# Copyright 2004-2005 Elemental Security, Inc. All Rights Reserved. # Licensed to PSF under a Contributor Agreement. """Parser engine for the grammar tables generated by pgen. The grammar table must be loaded first. See Parser/parser.c in the Python distribution for additional info on how this parsing engine works. """ # Local imports from . import token class ParseError(Exception): """Exception to signal the parser is stuck.""" def __init__(self, msg, type, value, context): Exception.__init__(self, "%s: type=%r, value=%r, context=%r" % (msg, type, value, context)) self.msg = msg self.type = type self.value = value self.context = context class Parser(object): """Parser engine. The proper usage sequence is: p = Parser(grammar, [converter]) # create instance p.setup([start]) # prepare for parsing <for each input token>: if p.addtoken(...): # parse a token; may raise ParseError break root = p.rootnode # root of abstract syntax tree A Parser instance may be reused by calling setup() repeatedly. A Parser instance contains state pertaining to the current token sequence, and should not be used concurrently by different threads to parse separate token sequences. See driver.py for how to get input tokens by tokenizing a file or string. Parsing is complete when addtoken() returns True; the root of the abstract syntax tree can then be retrieved from the rootnode instance variable. When a syntax error occurs, addtoken() raises the ParseError exception. There is no error recovery; the parser cannot be used after a syntax error was reported (but it can be reinitialized by calling setup()). """ def __init__(self, grammar, convert=None): """Constructor. The grammar argument is a grammar.Grammar instance; see the grammar module for more information. The parser is not ready yet for parsing; you must call the setup() method to get it started. The optional convert argument is a function mapping concrete syntax tree nodes to abstract syntax tree nodes. If not given, no conversion is done and the syntax tree produced is the concrete syntax tree. If given, it must be a function of two arguments, the first being the grammar (a grammar.Grammar instance), and the second being the concrete syntax tree node to be converted. The syntax tree is converted from the bottom up. 
# ==== pgen2/parse.py ====
# Copyright 2004-2005 Elemental Security, Inc. All Rights Reserved.
# Licensed to PSF under a Contributor Agreement.

"""Parser engine for the grammar tables generated by pgen.

The grammar table must be loaded first.

See Parser/parser.c in the Python distribution for additional info on
how this parsing engine works.

"""

# Local imports
from . import token

class ParseError(Exception):
    """Exception to signal the parser is stuck."""

    def __init__(self, msg, type, value, context):
        Exception.__init__(self, "%s: type=%r, value=%r, context=%r" %
                           (msg, type, value, context))
        self.msg = msg
        self.type = type
        self.value = value
        self.context = context

class Parser(object):
    """Parser engine.

    The proper usage sequence is:

    p = Parser(grammar, [converter])  # create instance
    p.setup([start])                  # prepare for parsing
    <for each input token>:
        if p.addtoken(...):           # parse a token; may raise ParseError
            break
    root = p.rootnode                 # root of abstract syntax tree

    A Parser instance may be reused by calling setup() repeatedly.

    A Parser instance contains state pertaining to the current token
    sequence, and should not be used concurrently by different threads
    to parse separate token sequences.

    See driver.py for how to get input tokens by tokenizing a file or
    string.

    Parsing is complete when addtoken() returns True; the root of the
    abstract syntax tree can then be retrieved from the rootnode
    instance variable.  When a syntax error occurs, addtoken() raises
    the ParseError exception.  There is no error recovery; the parser
    cannot be used after a syntax error was reported (but it can be
    reinitialized by calling setup()).

    """

    def __init__(self, grammar, convert=None):
        """Constructor.

        The grammar argument is a grammar.Grammar instance; see the
        grammar module for more information.

        The parser is not ready yet for parsing; you must call the
        setup() method to get it started.

        The optional convert argument is a function mapping concrete
        syntax tree nodes to abstract syntax tree nodes.  If not
        given, no conversion is done and the syntax tree produced is
        the concrete syntax tree.  If given, it must be a function of
        two arguments, the first being the grammar (a grammar.Grammar
        instance), and the second being the concrete syntax tree node
        to be converted.  The syntax tree is converted from the bottom
        up.

        A concrete syntax tree node is a (type, value, context, nodes)
        tuple, where type is the node type (a token or symbol number),
        value is None for symbols and a string for tokens, context is
        None or an opaque value used for error reporting (typically a
        (lineno, offset) pair), and nodes is a list of children for
        symbols, and None for tokens.

        An abstract syntax tree node may be anything; this is entirely
        up to the converter function.

        """
        self.grammar = grammar
        self.convert = convert or (lambda grammar, node: node)

    def setup(self, start=None):
        """Prepare for parsing.

        This *must* be called before starting to parse.

        The optional argument is an alternative start symbol; it
        defaults to the grammar's start symbol.

        You can use a Parser instance to parse any number of programs;
        each time you call setup() the parser is reset to an initial
        state determined by the (implicit or explicit) start symbol.

        """
        if start is None:
            start = self.grammar.start
        # Each stack entry is a tuple: (dfa, state, node).
        # A node is a tuple: (type, value, context, children),
        # where children is a list of nodes or None, and context may be None.
        newnode = (start, None, None, [])
        stackentry = (self.grammar.dfas[start], 0, newnode)
        self.stack = [stackentry]
        self.rootnode = None
        self.used_names = set()  # Aliased to self.rootnode.used_names in pop()

    def addtoken(self, type, value, context):
        """Add a token; return True iff this is the end of the program."""
        # Map from token to label
        ilabel = self.classify(type, value, context)
        # Loop until the token is shifted; may raise exceptions
        while True:
            dfa, state, node = self.stack[-1]
            states, first = dfa
            arcs = states[state]
            # Look for a state with this label
            for i, newstate in arcs:
                t, v = self.grammar.labels[i]
                if ilabel == i:
                    # Look it up in the list of labels
                    assert t < 256
                    # Shift a token; we're done with it
                    self.shift(type, value, newstate, context)
                    # Pop while we are in an accept-only state
                    state = newstate
                    while states[state] == [(0, state)]:
                        self.pop()
                        if not self.stack:
                            # Done parsing!
                            return True
                        dfa, state, node = self.stack[-1]
                        states, first = dfa
                    # Done with this token
                    return False
                elif t >= 256:
                    # See if it's a symbol and if we're in its first set
                    itsdfa = self.grammar.dfas[t]
                    itsstates, itsfirst = itsdfa
                    if ilabel in itsfirst:
                        # Push a symbol
                        self.push(t, self.grammar.dfas[t], newstate, context)
                        break # To continue the outer while loop
            else:
                if (0, state) in arcs:
                    # An accepting state, pop it and try something else
                    self.pop()
                    if not self.stack:
                        # Done parsing, but another token is input
                        raise ParseError("too much input",
                                         type, value, context)
                else:
                    # No success finding a transition
                    raise ParseError("bad input", type, value, context)

    def classify(self, type, value, context):
        """Turn a token into a label.  (Internal)"""
        if type == token.NAME:
            # Keep a listing of all used names
            self.used_names.add(value)
            # Check for reserved words
            ilabel = self.grammar.keywords.get(value)
            if ilabel is not None:
                return ilabel
        ilabel = self.grammar.tokens.get(type)
        if ilabel is None:
            raise ParseError("bad token", type, value, context)
        return ilabel
(Internal)""" dfa, state, node = self.stack[-1] newnode = (type, None, context, []) self.stack[-1] = (dfa, newstate, node) self.stack.append((newdfa, 0, newnode)) def pop(self): """Pop a nonterminal. (Internal)""" popdfa, popstate, popnode = self.stack.pop() newnode = self.convert(self.grammar, popnode) if newnode is not None: if self.stack: dfa, state, node = self.stack[-1] node[-1].append(newnode) else: self.rootnode = newnode self.rootnode.used_names = self.used_names PK{��\�v/��5�5 pgen2/pgen.pynu�[���# Copyright 2004-2005 Elemental Security, Inc. All Rights Reserved. # Licensed to PSF under a Contributor Agreement. # Pgen imports from . import grammar, token, tokenize class PgenGrammar(grammar.Grammar): pass class ParserGenerator(object): def __init__(self, filename, stream=None): close_stream = None if stream is None: stream = open(filename) close_stream = stream.close self.filename = filename self.stream = stream self.generator = tokenize.generate_tokens(stream.readline) self.gettoken() # Initialize lookahead self.dfas, self.startsymbol = self.parse() if close_stream is not None: close_stream() self.first = {} # map from symbol name to set of tokens self.addfirstsets() def make_grammar(self): c = PgenGrammar() names = list(self.dfas.keys()) names.sort() names.remove(self.startsymbol) names.insert(0, self.startsymbol) for name in names: i = 256 + len(c.symbol2number) c.symbol2number[name] = i c.number2symbol[i] = name for name in names: dfa = self.dfas[name] states = [] for state in dfa: arcs = [] for label, next in sorted(state.arcs.items()): arcs.append((self.make_label(c, label), dfa.index(next))) if state.isfinal: arcs.append((0, dfa.index(state))) states.append(arcs) c.states.append(states) c.dfas[c.symbol2number[name]] = (states, self.make_first(c, name)) c.start = c.symbol2number[self.startsymbol] return c def make_first(self, c, name): rawfirst = self.first[name] first = {} for label in sorted(rawfirst): ilabel = self.make_label(c, label) ##assert ilabel not in first # XXX failed on <> ... != first[ilabel] = 1 return first def make_label(self, c, label): # XXX Maybe this should be a method on a subclass of converter? 
# ==== pgen2/pgen.py ====
# Copyright 2004-2005 Elemental Security, Inc. All Rights Reserved.
# Licensed to PSF under a Contributor Agreement.

# Pgen imports
from . import grammar, token, tokenize

class PgenGrammar(grammar.Grammar):
    pass

class ParserGenerator(object):

    def __init__(self, filename, stream=None):
        close_stream = None
        if stream is None:
            stream = open(filename)
            close_stream = stream.close
        self.filename = filename
        self.stream = stream
        self.generator = tokenize.generate_tokens(stream.readline)
        self.gettoken() # Initialize lookahead
        self.dfas, self.startsymbol = self.parse()
        if close_stream is not None:
            close_stream()
        self.first = {} # map from symbol name to set of tokens
        self.addfirstsets()

    def make_grammar(self):
        c = PgenGrammar()
        names = list(self.dfas.keys())
        names.sort()
        names.remove(self.startsymbol)
        names.insert(0, self.startsymbol)
        for name in names:
            i = 256 + len(c.symbol2number)
            c.symbol2number[name] = i
            c.number2symbol[i] = name
        for name in names:
            dfa = self.dfas[name]
            states = []
            for state in dfa:
                arcs = []
                for label, next in sorted(state.arcs.items()):
                    arcs.append((self.make_label(c, label), dfa.index(next)))
                if state.isfinal:
                    arcs.append((0, dfa.index(state)))
                states.append(arcs)
            c.states.append(states)
            c.dfas[c.symbol2number[name]] = (states, self.make_first(c, name))
        c.start = c.symbol2number[self.startsymbol]
        return c

    def make_first(self, c, name):
        rawfirst = self.first[name]
        first = {}
        for label in sorted(rawfirst):
            ilabel = self.make_label(c, label)
            ##assert ilabel not in first # XXX failed on <> ... !=
            first[ilabel] = 1
        return first

    def make_label(self, c, label):
        # XXX Maybe this should be a method on a subclass of converter?
        ilabel = len(c.labels)
        if label[0].isalpha():
            # Either a symbol name or a named token
            if label in c.symbol2number:
                # A symbol name (a non-terminal)
                if label in c.symbol2label:
                    return c.symbol2label[label]
                else:
                    c.labels.append((c.symbol2number[label], None))
                    c.symbol2label[label] = ilabel
                    return ilabel
            else:
                # A named token (NAME, NUMBER, STRING)
                itoken = getattr(token, label, None)
                assert isinstance(itoken, int), label
                assert itoken in token.tok_name, label
                if itoken in c.tokens:
                    return c.tokens[itoken]
                else:
                    c.labels.append((itoken, None))
                    c.tokens[itoken] = ilabel
                    return ilabel
        else:
            # Either a keyword or an operator
            assert label[0] in ('"', "'"), label
            value = eval(label)
            if value[0].isalpha():
                # A keyword
                if value in c.keywords:
                    return c.keywords[value]
                else:
                    c.labels.append((token.NAME, value))
                    c.keywords[value] = ilabel
                    return ilabel
            else:
                # An operator (any non-numeric token)
                itoken = grammar.opmap[value] # Fails if unknown token
                if itoken in c.tokens:
                    return c.tokens[itoken]
                else:
                    c.labels.append((itoken, None))
                    c.tokens[itoken] = ilabel
                    return ilabel

    def addfirstsets(self):
        names = list(self.dfas.keys())
        names.sort()
        for name in names:
            if name not in self.first:
                self.calcfirst(name)
            #print name, self.first[name].keys()

    def calcfirst(self, name):
        dfa = self.dfas[name]
        self.first[name] = None # dummy to detect left recursion
        state = dfa[0]
        totalset = {}
        overlapcheck = {}
        for label, next in state.arcs.items():
            if label in self.dfas:
                if label in self.first:
                    fset = self.first[label]
                    if fset is None:
                        raise ValueError("recursion for rule %r" % name)
                else:
                    self.calcfirst(label)
                    fset = self.first[label]
                totalset.update(fset)
                overlapcheck[label] = fset
            else:
                totalset[label] = 1
                overlapcheck[label] = {label: 1}
        inverse = {}
        for label, itsfirst in overlapcheck.items():
            for symbol in itsfirst:
                if symbol in inverse:
                    raise ValueError("rule %s is ambiguous; %s is in the"
                                     " first sets of %s as well as %s" %
                                     (name, symbol, label, inverse[symbol]))
                inverse[symbol] = label
        self.first[name] = totalset

    def parse(self):
        dfas = {}
        startsymbol = None
        # MSTART: (NEWLINE | RULE)* ENDMARKER
        while self.type != token.ENDMARKER:
            while self.type == token.NEWLINE:
                self.gettoken()
            # RULE: NAME ':' RHS NEWLINE
            name = self.expect(token.NAME)
            self.expect(token.OP, ":")
            a, z = self.parse_rhs()
            self.expect(token.NEWLINE)
            #self.dump_nfa(name, a, z)
            dfa = self.make_dfa(a, z)
            #self.dump_dfa(name, dfa)
            oldlen = len(dfa)
            self.simplify_dfa(dfa)
            newlen = len(dfa)
            dfas[name] = dfa
            #print name, oldlen, newlen
            if startsymbol is None:
                startsymbol = name
        return dfas, startsymbol
    def make_dfa(self, start, finish):
        # To turn an NFA into a DFA, we define the states of the DFA
        # to correspond to *sets* of states of the NFA.  Then do some
        # state reduction.  Let's represent sets as dicts with 1 for
        # values.
        assert isinstance(start, NFAState)
        assert isinstance(finish, NFAState)
        def closure(state):
            base = {}
            addclosure(state, base)
            return base
        def addclosure(state, base):
            assert isinstance(state, NFAState)
            if state in base:
                return
            base[state] = 1
            for label, next in state.arcs:
                if label is None:
                    addclosure(next, base)
        states = [DFAState(closure(start), finish)]
        for state in states: # NB states grows while we're iterating
            arcs = {}
            for nfastate in state.nfaset:
                for label, next in nfastate.arcs:
                    if label is not None:
                        addclosure(next, arcs.setdefault(label, {}))
            for label, nfaset in sorted(arcs.items()):
                for st in states:
                    if st.nfaset == nfaset:
                        break
                else:
                    st = DFAState(nfaset, finish)
                    states.append(st)
                state.addarc(st, label)
        return states # List of DFAState instances; first one is start

    def dump_nfa(self, name, start, finish):
        print("Dump of NFA for", name)
        todo = [start]
        for i, state in enumerate(todo):
            print("  State", i, state is finish and "(final)" or "")
            for label, next in state.arcs:
                if next in todo:
                    j = todo.index(next)
                else:
                    j = len(todo)
                    todo.append(next)
                if label is None:
                    print("    -> %d" % j)
                else:
                    print("    %s -> %d" % (label, j))

    def dump_dfa(self, name, dfa):
        print("Dump of DFA for", name)
        for i, state in enumerate(dfa):
            print("  State", i, state.isfinal and "(final)" or "")
            for label, next in sorted(state.arcs.items()):
                print("    %s -> %d" % (label, dfa.index(next)))

    def simplify_dfa(self, dfa):
        # This is not theoretically optimal, but works well enough.
        # Algorithm: repeatedly look for two states that have the same
        # set of arcs (same labels pointing to the same nodes) and
        # unify them, until things stop changing.

        # dfa is a list of DFAState instances
        changes = True
        while changes:
            changes = False
            for i, state_i in enumerate(dfa):
                for j in range(i+1, len(dfa)):
                    state_j = dfa[j]
                    if state_i == state_j:
                        #print "  unify", i, j
                        del dfa[j]
                        for state in dfa:
                            state.unifystate(state_j, state_i)
                        changes = True
                        break

    def parse_rhs(self):
        # RHS: ALT ('|' ALT)*
        a, z = self.parse_alt()
        if self.value != "|":
            return a, z
        else:
            aa = NFAState()
            zz = NFAState()
            aa.addarc(a)
            z.addarc(zz)
            while self.value == "|":
                self.gettoken()
                a, z = self.parse_alt()
                aa.addarc(a)
                z.addarc(zz)
            return aa, zz

    def parse_alt(self):
        # ALT: ITEM+
        a, b = self.parse_item()
        while (self.value in ("(", "[") or
               self.type in (token.NAME, token.STRING)):
            c, d = self.parse_item()
            b.addarc(c)
            b = d
        return a, b

    def parse_item(self):
        # ITEM: '[' RHS ']' | ATOM ['+' | '*']
        if self.value == "[":
            self.gettoken()
            a, z = self.parse_rhs()
            self.expect(token.OP, "]")
            a.addarc(z)
            return a, z
        else:
            a, z = self.parse_atom()
            value = self.value
            if value not in ("+", "*"):
                return a, z
            self.gettoken()
            z.addarc(a)
            if value == "+":
                return a, z
            else:
                return a, a

    def parse_atom(self):
        # ATOM: '(' RHS ')' | NAME | STRING
        if self.value == "(":
            self.gettoken()
            a, z = self.parse_rhs()
            self.expect(token.OP, ")")
            return a, z
        elif self.type in (token.NAME, token.STRING):
            a = NFAState()
            z = NFAState()
            a.addarc(z, self.value)
            self.gettoken()
            return a, z
        else:
            self.raise_error("expected (...) or NAME or STRING, got %s/%s",
                             self.type, self.value)

    def expect(self, type, value=None):
        if self.type != type or (value is not None and self.value != value):
            self.raise_error("expected %s/%s, got %s/%s",
                             type, value, self.type, self.value)
        value = self.value
        self.gettoken()
        return value

    def gettoken(self):
        tup = next(self.generator)
        while tup[0] in (tokenize.COMMENT, tokenize.NL):
            tup = next(self.generator)
        self.type, self.value, self.begin, self.end, self.line = tup
        #print token.tok_name[self.type], repr(self.value)

    def raise_error(self, msg, *args):
        if args:
            try:
                msg = msg % args
            except:
                msg = " ".join([msg] + list(map(str, args)))
        raise SyntaxError(msg, (self.filename, self.end[0],
                                self.end[1], self.line))
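# Illustrative sketch (not part of the original module): driving the
# generator above on a one-rule grammar to see the tables it produces.
# The helper name is hypothetical.
def _demo_generate_tables():
    import io
    gen = ParserGenerator("<demo>", io.StringIO("start: NAME NEWLINE\n"))
    g = gen.make_grammar()
    print(g.symbol2number)    # {'start': 256}
    print(g.dfas[256])        # the (DFA states, first set) pair for 'start'
    return g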
class NFAState(object):

    def __init__(self):
        self.arcs = [] # list of (label, NFAState) pairs

    def addarc(self, next, label=None):
        assert label is None or isinstance(label, str)
        assert isinstance(next, NFAState)
        self.arcs.append((label, next))

class DFAState(object):

    def __init__(self, nfaset, final):
        assert isinstance(nfaset, dict)
        assert isinstance(next(iter(nfaset)), NFAState)
        assert isinstance(final, NFAState)
        self.nfaset = nfaset
        self.isfinal = final in nfaset
        self.arcs = {} # map from label to DFAState

    def addarc(self, next, label):
        assert isinstance(label, str)
        assert label not in self.arcs
        assert isinstance(next, DFAState)
        self.arcs[label] = next

    def unifystate(self, old, new):
        for label, next in self.arcs.items():
            if next is old:
                self.arcs[label] = new

    def __eq__(self, other):
        # Equality test -- ignore the nfaset instance variable
        assert isinstance(other, DFAState)
        if self.isfinal != other.isfinal:
            return False
        # Can't just return self.arcs == other.arcs, because that
        # would invoke this method recursively, with cycles...
        if len(self.arcs) != len(other.arcs):
            return False
        for label, next in self.arcs.items():
            if next is not other.arcs.get(label):
                return False
        return True

    __hash__ = None # For Py3 compatibility.

def generate_grammar(filename="Grammar.txt"):
    p = ParserGenerator(filename)
    return p.make_grammar()

# ==== pgen2/token.py ====
#! /usr/libexec/platform-python

"""Token constants (from "token.h")."""

# Taken from Python (r53757) and modified to include some tokens
#   originally monkeypatched in by pgen2.tokenize

#--start constants--
ENDMARKER = 0
NAME = 1
NUMBER = 2
STRING = 3
NEWLINE = 4
INDENT = 5
DEDENT = 6
LPAR = 7
RPAR = 8
LSQB = 9
RSQB = 10
COLON = 11
COMMA = 12
SEMI = 13
PLUS = 14
MINUS = 15
STAR = 16
SLASH = 17
VBAR = 18
AMPER = 19
LESS = 20
GREATER = 21
EQUAL = 22
DOT = 23
PERCENT = 24
BACKQUOTE = 25
LBRACE = 26
RBRACE = 27
EQEQUAL = 28
NOTEQUAL = 29
LESSEQUAL = 30
GREATEREQUAL = 31
TILDE = 32
CIRCUMFLEX = 33
LEFTSHIFT = 34
RIGHTSHIFT = 35
DOUBLESTAR = 36
PLUSEQUAL = 37
MINEQUAL = 38
STAREQUAL = 39
SLASHEQUAL = 40
PERCENTEQUAL = 41
AMPEREQUAL = 42
VBAREQUAL = 43
CIRCUMFLEXEQUAL = 44
LEFTSHIFTEQUAL = 45
RIGHTSHIFTEQUAL = 46
DOUBLESTAREQUAL = 47
DOUBLESLASH = 48
DOUBLESLASHEQUAL = 49
AT = 50
ATEQUAL = 51
OP = 52
COMMENT = 53
NL = 54
RARROW = 55
AWAIT = 56
ASYNC = 57
ERRORTOKEN = 58
N_TOKENS = 59
NT_OFFSET = 256
#--end constants--

tok_name = {}
for _name, _value in list(globals().items()):
    if type(_value) is type(0):
        tok_name[_value] = _name

def ISTERMINAL(x):
    return x < NT_OFFSET

def ISNONTERMINAL(x):
    return x >= NT_OFFSET

def ISEOF(x):
    return x == ENDMARKER
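The numeric constants above drive everything else in pgen2; tok_name simply inverts the table for debugging output. A quick check:

from lib2to3.pgen2 import token

assert token.tok_name[token.OP] == "OP"
assert token.ISTERMINAL(token.NAME) and token.ISNONTERMINAL(256)
assert token.ISEOF(token.ENDMARKER)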
# ==== pgen2/tokenize.py ====
# Copyright (c) 2001, 2002, 2003, 2004, 2005, 2006 Python Software Foundation.
# All rights reserved.

"""Tokenization help for Python programs.

generate_tokens(readline) is a generator that breaks a stream of
text into Python tokens.  It accepts a readline-like method which is called
repeatedly to get the next line of input (or "" for EOF).  It generates
5-tuples with these members:

    the token type (see token.py)
    the token (a string)
    the starting (row, column) indices of the token (a 2-tuple of ints)
    the ending (row, column) indices of the token (a 2-tuple of ints)
    the original line (string)

It is designed to match the working of the Python tokenizer exactly, except
that it produces COMMENT tokens for comments and gives type OP for all
operators

Older entry points
    tokenize_loop(readline, tokeneater)
    tokenize(readline, tokeneater=printtoken)
are the same, except instead of generating tokens, tokeneater is a callback
function to which the 5 fields described above are passed as 5 arguments,
each time a new token is found."""

__author__ = 'Ka-Ping Yee <ping@lfw.org>'
__credits__ = \
    'GvR, ESR, Tim Peters, Thomas Wouters, Fred Drake, Skip Montanaro'

import string, re
from codecs import BOM_UTF8, lookup
from lib2to3.pgen2.token import *

from . import token
__all__ = [x for x in dir(token) if x[0] != '_'] + ["tokenize",
           "generate_tokens", "untokenize"]
del token

try:
    bytes
except NameError:
    # Support bytes type in Python <= 2.5, so 2to3 turns itself into
    # valid Python 3 code.
    bytes = str

def group(*choices): return '(' + '|'.join(choices) + ')'
def any(*choices): return group(*choices) + '*'
def maybe(*choices): return group(*choices) + '?'

Whitespace = r'[ \f\t]*'
Comment = r'#[^\r\n]*'
Ignore = Whitespace + any(r'\\\r?\n' + Whitespace) + maybe(Comment)
Name = r'[a-zA-Z_]\w*'

Binnumber = r'0[bB]_?[01]+(?:_[01]+)*'
Hexnumber = r'0[xX]_?[\da-fA-F]+(?:_[\da-fA-F]+)*[lL]?'
Octnumber = r'0[oO]?_?[0-7]+(?:_[0-7]+)*[lL]?'
Decnumber = group(r'[1-9]\d*(?:_\d+)*[lL]?', '0[lL]?')
Intnumber = group(Binnumber, Hexnumber, Octnumber, Decnumber)
Exponent = r'[eE][-+]?\d+(?:_\d+)*'
Pointfloat = group(r'\d+(?:_\d+)*\.(?:\d+(?:_\d+)*)?',
                   r'\.\d+(?:_\d+)*') + maybe(Exponent)
Expfloat = r'\d+(?:_\d+)*' + Exponent
Floatnumber = group(Pointfloat, Expfloat)
Imagnumber = group(r'\d+(?:_\d+)*[jJ]', Floatnumber + r'[jJ]')
Number = group(Imagnumber, Floatnumber, Intnumber)

# Tail end of ' string.
Single = r"[^'\\]*(?:\\.[^'\\]*)*'"
# Tail end of " string.
Double = r'[^"\\]*(?:\\.[^"\\]*)*"'
# Tail end of ''' string.
Single3 = r"[^'\\]*(?:(?:\\.|'(?!''))[^'\\]*)*'''"
# Tail end of """ string.
Double3 = r'[^"\\]*(?:(?:\\.|"(?!""))[^"\\]*)*"""'
_litprefix = r"(?:[uUrRbBfF]|[rR][bB]|[bBuU][rR])?"
Triple = group(_litprefix + "'''", _litprefix + '"""')
# Single-line ' or " string.
String = group(_litprefix + r"'[^\n'\\]*(?:\\.[^\n'\\]*)*'",
               _litprefix + r'"[^\n"\\]*(?:\\.[^\n"\\]*)*"')

# Because of leftmost-then-longest match semantics, be sure to put the
# longest operators first (e.g., if = came before ==, == would get
# recognized as two instances of =).
Operator = group(r"\*\*=?", r">>=?", r"<<=?", r"<>", r"!=",
                 r"//=?", r"->",
                 r"[+\-*/%&@|^=<>]=?",
                 r"~")

Bracket = '[][(){}]'
Special = group(r'\r?\n', r'[:;.,`@]')
Funny = group(Operator, Bracket, Special)

PlainToken = group(Number, Funny, String, Name)
Token = Ignore + PlainToken
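# Illustrative sketch (not part of the original module): group/any/maybe
# are plain regex combinators, and the composed patterns match as expected.
def _demo_regex_helpers():
    import re as _re
    assert group('a', 'b') == '(a|b)'
    assert any('a', 'b') == '(a|b)*'        # note: 'any' shadows the builtin here
    assert maybe('a', 'b') == '(a|b)?'
    assert _re.match(Number, '1_000.5e-3')  # a Pointfloat with an exponent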
# First (or only) line of ' or " string.
ContStr = group(_litprefix + r"'[^\n'\\]*(?:\\.[^\n'\\]*)*" +
                group("'", r'\\\r?\n'),
                _litprefix + r'"[^\n"\\]*(?:\\.[^\n"\\]*)*' +
                group('"', r'\\\r?\n'))
PseudoExtras = group(r'\\\r?\n', Comment, Triple)
PseudoToken = Whitespace + group(PseudoExtras, Number, Funny, ContStr, Name)

tokenprog, pseudoprog, single3prog, double3prog = list(map(
    re.compile, (Token, PseudoToken, Single3, Double3)))
endprogs = {"'": re.compile(Single), '"': re.compile(Double),
            "'''": single3prog, '"""': double3prog,
            "r'''": single3prog, 'r"""': double3prog,
            "u'''": single3prog, 'u"""': double3prog,
            "b'''": single3prog, 'b"""': double3prog,
            "f'''": single3prog, 'f"""': double3prog,
            "ur'''": single3prog, 'ur"""': double3prog,
            "br'''": single3prog, 'br"""': double3prog,
            "rb'''": single3prog, 'rb"""': double3prog,
            "R'''": single3prog, 'R"""': double3prog,
            "U'''": single3prog, 'U"""': double3prog,
            "B'''": single3prog, 'B"""': double3prog,
            "F'''": single3prog, 'F"""': double3prog,
            "uR'''": single3prog, 'uR"""': double3prog,
            "Ur'''": single3prog, 'Ur"""': double3prog,
            "UR'''": single3prog, 'UR"""': double3prog,
            "bR'''": single3prog, 'bR"""': double3prog,
            "Br'''": single3prog, 'Br"""': double3prog,
            "BR'''": single3prog, 'BR"""': double3prog,
            "rB'''": single3prog, 'rB"""': double3prog,
            "Rb'''": single3prog, 'Rb"""': double3prog,
            "RB'''": single3prog, 'RB"""': double3prog,
            'r': None, 'R': None,
            'u': None, 'U': None,
            'f': None, 'F': None,
            'b': None, 'B': None}

triple_quoted = {}
for t in ("'''", '"""',
          "r'''", 'r"""', "R'''", 'R"""',
          "u'''", 'u"""', "U'''", 'U"""',
          "b'''", 'b"""', "B'''", 'B"""',
          "f'''", 'f"""', "F'''", 'F"""',
          "ur'''", 'ur"""', "Ur'''", 'Ur"""',
          "uR'''", 'uR"""', "UR'''", 'UR"""',
          "br'''", 'br"""', "Br'''", 'Br"""',
          "bR'''", 'bR"""', "BR'''", 'BR"""',
          "rb'''", 'rb"""', "Rb'''", 'Rb"""',
          "rB'''", 'rB"""', "RB'''", 'RB"""',):
    triple_quoted[t] = t
single_quoted = {}
for t in ("'", '"',
          "r'", 'r"', "R'", 'R"',
          "u'", 'u"', "U'", 'U"',
          "b'", 'b"', "B'", 'B"',
          "f'", 'f"', "F'", 'F"',
          "ur'", 'ur"', "Ur'", 'Ur"',
          "uR'", 'uR"', "UR'", 'UR"',
          "br'", 'br"', "Br'", 'Br"',
          "bR'", 'bR"', "BR'", 'BR"',
          "rb'", 'rb"', "Rb'", 'Rb"',
          "rB'", 'rB"', "RB'", 'RB"',):
    single_quoted[t] = t

tabsize = 8
""" try: tokenize_loop(readline, tokeneater) except StopTokenizing: pass # backwards compatible interface def tokenize_loop(readline, tokeneater): for token_info in generate_tokens(readline): tokeneater(*token_info) class Untokenizer: def __init__(self): self.tokens = [] self.prev_row = 1 self.prev_col = 0 def add_whitespace(self, start): row, col = start assert row <= self.prev_row col_offset = col - self.prev_col if col_offset: self.tokens.append(" " * col_offset) def untokenize(self, iterable): for t in iterable: if len(t) == 2: self.compat(t, iterable) break tok_type, token, start, end, line = t self.add_whitespace(start) self.tokens.append(token) self.prev_row, self.prev_col = end if tok_type in (NEWLINE, NL): self.prev_row += 1 self.prev_col = 0 return "".join(self.tokens) def compat(self, token, iterable): startline = False indents = [] toks_append = self.tokens.append toknum, tokval = token if toknum in (NAME, NUMBER): tokval += ' ' if toknum in (NEWLINE, NL): startline = True for tok in iterable: toknum, tokval = tok[:2] if toknum in (NAME, NUMBER, ASYNC, AWAIT): tokval += ' ' if toknum == INDENT: indents.append(tokval) continue elif toknum == DEDENT: indents.pop() continue elif toknum in (NEWLINE, NL): startline = True elif startline and indents: toks_append(indents[-1]) startline = False toks_append(tokval) cookie_re = re.compile(r'^[ \t\f]*#.*?coding[:=][ \t]*([-\w.]+)', re.ASCII) blank_re = re.compile(br'^[ \t\f]*(?:[#\r\n]|$)', re.ASCII) def _get_normal_name(orig_enc): """Imitates get_normal_name in tokenizer.c.""" # Only care about the first 12 characters. enc = orig_enc[:12].lower().replace("_", "-") if enc == "utf-8" or enc.startswith("utf-8-"): return "utf-8" if enc in ("latin-1", "iso-8859-1", "iso-latin-1") or \ enc.startswith(("latin-1-", "iso-8859-1-", "iso-latin-1-")): return "iso-8859-1" return orig_enc def detect_encoding(readline): """ The detect_encoding() function is used to detect the encoding that should be used to decode a Python source file. It requires one argument, readline, in the same way as the tokenize() generator. It will call readline a maximum of twice, and return the encoding used (as a string) and a list of any lines (left as bytes) it has read in. It detects the encoding from the presence of a utf-8 bom or an encoding cookie as specified in pep-0263. If both a bom and a cookie are present, but disagree, a SyntaxError will be raised. If the encoding cookie is an invalid charset, raise a SyntaxError. Note that if a utf-8 bom is found, 'utf-8-sig' is returned. If no encoding is specified, then the default of 'utf-8' will be returned. 
""" bom_found = False encoding = None default = 'utf-8' def read_or_stop(): try: return readline() except StopIteration: return bytes() def find_cookie(line): try: line_string = line.decode('ascii') except UnicodeDecodeError: return None match = cookie_re.match(line_string) if not match: return None encoding = _get_normal_name(match.group(1)) try: codec = lookup(encoding) except LookupError: # This behaviour mimics the Python interpreter raise SyntaxError("unknown encoding: " + encoding) if bom_found: if codec.name != 'utf-8': # This behaviour mimics the Python interpreter raise SyntaxError('encoding problem: utf-8') encoding += '-sig' return encoding first = read_or_stop() if first.startswith(BOM_UTF8): bom_found = True first = first[3:] default = 'utf-8-sig' if not first: return default, [] encoding = find_cookie(first) if encoding: return encoding, [first] if not blank_re.match(first): return default, [first] second = read_or_stop() if not second: return default, [first] encoding = find_cookie(second) if encoding: return encoding, [first, second] return default, [first, second] def untokenize(iterable): """Transform tokens back into Python source code. Each element returned by the iterable must be a token sequence with at least two elements, a token number and token value. If only two tokens are passed, the resulting output is poor. Round-trip invariant for full input: Untokenized source will match input source exactly Round-trip invariant for limited intput: # Output text will tokenize the back to the input t1 = [tok[:2] for tok in generate_tokens(f.readline)] newcode = untokenize(t1) readline = iter(newcode.splitlines(1)).next t2 = [tok[:2] for tokin generate_tokens(readline)] assert t1 == t2 """ ut = Untokenizer() return ut.untokenize(iterable) def generate_tokens(readline): """ The generate_tokens() generator requires one argument, readline, which must be a callable object which provides the same interface as the readline() method of built-in file objects. Each call to the function should return one line of input as a string. Alternately, readline can be a callable function terminating with StopIteration: readline = open(myfile).next # Example of alternate readline The generator produces 5-tuples with these members: the token type; the token string; a 2-tuple (srow, scol) of ints specifying the row and column where the token begins in the source; a 2-tuple (erow, ecol) of ints specifying the row and column where the token ends in the source; and the line on which the token was found. The line passed is the logical line; continuation lines are included. 
""" lnum = parenlev = continued = 0 namechars, numchars = string.ascii_letters + '_', '0123456789' contstr, needcont = '', 0 contline = None indents = [0] # 'stashed' and 'async_*' are used for async/await parsing stashed = None async_def = False async_def_indent = 0 async_def_nl = False while 1: # loop over lines in stream try: line = readline() except StopIteration: line = '' lnum = lnum + 1 pos, max = 0, len(line) if contstr: # continued string if not line: raise TokenError("EOF in multi-line string", strstart) endmatch = endprog.match(line) if endmatch: pos = end = endmatch.end(0) yield (STRING, contstr + line[:end], strstart, (lnum, end), contline + line) contstr, needcont = '', 0 contline = None elif needcont and line[-2:] != '\\\n' and line[-3:] != '\\\r\n': yield (ERRORTOKEN, contstr + line, strstart, (lnum, len(line)), contline) contstr = '' contline = None continue else: contstr = contstr + line contline = contline + line continue elif parenlev == 0 and not continued: # new statement if not line: break column = 0 while pos < max: # measure leading whitespace if line[pos] == ' ': column = column + 1 elif line[pos] == '\t': column = (column//tabsize + 1)*tabsize elif line[pos] == '\f': column = 0 else: break pos = pos + 1 if pos == max: break if stashed: yield stashed stashed = None if line[pos] in '#\r\n': # skip comments or blank lines if line[pos] == '#': comment_token = line[pos:].rstrip('\r\n') nl_pos = pos + len(comment_token) yield (COMMENT, comment_token, (lnum, pos), (lnum, pos + len(comment_token)), line) yield (NL, line[nl_pos:], (lnum, nl_pos), (lnum, len(line)), line) else: yield ((NL, COMMENT)[line[pos] == '#'], line[pos:], (lnum, pos), (lnum, len(line)), line) continue if column > indents[-1]: # count indents or dedents indents.append(column) yield (INDENT, line[:pos], (lnum, 0), (lnum, pos), line) while column < indents[-1]: if column not in indents: raise IndentationError( "unindent does not match any outer indentation level", ("<tokenize>", lnum, pos, line)) indents = indents[:-1] if async_def and async_def_indent >= indents[-1]: async_def = False async_def_nl = False async_def_indent = 0 yield (DEDENT, '', (lnum, pos), (lnum, pos), line) if async_def and async_def_nl and async_def_indent >= indents[-1]: async_def = False async_def_nl = False async_def_indent = 0 else: # continued statement if not line: raise TokenError("EOF in multi-line statement", (lnum, 0)) continued = 0 while pos < max: pseudomatch = pseudoprog.match(line, pos) if pseudomatch: # scan for tokens start, end = pseudomatch.span(1) spos, epos, pos = (lnum, start), (lnum, end), end token, initial = line[start:end], line[start] if initial in numchars or \ (initial == '.' 
if __name__ == '__main__':                     # testing
    import sys
    if len(sys.argv) > 1: tokenize(open(sys.argv[1]).readline)
    else: tokenize(sys.stdin.readline)

# ==== Grammar.txt ====
# Grammar for 2to3. This grammar supports Python 2.x and 3.x.

# NOTE WELL: You should also follow all the steps listed at
# https://devguide.python.org/grammar/

# Start symbols for the grammar:
#       file_input is a module or sequence of commands read from an input file;
#       single_input is a single interactive statement;
#       eval_input is the input for the eval() and input() functions.
# NB: compound_stmt in single_input is followed by extra NEWLINE!
file_input: (NEWLINE | stmt)* ENDMARKER
single_input: NEWLINE | simple_stmt | compound_stmt NEWLINE
eval_input: testlist NEWLINE* ENDMARKER

decorator: '@' dotted_name [ '(' [arglist] ')' ] NEWLINE
decorators: decorator+
decorated: decorators (classdef | funcdef | async_funcdef)
async_funcdef: ASYNC funcdef
funcdef: 'def' NAME parameters ['->' test] ':' suite
parameters: '(' [typedargslist] ')'
typedargslist: ((tfpdef ['=' test] ',')*
                ('*' [tname] (',' tname ['=' test])* [',' ['**' tname [',']]]
                 | '**' tname [','])
                | tfpdef ['=' test] (',' tfpdef ['=' test])* [','])
tname: NAME [':' test]
tfpdef: tname | '(' tfplist ')'
tfplist: tfpdef (',' tfpdef)* [',']
varargslist: ((vfpdef ['=' test] ',')*
              ('*' [vname] (',' vname ['=' test])* [',' ['**' vname [',']]]
               | '**' vname [','])
              | vfpdef ['=' test] (',' vfpdef ['=' test])* [','])
vname: NAME
vfpdef: vname | '(' vfplist ')'
vfplist: vfpdef (',' vfpdef)* [',']

stmt: simple_stmt | compound_stmt
simple_stmt: small_stmt (';' small_stmt)* [';'] NEWLINE
small_stmt: (expr_stmt | print_stmt | del_stmt | pass_stmt | flow_stmt |
             import_stmt | global_stmt | exec_stmt | assert_stmt)
expr_stmt: testlist_star_expr (annassign | augassign (yield_expr|testlist) |
                     ('=' (yield_expr|testlist_star_expr))*)
annassign: ':' test ['=' test]
testlist_star_expr: (test|star_expr) (',' (test|star_expr))* [',']
augassign: ('+=' | '-=' | '*=' | '@=' | '/=' | '%=' | '&=' | '|=' | '^=' |
            '<<=' | '>>=' | '**=' | '//=')
# For normal and annotated assignments, additional restrictions enforced by the interpreter
print_stmt: 'print' ( [ test (',' test)* [','] ] |
                      '>>' test [ (',' test)+ [','] ] )
del_stmt: 'del' exprlist
pass_stmt: 'pass'
flow_stmt: break_stmt | continue_stmt | return_stmt | raise_stmt | yield_stmt
break_stmt: 'break'
continue_stmt: 'continue'
return_stmt: 'return' [testlist]
yield_stmt: yield_expr
raise_stmt: 'raise' [test ['from' test | ',' test [',' test]]]
import_stmt: import_name | import_from
import_name: 'import' dotted_as_names
import_from: ('from' ('.'* dotted_name | '.'+)
              'import' ('*' | '(' import_as_names ')' | import_as_names))
import_as_name: NAME ['as' NAME]
dotted_as_name: dotted_name ['as' NAME]
import_as_names: import_as_name (',' import_as_name)* [',']
dotted_as_names: dotted_as_name (',' dotted_as_name)*
dotted_name: NAME ('.' NAME)*
global_stmt: ('global' | 'nonlocal') NAME (',' NAME)*
exec_stmt: 'exec' expr ['in' test [',' test]]
assert_stmt: 'assert' test [',' test]

compound_stmt: if_stmt | while_stmt | for_stmt | try_stmt | with_stmt | funcdef | classdef | decorated | async_stmt
async_stmt: ASYNC (funcdef | with_stmt | for_stmt)
if_stmt: 'if' test ':' suite ('elif' test ':' suite)* ['else' ':' suite]
while_stmt: 'while' test ':' suite ['else' ':' suite]
for_stmt: 'for' exprlist 'in' testlist ':' suite ['else' ':' suite]
try_stmt: ('try' ':' suite
           ((except_clause ':' suite)+ ['else' ':' suite]
            ['finally' ':' suite] | 'finally' ':' suite))
with_stmt: 'with' with_item (',' with_item)* ':' suite
with_item: test ['as' expr]
with_var: 'as' expr
# NB compile.c makes sure that the default except clause is last
except_clause: 'except' [test [(',' | 'as') test]]
suite: simple_stmt | NEWLINE INDENT stmt+ DEDENT

# Backward compatibility cruft to support:
# [ x for x in lambda: True, lambda: False if x() ]
# even while also allowing:
# lambda x: 5 if x else 2
# (But not a mix of the two)
testlist_safe: old_test [(',' old_test)+ [',']]
old_test: or_test | old_lambdef
old_lambdef: 'lambda' [varargslist] ':' old_test

test: or_test ['if' or_test 'else' test] | lambdef
or_test: and_test ('or' and_test)*
and_test: not_test ('and' not_test)*
not_test: 'not' not_test | comparison
comparison: expr (comp_op expr)*
comp_op: '<'|'>'|'=='|'>='|'<='|'<>'|'!='|'in'|'not' 'in'|'is'|'is' 'not'
star_expr: '*' expr
expr: xor_expr ('|' xor_expr)*
xor_expr: and_expr ('^' and_expr)*
and_expr: shift_expr ('&' shift_expr)*
shift_expr: arith_expr (('<<'|'>>') arith_expr)*
arith_expr: term (('+'|'-') term)*
term: factor (('*'|'@'|'/'|'%'|'//') factor)*
factor: ('+'|'-'|'~') factor | power
power: [AWAIT] atom trailer* ['**' factor]
atom: ('(' [yield_expr|testlist_gexp] ')' |
       '[' [listmaker] ']' |
       '{' [dictsetmaker] '}' |
       '`' testlist1 '`' |
       NAME | NUMBER | STRING+ | '.' '.' '.')
listmaker: (test|star_expr) ( comp_for | (',' (test|star_expr))* [','] )
testlist_gexp: (test|star_expr) ( comp_for | (',' (test|star_expr))* [','] )
lambdef: 'lambda' [varargslist] ':' test
trailer: '(' [arglist] ')' | '[' subscriptlist ']' | '.' NAME
subscriptlist: subscript (',' subscript)* [',']
subscript: test | [test] ':' [test] [sliceop]
sliceop: ':' [test]
exprlist: (expr|star_expr) (',' (expr|star_expr))* [',']
testlist: test (',' test)* [',']
dictsetmaker: ( ((test ':' test | '**' expr)
                 (comp_for | (',' (test ':' test | '**' expr))* [','])) |
                ((test | star_expr)
                 (comp_for | (',' (test | star_expr))* [','])) )

classdef: 'class' NAME ['(' [arglist] ')'] ':' suite

arglist: argument (',' argument)* [',']

# "test '=' test" is really "keyword '=' test", but we have no such token.
# These need to be in a single rule to avoid grammar that is ambiguous
# to our LL(1) parser. Even though 'test' includes '*expr' in star_expr,
# we explicitly match '*' here, too, to give it proper precedence.
# Illegal combinations and orderings are blocked in ast.c:
# multiple (test comp_for) arguments are blocked; keyword unpackings
# that precede iterable unpackings are blocked; etc.
argument: ( test [comp_for] |
            test '=' test |
            '**' expr |
            star_expr )

comp_iter: comp_for | comp_if
comp_for: [ASYNC] 'for' exprlist 'in' testlist_safe [comp_iter]
comp_if: 'if' old_test [comp_iter]

testlist1: test (',' test)*

# not used in grammar, but may appear in "node" passed from Parser to Compiler
encoding_decl: NAME

yield_expr: 'yield' [yield_arg]
yield_arg: 'from' test | testlist
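A closing sketch tying the pieces together: feeding the grammar file above to the pgen module reproduces the pickled tables that ship next to it (the placeholder below marks that cache for Python 3.6.8). It assumes Grammar.txt is present on disk:

from lib2to3.pgen2 import pgen

g = pgen.generate_grammar("Grammar.txt")
assert g.symbol2number["file_input"] == g.start == 256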
# ==== Grammar3.6.8.final.0.pickle ==== (binary pickled grammar tables; contents omitted)
KQK �r@ a]rA KK�rB a]rC (KzK �rD K�K�rE K�K�rF KK �rG e]rH K.K�rI a]rJ KQK�rK a]rL (K�K�rM KK�rN ee]rO (]rP (KK�rQ K3K�rR K�K�rS e]rT (K2K�rU K�K�rV KK�rW e]rX K�K�rY a]rZ (K2K�r[ K0K�r\ KK�r] e]r^ (K3K�r_ K�K �r` KK�ra e]rb (K2K�rc KK�rd e]re (K2K �rf KK�rg e]rh (KK�ri K3K�rj K�K�rk KK�rl e]rm K/K�rn a]ro (K2K�rp K0K�rq KK �rr e]rs KK �rt a]ru (K2K�rv KK�rw e]rx K/K�ry ae]rz (]r{ (KK�r| K3K�r} K�K�r~ e]r (K2K�r� K�K�r� KK�r� e]r� K�K�r� a]r� (K2K�r� K0K�r� KK�r� e]r� (K3K�r� K�K �r� KK�r� e]r� (K2K�r� KK�r� e]r� (K2K �r� KK�r� e]r� (KK�r� K3K�r� K�K�r� KK�r� e]r� K/K�r� a]r� (K2K�r� K0K�r� KK �r� e]r� KK �r� a]r� (K2K�r� KK�r� e]r� K/K�r� ae]r� (]r� (KK�r� K�K�r� e]r� K�K�r� a]r� KK�r� a]r� K;K�r� ae]r� (]r� K�K�r� a]r� (K2K�r� KK�r� e]r� (K�K�r� KK�r� ee]r� (]r� K'K�r� a]r� KK�r� ae]r� (]r� K K�r� a]r� K/K�r� a]r� K.K�r� a]r� KQK�r� a]r� (KzK�r� KK�r� e]r� K.K�r� a]r� KQK�r� a]r� KK�r� ae]r� (]r� K/K�r� a]r� (KjK�r� KK�r� e]r� K5K�r� a]r� KK�r� ae]r� (]r� K!K�r� a]r� K�K�r� a]r� (K2K�r� K.K�r� e]r� KQK�r� a]r� KK�r� ae]r� (]r� KjK�r� a]r� K5K�r� a]r� KK�r� ae]r� (]r� K�K�r� a]r� (K�K�r� KK�r� ee]r� (]r� (KK�r� KlK�r� e]r� K/K�r� a]r� KK�r� ae]r� (]r� K"K�r� a]r� (K�K�r� KK�r� e]rKK�rae]r(]rK=K�ra]rKK�raeeXsymbol2labelrh)Rr(Xand_exprr K�Xand_testr K�X annassignrKqXarglistrKPXargumentr K1X arith_exprrK�Xassert_stmtrK�X async_funcdefrKgX async_stmtrK`XatomrK�X augassignrKrX break_stmtrKuXclassdefrKaXcomp_forrK6Xcomp_ifrKWX comp_iterrKUXcomp_oprK_X comparisonrK�X compound_stmtrK�X continue_stmtrKvX decoratedrKbX decoratorrKiX decoratorsrKfXdel_stmtr K�Xdictsetmakerr!KBXdotted_as_namer"KkXdotted_as_namesr#K�Xdotted_namer$KhX except_clauser%K�X exec_stmtr&K�Xexprr'K5X expr_stmtr(K�Xexprlistr)KRXfactorr*KtX flow_stmtr+K�Xfor_stmtr,K9Xfuncdefr-K8Xglobal_stmtr.K�Xif_stmtr/KcXimport_as_namer0K~Ximport_as_namesr1KXimport_fromr2K�Ximport_namer3K�Ximport_stmtr4K�Xlambdefr5K�X listmakerr6K?Xnot_testr7K,Xold_lambdefr8K�Xold_testr9KVXor_testr:K�X parametersr;K{X pass_stmtr<K�Xpowerr=KsX print_stmtr>K�X raise_stmtr?KwXreturn_stmtr@KxX shift_exprrAK*Xsimple_stmtrBK�XsliceoprCK�X small_stmtrDK�X star_exprrEK4XstmtrFKX subscriptrGK�X subscriptlistrHK�XsuiterIKQXtermrJK7XtestrKK/XtestlistrLKlX testlist1rMK@X testlist_gexprNK<X testlist_saferOKTXtestlist_star_exprrPKpXtfpdefrQK�XtfplistrRK�XtnamerSK�XtrailerrTK�Xtry_stmtrUKdX typedargslistrVK�XvarargslistrWK�XvfpdefrXK�XvfplistrYK�XvnamerZK�X while_stmtr[KeX with_itemr\K�X with_stmtr]K:Xxor_exprr^KnX yield_argr_K�X yield_exprr`K=X yield_stmtraKyuX symbol2numberrbh)Rrc(j Mj!Mj"Mj#Mj$Mj%Mj&Mj'Mj(M j)M j*Mj+Mj,M j-Mj.Mj/Mj0Mj1Mj2Mj3Mj4Mj5Mj6Mj7Mj8Mj9Mj:Mj;Mj<Mj=Mj>Mj?M j@M!jAM"jBM#jCM$jMjDM%jEM&jFM'jGM(jHM)jIM*jJM+jKM,jLM-jMM.jNM/jOM0jPM1jQM2jRM3jSM4jTM5jUM6jVM7jWM8jXM9jYM:jZM;j[M<j\M=j]M>j^M?j_M@j`MAjaMBjbMCjcMDjdMEjeMFjfMGjgMHjhMIjiMJjjMKjkMLjlMMjmMNjnMOjoMPjpMQjqMRjrMSjsMTjtMUjuMVjvMWjwMXjxMYjyMZjzM[j{M\j|M]uXtokensrdh)Rre(KKKK'KK(KK)KKKK�KK�KKKK;K K K K>KK.KK2K K�KKKKKKKK�KKoKK+KKYKK\KK0KKKK�KKKK#KKAKK[KKXKKZKK]K K$K!K�K"K�K#K�K$K3K%KGK&KHK'KFK(KJK)KCK*KDK+KOK,KNK-KKK.KLK/KEK0K�K1KIK2K K3KMK7K|K8K&K9K%uu.PK{��\�� �PatternGrammar.txtnu�[���# Copyright 2006 Google, Inc. All Rights Reserved. # Licensed to PSF under a Contributor Agreement. # A grammar to describe tree matching patterns. # Not shown here: # - 'TOKEN' stands for any token (leaf node) # - 'any' stands for any node (leaf or interior) # With 'any' we can still specify the sub-structure. # The start symbol is 'Matcher'. 
Matcher: Alternatives ENDMARKER

Alternatives: Alternative ('|' Alternative)*

Alternative: (Unit | NegatedUnit)+

Unit: [NAME '='] ( STRING [Repeater]
                 | NAME [Details] [Repeater]
                 | '(' Alternatives ')' [Repeater]
                 | '[' Alternatives ']'
                 )

NegatedUnit: 'not' (STRING | NAME [Details] | '(' Alternatives ')')

Repeater: '*' | '+' | '{' NUMBER [',' NUMBER] '}'

Details: '<' Alternatives '>'

PatternGrammar3.6.8.final.0.pickle:
[Binary pickle data omitted: the parse tables generated from PatternGrammar.txt above (dfas, keywords, labels, states, symbol2number, tokens). Not human-readable.]

__init__.py:
#empty

__main__.py:
import sys from .main import main sys.exit(main("lib2to3.fixes"))

btm_matcher.py:
"""A bottom-up tree matching algorithm implementation meant to speed up 2to3's matching process. After the tree patterns are reduced to their rarest linear path, a linear Aho-Corasick automaton is created. The linear automaton traverses the linear paths from the leaves to the root of the AST and returns a set of nodes for further matching. This reduces significantly the number of candidate nodes.""" __author__ = "George Boutsioukis <gboutsioukis@gmail.com>" import logging import itertools from collections import defaultdict from . import pytree from .btm_utils import reduce_tree class BMNode(object): """Class for a node of the Aho-Corasick automaton used in matching""" count = itertools.count() def __init__(self): self.transition_table = {} self.fixers = [] self.id = next(BMNode.count) self.content = '' class BottomMatcher(object): """The main matcher class. After instantiating the patterns should be added using the add_fixer method""" def __init__(self): self.match = set() self.root = BMNode() self.nodes = [self.root] self.fixers = [] self.logger = logging.getLogger("RefactoringTool") def add_fixer(self, fixer): """Reduces a fixer's pattern tree to a linear path and adds it to the matcher(a common Aho-Corasick automaton).
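Editor's aside (not part of the dumped source): a minimal sketch of what the PatternGrammar above describes in practice, compiling a pattern string and matching it against a parse tree. It assumes the stock lib2to3 package is importable; the pattern and the parsed line are illustrative. The dump resumes below.

from lib2to3 import patcomp, pygram, pytree
from lib2to3.pgen2 import driver

# A pattern written in the PatternGrammar: Units with Details and a name binding (key=any).
pat = patcomp.compile_pattern(
    "power< 'd' trailer< '.' 'has_key' > trailer< '(' key=any ')' > >")

drv = driver.Driver(pygram.python_grammar, convert=pytree.convert)
tree = drv.parse_string("d.has_key(k)\n")
for node in tree.pre_order():
    results = {}
    if pat.match(node, results):
        print("matched:", node, "-> key =", results["key"])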
The fixer is appended on the matching states and called when they are reached""" self.fixers.append(fixer) tree = reduce_tree(fixer.pattern_tree) linear = tree.get_linear_subpattern() match_nodes = self.add(linear, start=self.root) for match_node in match_nodes: match_node.fixers.append(fixer) def add(self, pattern, start): "Recursively adds a linear pattern to the AC automaton" #print("adding pattern", pattern, "to", start) if not pattern: #print("empty pattern") return [start] if isinstance(pattern[0], tuple): #alternatives #print("alternatives") match_nodes = [] for alternative in pattern[0]: #add all alternatives, and add the rest of the pattern #to each end node end_nodes = self.add(alternative, start=start) for end in end_nodes: match_nodes.extend(self.add(pattern[1:], end)) return match_nodes else: #single token #not last if pattern[0] not in start.transition_table: #transition did not exist, create new next_node = BMNode() start.transition_table[pattern[0]] = next_node else: #transition exists already, follow next_node = start.transition_table[pattern[0]] if pattern[1:]: end_nodes = self.add(pattern[1:], start=next_node) else: end_nodes = [next_node] return end_nodes def run(self, leaves): """The main interface with the bottom matcher. The tree is traversed from the bottom using the constructed automaton. Nodes are only checked once as the tree is retraversed. When the automaton fails, we give it one more shot(in case the above tree matches as a whole with the rejected leaf), then we break for the next leaf. There is the special case of multiple arguments(see code comments) where we recheck the nodes Args: The leaves of the AST tree to be matched Returns: A dictionary of node matches with fixers as the keys """ current_ac_node = self.root results = defaultdict(list) for leaf in leaves: current_ast_node = leaf while current_ast_node: current_ast_node.was_checked = True for child in current_ast_node.children: # multiple statements, recheck if isinstance(child, pytree.Leaf) and child.value == ";": current_ast_node.was_checked = False break if current_ast_node.type == 1: #name node_token = current_ast_node.value else: node_token = current_ast_node.type if node_token in current_ac_node.transition_table: #token matches current_ac_node = current_ac_node.transition_table[node_token] for fixer in current_ac_node.fixers: if not fixer in results: results[fixer] = [] results[fixer].append(current_ast_node) else: #matching failed, reset automaton current_ac_node = self.root if (current_ast_node.parent is not None and current_ast_node.parent.was_checked): #the rest of the tree upwards has been checked, next leaf break #recheck the rejected node once from the root if node_token in current_ac_node.transition_table: #token matches current_ac_node = current_ac_node.transition_table[node_token] for fixer in current_ac_node.fixers: if not fixer in results.keys(): results[fixer] = [] results[fixer].append(current_ast_node) current_ast_node = current_ast_node.parent return results def print_ac(self): "Prints a graphviz diagram of the BM automaton(for debugging)" print("digraph g{") def print_node(node): for subnode_key in node.transition_table.keys(): subnode = node.transition_table[subnode_key] print("%d -> %d [label=%s] //%s" % (node.id, subnode.id, type_repr(subnode_key), str(subnode.fixers))) if subnode_key == 1: print(subnode.content) print_node(subnode) print_node(self.root) print("}") # taken from pytree.py for debugging; only used by print_ac _type_reprs = {} def type_repr(type_num): global 
_type_reprs if not _type_reprs: from .pygram import python_symbols # printing tokens is possible but not as useful # from .pgen2 import token // token.__dict__.items(): for name, val in python_symbols.__dict__.items(): if type(val) == int: _type_reprs[val] = name return _type_reprs.setdefault(type_num, type_num)

btm_utils.py:
"Utility functions used by the btm_matcher module" from . import pytree from .pgen2 import grammar, token from .pygram import pattern_symbols, python_symbols syms = pattern_symbols pysyms = python_symbols tokens = grammar.opmap token_labels = token TYPE_ANY = -1 TYPE_ALTERNATIVES = -2 TYPE_GROUP = -3 class MinNode(object): """This class serves as an intermediate representation of the pattern tree during the conversion to sets of leaf-to-root subpatterns""" def __init__(self, type=None, name=None): self.type = type self.name = name self.children = [] self.leaf = False self.parent = None self.alternatives = [] self.group = [] def __repr__(self): return str(self.type) + ' ' + str(self.name) def leaf_to_root(self): """Internal method. Returns a characteristic path of the pattern tree. This method must be run for all leaves until the linear subpatterns are merged into a single""" node = self subp = [] while node: if node.type == TYPE_ALTERNATIVES: node.alternatives.append(subp) if len(node.alternatives) == len(node.children): #last alternative subp = [tuple(node.alternatives)] node.alternatives = [] node = node.parent continue else: node = node.parent subp = None break if node.type == TYPE_GROUP: node.group.append(subp) #probably should check the number of leaves if len(node.group) == len(node.children): subp = get_characteristic_subpattern(node.group) node.group = [] node = node.parent continue else: node = node.parent subp = None break if node.type == token_labels.NAME and node.name: #in case of type=name, use the name instead subp.append(node.name) else: subp.append(node.type) node = node.parent return subp def get_linear_subpattern(self): """Drives the leaf_to_root method. The reason that leaf_to_root must be run multiple times is because we need to reject 'group' matches; for example the alternative form (a | b c) creates a group [b c] that needs to be matched. Since matching multiple linear patterns overcomes the automaton's capabilities, leaf_to_root merges each group into a single choice based on 'characteristic'ity, i.e. (a|b c) -> (a|b) if b more characteristic than c Returns: The most 'characteristic'(as defined by get_characteristic_subpattern) path for the compiled pattern tree. """ for l in self.leaves(): subp = l.leaf_to_root() if subp: return subp def leaves(self): "Generator that returns the leaves of the tree" for child in self.children: yield from child.leaves() if not self.children: yield self def reduce_tree(node, parent=None): """ Internal function. Reduces a compiled pattern tree to an intermediate representation suitable for feeding the automaton. This also trims off any optional pattern elements(like [a], a*).
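Editor's aside (not part of the dumped source): driving the BottomMatcher defined above takes only a few lines. The sketch assumes stock lib2to3; FixHasKey is one of its standard, BM_compatible fixers. The dump resumes below.

from lib2to3 import pygram, pytree
from lib2to3.pgen2 import driver
from lib2to3.btm_matcher import BottomMatcher
from lib2to3.fixes.fix_has_key import FixHasKey

drv = driver.Driver(pygram.python_grammar, convert=pytree.convert)
tree = drv.parse_string("if d.has_key(k):\n    pass\n")

bm = BottomMatcher()
bm.add_fixer(FixHasKey(options={}, log=[]))  # pattern reduced to a linear path
hits = bm.run(tree.leaves())                 # dict: fixer -> [candidate AST nodes]
for fixer, nodes in hits.items():
    print(type(fixer).__name__, [str(n) for n in nodes])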
""" new_node = None #switch on the node type if node.type == syms.Matcher: #skip node = node.children[0] if node.type == syms.Alternatives : #2 cases if len(node.children) <= 2: #just a single 'Alternative', skip this node new_node = reduce_tree(node.children[0], parent) else: #real alternatives new_node = MinNode(type=TYPE_ALTERNATIVES) #skip odd children('|' tokens) for child in node.children: if node.children.index(child)%2: continue reduced = reduce_tree(child, new_node) if reduced is not None: new_node.children.append(reduced) elif node.type == syms.Alternative: if len(node.children) > 1: new_node = MinNode(type=TYPE_GROUP) for child in node.children: reduced = reduce_tree(child, new_node) if reduced: new_node.children.append(reduced) if not new_node.children: # delete the group if all of the children were reduced to None new_node = None else: new_node = reduce_tree(node.children[0], parent) elif node.type == syms.Unit: if (isinstance(node.children[0], pytree.Leaf) and node.children[0].value == '('): #skip parentheses return reduce_tree(node.children[1], parent) if ((isinstance(node.children[0], pytree.Leaf) and node.children[0].value == '[') or (len(node.children)>1 and hasattr(node.children[1], "value") and node.children[1].value == '[')): #skip whole unit if its optional return None leaf = True details_node = None alternatives_node = None has_repeater = False repeater_node = None has_variable_name = False for child in node.children: if child.type == syms.Details: leaf = False details_node = child elif child.type == syms.Repeater: has_repeater = True repeater_node = child elif child.type == syms.Alternatives: alternatives_node = child if hasattr(child, 'value') and child.value == '=': # variable name has_variable_name = True #skip variable name if has_variable_name: #skip variable name, '=' name_leaf = node.children[2] if hasattr(name_leaf, 'value') and name_leaf.value == '(': # skip parenthesis name_leaf = node.children[3] else: name_leaf = node.children[0] #set node type if name_leaf.type == token_labels.NAME: #(python) non-name or wildcard if name_leaf.value == 'any': new_node = MinNode(type=TYPE_ANY) else: if hasattr(token_labels, name_leaf.value): new_node = MinNode(type=getattr(token_labels, name_leaf.value)) else: new_node = MinNode(type=getattr(pysyms, name_leaf.value)) elif name_leaf.type == token_labels.STRING: #(python) name or character; remove the apostrophes from #the string value name = name_leaf.value.strip("'") if name in tokens: new_node = MinNode(type=tokens[name]) else: new_node = MinNode(type=token_labels.NAME, name=name) elif name_leaf.type == syms.Alternatives: new_node = reduce_tree(alternatives_node, parent) #handle repeaters if has_repeater: if repeater_node.children[0].value == '*': #reduce to None new_node = None elif repeater_node.children[0].value == '+': #reduce to a single occurrence i.e. 
do nothing pass else: #TODO: handle {min, max} repeaters raise NotImplementedError pass #add children if details_node and new_node is not None: for child in details_node.children[1:-1]: #skip '<', '>' markers reduced = reduce_tree(child, new_node) if reduced is not None: new_node.children.append(reduced) if new_node: new_node.parent = parent return new_node def get_characteristic_subpattern(subpatterns): """Picks the most characteristic from a list of linear patterns Current order used is: names > common_names > common_chars """ if not isinstance(subpatterns, list): return subpatterns if len(subpatterns)==1: return subpatterns[0] # first pick out the ones containing variable names subpatterns_with_names = [] subpatterns_with_common_names = [] common_names = ['in', 'for', 'if' , 'not', 'None'] subpatterns_with_common_chars = [] common_chars = "[]().,:" for subpattern in subpatterns: if any(rec_test(subpattern, lambda x: type(x) is str)): if any(rec_test(subpattern, lambda x: isinstance(x, str) and x in common_chars)): subpatterns_with_common_chars.append(subpattern) elif any(rec_test(subpattern, lambda x: isinstance(x, str) and x in common_names)): subpatterns_with_common_names.append(subpattern) else: subpatterns_with_names.append(subpattern) if subpatterns_with_names: subpatterns = subpatterns_with_names elif subpatterns_with_common_names: subpatterns = subpatterns_with_common_names elif subpatterns_with_common_chars: subpatterns = subpatterns_with_common_chars # of the remaining subpatterns pick out the longest one return max(subpatterns, key=len) def rec_test(sequence, test_func): """Tests test_func on all items of sequence and items of included sub-iterables""" for x in sequence: if isinstance(x, (list, tuple)): yield from rec_test(x, test_func) else: yield test_func(x)

fixer_base.py:
# Copyright 2006 Google, Inc. All Rights Reserved. # Licensed to PSF under a Contributor Agreement. """Base class for fixers (optional, but recommended).""" # Python imports import itertools # Local imports from .patcomp import PatternCompiler from . import pygram from .fixer_util import does_tree_import class BaseFix(object): """Optional base class for fixers. The subclass name must be FixFooBar where FooBar is the result of removing underscores and capitalizing the words of the fix name. For example, the class name for a fixer named 'has_key' should be FixHasKey. """ PATTERN = None # Most subclasses should override with a string literal pattern = None # Compiled pattern, set by compile_pattern() pattern_tree = None # Tree representation of the pattern options = None # Options object passed to initializer filename = None # The filename (set by set_filename) numbers = itertools.count(1) # For new_name() used_names = set() # A set of all used NAMEs order = "post" # Does the fixer prefer pre- or post-order traversal explicit = False # Is this ignored by refactor.py -f all? run_order = 5 # Fixers will be sorted by run order before execution # Lower numbers will be run first. _accept_type = None # [Advanced and not public] This tells RefactoringTool # which node type to accept when there's not a pattern. keep_line_order = False # For the bottom matcher: match with the # original line order BM_compatible = False # Compatibility with the bottom matching # module; every fixer should set this # manually # Shortcut for access to Python grammar symbols syms = pygram.python_symbols def __init__(self, options, log): """Initializer. Subclass may override.
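Editor's aside (not part of the dumped source): the reduce_tree/get_linear_subpattern machinery above can be exercised directly. A sketch assuming stock lib2to3; the pattern is illustrative. The dump resumes below.

from lib2to3.patcomp import PatternCompiler
from lib2to3.btm_utils import reduce_tree

pc = PatternCompiler()
pattern, ptree = pc.compile_pattern("power< 'd' trailer< '.' 'has_key' > any* >",
                                    with_tree=True)
mintree = reduce_tree(ptree)            # MinNode intermediate representation
print(mintree.get_linear_subpattern())  # rarest leaf-to-root path (names and type numbers)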
Args: options: a dict containing the options passed to RefactoringTool that could be used to customize the fixer through the command line. log: a list to append warnings and other messages to. """ self.options = options self.log = log self.compile_pattern() def compile_pattern(self): """Compiles self.PATTERN into self.pattern. Subclass may override if it doesn't want to use self.{pattern,PATTERN} in .match(). """ if self.PATTERN is not None: PC = PatternCompiler() self.pattern, self.pattern_tree = PC.compile_pattern(self.PATTERN, with_tree=True) def set_filename(self, filename): """Set the filename. The main refactoring tool should call this. """ self.filename = filename def match(self, node): """Returns match for a given parse tree node. Should return a true or false object (not necessarily a bool). It may return a non-empty dict of matching sub-nodes as returned by a matching pattern. Subclass may override. """ results = {"node": node} return self.pattern.match(node, results) and results def transform(self, node, results): """Returns the transformation for a given parse tree node. Args: node: the root of the parse tree that matched the fixer. results: a dict mapping symbolic names to part of the match. Returns: None, or a node that is a modified copy of the argument node. The node argument may also be modified in-place to effect the same change. Subclass *must* override. """ raise NotImplementedError() def new_name(self, template="xxx_todo_changeme"): """Return a string suitable for use as an identifier The new name is guaranteed not to conflict with other identifiers. """ name = template while name in self.used_names: name = template + str(next(self.numbers)) self.used_names.add(name) return name def log_message(self, message): if self.first_log: self.first_log = False self.log.append("### In file %s ###" % self.filename) self.log.append(message) def cannot_convert(self, node, reason=None): """Warn the user that a given chunk of code is not valid Python 3, but that it cannot be converted automatically. First argument is the top-level node for the code in question. Optional second argument is why it can't be converted. """ lineno = node.get_lineno() for_output = node.clone() for_output.prefix = "" msg = "Line %d: could not convert: %s" self.log_message(msg % (lineno, for_output)) if reason: self.log_message(reason) def warning(self, node, reason): """Used for warning the user about possible uncertainty in the translation. First argument is the top-level node for the code in question. Optional second argument is why it can't be converted. """ lineno = node.get_lineno() self.log_message("Line %d: %s" % (lineno, reason)) def start_tree(self, tree, filename): """Some fixers need to maintain tree-wide state. This method is called once, at the start of tree fix-up. tree - the root node of the tree to be processed. filename - the name of the file the tree came from. """ self.used_names = tree.used_names self.set_filename(filename) self.numbers = itertools.count(1) self.first_log = True def finish_tree(self, tree, filename): """Some fixers need to maintain tree-wide state. This method is called once, at the conclusion of tree fix-up. tree - the root node of the tree to be processed. filename - the name of the file the tree came from. """ pass class ConditionalFix(BaseFix): """ Base class for fixers which do not execute if an import is found.
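Editor's aside (not part of the dumped source): the smallest useful BaseFix subclass under the contract above. FixFooToBar is invented for illustration; dropped into a fixer package as fix_foo_to_bar.py it would be discovered by the refactoring machinery. The dump resumes below.

from lib2to3.fixer_base import BaseFix
from lib2to3.fixer_util import Name

class FixFooToBar(BaseFix):      # hypothetical fixer, not part of lib2to3
    BM_compatible = True
    PATTERN = "'foo'"            # a STRING pattern matches any leaf spelled 'foo'

    def transform(self, node, results):
        # Return a replacement node, preserving the original whitespace prefix.
        # (Simplification: this would also rename attribute accesses like x.foo.)
        return Name("bar", prefix=node.prefix)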
""" # This is the name of the import which, if found, will cause the test to be skipped skip_on = None def start_tree(self, *args): super(ConditionalFix, self).start_tree(*args) self._should_skip = None def should_skip(self, node): if self._should_skip is not None: return self._should_skip pkg = self.skip_on.split(".") name = pkg[-1] pkg = ".".join(pkg[:-1]) self._should_skip = does_tree_import(pkg, name, node) return self._should_skip PK{��\��~�g;g; fixer_util.pynu�[���"""Utility functions, node construction macros, etc.""" # Author: Collin Winter # Local imports from .pgen2 import token from .pytree import Leaf, Node from .pygram import python_symbols as syms from . import patcomp ########################################################### ### Common node-construction "macros" ########################################################### def KeywordArg(keyword, value): return Node(syms.argument, [keyword, Leaf(token.EQUAL, "="), value]) def LParen(): return Leaf(token.LPAR, "(") def RParen(): return Leaf(token.RPAR, ")") def Assign(target, source): """Build an assignment statement""" if not isinstance(target, list): target = [target] if not isinstance(source, list): source.prefix = " " source = [source] return Node(syms.atom, target + [Leaf(token.EQUAL, "=", prefix=" ")] + source) def Name(name, prefix=None): """Return a NAME leaf""" return Leaf(token.NAME, name, prefix=prefix) def Attr(obj, attr): """A node tuple for obj.attr""" return [obj, Node(syms.trailer, [Dot(), attr])] def Comma(): """A comma leaf""" return Leaf(token.COMMA, ",") def Dot(): """A period (.) leaf""" return Leaf(token.DOT, ".") def ArgList(args, lparen=LParen(), rparen=RParen()): """A parenthesised argument list, used by Call()""" node = Node(syms.trailer, [lparen.clone(), rparen.clone()]) if args: node.insert_child(1, Node(syms.arglist, args)) return node def Call(func_name, args=None, prefix=None): """A function call""" node = Node(syms.power, [func_name, ArgList(args)]) if prefix is not None: node.prefix = prefix return node def Newline(): """A newline literal""" return Leaf(token.NEWLINE, "\n") def BlankLine(): """A blank line""" return Leaf(token.NEWLINE, "") def Number(n, prefix=None): return Leaf(token.NUMBER, n, prefix=prefix) def Subscript(index_node): """A numeric or string subscript""" return Node(syms.trailer, [Leaf(token.LBRACE, "["), index_node, Leaf(token.RBRACE, "]")]) def String(string, prefix=None): """A string leaf""" return Leaf(token.STRING, string, prefix=prefix) def ListComp(xp, fp, it, test=None): """A list comprehension of the form [xp for fp in it if test]. If test is None, the "if test" part is omitted. """ xp.prefix = "" fp.prefix = " " it.prefix = " " for_leaf = Leaf(token.NAME, "for") for_leaf.prefix = " " in_leaf = Leaf(token.NAME, "in") in_leaf.prefix = " " inner_args = [for_leaf, fp, in_leaf, it] if test: test.prefix = " " if_leaf = Leaf(token.NAME, "if") if_leaf.prefix = " " inner_args.append(Node(syms.comp_if, [if_leaf, test])) inner = Node(syms.listmaker, [xp, Node(syms.comp_for, inner_args)]) return Node(syms.atom, [Leaf(token.LBRACE, "["), inner, Leaf(token.RBRACE, "]")]) def FromImport(package_name, name_leafs): """ Return an import statement in the form: from package import name_leafs""" # XXX: May not handle dotted imports properly (eg, package_name='foo.bar') #assert package_name == '.' or '.' not in package_name, "FromImport has "\ # "not been tested with dotted package names -- use at your own "\ # "peril!" 
for leaf in name_leafs: # Pull the leaves out of their old tree leaf.remove() children = [Leaf(token.NAME, "from"), Leaf(token.NAME, package_name, prefix=" "), Leaf(token.NAME, "import", prefix=" "), Node(syms.import_as_names, name_leafs)] imp = Node(syms.import_from, children) return imp def ImportAndCall(node, results, names): """Returns an import statement and calls a method of the module: import module module.name()""" obj = results["obj"].clone() if obj.type == syms.arglist: newarglist = obj.clone() else: newarglist = Node(syms.arglist, [obj.clone()]) after = results["after"] if after: after = [n.clone() for n in after] new = Node(syms.power, Attr(Name(names[0]), Name(names[1])) + [Node(syms.trailer, [results["lpar"].clone(), newarglist, results["rpar"].clone()])] + after) new.prefix = node.prefix return new ########################################################### ### Determine whether a node represents a given literal ########################################################### def is_tuple(node): """Does the node represent a tuple literal?""" if isinstance(node, Node) and node.children == [LParen(), RParen()]: return True return (isinstance(node, Node) and len(node.children) == 3 and isinstance(node.children[0], Leaf) and isinstance(node.children[1], Node) and isinstance(node.children[2], Leaf) and node.children[0].value == "(" and node.children[2].value == ")") def is_list(node): """Does the node represent a list literal?""" return (isinstance(node, Node) and len(node.children) > 1 and isinstance(node.children[0], Leaf) and isinstance(node.children[-1], Leaf) and node.children[0].value == "[" and node.children[-1].value == "]") ########################################################### ### Misc ########################################################### def parenthesize(node): return Node(syms.atom, [LParen(), node, RParen()]) consuming_calls = {"sorted", "list", "set", "any", "all", "tuple", "sum", "min", "max", "enumerate"} def attr_chain(obj, attr): """Follow an attribute chain. If you have a chain of objects where a.foo -> b, b.foo-> c, etc, use this to iterate over all objects in the chain. Iteration is terminated by getattr(x, attr) is None. Args: obj: the starting object attr: the name of the chaining attribute Yields: Each successive object in the chain. """ next = getattr(obj, attr) while next: yield next next = getattr(next, attr) p0 = """for_stmt< 'for' any 'in' node=any ':' any* > | comp_for< 'for' any 'in' node=any any* > """ p1 = """ power< ( 'iter' | 'list' | 'tuple' | 'sorted' | 'set' | 'sum' | 'any' | 'all' | 'enumerate' | (any* trailer< '.' 'join' >) ) trailer< '(' node=any ')' > any* > """ p2 = """ power< ( 'sorted' | 'enumerate' ) trailer< '(' arglist<node=any any*> ')' > any* > """ pats_built = False def in_special_context(node): """ Returns true if node is in an environment where all that is required of it is being iterable (ie, it doesn't matter if it returns a list or an iterator). See test_map_nochange in test_fixers.py for some examples and tests. """ global p0, p1, p2, pats_built if not pats_built: p0 = patcomp.compile_pattern(p0) p1 = patcomp.compile_pattern(p1) p2 = patcomp.compile_pattern(p2) pats_built = True patterns = [p0, p1, p2] for pattern, parent in zip(patterns, attr_chain(node, "parent")): results = {} if pattern.match(parent, results) and results["node"] is node: return True return False def is_probably_builtin(node): """ Check that something isn't an attribute or function name etc. 
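Editor's aside (not part of the dumped source): the node-construction "macros" above compose directly into printable trees. A sketch assuming stock lib2to3. The dump resumes below.

from lib2to3.fixer_util import Name, Call, Attr
from lib2to3.pytree import Node
from lib2to3.pygram import python_symbols as syms

print(Call(Name("sorted"), [Name("xs")]))                 # -> sorted(xs)
print(Node(syms.power, Attr(Name("obj"), Name("attr"))))  # -> obj.attr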
""" prev = node.prev_sibling if prev is not None and prev.type == token.DOT: # Attribute lookup. return False parent = node.parent if parent.type in (syms.funcdef, syms.classdef): return False if parent.type == syms.expr_stmt and parent.children[0] is node: # Assignment. return False if parent.type == syms.parameters or \ (parent.type == syms.typedargslist and ( (prev is not None and prev.type == token.COMMA) or parent.children[0] is node )): # The name of an argument. return False return True def find_indentation(node): """Find the indentation of *node*.""" while node is not None: if node.type == syms.suite and len(node.children) > 2: indent = node.children[1] if indent.type == token.INDENT: return indent.value node = node.parent return "" ########################################################### ### The following functions are to find bindings in a suite ########################################################### def make_suite(node): if node.type == syms.suite: return node node = node.clone() parent, node.parent = node.parent, None suite = Node(syms.suite, [node]) suite.parent = parent return suite def find_root(node): """Find the top level namespace.""" # Scamper up to the top level namespace while node.type != syms.file_input: node = node.parent if not node: raise ValueError("root found before file_input node was found.") return node def does_tree_import(package, name, node): """ Returns true if name is imported from package at the top level of the tree which node belongs to. To cover the case of an import like 'import foo', use None for the package and 'foo' for the name. """ binding = find_binding(name, find_root(node), package) return bool(binding) def is_import(node): """Returns true if the node is an import statement.""" return node.type in (syms.import_name, syms.import_from) def touch_import(package, name, node): """ Works like `does_tree_import` but adds an import statement if it was not imported. """ def is_import_stmt(node): return (node.type == syms.simple_stmt and node.children and is_import(node.children[0])) root = find_root(node) if does_tree_import(package, name, root): return # figure out where to insert the new import. First try to find # the first import and then skip to the last one. insert_pos = offset = 0 for idx, node in enumerate(root.children): if not is_import_stmt(node): continue for offset, node2 in enumerate(root.children[idx:]): if not is_import_stmt(node2): break insert_pos = idx + offset break # if there are no imports where we can insert, find the docstring. # if that also fails, we stick to the beginning of the file if insert_pos == 0: for idx, node in enumerate(root.children): if (node.type == syms.simple_stmt and node.children and node.children[0].type == token.STRING): insert_pos = idx + 1 break if package is None: import_ = Node(syms.import_name, [ Leaf(token.NAME, "import"), Leaf(token.NAME, name, prefix=" ") ]) else: import_ = FromImport(package, [Leaf(token.NAME, name, prefix=" ")]) children = [import_, Newline()] root.insert_child(insert_pos, Node(syms.simple_stmt, children)) _def_syms = {syms.classdef, syms.funcdef} def find_binding(name, node, package=None): """ Returns the node which binds variable name, otherwise None. If optional argument package is supplied, only imports will be returned. 
See test cases for examples.""" for child in node.children: ret = None if child.type == syms.for_stmt: if _find(name, child.children[1]): return child n = find_binding(name, make_suite(child.children[-1]), package) if n: ret = n elif child.type in (syms.if_stmt, syms.while_stmt): n = find_binding(name, make_suite(child.children[-1]), package) if n: ret = n elif child.type == syms.try_stmt: n = find_binding(name, make_suite(child.children[2]), package) if n: ret = n else: for i, kid in enumerate(child.children[3:]): if kid.type == token.COLON and kid.value == ":": # i+3 is the colon, i+4 is the suite n = find_binding(name, make_suite(child.children[i+4]), package) if n: ret = n elif child.type in _def_syms and child.children[1].value == name: ret = child elif _is_import_binding(child, name, package): ret = child elif child.type == syms.simple_stmt: ret = find_binding(name, child, package) elif child.type == syms.expr_stmt: if _find(name, child.children[0]): ret = child if ret: if not package: return ret if is_import(ret): return ret return None _block_syms = {syms.funcdef, syms.classdef, syms.trailer} def _find(name, node): nodes = [node] while nodes: node = nodes.pop() if node.type > 256 and node.type not in _block_syms: nodes.extend(node.children) elif node.type == token.NAME and node.value == name: return node return None def _is_import_binding(node, name, package=None): """ Will reuturn node if node will import name, or node will import * from package. None is returned otherwise. See test cases for examples. """ if node.type == syms.import_name and not package: imp = node.children[1] if imp.type == syms.dotted_as_names: for child in imp.children: if child.type == syms.dotted_as_name: if child.children[2].value == name: return node elif child.type == token.NAME and child.value == name: return node elif imp.type == syms.dotted_as_name: last = imp.children[-1] if last.type == token.NAME and last.value == name: return node elif imp.type == token.NAME and imp.value == name: return node elif node.type == syms.import_from: # str(...) is used to make life easier here, because # from a.b import parses to ['import', ['a', '.', 'b'], ...] if package and str(node.children[1]).strip() != package: return None n = node.children[3] if package and _find("as", n): # See test_from_import_as for explanation return None elif n.type == syms.import_as_names and _find(name, n): return node elif n.type == syms.import_as_name: child = n.children[2] if child.type == token.NAME and child.value == name: return node elif n.type == token.NAME and n.value == name: return node elif package and n.type == token.STAR: return node return None PK{��\SBz_�-�-main.pynu�[���""" Main program for 2to3. """ from __future__ import with_statement, print_function import sys import os import difflib import logging import shutil import optparse from . import refactor def diff_texts(a, b, filename): """Return a unified diff of two strings.""" a = a.splitlines() b = b.splitlines() return difflib.unified_diff(a, b, filename, filename, "(original)", "(refactored)", lineterm="") class StdoutRefactoringTool(refactor.MultiprocessRefactoringTool): """ A refactoring tool that can avoid overwriting its input files. Prints output to stdout. Output files can optionally be written to a different directory and or have an extra file suffix appended to their name for use in situations where you do not want to replace the input files. 
""" def __init__(self, fixers, options, explicit, nobackups, show_diffs, input_base_dir='', output_dir='', append_suffix=''): """ Args: fixers: A list of fixers to import. options: A dict with RefactoringTool configuration. explicit: A list of fixers to run even if they are explicit. nobackups: If true no backup '.bak' files will be created for those files that are being refactored. show_diffs: Should diffs of the refactoring be printed to stdout? input_base_dir: The base directory for all input files. This class will strip this path prefix off of filenames before substituting it with output_dir. Only meaningful if output_dir is supplied. All files processed by refactor() must start with this path. output_dir: If supplied, all converted files will be written into this directory tree instead of input_base_dir. append_suffix: If supplied, all files output by this tool will have this appended to their filename. Useful for changing .py to .py3 for example by passing append_suffix='3'. """ self.nobackups = nobackups self.show_diffs = show_diffs if input_base_dir and not input_base_dir.endswith(os.sep): input_base_dir += os.sep self._input_base_dir = input_base_dir self._output_dir = output_dir self._append_suffix = append_suffix super(StdoutRefactoringTool, self).__init__(fixers, options, explicit) def log_error(self, msg, *args, **kwargs): self.errors.append((msg, args, kwargs)) self.logger.error(msg, *args, **kwargs) def write_file(self, new_text, filename, old_text, encoding): orig_filename = filename if self._output_dir: if filename.startswith(self._input_base_dir): filename = os.path.join(self._output_dir, filename[len(self._input_base_dir):]) else: raise ValueError('filename %s does not start with the ' 'input_base_dir %s' % ( filename, self._input_base_dir)) if self._append_suffix: filename += self._append_suffix if orig_filename != filename: output_dir = os.path.dirname(filename) if not os.path.isdir(output_dir) and output_dir: os.makedirs(output_dir) self.log_message('Writing converted %s to %s.', orig_filename, filename) if not self.nobackups: # Make backup backup = filename + ".bak" if os.path.lexists(backup): try: os.remove(backup) except OSError as err: self.log_message("Can't remove backup %s", backup) try: os.rename(filename, backup) except OSError as err: self.log_message("Can't rename %s to %s", filename, backup) # Actually write the new file write = super(StdoutRefactoringTool, self).write_file write(new_text, filename, old_text, encoding) if not self.nobackups: shutil.copymode(backup, filename) if orig_filename != filename: # Preserve the file mode in the new output directory. shutil.copymode(orig_filename, filename) def print_output(self, old, new, filename, equal): if equal: self.log_message("No changes to %s", filename) else: self.log_message("Refactored %s", filename) if self.show_diffs: diff_lines = diff_texts(old, new, filename) try: if self.output_lock is not None: with self.output_lock: for line in diff_lines: print(line) sys.stdout.flush() else: for line in diff_lines: print(line) except UnicodeEncodeError: warn("couldn't encode %s's diff for your terminal" % (filename,)) return def warn(msg): print("WARNING: %s" % (msg,), file=sys.stderr) def main(fixer_pkg, args=None): """Main program. Args: fixer_pkg: the name of a package where the fixers are located. args: optional; a list of command line arguments. If omitted, sys.argv[1:] is used. Returns a suggested exit status (0, 1, 2). 
""" # Set up option parser parser = optparse.OptionParser(usage="2to3 [options] file|dir ...") parser.add_option("-d", "--doctests_only", action="store_true", help="Fix up doctests only") parser.add_option("-f", "--fix", action="append", default=[], help="Each FIX specifies a transformation; default: all") parser.add_option("-j", "--processes", action="store", default=1, type="int", help="Run 2to3 concurrently") parser.add_option("-x", "--nofix", action="append", default=[], help="Prevent a transformation from being run") parser.add_option("-l", "--list-fixes", action="store_true", help="List available transformations") parser.add_option("-p", "--print-function", action="store_true", help="Modify the grammar so that print() is a function") parser.add_option("-v", "--verbose", action="store_true", help="More verbose logging") parser.add_option("--no-diffs", action="store_true", help="Don't show diffs of the refactoring") parser.add_option("-w", "--write", action="store_true", help="Write back modified files") parser.add_option("-n", "--nobackups", action="store_true", default=False, help="Don't write backups for modified files") parser.add_option("-o", "--output-dir", action="store", type="str", default="", help="Put output files in this directory " "instead of overwriting the input files. Requires -n.") parser.add_option("-W", "--write-unchanged-files", action="store_true", help="Also write files even if no changes were required" " (useful with --output-dir); implies -w.") parser.add_option("--add-suffix", action="store", type="str", default="", help="Append this string to all output filenames." " Requires -n if non-empty. " "ex: --add-suffix='3' will generate .py3 files.") # Parse command line arguments refactor_stdin = False flags = {} options, args = parser.parse_args(args) if options.write_unchanged_files: flags["write_unchanged_files"] = True if not options.write: warn("--write-unchanged-files/-W implies -w.") options.write = True # If we allowed these, the original files would be renamed to backup names # but not replaced. 
if options.output_dir and not options.nobackups: parser.error("Can't use --output-dir/-o without -n.") if options.add_suffix and not options.nobackups: parser.error("Can't use --add-suffix without -n.") if not options.write and options.no_diffs: warn("not writing files and not printing diffs; that's not very useful") if not options.write and options.nobackups: parser.error("Can't use -n without -w") if options.list_fixes: print("Available transformations for the -f/--fix option:") for fixname in refactor.get_all_fix_names(fixer_pkg): print(fixname) if not args: return 0 if not args: print("At least one file or directory argument required.", file=sys.stderr) print("Use --help to show usage.", file=sys.stderr) return 2 if "-" in args: refactor_stdin = True if options.write: print("Can't write to stdin.", file=sys.stderr) return 2 if options.print_function: flags["print_function"] = True # Set up logging handler level = logging.DEBUG if options.verbose else logging.INFO logging.basicConfig(format='%(name)s: %(message)s', level=level) logger = logging.getLogger('lib2to3.main') # Initialize the refactoring tool avail_fixes = set(refactor.get_fixers_from_package(fixer_pkg)) unwanted_fixes = set(fixer_pkg + ".fix_" + fix for fix in options.nofix) explicit = set() if options.fix: all_present = False for fix in options.fix: if fix == "all": all_present = True else: explicit.add(fixer_pkg + ".fix_" + fix) requested = avail_fixes.union(explicit) if all_present else explicit else: requested = avail_fixes.union(explicit) fixer_names = requested.difference(unwanted_fixes) input_base_dir = os.path.commonprefix(args) if (input_base_dir and not input_base_dir.endswith(os.sep) and not os.path.isdir(input_base_dir)): # One or more similar names were passed, their directory is the base. # os.path.commonprefix() is ignorant of path elements, this corrects # for that weird API. input_base_dir = os.path.dirname(input_base_dir) if options.output_dir: input_base_dir = input_base_dir.rstrip(os.sep) logger.info('Output in %r will mirror the input directory %r layout.', options.output_dir, input_base_dir) rt = StdoutRefactoringTool( sorted(fixer_names), flags, sorted(explicit), options.nobackups, not options.no_diffs, input_base_dir=input_base_dir, output_dir=options.output_dir, append_suffix=options.add_suffix) # Refactor all files and directories passed as arguments if not rt.errors: if refactor_stdin: rt.refactor_stdin() else: try: rt.refactor(args, options.write, options.doctests_only, options.processes) except refactor.MultiprocessingUnsupported: assert options.processes > 1 print("Sorry, -j isn't supported on this platform.", file=sys.stderr) return 1 rt.summarize() # Return error status (0 if rt.errors is zero) return int(bool(rt.errors))

patcomp.py:
# Copyright 2006 Google, Inc. All Rights Reserved. # Licensed to PSF under a Contributor Agreement. """Pattern compiler. The grammar is taken from PatternGrammar.txt. The compiler compiles a pattern to a pytree.*Pattern instance. """ __author__ = "Guido van Rossum <guido@python.org>" # Python imports import io # Fairly local imports from .pgen2 import driver, literals, token, tokenize, parse, grammar # Really local imports from . import pytree from .
import pygram class PatternSyntaxError(Exception): pass def tokenize_wrapper(input): """Tokenizes a string suppressing significant whitespace.""" skip = {token.NEWLINE, token.INDENT, token.DEDENT} tokens = tokenize.generate_tokens(io.StringIO(input).readline) for quintuple in tokens: type, value, start, end, line_text = quintuple if type not in skip: yield quintuple class PatternCompiler(object): def __init__(self, grammar_file=None): """Initializer. Takes an optional alternative filename for the pattern grammar. """ if grammar_file is None: self.grammar = pygram.pattern_grammar self.syms = pygram.pattern_symbols else: self.grammar = driver.load_grammar(grammar_file) self.syms = pygram.Symbols(self.grammar) self.pygrammar = pygram.python_grammar self.pysyms = pygram.python_symbols self.driver = driver.Driver(self.grammar, convert=pattern_convert) def compile_pattern(self, input, debug=False, with_tree=False): """Compiles a pattern string to a nested pytree.*Pattern object.""" tokens = tokenize_wrapper(input) try: root = self.driver.parse_tokens(tokens, debug=debug) except parse.ParseError as e: raise PatternSyntaxError(str(e)) if with_tree: return self.compile_node(root), root else: return self.compile_node(root) def compile_node(self, node): """Compiles a node, recursively. This is one big switch on the node type. """ # XXX Optimize certain Wildcard-containing-Wildcard patterns # that can be merged if node.type == self.syms.Matcher: node = node.children[0] # Avoid unneeded recursion if node.type == self.syms.Alternatives: # Skip the odd children since they are just '|' tokens alts = [self.compile_node(ch) for ch in node.children[::2]] if len(alts) == 1: return alts[0] p = pytree.WildcardPattern([[a] for a in alts], min=1, max=1) return p.optimize() if node.type == self.syms.Alternative: units = [self.compile_node(ch) for ch in node.children] if len(units) == 1: return units[0] p = pytree.WildcardPattern([units], min=1, max=1) return p.optimize() if node.type == self.syms.NegatedUnit: pattern = self.compile_basic(node.children[1:]) p = pytree.NegatedPattern(pattern) return p.optimize() assert node.type == self.syms.Unit name = None nodes = node.children if len(nodes) >= 3 and nodes[1].type == token.EQUAL: name = nodes[0].value nodes = nodes[2:] repeat = None if len(nodes) >= 2 and nodes[-1].type == self.syms.Repeater: repeat = nodes[-1] nodes = nodes[:-1] # Now we've reduced it to: STRING | NAME [Details] | (...) | [...] pattern = self.compile_basic(nodes, repeat) if repeat is not None: assert repeat.type == self.syms.Repeater children = repeat.children child = children[0] if child.type == token.STAR: min = 0 max = pytree.HUGE elif child.type == token.PLUS: min = 1 max = pytree.HUGE elif child.type == token.LBRACE: assert children[-1].type == token.RBRACE assert len(children) in (3, 5) min = max = self.get_int(children[1]) if len(children) == 5: max = self.get_int(children[3]) else: assert False if min != 1 or max != 1: pattern = pattern.optimize() pattern = pytree.WildcardPattern([[pattern]], min=min, max=max) if name is not None: pattern.name = name return pattern.optimize() def compile_basic(self, nodes, repeat=None): # Compile STRING | NAME [Details] | (...) | [...] 
assert len(nodes) >= 1 node = nodes[0] if node.type == token.STRING: value = str(literals.evalString(node.value)) return pytree.LeafPattern(_type_of_literal(value), value) elif node.type == token.NAME: value = node.value if value.isupper(): if value not in TOKEN_MAP: raise PatternSyntaxError("Invalid token: %r" % value) if nodes[1:]: raise PatternSyntaxError("Can't have details for token") return pytree.LeafPattern(TOKEN_MAP[value]) else: if value == "any": type = None elif not value.startswith("_"): type = getattr(self.pysyms, value, None) if type is None: raise PatternSyntaxError("Invalid symbol: %r" % value) if nodes[1:]: # Details present content = [self.compile_node(nodes[1].children[1])] else: content = None return pytree.NodePattern(type, content) elif node.value == "(": return self.compile_node(nodes[1]) elif node.value == "[": assert repeat is None subpattern = self.compile_node(nodes[1]) return pytree.WildcardPattern([[subpattern]], min=0, max=1) assert False, node def get_int(self, node): assert node.type == token.NUMBER return int(node.value) # Map named tokens to the type value for a LeafPattern TOKEN_MAP = {"NAME": token.NAME, "STRING": token.STRING, "NUMBER": token.NUMBER, "TOKEN": None} def _type_of_literal(value): if value[0].isalpha(): return token.NAME elif value in grammar.opmap: return grammar.opmap[value] else: return None def pattern_convert(grammar, raw_node_info): """Converts raw node information to a Node or Leaf instance.""" type, value, context, children = raw_node_info if children or type in grammar.number2symbol: return pytree.Node(type, children, context=context) else: return pytree.Leaf(type, value, context=context) def compile_pattern(pattern): return PatternCompiler().compile_pattern(pattern)

pygram.py:
# Copyright 2006 Google, Inc. All Rights Reserved. # Licensed to PSF under a Contributor Agreement. """Export the Python grammar and symbols.""" # Python imports import os # Local imports from .pgen2 import token from .pgen2 import driver from . import pytree # The grammar file _GRAMMAR_FILE = os.path.join(os.path.dirname(__file__), "Grammar.txt") _PATTERN_GRAMMAR_FILE = os.path.join(os.path.dirname(__file__), "PatternGrammar.txt") class Symbols(object): def __init__(self, grammar): """Initializer. Creates an attribute for each grammar symbol (nonterminal), whose value is the symbol's type (an int >= 256). """ for name, symbol in grammar.symbol2number.items(): setattr(self, name, symbol) python_grammar = driver.load_packaged_grammar("lib2to3", _GRAMMAR_FILE) python_symbols = Symbols(python_grammar) python_grammar_no_print_statement = python_grammar.copy() del python_grammar_no_print_statement.keywords["print"] pattern_grammar = driver.load_packaged_grammar("lib2to3", _PATTERN_GRAMMAR_FILE) pattern_symbols = Symbols(pattern_grammar)

pytree.py:
# Copyright 2006 Google, Inc. All Rights Reserved. # Licensed to PSF under a Contributor Agreement. """ Python parse tree definitions. This is a very concrete parse tree; we need to keep every token and even the comments and whitespace between tokens. There's also a pattern matching implementation here.
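Editor's aside (not part of the dumped source): the grammar objects exported by pygram.py above plug straight into the parser driver. A sketch assuming stock lib2to3. The dump resumes below.

from lib2to3 import pygram, pytree
from lib2to3.pgen2 import driver

drv = driver.Driver(pygram.python_grammar_no_print_statement, convert=pytree.convert)
tree = drv.parse_string("print('hi')\n")
for node in tree.pre_order():
    print(pytree.type_repr(node.type), repr(str(node)))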
""" __author__ = "Guido van Rossum <guido@python.org>" import sys import warnings from io import StringIO HUGE = 0x7FFFFFFF # maximum repeat count, default max _type_reprs = {} def type_repr(type_num): global _type_reprs if not _type_reprs: from .pygram import python_symbols # printing tokens is possible but not as useful # from .pgen2 import token // token.__dict__.items(): for name, val in python_symbols.__dict__.items(): if type(val) == int: _type_reprs[val] = name return _type_reprs.setdefault(type_num, type_num) class Base(object): """ Abstract base class for Node and Leaf. This provides some default functionality and boilerplate using the template pattern. A node may be a subnode of at most one parent. """ # Default values for instance variables type = None # int: token number (< 256) or symbol number (>= 256) parent = None # Parent node pointer, or None children = () # Tuple of subnodes was_changed = False was_checked = False def __new__(cls, *args, **kwds): """Constructor that prevents Base from being instantiated.""" assert cls is not Base, "Cannot instantiate Base" return object.__new__(cls) def __eq__(self, other): """ Compare two nodes for equality. This calls the method _eq(). """ if self.__class__ is not other.__class__: return NotImplemented return self._eq(other) __hash__ = None # For Py3 compatibility. def _eq(self, other): """ Compare two nodes for equality. This is called by __eq__ and __ne__. It is only called if the two nodes have the same type. This must be implemented by the concrete subclass. Nodes should be considered equal if they have the same structure, ignoring the prefix string and other context information. """ raise NotImplementedError def clone(self): """ Return a cloned (deep) copy of self. This must be implemented by the concrete subclass. """ raise NotImplementedError def post_order(self): """ Return a post-order iterator for the tree. This must be implemented by the concrete subclass. """ raise NotImplementedError def pre_order(self): """ Return a pre-order iterator for the tree. This must be implemented by the concrete subclass. """ raise NotImplementedError def replace(self, new): """Replace this node with a new one in the parent.""" assert self.parent is not None, str(self) assert new is not None if not isinstance(new, list): new = [new] l_children = [] found = False for ch in self.parent.children: if ch is self: assert not found, (self.parent.children, self, new) if new is not None: l_children.extend(new) found = True else: l_children.append(ch) assert found, (self.children, self, new) self.parent.changed() self.parent.children = l_children for x in new: x.parent = self.parent self.parent = None def get_lineno(self): """Return the line number which generated the invocant node.""" node = self while not isinstance(node, Leaf): if not node.children: return node = node.children[0] return node.lineno def changed(self): if self.parent: self.parent.changed() self.was_changed = True def remove(self): """ Remove the node from the tree. Returns the position of the node in its parent's children before it was removed. """ if self.parent: for i, node in enumerate(self.parent.children): if node is self: self.parent.changed() del self.parent.children[i] self.parent = None return i @property def next_sibling(self): """ The node immediately following the invocant in their parent's children list. 
    @property
    def next_sibling(self):
        """
        The node immediately following the invocant in their parent's
        children list.  If the invocant does not have a next sibling,
        it is None.
        """
        if self.parent is None:
            return None

        # Can't use index(); we need to test by identity
        for i, child in enumerate(self.parent.children):
            if child is self:
                try:
                    return self.parent.children[i+1]
                except IndexError:
                    return None

    @property
    def prev_sibling(self):
        """
        The node immediately preceding the invocant in their parent's
        children list.  If the invocant does not have a previous sibling,
        it is None.
        """
        if self.parent is None:
            return None

        # Can't use index(); we need to test by identity
        for i, child in enumerate(self.parent.children):
            if child is self:
                if i == 0:
                    return None
                return self.parent.children[i-1]

    def leaves(self):
        for child in self.children:
            yield from child.leaves()

    def depth(self):
        if self.parent is None:
            return 0
        return 1 + self.parent.depth()

    def get_suffix(self):
        """
        Return the string immediately following the invocant node.  This
        is effectively equivalent to node.next_sibling.prefix
        """
        next_sib = self.next_sibling
        if next_sib is None:
            return ""
        return next_sib.prefix

    # Note: dead code on Python 3 (the condition is always false); retained
    # from the 2.x sources.
    if sys.version_info < (3, 0):
        def __str__(self):
            return str(self).encode("ascii")


class Node(Base):

    """Concrete implementation for interior nodes."""

    def __init__(self, type, children,
                 context=None,
                 prefix=None,
                 fixers_applied=None):
        """
        Initializer.

        Takes a type constant (a symbol number >= 256), a sequence of
        child nodes, and an optional context keyword argument.

        As a side effect, the parent pointers of the children are updated.
        """
        assert type >= 256, type
        self.type = type
        self.children = list(children)
        for ch in self.children:
            assert ch.parent is None, repr(ch)
            ch.parent = self
        if prefix is not None:
            self.prefix = prefix
        if fixers_applied:
            self.fixers_applied = fixers_applied[:]
        else:
            self.fixers_applied = None

    def __repr__(self):
        """Return a canonical string representation."""
        return "%s(%s, %r)" % (self.__class__.__name__,
                               type_repr(self.type),
                               self.children)

    def __unicode__(self):
        """
        Return a pretty string representation.

        This reproduces the input source exactly.
        """
        return "".join(map(str, self.children))

    if sys.version_info > (3, 0):
        __str__ = __unicode__

    def _eq(self, other):
        """Compare two nodes for equality."""
        return (self.type, self.children) == (other.type, other.children)

    def clone(self):
        """Return a cloned (deep) copy of self."""
        return Node(self.type, [ch.clone() for ch in self.children],
                    fixers_applied=self.fixers_applied)

    def post_order(self):
        """Return a post-order iterator for the tree."""
        for child in self.children:
            yield from child.post_order()
        yield self

    def pre_order(self):
        """Return a pre-order iterator for the tree."""
        yield self
        for child in self.children:
            yield from child.pre_order()

    def _prefix_getter(self):
        """
        The whitespace and comments preceding this node in the input.
        """
        if not self.children:
            return ""
        return self.children[0].prefix

    def _prefix_setter(self, prefix):
        if self.children:
            self.children[0].prefix = prefix

    prefix = property(_prefix_getter, _prefix_setter)

    def set_child(self, i, child):
        """
        Equivalent to 'node.children[i] = child'.  This method also sets
        the child's parent attribute appropriately.
        """
        child.parent = self
        self.children[i].parent = None
        self.children[i] = child
        self.changed()

    def insert_child(self, i, child):
        """
        Equivalent to 'node.children.insert(i, child)'.  This method also
        sets the child's parent attribute appropriately.
        """
        child.parent = self
        self.children.insert(i, child)
        self.changed()

    def append_child(self, child):
        """
        Equivalent to 'node.children.append(child)'.  This method also
        sets the child's parent attribute appropriately.
        """
        child.parent = self
        self.children.append(child)
        self.changed()
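# Illustrative sketch (hypothetical helper, added for exposition; not part of
# the original module): building a tiny tree by hand and editing it in place.
# Leaf is defined just below; the reference resolves when the function is
# called.
def _demo_tree_edit():
    from . import pygram
    from .pgen2 import token
    syms = pygram.python_symbols
    lhs = Leaf(token.NAME, "x")
    eq = Leaf(token.EQUAL, "=", prefix=" ")
    rhs = Leaf(token.NUMBER, "1", prefix=" ")
    stmt = Node(syms.expr_stmt, [lhs, eq, rhs])
    assert str(stmt) == "x = 1"             # prefixes reproduce the spacing
    rhs.replace(Leaf(token.NUMBER, "2", prefix=" "))
    assert str(stmt) == "x = 2"             # replace() rewires the parent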
class Leaf(Base):

    """Concrete implementation for leaf nodes."""

    # Default values for instance variables
    _prefix = ""  # Whitespace and comments preceding this token in the input
    lineno = 0    # Line where this token starts in the input
    column = 0    # Column where this token starts in the input

    def __init__(self, type, value,
                 context=None,
                 prefix=None,
                 fixers_applied=[]):
        """
        Initializer.

        Takes a type constant (a token number < 256), a string value, and an
        optional context keyword argument.
        """
        assert 0 <= type < 256, type
        if context is not None:
            self._prefix, (self.lineno, self.column) = context
        self.type = type
        self.value = value
        if prefix is not None:
            self._prefix = prefix
        # The shared default list is copied here, so mutating
        # self.fixers_applied is safe.
        self.fixers_applied = fixers_applied[:]

    def __repr__(self):
        """Return a canonical string representation."""
        return "%s(%r, %r)" % (self.__class__.__name__,
                               self.type,
                               self.value)

    def __unicode__(self):
        """
        Return a pretty string representation.

        This reproduces the input source exactly.
        """
        return self.prefix + str(self.value)

    if sys.version_info > (3, 0):
        __str__ = __unicode__

    def _eq(self, other):
        """Compare two nodes for equality."""
        return (self.type, self.value) == (other.type, other.value)

    def clone(self):
        """Return a cloned (deep) copy of self."""
        return Leaf(self.type, self.value,
                    (self.prefix, (self.lineno, self.column)),
                    fixers_applied=self.fixers_applied)

    def leaves(self):
        yield self

    def post_order(self):
        """Return a post-order iterator for the tree."""
        yield self

    def pre_order(self):
        """Return a pre-order iterator for the tree."""
        yield self

    def _prefix_getter(self):
        """
        The whitespace and comments preceding this token in the input.
        """
        return self._prefix

    def _prefix_setter(self, prefix):
        self.changed()
        self._prefix = prefix

    prefix = property(_prefix_getter, _prefix_setter)


def convert(gr, raw_node):
    """
    Convert raw node information to a Node or Leaf instance.

    This is passed to the parser driver which calls it whenever a reduction
    of a grammar rule produces a new complete node, so that the tree is built
    strictly bottom-up.
    """
    type, value, context, children = raw_node
    if children or type in gr.number2symbol:
        # If there's exactly one child, return that child instead of
        # creating a new node.
        if len(children) == 1:
            return children[0]
        return Node(type, children, context=context)
    else:
        return Leaf(type, value, context=context)
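# Illustrative sketch (hypothetical helper, added for exposition): convert()
# is the callback handed to the parser driver, and the resulting tree
# reproduces the input source exactly, comments and whitespace included.
def _demo_convert_roundtrip():
    from . import pygram
    from .pgen2 import driver
    d = driver.Driver(pygram.python_grammar, convert=convert)
    source = "x = 1  # keep this comment\n"
    tree = d.parse_string(source)
    assert str(tree) == source  # tokens, whitespace and comments all survive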
class BasePattern(object):

    """
    A pattern is a tree matching pattern.

    It looks for a specific node type (token or symbol), and
    optionally for a specific content.

    This is an abstract base class.  There are three concrete
    subclasses:

    - LeafPattern matches a single leaf node;
    - NodePattern matches a single node (usually non-leaf);
    - WildcardPattern matches a sequence of nodes of variable length.
    """

    # Defaults for instance variables
    type = None     # Node type (token if < 256, symbol if >= 256)
    content = None  # Optional content matching pattern
    name = None     # Optional name used to store match in results dict

    def __new__(cls, *args, **kwds):
        """Constructor that prevents BasePattern from being instantiated."""
        assert cls is not BasePattern, "Cannot instantiate BasePattern"
        return object.__new__(cls)

    def __repr__(self):
        args = [type_repr(self.type), self.content, self.name]
        while args and args[-1] is None:
            del args[-1]
        return "%s(%s)" % (self.__class__.__name__,
                           ", ".join(map(repr, args)))

    def optimize(self):
        """
        A subclass can define this as a hook for optimizations.

        Returns either self or another node with the same effect.
        """
        return self

    def match(self, node, results=None):
        """
        Does this pattern exactly match a node?

        Returns True if it matches, False if not.

        If results is not None, it must be a dict which will be
        updated with the nodes matching named subpatterns.

        Default implementation for non-wildcard patterns.
        """
        if self.type is not None and node.type != self.type:
            return False
        if self.content is not None:
            r = None
            if results is not None:
                r = {}
            if not self._submatch(node, r):
                return False
            if r:
                results.update(r)
        if results is not None and self.name:
            results[self.name] = node
        return True

    def match_seq(self, nodes, results=None):
        """
        Does this pattern exactly match a sequence of nodes?

        Default implementation for non-wildcard patterns.
        """
        if len(nodes) != 1:
            return False
        return self.match(nodes[0], results)

    def generate_matches(self, nodes):
        """
        Generator yielding all matches for this pattern.

        Default implementation for non-wildcard patterns.
        """
        r = {}
        if nodes and self.match(nodes[0], r):
            yield 1, r


class LeafPattern(BasePattern):

    def __init__(self, type=None, content=None, name=None):
        """
        Initializer.  Takes optional type, content, and name.

        The type, if given, must be a token type (< 256).  If not given,
        this matches any *leaf* node; the content may still be required.

        The content, if given, must be a string.

        If a name is given, the matching node is stored in the results
        dict under that key.
        """
        if type is not None:
            assert 0 <= type < 256, type
        if content is not None:
            assert isinstance(content, str), repr(content)
        self.type = type
        self.content = content
        self.name = name

    def match(self, node, results=None):
        """Override match() to insist on a leaf node."""
        if not isinstance(node, Leaf):
            return False
        return BasePattern.match(self, node, results)

    def _submatch(self, node, results=None):
        """
        Match the pattern's content to the node's children.

        This assumes the node type matches and self.content is not None.

        Returns True if it matches, False if not.

        If results is not None, it must be a dict which will be
        updated with the nodes matching named subpatterns.

        When returning False, the results dict may still be updated.
        """
        return self.content == node.value
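# Illustrative sketch (hypothetical helper, added for exposition): the
# concrete pattern classes composed by hand.  NodePattern, WildcardPattern
# and HUGE are defined below; those references resolve when the function is
# called.
def _demo_patterns():
    from . import pygram
    from .pgen2 import token
    syms = pygram.python_symbols
    stmt = Node(syms.expr_stmt, [Leaf(token.NAME, "x"),
                                 Leaf(token.EQUAL, "=", prefix=" "),
                                 Leaf(token.NUMBER, "1", prefix=" ")])
    pattern = NodePattern(syms.expr_stmt,
                          [LeafPattern(token.NAME),
                           LeafPattern(token.EQUAL),
                           LeafPattern(token.NUMBER, name="value")])
    results = {}
    assert pattern.match(stmt, results)
    assert results["value"].value == "1"
    # A wildcard equivalent to NAME* over a flat sequence of leaves:
    star = WildcardPattern([[LeafPattern(token.NAME)]], min=0, max=HUGE,
                           name="seq")
    names = [Leaf(token.NAME, ch) for ch in "abc"]
    results = {}
    assert star.match_seq(names, results) and len(results["seq"]) == 3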
class NodePattern(BasePattern):

    wildcards = False

    def __init__(self, type=None, content=None, name=None):
        """
        Initializer.  Takes optional type, content, and name.

        The type, if given, must be a symbol type (>= 256).  If the
        type is None this matches *any* single node (leaf or not),
        except if content is not None, in which case it only matches
        non-leaf nodes that also match the content pattern.

        The content, if not None, must be a sequence of Patterns that
        must match the node's children exactly.  If the content is
        given, the type must not be None.

        If a name is given, the matching node is stored in the results
        dict under that key.
        """
        if type is not None:
            assert type >= 256, type
        if content is not None:
            assert not isinstance(content, str), repr(content)
            content = list(content)
            for i, item in enumerate(content):
                assert isinstance(item, BasePattern), (i, item)
                if isinstance(item, WildcardPattern):
                    self.wildcards = True
        self.type = type
        self.content = content
        self.name = name

    def _submatch(self, node, results=None):
        """
        Match the pattern's content to the node's children.

        This assumes the node type matches and self.content is not None.

        Returns True if it matches, False if not.

        If results is not None, it must be a dict which will be
        updated with the nodes matching named subpatterns.

        When returning False, the results dict may still be updated.
        """
        if self.wildcards:
            for c, r in generate_matches(self.content, node.children):
                if c == len(node.children):
                    if results is not None:
                        results.update(r)
                    return True
            return False
        if len(self.content) != len(node.children):
            return False
        for subpattern, child in zip(self.content, node.children):
            if not subpattern.match(child, results):
                return False
        return True


class WildcardPattern(BasePattern):

    """
    A wildcard pattern can match zero or more nodes.

    This has all the flexibility needed to implement patterns like:

    .*      .+      .?      .{m,n}
    (a b c | d e | f)
    (...)*  (...)+  (...)?  (...){m,n}

    except it always uses non-greedy matching.
    """

    def __init__(self, content=None, min=0, max=HUGE, name=None):
        """
        Initializer.

        Args:
            content: optional sequence of subsequences of patterns;
                if absent, matches one node;
                if present, each subsequence is an alternative [*]
            min: optional minimum number of times to match, default 0
            max: optional maximum number of times to match, default HUGE
            name: optional name assigned to this match

        [*] Thus, if content is [[a, b, c], [d, e], [f, g, h]] this is
            equivalent to (a b c | d e | f g h); if content is None,
            this is equivalent to '.' in regular expression terms.
            The min and max parameters work as follows:
                min=0, max=maxint: .*
                min=1, max=maxint: .+
                min=0, max=1: .?
                min=1, max=1: .
            If content is not None, replace the dot with the parenthesized
            list of alternatives, e.g. (a b c | d e | f g h)*
        """
        assert 0 <= min <= max <= HUGE, (min, max)
        if content is not None:
            content = tuple(map(tuple, content))  # Protect against alterations
            # Check sanity of alternatives
            assert len(content), repr(content)  # Can't have zero alternatives
            for alt in content:
                assert len(alt), repr(alt)  # Can't have empty alternatives
        self.content = content
        self.min = min
        self.max = max
        self.name = name

    def optimize(self):
        """Optimize certain stacked wildcard patterns."""
        subpattern = None
        if (self.content is not None and
                len(self.content) == 1 and len(self.content[0]) == 1):
            subpattern = self.content[0][0]
        if self.min == 1 and self.max == 1:
            if self.content is None:
                return NodePattern(name=self.name)
            if subpattern is not None and self.name == subpattern.name:
                return subpattern.optimize()
        if (self.min <= 1 and isinstance(subpattern, WildcardPattern) and
                subpattern.min <= 1 and self.name == subpattern.name):
            return WildcardPattern(subpattern.content,
                                   self.min*subpattern.min,
                                   self.max*subpattern.max,
                                   subpattern.name)
        return self

    def match(self, node, results=None):
        """Does this pattern exactly match a node?"""
        return self.match_seq([node], results)

    def match_seq(self, nodes, results=None):
        """Does this pattern exactly match a sequence of nodes?"""
        for c, r in self.generate_matches(nodes):
            if c == len(nodes):
                if results is not None:
                    results.update(r)
                    if self.name:
                        results[self.name] = list(nodes)
                return True
        return False

    def generate_matches(self, nodes):
        """
        Generator yielding matches for a sequence of nodes.

        Args:
            nodes: sequence of nodes

        Yields:
            (count, results) tuples where:
            count: the match comprises nodes[:count];
            results: dict containing named submatches.
        """
        if self.content is None:
            # Shortcut for special case (see __init__.__doc__)
            for count in range(self.min, 1 + min(len(nodes), self.max)):
                r = {}
                if self.name:
                    r[self.name] = nodes[:count]
                yield count, r
        elif self.name == "bare_name":
            yield self._bare_name_matches(nodes)
        else:
            # The reason for this is that hitting the recursion limit usually
            # results in some ugly messages about how RuntimeErrors are being
            # ignored.  We only have to do this on CPython, though, because
            # other implementations don't have this nasty bug in the first
            # place.
            if hasattr(sys, "getrefcount"):
                save_stderr = sys.stderr
                sys.stderr = StringIO()
            try:
                for count, r in self._recursive_matches(nodes, 0):
                    if self.name:
                        r[self.name] = nodes[:count]
                    yield count, r
            except RuntimeError:
                # We fall back to the iterative pattern matching scheme if
                # the recursive scheme hits the recursion limit.
                for count, r in self._iterative_matches(nodes):
                    if self.name:
                        r[self.name] = nodes[:count]
                    yield count, r
            finally:
                if hasattr(sys, "getrefcount"):
                    sys.stderr = save_stderr

    def _iterative_matches(self, nodes):
        """Helper to iteratively yield the matches."""
        nodelen = len(nodes)
        if 0 >= self.min:
            yield 0, {}

        results = []
        # generate matches that use just one alt from self.content
        for alt in self.content:
            for c, r in generate_matches(alt, nodes):
                yield c, r
                results.append((c, r))

        # for each match, iterate down the nodes
        while results:
            new_results = []
            for c0, r0 in results:
                # stop if the entire set of nodes has been matched
                if c0 < nodelen and c0 <= self.max:
                    for alt in self.content:
                        for c1, r1 in generate_matches(alt, nodes[c0:]):
                            if c1 > 0:
                                r = {}
                                r.update(r0)
                                r.update(r1)
                                yield c0 + c1, r
                                new_results.append((c0 + c1, r))
            results = new_results

    def _bare_name_matches(self, nodes):
        """Special optimized matcher for bare_name."""
        count = 0
        r = {}
        done = False
        max = len(nodes)
        while not done and count < max:
            done = True
            for leaf in self.content:
                if leaf[0].match(nodes[count], r):
                    count += 1
                    done = False
                    break
        r[self.name] = nodes[:count]
        return count, r

    def _recursive_matches(self, nodes, count):
        """Helper to recursively yield the matches."""
        assert self.content is not None
        if count >= self.min:
            yield 0, {}
        if count < self.max:
            for alt in self.content:
                for c0, r0 in generate_matches(alt, nodes):
                    for c1, r1 in self._recursive_matches(nodes[c0:], count+1):
                        r = {}
                        r.update(r0)
                        r.update(r1)
                        yield c0 + c1, r


class NegatedPattern(BasePattern):

    def __init__(self, content=None):
        """
        Initializer.

        The argument is either a pattern or None.  If it is None, this
        only matches an empty sequence (effectively '$' in regex
        lingo).  If it is not None, this matches whenever the argument
        pattern doesn't have any matches.
        """
        if content is not None:
            assert isinstance(content, BasePattern), repr(content)
        self.content = content

    def match(self, node):
        # We never match a node in its entirety
        return False

    def match_seq(self, nodes):
        # We only match an empty sequence of nodes in its entirety
        return len(nodes) == 0

    def generate_matches(self, nodes):
        if self.content is None:
            # Return a match if there is an empty sequence
            if len(nodes) == 0:
                yield 0, {}
        else:
            # Return a match if the argument pattern has no matches
            for c, r in self.content.generate_matches(nodes):
                return
            yield 0, {}
def generate_matches(patterns, nodes):
    """
    Generator yielding matches for a sequence of patterns and nodes.

    Args:
        patterns: a sequence of patterns
        nodes: a sequence of nodes

    Yields:
        (count, results) tuples where:
        count: the entire sequence of patterns matches nodes[:count];
        results: dict containing named submatches.
    """
    if not patterns:
        yield 0, {}
    else:
        p, rest = patterns[0], patterns[1:]
        for c0, r0 in p.generate_matches(nodes):
            if not rest:
                yield c0, r0
            else:
                for c1, r1 in generate_matches(rest, nodes[c0:]):
                    r = {}
                    r.update(r0)
                    r.update(r1)
                    yield c0 + c1, r


# ===================== refactor.py =====================
# Copyright 2006 Google, Inc. All Rights Reserved.
# Licensed to PSF under a Contributor Agreement.

"""Refactoring framework.

Used as a main program, this can refactor any number of files and/or
recursively descend down directories.  Imported as a module, this
provides infrastructure to write your own refactoring tool.
"""

__author__ = "Guido van Rossum <guido@python.org>"

# Python imports
import os
import sys
import logging
import operator
import collections
import io
from itertools import chain

# Local imports
from .pgen2 import driver, tokenize, token
from .fixer_util import find_root
from . import pytree, pygram
from . import btm_matcher as bm


def get_all_fix_names(fixer_pkg, remove_prefix=True):
    """Return a sorted list of all available fix names in the given package."""
    pkg = __import__(fixer_pkg, [], [], ["*"])
    fixer_dir = os.path.dirname(pkg.__file__)
    fix_names = []
    for name in sorted(os.listdir(fixer_dir)):
        if name.startswith("fix_") and name.endswith(".py"):
            if remove_prefix:
                name = name[4:]
            fix_names.append(name[:-3])
    return fix_names


class _EveryNode(Exception):
    pass


def _get_head_types(pat):
    """ Accepts a pytree Pattern Node and returns a set
        of the pattern types which will match first. """
    if isinstance(pat, (pytree.NodePattern, pytree.LeafPattern)):
        # NodePatterns must either have no type and no content
        #   or a type and content -- so they don't get any farther
        # Always return leafs
        if pat.type is None:
            raise _EveryNode
        return {pat.type}

    if isinstance(pat, pytree.NegatedPattern):
        if pat.content:
            return _get_head_types(pat.content)
        raise _EveryNode  # Negated Patterns don't have a type

    if isinstance(pat, pytree.WildcardPattern):
        # Recurse on each node in content
        r = set()
        for p in pat.content:
            for x in p:
                r.update(_get_head_types(x))
        return r

    raise Exception("Oh no! I don't understand pattern %s" % (pat))


def _get_headnode_dict(fixer_list):
    """ Accepts a list of fixers and returns a dictionary
        of head node type --> fixer list.  """
    head_nodes = collections.defaultdict(list)
    every = []
    for fixer in fixer_list:
        if fixer.pattern:
            try:
                heads = _get_head_types(fixer.pattern)
            except _EveryNode:
                every.append(fixer)
            else:
                for node_type in heads:
                    head_nodes[node_type].append(fixer)
        else:
            if fixer._accept_type is not None:
                head_nodes[fixer._accept_type].append(fixer)
            else:
                every.append(fixer)
    for node_type in chain(pygram.python_grammar.symbol2number.values(),
                           pygram.python_grammar.tokens):
        head_nodes[node_type].extend(every)
    return dict(head_nodes)


def get_fixers_from_package(pkg_name):
    """
    Return the fully qualified names for fixers in the package pkg_name.
    """
    return [pkg_name + "." + fix_name
            for fix_name in get_all_fix_names(pkg_name, False)]


def _identity(obj):
    return obj
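# Illustrative sketch (hypothetical helper, added for exposition) of two of
# the module helpers; _detect_future_features is defined further down and
# resolves when the function is called.
def _demo_helpers():
    names = get_fixers_from_package("lib2to3.fixes")
    assert "lib2to3.fixes.fix_print" in names
    src = "from __future__ import print_function, division\nx = 1\n"
    assert _detect_future_features(src) == frozenset({"print_function",
                                                      "division"})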
if sys.version_info < (3, 0):
    import codecs
    _open_with_encoding = codecs.open
    # codecs.open doesn't translate newlines sadly.
    def _from_system_newlines(input):
        return input.replace("\r\n", "\n")
    def _to_system_newlines(input):
        if os.linesep != "\n":
            return input.replace("\n", os.linesep)
        else:
            return input
else:
    _open_with_encoding = open
    _from_system_newlines = _identity
    _to_system_newlines = _identity


def _detect_future_features(source):
    have_docstring = False
    gen = tokenize.generate_tokens(io.StringIO(source).readline)
    def advance():
        tok = next(gen)
        return tok[0], tok[1]
    ignore = frozenset({token.NEWLINE, tokenize.NL, token.COMMENT})
    features = set()
    try:
        while True:
            tp, value = advance()
            if tp in ignore:
                continue
            elif tp == token.STRING:
                if have_docstring:
                    break
                have_docstring = True
            elif tp == token.NAME and value == "from":
                tp, value = advance()
                if tp != token.NAME or value != "__future__":
                    break
                tp, value = advance()
                if tp != token.NAME or value != "import":
                    break
                tp, value = advance()
                if tp == token.OP and value == "(":
                    tp, value = advance()
                while tp == token.NAME:
                    features.add(value)
                    tp, value = advance()
                    if tp != token.OP or value != ",":
                        break
                    tp, value = advance()
            else:
                break
    except StopIteration:
        pass
    return frozenset(features)


class FixerError(Exception):
    """A fixer could not be loaded."""


class RefactoringTool(object):

    _default_options = {"print_function": False,
                        "write_unchanged_files": False}

    CLASS_PREFIX = "Fix"  # The prefix for fixer classes
    FILE_PREFIX = "fix_"  # The prefix for modules with a fixer within

    def __init__(self, fixer_names, options=None, explicit=None):
        """Initializer.

        Args:
            fixer_names: a list of fixers to import
            options: a dict with configuration.
            explicit: a list of fixers to run even if they are marked
                explicit.
        """
        self.fixers = fixer_names
        self.explicit = explicit or []
        self.options = self._default_options.copy()
        if options is not None:
            self.options.update(options)
        if self.options["print_function"]:
            self.grammar = pygram.python_grammar_no_print_statement
        else:
            self.grammar = pygram.python_grammar
        # When this is True, the refactor*() methods will call write_file()
        # for files processed even if they were not changed during
        # refactoring.  If and only if the refactor method's write parameter
        # was True.
        self.write_unchanged_files = self.options.get("write_unchanged_files")
        self.errors = []
        self.logger = logging.getLogger("RefactoringTool")
        self.fixer_log = []
        self.wrote = False
        self.driver = driver.Driver(self.grammar,
                                    convert=pytree.convert,
                                    logger=self.logger)
        self.pre_order, self.post_order = self.get_fixers()

        self.files = []  # List of files that were or should be modified

        self.BM = bm.BottomMatcher()
        self.bmi_pre_order = []  # Bottom Matcher incompatible fixers
        self.bmi_post_order = []

        for fixer in chain(self.post_order, self.pre_order):
            if fixer.BM_compatible:
                self.BM.add_fixer(fixer)
                # remove fixers that will be handled by the bottom-up
                # matcher
            elif fixer in self.pre_order:
                self.bmi_pre_order.append(fixer)
            elif fixer in self.post_order:
                self.bmi_post_order.append(fixer)

        self.bmi_pre_order_heads = _get_headnode_dict(self.bmi_pre_order)
        self.bmi_post_order_heads = _get_headnode_dict(self.bmi_post_order)

    def get_fixers(self):
        """Inspects the options to load the requested patterns and handlers.

        Returns:
          (pre_order, post_order), where pre_order is the list of fixers that
          want a pre-order AST traversal, and post_order is the list that
          want post-order traversal.
        """
        pre_order_fixers = []
        post_order_fixers = []
        for fix_mod_path in self.fixers:
            mod = __import__(fix_mod_path, {}, {}, ["*"])
            fix_name = fix_mod_path.rsplit(".", 1)[-1]
            if fix_name.startswith(self.FILE_PREFIX):
                fix_name = fix_name[len(self.FILE_PREFIX):]
            parts = fix_name.split("_")
            class_name = self.CLASS_PREFIX + "".join([p.title() for p in parts])
            try:
                fix_class = getattr(mod, class_name)
            except AttributeError:
                raise FixerError("Can't find %s.%s" % (fix_name, class_name))
            fixer = fix_class(self.options, self.fixer_log)
            if fixer.explicit and self.explicit is not True and \
                    fix_mod_path not in self.explicit:
                self.log_message("Skipping optional fixer: %s", fix_name)
                continue
            self.log_debug("Adding transformation: %s", fix_name)
            if fixer.order == "pre":
                pre_order_fixers.append(fixer)
            elif fixer.order == "post":
                post_order_fixers.append(fixer)
            else:
                raise FixerError("Illegal fixer order: %r" % fixer.order)

        key_func = operator.attrgetter("run_order")
        pre_order_fixers.sort(key=key_func)
        post_order_fixers.sort(key=key_func)
        return (pre_order_fixers, post_order_fixers)

    def log_error(self, msg, *args, **kwds):
        """Called when an error occurs."""
        raise

    def log_message(self, msg, *args):
        """Hook to log a message."""
        if args:
            msg = msg % args
        self.logger.info(msg)

    def log_debug(self, msg, *args):
        if args:
            msg = msg % args
        self.logger.debug(msg)

    def print_output(self, old_text, new_text, filename, equal):
        """Called with the old version, new version, and filename of a
        refactored file."""
        pass

    def refactor(self, items, write=False, doctests_only=False):
        """Refactor a list of files and directories."""

        for dir_or_file in items:
            if os.path.isdir(dir_or_file):
                self.refactor_dir(dir_or_file, write, doctests_only)
            else:
                self.refactor_file(dir_or_file, write, doctests_only)

    def refactor_dir(self, dir_name, write=False, doctests_only=False):
        """Descends down a directory and refactor every Python file found.

        Python files are assumed to have a .py extension.

        Files and subdirectories starting with '.' are skipped.
        """
        py_ext = os.extsep + "py"
        for dirpath, dirnames, filenames in os.walk(dir_name):
            self.log_debug("Descending into %s", dirpath)
            dirnames.sort()
            filenames.sort()
            for name in filenames:
                if (not name.startswith(".") and
                        os.path.splitext(name)[1] == py_ext):
                    fullname = os.path.join(dirpath, name)
                    self.refactor_file(fullname, write, doctests_only)
            # Modify dirnames in-place to remove subdirs with leading dots
            dirnames[:] = [dn for dn in dirnames if not dn.startswith(".")]

    def _read_python_source(self, filename):
        """
        Do our best to decode a Python source file correctly.
        """
        try:
            f = open(filename, "rb")
        except OSError as err:
            self.log_error("Can't open %s: %s", filename, err)
            return None, None
        try:
            encoding = tokenize.detect_encoding(f.readline)[0]
        finally:
            f.close()
        with _open_with_encoding(filename, "r", encoding=encoding) as f:
            return _from_system_newlines(f.read()), encoding
    def refactor_file(self, filename, write=False, doctests_only=False):
        """Refactors a file."""
        input, encoding = self._read_python_source(filename)
        if input is None:
            # Reading the file failed.
            return
        input += "\n"  # Silence certain parse errors
        if doctests_only:
            self.log_debug("Refactoring doctests in %s", filename)
            output = self.refactor_docstring(input, filename)
            if self.write_unchanged_files or output != input:
                self.processed_file(output, filename, input, write, encoding)
            else:
                self.log_debug("No doctest changes in %s", filename)
        else:
            tree = self.refactor_string(input, filename)
            if self.write_unchanged_files or (tree and tree.was_changed):
                # The [:-1] is to take off the \n we added earlier
                self.processed_file(str(tree)[:-1], filename,
                                    write=write, encoding=encoding)
            else:
                self.log_debug("No changes in %s", filename)

    def refactor_string(self, data, name):
        """Refactor a given input string.

        Args:
            data: a string holding the code to be refactored.
            name: a human-readable name for use in error/log messages.

        Returns:
            An AST corresponding to the refactored input stream; None if
            there were errors during the parse.
        """
        features = _detect_future_features(data)
        if "print_function" in features:
            self.driver.grammar = pygram.python_grammar_no_print_statement
        try:
            tree = self.driver.parse_string(data)
        except Exception as err:
            self.log_error("Can't parse %s: %s: %s",
                           name, err.__class__.__name__, err)
            return
        finally:
            self.driver.grammar = self.grammar
        tree.future_features = features
        self.log_debug("Refactoring %s", name)
        self.refactor_tree(tree, name)
        return tree

    def refactor_stdin(self, doctests_only=False):
        input = sys.stdin.read()
        if doctests_only:
            self.log_debug("Refactoring doctests in stdin")
            output = self.refactor_docstring(input, "<stdin>")
            if self.write_unchanged_files or output != input:
                self.processed_file(output, "<stdin>", input)
            else:
                self.log_debug("No doctest changes in stdin")
        else:
            tree = self.refactor_string(input, "<stdin>")
            if self.write_unchanged_files or (tree and tree.was_changed):
                self.processed_file(str(tree), "<stdin>", input)
            else:
                self.log_debug("No changes in stdin")

    def refactor_tree(self, tree, name):
        """Refactors a parse tree (modifying the tree in place).

        For compatible patterns the bottom matcher module is
        used. Otherwise the tree is traversed node-to-node for
        matches.

        Args:
            tree: a pytree.Node instance representing the root of the tree
                  to be refactored.
            name: a human-readable name for this tree.

        Returns:
            True if the tree was modified, False otherwise.
        """

        for fixer in chain(self.pre_order, self.post_order):
            fixer.start_tree(tree, name)

        # use traditional matching for the incompatible fixers
        self.traverse_by(self.bmi_pre_order_heads, tree.pre_order())
        self.traverse_by(self.bmi_post_order_heads, tree.post_order())

        # obtain a set of candidate nodes
        match_set = self.BM.run(tree.leaves())

        while any(match_set.values()):
            for fixer in self.BM.fixers:
                if fixer in match_set and match_set[fixer]:
                    # sort by depth; apply fixers from bottom (of the AST)
                    # to top
                    match_set[fixer].sort(key=pytree.Base.depth, reverse=True)

                    if fixer.keep_line_order:
                        # some fixers (eg fix_imports) must be applied
                        # with the original file's line order
                        match_set[fixer].sort(key=pytree.Base.get_lineno)

                    for node in list(match_set[fixer]):
                        if node in match_set[fixer]:
                            match_set[fixer].remove(node)

                        try:
                            find_root(node)
                        except ValueError:
                            # this node has been cut off from a
                            # previous transformation; skip
                            continue

                        if node.fixers_applied and fixer in node.fixers_applied:
                            # do not apply the same fixer again
                            continue

                        results = fixer.match(node)

                        if results:
                            new = fixer.transform(node, results)
                            if new is not None:
                                node.replace(new)
                                # new.fixers_applied.append(fixer)
                                for node in new.post_order():
                                    # do not apply the fixer again to
                                    # this or any subnode
                                    if not node.fixers_applied:
                                        node.fixers_applied = []
                                    node.fixers_applied.append(fixer)

                                # update the original match set for
                                # the added code
                                new_matches = self.BM.run(new.leaves())
                                for fxr in new_matches:
                                    if not fxr in match_set:
                                        match_set[fxr] = []
                                    match_set[fxr].extend(new_matches[fxr])

        for fixer in chain(self.pre_order, self.post_order):
            fixer.finish_tree(tree, name)
        return tree.was_changed

    def traverse_by(self, fixers, traversal):
        """Traverse an AST, applying a set of fixers to each node.

        This is a helper method for refactor_tree().

        Args:
            fixers: a list of fixer instances.
            traversal: a generator that yields AST nodes.

        Returns:
            None
        """
        if not fixers:
            return
        for node in traversal:
            for fixer in fixers[node.type]:
                results = fixer.match(node)
                if results:
                    new = fixer.transform(node, results)
                    if new is not None:
                        node.replace(new)
                        node = new

    def processed_file(self, new_text, filename, old_text=None, write=False,
                       encoding=None):
        """
        Called when a file has been refactored and there may be changes.
        """
        self.files.append(filename)
        if old_text is None:
            old_text = self._read_python_source(filename)[0]
            if old_text is None:
                return
        equal = old_text == new_text
        self.print_output(old_text, new_text, filename, equal)
        if equal:
            self.log_debug("No changes to %s", filename)
            if not self.write_unchanged_files:
                return
        if write:
            self.write_file(new_text, filename, old_text, encoding)
        else:
            self.log_debug("Not writing changes to %s", filename)

    def write_file(self, new_text, filename, old_text, encoding=None):
        """Writes a string to a file.

        It first shows a unified diff between the old text and the new text,
        and then rewrites the file; the latter is only done if the write
        option is set.
        """
        try:
            f = _open_with_encoding(filename, "w", encoding=encoding)
        except OSError as err:
            self.log_error("Can't create %s: %s", filename, err)
            return
        try:
            f.write(_to_system_newlines(new_text))
        except OSError as err:
            self.log_error("Can't write %s: %s", filename, err)
        finally:
            f.close()
        self.log_debug("Wrote changes to %s", filename)
        self.wrote = True
    PS1 = ">>> "
    PS2 = "... "

    def refactor_docstring(self, input, filename):
        """Refactors a docstring, looking for doctests.

        This returns a modified version of the input string.  It looks
        for doctests, which start with a ">>>" prompt, and may be
        continued with "..." prompts, as long as the "..." is indented
        the same as the ">>>".

        (Unfortunately we can't use the doctest module's parser,
        since, like most parsers, it is not geared towards preserving
        the original source.)
        """
        result = []
        block = None
        block_lineno = None
        indent = None
        lineno = 0
        for line in input.splitlines(keepends=True):
            lineno += 1
            if line.lstrip().startswith(self.PS1):
                if block is not None:
                    result.extend(self.refactor_doctest(block, block_lineno,
                                                        indent, filename))
                block_lineno = lineno
                block = [line]
                i = line.find(self.PS1)
                indent = line[:i]
            elif (indent is not None and
                  (line.startswith(indent + self.PS2) or
                   line == indent + self.PS2.rstrip() + "\n")):
                block.append(line)
            else:
                if block is not None:
                    result.extend(self.refactor_doctest(block, block_lineno,
                                                        indent, filename))
                block = None
                indent = None
                result.append(line)
        if block is not None:
            result.extend(self.refactor_doctest(block, block_lineno,
                                                indent, filename))
        return "".join(result)

    def refactor_doctest(self, block, lineno, indent, filename):
        """Refactors one doctest.

        A doctest is given as a block of lines, the first of which starts
        with ">>>" (possibly indented), while the remaining lines start
        with "..." (identically indented).
        """
        try:
            tree = self.parse_block(block, lineno, indent)
        except Exception as err:
            if self.logger.isEnabledFor(logging.DEBUG):
                for line in block:
                    self.log_debug("Source: %s", line.rstrip("\n"))
            self.log_error("Can't parse docstring in %s line %s: %s: %s",
                           filename, lineno, err.__class__.__name__, err)
            return block
        if self.refactor_tree(tree, filename):
            new = str(tree).splitlines(keepends=True)
            # Undo the adjustment of the line numbers in wrap_toks() below.
            clipped, new = new[:lineno-1], new[lineno-1:]
            assert clipped == ["\n"] * (lineno-1), clipped
            if not new[-1].endswith("\n"):
                new[-1] += "\n"
            block = [indent + self.PS1 + new.pop(0)]
            if new:
                block += [indent + self.PS2 + line for line in new]
        return block

    def summarize(self):
        if self.wrote:
            were = "were"
        else:
            were = "need to be"
        if not self.files:
            self.log_message("No files %s modified.", were)
        else:
            self.log_message("Files that %s modified:", were)
            for file in self.files:
                self.log_message(file)
        if self.fixer_log:
            self.log_message("Warnings/messages while refactoring:")
            for message in self.fixer_log:
                self.log_message(message)
        if self.errors:
            if len(self.errors) == 1:
                self.log_message("There was 1 error:")
            else:
                self.log_message("There were %d errors:", len(self.errors))
            for msg, args, kwds in self.errors:
                self.log_message(msg, *args, **kwds)

    def parse_block(self, block, lineno, indent):
        """Parses a block into a tree.

        This is necessary to get correct line number / offset information
        in the parser diagnostics and embedded into the parse tree.
        """
        tree = self.driver.parse_tokens(self.wrap_toks(block, lineno, indent))
        tree.future_features = frozenset()
        return tree

    def wrap_toks(self, block, lineno, indent):
        """Wraps a tokenize stream to systematically modify start/end."""
        tokens = tokenize.generate_tokens(self.gen_lines(block, indent).__next__)
        for type, value, (line0, col0), (line1, col1), line_text in tokens:
            line0 += lineno - 1
            line1 += lineno - 1
            # Don't bother updating the columns; this is too complicated
            # since line_text would also have to be updated and it would
            # still break for tokens spanning lines.  Let the user guess
            # that the column numbers for doctests are relative to the
            # end of the prompt string (PS1 or PS2).
            yield type, value, (line0, col0), (line1, col1), line_text
    def gen_lines(self, block, indent):
        """Generates lines as expected by tokenize from a list of lines.

        This strips the first len(indent + self.PS1) characters off each
        line.
        """
        prefix1 = indent + self.PS1
        prefix2 = indent + self.PS2
        prefix = prefix1
        for line in block:
            if line.startswith(prefix):
                yield line[len(prefix):]
            elif line == prefix.rstrip() + "\n":
                yield "\n"
            else:
                raise AssertionError("line=%r, prefix=%r" % (line, prefix))
            prefix = prefix2
        while True:
            yield ""


class MultiprocessingUnsupported(Exception):
    pass


class MultiprocessRefactoringTool(RefactoringTool):

    def __init__(self, *args, **kwargs):
        super(MultiprocessRefactoringTool, self).__init__(*args, **kwargs)
        self.queue = None
        self.output_lock = None

    def refactor(self, items, write=False, doctests_only=False,
                 num_processes=1):
        if num_processes == 1:
            return super(MultiprocessRefactoringTool, self).refactor(
                items, write, doctests_only)
        try:
            import multiprocessing
        except ImportError:
            raise MultiprocessingUnsupported
        if self.queue is not None:
            raise RuntimeError("already doing multiple processes")
        self.queue = multiprocessing.JoinableQueue()
        self.output_lock = multiprocessing.Lock()
        processes = [multiprocessing.Process(target=self._child)
                     for i in range(num_processes)]
        try:
            for p in processes:
                p.start()
            super(MultiprocessRefactoringTool, self).refactor(items, write,
                                                              doctests_only)
        finally:
            self.queue.join()
            for i in range(num_processes):
                self.queue.put(None)
            for p in processes:
                if p.is_alive():
                    p.join()
            self.queue = None

    def _child(self):
        task = self.queue.get()
        while task is not None:
            args, kwargs = task
            try:
                super(MultiprocessRefactoringTool, self).refactor_file(
                    *args, **kwargs)
            finally:
                self.queue.task_done()
            task = self.queue.get()

    def refactor_file(self, *args, **kwargs):
        if self.queue is not None:
            self.queue.put((args, kwargs))
        else:
            return super(MultiprocessRefactoringTool, self).refactor_file(
                *args, **kwargs)
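# Illustrative sketch (hypothetical helper, added for exposition): the usual
# programmatic entry point.  fix_print rewrites the Python 2 print statement.
def _demo_refactor_string():
    tool = RefactoringTool(["lib2to3.fixes.fix_print"])
    tree = tool.refactor_string("print 'hi'\n", "<demo>")
    assert str(tree) == "print('hi')\n"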