Current File : //usr/local/apps/python3/lib/python3.11/test/__pycache__/test_tokenize.cpython-311.pyc |
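One way to confirm which source file a __pycache__ entry like the one above was compiled from is to derive it from the cache path itself; a minimal sketch using only the standard library (the path is taken from the header and does not need to exist locally for the computation):

    import importlib.util

    pyc_path = "//usr/local/apps/python3/lib/python3.11/test/__pycache__/test_tokenize.cpython-311.pyc"
    # source_from_cache() reverses the PEP 3147 __pycache__ naming scheme.
    src_path = importlib.util.source_from_cache(pyc_path)
    print(src_path)  # //usr/local/apps/python3/lib/python3.11/test/test_tokenize.py

The same source path also appears verbatim inside the compiled module as the code objects' co_filename.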
# Readable source reconstructed from the compiled module above. The embedded
# co_filename points at /usr/local/apps/python3/lib/python3.11/test/test_tokenize.py,
# i.e. CPython 3.11's test suite for the tokenize module.

from test import support
from test.support import os_helper
from tokenize import (tokenize, _tokenize, untokenize, NUMBER, NAME, OP,
                      STRING, ENDMARKER, ENCODING, tok_name, detect_encoding,
                      open as tokenize_open, Untokenizer, generate_tokens,
                      NEWLINE, _generate_tokens_from_c_tokenizer, DEDENT)
from io import BytesIO, StringIO
import unittest
from textwrap import dedent
from unittest import TestCase, mock
from test.test_grammar import (VALID_UNDERSCORE_LITERALS,
                               INVALID_UNDERSCORE_LITERALS)
from test.support.script_helper import run_test_script, make_script
import os
import token

# Top-level layout, as recovered from the module bytecode: two helper
# functions, TokenizeTest, a subclass of it, nine further test classes, and a
# closing `if __name__ == "__main__": unittest.main()` guard.
def stringify_tokens_from_source(token_generator, source_string):
    # Render each token as "    TYPE       'string'       (srow, scol) (erow, ecol)",
    # stopping at ENDMARKER.
    result = []
    num_lines = len(source_string.splitlines())
    missing_trailing_nl = source_string[-1] not in '\r\n'

    for type, token, start, end, line in token_generator:
        if type == ENDMARKER:
            break
        # Ignore the synthetic NEWLINE on the last line if the input lacks
        # a trailing newline.
        if missing_trailing_nl and type == NEWLINE and end[0] == num_lines:
            continue
        type = tok_name[type]
        result.append(f"    {type:10} {token!r:13} {start} {end}")

    return result


class TokenizeTest(TestCase):
    # Each test tokenizes a small source fragment and compares the formatted
    # token table against an expected multi-line string.

    def check_tokenize(self, s, expected):
        # Tokenize s and compare the table produced by
        # stringify_tokens_from_source() against expected. The implicit
        # ENCODING row is prepended here so individual tests need not repeat it.
        f = BytesIO(s.encode('utf-8'))
        result = stringify_tokens_from_source(tokenize(f.readline), s)
        self.assertEqual(result,
                         ["    ENCODING   'utf-8'       (0, 0) (0, 0)"] +
                         expected.rstrip().splitlines())
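With check_tokenize in place, an individual test only has to supply a source fragment and the expected token rows. The sketch below is an illustrative method in the same style, not taken verbatim from the file; the method name is made up, the column widths must line up with the :10/:13 fields used by stringify_tokens_from_source, and the ENCODING row and the synthetic NEWLINE of the unterminated last line are omitted because check_tokenize and the helper handle them:

    def test_basic_addition(self):  # hypothetical example, not from the original file
        self.check_tokenize("1 + 1", """\
    NUMBER     '1'           (1, 0) (1, 1)
    OP         '+'           (1, 2) (1, 3)
    NUMBER     '1'           (1, 4) (1, 5)
    """)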