site-packages/six.pyc
site-packages/easy_install.pyo
site-packages/six.py
# Copyright (c) 2010-2017 Benjamin Peterson # # Permission is hereby granted, free of charge, to any person obtaining a copy # of this software and associated documentation files (the "Software"), to deal # in the Software without restriction, including without limitation the rights # to use, copy, modify, merge, publish, distribute, sublicense, and/or sell # copies of the Software, and to permit persons to whom the Software is # furnished to do so, subject to the following conditions: # # The above copyright notice and this permission notice shall be included in all # copies or substantial portions of the Software. # # THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR # IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, # FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.
IN NO EVENT SHALL THE # AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER # LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, # OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE # SOFTWARE. """Utilities for writing code that runs on Python 2 and 3""" from __future__ import absolute_import import functools import itertools import operator import sys import types __author__ = "Benjamin Peterson " __version__ = "1.11.0" # Useful for very coarse version differentiation. PY2 = sys.version_info[0] == 2 PY3 = sys.version_info[0] == 3 PY34 = sys.version_info[0:2] >= (3, 4) if PY3: string_types = str, integer_types = int, class_types = type, text_type = str binary_type = bytes MAXSIZE = sys.maxsize else: string_types = basestring, integer_types = (int, long) class_types = (type, types.ClassType) text_type = unicode binary_type = str if sys.platform.startswith("java"): # Jython always uses 32 bits. MAXSIZE = int((1 << 31) - 1) else: # It's possible to have sizeof(long) != sizeof(Py_ssize_t). class X(object): def __len__(self): return 1 << 31 try: len(X()) except OverflowError: # 32-bit MAXSIZE = int((1 << 31) - 1) else: # 64-bit MAXSIZE = int((1 << 63) - 1) del X def _add_doc(func, doc): """Add documentation to a function.""" func.__doc__ = doc def _import_module(name): """Import module, returning the module after the last dot.""" __import__(name) return sys.modules[name] class _LazyDescr(object): def __init__(self, name): self.name = name def __get__(self, obj, tp): result = self._resolve() setattr(obj, self.name, result) # Invokes __set__. try: # This is a bit ugly, but it avoids running this again by # removing this descriptor. delattr(obj.__class__, self.name) except AttributeError: pass return result class MovedModule(_LazyDescr): def __init__(self, name, old, new=None): super(MovedModule, self).__init__(name) if PY3: if new is None: new = name self.mod = new else: self.mod = old def _resolve(self): return _import_module(self.mod) def __getattr__(self, attr): _module = self._resolve() value = getattr(_module, attr) setattr(self, attr, value) return value class _LazyModule(types.ModuleType): def __init__(self, name): super(_LazyModule, self).__init__(name) self.__doc__ = self.__class__.__doc__ def __dir__(self): attrs = ["__doc__", "__name__"] attrs += [attr.name for attr in self._moved_attributes] return attrs # Subclasses should override this _moved_attributes = [] class MovedAttribute(_LazyDescr): def __init__(self, name, old_mod, new_mod, old_attr=None, new_attr=None): super(MovedAttribute, self).__init__(name) if PY3: if new_mod is None: new_mod = name self.mod = new_mod if new_attr is None: if old_attr is None: new_attr = name else: new_attr = old_attr self.attr = new_attr else: self.mod = old_mod if old_attr is None: old_attr = name self.attr = old_attr def _resolve(self): module = _import_module(self.mod) return getattr(module, self.attr) class _SixMetaPathImporter(object): """ A meta path importer to import six.moves and its submodules. This class implements a PEP302 finder and loader. It should be compatible with Python 2.5 and all existing versions of Python3 """ def __init__(self, six_module_name): self.name = six_module_name self.known_modules = {} def _add_module(self, mod, *fullnames): for fullname in fullnames: self.known_modules[self.name + "." + fullname] = mod def _get_module(self, fullname): return self.known_modules[self.name + "." 
+ fullname] def find_module(self, fullname, path=None): if fullname in self.known_modules: return self return None def __get_module(self, fullname): try: return self.known_modules[fullname] except KeyError: raise ImportError("This loader does not know module " + fullname) def load_module(self, fullname): try: # in case of a reload return sys.modules[fullname] except KeyError: pass mod = self.__get_module(fullname) if isinstance(mod, MovedModule): mod = mod._resolve() else: mod.__loader__ = self sys.modules[fullname] = mod return mod def is_package(self, fullname): """ Return true, if the named module is a package. We need this method to get correct spec objects with Python 3.4 (see PEP451) """ return hasattr(self.__get_module(fullname), "__path__") def get_code(self, fullname): """Return None Required, if is_package is implemented""" self.__get_module(fullname) # eventually raises ImportError return None get_source = get_code # same as get_code _importer = _SixMetaPathImporter(__name__) class _MovedItems(_LazyModule): """Lazy loading of moved objects""" __path__ = [] # mark as package _moved_attributes = [ MovedAttribute("cStringIO", "cStringIO", "io", "StringIO"), MovedAttribute("filter", "itertools", "builtins", "ifilter", "filter"), MovedAttribute("filterfalse", "itertools", "itertools", "ifilterfalse", "filterfalse"), MovedAttribute("input", "__builtin__", "builtins", "raw_input", "input"), MovedAttribute("intern", "__builtin__", "sys"), MovedAttribute("map", "itertools", "builtins", "imap", "map"), MovedAttribute("getcwd", "os", "os", "getcwdu", "getcwd"), MovedAttribute("getcwdb", "os", "os", "getcwd", "getcwdb"), MovedAttribute("getoutput", "commands", "subprocess"), MovedAttribute("range", "__builtin__", "builtins", "xrange", "range"), MovedAttribute("reload_module", "__builtin__", "importlib" if PY34 else "imp", "reload"), MovedAttribute("reduce", "__builtin__", "functools"), MovedAttribute("shlex_quote", "pipes", "shlex", "quote"), MovedAttribute("StringIO", "StringIO", "io"), MovedAttribute("UserDict", "UserDict", "collections"), MovedAttribute("UserList", "UserList", "collections"), MovedAttribute("UserString", "UserString", "collections"), MovedAttribute("xrange", "__builtin__", "builtins", "xrange", "range"), MovedAttribute("zip", "itertools", "builtins", "izip", "zip"), MovedAttribute("zip_longest", "itertools", "itertools", "izip_longest", "zip_longest"), MovedModule("builtins", "__builtin__"), MovedModule("configparser", "ConfigParser"), MovedModule("copyreg", "copy_reg"), MovedModule("dbm_gnu", "gdbm", "dbm.gnu"), MovedModule("_dummy_thread", "dummy_thread", "_dummy_thread"), MovedModule("http_cookiejar", "cookielib", "http.cookiejar"), MovedModule("http_cookies", "Cookie", "http.cookies"), MovedModule("html_entities", "htmlentitydefs", "html.entities"), MovedModule("html_parser", "HTMLParser", "html.parser"), MovedModule("http_client", "httplib", "http.client"), MovedModule("email_mime_base", "email.MIMEBase", "email.mime.base"), MovedModule("email_mime_image", "email.MIMEImage", "email.mime.image"), MovedModule("email_mime_multipart", "email.MIMEMultipart", "email.mime.multipart"), MovedModule("email_mime_nonmultipart", "email.MIMENonMultipart", "email.mime.nonmultipart"), MovedModule("email_mime_text", "email.MIMEText", "email.mime.text"), MovedModule("BaseHTTPServer", "BaseHTTPServer", "http.server"), MovedModule("CGIHTTPServer", "CGIHTTPServer", "http.server"), MovedModule("SimpleHTTPServer", "SimpleHTTPServer", "http.server"), MovedModule("cPickle", "cPickle", 
"pickle"), MovedModule("queue", "Queue"), MovedModule("reprlib", "repr"), MovedModule("socketserver", "SocketServer"), MovedModule("_thread", "thread", "_thread"), MovedModule("tkinter", "Tkinter"), MovedModule("tkinter_dialog", "Dialog", "tkinter.dialog"), MovedModule("tkinter_filedialog", "FileDialog", "tkinter.filedialog"), MovedModule("tkinter_scrolledtext", "ScrolledText", "tkinter.scrolledtext"), MovedModule("tkinter_simpledialog", "SimpleDialog", "tkinter.simpledialog"), MovedModule("tkinter_tix", "Tix", "tkinter.tix"), MovedModule("tkinter_ttk", "ttk", "tkinter.ttk"), MovedModule("tkinter_constants", "Tkconstants", "tkinter.constants"), MovedModule("tkinter_dnd", "Tkdnd", "tkinter.dnd"), MovedModule("tkinter_colorchooser", "tkColorChooser", "tkinter.colorchooser"), MovedModule("tkinter_commondialog", "tkCommonDialog", "tkinter.commondialog"), MovedModule("tkinter_tkfiledialog", "tkFileDialog", "tkinter.filedialog"), MovedModule("tkinter_font", "tkFont", "tkinter.font"), MovedModule("tkinter_messagebox", "tkMessageBox", "tkinter.messagebox"), MovedModule("tkinter_tksimpledialog", "tkSimpleDialog", "tkinter.simpledialog"), MovedModule("urllib_parse", __name__ + ".moves.urllib_parse", "urllib.parse"), MovedModule("urllib_error", __name__ + ".moves.urllib_error", "urllib.error"), MovedModule("urllib", __name__ + ".moves.urllib", __name__ + ".moves.urllib"), MovedModule("urllib_robotparser", "robotparser", "urllib.robotparser"), MovedModule("xmlrpc_client", "xmlrpclib", "xmlrpc.client"), MovedModule("xmlrpc_server", "SimpleXMLRPCServer", "xmlrpc.server"), ] # Add windows specific modules. if sys.platform == "win32": _moved_attributes += [ MovedModule("winreg", "_winreg"), ] for attr in _moved_attributes: setattr(_MovedItems, attr.name, attr) if isinstance(attr, MovedModule): _importer._add_module(attr, "moves." 
+ attr.name) del attr _MovedItems._moved_attributes = _moved_attributes moves = _MovedItems(__name__ + ".moves") _importer._add_module(moves, "moves") class Module_six_moves_urllib_parse(_LazyModule): """Lazy loading of moved objects in six.moves.urllib_parse""" _urllib_parse_moved_attributes = [ MovedAttribute("ParseResult", "urlparse", "urllib.parse"), MovedAttribute("SplitResult", "urlparse", "urllib.parse"), MovedAttribute("parse_qs", "urlparse", "urllib.parse"), MovedAttribute("parse_qsl", "urlparse", "urllib.parse"), MovedAttribute("urldefrag", "urlparse", "urllib.parse"), MovedAttribute("urljoin", "urlparse", "urllib.parse"), MovedAttribute("urlparse", "urlparse", "urllib.parse"), MovedAttribute("urlsplit", "urlparse", "urllib.parse"), MovedAttribute("urlunparse", "urlparse", "urllib.parse"), MovedAttribute("urlunsplit", "urlparse", "urllib.parse"), MovedAttribute("quote", "urllib", "urllib.parse"), MovedAttribute("quote_plus", "urllib", "urllib.parse"), MovedAttribute("unquote", "urllib", "urllib.parse"), MovedAttribute("unquote_plus", "urllib", "urllib.parse"), MovedAttribute("unquote_to_bytes", "urllib", "urllib.parse", "unquote", "unquote_to_bytes"), MovedAttribute("urlencode", "urllib", "urllib.parse"), MovedAttribute("splitquery", "urllib", "urllib.parse"), MovedAttribute("splittag", "urllib", "urllib.parse"), MovedAttribute("splituser", "urllib", "urllib.parse"), MovedAttribute("splitvalue", "urllib", "urllib.parse"), MovedAttribute("uses_fragment", "urlparse", "urllib.parse"), MovedAttribute("uses_netloc", "urlparse", "urllib.parse"), MovedAttribute("uses_params", "urlparse", "urllib.parse"), MovedAttribute("uses_query", "urlparse", "urllib.parse"), MovedAttribute("uses_relative", "urlparse", "urllib.parse"), ] for attr in _urllib_parse_moved_attributes: setattr(Module_six_moves_urllib_parse, attr.name, attr) del attr Module_six_moves_urllib_parse._moved_attributes = _urllib_parse_moved_attributes _importer._add_module(Module_six_moves_urllib_parse(__name__ + ".moves.urllib_parse"), "moves.urllib_parse", "moves.urllib.parse") class Module_six_moves_urllib_error(_LazyModule): """Lazy loading of moved objects in six.moves.urllib_error""" _urllib_error_moved_attributes = [ MovedAttribute("URLError", "urllib2", "urllib.error"), MovedAttribute("HTTPError", "urllib2", "urllib.error"), MovedAttribute("ContentTooShortError", "urllib", "urllib.error"), ] for attr in _urllib_error_moved_attributes: setattr(Module_six_moves_urllib_error, attr.name, attr) del attr Module_six_moves_urllib_error._moved_attributes = _urllib_error_moved_attributes _importer._add_module(Module_six_moves_urllib_error(__name__ + ".moves.urllib.error"), "moves.urllib_error", "moves.urllib.error") class Module_six_moves_urllib_request(_LazyModule): """Lazy loading of moved objects in six.moves.urllib_request""" _urllib_request_moved_attributes = [ MovedAttribute("urlopen", "urllib2", "urllib.request"), MovedAttribute("install_opener", "urllib2", "urllib.request"), MovedAttribute("build_opener", "urllib2", "urllib.request"), MovedAttribute("pathname2url", "urllib", "urllib.request"), MovedAttribute("url2pathname", "urllib", "urllib.request"), MovedAttribute("getproxies", "urllib", "urllib.request"), MovedAttribute("Request", "urllib2", "urllib.request"), MovedAttribute("OpenerDirector", "urllib2", "urllib.request"), MovedAttribute("HTTPDefaultErrorHandler", "urllib2", "urllib.request"), MovedAttribute("HTTPRedirectHandler", "urllib2", "urllib.request"), MovedAttribute("HTTPCookieProcessor", "urllib2", 
"urllib.request"), MovedAttribute("ProxyHandler", "urllib2", "urllib.request"), MovedAttribute("BaseHandler", "urllib2", "urllib.request"), MovedAttribute("HTTPPasswordMgr", "urllib2", "urllib.request"), MovedAttribute("HTTPPasswordMgrWithDefaultRealm", "urllib2", "urllib.request"), MovedAttribute("AbstractBasicAuthHandler", "urllib2", "urllib.request"), MovedAttribute("HTTPBasicAuthHandler", "urllib2", "urllib.request"), MovedAttribute("ProxyBasicAuthHandler", "urllib2", "urllib.request"), MovedAttribute("AbstractDigestAuthHandler", "urllib2", "urllib.request"), MovedAttribute("HTTPDigestAuthHandler", "urllib2", "urllib.request"), MovedAttribute("ProxyDigestAuthHandler", "urllib2", "urllib.request"), MovedAttribute("HTTPHandler", "urllib2", "urllib.request"), MovedAttribute("HTTPSHandler", "urllib2", "urllib.request"), MovedAttribute("FileHandler", "urllib2", "urllib.request"), MovedAttribute("FTPHandler", "urllib2", "urllib.request"), MovedAttribute("CacheFTPHandler", "urllib2", "urllib.request"), MovedAttribute("UnknownHandler", "urllib2", "urllib.request"), MovedAttribute("HTTPErrorProcessor", "urllib2", "urllib.request"), MovedAttribute("urlretrieve", "urllib", "urllib.request"), MovedAttribute("urlcleanup", "urllib", "urllib.request"), MovedAttribute("URLopener", "urllib", "urllib.request"), MovedAttribute("FancyURLopener", "urllib", "urllib.request"), MovedAttribute("proxy_bypass", "urllib", "urllib.request"), MovedAttribute("parse_http_list", "urllib2", "urllib.request"), MovedAttribute("parse_keqv_list", "urllib2", "urllib.request"), ] for attr in _urllib_request_moved_attributes: setattr(Module_six_moves_urllib_request, attr.name, attr) del attr Module_six_moves_urllib_request._moved_attributes = _urllib_request_moved_attributes _importer._add_module(Module_six_moves_urllib_request(__name__ + ".moves.urllib.request"), "moves.urllib_request", "moves.urllib.request") class Module_six_moves_urllib_response(_LazyModule): """Lazy loading of moved objects in six.moves.urllib_response""" _urllib_response_moved_attributes = [ MovedAttribute("addbase", "urllib", "urllib.response"), MovedAttribute("addclosehook", "urllib", "urllib.response"), MovedAttribute("addinfo", "urllib", "urllib.response"), MovedAttribute("addinfourl", "urllib", "urllib.response"), ] for attr in _urllib_response_moved_attributes: setattr(Module_six_moves_urllib_response, attr.name, attr) del attr Module_six_moves_urllib_response._moved_attributes = _urllib_response_moved_attributes _importer._add_module(Module_six_moves_urllib_response(__name__ + ".moves.urllib.response"), "moves.urllib_response", "moves.urllib.response") class Module_six_moves_urllib_robotparser(_LazyModule): """Lazy loading of moved objects in six.moves.urllib_robotparser""" _urllib_robotparser_moved_attributes = [ MovedAttribute("RobotFileParser", "robotparser", "urllib.robotparser"), ] for attr in _urllib_robotparser_moved_attributes: setattr(Module_six_moves_urllib_robotparser, attr.name, attr) del attr Module_six_moves_urllib_robotparser._moved_attributes = _urllib_robotparser_moved_attributes _importer._add_module(Module_six_moves_urllib_robotparser(__name__ + ".moves.urllib.robotparser"), "moves.urllib_robotparser", "moves.urllib.robotparser") class Module_six_moves_urllib(types.ModuleType): """Create a six.moves.urllib namespace that resembles the Python 3 namespace""" __path__ = [] # mark as package parse = _importer._get_module("moves.urllib_parse") error = _importer._get_module("moves.urllib_error") request = 
_importer._get_module("moves.urllib_request") response = _importer._get_module("moves.urllib_response") robotparser = _importer._get_module("moves.urllib_robotparser") def __dir__(self): return ['parse', 'error', 'request', 'response', 'robotparser'] _importer._add_module(Module_six_moves_urllib(__name__ + ".moves.urllib"), "moves.urllib") def add_move(move): """Add an item to six.moves.""" setattr(_MovedItems, move.name, move) def remove_move(name): """Remove item from six.moves.""" try: delattr(_MovedItems, name) except AttributeError: try: del moves.__dict__[name] except KeyError: raise AttributeError("no such move, %r" % (name,)) if PY3: _meth_func = "__func__" _meth_self = "__self__" _func_closure = "__closure__" _func_code = "__code__" _func_defaults = "__defaults__" _func_globals = "__globals__" else: _meth_func = "im_func" _meth_self = "im_self" _func_closure = "func_closure" _func_code = "func_code" _func_defaults = "func_defaults" _func_globals = "func_globals" try: advance_iterator = next except NameError: def advance_iterator(it): return it.next() next = advance_iterator try: callable = callable except NameError: def callable(obj): return any("__call__" in klass.__dict__ for klass in type(obj).__mro__) if PY3: def get_unbound_function(unbound): return unbound create_bound_method = types.MethodType def create_unbound_method(func, cls): return func Iterator = object else: def get_unbound_function(unbound): return unbound.im_func def create_bound_method(func, obj): return types.MethodType(func, obj, obj.__class__) def create_unbound_method(func, cls): return types.MethodType(func, None, cls) class Iterator(object): def next(self): return type(self).__next__(self) callable = callable _add_doc(get_unbound_function, """Get the function out of a possibly unbound function""") get_method_function = operator.attrgetter(_meth_func) get_method_self = operator.attrgetter(_meth_self) get_function_closure = operator.attrgetter(_func_closure) get_function_code = operator.attrgetter(_func_code) get_function_defaults = operator.attrgetter(_func_defaults) get_function_globals = operator.attrgetter(_func_globals) if PY3: def iterkeys(d, **kw): return iter(d.keys(**kw)) def itervalues(d, **kw): return iter(d.values(**kw)) def iteritems(d, **kw): return iter(d.items(**kw)) def iterlists(d, **kw): return iter(d.lists(**kw)) viewkeys = operator.methodcaller("keys") viewvalues = operator.methodcaller("values") viewitems = operator.methodcaller("items") else: def iterkeys(d, **kw): return d.iterkeys(**kw) def itervalues(d, **kw): return d.itervalues(**kw) def iteritems(d, **kw): return d.iteritems(**kw) def iterlists(d, **kw): return d.iterlists(**kw) viewkeys = operator.methodcaller("viewkeys") viewvalues = operator.methodcaller("viewvalues") viewitems = operator.methodcaller("viewitems") _add_doc(iterkeys, "Return an iterator over the keys of a dictionary.") _add_doc(itervalues, "Return an iterator over the values of a dictionary.") _add_doc(iteritems, "Return an iterator over the (key, value) pairs of a dictionary.") _add_doc(iterlists, "Return an iterator over the (key, [values]) pairs of a dictionary.") if PY3: def b(s): return s.encode("latin-1") def u(s): return s unichr = chr import struct int2byte = struct.Struct(">B").pack del struct byte2int = operator.itemgetter(0) indexbytes = operator.getitem iterbytes = iter import io StringIO = io.StringIO BytesIO = io.BytesIO _assertCountEqual = "assertCountEqual" if sys.version_info[1] <= 1: _assertRaisesRegex = "assertRaisesRegexp" _assertRegex = 
"assertRegexpMatches" else: _assertRaisesRegex = "assertRaisesRegex" _assertRegex = "assertRegex" else: def b(s): return s # Workaround for standalone backslash def u(s): return unicode(s.replace(r'\\', r'\\\\'), "unicode_escape") unichr = unichr int2byte = chr def byte2int(bs): return ord(bs[0]) def indexbytes(buf, i): return ord(buf[i]) iterbytes = functools.partial(itertools.imap, ord) import StringIO StringIO = BytesIO = StringIO.StringIO _assertCountEqual = "assertItemsEqual" _assertRaisesRegex = "assertRaisesRegexp" _assertRegex = "assertRegexpMatches" _add_doc(b, """Byte literal""") _add_doc(u, """Text literal""") def assertCountEqual(self, *args, **kwargs): return getattr(self, _assertCountEqual)(*args, **kwargs) def assertRaisesRegex(self, *args, **kwargs): return getattr(self, _assertRaisesRegex)(*args, **kwargs) def assertRegex(self, *args, **kwargs): return getattr(self, _assertRegex)(*args, **kwargs) if PY3: exec_ = getattr(moves.builtins, "exec") def reraise(tp, value, tb=None): try: if value is None: value = tp() if value.__traceback__ is not tb: raise value.with_traceback(tb) raise value finally: value = None tb = None else: def exec_(_code_, _globs_=None, _locs_=None): """Execute code in a namespace.""" if _globs_ is None: frame = sys._getframe(1) _globs_ = frame.f_globals if _locs_ is None: _locs_ = frame.f_locals del frame elif _locs_ is None: _locs_ = _globs_ exec("""exec _code_ in _globs_, _locs_""") exec_("""def reraise(tp, value, tb=None): try: raise tp, value, tb finally: tb = None """) if sys.version_info[:2] == (3, 2): exec_("""def raise_from(value, from_value): try: if from_value is None: raise value raise value from from_value finally: value = None """) elif sys.version_info[:2] > (3, 2): exec_("""def raise_from(value, from_value): try: raise value from from_value finally: value = None """) else: def raise_from(value, from_value): raise value print_ = getattr(moves.builtins, "print", None) if print_ is None: def print_(*args, **kwargs): """The new-style print function for Python 2.4 and 2.5.""" fp = kwargs.pop("file", sys.stdout) if fp is None: return def write(data): if not isinstance(data, basestring): data = str(data) # If the file has an encoding, encode unicode with it. 
if (isinstance(fp, file) and isinstance(data, unicode) and fp.encoding is not None): errors = getattr(fp, "errors", None) if errors is None: errors = "strict" data = data.encode(fp.encoding, errors) fp.write(data) want_unicode = False sep = kwargs.pop("sep", None) if sep is not None: if isinstance(sep, unicode): want_unicode = True elif not isinstance(sep, str): raise TypeError("sep must be None or a string") end = kwargs.pop("end", None) if end is not None: if isinstance(end, unicode): want_unicode = True elif not isinstance(end, str): raise TypeError("end must be None or a string") if kwargs: raise TypeError("invalid keyword arguments to print()") if not want_unicode: for arg in args: if isinstance(arg, unicode): want_unicode = True break if want_unicode: newline = unicode("\n") space = unicode(" ") else: newline = "\n" space = " " if sep is None: sep = space if end is None: end = newline for i, arg in enumerate(args): if i: write(sep) write(arg) write(end) if sys.version_info[:2] < (3, 3): _print = print_ def print_(*args, **kwargs): fp = kwargs.get("file", sys.stdout) flush = kwargs.pop("flush", False) _print(*args, **kwargs) if flush and fp is not None: fp.flush() _add_doc(reraise, """Reraise an exception.""") if sys.version_info[0:2] < (3, 4): def wraps(wrapped, assigned=functools.WRAPPER_ASSIGNMENTS, updated=functools.WRAPPER_UPDATES): def wrapper(f): f = functools.wraps(wrapped, assigned, updated)(f) f.__wrapped__ = wrapped return f return wrapper else: wraps = functools.wraps def with_metaclass(meta, *bases): """Create a base class with a metaclass.""" # This requires a bit of explanation: the basic idea is to make a dummy # metaclass for one level of class instantiation that replaces itself with # the actual metaclass. class metaclass(type): def __new__(cls, name, this_bases, d): return meta(name, bases, d) @classmethod def __prepare__(cls, name, this_bases): return meta.__prepare__(name, bases) return type.__new__(metaclass, 'temporary_class', (), {}) def add_metaclass(metaclass): """Class decorator for creating a class with a metaclass.""" def wrapper(cls): orig_vars = cls.__dict__.copy() slots = orig_vars.get('__slots__') if slots is not None: if isinstance(slots, str): slots = [slots] for slots_var in slots: orig_vars.pop(slots_var) orig_vars.pop('__dict__', None) orig_vars.pop('__weakref__', None) return metaclass(cls.__name__, cls.__bases__, orig_vars) return wrapper def python_2_unicode_compatible(klass): """ A decorator that defines __unicode__ and __str__ methods under Python 2. Under Python 3 it does nothing. To support Python 2 and 3 with a single code base, define a __str__ method returning text and apply this decorator to the class. """ if PY2: if '__str__' not in klass.__dict__: raise ValueError("@python_2_unicode_compatible cannot be applied " "to %s because it doesn't define __str__()." % klass.__name__) klass.__unicode__ = klass.__str__ klass.__str__ = lambda self: self.__unicode__().encode('utf-8') return klass # Complete the moves implementation. # This code is at the end of this module to speed up module loading. # Turn this module into a package. __path__ = [] # required for PEP 302 and PEP 451 __package__ = __name__ # see PEP 366 @ReservedAssignment if globals().get("__spec__") is not None: __spec__.submodule_search_locations = [] # PEP 451 @UndefinedVariable # Remove other six meta path importers, since they cause problems. This can # happen if six is removed from sys.modules and then reloaded. (Setuptools does # this for some reason.) 
if sys.meta_path: for i, importer in enumerate(sys.meta_path): # Here's some real nastiness: Another "instance" of the six module might # be floating around. Therefore, we can't use isinstance() to check for # the six meta path importer, since the other six instance will have # inserted an importer with different class. if (type(importer).__name__ == "_SixMetaPathImporter" and importer.name == __name__): del sys.meta_path[i] break del i, importer # Finally, add the importer to the meta path import hook. sys.meta_path.append(_importer) PK Z2@site-packages/protobuf-3.5.0-py2.7.egg-info/dependency_links.txtnu[ PK Zcn  7site-packages/protobuf-3.5.0-py2.7.egg-info/SOURCES.txtnu[MANIFEST.in README.md setup.cfg setup.py google/__init__.py google/protobuf/__init__.py google/protobuf/any_pb2.py google/protobuf/any_test_pb2.py google/protobuf/api_pb2.py google/protobuf/descriptor.py google/protobuf/descriptor_database.py google/protobuf/descriptor_pb2.py google/protobuf/descriptor_pool.py google/protobuf/duration_pb2.py google/protobuf/empty_pb2.py google/protobuf/field_mask_pb2.py google/protobuf/json_format.py google/protobuf/map_proto2_unittest_pb2.py google/protobuf/map_unittest_pb2.py google/protobuf/message.py google/protobuf/message_factory.py google/protobuf/proto_builder.py google/protobuf/reflection.py google/protobuf/service.py google/protobuf/service_reflection.py google/protobuf/source_context_pb2.py google/protobuf/struct_pb2.py google/protobuf/symbol_database.py google/protobuf/test_messages_proto2_pb2.py google/protobuf/test_messages_proto3_pb2.py google/protobuf/text_encoding.py google/protobuf/text_format.py google/protobuf/timestamp_pb2.py google/protobuf/type_pb2.py google/protobuf/wrappers_pb2.py google/protobuf/compiler/__init__.py google/protobuf/compiler/plugin_pb2.py google/protobuf/internal/__init__.py google/protobuf/internal/_parameterized.py google/protobuf/internal/api_implementation.py google/protobuf/internal/containers.py google/protobuf/internal/decoder.py google/protobuf/internal/encoder.py google/protobuf/internal/enum_type_wrapper.py google/protobuf/internal/message_listener.py google/protobuf/internal/python_message.py google/protobuf/internal/testing_refleaks.py google/protobuf/internal/type_checkers.py google/protobuf/internal/well_known_types.py google/protobuf/internal/wire_format.py google/protobuf/pyext/__init__.py google/protobuf/pyext/cpp_message.py google/protobuf/pyext/python_pb2.py google/protobuf/util/__init__.py google/protobuf/util/json_format_proto3_pb2.py protobuf.egg-info/PKG-INFO protobuf.egg-info/SOURCES.txt protobuf.egg-info/dependency_links.txt protobuf.egg-info/namespace_packages.txt protobuf.egg-info/requires.txt protobuf.egg-info/top_level.txtPK Z{.9site-packages/protobuf-3.5.0-py2.7.egg-info/top_level.txtnu[google PK Z{.Bsite-packages/protobuf-3.5.0-py2.7.egg-info/namespace_packages.txtnu[google PK Z#%8site-packages/protobuf-3.5.0-py2.7.egg-info/requires.txtnu[six>=1.9 setuptools PK ZUh4site-packages/protobuf-3.5.0-py2.7.egg-info/PKG-INFOnu[Metadata-Version: 1.2 Name: protobuf Version: 3.5.0 Summary: Protocol Buffers Home-page: https://developers.google.com/protocol-buffers/ Maintainer: protobuf@googlegroups.com Maintainer-email: protobuf@googlegroups.com License: 3-Clause BSD License Download-URL: https://github.com/google/protobuf/releases Description: Protocol Buffers are Google's data interchange format Platform: UNKNOWN Classifier: Programming Language :: Python Classifier: Programming Language :: Python :: 2 Classifier: Programming 
Language :: Python :: 2.6 Classifier: Programming Language :: Python :: 2.7 Classifier: Programming Language :: Python :: 3 Classifier: Programming Language :: Python :: 3.3 Classifier: Programming Language :: Python :: 3.4 PK Z7_>site-packages/setuptools-39.0.1.dist-info/dependency_links.txtnu[https://files.pythonhosted.org/packages/source/c/certifi/certifi-2016.9.26.tar.gz#md5=baa81e951a29958563689d868ef1064d https://files.pythonhosted.org/packages/source/w/wincertstore/wincertstore-0.2.zip#md5=ae728f2f007185648d0c7a8679b361e2 PK ZImT`,,0site-packages/setuptools-39.0.1.dist-info/RECORDnu[easy_install.py,sha256=MDC9vt5AxDsXX5qcKlBz2TnW6Tpuv_AobnfhCJ9X3PM,126 pkg_resources/__init__.py,sha256=YQ4_WQnPztMsUy1yuvp7ZRBPK9IhOyhgosLpvkFso1I,103551 pkg_resources/py31compat.py,sha256=-ysVqoxLetAnL94uM0kHkomKQTC1JZLN2ZUjqUhMeKE,600 pkg_resources/_vendor/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 pkg_resources/_vendor/appdirs.py,sha256=tgGaL0m4Jo2VeuGfoOOifLv7a7oUEJu2n1vRkqoPw-0,22374 pkg_resources/_vendor/pyparsing.py,sha256=PifeLY3-WhIcBVzLtv0U4T_pwDtPruBhBCkg5vLqa28,229867 pkg_resources/_vendor/six.py,sha256=A6hdJZVjI3t_geebZ9BzUvwRrIXo0lfwzQlM2LcKyas,30098 pkg_resources/_vendor/packaging/__about__.py,sha256=zkcCPTN_6TcLW0Nrlg0176-R1QQ_WVPTm8sz1R4-HjM,720 pkg_resources/_vendor/packaging/__init__.py,sha256=_vNac5TrzwsrzbOFIbF-5cHqc_Y2aPT2D7zrIR06BOo,513 pkg_resources/_vendor/packaging/_compat.py,sha256=Vi_A0rAQeHbU-a9X0tt1yQm9RqkgQbDSxzRw8WlU9kA,860 pkg_resources/_vendor/packaging/_structures.py,sha256=RImECJ4c_wTlaTYYwZYLHEiebDMaAJmK1oPARhw1T5o,1416 pkg_resources/_vendor/packaging/markers.py,sha256=uEcBBtGvzqltgnArqb9c4RrcInXezDLos14zbBHhWJo,8248 pkg_resources/_vendor/packaging/requirements.py,sha256=SikL2UynbsT0qtY9ltqngndha_sfo0w6XGFhAhoSoaQ,4355 pkg_resources/_vendor/packaging/specifiers.py,sha256=SAMRerzO3fK2IkFZCaZkuwZaL_EGqHNOz4pni4vhnN0,28025 pkg_resources/_vendor/packaging/utils.py,sha256=3m6WvPm6NNxE8rkTGmn0r75B_GZSGg7ikafxHsBN1WA,421 pkg_resources/_vendor/packaging/version.py,sha256=OwGnxYfr2ghNzYx59qWIBkrK3SnB6n-Zfd1XaLpnnM0,11556 pkg_resources/extern/__init__.py,sha256=JUtlHHvlxHSNuB4pWqNjcx7n6kG-fwXg7qmJ2zNJlIY,2487 setuptools/__init__.py,sha256=WWIdCbFJnZ9fZoaWDN_x1vDA_Rkm-Sc15iKvPtIYKFs,5700 setuptools/archive_util.py,sha256=kw8Ib_lKjCcnPKNbS7h8HztRVK0d5RacU3r_KRdVnmM,6592 setuptools/build_meta.py,sha256=FllaKTr1vSJyiUeRjVJEZmeEaRzhYueNlimtcwaJba8,5671 setuptools/config.py,sha256=tVYBM3w1U_uBRRTOZydflxyZ_IrTJT5odlZz3cbuhSw,16381 setuptools/dep_util.py,sha256=fgixvC1R7sH3r13ktyf7N0FALoqEXL1cBarmNpSEoWg,935 setuptools/depends.py,sha256=hC8QIDcM3VDpRXvRVA6OfL9AaQfxvhxHcN_w6sAyNq8,5837 setuptools/dist.py,sha256=_wCSFiGqwyaOUTj0tBjqZF2bqW9aEVu4W1D4gmsveno,42514 setuptools/extension.py,sha256=uc6nHI-MxwmNCNPbUiBnybSyqhpJqjbhvOQ-emdvt_E,1729 setuptools/glibc.py,sha256=X64VvGPL2AbURKwYRsWJOXXGAYOiF_v2qixeTkAULuU,3146 setuptools/glob.py,sha256=Y-fpv8wdHZzv9DPCaGACpMSBWJ6amq_1e0R_i8_el4w,5207 setuptools/launch.py,sha256=sd7ejwhBocCDx_wG9rIs0OaZ8HtmmFU8ZC6IR_S0Lvg,787 setuptools/lib2to3_ex.py,sha256=t5e12hbR2pi9V4ezWDTB4JM-AISUnGOkmcnYHek3xjg,2013 setuptools/monkey.py,sha256=zZGTH7p0xeXQKLmEwJTPIE4m5m7fJeHoAsxyv5M8e_E,5789 setuptools/msvc.py,sha256=8EiV9ypb3EQJQssPcH1HZbdNsbRvqsFnJ7wPFEGwFIo,40877 setuptools/namespaces.py,sha256=F0Nrbv8KCT2OrO7rwa03om4N4GZKAlnce-rr-cgDQa8,3199 setuptools/package_index.py,sha256=RAmsgjp2rudp9UEuiVPCGZoBJi4oX_PpBTexBld-QIk,40153 setuptools/pep425tags.py,sha256=NuGMx1gGif7x6iYemh0LfgBr_FZF5GFORIbgmMdU8J4,10882 
setuptools/py27compat.py,sha256=3mwxRMDk5Q5O1rSXOERbQDXhFqwDJhhUitfMW_qpUCo,536 setuptools/py31compat.py,sha256=XuU1HCsGE_3zGvBRIhYw2iB-IhCFK4-Pxw_jMiqdNVk,1192 setuptools/py33compat.py,sha256=NKS84nl4LjLIoad6OQfgmygZn4mMvrok_b1N1tzebew,1182 setuptools/py36compat.py,sha256=VUDWxmu5rt4QHlGTRtAFu6W5jvfL6WBjeDAzeoBy0OM,2891 setuptools/sandbox.py,sha256=9UbwfEL5QY436oMI1LtFWohhoZ-UzwHvGyZjUH_qhkw,14276 setuptools/script (dev).tmpl,sha256=f7MR17dTkzaqkCMSVseyOCMVrPVSMdmTQsaB8cZzfuI,201 setuptools/script.tmpl,sha256=WGTt5piezO27c-Dbx6l5Q4T3Ff20A5z7872hv3aAhYY,138 setuptools/site-patch.py,sha256=BVt6yIrDMXJoflA5J6DJIcsJUfW_XEeVhOzelTTFDP4,2307 setuptools/ssl_support.py,sha256=YBDJsCZjSp62CWjxmSkke9kn9rhHHj25Cus6zhJRW3c,8492 setuptools/unicode_utils.py,sha256=NOiZ_5hD72A6w-4wVj8awHFM3n51Kmw1Ic_vx15XFqw,996 setuptools/version.py,sha256=og_cuZQb0QI6ukKZFfZWPlr1HgJBPPn2vO2m_bI9ZTE,144 setuptools/wheel.py,sha256=yF9usxMvpwnymV-oOo5mfDiv3E8jrKkbDEItT7_kjBs,7230 setuptools/windows_support.py,sha256=5GrfqSP2-dLGJoZTq2g6dCKkyQxxa2n5IQiXlJCoYEE,714 setuptools/_vendor/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 setuptools/_vendor/pyparsing.py,sha256=PifeLY3-WhIcBVzLtv0U4T_pwDtPruBhBCkg5vLqa28,229867 setuptools/_vendor/six.py,sha256=A6hdJZVjI3t_geebZ9BzUvwRrIXo0lfwzQlM2LcKyas,30098 setuptools/_vendor/packaging/__about__.py,sha256=zkcCPTN_6TcLW0Nrlg0176-R1QQ_WVPTm8sz1R4-HjM,720 setuptools/_vendor/packaging/__init__.py,sha256=_vNac5TrzwsrzbOFIbF-5cHqc_Y2aPT2D7zrIR06BOo,513 setuptools/_vendor/packaging/_compat.py,sha256=Vi_A0rAQeHbU-a9X0tt1yQm9RqkgQbDSxzRw8WlU9kA,860 setuptools/_vendor/packaging/_structures.py,sha256=RImECJ4c_wTlaTYYwZYLHEiebDMaAJmK1oPARhw1T5o,1416 setuptools/_vendor/packaging/markers.py,sha256=Gvpk9EY20yKaMTiKgQZ8yFEEpodqVgVYtfekoic1Yts,8239 setuptools/_vendor/packaging/requirements.py,sha256=t44M2HVWtr8phIz2OhnILzuGT3rTATaovctV1dpnVIg,4343 setuptools/_vendor/packaging/specifiers.py,sha256=SAMRerzO3fK2IkFZCaZkuwZaL_EGqHNOz4pni4vhnN0,28025 setuptools/_vendor/packaging/utils.py,sha256=3m6WvPm6NNxE8rkTGmn0r75B_GZSGg7ikafxHsBN1WA,421 setuptools/_vendor/packaging/version.py,sha256=OwGnxYfr2ghNzYx59qWIBkrK3SnB6n-Zfd1XaLpnnM0,11556 setuptools/command/__init__.py,sha256=NWzJ0A1BEengZpVeqUyWLNm2bk4P3F4iL5QUErHy7kA,594 setuptools/command/alias.py,sha256=KjpE0sz_SDIHv3fpZcIQK-sCkJz-SrC6Gmug6b9Nkc8,2426 setuptools/command/bdist_egg.py,sha256=RQ9h8BmSVpXKJQST3i_b_sm093Z-aCXbfMBEM2IrI-Q,18185 setuptools/command/bdist_rpm.py,sha256=B7l0TnzCGb-0nLlm6rS00jWLkojASwVmdhW2w5Qz_Ak,1508 setuptools/command/bdist_wininst.py,sha256=_6dz3lpB1tY200LxKPLM7qgwTCceOMgaWFF-jW2-pm0,637 setuptools/command/build_clib.py,sha256=bQ9aBr-5ZSO-9fGsGsDLz0mnnFteHUZnftVLkhvHDq0,4484 setuptools/command/build_ext.py,sha256=PCRAZ2xYnqyEof7EFNtpKYl0sZzT0qdKUNTH3sUdPqk,13173 setuptools/command/build_py.py,sha256=yWyYaaS9F3o9JbIczn064A5g1C5_UiKRDxGaTqYbtLE,9596 setuptools/command/develop.py,sha256=wKbOw2_qUvcDti2lZmtxbDmYb54yAAibExzXIvToz-A,8046 setuptools/command/dist_info.py,sha256=5t6kOfrdgALT-P3ogss6PF9k-Leyesueycuk3dUyZnI,960 setuptools/command/easy_install.py,sha256=TglOCC2inaNplGxmXCqHbb2SSt_5juqo2eDWmOCbQbw,87032 setuptools/command/egg_info.py,sha256=3b5Y3t_bl_zZRCkmlGi3igvRze9oOaxd-dVf2w1FBOc,24800 setuptools/command/install.py,sha256=a0EZpL_A866KEdhicTGbuyD_TYl1sykfzdrri-zazT4,4683 setuptools/command/install_egg_info.py,sha256=bMgeIeRiXzQ4DAGPV1328kcjwQjHjOWU4FngAWLV78Q,2203 setuptools/command/install_lib.py,sha256=11mxf0Ch12NsuYwS8PHwXBRvyh671QAM4cTRh7epzG0,3840 
setuptools/command/install_scripts.py,sha256=UD0rEZ6861mTYhIdzcsqKnUl8PozocXWl9VBQ1VTWnc,2439 setuptools/command/launcher manifest.xml,sha256=xlLbjWrB01tKC0-hlVkOKkiSPbzMml2eOPtJ_ucCnbE,628 setuptools/command/py36compat.py,sha256=SzjZcOxF7zdFUT47Zv2n7AM3H8koDys_0OpS-n9gIfc,4986 setuptools/command/register.py,sha256=bHlMm1qmBbSdahTOT8w6UhA-EgeQIz7p6cD-qOauaiI,270 setuptools/command/rotate.py,sha256=co5C1EkI7P0GGT6Tqz-T2SIj2LBJTZXYELpmao6d4KQ,2164 setuptools/command/saveopts.py,sha256=za7QCBcQimKKriWcoCcbhxPjUz30gSB74zuTL47xpP4,658 setuptools/command/sdist.py,sha256=obDTe2BmWt2PlnFPZZh7e0LWvemEsbCCO9MzhrTZjm8,6711 setuptools/command/setopt.py,sha256=NTWDyx-gjDF-txf4dO577s7LOzHVoKR0Mq33rFxaRr8,5085 setuptools/command/test.py,sha256=MeBAcXUePGjPKqjz4zvTrHatLvNsjlPFcagt3XnFYdk,9214 setuptools/command/upload.py,sha256=i1gfItZ3nQOn5FKXb8tLC2Kd7eKC8lWO4bdE6NqGpE4,1172 setuptools/command/upload_docs.py,sha256=oXiGplM_cUKLwE4CWWw98RzCufAu8tBhMC97GegFcms,7311 setuptools/extern/__init__.py,sha256=2eKMsBMwsZqolIcYBtLZU3t96s6xSTP4PTaNfM5P-I0,2499 setuptools-39.0.1.dist-info/LICENSE.txt,sha256=wyo6w5WvYyHv0ovnPQagDw22q4h9HCHU_sRhKNIFbVo,1078 setuptools-39.0.1.dist-info/METADATA,sha256=-TKj2ub7r8hqvK0ahNU1QcYhoZqbedJk9sh4bcvSJ-U,2905 setuptools-39.0.1.dist-info/RECORD,, setuptools-39.0.1.dist-info/WHEEL,sha256=gduuPyBvFJQSQ0zdyxF7k0zynDXbIbvg5ZBHoXum5uk,110 setuptools-39.0.1.dist-info/dependency_links.txt,sha256=HlkCFkoK5TbZ5EMLbLKYhLcY_E31kBWD8TqW2EgmatQ,239 setuptools-39.0.1.dist-info/entry_points.txt,sha256=4qf7zhnPUdPJqdy1qJ0J_5V0jUJcs5QS0aKgXGYhQQk,2990 setuptools-39.0.1.dist-info/top_level.txt,sha256=2HUXVVwA4Pff1xgTFr3GsTXXKaPaO6vlG6oNJ_4u4Tg,38 setuptools-39.0.1.dist-info/zip-safe,sha256=AbpHGcgLb-kRsJGnwFEktk7uzpZOCcBY74-YBdrKVGs,1 ../../../bin/easy_install,sha256=Zmfd1NX_Pap-GMT3mxrDmi-ycWjKLR523Ooes1-2CZU,234 ../../../bin/easy_install-2.7,sha256=Zmfd1NX_Pap-GMT3mxrDmi-ycWjKLR523Ooes1-2CZU,234 setuptools-39.0.1.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4 setuptools/ssl_support.pyc,, setuptools/extern/__init__.pyc,, setuptools/command/install_scripts.pyc,, setuptools/unicode_utils.pyc,, setuptools/_vendor/packaging/_structures.pyc,, pkg_resources/_vendor/packaging/_compat.pyc,, setuptools/msvc.pyc,, setuptools/command/dist_info.pyc,, setuptools/_vendor/packaging/__init__.pyc,, setuptools/command/bdist_wininst.pyc,, setuptools/_vendor/packaging/utils.pyc,, setuptools/command/upload.pyc,, setuptools/command/upload_docs.pyc,, setuptools/_vendor/packaging/markers.pyc,, setuptools/_vendor/packaging/_compat.pyc,, pkg_resources/py31compat.pyc,, setuptools/wheel.pyc,, setuptools/namespaces.pyc,, setuptools/_vendor/packaging/version.pyc,, pkg_resources/_vendor/six.pyc,, pkg_resources/_vendor/packaging/_structures.pyc,, setuptools/_vendor/packaging/requirements.pyc,, easy_install.pyc,, setuptools/_vendor/__init__.pyc,, setuptools/command/install_egg_info.pyc,, pkg_resources/_vendor/packaging/markers.pyc,, setuptools/site-patch.pyc,, setuptools/build_meta.pyc,, setuptools/windows_support.pyc,, setuptools/command/setopt.pyc,, setuptools/extension.pyc,, setuptools/command/bdist_egg.pyc,, setuptools/py31compat.pyc,, setuptools/dep_util.pyc,, setuptools/command/sdist.pyc,, setuptools/command/saveopts.pyc,, setuptools/command/egg_info.pyc,, pkg_resources/_vendor/packaging/__init__.pyc,, setuptools/pep425tags.pyc,, setuptools/command/install.pyc,, setuptools/command/alias.pyc,, setuptools/__init__.pyc,, setuptools/command/easy_install.pyc,, setuptools/py27compat.pyc,, 
pkg_resources/extern/__init__.pyc,, setuptools/command/build_py.pyc,, setuptools/command/test.pyc,, setuptools/command/build_ext.pyc,, setuptools/version.pyc,, setuptools/command/py36compat.pyc,, setuptools/glibc.pyc,, setuptools/dist.pyc,, setuptools/command/bdist_rpm.pyc,, setuptools/_vendor/six.pyc,, pkg_resources/_vendor/packaging/version.pyc,, pkg_resources/_vendor/packaging/utils.pyc,, setuptools/command/__init__.pyc,, setuptools/py33compat.pyc,, setuptools/archive_util.pyc,, pkg_resources/_vendor/packaging/__about__.pyc,, pkg_resources/__init__.pyc,, setuptools/py36compat.pyc,, pkg_resources/_vendor/__init__.pyc,, setuptools/_vendor/pyparsing.pyc,, setuptools/command/install_lib.pyc,, pkg_resources/_vendor/appdirs.pyc,, setuptools/_vendor/packaging/specifiers.pyc,, setuptools/lib2to3_ex.pyc,, setuptools/sandbox.pyc,, setuptools/command/develop.pyc,, pkg_resources/_vendor/packaging/requirements.pyc,, pkg_resources/_vendor/pyparsing.pyc,, setuptools/_vendor/packaging/__about__.pyc,, setuptools/glob.pyc,, pkg_resources/_vendor/packaging/specifiers.pyc,, setuptools/command/rotate.pyc,, setuptools/config.pyc,, setuptools/command/build_clib.pyc,, setuptools/depends.pyc,, setuptools/package_index.pyc,, setuptools/monkey.pyc,, setuptools/launch.pyc,, setuptools/command/register.pyc,, PK ZȑY Y 2site-packages/setuptools-39.0.1.dist-info/METADATAnu[Metadata-Version: 2.1 Name: setuptools Version: 39.0.1 Summary: Easily download, build, install, upgrade, and uninstall Python packages Home-page: https://github.com/pypa/setuptools Author: Python Packaging Authority Author-email: distutils-sig@python.org License: UNKNOWN Project-URL: Documentation, https://setuptools.readthedocs.io/ Keywords: CPAN PyPI distutils eggs package management Platform: UNKNOWN Classifier: Development Status :: 5 - Production/Stable Classifier: Intended Audience :: Developers Classifier: License :: OSI Approved :: MIT License Classifier: Operating System :: OS Independent Classifier: Programming Language :: Python :: 2 Classifier: Programming Language :: Python :: 2.7 Classifier: Programming Language :: Python :: 3 Classifier: Programming Language :: Python :: 3.3 Classifier: Programming Language :: Python :: 3.4 Classifier: Programming Language :: Python :: 3.5 Classifier: Programming Language :: Python :: 3.6 Classifier: Topic :: Software Development :: Libraries :: Python Modules Classifier: Topic :: System :: Archiving :: Packaging Classifier: Topic :: System :: Systems Administration Classifier: Topic :: Utilities Requires-Python: >=2.7,!=3.0.*,!=3.1.*,!=3.2.* Description-Content-Type: text/x-rst; charset=UTF-8 Provides-Extra: ssl Provides-Extra: certs Provides-Extra: certs Requires-Dist: certifi (==2016.9.26); extra == 'certs' Provides-Extra: ssl Requires-Dist: wincertstore (==0.2); (sys_platform=='win32') and extra == 'ssl' .. image:: https://img.shields.io/pypi/v/setuptools.svg :target: https://pypi.org/project/setuptools .. image:: https://readthedocs.org/projects/setuptools/badge/?version=latest :target: https://setuptools.readthedocs.io .. image:: https://img.shields.io/travis/pypa/setuptools/master.svg?label=Linux%20build%20%40%20Travis%20CI :target: https://travis-ci.org/pypa/setuptools .. image:: https://img.shields.io/appveyor/ci/jaraco/setuptools/master.svg?label=Windows%20build%20%40%20Appveyor :target: https://ci.appveyor.com/project/jaraco/setuptools/branch/master .. 
.. image:: https://img.shields.io/pypi/pyversions/setuptools.svg

See the `Installation Instructions `_ in the Python Packaging User's Guide for
instructions on installing, upgrading, and uninstalling Setuptools.

The project is `maintained at GitHub `_.

Questions and comments should be directed to the `distutils-sig mailing list `_.

Bug reports and especially tested patches may be submitted directly to the
`bug tracker `_.

Code of Conduct
---------------

Everyone interacting in the setuptools project's codebases, issue trackers, chat rooms,
and mailing lists is expected to follow the `PyPA Code of Conduct `_.

PK Z0\&&7site-packages/setuptools-39.0.1.dist-info/top_level.txtnu[
easy_install
pkg_resources
setuptools

PK ZP׫665site-packages/setuptools-39.0.1.dist-info/LICENSE.txtnu[
Copyright (C) 2016 Jason R Coombs

Permission is hereby granted, free of charge, to any person obtaining a copy of
this software and associated documentation files (the "Software"), to deal in
the Software without restriction, including without limitation the rights to
use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies
of the Software, and to permit persons to whom the Software is furnished to do
so, subject to the following conditions:

The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.

THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.
PK Z22site-packages/setuptools-39.0.1.dist-info/zip-safenu[

PK Znn/site-packages/setuptools-39.0.1.dist-info/WHEELnu[
Wheel-Version: 1.0
Generator: bdist_wheel (0.31.1)
Root-Is-Purelib: true
Tag: py2-none-any
Tag: py3-none-any

PK Zg&n :site-packages/setuptools-39.0.1.dist-info/entry_points.txtnu[
[console_scripts]
easy_install = setuptools.command.easy_install:main
easy_install-2.7 = setuptools.command.easy_install:main

[distutils.commands]
alias = setuptools.command.alias:alias
bdist_egg = setuptools.command.bdist_egg:bdist_egg
bdist_rpm = setuptools.command.bdist_rpm:bdist_rpm
bdist_wininst = setuptools.command.bdist_wininst:bdist_wininst
build_clib = setuptools.command.build_clib:build_clib
build_ext = setuptools.command.build_ext:build_ext
build_py = setuptools.command.build_py:build_py
develop = setuptools.command.develop:develop
dist_info = setuptools.command.dist_info:dist_info
easy_install = setuptools.command.easy_install:easy_install
egg_info = setuptools.command.egg_info:egg_info
install = setuptools.command.install:install
install_egg_info = setuptools.command.install_egg_info:install_egg_info
install_lib = setuptools.command.install_lib:install_lib
install_scripts = setuptools.command.install_scripts:install_scripts
register = setuptools.command.register:register
rotate = setuptools.command.rotate:rotate
saveopts = setuptools.command.saveopts:saveopts
sdist = setuptools.command.sdist:sdist
setopt = setuptools.command.setopt:setopt
test = setuptools.command.test:test
upload = setuptools.command.upload:upload
upload_docs = setuptools.command.upload_docs:upload_docs

[distutils.setup_keywords]
convert_2to3_doctests = setuptools.dist:assert_string_list
dependency_links = setuptools.dist:assert_string_list
eager_resources = setuptools.dist:assert_string_list
entry_points = setuptools.dist:check_entry_points
exclude_package_data = setuptools.dist:check_package_data
extras_require = setuptools.dist:check_extras
include_package_data = setuptools.dist:assert_bool
install_requires = setuptools.dist:check_requirements
namespace_packages = setuptools.dist:check_nsp
package_data = setuptools.dist:check_package_data
packages = setuptools.dist:check_packages
python_requires = setuptools.dist:check_specifier
setup_requires = setuptools.dist:check_requirements
test_loader = setuptools.dist:check_importable
test_runner = setuptools.dist:check_importable
test_suite = setuptools.dist:check_test_suite
tests_require = setuptools.dist:check_requirements
use_2to3 = setuptools.dist:assert_bool
use_2to3_exclude_fixers = setuptools.dist:assert_string_list
use_2to3_fixers = setuptools.dist:assert_string_list
zip_safe = setuptools.dist:assert_bool

[egg_info.writers]
PKG-INFO = setuptools.command.egg_info:write_pkg_info
dependency_links.txt = setuptools.command.egg_info:overwrite_arg
depends.txt = setuptools.command.egg_info:warn_depends_obsolete
eager_resources.txt = setuptools.command.egg_info:overwrite_arg
entry_points.txt = setuptools.command.egg_info:write_entries
namespace_packages.txt = setuptools.command.egg_info:overwrite_arg
requires.txt = setuptools.command.egg_info:write_requirements
top_level.txt = setuptools.command.egg_info:write_toplevel_names

[setuptools.installation]
eggsecutable = setuptools.command.easy_install:bootstrap

PK Z3site-packages/setuptools-39.0.1.dist-info/INSTALLERnu[
pip
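A brief usage sketch (not part of the archived files) of how the entries declared in the entry_points.txt entry above are resolved at runtime through the pkg_resources API that appears later in this archive; the 'sdist' command chosen here is only illustrative.

import pkg_resources

# Load the class registered for the 'sdist' command under [distutils.commands]
# of the installed 'setuptools' distribution.
sdist_cls = pkg_resources.load_entry_point('setuptools', 'distutils.commands', 'sdist')

# Enumerate the console scripts advertised by all installed distributions.
for ep in pkg_resources.iter_entry_points('console_scripts'):
    print(ep)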
PK ZG}2XX)site-packages/pkg_resources/py31compat.pynu[
import os
import errno
import sys


def _makedirs_31(path, exist_ok=False):
    try:
        os.makedirs(path)
    except OSError as exc:
        if not exist_ok or exc.errno != errno.EEXIST:
            raise


# rely on compatibility behavior until mode considerations
# and exists_ok considerations are disentangled.
# See https://github.com/pypa/setuptools/pull/1083#issuecomment-315168663
needs_makedirs = (
    sys.version_info < (3, 2, 5) or
    (3, 3) <= sys.version_info < (3, 3, 6) or
    (3, 4) <= sys.version_info < (3, 4, 1)
)
makedirs = _makedirs_31 if needs_makedirs else os.makedirs
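A minimal usage sketch of py31compat.makedirs(), assuming a hypothetical target path (not part of the archived files): with exist_ok=True the call tolerates an already-existing directory even on interpreters whose os.makedirs() lacks or mishandles that flag.

from pkg_resources import py31compat

# The first call creates the nested directories; the second is a no-op rather
# than raising OSError(EEXIST), because exist_ok=True is honoured either by
# os.makedirs itself or by the _makedirs_31 fallback selected above.
py31compat.makedirs('/tmp/example/nested/dir', exist_ok=True)
py31compat.makedirs('/tmp/example/nested/dir', exist_ok=True)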
PK Z A(site-packages/pkg_resources/__init__.pycnu[
[Compiled CPython 2.7 bytecode for pkg_resources/__init__.py; the binary contents cannot be rendered as text and are omitted here. The only readable fragments are the module's embedded docstrings, beginning with: "Package resource API -- A resource is a logical file contained within a package, or a logical subdirectory thereof. The package resource API expects resource names to have their path parts separated with '/', not whatever the local path separator is. Do not use os.path operations to manipulate resource names being passed into the API. The package resource API is designed to work with normal filesystem packages, .egg files, and unpacked .egg files. It can also work in a limited way with .zip files and with custom PEP 302 loaders that support the get_data() method."]

PK Z A(site-packages/pkg_resources/__init__.pyonu[
[Optimized (.pyo) build of the same pkg_resources/__init__ module; its bytecode duplicates the .pyc entry above and is likewise not reproducible as text.]
(like ``os.path.isdir()``)N((R((s:/usr/lib/python2.7/site-packages/pkg_resources/__init__.pyRRR6cC@sdS(s?List of resource names in the directory (like ``os.listdir()``)N((R((s:/usr/lib/python2.7/site-packages/pkg_resources/__init__.pyRPR6( RRRRRRRRRRP(((s:/usr/lib/python2.7/site-packages/pkg_resources/__init__.pyRxs     cB@seZdZddZedZedZdZdZ dZ ddZ dZ d Z deed Zddedd Zdded Zd ZedZdZdZdZRS(sDA collection of active distributions on sys.path (or a similar list)cC@s^g|_i|_i|_g|_|dkr<tj}nx|D]}|j|qCWdS(s?Create working set from list of path entries (default=sys.path)N(tentriest entry_keystby_keyt callbacksR3R<Rt add_entry(RRtentry((s:/usr/lib/python2.7/site-packages/pkg_resources/__init__.pyt__init__#s       cC@se|}yddlm}Wntk r1|SXy|j|Wntk r`|j|SX|S(s1 Prepare the master working set. i(t __requires__(t__main__RRRERat_build_from_requirements(tclstwsR((s:/usr/lib/python2.7/site-packages/pkg_resources/__init__.pyt _build_master0s   cC@s|g}t|}|j|t}x|D]}|j|q4Wx0tjD]%}||jkrU|j|qUqUW|jtj(|S(sQ Build a working set from a requirement spec. Rewrites sys.path. (RetresolveRZtaddR<RRR(Rtreq_specRtreqstdistsRR((s:/usr/lib/python2.7/site-packages/pkg_resources/__init__.pyRDs    cC@sT|jj|g|jj|x*t|tD]}|j||tq3WdS(sAdd a path item to ``.entries``, finding any distributions on it ``find_distributions(entry, True)`` is used to find distributions corresponding to the path entry, and they are added. `entry` is always appended to ``.entries``, even if it is already present. (This is because ``sys.path`` can contain the same value more than once, and the ``.entries`` of the ``sys.path`` WorkingSet should always equal ``sys.path``.) N(Rt setdefaultRRRVRRR(RRR((s:/usr/lib/python2.7/site-packages/pkg_resources/__init__.pyRZs cC@s|jj|j|kS(s9True if `dist` is the active distribution for its project(RRR.(RR((s:/usr/lib/python2.7/site-packages/pkg_resources/__init__.pyt __contains__iscC@sC|jj|j}|dk r?||kr?t||n|S(sFind a distribution matching requirement `req` If there is an active distribution for the requested project, this returns it as long as it meets the version requirement specified by `req`. But, if there is an active distribution for the project and it does *not* meet the `req` requirement, ``VersionConflict`` is raised. If there is no active distribution for the requested project, ``None`` is returned. N(RRR.R3Ra(RRR((s:/usr/lib/python2.7/site-packages/pkg_resources/__init__.pyRms cc@sgx`|D]X}|j|}|dkrGx4|jD] }|Vq5Wq||kr||VqqWdS(sYield entry point objects from `group` matching `name` If `name` is None, yields all entry points in `group` from all distributions in the working set, otherwise only ones matching both `group` and `name` are yielded (in distribution order). N(RJR3tvalues(RR@RRRtep((s:/usr/lib/python2.7/site-packages/pkg_resources/__init__.pyRL}s    cC@sQtjdj}|d}|j||d<|j|dj||dS(s?Locate distribution for `requires` and run `script_name` scriptiRiN(R<RRR-RERF(RtrequiresRRR((s:/usr/lib/python2.7/site-packages/pkg_resources/__init__.pyRFs    cc@spi}xc|jD]X}||jkr+qnx:|j|D]+}||kr9d||<|j|Vq9q9WqWdS(sYield distributions for non-duplicate projects in the working set The yield order is the order in which the items' path entries were added to the working set. iN(RRR(RtseentitemR.((s:/usr/lib/python2.7/site-packages/pkg_resources/__init__.pyt__iter__s  cC@s|r"|j|j|d|n|dkr:|j}n|jj|g}|jj|jg}| r|j|jkrdS||j|j<|j|kr|j|jn|j|kr|j|jn|j |dS(sAdd `dist` to working set, associated with `entry` If `entry` is unspecified, it defaults to the ``.location`` of `dist`. 
On exit from this routine, `entry` is added to the end of the working set's ``.entries`` (if it wasn't already present). `dist` is only added to the working set if it's for a project that doesn't already have a distribution in the set, unless `replace=True`. If it's added, any callbacks registered with the ``subscribe()`` method will be called. RN( t insert_onRR3tlocationRRR.RRt _added_new(RRRtinsertRtkeystkeys2((s:/usr/lib/python2.7/site-packages/pkg_resources/__init__.pyRs   cC@s+t|ddd}i}i}g}t} tjt} x|r&|jd} | |krmqFn| j| |sqFn|j| j} | dkr|j j| j} | dks| | krz|rz|} |dkr!| dkrt |j }q!t g}t g} n|j| | |d|} || j<| dkrz| j| d}t| |qzn|j| n| | kr| | }t| | j|n| j| jddd}|j|x/|D]'}| |j| j| j| | Map each requirement to the extras that demanded it. c@s@fd|jd|p$dD}j p?t|S(s Evaluate markers for req against each extra that demanded it. Return False if the req has a marker and fails evaluation. Otherwise, return True. c3@s(|]}jji|d6VqdS(textraN(tmarkertevaluate(t.0R)(R(s:/usr/lib/python2.7/site-packages/pkg_resources/__init__.pys s(N(N(RR3R*tany(RRR t extra_evals((Rs:/usr/lib/python2.7/site-packages/pkg_resources/__init__.pyR s  N(RRRR3R (((s:/usr/lib/python2.7/site-packages/pkg_resources/__init__.pyRscB@seZdZd eedZdZdZd dZ dZ dZ d e dZ d dZd Zd Zd ZRS( s5Searchable snapshot of distributions on a search pathcC@s,i|_||_||_|j|dS(s!Snapshot distributions available on a search path Any distributions found on `search_path` are added to the environment. `search_path` should be a sequence of ``sys.path`` items. If not supplied, ``sys.path`` is used. `platform` is an optional string specifying the name of the platform that platform-specific distributions must be compatible with. If unspecified, it defaults to the current platform. `python` is an optional string naming the desired version of Python (e.g. ``'3.3'``); it defaults to the current version. You may explicitly set `platform` (and/or `python`) to ``None`` if you wish to map *all* distributions, not just those compatible with the running platform or Python version. N(t_distmapR=tpythontscan(Rt search_pathR=R0((s:/usr/lib/python2.7/site-packages/pkg_resources/__init__.pyRs   cC@sI|jdkp-|jdkp-|j|jk}|oHt|j|jS(sIs distribution `dist` acceptable for this environment? The distribution must match the platform and python version requirements specified when this environment was created, or False is returned. N(R0R3t py_versionRiR=(RRt py_compat((s:/usr/lib/python2.7/site-packages/pkg_resources/__init__.pytcan_addscC@s|j|jj|dS(s"Remove `dist` from the environmentN(R/R.tremove(RR((s:/usr/lib/python2.7/site-packages/pkg_resources/__init__.pyR6scC@sQ|dkrtj}nx2|D]*}x!t|D]}|j|q2WqWdS(sdScan `search_path` for distributions usable in this environment Any distributions found are added to the environment. `search_path` should be a sequence of ``sys.path`` items. If not supplied, ``sys.path`` is used. Only distributions conforming to the platform/python version defined at initialization are added. N(R3R<RRVR(RR2RR((s:/usr/lib/python2.7/site-packages/pkg_resources/__init__.pyR1s    cC@s|j}|jj|gS(sReturn a newest-to-oldest list of distributions for `project_name` Uses case-insensitive `project_name` comparison, assuming all the project's distributions use their project's name converted to all lowercase as their key. 
(tlowerR/R(RR tdistribution_key((s:/usr/lib/python2.7/site-packages/pkg_resources/__init__.pyt __getitem__s cC@su|j|rq|jrq|jj|jg}||krq|j||jdtjddt qqndS(sLAdd `dist` if we ``can_add()`` it and it has not already been added R.thashcmptreverseN( R5t has_versionR/RR.RRtoperatort attrgetterR(RRR((s:/usr/lib/python2.7/site-packages/pkg_resources/__init__.pyRs   cC@sy|j|}Wn#tk r8|s/nd}nX|dk rI|Sx%||jD]}||krW|SqWW|j||S(sFind distribution best matching `req` and usable on `working_set` This calls the ``find(req)`` method of the `working_set` to see if a suitable distribution is already active. (This may raise ``VersionConflict`` if an unsuitable version of the project is already active in the specified `working_set`.) If a suitable distribution isn't active, this method returns the newest distribution in the environment that meets the ``Requirement`` in `req`. If no suitable distribution is found, and `installer` is supplied, then the result of calling the environment's ``obtain(req, installer)`` method will be returned. N(RRaR3R.tobtain(RRRTRRR((s:/usr/lib/python2.7/site-packages/pkg_resources/__init__.pyR s    cC@s|dk r||SdS(sObtain a distribution matching `requirement` (e.g. via download) Obtain a distro that matches requirement (e.g. via download). In the base ``Environment`` class, this routine just returns ``installer(requirement)``, unless `installer` is None, in which case None is returned instead. This method is a hook that allows subclasses to attempt other ways of obtaining a distribution before falling back to the `installer` argument.N(R3(Rt requirementR((s:/usr/lib/python2.7/site-packages/pkg_resources/__init__.pyR?#s cc@s0x)|jjD]}||r|VqqWdS(s=Yield the unique project names of the available distributionsN(R/R(RR.((s:/usr/lib/python2.7/site-packages/pkg_resources/__init__.pyR/s cC@s{t|tr|j|nXt|trdxF|D](}x||D]}|j|qFWq5Wntd|f|S(s2In-place addition of a distribution or environmentsCan't add %r to environment(RR]RRZR(RtothertprojectR((s:/usr/lib/python2.7/site-packages/pkg_resources/__init__.pyt__iadd__5s cC@s@|jgdddd}x||fD]}||7}q(W|S(s4Add an environment or distribution to an environmentR=R0N(RR3(RRAtnewR((s:/usr/lib/python2.7/site-packages/pkg_resources/__init__.pyt__add__AsN(RRRR3RDtPY_MAJORRR5R6R1R9RRR R?RRCRE(((s:/usr/lib/python2.7/site-packages/pkg_resources/__init__.pyRZs        cB@seZdZRS(sTAn error occurred extracting a resource The following attributes are available from instances of this exception: manager The resource manager that raised this exception cache_path The base directory for resource extraction original_error The exception instance that caused extraction to fail (RRR(((s:/usr/lib/python2.7/site-packages/pkg_resources/__init__.pyRdMs cB@seZdZdZdZdZdZdZdZ dZ dZ dZ dd Z ed Zd Zd Zed ZRS(s'Manage resource extraction and packagescC@s i|_dS(N(t cached_files(R((s:/usr/lib/python2.7/site-packages/pkg_resources/__init__.pyRascC@st|j|S(sDoes the named resource exist?(RGR(Rtpackage_or_requirementR((s:/usr/lib/python2.7/site-packages/pkg_resources/__init__.pyRQdscC@st|j|S(s,Is the named resource an existing directory?(RGRR(RRHR((s:/usr/lib/python2.7/site-packages/pkg_resources/__init__.pyRRhs cC@st|j||S(s4Return a true filesystem path for specified resource(RGR(RRHR((s:/usr/lib/python2.7/site-packages/pkg_resources/__init__.pyROns cC@st|j||S(s9Return a readable file-like object for specified resource(RGR(RRHR((s:/usr/lib/python2.7/site-packages/pkg_resources/__init__.pyRNts cC@st|j||S(s%Return specified resource as a 
string(RGR(RRHR((s:/usr/lib/python2.7/site-packages/pkg_resources/__init__.pyRMzs cC@st|j|S(s1List the contents of the named resource directory(RGRP(RRHR((s:/usr/lib/python2.7/site-packages/pkg_resources/__init__.pyRPs cC@sttjd}|jpt}tjdj}t|jt }||_ ||_ ||_ |dS(s5Give an error message for problems extracting file(s)is Can't extract file(s) to egg cache The following error occurred while trying to extract file(s) to the Python egg cache: {old_exc} The Python egg cache directory is currently set to: {cache_path} Perhaps your account does not have write access to this directory? You can change the cache directory by setting the PYTHON_EGG_CACHE environment variable to point to an accessible directory. N( R<texc_infotextraction_pathRYttextwraptdedenttlstripRdRRRt cache_pathtoriginal_error(Rtold_excRNttmplterr((s:/usr/lib/python2.7/site-packages/pkg_resources/__init__.pytextraction_errors   cC@sx|jpt}tjj||d|}yt|Wntk rY|jnX|j|d|j |<|S(sReturn absolute location in cache for `archive_name` and `names` The parent directory of the resulting path will be created if it does not already exist. `archive_name` should be the base filename of the enclosing egg (which may not be the name of the enclosing zipfile!), including its ".egg" extension. `names`, if provided, should be a sequence of path name parts "under" the egg's extraction location. This method should only be called by resource providers that need to obtain an extraction location, and only for names they intend to extract, as it tracks the generated names for possible cleanup later. s-tmpi( RJRYRRR>t_bypass_ensure_directoryt ExceptionRSt_warn_unsafe_extraction_pathRG(Rt archive_nametnamest extract_patht target_path((s:/usr/lib/python2.7/site-packages/pkg_resources/__init__.pytget_cache_paths    cC@swtjdkr*|jtjd r*dStj|j}|tj@sV|tj@rsd|}tj |t ndS(sN If the default extraction path is overridden and set to an insecure location, such as /tmp, it opens up an opportunity for an attacker to replace an extracted file with an unauthorized payload. Warn the user if a known insecure location is used. See Distribute #375 for more details. tnttwindirNs%s is writable by group/others and vulnerable to attack when used with get_resource_filename. Consider a more secure location (set with .set_extraction_path or the PYTHON_EGG_CACHE environment variable).( RRRtenvirontstattst_modetS_IWOTHtS_IWGRPtwarningstwarnt UserWarning(Rtmodetmsg((s:/usr/lib/python2.7/site-packages/pkg_resources/__init__.pyRVs &cC@s@tjdkr<tj|jdBd@}tj||ndS(s4Perform any platform-specific postprocessing of `tempname` This is where Mac header rewrites should be done; other platforms don't have anything special they should do. Resource providers should call this method ONLY after successfully extracting a compressed resource. They must NOT call it on resources that are already in the filesystem. `tempname` is the current (temporary) name of the file, and `filename` is the name it will be renamed to by the caller after this routine returns. tposiximiN(RRR_R`tchmod(RttempnametfilenameRf((s:/usr/lib/python2.7/site-packages/pkg_resources/__init__.pyt postprocessscC@s%|jrtdn||_dS(sSet the base path where resources will be extracted to, if needed. If you do not call this routine before any extractions take place, the path defaults to the return value of ``get_default_cache()``. (Which is based on the ``PYTHON_EGG_CACHE`` environment variable, with various platform-specific fallbacks. See that routine's documentation for more details.) 
Resources are extracted to subdirectories of this path based upon information given by the ``IResourceProvider``. You may set this to a temporary directory, but then you must call ``cleanup_resources()`` to delete the extracted files when done. There is no guarantee that ``cleanup_resources()`` will be able to remove all extracted files. (Note: you may not change the extraction path for a given resource manager once resources have been extracted, unless you first call ``cleanup_resources()``.) s5Can't change extraction path, files already extractedN(RGRARJ(RR((s:/usr/lib/python2.7/site-packages/pkg_resources/__init__.pyRWs  cC@sdS(sB Delete all extracted resource files and directories, returning a list of the file and directory names that could not be successfully removed. This function does not have any concurrency protection, so it should generally only be called when the extraction path is a temporary directory exclusive to a single process. This method is not automatically called; you must call it explicitly or register it as an ``atexit`` function if you wish to ensure cleanup of a temporary directory used for extractions. N((Rtforce((s:/usr/lib/python2.7/site-packages/pkg_resources/__init__.pyRXR6N((RRRR3RJRRQRRRORNRMRPRSR[t staticmethodRVRlRWRRX(((s:/usr/lib/python2.7/site-packages/pkg_resources/__init__.pyR\]s           cC@s"tjjdp!tjddS(s Return the ``PYTHON_EGG_CACHE`` environment variable or a platform-relevant user cache dir for an app named "Python-Eggs". tPYTHON_EGG_CACHEtappnames Python-Eggs(RR^RRtuser_cache_dir(((s:/usr/lib/python2.7/site-packages/pkg_resources/__init__.pyRYscC@stjdd|S(sConvert an arbitrary string to a standard distribution name Any runs of non-alphanumeric/. characters are replaced with a single '-'. s[^A-Za-z0-9.]+t-(tretsub(R((s:/usr/lib/python2.7/site-packages/pkg_resources/__init__.pyRfscC@sZyttjj|SWn9tjjk rU|jdd}tjdd|SXdS(sB Convert an arbitrary string to a standard version string RR8s[^A-Za-z0-9.]+RrN(RRRRRRRsRt(R((s:/usr/lib/python2.7/site-packages/pkg_resources/__init__.pyRg%s cC@stjdd|jS(sConvert an arbitrary string to a standard 'extra' name Any runs of non-alphanumeric characters are replaced with a single '_', and the result is always lowercased. s[^A-Za-z0-9.-]+R(RsRtR7(R)((s:/usr/lib/python2.7/site-packages/pkg_resources/__init__.pyRl1scC@s|jddS(s|Convert a project or version name to its filename-escaped form Any '-' characters are currently replaced with '_'. RrR(R(R((s:/usr/lib/python2.7/site-packages/pkg_resources/__init__.pyRm:scC@s;yt|Wn&tk r6}d|_d|_|SXtS(so Validate text as a PEP 508 environment marker; return an exception if invalid or False otherwise. N(Rot SyntaxErrorR3RktlinenoR(ttextte((s:/usr/lib/python2.7/site-packages/pkg_resources/__init__.pyRnBs  cC@sLy tjj|}|jSWn%tjjk rG}t|nXdS(s Evaluate a PEP 508 environment marker. Return a boolean indicating the marker result in this environment. Raise SyntaxError if marker is invalid. This implementation uses the 'pyparsing' module. 
N(RtmarkerstMarkerR+t InvalidMarkerRu(RwR)R*Rx((s:/usr/lib/python2.7/site-packages/pkg_resources/__init__.pyRoPs cB@seZdZdZdZdZdZdZdZ dZ dZ dZ dZ dZd Zd Zd Zd Zd ZdZdZdZdZdZRS(sETry to implement resources and metadata for arbitrary PEP 302 loaderscC@s:t|dd|_tjjt|dd|_dS(NRt__file__R6(RR3RRRtdirnamet module_path(RR((s:/usr/lib/python2.7/site-packages/pkg_resources/__init__.pyRfscC@s|j|j|S(N(t_fnR~(RRR((s:/usr/lib/python2.7/site-packages/pkg_resources/__init__.pyRjscC@stj|j||S(N(tiotBytesIOR(RRR((s:/usr/lib/python2.7/site-packages/pkg_resources/__init__.pyRmscC@s|j|j|j|S(N(t_getRR~(RRR((s:/usr/lib/python2.7/site-packages/pkg_resources/__init__.pyRpscC@s|j|j|j|S(N(t_hasRR~(RR((s:/usr/lib/python2.7/site-packages/pkg_resources/__init__.pyRsscC@s%|jo$|j|j|j|S(N(tegg_infoRR(RR((s:/usr/lib/python2.7/site-packages/pkg_resources/__init__.pyRvscC@sE|js dS|j|j|j|}tjrA|jdS|S(NR6sutf-8(RRRRtPY3tdecode(RRtvalue((s:/usr/lib/python2.7/site-packages/pkg_resources/__init__.pyRys cC@st|j|S(N(RjR(RR((s:/usr/lib/python2.7/site-packages/pkg_resources/__init__.pyRscC@s|j|j|j|S(N(t_isdirRR~(RR((s:/usr/lib/python2.7/site-packages/pkg_resources/__init__.pyRRscC@s%|jo$|j|j|j|S(N(RRR(RR((s:/usr/lib/python2.7/site-packages/pkg_resources/__init__.pyRscC@s|j|j|j|S(N(t_listdirRR~(RR((s:/usr/lib/python2.7/site-packages/pkg_resources/__init__.pyRPscC@s)|jr%|j|j|j|SgS(N(RRR(RR((s:/usr/lib/python2.7/site-packages/pkg_resources/__init__.pyRs c B@sd|}|j|s4edjen|j|jdd}|jdd}|j|j|}||dR (RtbaseR((s:/usr/lib/python2.7/site-packages/pkg_resources/__init__.pyRscC@s2t|jdr"|jj|StddS(Ntget_datas=Can't perform this operation for loaders without 'get_data()'(RRRR(RR((s:/usr/lib/python2.7/site-packages/pkg_resources/__init__.pyRsN(RRRR3tegg_nameRRRRRRRRRRRRRRPRRFRRRRR(((s:/usr/lib/python2.7/site-packages/pkg_resources/__init__.pyR~_s,                 cB@s eZdZdZdZRS(s&Provider based on a virtual filesystemcC@stj|||jdS(N(R~Rt _setup_prefix(RR((s:/usr/lib/python2.7/site-packages/pkg_resources/__init__.pyRscC@s|j}d}xt||krt|rdtjj||_tjj|d|_||_ Pn|}tjj |\}}qWdS(NsEGG-INFO( R~R3t _is_egg_pathRRtbasenameRR>Rtegg_rootR (RRtoldR((s:/usr/lib/python2.7/site-packages/pkg_resources/__init__.pyRs   (RRRRR(((s:/usr/lib/python2.7/site-packages/pkg_resources/__init__.pyRs cB@sJeZdZdZdZdZdZdZedZ RS(s6Provides access to package resources in the filesystemcC@stjj|S(N(RRR(RR((s:/usr/lib/python2.7/site-packages/pkg_resources/__init__.pyRscC@stjj|S(N(RRR (RR((s:/usr/lib/python2.7/site-packages/pkg_resources/__init__.pyRscC@s tj|S(N(Rtlistdir(RR((s:/usr/lib/python2.7/site-packages/pkg_resources/__init__.pyRscC@st|j|j|dS(Ntrb(R RR~(RRR((s:/usr/lib/python2.7/site-packages/pkg_resources/__init__.pyRscC@s&t|d}|jSWdQXdS(NR(R R(RRtstream((s:/usr/lib/python2.7/site-packages/pkg_resources/__init__.pyRscC@s)ttdtd}t||dS(NtSourceFileLoader(Rtimportlib_machineryttypeR3R(Rt loader_cls((s:/usr/lib/python2.7/site-packages/pkg_resources/__init__.pyt _registers ( RRRRRRRRR(R(((s:/usr/lib/python2.7/site-packages/pkg_resources/__init__.pyRs     cB@s<eZdZdZdZZdZdZdZ RS(s.Provider that returns nothing for all requestscC@stS(N(R(RR((s:/usr/lib/python2.7/site-packages/pkg_resources/__init__.pyR5R6cC@sdS(NR6((RR((s:/usr/lib/python2.7/site-packages/pkg_resources/__init__.pyRscC@sgS(N((RR((s:/usr/lib/python2.7/site-packages/pkg_resources/__init__.pyRscC@sdS(N((R((s:/usr/lib/python2.7/site-packages/pkg_resources/__init__.pyR sN( RRRR3R~RRRRR(((s:/usr/lib/python2.7/site-packages/pkg_resources/__init__.pyR|s    t ZipManifestscB@s#eZdZedZeZRS(s zip manifest builder c@sBtj|-fdjD}t|SWdQXdS(s Build 
a dictionary similar to the zipimport directory caches, except instead of tuples, store ZipInfo objects. Use a platform-specific path separator (os.sep) for the path keys for compatibility with pypy on Windows. c3@s3|])}|jdtjj|fVqdS(RN(RRtseptgetinfo(R,R(tzfile(s:/usr/lib/python2.7/site-packages/pkg_resources/__init__.pys sN(tzipfiletZipFiletnamelistR(RRR#((Rs:/usr/lib/python2.7/site-packages/pkg_resources/__init__.pytbuilds  (RRRR(Rtload(((s:/usr/lib/python2.7/site-packages/pkg_resources/__init__.pyRstMemoizedZipManifestscB@s)eZdZejddZdZRS(s% Memoized zipfile manifests. t manifest_modsmanifest mtimecC@svtjj|}tj|j}||ksC||j|krk|j|}|j||||Rt_extract_resourcet _eager_to_zip(RRRRteagersR((s:/usr/lib/python2.7/site-packages/pkg_resources/__init__.pyRcs     cC@s/|j}|jd}tj|}||fS(Nii(iii(t file_sizet date_timettimetmktime(tzip_stattsizeRt timestamp((s:/usr/lib/python2.7/site-packages/pkg_resources/__init__.pyt_get_date_and_sizeps  c C@s||jkr^x9|j|D]'}|j|tjj||}q#Wtjj|S|j|j|\}}tst dny(|j |j |j |}|j ||r|Stddtjj|\}} tj||jj|tj|t| ||f|j| |yt| |Wnltjk rtjj|r|j ||r|Stjdkrt|t| ||SnnXWntjk r|jnX|S(Ns>"os.rename" and "os.unlink" are not supported on this platforms .$extracttdirR\(t_indexRRRR>R}RRt WRITE_SUPPORTtIOErrorR[RRt _is_currentt_mkstemptwriteRRtcloseRRlRterrortisfileRR RS( RRRRtlastRRt real_pathtoutfttmpnam((s:/usr/lib/python2.7/site-packages/pkg_resources/__init__.pyRysD    c C@s|j|j|\}}tjj|s2tStj|}|j|ks_|j|krctS|j j |}t |d}|j }WdQX||kS(sK Return True if the file_path is current for this zip_path RN( RRRRRRR_tst_sizeRRRR R( Rt file_pathRRRR_t zip_contentstft file_contents((s:/usr/lib/python2.7/site-packages/pkg_resources/__init__.pyRscC@sa|jdkrZg}x6dD].}|j|r|j|j|qqW||_n|jS(Nsnative_libs.txtseager_resources.txt(snative_libs.txtseager_resources.txt(RR3RR R(RRR((s:/usr/lib/python2.7/site-packages/pkg_resources/__init__.pyRs  cC@sy |jSWntk ri}x~|jD]s}|jtj}xX|rtjj|d }||kr||j|dPqF|jg||RR(RtindRtpartstparent((s:/usr/lib/python2.7/site-packages/pkg_resources/__init__.pyRs     cC@s.|j|}||jkp-||jkS(N(RRR(RRR((s:/usr/lib/python2.7/site-packages/pkg_resources/__init__.pyRscC@s|j||jkS(N(RR(RR((s:/usr/lib/python2.7/site-packages/pkg_resources/__init__.pyRscC@s%t|jj|j|dS(N((RRRR(RR((s:/usr/lib/python2.7/site-packages/pkg_resources/__init__.pyRscC@s|j|j|j|S(N(RRR(RR((s:/usr/lib/python2.7/site-packages/pkg_resources/__init__.pyRscC@s|j|j|j|S(N(RRR~(RR((s:/usr/lib/python2.7/site-packages/pkg_resources/__init__.pyRsN(RRRR3RRRRRRRRRRnRRRRRRRRRR(((s:/usr/lib/python2.7/site-packages/pkg_resources/__init__.pyR?s$     7      cB@s;eZdZdZdZdZdZdZRS(s*Metadata handler for standalone PKG-INFO files Usage:: metadata = FileMetadata("/path/to/PKG-INFO") This provider rejects all data and metadata requests except for PKG-INFO, which is treated as existing, and will be the contents of the file at the provided location. cC@s ||_dS(N(R(RR((s:/usr/lib/python2.7/site-packages/pkg_resources/__init__.pyRscC@s|dkotjj|jS(NsPKG-INFO(RRR(RR((s:/usr/lib/python2.7/site-packages/pkg_resources/__init__.pyRscC@s_|dkrtdntj|jdddd}|j}WdQX|j||S(NsPKG-INFOs(No metadata except PKG-INFO is availabletencodingsutf-8terrorsR(RRR RRt_warn_on_replacement(RRRtmetadata((s:/usr/lib/python2.7/site-packages/pkg_resources/__init__.pyRs  ! 
cC@sGdjd}||krCd}|jt}tj|ndS(Ns�sutf-8s2{self.path} could not be properly decoded in UTF-8(RRRRcRd(RRtreplacement_charRQRg((s:/usr/lib/python2.7/site-packages/pkg_resources/__init__.pyR s  cC@st|j|S(N(RjR(RR((s:/usr/lib/python2.7/site-packages/pkg_resources/__init__.pyRs(RRRRRRRR(((s:/usr/lib/python2.7/site-packages/pkg_resources/__init__.pyRys     cB@seZdZdZRS(ssMetadata provider for egg directories Usage:: # Development eggs: egg_info = "/path/to/PackageName.egg-info" base_dir = os.path.dirname(egg_info) metadata = PathMetadata(base_dir, egg_info) dist_name = os.path.splitext(os.path.basename(egg_info))[0] dist = Distribution(basedir, project_name=dist_name, metadata=metadata) # Unpacked egg directories: egg_path = "/path/to/PackageName-ver-pyver-etc.egg" metadata = PathMetadata(egg_path, os.path.join(egg_path,'EGG-INFO')) dist = Distribution.from_filename(egg_path, metadata=metadata) cC@s||_||_dS(N(R~R(RRR((s:/usr/lib/python2.7/site-packages/pkg_resources/__init__.pyR)s (RRRR(((s:/usr/lib/python2.7/site-packages/pkg_resources/__init__.pyRzscB@seZdZdZRS(s Metadata provider for .egg filescC@s`|jtj|_||_|jrFtjj|j|j|_n |j|_|j dS(s-Create a metadata provider from a zipimporterN( RRRRRtprefixRR>R~R(Rtimporter((s:/usr/lib/python2.7/site-packages/pkg_resources/__init__.pyR1s   ! (RRRR(((s:/usr/lib/python2.7/site-packages/pkg_resources/__init__.pyR{.sRt_distribution_finderscC@s|t|tfind_eggs_in_zipt zipimportt zipimporterR7Rt from_location( RRRRtsubitemtsubpathRRtsubmeta((s:/usr/lib/python2.7/site-packages/pkg_resources/__init__.pyRQs$     cC@sdS(N(((RRR((s:/usr/lib/python2.7/site-packages/pkg_resources/__init__.pyt find_nothingoscC@sd}t|d|dtS(sL Given a list of filenames, return them in descending order by version number. >>> names = 'bar', 'foo', 'Python-2.7.10.egg', 'Python-2.7.2.egg' >>> _by_version_descending(names) ['Python-2.7.10.egg', 'Python-2.7.2.egg', 'foo', 'bar'] >>> names = 'Setuptools-1.2.3b1.egg', 'Setuptools-1.2.3.egg' >>> _by_version_descending(names) ['Setuptools-1.2.3.egg', 'Setuptools-1.2.3b1.egg'] >>> names = 'Setuptools-1.2.3b1.egg', 'Setuptools-1.2.3.post1.egg' >>> _by_version_descending(names) ['Setuptools-1.2.3.post1.egg', 'Setuptools-1.2.3b1.egg'] cS@sYtjj|\}}tj|jd|g}g|D]}tjj|^q=S(s6 Parse each component of the filename Rr( RRtsplitextt itertoolstchainR RRR(RtextRtpart((s:/usr/lib/python2.7/site-packages/pkg_resources/__init__.pyt _by_versionsR.R;(tsortedR(RXR((s:/usr/lib/python2.7/site-packages/pkg_resources/__init__.pyt_by_version_descendingvs c #@sttrHtjdttjjdVdSt}fd|D}t |}xQ|D]I}tjj|}t |}x||D] } | VqWqWdS(s6Yield distributions accessible on a sys.path directoryRsEGG-INFONc3@s'|]}t|r|VqdS(N(t dist_factory(R,R(RR(s:/usr/lib/python2.7/site-packages/pkg_resources/__init__.pys s( t_normalize_cachedt_is_unpacked_eggR]RRzRRR>t safe_listdirRR( RRRRtfilteredtpath_item_entriesRtfullpathtfactoryR((RRs:/usr/lib/python2.7/site-packages/pkg_resources/__init__.pyt find_on_paths       cC@sf|j}tt|jd}|r.tS| rEt|rEtS| r_|jdr_tStS(s9 Return a dist_factory for a path_item and entry s .egg-infos .dist-infos .egg-link(s .egg-infos .dist-info( R7R-RRtdistributions_from_metadataRRVtresolve_egg_linktNoDists(RRRR7tis_meta((s:/usr/lib/python2.7/site-packages/pkg_resources/__init__.pyRs   RcB@s2eZdZdZejr'eZndZRS(sS >>> bool(NoDists()) False >>> list(NoDists()('anything')) [] cC@stS(N(R(R((s:/usr/lib/python2.7/site-packages/pkg_resources/__init__.pyt__bool__scC@s tdS(N((titer(RR ((s:/usr/lib/python2.7/site-packages/pkg_resources/__init__.pyt__call__s(RRRRRtPY2t 
__nonzero__R(((s:/usr/lib/python2.7/site-packages/pkg_resources/__init__.pyRs    cC@sytj|SWnnttfk r*nXtk r}|jtjtjtjfkpot |dddk}|sqnXdS(sI Attempt to list contents of path, but suppress some exceptions. twinerrori N(( RRtPermissionErrortNotADirectoryErrortOSErrorterrnotENOTDIRtEACCEStENOENTRR3(RRxt ignorable((s:/usr/lib/python2.7/site-packages/pkg_resources/__init__.pyR s! cc@stjj|}tjj|rUttj|dkrCdSt||}n t|}tjj|}t j |||dt VdS(Nit precedence( RRR}R RRRzRyRR]RRv(RtrootRR((s:/usr/lib/python2.7/site-packages/pkg_resources/__init__.pyRs cc@sDt|2}x(|D] }|j}|r|VqqWWdQXdS(s1 Yield non-empty lines from file at path N(R tstrip(RRtline((s:/usr/lib/python2.7/site-packages/pkg_resources/__init__.pytnon_empty_liness   c@s>t}fd|D}tt|}t|dS(sa Given a path to an .egg-link, resolve distributions present in the referenced path. c3@s0|]&}tjjtjj|VqdS(N(RRR>R}(R,tref(R(s:/usr/lib/python2.7/site-packages/pkg_resources/__init__.pys s((R%RRVtnext(Rtreferenced_pathstresolved_pathst dist_groups((Rs:/usr/lib/python2.7/site-packages/pkg_resources/__init__.pyRs    t FileFindert_namespace_handlerst_namespace_packagescC@s|t|(Rt path_partst module_partsR(t package_nameR=(s:/usr/lib/python2.7/site-packages/pkg_resources/__init__.pytposition_in_sys_pathTs NR.(R<RRRRRR/(t orig_pathRARtpRB((RAR=R<s:/usr/lib/python2.7/site-packages/pkg_resources/__init__.pyR5Ds"  cC@s tjz|tkrdStjd}}d|krdj|jdd }t||tkrxt |nytj |j }Wqt k rt d|qXntj|gj|tj|gx|D]}t||qWWdtjXdS(s9Declare that package 'packageName' is a namespace packageNR8isNot a package:(t_impt acquire_lockR-R<RR3R>R RSRRR/RRRRR8t release_lock(R6RRR((s:/usr/lib/python2.7/site-packages/pkg_resources/__init__.pyRSes&        cC@sbtjzFx?tj|dD]+}t||}|r t||q q WWdtjXdS(sDEnsure that previously-declared namespace packages include path_itemN((RERFR-RR8RRG(RRtpackageR((s:/usr/lib/python2.7/site-packages/pkg_resources/__init__.pyRs cC@s`tjj||jdd}t|}x+|jD]}t||kr8Pq8q8W|SdS(sBCompute an ns-package subpath for a filesystem or zipfile importerR8iN(RRR>R RR/(RRR6RRt normalizedR((s:/usr/lib/python2.7/site-packages/pkg_resources/__init__.pytfile_ns_handlers " cC@sdS(N(R3(RRR6R((s:/usr/lib/python2.7/site-packages/pkg_resources/__init__.pytnull_ns_handlerscC@stjjtjj|S(s1Normalize a file/dir name for comparison purposes(RRtnormcasetrealpath(Rk((s:/usr/lib/python2.7/site-packages/pkg_resources/__init__.pyRqscC@s9y ||SWn&tk r4t|||<}|SXdS(N(RRq(RkRtresult((s:/usr/lib/python2.7/site-packages/pkg_resources/__init__.pyRs   cC@s|jjdS(s7 Determine if given path appears to be an egg. s.egg(R7R(R((s:/usr/lib/python2.7/site-packages/pkg_resources/__init__.pyRscC@s.t|o-tjjtjj|ddS(s@ Determine if given path appears to be an unpacked egg. sEGG-INFOsPKG-INFO(RRRRR>(R((s:/usr/lib/python2.7/site-packages/pkg_resources/__init__.pyRs cC@sU|jd}|j}|rQdj|}ttj||tj|ndS(NR8(R RR>tsetattrR<R(R6RRR((s:/usr/lib/python2.7/site-packages/pkg_resources/__init__.pyR3s  cc@st|tjrVxn|jD]0}|j}|r|jd r|VqqWn-x*|D]"}xt|D] }|VqpWq]WdS(s9Yield non-empty/non-comment lines of a string or sequencet#N(RRRt splitlinesR#RRj(tstrststss((s:/usr/lib/python2.7/site-packages/pkg_resources/__init__.pyRjs  s \w+(\.\w+)*$s (?P[^-]+) ( -(?P[^-]+) ( -py(?P[^-]+) ( -(?P.+) )? )? )? 
cB@seZdZd d ddZdZdZedZdZ dddZ e j dZ eddZed Zedd Zedd ZRS(s3Object representing an advertised importable objectcC@s[t|std|n||_||_t||_t||_||_dS(NsInvalid module name(tMODULERARt module_namettupletattrsR R(RRRVRXR R((s:/usr/lib/python2.7/site-packages/pkg_resources/__init__.pyRs   cC@sfd|j|jf}|jr<|ddj|j7}n|jrb|ddj|j7}n|S(Ns%s = %st:R8s [%s]t,(RRVRXR>R (RRS((s:/usr/lib/python2.7/site-packages/pkg_resources/__init__.pyRs   cC@sdt|S(NsEntryPoint.parse(%r)(R(R((s:/usr/lib/python2.7/site-packages/pkg_resources/__init__.pyR scO@sO| s|s|r,tjdtddn|rE|j||n|jS(sH Require packages for this EntryPoint, then resolve it. sJParameters to load are deprecated. Call .resolve and .require separately.t stackleveli(RcRdtDeprecationWarningRER(RRER4tkwargs((s:/usr/lib/python2.7/site-packages/pkg_resources/__init__.pyR s cC@sdt|jddgdd}ytjt|j|SWn%tk r_}tt|nXdS(sD Resolve the entry point from its module and attrs. tfromlistRtleveliN( RRVt functoolstreduceRRXRRR(RRtexc((s:/usr/lib/python2.7/site-packages/pkg_resources/__init__.pyR s cC@sr|jr%|j r%td|n|jj|j}tj|||d|j}tttj|dS(Ns&Can't require() without a distributionR ( R RRcRRTRRRR(RRRRR#((s:/usr/lib/python2.7/site-packages/pkg_resources/__init__.pyRE s s]\s*(?P.+?)\s*=\s*(?P[\w.]+)\s*(:\s*(?P[\w.]+))?\s*(?P\[.*\])?\s*$cC@s|jj|}|s0d}t||n|j}|j|d}|drl|djdnd}||d|d|||S(sParse a single entry point from string `src` Entry point syntax follows the form:: name = some.module:some.attr [extra1, extra2] The entry name and module name are required, but the ``:attrs`` and ``[extras]`` parts are optional s9EntryPoint must be in 'name=module:attrs [extras]' formatR tattrR8RR((tpatternR;RAt groupdictt _parse_extrasR (RtsrcRRCRgtresR RX((s:/usr/lib/python2.7/site-packages/pkg_resources/__init__.pyR6 s  #cC@s9|s dStjd|}|jr2tn|jS(Ntx((R^RtspecsRAR (Rt extras_specR((s:/usr/lib/python2.7/site-packages/pkg_resources/__init__.pyRfJ s   cC@st|std|ni}xZt|D]L}|j||}|j|krptd||jn|||j '{project_name} ({version})' is being parsed as a legacy, non PEP 440, version. You may find odd behavior and sort order. In particular it will be sorted as less than 0.0. It is recommended to migrate to PEP 440 compatible versions. s R(RRRRRRKRLR#RRcRdRtvarsR(RtLVt is_legacyRQ((s:/usr/lib/python2.7/site-packages/pkg_resources/__init__.pyt_warn_legacy_version s  cC@sgy |jSWnUtk rbt|j|j}|dkr^d}t||j|n|SXdS(Ns(Missing 'Version:' header and/or %s file(R{RRyt _get_metadatatPKG_INFOR3RA(RRRQ((s:/usr/lib/python2.7/site-packages/pkg_resources/__init__.pyR s   cC@s>y |jSWn)tk r6|j|j|_nX|jS(s~ A map of extra to its list of (direct) requirements for this distribution, including the null extra. (t_Distribution__dep_mapRt_filter_extrast_build_dep_map(R((s:/usr/lib/python2.7/site-packages/pkg_resources/__init__.pyt_dep_map s   cC@sxttd|D]}|}|j|}|jd\}}}|oet|pet| }|rwg}nt|pd}|j|gj |qW|S(s Given a mapping of extras to dependencies, strip off environment markers and filter out any dependencies not matching the markers. 
RYN( RRR3RRwRnRoRlRR (tdmR)t new_extraRRR*t fails_marker((s:/usr/lib/python2.7/site-packages/pkg_resources/__init__.pyR s   cC@s`i}xSdD]K}xBt|j|D]+\}}|j|gjt|q)Wq W|S(Ns requires.txts depends.txt(s requires.txts depends.txt(RkRRR Re(RRRR)R((s:/usr/lib/python2.7/site-packages/pkg_resources/__init__.pyR, s  "'cC@s|j}g}|j|jddxS|D]K}y|j|t|Wq/tk rytd||fq/Xq/W|S(s@List of Requirements needed for this distro if `extras` are useds%s has no such extra feature %rN((RR RR3RlRRc(RR RtdepsR((s:/usr/lib/python2.7/site-packages/pkg_resources/__init__.pyR3 s   cc@s5|j|r1x|j|D] }|VqWndS(N(RR(RRR$((s:/usr/lib/python2.7/site-packages/pkg_resources/__init__.pyRA scC@s|dkrtj}n|j|d||tjkrt|jx6|jdD]"}|tjkrWt|qWqWWndS(s>Ensure distribution is importable on `path` (default=sys.path)Rsnamespace_packages.txtN( R3R<RRRRRRRS(RRRtpkg((s:/usr/lib/python2.7/site-packages/pkg_resources/__init__.pytactivateF s   cC@sOdt|jt|j|jp'tf}|jrK|d|j7}n|S(s@Return what this distribution's standard .egg filename should bes %s-%s-py%sRr(RmR RR3RFR=(RRk((s:/usr/lib/python2.7/site-packages/pkg_resources/__init__.pyRQ s  cC@s(|jrd||jfSt|SdS(Ns%s (%s)(RR(R((s:/usr/lib/python2.7/site-packages/pkg_resources/__init__.pyR\ s cC@sMyt|dd}Wntk r/d}nX|p9d}d|j|fS(NRs[unknown version]s%s %s(RR3RAR (RR((s:/usr/lib/python2.7/site-packages/pkg_resources/__init__.pyRb s    cC@s.|jdrt|nt|j|S(sADelegate all unrecognized public attributes to .metadata providerR(RRRR|(RRc((s:/usr/lib/python2.7/site-packages/pkg_resources/__init__.pyt __getattr__j scK@s(|jt|tjj|||S(N(RRRRR(RRkRR ((s:/usr/lib/python2.7/site-packages/pkg_resources/__init__.pyRp scC@sTt|jtjjr1d|j|jf}nd|j|jf}tj|S(s?Return a ``Requirement`` that matches this distribution exactlys%s==%ss%s===%s(RRRRRR R^R(Rtspec((s:/usr/lib/python2.7/site-packages/pkg_resources/__init__.pyRw scC@sD|j||}|dkr:td||ffn|jS(s=Return the `name` entry point of `group` or raise ImportErrorsEntry point %r not foundN(RKR3RR(RR@RR((s:/usr/lib/python2.7/site-packages/pkg_resources/__init__.pyRI s cC@scy |j}Wn3tk rBtj|jd|}|_nX|dk r_|j|iS|S(s=Return the entry point map for `group`, or the full entry mapsentry_points.txtN(t_ep_mapRR_RqRR3R(RR@tep_map((s:/usr/lib/python2.7/site-packages/pkg_resources/__init__.pyRJ s    cC@s|j|j|S(s<Return the EntryPoint object for `group`+`name`, or ``None``(RJR(RR@R((s:/usr/lib/python2.7/site-packages/pkg_resources/__init__.pyRK sc C@s|p |j}|sdSt|}tjj|}g|D]}|rVt|pY|^q>}xt|D]\}}||kr|rPqdSqo||kro|jtkro| r|||krdS|tjkr|j n|j |||j ||PqoqoW|tjkr.|j n|rG|j d|n |j |dSxMt ry|j ||d} Wntk rPq[X|| =|| =| }q[WdS(sEnsure self.location is on path If replace=False (default): - If location is already in path anywhere, do nothing. - Else: - If it's an egg and its parent directory is on path, insert just ahead of the parent. - Else: add to the end of path. If replace=True: - If location is already on path anywhere (not eggs) or higher priority than its parent (eggs) do nothing. - Else: - If it's an egg and its parent directory is on path, insert just ahead of the parent, removing any lower-priority entries. - Else: add it to the front of path. 
Nii(RRRRR}t enumerateR!RrR<tcheck_version_conflictRRRR:RA( RRtlocRtnloctbdirRDtnpathRtnp((s:/usr/lib/python2.7/site-packages/pkg_resources/__init__.pyR sB +       cC@s|jdkrdStj|jd}t|j}x|jdD]}|tjksJ||ksJ|tkr}qJn|dkrqJnt tj|dd}|rt|j |sJ|j |jrqJnt d|||jfqJWdS( Nt setuptoolssnamespace_packages.txts top_level.txtt pkg_resourcestsiteR|sIModule %s was already imported from %s, but %s is being added to sys.path(RRR( R.RRRRqRR<RR-RR3Rt issue_warning(RtnspRtmodnametfn((s:/usr/lib/python2.7/site-packages/pkg_resources/__init__.pyR s"  cC@s8y |jWn&tk r3tdt|tSXtS(NsUnbuilt egg for (RRARRRR(R((s:/usr/lib/python2.7/site-packages/pkg_resources/__init__.pyR< s   cK@sYd}x0|jD]"}|j|t||dqW|jd|j|j|S(s@Copy this distribution, substituting in any changed keyword argss<project_name version py_version platform location precedenceRN(R RRR3R|R(RR RXRc((s:/usr/lib/python2.7/site-packages/pkg_resources/__init__.pytclone s  cC@s g|jD]}|r |^q S(N(R(Rtdep((s:/usr/lib/python2.7/site-packages/pkg_resources/__init__.pyR  sN((.RRRRR3RFRrRR(RRRR:RRRRRRRR.RRRRRnRRRRRRRRRRRRRIRJRKRRR<RR (((s:/usr/lib/python2.7/site-packages/pkg_resources/__init__.pyR] sR                      D   tEggInfoDistributioncB@seZdZRS(cC@s.t|j|j}|r*||_n|S(s Packages installed by distutils (e.g. numpy or scipy), which uses an old safe_version, and so their version numbers can get mangled when converted to filenames (e.g., 1.11.0.dev0+2329eae to 1.11.0.dev0_2329eae). These distributions will not be parsed properly downstream by Distribution and safe_version, so take an extra step and try to get the version number from the metadata file itself instead of the filename. (RyRRR{(Rt md_version((s:/usr/lib/python2.7/site-packages/pkg_resources/__init__.pyR s  (RRR(((s:/usr/lib/python2.7/site-packages/pkg_resources/__init__.pyR stDistInfoDistributioncB@sJeZdZdZejdZedZedZ dZ RS(sV Wrap an actual or potential sys.path entry w/metadata, .dist-info style. tMETADATAs([\(,])\s*(\d.*?)\s*([,\)])cC@sTy |jSWnBtk rO|j|j}tjjj||_|jSXdS(sParse and cache metadataN(t _pkg_infoRRRtemailtparsertParsertparsestr(RR((s:/usr/lib/python2.7/site-packages/pkg_resources/__init__.pyt_parsed_pkg_info! s   cC@s6y |jSWn$tk r1|j|_|jSXdS(N(t_DistInfoDistribution__dep_mapRt_compute_dependencies(R((s:/usr/lib/python2.7/site-packages/pkg_resources/__init__.pyR+ s   c@sigd6}|_gx3|jjdp2gD]}jt|q3Wfd}t|d}|dj|xR|jjdpgD]8}t|j}t t|||||R4(R((s:/usr/lib/python2.7/site-packages/pkg_resources/__init__.pyR` s(RRR(((s:/usr/lib/python2.7/site-packages/pkg_resources/__init__.pyR_ scc@stt|}x|D]~}d|krA||jd }n|jdr|d j}y|t|7}Wqtk rdSXnt|VqWdS(sYield ``Requirement`` objects for each specification in `strs` `strs` must be a string, or a (possibly-nested) iterable thereof. 
s #s\iN(RRjRRR#R't StopIterationR^(RRRlR$((s:/usr/lib/python2.7/site-packages/pkg_resources/__init__.pyRed s   cB@sMeZdZdZdZdZdZdZedZ RS(cC@sytt|j|Wn+tjjk rG}tt|nX|j|_ t |j}||j |_ |_ g|jD]}|j|jf^q|_ttt|j|_|j |jt|j|jrt|jndf|_t|j|_dS(s>DO NOT CALL THIS UNDOCUMENTED METHOD; use Requirement.parse()!N(tsuperR^RRRtInvalidRequirementRRRt unsafe_nameRfR7R R.t specifierR=RRjRWRRlR RR*R3thashCmpRt_Requirement__hash(Rtrequirement_stringRxR R((s:/usr/lib/python2.7/site-packages/pkg_resources/__init__.pyR{ s + $cC@st|to|j|jkS(N(RR^R(RRA((s:/usr/lib/python2.7/site-packages/pkg_resources/__init__.pyR scC@s ||k S(N((RRA((s:/usr/lib/python2.7/site-packages/pkg_resources/__init__.pyR scC@sGt|tr1|j|jkr%tS|j}n|jj|dtS(Nt prereleases(RR]R.RRRtcontainsR(RR((s:/usr/lib/python2.7/site-packages/pkg_resources/__init__.pyR s  cC@s|jS(N(R(R((s:/usr/lib/python2.7/site-packages/pkg_resources/__init__.pyR scC@sdt|S(NsRequirement.parse(%r)(R(R((s:/usr/lib/python2.7/site-packages/pkg_resources/__init__.pyR scC@st|\}|S(N(Re(RSR((s:/usr/lib/python2.7/site-packages/pkg_resources/__init__.pyR s( RRRRRRRRRnR(((s:/usr/lib/python2.7/site-packages/pkg_resources/__init__.pyR^z s     cC@st|kr|tfS|S(sJ Ensure object appears in the mro even for old-style classes. (tobject(tclasses((s:/usr/lib/python2.7/site-packages/pkg_resources/__init__.pyt_always_object s  cC@sPttjt|dt|}x"|D]}||kr.||Sq.WdS(s2Return an adapter factory for `ob` from `registry`RN(RtinspecttgetmroRR(tregistryR/R1tt((s:/usr/lib/python2.7/site-packages/pkg_resources/__init__.pyR s'  cC@s)tjj|}tj|dtdS(s1Ensure that the parent directory of `path` existstexist_okN(RRR}R tmakedirsR(RR}((s:/usr/lib/python2.7/site-packages/pkg_resources/__init__.pyRp scC@s^tstdnt|\}}|rZ|rZt| rZt|t|dndS(s/Sandbox-bypassing version of ensure_directory()s*"os.mkdir" not supported on this platform.iN(RRR R RTR(RR}Rk((s:/usr/lib/python2.7/site-packages/pkg_resources/__init__.pyRT s  cc@sd}g}xt|D]y}|jdr|jdrs|sI|rW||fVn|dd!j}g}qtd|q|j|qW||fVdS(ssSplit a string or iterable thereof into (section, content) pairs Each ``section`` is a stripped version of the section header ("[section]") and each ``content`` is a list of stripped lines excluding blank lines and comment-only lines. If there are any such lines before the first section header, they're returned in a first ``section`` of ``None``. t[t]iisInvalid section headingN(R3RjRRR#RAR(RStsectiontcontentR$((s:/usr/lib/python2.7/site-packages/pkg_resources/__init__.pyRk s  cO@s7tj}ztt_tj||SWd|t_XdS(N(RR tos_openttempfiletmkstemp(R4R told_open((s:/usr/lib/python2.7/site-packages/pkg_resources/__init__.pyR s   tignoretcategoryRcO@s||||S(N((RR4R]((s:/usr/lib/python2.7/site-packages/pkg_resources/__init__.pyt _call_aside s c@s:t|d<|jfdtDdS(s=Set up global resource manager (deliberately not state-saved)t_managerc3@s3|])}|jds|t|fVqdS(RN(RR(R,R(R(s:/usr/lib/python2.7/site-packages/pkg_resources/__init__.pys sN(R\RR(R%((Rs:/usr/lib/python2.7/site-packages/pkg_resources/__init__.pyt _initialize s    cC@stj}tdd||j}|j}|j}|j}|}td|D|ddtg|_ t t |j t jtjtdS(sE Prepare the master working set and make the ``require()`` API available. This function has explicit effects on the global state of pkg_resources. It is intended to be invoked once at the initialization of this module. Invocation by other packages is unsupported and done at their own risk. 
site-packages/pkg_resources/__init__.py
# coding: utf-8
"""
Package resource API
--------------------

A resource is a logical file contained within a package, or a logical
subdirectory thereof.  The package resource API expects resource names
to have their path parts separated with ``/``, *not* whatever the local
path separator is.  Do not use os.path operations to manipulate resource
names being passed into the API.

The package resource API is designed to work with normal filesystem packages,
.egg files, and unpacked .egg files.  It can also work in a limited way with
.zip files and with custom PEP 302 loaders that support the ``get_data()``
method.
"""

from __future__ import absolute_import

import sys
import os
import io
import time
import re
import types
import zipfile
import zipimport
import warnings
import stat
import functools
import pkgutil
import operator
import platform
import collections
import plistlib
import email.parser
import errno
import tempfile
import textwrap
import itertools
import inspect
from pkgutil import get_importer

try:
    import _imp
except ImportError:
    # Python 3.2 compatibility
    import imp as _imp

from pkg_resources.extern import six
from pkg_resources.extern.six.moves import urllib, map, filter

# capture these to bypass sandboxing
from os import utime
try:
    from os import mkdir, rename, unlink
    WRITE_SUPPORT = True
except ImportError:
    # no write support, probably under GAE
    WRITE_SUPPORT = False

from os import open as os_open
from os.path import isdir, split

try:
    import importlib.machinery as importlib_machinery
    # access attribute to force import under delayed import mechanisms.
    importlib_machinery.__name__
except ImportError:
    importlib_machinery = None

from .
import py31compat from pkg_resources.extern import appdirs from pkg_resources.extern import packaging __import__('pkg_resources.extern.packaging.version') __import__('pkg_resources.extern.packaging.specifiers') __import__('pkg_resources.extern.packaging.requirements') __import__('pkg_resources.extern.packaging.markers') if (3, 0) < sys.version_info < (3, 3): raise RuntimeError("Python 3.3 or later is required") if six.PY2: # Those builtin exceptions are only defined in Python 3 PermissionError = None NotADirectoryError = None # declare some globals that will be defined later to # satisfy the linters. require = None working_set = None add_activation_listener = None resources_stream = None cleanup_resources = None resource_dir = None resource_stream = None set_extraction_path = None resource_isdir = None resource_string = None iter_entry_points = None resource_listdir = None resource_filename = None resource_exists = None _distribution_finders = None _namespace_handlers = None _namespace_packages = None class PEP440Warning(RuntimeWarning): """ Used when there is an issue with a version or specifier not complying with PEP 440. """ def parse_version(v): try: return packaging.version.Version(v) except packaging.version.InvalidVersion: return packaging.version.LegacyVersion(v) _state_vars = {} def _declare_state(vartype, **kw): globals().update(kw) _state_vars.update(dict.fromkeys(kw, vartype)) def __getstate__(): state = {} g = globals() for k, v in _state_vars.items(): state[k] = g['_sget_' + v](g[k]) return state def __setstate__(state): g = globals() for k, v in state.items(): g['_sset_' + _state_vars[k]](k, g[k], v) return state def _sget_dict(val): return val.copy() def _sset_dict(key, ob, state): ob.clear() ob.update(state) def _sget_object(val): return val.__getstate__() def _sset_object(key, ob, state): ob.__setstate__(state) _sget_none = _sset_none = lambda *args: None def get_supported_platform(): """Return this platform's maximum compatible version. distutils.util.get_platform() normally reports the minimum version of Mac OS X that would be required to *use* extensions produced by distutils. But what we want when checking compatibility is to know the version of Mac OS X that we are *running*. To allow usage of packages that explicitly require a newer version of Mac OS X, we must also know the current version of the OS. If this condition occurs for any other platform with a version in its platform strings, this function should be extended accordingly. 
""" plat = get_build_platform() m = macosVersionString.match(plat) if m is not None and sys.platform == "darwin": try: plat = 'macosx-%s-%s' % ('.'.join(_macosx_vers()[:2]), m.group(3)) except ValueError: # not Mac OS X pass return plat __all__ = [ # Basic resource access and distribution/entry point discovery 'require', 'run_script', 'get_provider', 'get_distribution', 'load_entry_point', 'get_entry_map', 'get_entry_info', 'iter_entry_points', 'resource_string', 'resource_stream', 'resource_filename', 'resource_listdir', 'resource_exists', 'resource_isdir', # Environmental control 'declare_namespace', 'working_set', 'add_activation_listener', 'find_distributions', 'set_extraction_path', 'cleanup_resources', 'get_default_cache', # Primary implementation classes 'Environment', 'WorkingSet', 'ResourceManager', 'Distribution', 'Requirement', 'EntryPoint', # Exceptions 'ResolutionError', 'VersionConflict', 'DistributionNotFound', 'UnknownExtra', 'ExtractionError', # Warnings 'PEP440Warning', # Parsing functions and string utilities 'parse_requirements', 'parse_version', 'safe_name', 'safe_version', 'get_platform', 'compatible_platforms', 'yield_lines', 'split_sections', 'safe_extra', 'to_filename', 'invalid_marker', 'evaluate_marker', # filesystem utilities 'ensure_directory', 'normalize_path', # Distribution "precedence" constants 'EGG_DIST', 'BINARY_DIST', 'SOURCE_DIST', 'CHECKOUT_DIST', 'DEVELOP_DIST', # "Provider" interfaces, implementations, and registration/lookup APIs 'IMetadataProvider', 'IResourceProvider', 'FileMetadata', 'PathMetadata', 'EggMetadata', 'EmptyProvider', 'empty_provider', 'NullProvider', 'EggProvider', 'DefaultProvider', 'ZipProvider', 'register_finder', 'register_namespace_handler', 'register_loader_type', 'fixup_namespace_packages', 'get_importer', # Deprecated/backward compatibility only 'run_main', 'AvailableDistributions', ] class ResolutionError(Exception): """Abstract base for dependency resolution errors""" def __repr__(self): return self.__class__.__name__ + repr(self.args) class VersionConflict(ResolutionError): """ An already-installed version conflicts with the requested version. Should be initialized with the installed Distribution and the requested Requirement. """ _template = "{self.dist} is installed but {self.req} is required" @property def dist(self): return self.args[0] @property def req(self): return self.args[1] def report(self): return self._template.format(**locals()) def with_context(self, required_by): """ If required_by is non-empty, return a version of self that is a ContextualVersionConflict. """ if not required_by: return self args = self.args + (required_by,) return ContextualVersionConflict(*args) class ContextualVersionConflict(VersionConflict): """ A VersionConflict that accepts a third parameter, the set of the requirements that required the installed Distribution. 
""" _template = VersionConflict._template + ' by {self.required_by}' @property def required_by(self): return self.args[2] class DistributionNotFound(ResolutionError): """A requested distribution was not found""" _template = ("The '{self.req}' distribution was not found " "and is required by {self.requirers_str}") @property def req(self): return self.args[0] @property def requirers(self): return self.args[1] @property def requirers_str(self): if not self.requirers: return 'the application' return ', '.join(self.requirers) def report(self): return self._template.format(**locals()) def __str__(self): return self.report() class UnknownExtra(ResolutionError): """Distribution doesn't have an "extra feature" of the given name""" _provider_factories = {} PY_MAJOR = sys.version[:3] EGG_DIST = 3 BINARY_DIST = 2 SOURCE_DIST = 1 CHECKOUT_DIST = 0 DEVELOP_DIST = -1 def register_loader_type(loader_type, provider_factory): """Register `provider_factory` to make providers for `loader_type` `loader_type` is the type or class of a PEP 302 ``module.__loader__``, and `provider_factory` is a function that, passed a *module* object, returns an ``IResourceProvider`` for that module. """ _provider_factories[loader_type] = provider_factory def get_provider(moduleOrReq): """Return an IResourceProvider for the named module or requirement""" if isinstance(moduleOrReq, Requirement): return working_set.find(moduleOrReq) or require(str(moduleOrReq))[0] try: module = sys.modules[moduleOrReq] except KeyError: __import__(moduleOrReq) module = sys.modules[moduleOrReq] loader = getattr(module, '__loader__', None) return _find_adapter(_provider_factories, loader)(module) def _macosx_vers(_cache=[]): if not _cache: version = platform.mac_ver()[0] # fallback for MacPorts if version == '': plist = '/System/Library/CoreServices/SystemVersion.plist' if os.path.exists(plist): if hasattr(plistlib, 'readPlist'): plist_content = plistlib.readPlist(plist) if 'ProductVersion' in plist_content: version = plist_content['ProductVersion'] _cache.append(version.split('.')) return _cache[0] def _macosx_arch(machine): return {'PowerPC': 'ppc', 'Power_Macintosh': 'ppc'}.get(machine, machine) def get_build_platform(): """Return this platform's string for platform-specific distributions XXX Currently this is the same as ``distutils.util.get_platform()``, but it needs some hacks for Linux and Mac OS X. """ try: # Python 2.7 or >=3.2 from sysconfig import get_platform except ImportError: from distutils.util import get_platform plat = get_platform() if sys.platform == "darwin" and not plat.startswith('macosx-'): try: version = _macosx_vers() machine = os.uname()[4].replace(" ", "_") return "macosx-%d.%d-%s" % ( int(version[0]), int(version[1]), _macosx_arch(machine), ) except ValueError: # if someone is running a non-Mac darwin system, this will fall # through to the default implementation pass return plat macosVersionString = re.compile(r"macosx-(\d+)\.(\d+)-(.*)") darwinVersionString = re.compile(r"darwin-(\d+)\.(\d+)\.(\d+)-(.*)") # XXX backward compat get_platform = get_build_platform def compatible_platforms(provided, required): """Can code for the `provided` platform run on the `required` platform? Returns true if either platform is ``None``, or the platforms are equal. XXX Needs compatibility checks for Linux and other unixy OSes. 
""" if provided is None or required is None or provided == required: # easy case return True # Mac OS X special cases reqMac = macosVersionString.match(required) if reqMac: provMac = macosVersionString.match(provided) # is this a Mac package? if not provMac: # this is backwards compatibility for packages built before # setuptools 0.6. All packages built after this point will # use the new macosx designation. provDarwin = darwinVersionString.match(provided) if provDarwin: dversion = int(provDarwin.group(1)) macosversion = "%s.%s" % (reqMac.group(1), reqMac.group(2)) if dversion == 7 and macosversion >= "10.3" or \ dversion == 8 and macosversion >= "10.4": return True # egg isn't macosx or legacy darwin return False # are they the same major version and machine type? if provMac.group(1) != reqMac.group(1) or \ provMac.group(3) != reqMac.group(3): return False # is the required OS major update >= the provided one? if int(provMac.group(2)) > int(reqMac.group(2)): return False return True # XXX Linux and other platforms' special cases should go here return False def run_script(dist_spec, script_name): """Locate distribution `dist_spec` and run its `script_name` script""" ns = sys._getframe(1).f_globals name = ns['__name__'] ns.clear() ns['__name__'] = name require(dist_spec)[0].run_script(script_name, ns) # backward compatibility run_main = run_script def get_distribution(dist): """Return a current distribution object for a Requirement or string""" if isinstance(dist, six.string_types): dist = Requirement.parse(dist) if isinstance(dist, Requirement): dist = get_provider(dist) if not isinstance(dist, Distribution): raise TypeError("Expected string, Requirement, or Distribution", dist) return dist def load_entry_point(dist, group, name): """Return `name` entry point of `group` for `dist` or raise ImportError""" return get_distribution(dist).load_entry_point(group, name) def get_entry_map(dist, group=None): """Return the entry point map for `group`, or the full entry map""" return get_distribution(dist).get_entry_map(group) def get_entry_info(dist, group, name): """Return the EntryPoint object for `group`+`name`, or ``None``""" return get_distribution(dist).get_entry_info(group, name) class IMetadataProvider: def has_metadata(name): """Does the package's distribution contain the named metadata?""" def get_metadata(name): """The named metadata resource as a string""" def get_metadata_lines(name): """Yield named metadata resource as list of non-blank non-comment lines Leading and trailing whitespace is stripped from each line, and lines with ``#`` as the first non-blank character are omitted.""" def metadata_isdir(name): """Is the named metadata a directory? 
(like ``os.path.isdir()``)""" def metadata_listdir(name): """List of metadata names in the directory (like ``os.listdir()``)""" def run_script(script_name, namespace): """Execute the named script in the supplied namespace dictionary""" class IResourceProvider(IMetadataProvider): """An object that provides access to package resources""" def get_resource_filename(manager, resource_name): """Return a true filesystem path for `resource_name` `manager` must be an ``IResourceManager``""" def get_resource_stream(manager, resource_name): """Return a readable file-like object for `resource_name` `manager` must be an ``IResourceManager``""" def get_resource_string(manager, resource_name): """Return a string containing the contents of `resource_name` `manager` must be an ``IResourceManager``""" def has_resource(resource_name): """Does the package contain the named resource?""" def resource_isdir(resource_name): """Is the named resource a directory? (like ``os.path.isdir()``)""" def resource_listdir(resource_name): """List of resource names in the directory (like ``os.listdir()``)""" class WorkingSet(object): """A collection of active distributions on sys.path (or a similar list)""" def __init__(self, entries=None): """Create working set from list of path entries (default=sys.path)""" self.entries = [] self.entry_keys = {} self.by_key = {} self.callbacks = [] if entries is None: entries = sys.path for entry in entries: self.add_entry(entry) @classmethod def _build_master(cls): """ Prepare the master working set. """ ws = cls() try: from __main__ import __requires__ except ImportError: # The main program does not list any requirements return ws # ensure the requirements are met try: ws.require(__requires__) except VersionConflict: return cls._build_from_requirements(__requires__) return ws @classmethod def _build_from_requirements(cls, req_spec): """ Build a working set from a requirement spec. Rewrites sys.path. """ # try it without defaults already on sys.path # by starting with an empty path ws = cls([]) reqs = parse_requirements(req_spec) dists = ws.resolve(reqs, Environment()) for dist in dists: ws.add(dist) # add any missing entries from sys.path for entry in sys.path: if entry not in ws.entries: ws.add_entry(entry) # then copy back to sys.path sys.path[:] = ws.entries return ws def add_entry(self, entry): """Add a path item to ``.entries``, finding any distributions on it ``find_distributions(entry, True)`` is used to find distributions corresponding to the path entry, and they are added. `entry` is always appended to ``.entries``, even if it is already present. (This is because ``sys.path`` can contain the same value more than once, and the ``.entries`` of the ``sys.path`` WorkingSet should always equal ``sys.path``.) """ self.entry_keys.setdefault(entry, []) self.entries.append(entry) for dist in find_distributions(entry, True): self.add(dist, entry, False) def __contains__(self, dist): """True if `dist` is the active distribution for its project""" return self.by_key.get(dist.key) == dist def find(self, req): """Find a distribution matching requirement `req` If there is an active distribution for the requested project, this returns it as long as it meets the version requirement specified by `req`. But, if there is an active distribution for the project and it does *not* meet the `req` requirement, ``VersionConflict`` is raised. If there is no active distribution for the requested project, ``None`` is returned. 
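        A minimal illustration (the project name is hypothetical)::

            ws = WorkingSet()
            dist = ws.find(Requirement.parse("example-project"))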
""" dist = self.by_key.get(req.key) if dist is not None and dist not in req: # XXX add more info raise VersionConflict(dist, req) return dist def iter_entry_points(self, group, name=None): """Yield entry point objects from `group` matching `name` If `name` is None, yields all entry points in `group` from all distributions in the working set, otherwise only ones matching both `group` and `name` are yielded (in distribution order). """ for dist in self: entries = dist.get_entry_map(group) if name is None: for ep in entries.values(): yield ep elif name in entries: yield entries[name] def run_script(self, requires, script_name): """Locate distribution for `requires` and run `script_name` script""" ns = sys._getframe(1).f_globals name = ns['__name__'] ns.clear() ns['__name__'] = name self.require(requires)[0].run_script(script_name, ns) def __iter__(self): """Yield distributions for non-duplicate projects in the working set The yield order is the order in which the items' path entries were added to the working set. """ seen = {} for item in self.entries: if item not in self.entry_keys: # workaround a cache issue continue for key in self.entry_keys[item]: if key not in seen: seen[key] = 1 yield self.by_key[key] def add(self, dist, entry=None, insert=True, replace=False): """Add `dist` to working set, associated with `entry` If `entry` is unspecified, it defaults to the ``.location`` of `dist`. On exit from this routine, `entry` is added to the end of the working set's ``.entries`` (if it wasn't already present). `dist` is only added to the working set if it's for a project that doesn't already have a distribution in the set, unless `replace=True`. If it's added, any callbacks registered with the ``subscribe()`` method will be called. """ if insert: dist.insert_on(self.entries, entry, replace=replace) if entry is None: entry = dist.location keys = self.entry_keys.setdefault(entry, []) keys2 = self.entry_keys.setdefault(dist.location, []) if not replace and dist.key in self.by_key: # ignore hidden distros return self.by_key[dist.key] = dist if dist.key not in keys: keys.append(dist.key) if dist.key not in keys2: keys2.append(dist.key) self._added_new(dist) def resolve(self, requirements, env=None, installer=None, replace_conflicting=False, extras=None): """List all distributions needed to (recursively) meet `requirements` `requirements` must be a sequence of ``Requirement`` objects. `env`, if supplied, should be an ``Environment`` instance. If not supplied, it defaults to all distributions available within any entry or distribution in the working set. `installer`, if supplied, will be invoked with each requirement that cannot be met by an already-installed distribution; it should return a ``Distribution`` or ``None``. Unless `replace_conflicting=True`, raises a VersionConflict exception if any requirements are found on the path that have the correct name but the wrong version. Otherwise, if an `installer` is supplied it will be invoked to obtain the correct version of the requirement and activate it. `extras` is a list of the extras to be used with these requirements. This is important because extra requirements may look like `my_req; extra = "my_extra"`, which would otherwise be interpreted as a purely optional requirement. Instead, we want to be able to assert that these requirements are truly required. 
""" # set up the stack requirements = list(requirements)[::-1] # set of processed requirements processed = {} # key -> dist best = {} to_activate = [] req_extras = _ReqExtras() # Mapping of requirement to set of distributions that required it; # useful for reporting info about conflicts. required_by = collections.defaultdict(set) while requirements: # process dependencies breadth-first req = requirements.pop(0) if req in processed: # Ignore cyclic or redundant dependencies continue if not req_extras.markers_pass(req, extras): continue dist = best.get(req.key) if dist is None: # Find the best distribution and add it to the map dist = self.by_key.get(req.key) if dist is None or (dist not in req and replace_conflicting): ws = self if env is None: if dist is None: env = Environment(self.entries) else: # Use an empty environment and workingset to avoid # any further conflicts with the conflicting # distribution env = Environment([]) ws = WorkingSet([]) dist = best[req.key] = env.best_match( req, ws, installer, replace_conflicting=replace_conflicting ) if dist is None: requirers = required_by.get(req, None) raise DistributionNotFound(req, requirers) to_activate.append(dist) if dist not in req: # Oops, the "best" so far conflicts with a dependency dependent_req = required_by[req] raise VersionConflict(dist, req).with_context(dependent_req) # push the new requirements onto the stack new_requirements = dist.requires(req.extras)[::-1] requirements.extend(new_requirements) # Register the new requirements needed by req for new_requirement in new_requirements: required_by[new_requirement].add(req.project_name) req_extras[new_requirement] = req.extras processed[req] = True # return list of distros to activate return to_activate def find_plugins( self, plugin_env, full_env=None, installer=None, fallback=True): """Find all activatable distributions in `plugin_env` Example usage:: distributions, errors = working_set.find_plugins( Environment(plugin_dirlist) ) # add plugins+libs to sys.path map(working_set.add, distributions) # display errors print('Could not load', errors) The `plugin_env` should be an ``Environment`` instance that contains only distributions that are in the project's "plugin directory" or directories. The `full_env`, if supplied, should be an ``Environment`` contains all currently-available distributions. If `full_env` is not supplied, one is created automatically from the ``WorkingSet`` this method is called on, which will typically mean that every directory on ``sys.path`` will be scanned for distributions. `installer` is a standard installer callback as used by the ``resolve()`` method. The `fallback` flag indicates whether we should attempt to resolve older versions of a plugin if the newest version cannot be resolved. This method returns a 2-tuple: (`distributions`, `error_info`), where `distributions` is a list of the distributions found in `plugin_env` that were loadable, along with any other distributions that are needed to resolve their dependencies. `error_info` is a dictionary mapping unloadable plugin distributions to an exception instance describing the error that occurred. Usually this will be a ``DistributionNotFound`` or ``VersionConflict`` instance. 
""" plugin_projects = list(plugin_env) # scan project names in alphabetic order plugin_projects.sort() error_info = {} distributions = {} if full_env is None: env = Environment(self.entries) env += plugin_env else: env = full_env + plugin_env shadow_set = self.__class__([]) # put all our entries in shadow_set list(map(shadow_set.add, self)) for project_name in plugin_projects: for dist in plugin_env[project_name]: req = [dist.as_requirement()] try: resolvees = shadow_set.resolve(req, env, installer) except ResolutionError as v: # save error info error_info[dist] = v if fallback: # try the next older version of project continue else: # give up on this project, keep going break else: list(map(shadow_set.add, resolvees)) distributions.update(dict.fromkeys(resolvees)) # success, no need to try any more versions of this project break distributions = list(distributions) distributions.sort() return distributions, error_info def require(self, *requirements): """Ensure that distributions matching `requirements` are activated `requirements` must be a string or a (possibly-nested) sequence thereof, specifying the distributions and versions required. The return value is a sequence of the distributions that needed to be activated to fulfill the requirements; all relevant distributions are included, even if they were already activated in this working set. """ needed = self.resolve(parse_requirements(requirements)) for dist in needed: self.add(dist) return needed def subscribe(self, callback, existing=True): """Invoke `callback` for all distributions If `existing=True` (default), call on all existing ones, as well. """ if callback in self.callbacks: return self.callbacks.append(callback) if not existing: return for dist in self: callback(dist) def _added_new(self, dist): for callback in self.callbacks: callback(dist) def __getstate__(self): return ( self.entries[:], self.entry_keys.copy(), self.by_key.copy(), self.callbacks[:] ) def __setstate__(self, e_k_b_c): entries, keys, by_key, callbacks = e_k_b_c self.entries = entries[:] self.entry_keys = keys.copy() self.by_key = by_key.copy() self.callbacks = callbacks[:] class _ReqExtras(dict): """ Map each requirement to the extras that demanded it. """ def markers_pass(self, req, extras=None): """ Evaluate markers for req against each extra that demanded it. Return False if the req has a marker and fails evaluation. Otherwise, return True. """ extra_evals = ( req.marker.evaluate({'extra': extra}) for extra in self.get(req, ()) + (extras or (None,)) ) return not req.marker or any(extra_evals) class Environment(object): """Searchable snapshot of distributions on a search path""" def __init__( self, search_path=None, platform=get_supported_platform(), python=PY_MAJOR): """Snapshot distributions available on a search path Any distributions found on `search_path` are added to the environment. `search_path` should be a sequence of ``sys.path`` items. If not supplied, ``sys.path`` is used. `platform` is an optional string specifying the name of the platform that platform-specific distributions must be compatible with. If unspecified, it defaults to the current platform. `python` is an optional string naming the desired version of Python (e.g. ``'3.3'``); it defaults to the current version. You may explicitly set `platform` (and/or `python`) to ``None`` if you wish to map *all* distributions, not just those compatible with the running platform or Python version. 
""" self._distmap = {} self.platform = platform self.python = python self.scan(search_path) def can_add(self, dist): """Is distribution `dist` acceptable for this environment? The distribution must match the platform and python version requirements specified when this environment was created, or False is returned. """ py_compat = ( self.python is None or dist.py_version is None or dist.py_version == self.python ) return py_compat and compatible_platforms(dist.platform, self.platform) def remove(self, dist): """Remove `dist` from the environment""" self._distmap[dist.key].remove(dist) def scan(self, search_path=None): """Scan `search_path` for distributions usable in this environment Any distributions found are added to the environment. `search_path` should be a sequence of ``sys.path`` items. If not supplied, ``sys.path`` is used. Only distributions conforming to the platform/python version defined at initialization are added. """ if search_path is None: search_path = sys.path for item in search_path: for dist in find_distributions(item): self.add(dist) def __getitem__(self, project_name): """Return a newest-to-oldest list of distributions for `project_name` Uses case-insensitive `project_name` comparison, assuming all the project's distributions use their project's name converted to all lowercase as their key. """ distribution_key = project_name.lower() return self._distmap.get(distribution_key, []) def add(self, dist): """Add `dist` if we ``can_add()`` it and it has not already been added """ if self.can_add(dist) and dist.has_version(): dists = self._distmap.setdefault(dist.key, []) if dist not in dists: dists.append(dist) dists.sort(key=operator.attrgetter('hashcmp'), reverse=True) def best_match( self, req, working_set, installer=None, replace_conflicting=False): """Find distribution best matching `req` and usable on `working_set` This calls the ``find(req)`` method of the `working_set` to see if a suitable distribution is already active. (This may raise ``VersionConflict`` if an unsuitable version of the project is already active in the specified `working_set`.) If a suitable distribution isn't active, this method returns the newest distribution in the environment that meets the ``Requirement`` in `req`. If no suitable distribution is found, and `installer` is supplied, then the result of calling the environment's ``obtain(req, installer)`` method will be returned. """ try: dist = working_set.find(req) except VersionConflict: if not replace_conflicting: raise dist = None if dist is not None: return dist for dist in self[req.key]: if dist in req: return dist # try to download/install return self.obtain(req, installer) def obtain(self, requirement, installer=None): """Obtain a distribution matching `requirement` (e.g. via download) Obtain a distro that matches requirement (e.g. via download). In the base ``Environment`` class, this routine just returns ``installer(requirement)``, unless `installer` is None, in which case None is returned instead. 
This method is a hook that allows subclasses to attempt other ways of obtaining a distribution before falling back to the `installer` argument.""" if installer is not None: return installer(requirement) def __iter__(self): """Yield the unique project names of the available distributions""" for key in self._distmap.keys(): if self[key]: yield key def __iadd__(self, other): """In-place addition of a distribution or environment""" if isinstance(other, Distribution): self.add(other) elif isinstance(other, Environment): for project in other: for dist in other[project]: self.add(dist) else: raise TypeError("Can't add %r to environment" % (other,)) return self def __add__(self, other): """Add an environment or distribution to an environment""" new = self.__class__([], platform=None, python=None) for env in self, other: new += env return new # XXX backward compatibility AvailableDistributions = Environment class ExtractionError(RuntimeError): """An error occurred extracting a resource The following attributes are available from instances of this exception: manager The resource manager that raised this exception cache_path The base directory for resource extraction original_error The exception instance that caused extraction to fail """ class ResourceManager: """Manage resource extraction and packages""" extraction_path = None def __init__(self): self.cached_files = {} def resource_exists(self, package_or_requirement, resource_name): """Does the named resource exist?""" return get_provider(package_or_requirement).has_resource(resource_name) def resource_isdir(self, package_or_requirement, resource_name): """Is the named resource an existing directory?""" return get_provider(package_or_requirement).resource_isdir( resource_name ) def resource_filename(self, package_or_requirement, resource_name): """Return a true filesystem path for specified resource""" return get_provider(package_or_requirement).get_resource_filename( self, resource_name ) def resource_stream(self, package_or_requirement, resource_name): """Return a readable file-like object for specified resource""" return get_provider(package_or_requirement).get_resource_stream( self, resource_name ) def resource_string(self, package_or_requirement, resource_name): """Return specified resource as a string""" return get_provider(package_or_requirement).get_resource_string( self, resource_name ) def resource_listdir(self, package_or_requirement, resource_name): """List the contents of the named resource directory""" return get_provider(package_or_requirement).resource_listdir( resource_name ) def extraction_error(self): """Give an error message for problems extracting file(s)""" old_exc = sys.exc_info()[1] cache_path = self.extraction_path or get_default_cache() tmpl = textwrap.dedent(""" Can't extract file(s) to egg cache The following error occurred while trying to extract file(s) to the Python egg cache: {old_exc} The Python egg cache directory is currently set to: {cache_path} Perhaps your account does not have write access to this directory? You can change the cache directory by setting the PYTHON_EGG_CACHE environment variable to point to an accessible directory. """).lstrip() err = ExtractionError(tmpl.format(**locals())) err.manager = self err.cache_path = cache_path err.original_error = old_exc raise err def get_cache_path(self, archive_name, names=()): """Return absolute location in cache for `archive_name` and `names` The parent directory of the resulting path will be created if it does not already exist. 
`archive_name` should be the base filename of the enclosing egg (which may not be the name of the enclosing zipfile!), including its ".egg" extension. `names`, if provided, should be a sequence of path name parts "under" the egg's extraction location. This method should only be called by resource providers that need to obtain an extraction location, and only for names they intend to extract, as it tracks the generated names for possible cleanup later. """ extract_path = self.extraction_path or get_default_cache() target_path = os.path.join(extract_path, archive_name + '-tmp', *names) try: _bypass_ensure_directory(target_path) except Exception: self.extraction_error() self._warn_unsafe_extraction_path(extract_path) self.cached_files[target_path] = 1 return target_path @staticmethod def _warn_unsafe_extraction_path(path): """ If the default extraction path is overridden and set to an insecure location, such as /tmp, it opens up an opportunity for an attacker to replace an extracted file with an unauthorized payload. Warn the user if a known insecure location is used. See Distribute #375 for more details. """ if os.name == 'nt' and not path.startswith(os.environ['windir']): # On Windows, permissions are generally restrictive by default # and temp directories are not writable by other users, so # bypass the warning. return mode = os.stat(path).st_mode if mode & stat.S_IWOTH or mode & stat.S_IWGRP: msg = ( "%s is writable by group/others and vulnerable to attack " "when " "used with get_resource_filename. Consider a more secure " "location (set with .set_extraction_path or the " "PYTHON_EGG_CACHE environment variable)." % path ) warnings.warn(msg, UserWarning) def postprocess(self, tempname, filename): """Perform any platform-specific postprocessing of `tempname` This is where Mac header rewrites should be done; other platforms don't have anything special they should do. Resource providers should call this method ONLY after successfully extracting a compressed resource. They must NOT call it on resources that are already in the filesystem. `tempname` is the current (temporary) name of the file, and `filename` is the name it will be renamed to by the caller after this routine returns. """ if os.name == 'posix': # Make the resource executable mode = ((os.stat(tempname).st_mode) | 0o555) & 0o7777 os.chmod(tempname, mode) def set_extraction_path(self, path): """Set the base path where resources will be extracted to, if needed. If you do not call this routine before any extractions take place, the path defaults to the return value of ``get_default_cache()``. (Which is based on the ``PYTHON_EGG_CACHE`` environment variable, with various platform-specific fallbacks. See that routine's documentation for more details.) Resources are extracted to subdirectories of this path based upon information given by the ``IResourceProvider``. You may set this to a temporary directory, but then you must call ``cleanup_resources()`` to delete the extracted files when done. There is no guarantee that ``cleanup_resources()`` will be able to remove all extracted files. (Note: you may not change the extraction path for a given resource manager once resources have been extracted, unless you first call ``cleanup_resources()``.) 
""" if self.cached_files: raise ValueError( "Can't change extraction path, files already extracted" ) self.extraction_path = path def cleanup_resources(self, force=False): """ Delete all extracted resource files and directories, returning a list of the file and directory names that could not be successfully removed. This function does not have any concurrency protection, so it should generally only be called when the extraction path is a temporary directory exclusive to a single process. This method is not automatically called; you must call it explicitly or register it as an ``atexit`` function if you wish to ensure cleanup of a temporary directory used for extractions. """ # XXX def get_default_cache(): """ Return the ``PYTHON_EGG_CACHE`` environment variable or a platform-relevant user cache dir for an app named "Python-Eggs". """ return ( os.environ.get('PYTHON_EGG_CACHE') or appdirs.user_cache_dir(appname='Python-Eggs') ) def safe_name(name): """Convert an arbitrary string to a standard distribution name Any runs of non-alphanumeric/. characters are replaced with a single '-'. """ return re.sub('[^A-Za-z0-9.]+', '-', name) def safe_version(version): """ Convert an arbitrary string to a standard version string """ try: # normalize the version return str(packaging.version.Version(version)) except packaging.version.InvalidVersion: version = version.replace(' ', '.') return re.sub('[^A-Za-z0-9.]+', '-', version) def safe_extra(extra): """Convert an arbitrary string to a standard 'extra' name Any runs of non-alphanumeric characters are replaced with a single '_', and the result is always lowercased. """ return re.sub('[^A-Za-z0-9.-]+', '_', extra).lower() def to_filename(name): """Convert a project or version name to its filename-escaped form Any '-' characters are currently replaced with '_'. """ return name.replace('-', '_') def invalid_marker(text): """ Validate text as a PEP 508 environment marker; return an exception if invalid or False otherwise. """ try: evaluate_marker(text) except SyntaxError as e: e.filename = None e.lineno = None return e return False def evaluate_marker(text, extra=None): """ Evaluate a PEP 508 environment marker. Return a boolean indicating the marker result in this environment. Raise SyntaxError if marker is invalid. This implementation uses the 'pyparsing' module. 
""" try: marker = packaging.markers.Marker(text) return marker.evaluate() except packaging.markers.InvalidMarker as e: raise SyntaxError(e) class NullProvider: """Try to implement resources and metadata for arbitrary PEP 302 loaders""" egg_name = None egg_info = None loader = None def __init__(self, module): self.loader = getattr(module, '__loader__', None) self.module_path = os.path.dirname(getattr(module, '__file__', '')) def get_resource_filename(self, manager, resource_name): return self._fn(self.module_path, resource_name) def get_resource_stream(self, manager, resource_name): return io.BytesIO(self.get_resource_string(manager, resource_name)) def get_resource_string(self, manager, resource_name): return self._get(self._fn(self.module_path, resource_name)) def has_resource(self, resource_name): return self._has(self._fn(self.module_path, resource_name)) def has_metadata(self, name): return self.egg_info and self._has(self._fn(self.egg_info, name)) def get_metadata(self, name): if not self.egg_info: return "" value = self._get(self._fn(self.egg_info, name)) return value.decode('utf-8') if six.PY3 else value def get_metadata_lines(self, name): return yield_lines(self.get_metadata(name)) def resource_isdir(self, resource_name): return self._isdir(self._fn(self.module_path, resource_name)) def metadata_isdir(self, name): return self.egg_info and self._isdir(self._fn(self.egg_info, name)) def resource_listdir(self, resource_name): return self._listdir(self._fn(self.module_path, resource_name)) def metadata_listdir(self, name): if self.egg_info: return self._listdir(self._fn(self.egg_info, name)) return [] def run_script(self, script_name, namespace): script = 'scripts/' + script_name if not self.has_metadata(script): raise ResolutionError( "Script {script!r} not found in metadata at {self.egg_info!r}" .format(**locals()), ) script_text = self.get_metadata(script).replace('\r\n', '\n') script_text = script_text.replace('\r', '\n') script_filename = self._fn(self.egg_info, script) namespace['__file__'] = script_filename if os.path.exists(script_filename): source = open(script_filename).read() code = compile(source, script_filename, 'exec') exec(code, namespace, namespace) else: from linecache import cache cache[script_filename] = ( len(script_text), 0, script_text.split('\n'), script_filename ) script_code = compile(script_text, script_filename, 'exec') exec(script_code, namespace, namespace) def _has(self, path): raise NotImplementedError( "Can't perform this operation for unregistered loader type" ) def _isdir(self, path): raise NotImplementedError( "Can't perform this operation for unregistered loader type" ) def _listdir(self, path): raise NotImplementedError( "Can't perform this operation for unregistered loader type" ) def _fn(self, base, resource_name): if resource_name: return os.path.join(base, *resource_name.split('/')) return base def _get(self, path): if hasattr(self.loader, 'get_data'): return self.loader.get_data(path) raise NotImplementedError( "Can't perform this operation for loaders without 'get_data()'" ) register_loader_type(object, NullProvider) class EggProvider(NullProvider): """Provider based on a virtual filesystem""" def __init__(self, module): NullProvider.__init__(self, module) self._setup_prefix() def _setup_prefix(self): # we assume here that our metadata may be nested inside a "basket" # of multiple eggs; that's why we use module_path instead of .archive path = self.module_path old = None while path != old: if _is_egg_path(path): self.egg_name = 
os.path.basename(path) self.egg_info = os.path.join(path, 'EGG-INFO') self.egg_root = path break old = path path, base = os.path.split(path) class DefaultProvider(EggProvider): """Provides access to package resources in the filesystem""" def _has(self, path): return os.path.exists(path) def _isdir(self, path): return os.path.isdir(path) def _listdir(self, path): return os.listdir(path) def get_resource_stream(self, manager, resource_name): return open(self._fn(self.module_path, resource_name), 'rb') def _get(self, path): with open(path, 'rb') as stream: return stream.read() @classmethod def _register(cls): loader_cls = getattr( importlib_machinery, 'SourceFileLoader', type(None), ) register_loader_type(loader_cls, cls) DefaultProvider._register() class EmptyProvider(NullProvider): """Provider that returns nothing for all requests""" module_path = None _isdir = _has = lambda self, path: False def _get(self, path): return '' def _listdir(self, path): return [] def __init__(self): pass empty_provider = EmptyProvider() class ZipManifests(dict): """ zip manifest builder """ @classmethod def build(cls, path): """ Build a dictionary similar to the zipimport directory caches, except instead of tuples, store ZipInfo objects. Use a platform-specific path separator (os.sep) for the path keys for compatibility with pypy on Windows. """ with zipfile.ZipFile(path) as zfile: items = ( ( name.replace('/', os.sep), zfile.getinfo(name), ) for name in zfile.namelist() ) return dict(items) load = build class MemoizedZipManifests(ZipManifests): """ Memoized zipfile manifests. """ manifest_mod = collections.namedtuple('manifest_mod', 'manifest mtime') def load(self, path): """ Load a manifest at path or return a suitable manifest already loaded. """ path = os.path.normpath(path) mtime = os.stat(path).st_mtime if path not in self or self[path].mtime != mtime: manifest = self.build(path) self[path] = self.manifest_mod(manifest, mtime) return self[path].manifest class ZipProvider(EggProvider): """Resource support for zips and eggs""" eagers = None _zip_manifests = MemoizedZipManifests() def __init__(self, module): EggProvider.__init__(self, module) self.zip_pre = self.loader.archive + os.sep def _zipinfo_name(self, fspath): # Convert a virtual filename (full path to file) into a zipfile subpath # usable with the zipimport directory cache for our target archive fspath = fspath.rstrip(os.sep) if fspath == self.loader.archive: return '' if fspath.startswith(self.zip_pre): return fspath[len(self.zip_pre):] raise AssertionError( "%s is not a subpath of %s" % (fspath, self.zip_pre) ) def _parts(self, zip_path): # Convert a zipfile subpath into an egg-relative path part list. 
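        # Illustrative sketch with hypothetical paths: for an archive
        # '/srv/basket.zip' whose egg_root is '/srv/basket.zip/pkg.egg',
        # the zip subpath 'pkg.egg/EGG-INFO/PKG-INFO' yields the parts
        # ['EGG-INFO', 'PKG-INFO'].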
# pseudo-fs path fspath = self.zip_pre + zip_path if fspath.startswith(self.egg_root + os.sep): return fspath[len(self.egg_root) + 1:].split(os.sep) raise AssertionError( "%s is not a subpath of %s" % (fspath, self.egg_root) ) @property def zipinfo(self): return self._zip_manifests.load(self.loader.archive) def get_resource_filename(self, manager, resource_name): if not self.egg_name: raise NotImplementedError( "resource_filename() only supported for .egg, not .zip" ) # no need to lock for extraction, since we use temp names zip_path = self._resource_to_zip(resource_name) eagers = self._get_eager_resources() if '/'.join(self._parts(zip_path)) in eagers: for name in eagers: self._extract_resource(manager, self._eager_to_zip(name)) return self._extract_resource(manager, zip_path) @staticmethod def _get_date_and_size(zip_stat): size = zip_stat.file_size # ymdhms+wday, yday, dst date_time = zip_stat.date_time + (0, 0, -1) # 1980 offset already done timestamp = time.mktime(date_time) return timestamp, size def _extract_resource(self, manager, zip_path): if zip_path in self._index(): for name in self._index()[zip_path]: last = self._extract_resource( manager, os.path.join(zip_path, name) ) # return the extracted directory name return os.path.dirname(last) timestamp, size = self._get_date_and_size(self.zipinfo[zip_path]) if not WRITE_SUPPORT: raise IOError('"os.rename" and "os.unlink" are not supported ' 'on this platform') try: real_path = manager.get_cache_path( self.egg_name, self._parts(zip_path) ) if self._is_current(real_path, zip_path): return real_path outf, tmpnam = _mkstemp( ".$extract", dir=os.path.dirname(real_path), ) os.write(outf, self.loader.get_data(zip_path)) os.close(outf) utime(tmpnam, (timestamp, timestamp)) manager.postprocess(tmpnam, real_path) try: rename(tmpnam, real_path) except os.error: if os.path.isfile(real_path): if self._is_current(real_path, zip_path): # the file became current since it was checked above, # so proceed. 
return real_path # Windows, del old file and retry elif os.name == 'nt': unlink(real_path) rename(tmpnam, real_path) return real_path raise except os.error: # report a user-friendly error manager.extraction_error() return real_path def _is_current(self, file_path, zip_path): """ Return True if the file_path is current for this zip_path """ timestamp, size = self._get_date_and_size(self.zipinfo[zip_path]) if not os.path.isfile(file_path): return False stat = os.stat(file_path) if stat.st_size != size or stat.st_mtime != timestamp: return False # check that the contents match zip_contents = self.loader.get_data(zip_path) with open(file_path, 'rb') as f: file_contents = f.read() return zip_contents == file_contents def _get_eager_resources(self): if self.eagers is None: eagers = [] for name in ('native_libs.txt', 'eager_resources.txt'): if self.has_metadata(name): eagers.extend(self.get_metadata_lines(name)) self.eagers = eagers return self.eagers def _index(self): try: return self._dirindex except AttributeError: ind = {} for path in self.zipinfo: parts = path.split(os.sep) while parts: parent = os.sep.join(parts[:-1]) if parent in ind: ind[parent].append(parts[-1]) break else: ind[parent] = [parts.pop()] self._dirindex = ind return ind def _has(self, fspath): zip_path = self._zipinfo_name(fspath) return zip_path in self.zipinfo or zip_path in self._index() def _isdir(self, fspath): return self._zipinfo_name(fspath) in self._index() def _listdir(self, fspath): return list(self._index().get(self._zipinfo_name(fspath), ())) def _eager_to_zip(self, resource_name): return self._zipinfo_name(self._fn(self.egg_root, resource_name)) def _resource_to_zip(self, resource_name): return self._zipinfo_name(self._fn(self.module_path, resource_name)) register_loader_type(zipimport.zipimporter, ZipProvider) class FileMetadata(EmptyProvider): """Metadata handler for standalone PKG-INFO files Usage:: metadata = FileMetadata("/path/to/PKG-INFO") This provider rejects all data and metadata requests except for PKG-INFO, which is treated as existing, and will be the contents of the file at the provided location. 
""" def __init__(self, path): self.path = path def has_metadata(self, name): return name == 'PKG-INFO' and os.path.isfile(self.path) def get_metadata(self, name): if name != 'PKG-INFO': raise KeyError("No metadata except PKG-INFO is available") with io.open(self.path, encoding='utf-8', errors="replace") as f: metadata = f.read() self._warn_on_replacement(metadata) return metadata def _warn_on_replacement(self, metadata): # Python 2.7 compat for: replacement_char = '�' replacement_char = b'\xef\xbf\xbd'.decode('utf-8') if replacement_char in metadata: tmpl = "{self.path} could not be properly decoded in UTF-8" msg = tmpl.format(**locals()) warnings.warn(msg) def get_metadata_lines(self, name): return yield_lines(self.get_metadata(name)) class PathMetadata(DefaultProvider): """Metadata provider for egg directories Usage:: # Development eggs: egg_info = "/path/to/PackageName.egg-info" base_dir = os.path.dirname(egg_info) metadata = PathMetadata(base_dir, egg_info) dist_name = os.path.splitext(os.path.basename(egg_info))[0] dist = Distribution(basedir, project_name=dist_name, metadata=metadata) # Unpacked egg directories: egg_path = "/path/to/PackageName-ver-pyver-etc.egg" metadata = PathMetadata(egg_path, os.path.join(egg_path,'EGG-INFO')) dist = Distribution.from_filename(egg_path, metadata=metadata) """ def __init__(self, path, egg_info): self.module_path = path self.egg_info = egg_info class EggMetadata(ZipProvider): """Metadata provider for .egg files""" def __init__(self, importer): """Create a metadata provider from a zipimporter""" self.zip_pre = importer.archive + os.sep self.loader = importer if importer.prefix: self.module_path = os.path.join(importer.archive, importer.prefix) else: self.module_path = importer.archive self._setup_prefix() _declare_state('dict', _distribution_finders={}) def register_finder(importer_type, distribution_finder): """Register `distribution_finder` to find distributions in sys.path items `importer_type` is the type or class of a PEP 302 "Importer" (sys.path item handler), and `distribution_finder` is a callable that, passed a path item and the importer instance, yields ``Distribution`` instances found on that path item. See ``pkg_resources.find_on_path`` for an example.""" _distribution_finders[importer_type] = distribution_finder def find_distributions(path_item, only=False): """Yield distributions accessible via `path_item`""" importer = get_importer(path_item) finder = _find_adapter(_distribution_finders, importer) return finder(importer, path_item, only) def find_eggs_in_zip(importer, path_item, only=False): """ Find eggs in zip files; possibly multiple nested eggs. 
""" if importer.archive.endswith('.whl'): # wheels are not supported with this finder # they don't have PKG-INFO metadata, and won't ever contain eggs return metadata = EggMetadata(importer) if metadata.has_metadata('PKG-INFO'): yield Distribution.from_filename(path_item, metadata=metadata) if only: # don't yield nested distros return for subitem in metadata.resource_listdir('/'): if _is_egg_path(subitem): subpath = os.path.join(path_item, subitem) dists = find_eggs_in_zip(zipimport.zipimporter(subpath), subpath) for dist in dists: yield dist elif subitem.lower().endswith('.dist-info'): subpath = os.path.join(path_item, subitem) submeta = EggMetadata(zipimport.zipimporter(subpath)) submeta.egg_info = subpath yield Distribution.from_location(path_item, subitem, submeta) register_finder(zipimport.zipimporter, find_eggs_in_zip) def find_nothing(importer, path_item, only=False): return () register_finder(object, find_nothing) def _by_version_descending(names): """ Given a list of filenames, return them in descending order by version number. >>> names = 'bar', 'foo', 'Python-2.7.10.egg', 'Python-2.7.2.egg' >>> _by_version_descending(names) ['Python-2.7.10.egg', 'Python-2.7.2.egg', 'foo', 'bar'] >>> names = 'Setuptools-1.2.3b1.egg', 'Setuptools-1.2.3.egg' >>> _by_version_descending(names) ['Setuptools-1.2.3.egg', 'Setuptools-1.2.3b1.egg'] >>> names = 'Setuptools-1.2.3b1.egg', 'Setuptools-1.2.3.post1.egg' >>> _by_version_descending(names) ['Setuptools-1.2.3.post1.egg', 'Setuptools-1.2.3b1.egg'] """ def _by_version(name): """ Parse each component of the filename """ name, ext = os.path.splitext(name) parts = itertools.chain(name.split('-'), [ext]) return [packaging.version.parse(part) for part in parts] return sorted(names, key=_by_version, reverse=True) def find_on_path(importer, path_item, only=False): """Yield distributions accessible on a sys.path directory""" path_item = _normalize_cached(path_item) if _is_unpacked_egg(path_item): yield Distribution.from_filename( path_item, metadata=PathMetadata( path_item, os.path.join(path_item, 'EGG-INFO') ) ) return entries = safe_listdir(path_item) # for performance, before sorting by version, # screen entries for only those that will yield # distributions filtered = ( entry for entry in entries if dist_factory(path_item, entry, only) ) # scan for .egg and .egg-info in directory path_item_entries = _by_version_descending(filtered) for entry in path_item_entries: fullpath = os.path.join(path_item, entry) factory = dist_factory(path_item, entry, only) for dist in factory(fullpath): yield dist def dist_factory(path_item, entry, only): """ Return a dist_factory for a path_item and entry """ lower = entry.lower() is_meta = any(map(lower.endswith, ('.egg-info', '.dist-info'))) return ( distributions_from_metadata if is_meta else find_distributions if not only and _is_egg_path(entry) else resolve_egg_link if not only and lower.endswith('.egg-link') else NoDists() ) class NoDists: """ >>> bool(NoDists()) False >>> list(NoDists()('anything')) [] """ def __bool__(self): return False if six.PY2: __nonzero__ = __bool__ def __call__(self, fullpath): return iter(()) def safe_listdir(path): """ Attempt to list contents of path, but suppress some exceptions. 
""" try: return os.listdir(path) except (PermissionError, NotADirectoryError): pass except OSError as e: # Ignore the directory if does not exist, not a directory or # permission denied ignorable = ( e.errno in (errno.ENOTDIR, errno.EACCES, errno.ENOENT) # Python 2 on Windows needs to be handled this way :( or getattr(e, "winerror", None) == 267 ) if not ignorable: raise return () def distributions_from_metadata(path): root = os.path.dirname(path) if os.path.isdir(path): if len(os.listdir(path)) == 0: # empty metadata dir; skip return metadata = PathMetadata(root, path) else: metadata = FileMetadata(path) entry = os.path.basename(path) yield Distribution.from_location( root, entry, metadata, precedence=DEVELOP_DIST, ) def non_empty_lines(path): """ Yield non-empty lines from file at path """ with open(path) as f: for line in f: line = line.strip() if line: yield line def resolve_egg_link(path): """ Given a path to an .egg-link, resolve distributions present in the referenced path. """ referenced_paths = non_empty_lines(path) resolved_paths = ( os.path.join(os.path.dirname(path), ref) for ref in referenced_paths ) dist_groups = map(find_distributions, resolved_paths) return next(dist_groups, ()) register_finder(pkgutil.ImpImporter, find_on_path) if hasattr(importlib_machinery, 'FileFinder'): register_finder(importlib_machinery.FileFinder, find_on_path) _declare_state('dict', _namespace_handlers={}) _declare_state('dict', _namespace_packages={}) def register_namespace_handler(importer_type, namespace_handler): """Register `namespace_handler` to declare namespace packages `importer_type` is the type or class of a PEP 302 "Importer" (sys.path item handler), and `namespace_handler` is a callable like this:: def namespace_handler(importer, path_entry, moduleName, module): # return a path_entry to use for child packages Namespace handlers are only called if the importer object has already agreed that it can handle the relevant path item, and they should only return a subpath if the module __path__ does not already contain an equivalent subpath. For an example namespace handler, see ``pkg_resources.file_ns_handler``. """ _namespace_handlers[importer_type] = namespace_handler def _handle_ns(packageName, path_item): """Ensure that named package includes a subpath of path_item (if needed)""" importer = get_importer(path_item) if importer is None: return None loader = importer.find_module(packageName) if loader is None: return None module = sys.modules.get(packageName) if module is None: module = sys.modules[packageName] = types.ModuleType(packageName) module.__path__ = [] _set_parent_ns(packageName) elif not hasattr(module, '__path__'): raise TypeError("Not a package:", packageName) handler = _find_adapter(_namespace_handlers, importer) subpath = handler(importer, path_item, packageName, module) if subpath is not None: path = module.__path__ path.append(subpath) loader.load_module(packageName) _rebuild_mod_path(path, packageName, module) return subpath def _rebuild_mod_path(orig_path, package_name, module): """ Rebuild module.__path__ ensuring that all entries are ordered corresponding to their sys.path order """ sys_path = [_normalize_cached(p) for p in sys.path] def safe_sys_path_index(entry): """ Workaround for #520 and #513. 
""" try: return sys_path.index(entry) except ValueError: return float('inf') def position_in_sys_path(path): """ Return the ordinal of the path based on its position in sys.path """ path_parts = path.split(os.sep) module_parts = package_name.count('.') + 1 parts = path_parts[:-module_parts] return safe_sys_path_index(_normalize_cached(os.sep.join(parts))) if not isinstance(orig_path, list): # Is this behavior useful when module.__path__ is not a list? return orig_path.sort(key=position_in_sys_path) module.__path__[:] = [_normalize_cached(p) for p in orig_path] def declare_namespace(packageName): """Declare that package 'packageName' is a namespace package""" _imp.acquire_lock() try: if packageName in _namespace_packages: return path, parent = sys.path, None if '.' in packageName: parent = '.'.join(packageName.split('.')[:-1]) declare_namespace(parent) if parent not in _namespace_packages: __import__(parent) try: path = sys.modules[parent].__path__ except AttributeError: raise TypeError("Not a package:", parent) # Track what packages are namespaces, so when new path items are added, # they can be updated _namespace_packages.setdefault(parent, []).append(packageName) _namespace_packages.setdefault(packageName, []) for path_item in path: # Ensure all the parent's path items are reflected in the child, # if they apply _handle_ns(packageName, path_item) finally: _imp.release_lock() def fixup_namespace_packages(path_item, parent=None): """Ensure that previously-declared namespace packages include path_item""" _imp.acquire_lock() try: for package in _namespace_packages.get(parent, ()): subpath = _handle_ns(package, path_item) if subpath: fixup_namespace_packages(subpath, package) finally: _imp.release_lock() def file_ns_handler(importer, path_item, packageName, module): """Compute an ns-package subpath for a filesystem or zipfile importer""" subpath = os.path.join(path_item, packageName.split('.')[-1]) normalized = _normalize_cached(subpath) for item in module.__path__: if _normalize_cached(item) == normalized: break else: # Only return the path if it's not already there return subpath register_namespace_handler(pkgutil.ImpImporter, file_ns_handler) register_namespace_handler(zipimport.zipimporter, file_ns_handler) if hasattr(importlib_machinery, 'FileFinder'): register_namespace_handler(importlib_machinery.FileFinder, file_ns_handler) def null_ns_handler(importer, path_item, packageName, module): return None register_namespace_handler(object, null_ns_handler) def normalize_path(filename): """Normalize a file/dir name for comparison purposes""" return os.path.normcase(os.path.realpath(filename)) def _normalize_cached(filename, _cache={}): try: return _cache[filename] except KeyError: _cache[filename] = result = normalize_path(filename) return result def _is_egg_path(path): """ Determine if given path appears to be an egg. """ return path.lower().endswith('.egg') def _is_unpacked_egg(path): """ Determine if given path appears to be an unpacked egg. 
""" return ( _is_egg_path(path) and os.path.isfile(os.path.join(path, 'EGG-INFO', 'PKG-INFO')) ) def _set_parent_ns(packageName): parts = packageName.split('.') name = parts.pop() if parts: parent = '.'.join(parts) setattr(sys.modules[parent], name, sys.modules[packageName]) def yield_lines(strs): """Yield non-empty/non-comment lines of a string or sequence""" if isinstance(strs, six.string_types): for s in strs.splitlines(): s = s.strip() # skip blank lines/comments if s and not s.startswith('#'): yield s else: for ss in strs: for s in yield_lines(ss): yield s MODULE = re.compile(r"\w+(\.\w+)*$").match EGG_NAME = re.compile( r""" (?P[^-]+) ( -(?P[^-]+) ( -py(?P[^-]+) ( -(?P.+) )? )? )? """, re.VERBOSE | re.IGNORECASE, ).match class EntryPoint(object): """Object representing an advertised importable object""" def __init__(self, name, module_name, attrs=(), extras=(), dist=None): if not MODULE(module_name): raise ValueError("Invalid module name", module_name) self.name = name self.module_name = module_name self.attrs = tuple(attrs) self.extras = tuple(extras) self.dist = dist def __str__(self): s = "%s = %s" % (self.name, self.module_name) if self.attrs: s += ':' + '.'.join(self.attrs) if self.extras: s += ' [%s]' % ','.join(self.extras) return s def __repr__(self): return "EntryPoint.parse(%r)" % str(self) def load(self, require=True, *args, **kwargs): """ Require packages for this EntryPoint, then resolve it. """ if not require or args or kwargs: warnings.warn( "Parameters to load are deprecated. Call .resolve and " ".require separately.", DeprecationWarning, stacklevel=2, ) if require: self.require(*args, **kwargs) return self.resolve() def resolve(self): """ Resolve the entry point from its module and attrs. """ module = __import__(self.module_name, fromlist=['__name__'], level=0) try: return functools.reduce(getattr, self.attrs, module) except AttributeError as exc: raise ImportError(str(exc)) def require(self, env=None, installer=None): if self.extras and not self.dist: raise UnknownExtra("Can't require() without a distribution", self) # Get the requirements for this entry point with all its extras and # then resolve them. We have to pass `extras` along when resolving so # that the working set knows what extras we want. Otherwise, for # dist-info distributions, the working set will assume that the # requirements for that extra are purely optional and skip over them. 
        reqs = self.dist.requires(self.extras)
        items = working_set.resolve(reqs, env, installer, extras=self.extras)
        list(map(working_set.add, items))

    pattern = re.compile(
        r'\s*'
        r'(?P<name>.+?)\s*'
        r'=\s*'
        r'(?P<module>[\w.]+)\s*'
        r'(:\s*(?P<attr>[\w.]+))?\s*'
        r'(?P<extras>\[.*\])?\s*$'
    )

    @classmethod
    def parse(cls, src, dist=None):
        """Parse a single entry point from string `src`

        Entry point syntax follows the form::

            name = some.module:some.attr [extra1, extra2]

        The entry name and module name are required, but the ``:attrs`` and
        ``[extras]`` parts are optional
        """
        m = cls.pattern.match(src)
        if not m:
            msg = "EntryPoint must be in 'name=module:attrs [extras]' format"
            raise ValueError(msg, src)
        res = m.groupdict()
        extras = cls._parse_extras(res['extras'])
        attrs = res['attr'].split('.') if res['attr'] else ()
        return cls(res['name'], res['module'], attrs, extras, dist)

    @classmethod
    def _parse_extras(cls, extras_spec):
        if not extras_spec:
            return ()
        req = Requirement.parse('x' + extras_spec)
        if req.specs:
            raise ValueError()
        return req.extras

    @classmethod
    def parse_group(cls, group, lines, dist=None):
        """Parse an entry point group"""
        if not MODULE(group):
            raise ValueError("Invalid group name", group)
        this = {}
        for line in yield_lines(lines):
            ep = cls.parse(line, dist)
            if ep.name in this:
                raise ValueError("Duplicate entry point", group, ep.name)
            this[ep.name] = ep
        return this

    @classmethod
    def parse_map(cls, data, dist=None):
        """Parse a map of entry point groups"""
        if isinstance(data, dict):
            data = data.items()
        else:
            data = split_sections(data)
        maps = {}
        for group, lines in data:
            if group is None:
                if not lines:
                    continue
                raise ValueError("Entry points must be listed in groups")
            group = group.strip()
            if group in maps:
                raise ValueError("Duplicate group name", group)
            maps[group] = cls.parse_group(group, lines, dist)
        return maps


def _remove_md5_fragment(location):
    if not location:
        return ''
    parsed = urllib.parse.urlparse(location)
    if parsed[-1].startswith('md5='):
        return urllib.parse.urlunparse(parsed[:-1] + ('',))
    return location


def _version_from_file(lines):
    """
    Given an iterable of lines from a Metadata file, return the value of
    the Version field, if present, or None otherwise.
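    For example (the metadata lines are illustrative)::

        _version_from_file(['Name: example', 'Version: 1.0'])  # -> '1.0'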
""" def is_version_line(line): return line.lower().startswith('version:') version_lines = filter(is_version_line, lines) line = next(iter(version_lines), '') _, _, value = line.partition(':') return safe_version(value.strip()) or None class Distribution(object): """Wrap an actual or potential sys.path entry w/metadata""" PKG_INFO = 'PKG-INFO' def __init__( self, location=None, metadata=None, project_name=None, version=None, py_version=PY_MAJOR, platform=None, precedence=EGG_DIST): self.project_name = safe_name(project_name or 'Unknown') if version is not None: self._version = safe_version(version) self.py_version = py_version self.platform = platform self.location = location self.precedence = precedence self._provider = metadata or empty_provider @classmethod def from_location(cls, location, basename, metadata=None, **kw): project_name, version, py_version, platform = [None] * 4 basename, ext = os.path.splitext(basename) if ext.lower() in _distributionImpl: cls = _distributionImpl[ext.lower()] match = EGG_NAME(basename) if match: project_name, version, py_version, platform = match.group( 'name', 'ver', 'pyver', 'plat' ) return cls( location, metadata, project_name=project_name, version=version, py_version=py_version, platform=platform, **kw )._reload_version() def _reload_version(self): return self @property def hashcmp(self): return ( self.parsed_version, self.precedence, self.key, _remove_md5_fragment(self.location), self.py_version or '', self.platform or '', ) def __hash__(self): return hash(self.hashcmp) def __lt__(self, other): return self.hashcmp < other.hashcmp def __le__(self, other): return self.hashcmp <= other.hashcmp def __gt__(self, other): return self.hashcmp > other.hashcmp def __ge__(self, other): return self.hashcmp >= other.hashcmp def __eq__(self, other): if not isinstance(other, self.__class__): # It's not a Distribution, so they are not equal return False return self.hashcmp == other.hashcmp def __ne__(self, other): return not self == other # These properties have to be lazy so that we don't have to load any # metadata until/unless it's actually needed. (i.e., some distributions # may not know their name or version without loading PKG-INFO) @property def key(self): try: return self._key except AttributeError: self._key = key = self.project_name.lower() return key @property def parsed_version(self): if not hasattr(self, "_parsed_version"): self._parsed_version = parse_version(self.version) return self._parsed_version def _warn_legacy_version(self): LV = packaging.version.LegacyVersion is_legacy = isinstance(self._parsed_version, LV) if not is_legacy: return # While an empty version is technically a legacy version and # is not a valid PEP 440 version, it's also unlikely to # actually come from someone and instead it is more likely that # it comes from setuptools attempting to parse a filename and # including it in the list. So for that we'll gate this warning # on if the version is anything at all or not. if not self.version: return tmpl = textwrap.dedent(""" '{project_name} ({version})' is being parsed as a legacy, non PEP 440, version. You may find odd behavior and sort order. In particular it will be sorted as less than 0.0. It is recommended to migrate to PEP 440 compatible versions. 
""").strip().replace('\n', ' ') warnings.warn(tmpl.format(**vars(self)), PEP440Warning) @property def version(self): try: return self._version except AttributeError: version = _version_from_file(self._get_metadata(self.PKG_INFO)) if version is None: tmpl = "Missing 'Version:' header and/or %s file" raise ValueError(tmpl % self.PKG_INFO, self) return version @property def _dep_map(self): """ A map of extra to its list of (direct) requirements for this distribution, including the null extra. """ try: return self.__dep_map except AttributeError: self.__dep_map = self._filter_extras(self._build_dep_map()) return self.__dep_map @staticmethod def _filter_extras(dm): """ Given a mapping of extras to dependencies, strip off environment markers and filter out any dependencies not matching the markers. """ for extra in list(filter(None, dm)): new_extra = extra reqs = dm.pop(extra) new_extra, _, marker = extra.partition(':') fails_marker = marker and ( invalid_marker(marker) or not evaluate_marker(marker) ) if fails_marker: reqs = [] new_extra = safe_extra(new_extra) or None dm.setdefault(new_extra, []).extend(reqs) return dm def _build_dep_map(self): dm = {} for name in 'requires.txt', 'depends.txt': for extra, reqs in split_sections(self._get_metadata(name)): dm.setdefault(extra, []).extend(parse_requirements(reqs)) return dm def requires(self, extras=()): """List of Requirements needed for this distro if `extras` are used""" dm = self._dep_map deps = [] deps.extend(dm.get(None, ())) for ext in extras: try: deps.extend(dm[safe_extra(ext)]) except KeyError: raise UnknownExtra( "%s has no such extra feature %r" % (self, ext) ) return deps def _get_metadata(self, name): if self.has_metadata(name): for line in self.get_metadata_lines(name): yield line def activate(self, path=None, replace=False): """Ensure distribution is importable on `path` (default=sys.path)""" if path is None: path = sys.path self.insert_on(path, replace=replace) if path is sys.path: fixup_namespace_packages(self.location) for pkg in self._get_metadata('namespace_packages.txt'): if pkg in sys.modules: declare_namespace(pkg) def egg_name(self): """Return what this distribution's standard .egg filename should be""" filename = "%s-%s-py%s" % ( to_filename(self.project_name), to_filename(self.version), self.py_version or PY_MAJOR ) if self.platform: filename += '-' + self.platform return filename def __repr__(self): if self.location: return "%s (%s)" % (self, self.location) else: return str(self) def __str__(self): try: version = getattr(self, 'version', None) except ValueError: version = None version = version or "[unknown version]" return "%s %s" % (self.project_name, version) def __getattr__(self, attr): """Delegate all unrecognized public attributes to .metadata provider""" if attr.startswith('_'): raise AttributeError(attr) return getattr(self._provider, attr) @classmethod def from_filename(cls, filename, metadata=None, **kw): return cls.from_location( _normalize_cached(filename), os.path.basename(filename), metadata, **kw ) def as_requirement(self): """Return a ``Requirement`` that matches this distribution exactly""" if isinstance(self.parsed_version, packaging.version.Version): spec = "%s==%s" % (self.project_name, self.parsed_version) else: spec = "%s===%s" % (self.project_name, self.parsed_version) return Requirement.parse(spec) def load_entry_point(self, group, name): """Return the `name` entry point of `group` or raise ImportError""" ep = self.get_entry_info(group, name) if ep is None: raise ImportError("Entry point %r not 
found" % ((group, name),)) return ep.load() def get_entry_map(self, group=None): """Return the entry point map for `group`, or the full entry map""" try: ep_map = self._ep_map except AttributeError: ep_map = self._ep_map = EntryPoint.parse_map( self._get_metadata('entry_points.txt'), self ) if group is not None: return ep_map.get(group, {}) return ep_map def get_entry_info(self, group, name): """Return the EntryPoint object for `group`+`name`, or ``None``""" return self.get_entry_map(group).get(name) def insert_on(self, path, loc=None, replace=False): """Ensure self.location is on path If replace=False (default): - If location is already in path anywhere, do nothing. - Else: - If it's an egg and its parent directory is on path, insert just ahead of the parent. - Else: add to the end of path. If replace=True: - If location is already on path anywhere (not eggs) or higher priority than its parent (eggs) do nothing. - Else: - If it's an egg and its parent directory is on path, insert just ahead of the parent, removing any lower-priority entries. - Else: add it to the front of path. """ loc = loc or self.location if not loc: return nloc = _normalize_cached(loc) bdir = os.path.dirname(nloc) npath = [(p and _normalize_cached(p) or p) for p in path] for p, item in enumerate(npath): if item == nloc: if replace: break else: # don't modify path (even removing duplicates) if # found and not replace return elif item == bdir and self.precedence == EGG_DIST: # if it's an .egg, give it precedence over its directory # UNLESS it's already been added to sys.path and replace=False if (not replace) and nloc in npath[p:]: return if path is sys.path: self.check_version_conflict() path.insert(p, loc) npath.insert(p, nloc) break else: if path is sys.path: self.check_version_conflict() if replace: path.insert(0, loc) else: path.append(loc) return # p is the spot where we found or inserted loc; now remove duplicates while True: try: np = npath.index(nloc, p + 1) except ValueError: break else: del npath[np], path[np] # ha! p = np return def check_version_conflict(self): if self.key == 'setuptools': # ignore the inevitable setuptools self-conflicts :( return nsp = dict.fromkeys(self._get_metadata('namespace_packages.txt')) loc = normalize_path(self.location) for modname in self._get_metadata('top_level.txt'): if (modname not in sys.modules or modname in nsp or modname in _namespace_packages): continue if modname in ('pkg_resources', 'setuptools', 'site'): continue fn = getattr(sys.modules[modname], '__file__', None) if fn and (normalize_path(fn).startswith(loc) or fn.startswith(self.location)): continue issue_warning( "Module %s was already imported from %s, but %s is being added" " to sys.path" % (modname, fn, self.location), ) def has_version(self): try: self.version except ValueError: issue_warning("Unbuilt egg for " + repr(self)) return False return True def clone(self, **kw): """Copy this distribution, substituting in any changed keyword args""" names = 'project_name version py_version platform location precedence' for attr in names.split(): kw.setdefault(attr, getattr(self, attr, None)) kw.setdefault('metadata', self._provider) return self.__class__(**kw) @property def extras(self): return [dep for dep in self._dep_map if dep] class EggInfoDistribution(Distribution): def _reload_version(self): """ Packages installed by distutils (e.g. numpy or scipy), which uses an old safe_version, and so their version numbers can get mangled when converted to filenames (e.g., 1.11.0.dev0+2329eae to 1.11.0.dev0_2329eae). 
These distributions will not be parsed properly downstream by Distribution and safe_version, so take an extra step and try to get the version number from the metadata file itself instead of the filename. """ md_version = _version_from_file(self._get_metadata(self.PKG_INFO)) if md_version: self._version = md_version return self class DistInfoDistribution(Distribution): """ Wrap an actual or potential sys.path entry w/metadata, .dist-info style. """ PKG_INFO = 'METADATA' EQEQ = re.compile(r"([\(,])\s*(\d.*?)\s*([,\)])") @property def _parsed_pkg_info(self): """Parse and cache metadata""" try: return self._pkg_info except AttributeError: metadata = self.get_metadata(self.PKG_INFO) self._pkg_info = email.parser.Parser().parsestr(metadata) return self._pkg_info @property def _dep_map(self): try: return self.__dep_map except AttributeError: self.__dep_map = self._compute_dependencies() return self.__dep_map def _compute_dependencies(self): """Recompute this distribution's dependencies.""" dm = self.__dep_map = {None: []} reqs = [] # Including any condition expressions for req in self._parsed_pkg_info.get_all('Requires-Dist') or []: reqs.extend(parse_requirements(req)) def reqs_for_extra(extra): for req in reqs: if not req.marker or req.marker.evaluate({'extra': extra}): yield req common = frozenset(reqs_for_extra(None)) dm[None].extend(common) for extra in self._parsed_pkg_info.get_all('Provides-Extra') or []: s_extra = safe_extra(extra.strip()) dm[s_extra] = list(frozenset(reqs_for_extra(extra)) - common) return dm _distributionImpl = { '.egg': Distribution, '.egg-info': EggInfoDistribution, '.dist-info': DistInfoDistribution, } def issue_warning(*args, **kw): level = 1 g = globals() try: # find the first stack frame that is *not* code in # the pkg_resources module, to use for the warning while sys._getframe(level).f_globals is g: level += 1 except ValueError: pass warnings.warn(stacklevel=level + 1, *args, **kw) class RequirementParseError(ValueError): def __str__(self): return ' '.join(self.args) def parse_requirements(strs): """Yield ``Requirement`` objects for each specification in `strs` `strs` must be a string, or a (possibly-nested) iterable thereof. """ # create a steppable iterator, so we can handle \-continuations lines = iter(yield_lines(strs)) for line in lines: # Drop comments -- a hash without a space may be in a URL. if ' #' in line: line = line[:line.find(' #')] # If there is a line continuation, drop it, and append the next line. 
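
# The _distributionImpl table above is what from_location() uses to pick a
# Distribution subclass from the metadata directory/file extension
# (hypothetical basenames for illustration):
import os

assert _distributionImpl[os.path.splitext('FooBar-1.2-py2.7.egg')[1].lower()] is Distribution
assert _distributionImpl[os.path.splitext('FooBar-1.2.dist-info')[1].lower()] is DistInfoDistribution
assert _distributionImpl[os.path.splitext('FooBar.egg-info')[1].lower()] is EggInfoDistribution
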
if line.endswith('\\'): line = line[:-2].strip() try: line += next(lines) except StopIteration: return yield Requirement(line) class Requirement(packaging.requirements.Requirement): def __init__(self, requirement_string): """DO NOT CALL THIS UNDOCUMENTED METHOD; use Requirement.parse()!""" try: super(Requirement, self).__init__(requirement_string) except packaging.requirements.InvalidRequirement as e: raise RequirementParseError(str(e)) self.unsafe_name = self.name project_name = safe_name(self.name) self.project_name, self.key = project_name, project_name.lower() self.specs = [ (spec.operator, spec.version) for spec in self.specifier] self.extras = tuple(map(safe_extra, self.extras)) self.hashCmp = ( self.key, self.specifier, frozenset(self.extras), str(self.marker) if self.marker else None, ) self.__hash = hash(self.hashCmp) def __eq__(self, other): return ( isinstance(other, Requirement) and self.hashCmp == other.hashCmp ) def __ne__(self, other): return not self == other def __contains__(self, item): if isinstance(item, Distribution): if item.key != self.key: return False item = item.version # Allow prereleases always in order to match the previous behavior of # this method. In the future this should be smarter and follow PEP 440 # more accurately. return self.specifier.contains(item, prereleases=True) def __hash__(self): return self.__hash def __repr__(self): return "Requirement.parse(%r)" % str(self) @staticmethod def parse(s): req, = parse_requirements(s) return req def _always_object(classes): """ Ensure object appears in the mro even for old-style classes. """ if object not in classes: return classes + (object,) return classes def _find_adapter(registry, ob): """Return an adapter factory for `ob` from `registry`""" types = _always_object(inspect.getmro(getattr(ob, '__class__', type(ob)))) for t in types: if t in registry: return registry[t] def ensure_directory(path): """Ensure that the parent directory of `path` exists""" dirname = os.path.dirname(path) py31compat.makedirs(dirname, exist_ok=True) def _bypass_ensure_directory(path): """Sandbox-bypassing version of ensure_directory()""" if not WRITE_SUPPORT: raise IOError('"os.mkdir" not supported on this platform.') dirname, filename = split(path) if dirname and filename and not isdir(dirname): _bypass_ensure_directory(dirname) mkdir(dirname, 0o755) def split_sections(s): """Split a string or iterable thereof into (section, content) pairs Each ``section`` is a stripped version of the section header ("[section]") and each ``content`` is a list of stripped lines excluding blank lines and comment-only lines. If there are any such lines before the first section header, they're returned in a first ``section`` of ``None``. """ section = None content = [] for line in yield_lines(s): if line.startswith("["): if line.endswith("]"): if section or content: yield section, content section = line[1:-1].strip() content = [] else: raise ValueError("Invalid section heading", line) else: content.append(line) # wrap up last segment yield section, content def _mkstemp(*args, **kw): old_open = os.open try: # temporarily bypass sandboxing os.open = os_open return tempfile.mkstemp(*args, **kw) finally: # and then put it back os.open = old_open # Silence the PEP440Warning by default, so that end users don't get hit by it # randomly just because they use pkg_resources. We want to append the rule # because we want earlier uses of filterwarnings to take precedence over this # one. 
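
# Sketch of the requirement helpers above ('examplepkg' is made up):
# parse_requirements() yields Requirement objects, and a Distribution tests as
# "in" a Requirement when its key matches and its version satisfies the specifier.
reqs = list(parse_requirements("examplepkg[extra1]>=1.0\n# a comment line is skipped\n"))
r = reqs[0]
assert (r.key, r.extras, r.specs) == ('examplepkg', ('extra1',), [('>=', '1.0')])
assert Distribution(project_name='examplepkg', version='1.5') in r
assert Distribution(project_name='examplepkg', version='0.9') not in r
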
warnings.filterwarnings("ignore", category=PEP440Warning, append=True) # from jaraco.functools 1.3 def _call_aside(f, *args, **kwargs): f(*args, **kwargs) return f @_call_aside def _initialize(g=globals()): "Set up global resource manager (deliberately not state-saved)" manager = ResourceManager() g['_manager'] = manager g.update( (name, getattr(manager, name)) for name in dir(manager) if not name.startswith('_') ) @_call_aside def _initialize_master_working_set(): """ Prepare the master working set and make the ``require()`` API available. This function has explicit effects on the global state of pkg_resources. It is intended to be invoked once at the initialization of this module. Invocation by other packages is unsupported and done at their own risk. """ working_set = WorkingSet._build_master() _declare_state('object', working_set=working_set) require = working_set.require iter_entry_points = working_set.iter_entry_points add_activation_listener = working_set.subscribe run_script = working_set.run_script # backward compatibility run_main = run_script # Activate all distributions already on sys.path with replace=False and # ensure that all distributions added to the working set in the future # (e.g. by calling ``require()``) will get activated as well, # with higher priority (replace=True). tuple( dist.activate(replace=False) for dist in working_set ) add_activation_listener( lambda dist: dist.activate(replace=True), existing=False, ) working_set.entries = [] # match order list(map(working_set.add_entry, sys.path)) globals().update(locals()) PK Zc+̓̓1site-packages/pkg_resources/_vendor/pyparsing.pyonu[ fci@sdZdZdZdZddlZddlmZddlZddl Z ddl Z ddl Z ddl Z ddl Z ddlZddlZddlZddlmZyddlmZWn!ek rddlmZnXydd l mZWn?ek r=ydd lmZWnek r9eZnXnXd d d d ddddddddddddddddddd d!d"d#d$d%d&d'd(d)d*d+d,d-d.d/d0d1d2d3d4d5d6d7d8d9d:d;d<d=d>d?d@dAdBdCdDdEdFdGdHdIdJdKdLdMdNdOdPdQdRdSdTdUdVdWdXdYdZd[d\d]d^d_d`dadbdcdddedfdgdhdidjdkdldmdndodpdqdrgiZee jds ZedtdskZere jZ e!Z"e#Z$e!Z%e&e'e(e)e*ee+e,e-e.e/g Z0nre j1Z e2Z3duZ%gZ0ddl4Z4xEdvj5D]7Z6ye0j7e8e4e6Wne9k rZq$nXq$We:dwe3dxDZ;dyZ<dze=fd{YZ>ej?ej@ZAd|ZBeBd}ZCeAeBZDe#d~ZEdjFdejGDZHd!eIfdYZJd#eJfdYZKd%eJfdYZLd'eLfdYZMd*eIfdYZNde=fdYZOd&e=fdYZPe jQjRePdZSdZTdZUdZVdZWdZXdZYddZZd(e=fdYZ[d0e[fdYZ\de\fdYZ]de\fdYZ^de\fdYZ_e_Z`e_e[_ade\fdYZbd e_fdYZcd ebfdYZddpe\fdYZed3e\fdYZfd+e\fdYZgd)e\fdYZhd e\fdYZid2e\fdYZjde\fdYZkdekfdYZldekfdYZmdekfdYZnd.ekfdYZod-ekfdYZpd5ekfdYZqd4ekfdYZrd$e[fdYZsd esfdYZtd esfdYZudesfdYZvdesfdYZwd"e[fdYZxdexfdYZydexfdYZzdexfdYZ{de{fdYZ|d6e{fdYZ}de=fdYZ~e~ZdexfdYZd,exfdYZdexfdYZdefdYZd1exfdYZdefdYZdefdYZdefdYZd/efdYZde=fdYZdZdedZedZdZdZdZdZeedZdZedZdZdZe]jdGZemjdMZenjdLZeojdeZepjddZefeEdddjdZegdjdZegdjdZeeBeBefeHddddxBegde jBZeeedeZe_dedjdee|eeBjddZdZdZdZdZdZedZedZdZdZdZdZe=e_ddZe>Ze=e_e=e_ededdZeZeegddjdZeegddjdZeegddegddBjdZee`dejjdZddeejdZedZedZedZeefeAeDdjd\ZZeedj5dZegddjFejdjdZdZeegddjdZegdjdZegd jjd Zegd jd ZeegddeBjd ZeZegdjdZee|efeHddeefde_denjjdZeeejeBddjd>ZdrfdYZedkrecdZecdZefeAeDdZeeddejeZeeejdZdeBZeeddejeZeeejdZededeedZejdejjdejjdejjd ddlZejjeejejjd!ndS("sS pyparsing module - Classes and methods to define and execute parsing grammars The pyparsing module is an alternative approach to creating and executing simple grammars, vs. the traditional lex/yacc approach, or the use of regular expressions. With pyparsing, you don't need to learn a new syntax for defining grammars or matching expressions - the parsing module provides a library of classes that you use to construct the grammar directly in Python. 
Here is a program to parse "Hello, World!" (or any greeting of the form C{"<salutation>, <addressee>!"}), built up using L{Word}, L{Literal}, and L{And} elements (L{'+'} operator gives L{And} expressions, strings are auto-converted to L{Literal} expressions)::

    from pyparsing import Word, alphas

    # define grammar of a greeting
    greet = Word(alphas) + "," + Word(alphas) + "!"

    hello = "Hello, World!"
    print (hello, "->", greet.parseString(hello))

The program outputs the following::

    Hello, World! -> ['Hello', ',', 'World', '!']

The Python representation of the grammar is quite readable, owing to the self-explanatory class names, and the use of '+', '|' and '^' operators.

The L{ParseResults} object returned from L{ParserElement.parseString} can be accessed as a nested list, a dictionary, or an object with named attributes.

The pyparsing module handles some of the problems that are typically vexing when writing text parsers:
 - extra or missing whitespace (the above program will also handle "Hello,World!", "Hello , World !", etc.)
 - quoted strings
 - embedded comments

__version__ = "2.1.10"
__versionTime__ = "07 Oct 2016 01:31 UTC"
__author__ = "Paul McGuire"
in a string of data.s&><"'css|]}d|dVqdS(t&t;N((Rts((sC/usr/lib/python2.7/site-packages/pkg_resources/_vendor/pyparsing.pys ssamp gt lt quot apos(tsplittziptreplace(tdatat from_symbolst to_symbolstfrom_tto_((sC/usr/lib/python2.7/site-packages/pkg_resources/_vendor/pyparsing.pyt _xml_escapes t _ConstantscBseZRS((t__name__t __module__(((sC/usr/lib/python2.7/site-packages/pkg_resources/_vendor/pyparsing.pyRst 0123456789t ABCDEFabcdefi\Rrccs$|]}|tjkr|VqdS(N(tstringt whitespace(Rtc((sC/usr/lib/python2.7/site-packages/pkg_resources/_vendor/pyparsing.pys scBs_eZdZdd d dZedZdZdZdZ ddZ d Z RS( s7base exception class for all parsing runtime exceptionsicCs[||_|dkr*||_d|_n||_||_||_|||f|_dS(NRr(tloctNonetmsgtpstrt parserElementtargs(tselfRRRtelem((sC/usr/lib/python2.7/site-packages/pkg_resources/_vendor/pyparsing.pyt__init__s       cCs||j|j|j|jS(s internal factory method to simplify creating one type of ParseException from another - avoids having __init__ signature conflicts among subclasses (RRRR(tclstpe((sC/usr/lib/python2.7/site-packages/pkg_resources/_vendor/pyparsing.pyt_from_exceptionscCsm|dkrt|j|jS|dkr>t|j|jS|dkr]t|j|jSt|dS(ssupported attributes by name are: - lineno - returns the line number of the exception text - col - returns the column number of the exception text - line - returns the line containing the exception text RHR7tcolumnREN(R7R(RHRRR7REtAttributeError(Rtaname((sC/usr/lib/python2.7/site-packages/pkg_resources/_vendor/pyparsing.pyt __getattr__s   cCs d|j|j|j|jfS(Ns"%s (at char %d), (line:%d, col:%d)(RRRHR(R((sC/usr/lib/python2.7/site-packages/pkg_resources/_vendor/pyparsing.pyt__str__scCs t|S(N(R(R((sC/usr/lib/python2.7/site-packages/pkg_resources/_vendor/pyparsing.pyt__repr__ss>!} ('-' operator) indicates that parsing is to stop immediately because an unbacktrackable syntax error has been found(RRR(((sC/usr/lib/python2.7/site-packages/pkg_resources/_vendor/pyparsing.pyR!scBs eZdZdZdZRS(sZexception thrown by L{ParserElement.validate} if the grammar could be improperly recursivecCs ||_dS(N(tparseElementTrace(RtparseElementList((sC/usr/lib/python2.7/site-packages/pkg_resources/_vendor/pyparsing.pyRscCs d|jS(NsRecursiveGrammarException: %s(R(R((sC/usr/lib/python2.7/site-packages/pkg_resources/_vendor/pyparsing.pyR s(RRRRR(((sC/usr/lib/python2.7/site-packages/pkg_resources/_vendor/pyparsing.pyR$s t_ParseResultsWithOffsetcBs,eZdZdZdZdZRS(cCs||f|_dS(N(ttup(Rtp1tp2((sC/usr/lib/python2.7/site-packages/pkg_resources/_vendor/pyparsing.pyR$scCs |j|S(N(R(Rti((sC/usr/lib/python2.7/site-packages/pkg_resources/_vendor/pyparsing.pyt __getitem__&scCst|jdS(Ni(treprR(R((sC/usr/lib/python2.7/site-packages/pkg_resources/_vendor/pyparsing.pyR(scCs|jd|f|_dS(Ni(R(RR((sC/usr/lib/python2.7/site-packages/pkg_resources/_vendor/pyparsing.pyt setOffset*s(RRRRRR(((sC/usr/lib/python2.7/site-packages/pkg_resources/_vendor/pyparsing.pyR#s   cBseZdZd-d-eedZd-d-eeedZdZedZ dZ dZ dZ dZ e Zd Zd Zd Zd Zd ZereZeZeZn-eZeZeZdZdZdZdZdZd-dZdZdZdZ dZ!dZ"dZ#dZ$dZ%dZ&dZ'ddZ(d Z)d!Z*d"Z+d-e,ded#Z-d$Z.d%Z/dd&ed'Z0d(Z1d)Z2d*Z3d+Z4d,Z5RS(.sI Structured parse results, to provide multiple means of access to the parsed data: - as a list (C{len(results)}) - by list index (C{results[0], results[1]}, etc.) 
- by attribute (C{results.} - see L{ParserElement.setResultsName}) Example:: integer = Word(nums) date_str = (integer.setResultsName("year") + '/' + integer.setResultsName("month") + '/' + integer.setResultsName("day")) # equivalent form: # date_str = integer("year") + '/' + integer("month") + '/' + integer("day") # parseString returns a ParseResults object result = date_str.parseString("1999/12/31") def test(s, fn=repr): print("%s -> %s" % (s, fn(eval(s)))) test("list(result)") test("result[0]") test("result['month']") test("result.day") test("'month' in result") test("'minutes' in result") test("result.dump()", str) prints:: list(result) -> ['1999', '/', '12', '/', '31'] result[0] -> '1999' result['month'] -> '12' result.day -> '31' 'month' in result -> True 'minutes' in result -> False result.dump() -> ['1999', '/', '12', '/', '31'] - day: 31 - month: 12 - year: 1999 cCs/t||r|Stj|}t|_|S(N(Rstobjectt__new__tTruet_ParseResults__doinit(RttoklisttnametasListtmodaltretobj((sC/usr/lib/python2.7/site-packages/pkg_resources/_vendor/pyparsing.pyRTs  cCs|jrt|_d|_d|_i|_||_||_|dkrTg}n||trp||_ n-||t rt||_ n |g|_ t |_ n|dk r|r|sd|j|s(R(R((RsC/usr/lib/python2.7/site-packages/pkg_resources/_vendor/pyparsing.pyt _itervaluesscsfdjDS(Nc3s|]}||fVqdS(N((RR(R(sC/usr/lib/python2.7/site-packages/pkg_resources/_vendor/pyparsing.pys s(R(R((RsC/usr/lib/python2.7/site-packages/pkg_resources/_vendor/pyparsing.pyt _iteritemsscCst|jS(sVReturns all named result keys (as a list in Python 2.x, as an iterator in Python 3.x).(RR(R((sC/usr/lib/python2.7/site-packages/pkg_resources/_vendor/pyparsing.pytkeysscCst|jS(sXReturns all named result values (as a list in Python 2.x, as an iterator in Python 3.x).(Rt itervalues(R((sC/usr/lib/python2.7/site-packages/pkg_resources/_vendor/pyparsing.pytvaluesscCst|jS(sfReturns all named result key-values (as a list of tuples in Python 2.x, as an iterator in Python 3.x).(Rt iteritems(R((sC/usr/lib/python2.7/site-packages/pkg_resources/_vendor/pyparsing.pyRscCs t|jS(sSince keys() returns an iterator, this method is helpful in bypassing code that looks for the existence of any defined results names.(tboolR(R((sC/usr/lib/python2.7/site-packages/pkg_resources/_vendor/pyparsing.pythaskeysscOs|sdg}nxI|jD];\}}|dkrJ|d|f}qtd|qWt|dtst|dks|d|kr|d}||}||=|S|d}|SdS(s Removes and returns item at specified index (default=C{last}). Supports both C{list} and C{dict} semantics for C{pop()}. If passed no argument or an integer argument, it will use C{list} semantics and pop tokens from the list of parsed tokens. If passed a non-integer argument (most likely a string), it will use C{dict} semantics and pop the corresponding value from any defined results names. A second default return value argument is supported, just as in C{dict.pop()}. 
Example:: def remove_first(tokens): tokens.pop(0) print(OneOrMore(Word(nums)).parseString("0 123 321")) # -> ['0', '123', '321'] print(OneOrMore(Word(nums)).addParseAction(remove_first).parseString("0 123 321")) # -> ['123', '321'] label = Word(alphas) patt = label("LABEL") + OneOrMore(Word(nums)) print(patt.parseString("AAB 123 321").dump()) # Use pop() in a parse action to remove named result (note that corresponding value is not # removed from list form of results) def remove_LABEL(tokens): tokens.pop("LABEL") return tokens patt.addParseAction(remove_LABEL) print(patt.parseString("AAB 123 321").dump()) prints:: ['AAB', '123', '321'] - LABEL: AAB ['AAB', '123', '321'] itdefaultis-pop() got an unexpected keyword argument '%s'iN(RRRsRoR(RRtkwargsRRtindexR}t defaultvalue((sC/usr/lib/python2.7/site-packages/pkg_resources/_vendor/pyparsing.pytpops"     cCs||kr||S|SdS(si Returns named result matching the given key, or if there is no such name, then returns the given C{defaultValue} or C{None} if no C{defaultValue} is specified. Similar to C{dict.get()}. Example:: integer = Word(nums) date_str = integer("year") + '/' + integer("month") + '/' + integer("day") result = date_str.parseString("1999/12/31") print(result.get("year")) # -> '1999' print(result.get("hour", "not specified")) # -> 'not specified' print(result.get("hour")) # -> None N((Rtkeyt defaultValue((sC/usr/lib/python2.7/site-packages/pkg_resources/_vendor/pyparsing.pyRs cCsw|jj||x]|jjD]L\}}x=t|D]/\}\}}t||||k|| ['0', '123', '321'] # use a parse action to insert the parse location in the front of the parsed results def insert_locn(locn, tokens): tokens.insert(0, locn) print(OneOrMore(Word(nums)).addParseAction(insert_locn).parseString("0 123 321")) # -> [0, '0', '123', '321'] N(RtinsertRRRR(RRtinsStrRRRRR((sC/usr/lib/python2.7/site-packages/pkg_resources/_vendor/pyparsing.pyR2scCs|jj|dS(s Add single element to end of ParseResults list of elements. Example:: print(OneOrMore(Word(nums)).parseString("0 123 321")) # -> ['0', '123', '321'] # use a parse action to compute the sum of the parsed integers, and add it to the end def append_sum(tokens): tokens.append(sum(map(int, tokens))) print(OneOrMore(Word(nums)).addParseAction(append_sum).parseString("0 123 321")) # -> ['0', '123', '321', 444] N(Rtappend(Rtitem((sC/usr/lib/python2.7/site-packages/pkg_resources/_vendor/pyparsing.pyRFs cCs0t|tr||7}n|jj|dS(s Add sequence of elements to end of ParseResults list of elements. Example:: patt = OneOrMore(Word(alphas)) # use a parse action to append the reverse of the matched strings, to make a palindrome def make_palindrome(tokens): tokens.extend(reversed([t[::-1] for t in tokens])) return ''.join(tokens) print(patt.addParseAction(make_palindrome).parseString("lskdj sdlkjf lksd")) # -> 'lskdjsdlkjflksddsklfjkldsjdksl' N(RsR Rtextend(Rtitemseq((sC/usr/lib/python2.7/site-packages/pkg_resources/_vendor/pyparsing.pyRTs  cCs|j2|jjdS(s7 Clear all elements and results names. 
N(RRtclear(R((sC/usr/lib/python2.7/site-packages/pkg_resources/_vendor/pyparsing.pyRfscCsy ||SWntk r dSX||jkr}||jkrR|j|ddStg|j|D]}|d^qcSndSdS(NRrii(RRRR (RRR((sC/usr/lib/python2.7/site-packages/pkg_resources/_vendor/pyparsing.pyRms  +cCs|j}||7}|S(N(R(RtotherR}((sC/usr/lib/python2.7/site-packages/pkg_resources/_vendor/pyparsing.pyt__add__{s  c s|jrt|jfd}|jj}g|D]<\}}|D])}|t|d||df^qMq=}xJ|D]?\}}|||st](RR(R((sC/usr/lib/python2.7/site-packages/pkg_resources/_vendor/pyparsing.pyRsRrcCsog}xb|jD]W}|r2|r2|j|nt|trT||j7}q|jt|qW|S(N(RRRsR t _asStringListR(RtseptoutR((sC/usr/lib/python2.7/site-packages/pkg_resources/_vendor/pyparsing.pyRs cCs5g|jD]'}t|tr+|jn|^q S(s Returns the parse results as a nested list of matching tokens, all converted to strings. Example:: patt = OneOrMore(Word(alphas)) result = patt.parseString("sldkj lsdkj sldkj") # even though the result prints in string-like form, it is actually a pyparsing ParseResults print(type(result), result) # -> ['sldkj', 'lsdkj', 'sldkj'] # Use asList() to create an actual list result_list = result.asList() print(type(result_list), result_list) # -> ['sldkj', 'lsdkj', 'sldkj'] (RRsR R(Rtres((sC/usr/lib/python2.7/site-packages/pkg_resources/_vendor/pyparsing.pyRscsGtr|j}n |j}fdtfd|DS(s Returns the named parse results as a nested dictionary. Example:: integer = Word(nums) date_str = integer("year") + '/' + integer("month") + '/' + integer("day") result = date_str.parseString('12/31/1999') print(type(result), repr(result)) # -> (['12', '/', '31', '/', '1999'], {'day': [('1999', 4)], 'year': [('12', 0)], 'month': [('31', 2)]}) result_dict = result.asDict() print(type(result_dict), repr(result_dict)) # -> {'day': '1999', 'year': '12', 'month': '31'} # even though a ParseResults supports dict-like access, sometime you just need to have a dict import json print(json.dumps(result)) # -> Exception: TypeError: ... is not JSON serializable print(json.dumps(result.asDict())) # -> {"month": "31", "day": "1999", "year": "12"} csMt|trE|jr%|jSg|D]}|^q,Sn|SdS(N(RsR RtasDict(R|R(ttoItem(sC/usr/lib/python2.7/site-packages/pkg_resources/_vendor/pyparsing.pyRs    c3s'|]\}}||fVqdS(N((RRR(R(sC/usr/lib/python2.7/site-packages/pkg_resources/_vendor/pyparsing.pys s(tPY_3RRR(Rtitem_fn((RsC/usr/lib/python2.7/site-packages/pkg_resources/_vendor/pyparsing.pyRs    cCsPt|j}|jj|_|j|_|jj|j|j|_|S(sA Returns a new copy of a C{ParseResults} object. (R RRRRRR R(RR}((sC/usr/lib/python2.7/site-packages/pkg_resources/_vendor/pyparsing.pyRs   c Csd}g}td|jjD}|d}|sPd}d}d}nd } |d k rk|} n|jr|j} n| s|rdSd} n|||d| dg7}x t|jD]\} } t| trI| |kr|| j || |o|d k||g7}q|| j d |o6|d k||g7}qd } | |krh|| } n| s|rzqqd} nt t | } |||d| d| d| dg 7}qW|||d| dg7}dj |S( s (Deprecated) Returns the parse results as XML. Tags are created for tokens and lists that have defined results names. s css2|](\}}|D]}|d|fVqqdS(iN((RRRR((sC/usr/lib/python2.7/site-packages/pkg_resources/_vendor/pyparsing.pys s s RrtITEMtsgss %s%s- %s: s icss|]}t|tVqdS(N(RsR (Rtvv((sC/usr/lib/python2.7/site-packages/pkg_resources/_vendor/pyparsing.pys sss %s%s[%d]: %s%s%sRr( RRRRtsortedRRsR tdumpRtanyRR( RR$tdepthtfullRtNLRRRRR1((sC/usr/lib/python2.7/site-packages/pkg_resources/_vendor/pyparsing.pyR3Ps,  B?cOstj|j||dS(s Pretty-printer for parsed results as a list, using the C{pprint} module. Accepts additional positional or keyword args as defined for the C{pprint.pprint} method. 
(U{http://docs.python.org/3/library/pprint.html#pprint.pprint}) Example:: ident = Word(alphas, alphanums) num = Word(nums) func = Forward() term = ident | num | Group('(' + func + ')') func <<= ident + Group(Optional(delimitedList(term))) result = func.parseString("fna a,b,(fnb c,d,200),100") result.pprint(width=40) prints:: ['fna', ['a', 'b', ['(', 'fnb', ['c', 'd', '200'], ')'], '100']] N(tpprintR(RRR((sC/usr/lib/python2.7/site-packages/pkg_resources/_vendor/pyparsing.pyR8}scCsC|j|jj|jdk r-|jp0d|j|jffS(N(RRRRRRR(R((sC/usr/lib/python2.7/site-packages/pkg_resources/_vendor/pyparsing.pyt __getstate__s  cCsm|d|_|d\|_}}|_i|_|jj||dk r`t||_n d|_dS(Nii(RRRRR RRR(RtstateR/t inAccumNames((sC/usr/lib/python2.7/site-packages/pkg_resources/_vendor/pyparsing.pyt __setstate__s   cCs|j|j|j|jfS(N(RRRR(R((sC/usr/lib/python2.7/site-packages/pkg_resources/_vendor/pyparsing.pyt__getnewargs__scCs tt|t|jS(N(RRRR(R((sC/usr/lib/python2.7/site-packages/pkg_resources/_vendor/pyparsing.pyRsN(6RRRRRRRsRRRRRRRt __nonzero__RRRRRRRRRRRRRRRRRRRRR RRRRRRRRRR!R-R0R3R8R9R<R=R(((sC/usr/lib/python2.7/site-packages/pkg_resources/_vendor/pyparsing.pyR -sh& '              4             # =  %-   cCsW|}d|ko#t|knr@||ddkr@dS||jdd|S(sReturns current column within a string, counting newlines as line separators. The first column is number 1. Note: the default parsing behavior is to expand tabs in the input string before starting the parsing process. See L{I{ParserElement.parseString}} for more information on parsing strings containing C{}s, and suggested methods to maintain a consistent view of the parsed string, the parse location, and line and column positions within the parsed string. iis (Rtrfind(RtstrgR((sC/usr/lib/python2.7/site-packages/pkg_resources/_vendor/pyparsing.pyR7s cCs|jdd|dS(sReturns current line number within a string, counting newlines as line separators. The first line is number 1. Note: the default parsing behavior is to expand tabs in the input string before starting the parsing process. See L{I{ParserElement.parseString}} for more information on parsing strings containing C{}s, and suggested methods to maintain a consistent view of the parsed string, the parse location, and line and column positions within the parsed string. s ii(tcount(RR@((sC/usr/lib/python2.7/site-packages/pkg_resources/_vendor/pyparsing.pyRHs cCsR|jdd|}|jd|}|dkrB||d|!S||dSdS(sfReturns the line of text containing loc within a string, counting newlines as line separators. 
s iiN(R?tfind(RR@tlastCRtnextCR((sC/usr/lib/python2.7/site-packages/pkg_resources/_vendor/pyparsing.pyREs  cCsAdt|dt|dt||t||fGHdS(NsMatch s at loc s(%d,%d)(RRHR7(tinstringRtexpr((sC/usr/lib/python2.7/site-packages/pkg_resources/_vendor/pyparsing.pyt_defaultStartDebugActionscCs'dt|dt|jGHdS(NsMatched s -> (RRuR(REtstartloctendlocRFttoks((sC/usr/lib/python2.7/site-packages/pkg_resources/_vendor/pyparsing.pyt_defaultSuccessDebugActionscCsdt|GHdS(NsException raised:(R(RERRFtexc((sC/usr/lib/python2.7/site-packages/pkg_resources/_vendor/pyparsing.pyt_defaultExceptionDebugActionscGsdS(sG'Do-nothing' debug action, to suppress debugging output during parsing.N((R((sC/usr/lib/python2.7/site-packages/pkg_resources/_vendor/pyparsing.pyROsics tkrfdSdgtgtd dkrVdd}ddntj}tjd}|d dd }|d|d |ffd }d }y"tdtdj}Wntk rt }nX||_|S(Ncs |S(N((RtlRp(tfunc(sC/usr/lib/python2.7/site-packages/pkg_resources/_vendor/pyparsing.pyRqRriiiicSsJtdkrdnd}tjd| |d|}|j|jfgS( Niiiiitlimiti(iii(tsystem_versiont tracebackt extract_stacktfilenameRH(RPR t frame_summary((sC/usr/lib/python2.7/site-packages/pkg_resources/_vendor/pyparsing.pyRSscSs2tj|d|}|d}|j|jfgS(NRPi(RRt extract_tbRTRH(ttbRPtframesRU((sC/usr/lib/python2.7/site-packages/pkg_resources/_vendor/pyparsing.pyRVs iRPiicsxy&|d}td<|SWqtk rdrInAz:tjd}|dddd ksnWd~Xdkrdcd7Rt __class__(ii( tsingleArgBuiltinsRRQRRRSRVtgetattrRt ExceptionRu(ROR[RSt LINE_DIFFt this_lineR]t func_name((RVRZRORPR[R\sC/usr/lib/python2.7/site-packages/pkg_resources/_vendor/pyparsing.pyt _trim_aritys*          cBseZdZdZeZedZedZedZ dZ dZ edZ e dZd Zd Zd Zd Zd ZdZe dZdZe e dZdZdZdefdYZedFk rdefdYZndefdYZiZe Z!ddgZ"e e dZ#eZ$edZ%eZ&eddZ'edZ(e)edZ*d Z+e)d!Z,e)ed"Z-d#Z.d$Z/d%Z0d&Z1d'Z2d(Z3d)Z4d*Z5d+Z6d,Z7d-Z8d.Z9d/Z:dFd0Z;d1Z<d2Z=d3Z>d4Z?d5Z@d6ZAe d7ZBd8ZCd9ZDd:ZEd;ZFgd<ZGed=ZHd>ZId?ZJd@ZKdAZLdBZMe dCZNe dDe e edEZORS(Gs)Abstract base level parser element class.s cCs |t_dS(s Overrides the default whitespace chars Example:: # default whitespace chars are space, and newline OneOrMore(Word(alphas)).parseString("abc def\nghi jkl") # -> ['abc', 'def', 'ghi', 'jkl'] # change to just treat newline as significant ParserElement.setDefaultWhitespaceChars(" \t") OneOrMore(Word(alphas)).parseString("abc def\nghi jkl") # -> ['abc', 'def'] N(R"tDEFAULT_WHITE_CHARS(tchars((sC/usr/lib/python2.7/site-packages/pkg_resources/_vendor/pyparsing.pytsetDefaultWhitespaceChars=s cCs |t_dS(s Set class to be used for inclusion of string literals into a parser. Example:: # default literal class used is Literal integer = Word(nums) date_str = integer("year") + '/' + integer("month") + '/' + integer("day") date_str.parseString("1999/12/31") # -> ['1999', '/', '12', '/', '31'] # change to Suppress ParserElement.inlineLiteralsUsing(Suppress) date_str = integer("year") + '/' + integer("month") + '/' + integer("day") date_str.parseString("1999/12/31") # -> ['1999', '12', '31'] N(R"t_literalStringClass(R((sC/usr/lib/python2.7/site-packages/pkg_resources/_vendor/pyparsing.pytinlineLiteralsUsingLscCst|_d|_d|_d|_||_t|_t j |_ t|_ t |_t |_t|_t |_t |_t|_d|_t|_d|_d|_t|_t |_dS(NRr(NNN(Rt parseActionRt failActiontstrReprt resultsNamet saveAsListRtskipWhitespaceR"Rft whiteCharstcopyDefaultWhiteCharsRtmayReturnEmptytkeepTabst ignoreExprstdebugt streamlinedt mayIndexErrorterrmsgt modalResultst debugActionstret callPreparset callDuringTry(Rtsavelist((sC/usr/lib/python2.7/site-packages/pkg_resources/_vendor/pyparsing.pyRas(                   cCsEtj|}|j|_|j|_|jrAtj|_n|S(s$ Make a copy of this C{ParserElement}. 
Useful for defining different parse actions for the same parsing pattern, using copies of the original parse element. Example:: integer = Word(nums).setParseAction(lambda toks: int(toks[0])) integerK = integer.copy().addParseAction(lambda toks: toks[0]*1024) + Suppress("K") integerM = integer.copy().addParseAction(lambda toks: toks[0]*1024*1024) + Suppress("M") print(OneOrMore(integerK | integerM | integer).parseString("5K 100 640K 256M")) prints:: [5120, 100, 655360, 268435456] Equivalent form of C{expr.copy()} is just C{expr()}:: integerM = integer().addParseAction(lambda toks: toks[0]*1024*1024) + Suppress("M") (RRkRuRrR"RfRq(Rtcpy((sC/usr/lib/python2.7/site-packages/pkg_resources/_vendor/pyparsing.pyRxs    cCs>||_d|j|_t|dr:|j|j_n|S(sf Define name for this expression, makes debugging and exception messages clearer. Example:: Word(nums).parseString("ABC") # -> Exception: Expected W:(0123...) (at char 0), (line:1, col:1) Word(nums).setName("integer").parseString("ABC") # -> Exception: Expected integer (at char 0), (line:1, col:1) s Expected t exception(RRyRRR(RR((sC/usr/lib/python2.7/site-packages/pkg_resources/_vendor/pyparsing.pytsetNames  cCsE|j}|jdr.|d }t}n||_| |_|S(sP Define name for referencing matching tokens as a nested attribute of the returned parse results. NOTE: this returns a *copy* of the original C{ParserElement} object; this is so that the client can define a basic element, such as an integer, and reference it in multiple places with different names. You can also set results names using the abbreviated syntax, C{expr("name")} in place of C{expr.setResultsName("name")} - see L{I{__call__}<__call__>}. Example:: date_str = (integer.setResultsName("year") + '/' + integer.setResultsName("month") + '/' + integer.setResultsName("day")) # equivalent form: date_str = integer("year") + '/' + integer("month") + '/' + integer("day") t*i(RtendswithRRnRz(RRtlistAllMatchestnewself((sC/usr/lib/python2.7/site-packages/pkg_resources/_vendor/pyparsing.pytsetResultsNames     csa|r9|jttfd}|_||_n$t|jdr]|jj|_n|S(sMethod to invoke the Python pdb debugger when this element is about to be parsed. Set C{breakFlag} to True to enable, False to disable. cs)ddl}|j||||S(Ni(tpdbt set_trace(RERt doActionst callPreParseR(t _parseMethod(sC/usr/lib/python2.7/site-packages/pkg_resources/_vendor/pyparsing.pytbreakers  t_originalParseMethod(t_parseRRR(Rt breakFlagR((RsC/usr/lib/python2.7/site-packages/pkg_resources/_vendor/pyparsing.pytsetBreaks   cOs7tttt||_|jdt|_|S(s  Define action to perform when successfully matching parse element definition. Parse action fn is a callable method with 0-3 arguments, called as C{fn(s,loc,toks)}, C{fn(loc,toks)}, C{fn(toks)}, or just C{fn()}, where: - s = the original string being parsed (see note below) - loc = the location of the matching substring - toks = a list of the matched tokens, packaged as a C{L{ParseResults}} object If the functions in fns modify the tokens, they can return them as the return value from fn, and the modified list of tokens will replace the original. Otherwise, fn does not need to return any value. Optional keyword arguments: - callDuringTry = (default=C{False}) indicate if parse action should be run during lookaheads and alternate testing Note: the default parsing behavior is to expand tabs in the input string before starting the parsing process. 
See L{I{parseString}} for more information on parsing strings containing C{}s, and suggested methods to maintain a consistent view of the parsed string, the parse location, and line and column positions within the parsed string. Example:: integer = Word(nums) date_str = integer + '/' + integer + '/' + integer date_str.parseString("1999/12/31") # -> ['1999', '/', '12', '/', '31'] # use parse action to convert to ints at parse time integer = Word(nums).setParseAction(lambda toks: int(toks[0])) date_str = integer + '/' + integer + '/' + integer # note that integer fields are now ints, not strings date_str.parseString("1999/12/31") # -> [1999, '/', 12, '/', 31] R~(RtmapReRkRRR~(RtfnsR((sC/usr/lib/python2.7/site-packages/pkg_resources/_vendor/pyparsing.pyRzs"cOsF|jtttt|7_|jp<|jdt|_|S(s Add parse action to expression's list of parse actions. See L{I{setParseAction}}. See examples in L{I{copy}}. R~(RkRRReR~RR(RRR((sC/usr/lib/python2.7/site-packages/pkg_resources/_vendor/pyparsing.pytaddParseActions$cs|jdd|jdtr*tntx3|D]+fd}|jj|q7W|jp~|jdt|_|S(sAdd a boolean predicate function to expression's list of parse actions. See L{I{setParseAction}} for function call signatures. Unlike C{setParseAction}, functions passed to C{addCondition} need to return boolean success/fail of the condition. Optional keyword arguments: - message = define a custom message to be used in the raised exception - fatal = if True, will raise ParseFatalException to stop parsing immediately; otherwise will raise ParseException Example:: integer = Word(nums).setParseAction(lambda toks: int(toks[0])) year_int = integer.copy() year_int.addCondition(lambda toks: toks[0] >= 2000, message="Only support years 2000 and later") date_str = year_int + '/' + integer + '/' + integer result = date_str.parseString("1999/12/31") # -> Exception: Only support years 2000 and later (at char 0), (line:1, col:1) tmessagesfailed user-defined conditiontfatalcs7tt|||s3||ndS(N(RRe(RRNRp(texc_typetfnR(sC/usr/lib/python2.7/site-packages/pkg_resources/_vendor/pyparsing.pytpasR~(RRRRRkRR~(RRRR((RRRsC/usr/lib/python2.7/site-packages/pkg_resources/_vendor/pyparsing.pyt addConditions cCs ||_|S(s Define action to perform if parsing fails at this expression. Fail acton fn is a callable function that takes the arguments C{fn(s,loc,expr,err)} where: - s = string being parsed - loc = location where expression match was attempted and failed - expr = the parse expression that failed - err = the exception thrown The function returns no value. 
It may throw C{L{ParseFatalException}} if it is desired to stop parsing immediately.(Rl(RR((sC/usr/lib/python2.7/site-packages/pkg_resources/_vendor/pyparsing.pyt setFailActions cCsnt}xa|rit}xN|jD]C}y)x"|j||\}}t}q+WWqtk raqXqWq W|S(N(RRRuRR(RRERt exprsFoundtetdummy((sC/usr/lib/python2.7/site-packages/pkg_resources/_vendor/pyparsing.pyt_skipIgnorables#s   cCsp|jr|j||}n|jrl|j}t|}x-||krh|||krh|d7}q?Wn|S(Ni(RuRRpRqR(RRERtwttinstrlen((sC/usr/lib/python2.7/site-packages/pkg_resources/_vendor/pyparsing.pytpreParse0s    cCs |gfS(N((RRERR((sC/usr/lib/python2.7/site-packages/pkg_resources/_vendor/pyparsing.pyt parseImpl<scCs|S(N((RRERt tokenlist((sC/usr/lib/python2.7/site-packages/pkg_resources/_vendor/pyparsing.pyt postParse?sc Cs|j}|s|jr,|jdr?|jd|||n|rc|jrc|j||}n|}|}yUy|j|||\}}Wn/tk rt|t||j |nXWqt k r(} |jdr|jd|||| n|jr"|j|||| nqXn|rP|jrP|j||}n|}|}|j sw|t|kry|j|||\}}Wqtk rt|t||j |qXn|j|||\}}|j |||}t ||jd|jd|j} |jrf|s7|jrf|ryrxk|jD]`} | ||| }|dk rJt ||jd|jot|t tfd|j} qJqJWWqct k r} |jdr|jd|||| nqcXqfxn|jD]`} | ||| }|dk rt ||jd|joMt|t tfd|j} qqWn|r|jdr|jd||||| qn|| fS(NiiRRi(RvRlR{R}RRRRRRyRRxRR RnRoRzRkR~RRsR( RRERRRt debuggingtpreloct tokensStartttokensterrt retTokensR((sC/usr/lib/python2.7/site-packages/pkg_resources/_vendor/pyparsing.pyt _parseNoCacheCsp   &    %$       #cCsNy|j||dtdSWn)tk rIt|||j|nXdS(NRi(RRRRRy(RRER((sC/usr/lib/python2.7/site-packages/pkg_resources/_vendor/pyparsing.pyttryParses cCs7y|j||Wnttfk r.tSXtSdS(N(RRRRR(RRER((sC/usr/lib/python2.7/site-packages/pkg_resources/_vendor/pyparsing.pyt canParseNexts t_UnboundedCachecBseZdZRS(csit|_fd}fd}fd}tj|||_tj|||_tj|||_dS(Ncsj|S(N(R(RR(tcachet not_in_cache(sC/usr/lib/python2.7/site-packages/pkg_resources/_vendor/pyparsing.pyRscs||}) - define your parse action using the full C{(s,loc,toks)} signature, and reference the input string using the parse action's C{s} argument - explictly expand the tabs in your input string before calling C{parseString} Example:: Word('a').parseString('aaaaabaaa') # -> ['aaaaa'] Word('a').parseString('aaaaabaaa', parseAll=True) # -> Exception: Expected end of text iN( R"RRwt streamlineRuRtt expandtabsRRR R'Rtverbose_stacktrace(RREtparseAllRRRtseRL((sC/usr/lib/python2.7/site-packages/pkg_resources/_vendor/pyparsing.pyt parseString#s$      ccs|js|jnx|jD]}|jq W|jsRt|j}nt|}d}|j}|j}t j d} yx||kra| |kray.|||} ||| dt \} } Wnt k r| d}qX| |krT| d7} | | | fV|rK|||} | |kr>| }qQ|d7}q^| }q| d}qWWn(t k r}t jrq|nXdS(s Scan the input string for expression matches. Each match will return the matching tokens, start location, and end location. May be called with optional C{maxMatches} argument, to clip scanning after 'n' matches are found. If C{overlap} is specified, then overlapping matches will be reported. Note that the start and end locations are reported relative to the string being parsed. See L{I{parseString}} for more information on parsing strings with embedded tabs. 
Example:: source = "sldjf123lsdjjkf345sldkjf879lkjsfd987" print(source) for tokens,start,end in Word(alphas).scanString(source): print(' '*start + '^'*(end-start)) print(' '*start + tokens[0]) prints:: sldjf123lsdjjkf345sldkjf879lkjsfd987 ^^^^^ sldjf ^^^^^^^ lsdjjkf ^^^^^^ sldkjf ^^^^^^ lkjsfd iRiN(RwRRuRtRRRRRR"RRRRR(RREt maxMatchestoverlapRRRt preparseFntparseFntmatchesRtnextLocRtnextlocRL((sC/usr/lib/python2.7/site-packages/pkg_resources/_vendor/pyparsing.pyt scanStringUsB               c Cs%g}d}t|_yx|j|D]}\}}}|j|||!|rt|trs||j7}qt|tr||7}q|j|n|}q(W|j||g|D]}|r|^q}djt t t |SWn(t k r }t jrq!|nXdS(sf Extension to C{L{scanString}}, to modify matching text with modified tokens that may be returned from a parse action. To use C{transformString}, define a grammar and attach a parse action to it that modifies the returned token list. Invoking C{transformString()} on a target string will then scan for matches, and replace the matched text patterns according to the logic in the parse action. C{transformString()} returns the resulting transformed string. Example:: wd = Word(alphas) wd.setParseAction(lambda toks: toks[0].title()) print(wd.transformString("now is the winter of our discontent made glorious summer by this sun of york.")) Prints:: Now Is The Winter Of Our Discontent Made Glorious Summer By This Sun Of York. iRrN(RRtRRRsR RRRRRt_flattenRR"R( RRERtlastERpRRtoRL((sC/usr/lib/python2.7/site-packages/pkg_resources/_vendor/pyparsing.pyR{s(     cCsey6tg|j||D]\}}}|^qSWn(tk r`}tjrWqa|nXdS(s~ Another extension to C{L{scanString}}, simplifying the access to the tokens found to match the given parse expression. May be called with optional C{maxMatches} argument, to clip searching after 'n' matches are found. Example:: # a capitalized word starts with an uppercase letter, followed by zero or more lowercase letters cap_word = Word(alphas.upper(), alphas.lower()) print(cap_word.searchString("More than Iron, more than Lead, more than Gold I need Electricity")) prints:: ['More', 'Iron', 'Lead', 'Gold', 'I'] N(R RRR"R(RRERRpRRRL((sC/usr/lib/python2.7/site-packages/pkg_resources/_vendor/pyparsing.pyt searchStrings 6 c csfd}d}xJ|j|d|D]3\}}}|||!V|rO|dVn|}q"W||VdS(s[ Generator method to split a string using the given expression as a separator. May be called with optional C{maxsplit} argument, to limit the number of splits; and the optional C{includeSeparators} argument (default=C{False}), if the separating matching text should be included in the split results. Example:: punc = oneOf(list(".,;:/-!?")) print(list(punc.split("This, this?, this sentence, is badly punctuated!"))) prints:: ['This', ' this', '', ' this sentence', ' is badly punctuated', ''] iRN(R( RREtmaxsplittincludeSeparatorstsplitstlastRpRR((sC/usr/lib/python2.7/site-packages/pkg_resources/_vendor/pyparsing.pyRs %   cCsdt|tr!tj|}nt|tsTtjdt|tdddSt ||gS(s Implementation of + operator - returns C{L{And}}. Adding strings to a ParserElement converts them to L{Literal}s by default. Example:: greet = Word(alphas) + "," + Word(alphas) + "!" hello = "Hello, World!" print (hello, "->", greet.parseString(hello)) Prints:: Hello, World! 
-> ['Hello', ',', 'World', '!'] s4Cannot combine element of type %s with ParserElementt stackleveliN( RsRR"RitwarningstwarnRt SyntaxWarningRR(RR ((sC/usr/lib/python2.7/site-packages/pkg_resources/_vendor/pyparsing.pyR s  cCs\t|tr!tj|}nt|tsTtjdt|tdddS||S(s] Implementation of + operator when left operand is not a C{L{ParserElement}} s4Cannot combine element of type %s with ParserElementRiN( RsRR"RiRRRRR(RR ((sC/usr/lib/python2.7/site-packages/pkg_resources/_vendor/pyparsing.pyRs cCsmt|tr!tj|}nt|tsTtjdt|tdddSt |t j |gS(sQ Implementation of - operator, returns C{L{And}} with error stop s4Cannot combine element of type %s with ParserElementRiN( RsRR"RiRRRRRRt _ErrorStop(RR ((sC/usr/lib/python2.7/site-packages/pkg_resources/_vendor/pyparsing.pyt__sub__s cCs\t|tr!tj|}nt|tsTtjdt|tdddS||S(s] Implementation of - operator when left operand is not a C{L{ParserElement}} s4Cannot combine element of type %s with ParserElementRiN( RsRR"RiRRRRR(RR ((sC/usr/lib/python2.7/site-packages/pkg_resources/_vendor/pyparsing.pyt__rsub__ s csEt|tr|d}}n-t|tr7|d d }|dd kr_d|df}nt|dtr|dd kr|ddkrtS|ddkrtS|dtSqLt|dtrt|dtr|\}}||8}qLtdt|dt|dntdt||dkrgtdn|dkrtdn||kodknrtdn|rfd |r |dkr|}qt g||}qA|}n(|dkr.}nt g|}|S( s Implementation of * operator, allows use of C{expr * 3} in place of C{expr + expr + expr}. Expressions may also me multiplied by a 2-integer tuple, similar to C{{min,max}} multipliers in regular expressions. Tuples may also include C{None} as in: - C{expr*(n,None)} or C{expr*(n,)} is equivalent to C{expr*n + L{ZeroOrMore}(expr)} (read as "at least n instances of C{expr}") - C{expr*(None,n)} is equivalent to C{expr*(0,n)} (read as "0 to n instances of C{expr}") - C{expr*(None,None)} is equivalent to C{L{ZeroOrMore}(expr)} - C{expr*(1,None)} is equivalent to C{L{OneOrMore}(expr)} Note that C{expr*(None,n)} does not raise an exception if more than n exprs exist in the input stream; that is, C{expr*(None,n)} does not enforce a maximum number of expr occurrences. 
If this behavior is desired, then write C{expr*(None,n) + ~expr} iiis7cannot multiply 'ParserElement' and ('%s','%s') objectss0cannot multiply 'ParserElement' and '%s' objectss/cannot multiply ParserElement by negative values@second tuple value must be greater or equal to first tuple values+cannot multiply ParserElement by 0 or (0,0)cs2|dkr$t|dStSdS(Ni(R(tn(tmakeOptionalListR(sC/usr/lib/python2.7/site-packages/pkg_resources/_vendor/pyparsing.pyR]s N(NN( RsRottupleRR0RRRt ValueErrorR(RR t minElementst optElementsR}((RRsC/usr/lib/python2.7/site-packages/pkg_resources/_vendor/pyparsing.pyt__mul__,sD#  &  )      cCs |j|S(N(R(RR ((sC/usr/lib/python2.7/site-packages/pkg_resources/_vendor/pyparsing.pyt__rmul__pscCsdt|tr!tj|}nt|tsTtjdt|tdddSt ||gS(sI Implementation of | operator - returns C{L{MatchFirst}} s4Cannot combine element of type %s with ParserElementRiN( RsRR"RiRRRRRR(RR ((sC/usr/lib/python2.7/site-packages/pkg_resources/_vendor/pyparsing.pyt__or__ss cCs\t|tr!tj|}nt|tsTtjdt|tdddS||BS(s] Implementation of | operator when left operand is not a C{L{ParserElement}} s4Cannot combine element of type %s with ParserElementRiN( RsRR"RiRRRRR(RR ((sC/usr/lib/python2.7/site-packages/pkg_resources/_vendor/pyparsing.pyt__ror__s cCsdt|tr!tj|}nt|tsTtjdt|tdddSt ||gS(sA Implementation of ^ operator - returns C{L{Or}} s4Cannot combine element of type %s with ParserElementRiN( RsRR"RiRRRRRR(RR ((sC/usr/lib/python2.7/site-packages/pkg_resources/_vendor/pyparsing.pyt__xor__s cCs\t|tr!tj|}nt|tsTtjdt|tdddS||AS(s] Implementation of ^ operator when left operand is not a C{L{ParserElement}} s4Cannot combine element of type %s with ParserElementRiN( RsRR"RiRRRRR(RR ((sC/usr/lib/python2.7/site-packages/pkg_resources/_vendor/pyparsing.pyt__rxor__s cCsdt|tr!tj|}nt|tsTtjdt|tdddSt ||gS(sC Implementation of & operator - returns C{L{Each}} s4Cannot combine element of type %s with ParserElementRiN( RsRR"RiRRRRRR (RR ((sC/usr/lib/python2.7/site-packages/pkg_resources/_vendor/pyparsing.pyt__and__s cCs\t|tr!tj|}nt|tsTtjdt|tdddS||@S(s] Implementation of & operator when left operand is not a C{L{ParserElement}} s4Cannot combine element of type %s with ParserElementRiN( RsRR"RiRRRRR(RR ((sC/usr/lib/python2.7/site-packages/pkg_resources/_vendor/pyparsing.pyt__rand__s cCs t|S(sE Implementation of ~ operator - returns C{L{NotAny}} (R(R((sC/usr/lib/python2.7/site-packages/pkg_resources/_vendor/pyparsing.pyt __invert__scCs'|dk r|j|S|jSdS(s  Shortcut for C{L{setResultsName}}, with C{listAllMatches=False}. If C{name} is given with a trailing C{'*'} character, then C{listAllMatches} will be passed as C{True}. If C{name} is omitted, same as calling C{L{copy}}. Example:: # these are equivalent userdata = Word(alphas).setResultsName("name") + Word(nums+"-").setResultsName("socsecno") userdata = Word(alphas)("name") + Word(nums+"-")("socsecno") N(RRR(RR((sC/usr/lib/python2.7/site-packages/pkg_resources/_vendor/pyparsing.pyt__call__s  cCs t|S(s Suppresses the output of this C{ParserElement}; useful to keep punctuation from cluttering up returned output. (R)(R((sC/usr/lib/python2.7/site-packages/pkg_resources/_vendor/pyparsing.pytsuppressscCs t|_|S(s Disables the skipping of whitespace before matching the characters in the C{ParserElement}'s defined pattern. This is normally only used internally by the pyparsing module, but may be needed in some whitespace-sensitive grammars. 
setWhitespaceChars(chars) overrides the default whitespace characters for this element. parseWithTabs() overrides the default behavior of expanding <TAB>s to spaces before parsing; it must be called before parseString when the grammar contains elements that match <TAB> characters. ignore(other) registers an expression (e.g. a comment) to be skipped while pattern matching, and may be called repeatedly to register multiple ignorable patterns: patt = OneOrMore(Word(alphas)) parses 'ablaj /* comment */ lskjd' as ['ablaj'], but after patt.ignore(cStyleComment) it parses as ['ablaj', 'lskjd']. setDebugActions(startAction, successAction, exceptionAction) installs custom debugging callbacks. setDebug(flag=True) enables the default debug output: before each match attempt a "Match <name> at loc <n>(<line>,<col>)" message is printed, followed by either "Matched <name> -> [...]" or "Exception raised: ..."; assigning a readable name with setName makes these messages and exception messages much easier to follow, since the default name generated for a Word expression, for example, is of the form "W:(ABCD...)". copy, streamline, checkRecursion and validate round out the housekeeping methods; validate() checks the defined expressions for valid structure and for infinite recursive definitions. parseFile(file_or_filename, parseAll=False) executes the parse expression on the given file or filename:
if a filename is specified instead of a file object, the entire file is opened, read, and closed before parsing. __eq__, __ne__, __hash__, __req__ and __rne__ give ParserElement value-style comparison; comparing an expression against a string tests whether that string matches the expression. matches(testString, parseAll=True) is a convenience method for quick inline micro-tests of a sub-expression while building up a larger parser, returning True or False, e.g. Word(nums).matches("100"). runTests(tests, parseAll=True, comment='#', fullDump=True, printResults=True, failureTests=False) executes the expression against a series of test strings - either a list of strings or a single multiline string with one test per line - showing each test followed by its parsed results or the location and message of the parse failure. Lines beginning with the comment expression are echoed but not parsed, failureTests=True marks the tests as ones expected to fail, and the return value is a (success, results) tuple, where success indicates that all tests succeeded (or all failed, if failureTests is True) and results holds the captured output lines for each test.
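A short sketch of matches() and runTests() (standalone pyparsing assumed; the test strings are illustrative)::

    from pyparsing import Word, nums

    integer = Word(nums).setName("integer")

    assert integer.matches("100")        # quick boolean micro-test
    assert not integer.matches("abc")

    # runTests parses each non-comment line and reports success/failure
    success, report = integer.runTests("""
        # a plain integer
        100
        # not an integer at all
        abc
    """)
    print("all tests passed" if success else "some tests failed")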
Each test string must be on a single line; to exercise input that spans multiple lines, embed literal \n escapes in a raw string, e.g. expr.runTests(r"this is a test\n of strings that spans \n 3 lines"). Token is the abstract ParserElement subclass for defining atomic matching patterns. Empty is a token that always matches; NoMatch is a token that never matches. Literal matches a specified string exactly: Literal('blah').parseString('blah') -> ['blah'], Literal('blah').parseString('blahfooblah') -> ['blah'], and Literal('blah').parseString('bla') raises Expected "blah". For case-insensitive matching use CaselessLiteral; for keyword matching (forcing a word break before and after the matched string) use Keyword or CaselessKeyword. Keyword matches a specified string only when it is immediately followed by a non-keyword character: Literal("if") will match the leading 'if' in 'ifAndOnlyIf', but Keyword("if") will not - it only matches 'if' in contexts such as 'if x=1' or 'if(y==2)'.
Keyword accepts two optional constructor arguments in addition to the keyword string: identChars, a string of characters that would be valid identifier characters (defaulting to all alphanumerics plus "_" and "$"), and caseless, which enables case-insensitive matching (default False). Keyword("start").parseString("start") matches, while Keyword("start").parseString("starting") raises an exception; the class-level setDefaultKeywordChars overrides the default keyword characters for subsequently created Keywords. CaselessLiteral matches a specified string while ignoring the case of letters; the matched results are always returned in the case of the given match string, not the case of the input text, so OneOrMore(CaselessLiteral("CMD")).parseString("cmd CMD Cmd10") -> ['CMD', 'CMD', 'CMD']. CaselessKeyword is the caseless version of Keyword: OneOrMore(CaselessKeyword("CMD")).parseString("cmd CMD Cmd10") -> ['CMD', 'CMD'].
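A small sketch contrasting Literal and the keyword classes (standalone pyparsing assumed; the sample strings are illustrative)::

    from pyparsing import Literal, Keyword, CaselessKeyword

    print(Literal("if").searchString("ifAndOnlyIf"))    # -> [['if']]   (substring is enough)
    print(Keyword("if").searchString("ifAndOnlyIf"))    # -> []         (word break required)
    print(Keyword("if").searchString("if x == 1"))      # -> [['if']]

    # caseless matching; results keep the casing given to the constructor
    print(CaselessKeyword("select").searchString("SELECT * FROM t"))   # -> [['select']]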
CloseMatch is a variation on Literal that matches "close" matches, that is, strings with at most n mismatching characters; it takes match_string, the string to be matched, and maxMismatches, the maximum number of mismatches allowed to still count as a match (default 1). A successful parse returns the matched text from the input along with two named results: mismatches, the list of positions within match_string where mismatches were found (an empty list means an exact match), and original, the match_string that was compared against the input. For example, CloseMatch("ATCATCGAATGGA") matches "ATCATCGAAXGGA" with mismatches [[9]], fails on "ATCAXCGAAXGGA", and with maxMismatches=2 accepts it with mismatches [[4, 9]]. Word matches words composed of allowed character sets. It is defined with a string containing all allowed initial characters, an optional string of allowed body characters (defaulting to the initial set), and optional minimum, maximum, and/or exact length arguments; min defaults to 1 (a minimum value < 1 is not valid), and max and exact default to 0, meaning no maximum or exact length restriction. An optional excludeChars parameter lists characters to exclude from the allowed sets - useful for defining a word of all printables except one or two characters. srange is useful for building the character-set strings with range notation borrowed from regular-expression character sets. A common mistake is to use Word to match a specific literal string, as in Word("Address"): Word treats its argument as a set of matchable characters, so that expression would match "Add", "AAA", "dAred", or any other word made up of the characters 'A', 'd', 'r', 'e' and 's'; to match an exact literal string, use Literal or Keyword. pyparsing includes helper strings for building Words: alphas, nums, alphanums, hexnums, alphas8bit (accented and other alphabetic characters in the ASCII range 128-255), punc8bit (the non-alphabetic characters in that range),
and printables (any non-whitespace character). Typical uses: integer = Word(nums) (equivalent to Word("0123456789") or Word(srange("0-9"))); capital_word = Word(alphas.upper(), alphas.lower()) for a leading capital and zero or more lowercase letters; hostname = Word(alphas, alphanums+'-'); roman = Word("IVXLCDM"); csv_value = Word(printables, excludeChars=","). Internally, Word compiles an equivalent regular expression when it can, and asKeyword=True adds \b word-boundary anchors to it. Regex matches strings against a regular expression given in a form recognized by the inbuilt Python re module; it may be constructed from a pattern string (with optional flags) or from an already compiled RE object, and named groups defined with (?P<name>...) are preserved as named parse results. Examples: realnum = Regex(r"[+-]?\d+\.\d*") and date = Regex(r'(?P<year>\d{4})-(?P<month>\d\d?)-(?P<day>\d\d?)').
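A brief sketch of Word with separate initial and body character sets (standalone pyparsing assumed; the sample inputs are illustrative)::

    from pyparsing import Word, alphas, alphanums, printables

    identifier = Word(alphas + "_", alphanums + "_")   # leading letter/underscore, then alphanumerics
    hostname   = Word(alphas, alphanums + "-")
    csv_value  = Word(printables, excludeChars=",")

    print(identifier.parseString("foo_bar42"))   # -> ['foo_bar42']
    print(hostname.parseString("web-01"))        # -> ['web-01']
    print(csv_value.parseString("a;b!c"))        # -> ['a;b!c']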
QuotedString matches strings that are delimited by quoting characters. It is defined with the following parameters: quoteChar, a string of one or more characters that opens the quoted string; escChar, a character used to escape quotes, typically backslash (default None); escQuote, a special quote sequence that escapes an embedded quote, such as SQL's "" for an embedded " (default None); multiline, whether quotes can span multiple lines (default False); unquoteResults, whether the matched text should be returned with the quoting characters stripped (default True); endQuoteChar, a string of one or more characters closing the quoted string (default None, meaning the same as quoteChar); and convertWhitespaceEscapes, whether escaped whitespace such as '\t' and '\n' is converted to actual whitespace (default True). Examples: QuotedString('"').searchString('lsjdf "This is the quote" sldjf') -> [['This is the quote']]; QuotedString('{{', endQuoteChar='}}').searchString('lsjdf {{This is the "quote"}} sldjf') -> [['This is the "quote"']]; QuotedString('"', escQuote='""').searchString('lsjdf "This is the quote with ""embedded"" quotes" sldjf') -> [['This is the quote with "embedded" quotes']]. An empty quoteChar or endQuoteChar raises SyntaxError.
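A compact sketch of the main QuotedString variants (standalone pyparsing assumed; the inputs, and the exact unescaped outputs shown in the comments, are illustrative)::

    from pyparsing import QuotedString

    sql_string   = QuotedString("'", escQuote="''")        # SQL-style doubled-quote escape
    dq_string    = QuotedString('"', escChar="\\")         # backslash escape
    template_arg = QuotedString("{{", endQuoteChar="}}")   # asymmetric delimiters

    print(sql_string.parseString("'it''s here'"))          # -> ["it's here"]
    print(dq_string.parseString('"a \\"quoted\\" word"'))  # -> ['a "quoted" word']
    print(template_arg.parseString("{{name}}"))            # -> ['name']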
CharsNotIn matches words composed of characters not in a given set; whitespace is included in the matched characters unless it is listed in the exclusion set. It is defined with the string of disallowed characters and optional minimum, maximum, and/or exact length arguments; min defaults to 1 (a minimum value < 1 is not valid), and max and exact default to 0, meaning no restriction. For example, csv_value = CharsNotIn(',') combined with delimitedList parses "dkls,lsdkjf,s12 34,@!#,213" into ['dkls', 'lsdkjf', 's12 34', '@!#', '213']. White is a special matching class for whitespace: normally whitespace is ignored by pyparsing grammars, but this class is used when some whitespace structure is significant. It is defined with a string of the whitespace characters to be matched (default " \t\r\n") and accepts the same optional min, max, and exact arguments as the Word class.
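The CSV example above as a tiny runnable sketch (standalone pyparsing assumed)::

    from pyparsing import CharsNotIn, delimitedList

    csv_value = CharsNotIn(",")    # anything up to the next comma, embedded spaces included
    print(delimitedList(csv_value).parseString("dkls,lsdkjf,s12 34,@!#,213"))
    # -> ['dkls', 'lsdkjf', 's12 34', '@!#', '213']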
_PositionToken is the common base for tokens that match a position rather than text. GoToColumn advances to a specific column of the input text; it is useful for tabular report scraping. LineStart matches if the current position is at the beginning of a line within the parse string; for example, (LineStart() + 'AAA' + restOfLine).searchString(test) returns only the lines of test that begin with 'AAA', such as ['AAA', ' this line'] and ['AAA', ' and this line']. LineEnd matches at the end of a line, StringStart at the beginning of the parse string, and StringEnd at its end. WordStart matches if the current position is at the beginning of a Word and is not preceded by any character in a given set of wordChars (default printables); to emulate the regular-expression \b behavior, use WordStart(alphanums). WordStart will also match at the beginning of the string being parsed or at the beginning of a line. WordEnd is its counterpart, matching if the current position is at the end of a Word and is not followed by any character in the given wordChars set (default printables);
it will also match at the end of the string being parsed or at the end of a line. ParseExpression is the abstract subclass of ParserElement for combining and post-processing multiple contained expressions; it manages the list of sub-expressions (exprs), propagates leaveWhitespace and ignore to all of them, and its streamline step merges nested expressions of the same class where that is safe. And requires all of the given ParseExpressions to be found in the given order; expressions may be separated by whitespace. It may be constructed using the '+' operator, or using the '-' operator, which inserts an internal error-stop marker so that failures after that point are reported immediately instead of being retried by backtracking.
For example, with integer = Word(nums) and name_expr = OneOrMore(Word(alphas)), the expression And([integer("id"), name_expr("name"), integer("age")]) is more easily written as integer("id") + name_expr("name") + integer("age"). Or requires that at least one of the given ParseExpressions is found; if two expressions match, the one matching the longest string is used. It may be constructed using the '^' operator: number = Word(nums) ^ Combine(Word(nums) + '.' + Word(nums)) finds [['123'], ['3.1416'], ['789']] in "123 3.1416 789". MatchFirst also requires that at least one expression is found, but when two expressions match, the first one listed is the one that wins. It may be constructed using the '|' operator, so the order of the alternatives matters: number = Word(nums) | Combine(Word(nums) + '.' + Word(nums)) fails on the same input,
producing [['123'], ['3'], ['1416'], ['789']] because the plain integer alternative is tried first, whereas putting the more selective expression first - number = Combine(Word(nums) + '.' + Word(nums)) | Word(nums) - yields [['123'], ['3.1416'], ['789']] as intended. Each requires all of the given ParseExpressions to be found, but in any order; expressions may be separated by whitespace, the class may be constructed using the '&' operator, and a missing required element fails the parse with "Missing one or more required elements (...)". A typical use is an attribute list in which shape and posn are required while color and size are optional: shape_spec = shape_attr & posn_attr & Optional(color_attr) & Optional(size_attr) accepts the attributes in any order and still exposes each one under its results name.
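A runnable sketch of Each built with the '&' operator (standalone pyparsing assumed; the attribute grammar is a simplified, illustrative version of the one described above)::

    from pyparsing import Word, Group, Optional, Suppress, oneOf, alphas, nums

    integer    = Word(nums)
    shape_attr = Suppress("shape:") + Word(alphas)("shape")
    posn_attr  = Suppress("posn:") + Group(integer + Suppress(",") + integer)("posn")
    color_attr = Suppress("color:") + oneOf("RED GREEN BLUE BLACK WHITE")("color")
    size_attr  = Suppress("size:") + integer("size")

    # '&' accepts the attributes in any order; color and size remain optional
    shape_spec = shape_attr & posn_attr & Optional(color_attr) & Optional(size_attr)

    result = shape_spec.parseString("color: BLUE posn: 50,80 shape: CIRCLE")
    print(result.shape, result.color, result.posn.asList())   # -> CIRCLE BLUE ['50', '80']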
ParseElementEnhance is the abstract subclass of ParserElement for combining and post-processing a single contained expression; it forwards whitespace handling, ignore expressions, and debugging settings to that expression. FollowedBy does lookahead matching of the given parse expression: it does not advance the parsing position within the input string, it only verifies that the expression matches at the current position, and it always returns a null token list. For example, with data_word = Word(alphas), label = data_word + FollowedBy(':') matches a word only when it is followed by a colon, which makes it easy to split "shape: SQUARE color: BLACK posn: upper left" into [['shape', 'SQUARE'], ['color', 'BLACK'], ['posn', 'upper left']]. NotAny is the negative lookahead: it verifies that the given expression does not match at the current position, does not advance the position, does not skip leading whitespace, always returns a null token list, and may be constructed using the '~' operator.
_MultipleMatch is the shared base class of the repetition expressions; it accepts an optional stopOn argument naming a terminating sentinel. OneOrMore matches one or more repetitions of the given expression; stopOn (default None) is only required if the sentinel would ordinarily match the repetition expression. For example, with attr_expr = Group(label + Suppress(':') + OneOrMore(data_word).setParseAction(' '.join)), parsing "shape: SQUARE posn: upper left color: BLACK" wrongly reads 'color' as data, giving [['shape', 'SQUARE color']]; using OneOrMore(data_word, stopOn=label) instead yields [['shape', 'SQUARE'], ['posn', 'upper left'], ['color', 'BLACK']], and the same grammar could also be written as attr_expr * (1,). ZeroOrMore matches zero or more repetitions of the given expression and takes the same stopOn parameter.
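The stopOn behavior as a runnable sketch (standalone pyparsing assumed; the label/attribute grammar follows the example quoted above)::

    from pyparsing import Word, Group, OneOrMore, FollowedBy, Suppress, alphas

    data_word = Word(alphas)
    label     = data_word + FollowedBy(":")

    # stopOn=label keeps the repeated data_word from swallowing the next label
    attr_expr = Group(label + Suppress(":") +
                      OneOrMore(data_word, stopOn=label).setParseAction(" ".join))

    text = "shape: SQUARE posn: upper left color: BLACK"
    print(OneOrMore(attr_expr).parseString(text))
    # -> [['shape', 'SQUARE'], ['posn', 'upper left'], ['color', 'BLACK']]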
Optional matches the given expression zero or one times, and its optional default argument supplies a value to return when the expression is not found. For example, a US postal code can be a 5-digit ZIP plus an optional 4-digit qualifier: zip = Combine(Word(nums, exact=5) + Optional('-' + Word(nums, exact=4))) accepts "12345" and "12101-0001" but rejects "98765-" with "Expected end of text (at char 5), (line:1, col:6)". Combine is a converter that concatenates all matched tokens into a single string: real = Word(nums) + '.' + Word(nums) parses '3.1416' as ['3', '.', '1416'] (and would also, erroneously, accept '3. 1416'), while real = Combine(Word(nums) + '.' + Word(nums)) parses '3.1416' as ['3.1416'] and rejects input with internal spaces. Group is a converter that returns the matched tokens as a nested list, which is useful for wrapping the output of ZeroOrMore and OneOrMore expressions: func = ident + Optional(delimitedList(term)) parses "fn a,b,100" as ['fn', 'a', 'b', '100'], while func = ident + Group(Optional(delimitedList(term))) parses it as ['fn', ['a', 'b', '100']]. Dict is a converter that returns a repetitive expression as a list and also as a dictionary, with each element keyed by its first token; this is useful for tabular report scraping when the first column can serve as the item key. A result parsed with Dict(OneOrMore(Group(attr_expr))) can be accessed as result['shape'], dumped with result.dump(), or exported with result.asDict(). See ParseResults for more examples of accessing fields by results name.
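A runnable sketch of Group and Dict over a repeated attribute expression (standalone pyparsing assumed; the sample text mirrors the documentation example)::

    from pyparsing import Word, Group, Dict, OneOrMore, FollowedBy, Suppress, alphas

    data_word = Word(alphas)
    label     = data_word + FollowedBy(":")
    attr_expr = label + Suppress(":") + OneOrMore(data_word, stopOn=label).setParseAction(" ".join)

    text = "shape: SQUARE posn: upper left color: light blue texture: burlap"

    # Dict(OneOrMore(Group(...))) auto-assigns each group's first token as its key
    result = Dict(OneOrMore(Group(attr_expr))).parseString(text)
    print(result["shape"])   # -> SQUARE
    print(result.asDict())   # -> {'shape': 'SQUARE', 'posn': 'upper left', ...}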
traceParseAction is a decorator for debugging parse actions: when the decorated action is called it prints ">>entering <method-name>(line: <current_source_line>, <parse_location>, <matched_tokens>)", and when it completes it prints "<<" followed by the returned value or any exception the action raised. delimitedList defines a delimited list of expressions, with the delimiter defaulting to ',': delimitedList(Word(alphas)).parseString("aa,bb,cc") -> ['aa', 'bb', 'cc'], and delimitedList(Word(hexnums), delim=':', combine=True).parseString("AA:BB:CC:DD:EE") -> ['AA:BB:CC:DD:EE']. countedArray defines a counted list of expressions of the form "integer expr expr expr...", where the leading integer tells how many expr expressions follow; the matched tokens return the array of expr tokens, the leading count token is suppressed, and an optional intExpr argument supplies a custom expression producing the count (for example, a binary constant). matchPreviousLiteral defines an expression that looks for a literal 'repeat' of the tokens matched by a previous expression: with first = Word(nums) and second = matchPreviousLiteral(first), matchExpr = first + ":" + second will match "1:1" but not "1:2"; because it matches the previous tokens as literals, it will also match the leading "1:1" in "1:10" - if that is not desired, use matchPreviousExpr. Do not use it with packrat parsing enabled.
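A short sketch of delimitedList and countedArray (standalone pyparsing assumed; the sample inputs are illustrative)::

    from pyparsing import Word, alphas, hexnums, delimitedList, countedArray

    print(delimitedList(Word(alphas)).parseString("aa,bb,cc"))
    # -> ['aa', 'bb', 'cc']
    print(delimitedList(Word(hexnums), delim=":", combine=True).parseString("AA:BB:CC:DD:EE"))
    # -> ['AA:BB:CC:DD:EE']

    # the leading count "2" is consumed and suppressed; only the two words are kept
    print(countedArray(Word(alphas)).parseString("2 ab cd ef"))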
matchPreviousExpr likewise defines an expression that looks for a 'repeat' of a previous expression, but it matches by expression rather than by literal text: with first = Word(nums) and second = matchPreviousExpr(first), matchExpr = first + ":" + second will match "1:1" but not "1:2", and it will not match the leading "1:1" in "1:10", because the expressions are evaluated first and then compared, so "1" is compared with "10". Do not use it with packrat parsing enabled. oneOf is a helper that quickly defines a set of alternative Literals and makes sure to do longest-first testing when there is a conflict, regardless of the input order, while returning a MatchFirst for best performance. Its parameters are strs (a string of space-delimited literals, or a collection of string literals), caseless (default False), and useRegex (default True, generating a Regex object as an optimization where possible, otherwise a MatchFirst). For example, comp_oper = oneOf("< = > <= >= !=") with term = Word(alphas) | Word(nums) and comparison_expr = term + comp_oper + term finds [['B', '=', '12'], ['AA', '=', '23'], ['B', '<=', 'AA'], ['AA', '>', '12']] in "B = 12 AA=23 B<=AA AA>12".
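The oneOf example above as a runnable sketch (standalone pyparsing assumed)::

    from pyparsing import Word, alphas, nums, oneOf

    comp_oper = oneOf("< = > <= >= !=")   # "<=" is tested before "<" despite the listed order
    term = Word(alphas) | Word(nums)
    comparison_expr = term + comp_oper + term

    print(comparison_expr.searchString("B = 12 AA=23 B<=AA AA>12"))
    # -> [['B', '=', '12'], ['AA', '=', '23'], ['B', '<=', 'AA'], ['AA', '>', '12']]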
dictOf is a helper to easily and clearly define a dictionary by specifying the respective patterns for the key and value; it takes care of defining the Dict, ZeroOrMore, and Group tokens in the proper order. The key pattern can include delimiting markers or punctuation as long as they are suppressed, leaving the significant key text, and the value pattern can include named results so that the Dict results include named token fields. With attr_label = label and attr_value = Suppress(':') + OneOrMore(data_word, stopOn=label).setParseAction(' '.join), result = dictOf(attr_label, attr_value).parseString(text) gives the same dictionary as the Dict(OneOrMore(Group(...))) idiom but with a simpler call format; the result supports result['shape'], attribute access such as result.shape, result.dump(), and result.asDict(). originalTextFor returns the original, untokenized text matched by a given expression - useful to restore the parsed fields of an HTML start tag into the raw tag text, or to revert separate tokens with intervening whitespace back to the original matching input. By default it returns a string containing the original parsed text; if the optional asString argument is passed as False, it returns a ParseResults containing any results names that were originally matched plus a single token with the original matched text, so expressions with defined results names must pass asString=False to preserve those values. For example, with opener, closer = makeHTMLTags("b"), originalTextFor(opener + SkipTo(closer) + closer) recovers the raw source text of each bold element found by searchString. ungroup undoes pyparsing's default grouping of And expressions, even when all but one of their elements are non-empty.
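A small sketch of dictOf and originalTextFor (standalone pyparsing assumed; the HTML snippet and variable names are illustrative, and the exact outputs shown are what the documented behavior suggests)::

    from pyparsing import (Word, OneOrMore, FollowedBy, Suppress, SkipTo,
                           alphas, dictOf, makeHTMLTags, originalTextFor)

    data_word  = Word(alphas)
    attr_label = data_word + FollowedBy(":")
    attr_value = Suppress(":") + OneOrMore(data_word, stopOn=attr_label).setParseAction(" ".join)

    text = "shape: SQUARE posn: upper left color: light blue"
    print(dictOf(attr_label, attr_value).parseString(text).asDict())
    # -> {'shape': 'SQUARE', 'posn': 'upper left', 'color': 'light blue'}

    # originalTextFor keeps the raw, untokenized source of the whole match
    b_start, b_end = makeHTMLTags("b")
    bold = originalTextFor(b_start + SkipTo(b_end) + b_end)
    print(bold.searchString("this is <b>bold text</b> here")[0])
    # -> ['<b>bold text</b>']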
This helper adds the following results names: - locn_start = location where matched expression begins - locn_end = location where matched expression ends - value = the actual parsed results Be careful if the input text contains C{} characters, you may want to call C{L{ParserElement.parseWithTabs}} Example:: wd = Word(alphas) for match in locatedExpr(wd).searchString("ljsdf123lksdjjf123lkkjj1222"): print(match) prints:: [[0, 'ljsdf', 5]] [[8, 'lksdjjf', 15]] [[18, 'lkkjj', 23]] cSs|S(N((RRNRp((sC/usr/lib/python2.7/site-packages/pkg_resources/_vendor/pyparsing.pyRq`Rrt locn_startRtlocn_end(R RzRRR(RFtlocator((sC/usr/lib/python2.7/site-packages/pkg_resources/_vendor/pyparsing.pyRhLss\[]-*.$+^?()~ RKcCs |ddS(Nii((RRNRp((sC/usr/lib/python2.7/site-packages/pkg_resources/_vendor/pyparsing.pyRqkRrs\\0?[xX][0-9a-fA-F]+cCs tt|djddS(Nis\0xi(tunichrRotlstrip(RRNRp((sC/usr/lib/python2.7/site-packages/pkg_resources/_vendor/pyparsing.pyRqlRrs \\0[0-7]+cCstt|dddS(Niii(RRo(RRNRp((sC/usr/lib/python2.7/site-packages/pkg_resources/_vendor/pyparsing.pyRqmRrR<s\]s\wRzRRtnegatetbodyRcsOdy-djfdtj|jDSWntk rJdSXdS(s Helper to easily define string ranges for use in Word construction. Borrows syntax from regexp '[]' string range definitions:: srange("[0-9]") -> "0123456789" srange("[a-z]") -> "abcdefghijklmnopqrstuvwxyz" srange("[a-z$_]") -> "abcdefghijklmnopqrstuvwxyz$_" The input string must be enclosed in []'s, and the returned string is the expanded character set joined into a single string. The values enclosed in the []'s may be: - a single character - an escaped character with a leading backslash (such as C{\-} or C{\]}) - an escaped hex character with a leading C{'\x'} (C{\x21}, which is a C{'!'} character) (C{\0x##} is also supported for backwards compatibility) - an escaped octal character with a leading C{'\0'} (C{\041}, which is a C{'!'} character) - a range of any of the above, separated by a dash (C{'a-z'}, etc.) - any combination of the above (C{'aeiouy'}, C{'a-zA-Z0-9_$'}, etc.) cSsKt|ts|Sdjdtt|dt|ddDS(NRrcss|]}t|VqdS(N(R(RR((sC/usr/lib/python2.7/site-packages/pkg_resources/_vendor/pyparsing.pys sii(RsR RRtord(tp((sC/usr/lib/python2.7/site-packages/pkg_resources/_vendor/pyparsing.pyRqRrRrc3s|]}|VqdS(N((Rtpart(t _expanded(sC/usr/lib/python2.7/site-packages/pkg_resources/_vendor/pyparsing.pys sN(Rt_reBracketExprRRRa(R((RsC/usr/lib/python2.7/site-packages/pkg_resources/_vendor/pyparsing.pyR]rs  - csfd}|S(st Helper method for defining parse actions that require matching at a specific column in the input text. cs2t||kr.t||dndS(Nsmatched token not at column %d(R7R(R@tlocnRJ(R(sC/usr/lib/python2.7/site-packages/pkg_resources/_vendor/pyparsing.pyt verifyCols((RR((RsC/usr/lib/python2.7/site-packages/pkg_resources/_vendor/pyparsing.pyRKscs fdS(s Helper method for common parse actions that simply return a literal value. Especially useful when used with C{L{transformString}()}. Example:: num = Word(nums).setParseAction(lambda toks: int(toks[0])) na = oneOf("N/A NA").setParseAction(replaceWith(math.nan)) term = na | num OneOrMore(term).parseString("324 234 N/A 234") # -> [324, 234, nan, 234] csgS(N((RRNRp(treplStr(sC/usr/lib/python2.7/site-packages/pkg_resources/_vendor/pyparsing.pyRqRr((R((RsC/usr/lib/python2.7/site-packages/pkg_resources/_vendor/pyparsing.pyRZs cCs|ddd!S(s Helper parse action for removing quotation marks from parsed quoted strings. 
Example:: # by default, quotation marks are included in parsed results quotedString.parseString("'Now is the Winter of our Discontent'") # -> ["'Now is the Winter of our Discontent'"] # use removeQuotes to strip quotation marks from parsed results quotedString.setParseAction(removeQuotes) quotedString.parseString("'Now is the Winter of our Discontent'") # -> ["Now is the Winter of our Discontent"] iii((RRNRp((sC/usr/lib/python2.7/site-packages/pkg_resources/_vendor/pyparsing.pyRXs csafd}y"tdtdj}Wntk rSt}nX||_|S(sG Helper to define a parse action by mapping a function to all elements of a ParseResults list.If any additional args are passed, they are forwarded to the given function as additional arguments after the token, as in C{hex_integer = Word(hexnums).setParseAction(tokenMap(int, 16))}, which will convert the parsed data to an integer using base 16. Example (compare the last to example in L{ParserElement.transformString}:: hex_ints = OneOrMore(Word(hexnums)).setParseAction(tokenMap(int, 16)) hex_ints.runTests(''' 00 11 22 aa FF 0a 0d 1a ''') upperword = Word(alphas).setParseAction(tokenMap(str.upper)) OneOrMore(upperword).runTests(''' my kingdom for a horse ''') wd = Word(alphas).setParseAction(tokenMap(str.title)) OneOrMore(wd).setParseAction(' '.join).runTests(''' now is the winter of our discontent made glorious summer by this sun of york ''') prints:: 00 11 22 aa FF 0a 0d 1a [0, 17, 34, 170, 255, 10, 13, 26] my kingdom for a horse ['MY', 'KINGDOM', 'FOR', 'A', 'HORSE'] now is the winter of our discontent made glorious summer by this sun of york ['Now Is The Winter Of Our Discontent Made Glorious Summer By This Sun Of York'] cs g|D]}|^qS(N((RRNRpttokn(RRO(sC/usr/lib/python2.7/site-packages/pkg_resources/_vendor/pyparsing.pyRsRR^(R`RRaRu(RORRRd((RROsC/usr/lib/python2.7/site-packages/pkg_resources/_vendor/pyparsing.pyRks    cCst|jS(N(RR,(Rp((sC/usr/lib/python2.7/site-packages/pkg_resources/_vendor/pyparsing.pyRqRrcCst|jS(N(Rtlower(Rp((sC/usr/lib/python2.7/site-packages/pkg_resources/_vendor/pyparsing.pyRqRrcCs<t|tr+|}t|d| }n |j}tttd}|rtjj t }t d|dt t t|t d|tddtgjdj d t d }nd jd tD}tjj t t|B}t d|dt t t|j ttt d|tddtgjdj d t d }ttd|d }|jdd j|jddjjjd|}|jdd j|jddjjjd|}||_||_||fS(sRInternal helper to construct opening and closing tag expressions, given a tag nameR+s_-:Rttagt=t/RRAcSs|ddkS(NiR((RRNRp((sC/usr/lib/python2.7/site-packages/pkg_resources/_vendor/pyparsing.pyRqRrR Rrcss!|]}|dkr|VqdS(R N((RR((sC/usr/lib/python2.7/site-packages/pkg_resources/_vendor/pyparsing.pys scSs|ddkS(NiR((RRNRp((sC/usr/lib/python2.7/site-packages/pkg_resources/_vendor/pyparsing.pyRqRrsRLs(RsRRRR-R2R1R<RRzRXR)R R0RRRRRRTRWR@Rt_LRttitleRRR(ttagStrtxmltresnamet tagAttrNamet tagAttrValuetopenTagtprintablesLessRAbracktcloseTag((sC/usr/lib/python2.7/site-packages/pkg_resources/_vendor/pyparsing.pyt _makeTagss" o{AA  cCs t|tS(s  Helper to construct opening and closing tag expressions for HTML, given a tag name. Matches tags in either upper or lower case, attributes with namespaces and with quoted or unquoted values. 
Example:: text = 'More info at the pyparsing wiki page' # makeHTMLTags returns pyparsing expressions for the opening and closing tags as a 2-tuple a,a_end = makeHTMLTags("A") link_expr = a + SkipTo(a_end)("link_text") + a_end for link in link_expr.searchString(text): # attributes in the tag (like "href" shown here) are also accessible as named results print(link.link_text, '->', link.href) prints:: pyparsing -> http://pyparsing.wikispaces.com (R R(R((sC/usr/lib/python2.7/site-packages/pkg_resources/_vendor/pyparsing.pyRIscCs t|tS(s Helper to construct opening and closing tag expressions for XML, given a tag name. Matches tags only in the given upper/lower case. Example: similar to L{makeHTMLTags} (R R(R((sC/usr/lib/python2.7/site-packages/pkg_resources/_vendor/pyparsing.pyRJscsT|r|n |jgD]\}}||f^q#fd}|S(s< Helper to create a validating parse action to be used with start tags created with C{L{makeXMLTags}} or C{L{makeHTMLTags}}. Use C{withAttribute} to qualify a starting tag with a required attribute value, to avoid false matches on common tags such as C{} or C{
}. Call C{withAttribute} with a series of attribute names and values. Specify the list of filter attributes names and values as: - keyword arguments, as in C{(align="right")}, or - as an explicit dict with C{**} operator, when an attribute name is also a Python reserved word, as in C{**{"class":"Customer", "align":"right"}} - a list of name-value tuples, as in ( ("ns1:class", "Customer"), ("ns2:align","right") ) For attribute names with a namespace prefix, you must use the second form. Attribute names are matched insensitive to upper/lower case. If just testing for C{class} (with or without a namespace), use C{L{withClass}}. To verify that the attribute exists, but without specifying a value, pass C{withAttribute.ANY_VALUE} as the value. Example:: html = '''
Some text
1 4 0 1 0
1,3 2,3 1,1
this has no type
''' div,div_end = makeHTMLTags("div") # only match div tag having a type attribute with value "grid" div_grid = div().setParseAction(withAttribute(type="grid")) grid_expr = div_grid + SkipTo(div | div_end)("body") for grid_header in grid_expr.searchString(html): print(grid_header.body) # construct a match with any div tag having a type attribute, regardless of the value div_any_type = div().setParseAction(withAttribute(type=withAttribute.ANY_VALUE)) div_expr = div_any_type + SkipTo(div | div_end)("body") for div_header in div_expr.searchString(html): print(div_header.body) prints:: 1 4 0 1 0 1 4 0 1 0 1,3 2,3 1,1 csx~D]v\}}||kr8t||d|n|tjkr|||krt||d||||fqqWdS(Nsno matching attribute s+attribute '%s' has value '%s', must be '%s'(RRct ANY_VALUE(RRNRtattrNamet attrValue(tattrs(sC/usr/lib/python2.7/site-packages/pkg_resources/_vendor/pyparsing.pyRRs   (R(RtattrDictRRR((RsC/usr/lib/python2.7/site-packages/pkg_resources/_vendor/pyparsing.pyRcs 2  %cCs'|rd|nd}ti||6S(s Simplified version of C{L{withAttribute}} when matching on a div class - made difficult because C{class} is a reserved word in Python. Example:: html = '''
Some text
1 4 0 1 0
1,3 2,3 1,1
this <div> has no class
''' div,div_end = makeHTMLTags("div") div_grid = div().setParseAction(withClass("grid")) grid_expr = div_grid + SkipTo(div | div_end)("body") for grid_header in grid_expr.searchString(html): print(grid_header.body) div_any_type = div().setParseAction(withClass(withAttribute.ANY_VALUE)) div_expr = div_any_type + SkipTo(div | div_end)("body") for div_header in div_expr.searchString(html): print(div_header.body) prints:: 1 4 0 1 0 1 4 0 1 0 1,3 2,3 1,1 s%s:classtclass(Rc(t classnamet namespacet classattr((sC/usr/lib/python2.7/site-packages/pkg_resources/_vendor/pyparsing.pyRi\s t(RYcCs<t}||||B}xt|D]\}}|d d \}} } } | dkrdd|nd|} | dkr|d kst|dkrtdn|\} }ntj| }| tjkr| dkr t||t |t |}q| dkrx|d k rQt|||t |t ||}qt||t |t |}q| dkrt|| |||t || |||}qtdn+| tj kr| dkr)t |t st |}nt|j|t ||}q| dkr|d k rpt|||t |t ||}qt||t |t |}q| dkrt|| |||t || |||}qtdn td | r |j| n||j| |BK}|}q(W||K}|S( s Helper method for constructing grammars of expressions made up of operators working in a precedence hierarchy. Operators may be unary or binary, left- or right-associative. Parse actions can also be attached to operator expressions. The generated parser will also recognize the use of parentheses to override operator precedences (see example below). Note: if you define a deep operator list, you may see performance issues when using infixNotation. See L{ParserElement.enablePackrat} for a mechanism to potentially improve your parser performance. Parameters: - baseExpr - expression representing the most basic element for the nested - opList - list of tuples, one for each operator precedence level in the expression grammar; each tuple is of the form (opExpr, numTerms, rightLeftAssoc, parseAction), where: - opExpr is the pyparsing expression for the operator; may also be a string, which will be converted to a Literal; if numTerms is 3, opExpr is a tuple of two expressions, for the two operators separating the 3 terms - numTerms is the number of terms for this operator (must be 1, 2, or 3) - rightLeftAssoc is the indicator whether the operator is right or left associative, using the pyparsing-defined constants C{opAssoc.RIGHT} and C{opAssoc.LEFT}. 
- parseAction is the parse action to be associated with expressions matching this operator expression (the parse action tuple member may be omitted) - lpar - expression for matching left-parentheses (default=C{Suppress('(')}) - rpar - expression for matching right-parentheses (default=C{Suppress(')')}) Example:: # simple example of four-function arithmetic with ints and variable names integer = pyparsing_common.signed_integer varname = pyparsing_common.identifier arith_expr = infixNotation(integer | varname, [ ('-', 1, opAssoc.RIGHT), (oneOf('* /'), 2, opAssoc.LEFT), (oneOf('+ -'), 2, opAssoc.LEFT), ]) arith_expr.runTests(''' 5+3*6 (5+3)*6 -2--11 ''', fullDump=False) prints:: 5+3*6 [[5, '+', [3, '*', 6]]] (5+3)*6 [[[5, '+', 3], '*', 6]] -2--11 [[['-', 2], '-', ['-', 11]]] iis%s terms %s%s termis@if numterms=3, opExpr must be a tuple or list of two expressionsis6operator must be unary (1), binary (2), or ternary (3)s2operator must indicate right or left associativityN(N(R RRRRRRRtLEFTR RRtRIGHTRsRRFRz(tbaseExprtopListtlpartrparR}tlastExprRtoperDeftopExprtaritytrightLeftAssocRttermNametopExpr1topExpr2tthisExprR((sC/usr/lib/python2.7/site-packages/pkg_resources/_vendor/pyparsing.pyRgsR;    '  /'   $  /'     s4"(?:[^"\n\r\\]|(?:"")|(?:\\(?:[^x]|x[0-9a-fA-F]+)))*t"s string enclosed in double quotess4'(?:[^'\n\r\\]|(?:'')|(?:\\(?:[^x]|x[0-9a-fA-F]+)))*t's string enclosed in single quotess*quotedString using single or double quotestusunicode string literalcCs!||krtdn|d krt|trt|trt|dkrt|dkr|d k rtt|t||tj ddj d}q|t j t||tj j d}q|d k r9tt|t |t |ttj ddj d}qttt |t |ttj ddj d}qtdnt}|d k r|tt|t||B|Bt|K}n.|tt|t||Bt|K}|jd ||f|S( s~ Helper method for defining nested lists enclosed in opening and closing delimiters ("(" and ")" are the default). Parameters: - opener - opening character for a nested list (default=C{"("}); can also be a pyparsing expression - closer - closing character for a nested list (default=C{")"}); can also be a pyparsing expression - content - expression for items within the nested lists (default=C{None}) - ignoreExpr - expression for ignoring opening and closing delimiters (default=C{quotedString}) If an expression is not provided for the content argument, the nested expression will capture all whitespace-delimited content between delimiters as a list of separate values. Use the C{ignoreExpr} argument to define expressions that may contain opening or closing characters that should not be treated as opening or closing characters for nesting, such as quotedString or a comment expression. Specify multiple expressions using an C{L{Or}} or C{L{MatchFirst}}. The default is L{quotedString}, but if no expressions are to be ignored, then pass C{None} for this argument. 
Example:: data_type = oneOf("void int short long char float double") decl_data_type = Combine(data_type + Optional(Word('*'))) ident = Word(alphas+'_', alphanums+'_') number = pyparsing_common.number arg = Group(decl_data_type + ident) LPAR,RPAR = map(Suppress, "()") code_body = nestedExpr('{', '}', ignoreExpr=(quotedString | cStyleComment)) c_function = (decl_data_type("type") + ident("name") + LPAR + Optional(delimitedList(arg), [])("args") + RPAR + code_body("body")) c_function.ignore(cStyleComment) source_code = ''' int is_odd(int x) { return (x%2); } int dec_to_hex(char hchar) { if (hchar >= '0' && hchar <= '9') { return (ord(hchar)-ord('0')); } else { return (10+ord(hchar)-ord('A')); } } ''' for func in c_function.searchString(source_code): print("%(name)s (%(type)s) args: %(args)s" % func) prints:: is_odd (int) args: [['int', 'x']] dec_to_hex (int) args: [['char', 'hchar']] s.opening and closing strings cannot be the sameiRKcSs|djS(Ni(R(Rp((sC/usr/lib/python2.7/site-packages/pkg_resources/_vendor/pyparsing.pyRq9RrcSs|djS(Ni(R(Rp((sC/usr/lib/python2.7/site-packages/pkg_resources/_vendor/pyparsing.pyRq<RrcSs|djS(Ni(R(Rp((sC/usr/lib/python2.7/site-packages/pkg_resources/_vendor/pyparsing.pyRqBRrcSs|djS(Ni(R(Rp((sC/usr/lib/python2.7/site-packages/pkg_resources/_vendor/pyparsing.pyRqFRrsOopening and closing arguments must be strings if no content expression is givensnested %s%s expressionN(RRRsRRRRRR"RfRzRARRR RR)R0R(topenertclosertcontentRR}((sC/usr/lib/python2.7/site-packages/pkg_resources/_vendor/pyparsing.pyRNs4:  $  $    5.c s5fd}fd}fd}ttjdj}ttj|jd}tj|jd}tj|jd} |rtt||t|t|t|| } n0tt|t|t|t|} |j t t| jdS( s Helper method for defining space-delimited indentation blocks, such as those used to define block statements in Python source code. Parameters: - blockStatementExpr - expression defining syntax of statement that is repeated within the indented block - indentStack - list created by caller to manage indentation stack (multiple statementWithIndentedBlock expressions within a single grammar should share a common indentStack) - indent - boolean indicating whether block must be indented beyond the the current level; set to False for block of left-most statements (default=C{True}) A valid block must contain at least one C{blockStatement}. 
Example:: data = ''' def A(z): A1 B = 100 G = A2 A2 A3 B def BB(a,b,c): BB1 def BBA(): bba1 bba2 bba3 C D def spam(x,y): def eggs(z): pass ''' indentStack = [1] stmt = Forward() identifier = Word(alphas, alphanums) funcDecl = ("def" + identifier + Group( "(" + Optional( delimitedList(identifier) ) + ")" ) + ":") func_body = indentedBlock(stmt, indentStack) funcDef = Group( funcDecl + func_body ) rvalue = Forward() funcCall = Group(identifier + "(" + Optional(delimitedList(rvalue)) + ")") rvalue << (funcCall | identifier | Word(nums)) assignment = Group(identifier + "=" + rvalue) stmt << ( funcDef | assignment | identifier ) module_body = OneOrMore(stmt) parseTree = module_body.parseString(data) parseTree.pprint() prints:: [['def', 'A', ['(', 'z', ')'], ':', [['A1'], [['B', '=', '100']], [['G', '=', 'A2']], ['A2'], ['A3']]], 'B', ['def', 'BB', ['(', 'a', 'b', 'c', ')'], ':', [['BB1'], [['def', 'BBA', ['(', ')'], ':', [['bba1'], ['bba2'], ['bba3']]]]]], 'C', 'D', ['def', 'spam', ['(', 'x', 'y', ')'], ':', [[['def', 'eggs', ['(', 'z', ')'], ':', [['pass']]]]]]] css|t|krdSt||}|dkro|dkrZt||dnt||dndS(Nisillegal nestingsnot a peer entry(RR7RR(RRNRptcurCol(t indentStack(sC/usr/lib/python2.7/site-packages/pkg_resources/_vendor/pyparsing.pytcheckPeerIndentscsEt||}|dkr/j|nt||ddS(Nisnot a subentry(R7RR(RRNRpR+(R,(sC/usr/lib/python2.7/site-packages/pkg_resources/_vendor/pyparsing.pytcheckSubIndentscsn|t|krdSt||}oH|dkoH|dks`t||dnjdS(Niisnot an unindent(RR7RR(RRNRpR+(R,(sC/usr/lib/python2.7/site-packages/pkg_resources/_vendor/pyparsing.pyt checkUnindents &s tINDENTRrtUNINDENTsindented block( RRRRR RzRRRRR( tblockStatementExprR,R$R-R.R/R7R0tPEERtUNDENTtsmExpr((R,sC/usr/lib/python2.7/site-packages/pkg_resources/_vendor/pyparsing.pyRdQsN"8 $s#[\0xc0-\0xd6\0xd8-\0xf6\0xf8-\0xff]s[\0xa1-\0xbf\0xd7\0xf7]s_:sany tagsgt lt amp nbsp quot aposs><& "'s &(?PRs);scommon HTML entitycCstj|jS(sRHelper parser action to replace common HTML entities with their special characters(t_htmlEntityMapRtentity(Rp((sC/usr/lib/python2.7/site-packages/pkg_resources/_vendor/pyparsing.pyRYss/\*(?:[^*]|\*(?!/))*s*/sC style commentss HTML comments.*s rest of lines//(?:\\\n|[^\n])*s // commentsC++ style comments#.*sPython style comments t commaItemRcBseZdZeeZeeZee j dj eZ ee j dj eedZedj dj eZej edej ej dZejdeeeed jeBj d Zejeed j d j eZed j dj eZeeBeBjZedj dj eZeededj dZedj dZedj dZ e de dj dZ!ee de d8dee de d9j dZ"e"j#ddej d Z$e%e!e$Be"Bj d!j d!Z&ed"j d#Z'e(d$d%Z)e(d&d'Z*ed(j d)Z+ed*j d+Z,ed,j d-Z-e.je/jBZ0e(d.Z1e%e2e3d/e4ee5d0d/ee6d1jj d2Z7e8ee9j:e7Bd3d4j d5Z;e(ed6Z<e(ed7Z=RS(:s Here are some common low-level expressions that may be useful in jump-starting parser development: - numeric forms (L{integers}, L{reals}, L{scientific notation}) - common L{programming identifiers} - network addresses (L{MAC}, L{IPv4}, L{IPv6}) - ISO8601 L{dates} and L{datetime} - L{UUID} - L{comma-separated list} Parse actions: - C{L{convertToInteger}} - C{L{convertToFloat}} - C{L{convertToDate}} - C{L{convertToDatetime}} - C{L{stripHTMLTags}} - C{L{upcaseTokens}} - C{L{downcaseTokens}} Example:: pyparsing_common.number.runTests(''' # any int or real number, returned as the appropriate type 100 -100 +100 3.14159 6.02e23 1e-12 ''') pyparsing_common.fnumber.runTests(''' # any int or real number, returned as float 100 -100 +100 3.14159 6.02e23 1e-12 ''') pyparsing_common.hex_integer.runTests(''' # hex numbers 100 FF ''') pyparsing_common.fraction.runTests(''' # fractions 1/2 -3/4 ''') pyparsing_common.mixed_integer.runTests(''' # mixed fractions 1 1/2 -3/4 1-3/4 
''') import uuid pyparsing_common.uuid.setParseAction(tokenMap(uuid.UUID)) pyparsing_common.uuid.runTests(''' # uuid 12345678-1234-5678-1234-567812345678 ''') prints:: # any int or real number, returned as the appropriate type 100 [100] -100 [-100] +100 [100] 3.14159 [3.14159] 6.02e23 [6.02e+23] 1e-12 [1e-12] # any int or real number, returned as float 100 [100.0] -100 [-100.0] +100 [100.0] 3.14159 [3.14159] 6.02e23 [6.02e+23] 1e-12 [1e-12] # hex numbers 100 [256] FF [255] # fractions 1/2 [0.5] -3/4 [-0.75] # mixed fractions 1 [1] 1/2 [0.5] -3/4 [-0.75] 1-3/4 [1.75] # uuid 12345678-1234-5678-1234-567812345678 [UUID('12345678-1234-5678-1234-567812345678')] tintegers hex integeris[+-]?\d+ssigned integerRtfractioncCs|d|dS(Nii((Rp((sC/usr/lib/python2.7/site-packages/pkg_resources/_vendor/pyparsing.pyRqRrRzs"fraction or mixed integer-fractions [+-]?\d+\.\d*s real numbers+[+-]?\d+([eE][+-]?\d+|\.\d*([eE][+-]?\d+)?)s$real number with scientific notations[+-]?\d+\.?\d*([eE][+-]?\d+)?tfnumberRt identifiersK(25[0-5]|2[0-4][0-9]|1?[0-9]{1,2})(\.(25[0-5]|2[0-4][0-9]|1?[0-9]{1,2})){3}s IPv4 addresss[0-9a-fA-F]{1,4}t hex_integerRisfull IPv6 addressiis::sshort IPv6 addresscCstd|DdkS(Ncss'|]}tjj|rdVqdS(iN(Rlt _ipv6_partR(RR((sC/usr/lib/python2.7/site-packages/pkg_resources/_vendor/pyparsing.pys si(R(Rp((sC/usr/lib/python2.7/site-packages/pkg_resources/_vendor/pyparsing.pyRqRrs::ffff:smixed IPv6 addresss IPv6 addresss:[0-9a-fA-F]{2}([:.-])[0-9a-fA-F]{2}(?:\1[0-9a-fA-F]{2}){4}s MAC addresss%Y-%m-%dcsfd}|S(s Helper to create a parse action for converting parsed date string to Python datetime.date Params - - fmt - format to be passed to datetime.strptime (default=C{"%Y-%m-%d"}) Example:: date_expr = pyparsing_common.iso8601_date.copy() date_expr.setParseAction(pyparsing_common.convertToDate()) print(date_expr.parseString("1999-12-31")) prints:: [datetime.date(1999, 12, 31)] csPytj|djSWn+tk rK}t||t|nXdS(Ni(RtstrptimetdateRRRu(RRNRptve(tfmt(sC/usr/lib/python2.7/site-packages/pkg_resources/_vendor/pyparsing.pytcvt_fns((RBRC((RBsC/usr/lib/python2.7/site-packages/pkg_resources/_vendor/pyparsing.pyt convertToDatess%Y-%m-%dT%H:%M:%S.%fcsfd}|S(s Helper to create a parse action for converting parsed datetime string to Python datetime.datetime Params - - fmt - format to be passed to datetime.strptime (default=C{"%Y-%m-%dT%H:%M:%S.%f"}) Example:: dt_expr = pyparsing_common.iso8601_datetime.copy() dt_expr.setParseAction(pyparsing_common.convertToDatetime()) print(dt_expr.parseString("1999-12-31T23:59:59.999")) prints:: [datetime.datetime(1999, 12, 31, 23, 59, 59, 999000)] csJytj|dSWn+tk rE}t||t|nXdS(Ni(RR?RRRu(RRNRpRA(RB(sC/usr/lib/python2.7/site-packages/pkg_resources/_vendor/pyparsing.pyRCs((RBRC((RBsC/usr/lib/python2.7/site-packages/pkg_resources/_vendor/pyparsing.pytconvertToDatetimess7(?P\d{4})(?:-(?P\d\d)(?:-(?P\d\d))?)?s ISO8601 dates(?P\d{4})-(?P\d\d)-(?P\d\d)[T ](?P\d\d):(?P\d\d)(:(?P\d\d(\.\d*)?)?)?(?PZ|[+-]\d\d:?\d\d)?sISO8601 datetimes2[0-9a-fA-F]{8}(-[0-9a-fA-F]{4}){3}-[0-9a-fA-F]{12}tUUIDcCstjj|dS(s Parse action to remove HTML tags from web page HTML source Example:: # strip HTML links from normal text text = 'More info at the
pyparsing wiki page' td,td_end = makeHTMLTags("TD") table_text = td + SkipTo(td_end).setParseAction(pyparsing_common.stripHTMLTags)("body") + td_end print(table_text.parseString(text).body) # -> 'More info at the pyparsing wiki page' i(Rlt_html_stripperR{(RRNR((sC/usr/lib/python2.7/site-packages/pkg_resources/_vendor/pyparsing.pyt stripHTMLTagss RR<s R8RRrscomma separated listcCst|jS(N(RR,(Rp((sC/usr/lib/python2.7/site-packages/pkg_resources/_vendor/pyparsing.pyRqRrcCst|jS(N(RR(Rp((sC/usr/lib/python2.7/site-packages/pkg_resources/_vendor/pyparsing.pyRqRr(ii(ii(>RRRRkRotconvertToIntegertfloattconvertToFloatR-RPRRzR9RBR=R%tsigned_integerR:RRRt mixed_integerRtrealtsci_realRtnumberR;R2R1R<t ipv4_addressR>t_full_ipv6_addresst_short_ipv6_addressRt_mixed_ipv6_addressRt ipv6_addresst mac_addressR#RDREt iso8601_datetiso8601_datetimetuuidR5R4RGRHRRRRTR,t _commasepitemR>RWRtcomma_separated_listRbR@(((sC/usr/lib/python2.7/site-packages/pkg_resources/_vendor/pyparsing.pyRlsL  '/-  ;&J+t__main__tselecttfroms_$RRtcolumnsRttablestcommandsK # '*' as column list and dotted table name select * from SYS.XYZZY # caseless match on "SELECT", and casts back to "select" SELECT * from XYZZY, ABC # list of column names, and mixed case SELECT keyword Select AA,BB,CC from Sys.dual # multiple tables Select A, B, C from Sys.dual, Table2 # invalid SELECT keyword - should fail Xelect A, B, C from Sys.dual # incomplete command - should fail Select # invalid column name - should fail Select ^^^ frox Sys.dual s] 100 -100 +100 3.14159 6.02e23 1e-12 s 100 FF s6 12345678-1234-5678-1234-567812345678 (Rt __version__t__versionTime__t __author__RtweakrefRRRRxRR|RSRR8RRRRt_threadRt ImportErrort threadingRRt ordereddictRt__all__Rt version_infoRQRtmaxsizeR$RuRtchrRRRRR2treversedRRR4RxRIRJR_tmaxinttxrangeRt __builtin__RtfnameRR`RRRRRRtascii_uppercasetascii_lowercaseR2RPRBR1RRt printableRTRaRRRR!R$RR tMutableMappingtregisterR7RHRERGRKRMROReR"R*R RRRRiRRRRjR-R%R#RR,RpRRRR(R'R/R.RRRRR RR RRRR0RRRR&R RR+RRR R)RR`RR>R:RRMRLRERRQR?ReRfRhRRARGRFR_R^Rzt _escapedPunct_escapedHexChart_escapedOctChartUNICODEt _singleChart _charRangeRRR]RKRZRXRkRbR@R RIRJRcR RiRRRRRgRSR<R\RWRaRNRdR3RUR5R4RRR6RR9RYR6RCRR[R=R;RDRVRRZR8RlRt selectTokent fromTokentidentt columnNametcolumnNameListt columnSpect tableNamet tableNameListt simpleSQLR"RPR;R=RYRF(((sC/usr/lib/python2.7/site-packages/pkg_resources/_vendor/pyparsing.pyt=s              *         8      @ & A=IG3pLOD|M &# @sQ,A ,    I # %  !4@    ,   ?  #   k%Z r  (, #8+    $     PK ZC&fWfW.site-packages/pkg_resources/_vendor/appdirs.pynu[#!/usr/bin/env python # -*- coding: utf-8 -*- # Copyright (c) 2005-2010 ActiveState Software Inc. # Copyright (c) 2013 Eddy Petrișor """Utilities for determining application-specific dirs. See for details and usage. """ # Dev Notes: # - MSDN on where to store app data files: # http://support.microsoft.com/default.aspx?scid=kb;en-us;310294#XSLTH3194121123120121120120 # - Mac OS X: http://developer.apple.com/documentation/MacOSX/Conceptual/BPFileSystem/index.html # - XDG spec for Un*x: http://standards.freedesktop.org/basedir-spec/basedir-spec-latest.html __version_info__ = (1, 4, 0) __version__ = '.'.join(map(str, __version_info__)) import sys import os PY3 = sys.version_info[0] == 3 if PY3: unicode = str if sys.platform.startswith('java'): import platform os_name = platform.java_ver()[3][0] if os_name.startswith('Windows'): # "Windows XP", "Windows 7", etc. system = 'win32' elif os_name.startswith('Mac'): # "Mac OS X", etc. 
system = 'darwin' else: # "Linux", "SunOS", "FreeBSD", etc. # Setting this to "linux2" is not ideal, but only Windows or Mac # are actually checked for and the rest of the module expects # *sys.platform* style strings. system = 'linux2' else: system = sys.platform def user_data_dir(appname=None, appauthor=None, version=None, roaming=False): r"""Return full path to the user-specific data dir for this application. "appname" is the name of application. If None, just the system directory is returned. "appauthor" (only used on Windows) is the name of the appauthor or distributing body for this application. Typically it is the owning company name. This falls back to appname. You may pass False to disable it. "version" is an optional version path element to append to the path. You might want to use this if you want multiple versions of your app to be able to run independently. If used, this would typically be ".". Only applied when appname is present. "roaming" (boolean, default False) can be set True to use the Windows roaming appdata directory. That means that for users on a Windows network setup for roaming profiles, this user data will be sync'd on login. See for a discussion of issues. Typical user data directories are: Mac OS X: ~/Library/Application Support/ Unix: ~/.local/share/ # or in $XDG_DATA_HOME, if defined Win XP (not roaming): C:\Documents and Settings\\Application Data\\ Win XP (roaming): C:\Documents and Settings\\Local Settings\Application Data\\ Win 7 (not roaming): C:\Users\\AppData\Local\\ Win 7 (roaming): C:\Users\\AppData\Roaming\\ For Unix, we follow the XDG spec and support $XDG_DATA_HOME. That means, by default "~/.local/share/". """ if system == "win32": if appauthor is None: appauthor = appname const = roaming and "CSIDL_APPDATA" or "CSIDL_LOCAL_APPDATA" path = os.path.normpath(_get_win_folder(const)) if appname: if appauthor is not False: path = os.path.join(path, appauthor, appname) else: path = os.path.join(path, appname) elif system == 'darwin': path = os.path.expanduser('~/Library/Application Support/') if appname: path = os.path.join(path, appname) else: path = os.getenv('XDG_DATA_HOME', os.path.expanduser("~/.local/share")) if appname: path = os.path.join(path, appname) if appname and version: path = os.path.join(path, version) return path def site_data_dir(appname=None, appauthor=None, version=None, multipath=False): """Return full path to the user-shared data dir for this application. "appname" is the name of application. If None, just the system directory is returned. "appauthor" (only used on Windows) is the name of the appauthor or distributing body for this application. Typically it is the owning company name. This falls back to appname. You may pass False to disable it. "version" is an optional version path element to append to the path. You might want to use this if you want multiple versions of your app to be able to run independently. If used, this would typically be ".". Only applied when appname is present. "multipath" is an optional parameter only applicable to *nix which indicates that the entire list of data dirs should be returned. By default, the first item from XDG_DATA_DIRS is returned, or '/usr/local/share/', if XDG_DATA_DIRS is not set Typical user data directories are: Mac OS X: /Library/Application Support/ Unix: /usr/local/share/ or /usr/share/ Win XP: C:\Documents and Settings\All Users\Application Data\\ Vista: (Fail! "C:\ProgramData" is a hidden *system* directory on Vista.) Win 7: C:\ProgramData\\ # Hidden, but writeable on Win 7. 
For Unix, this is using the $XDG_DATA_DIRS[0] default. WARNING: Do not use this on Windows. See the Vista-Fail note above for why. """ if system == "win32": if appauthor is None: appauthor = appname path = os.path.normpath(_get_win_folder("CSIDL_COMMON_APPDATA")) if appname: if appauthor is not False: path = os.path.join(path, appauthor, appname) else: path = os.path.join(path, appname) elif system == 'darwin': path = os.path.expanduser('/Library/Application Support') if appname: path = os.path.join(path, appname) else: # XDG default for $XDG_DATA_DIRS # only first, if multipath is False path = os.getenv('XDG_DATA_DIRS', os.pathsep.join(['/usr/local/share', '/usr/share'])) pathlist = [os.path.expanduser(x.rstrip(os.sep)) for x in path.split(os.pathsep)] if appname: if version: appname = os.path.join(appname, version) pathlist = [os.sep.join([x, appname]) for x in pathlist] if multipath: path = os.pathsep.join(pathlist) else: path = pathlist[0] return path if appname and version: path = os.path.join(path, version) return path def user_config_dir(appname=None, appauthor=None, version=None, roaming=False): r"""Return full path to the user-specific config dir for this application. "appname" is the name of application. If None, just the system directory is returned. "appauthor" (only used on Windows) is the name of the appauthor or distributing body for this application. Typically it is the owning company name. This falls back to appname. You may pass False to disable it. "version" is an optional version path element to append to the path. You might want to use this if you want multiple versions of your app to be able to run independently. If used, this would typically be ".". Only applied when appname is present. "roaming" (boolean, default False) can be set True to use the Windows roaming appdata directory. That means that for users on a Windows network setup for roaming profiles, this user data will be sync'd on login. See for a discussion of issues. Typical user data directories are: Mac OS X: same as user_data_dir Unix: ~/.config/ # or in $XDG_CONFIG_HOME, if defined Win *: same as user_data_dir For Unix, we follow the XDG spec and support $XDG_CONFIG_HOME. That means, by deafult "~/.config/". """ if system in ["win32", "darwin"]: path = user_data_dir(appname, appauthor, None, roaming) else: path = os.getenv('XDG_CONFIG_HOME', os.path.expanduser("~/.config")) if appname: path = os.path.join(path, appname) if appname and version: path = os.path.join(path, version) return path def site_config_dir(appname=None, appauthor=None, version=None, multipath=False): """Return full path to the user-shared data dir for this application. "appname" is the name of application. If None, just the system directory is returned. "appauthor" (only used on Windows) is the name of the appauthor or distributing body for this application. Typically it is the owning company name. This falls back to appname. You may pass False to disable it. "version" is an optional version path element to append to the path. You might want to use this if you want multiple versions of your app to be able to run independently. If used, this would typically be ".". Only applied when appname is present. "multipath" is an optional parameter only applicable to *nix which indicates that the entire list of config dirs should be returned. 
By default, the first item from XDG_CONFIG_DIRS is returned, or '/etc/xdg/', if XDG_CONFIG_DIRS is not set Typical user data directories are: Mac OS X: same as site_data_dir Unix: /etc/xdg/ or $XDG_CONFIG_DIRS[i]/ for each value in $XDG_CONFIG_DIRS Win *: same as site_data_dir Vista: (Fail! "C:\ProgramData" is a hidden *system* directory on Vista.) For Unix, this is using the $XDG_CONFIG_DIRS[0] default, if multipath=False WARNING: Do not use this on Windows. See the Vista-Fail note above for why. """ if system in ["win32", "darwin"]: path = site_data_dir(appname, appauthor) if appname and version: path = os.path.join(path, version) else: # XDG default for $XDG_CONFIG_DIRS # only first, if multipath is False path = os.getenv('XDG_CONFIG_DIRS', '/etc/xdg') pathlist = [os.path.expanduser(x.rstrip(os.sep)) for x in path.split(os.pathsep)] if appname: if version: appname = os.path.join(appname, version) pathlist = [os.sep.join([x, appname]) for x in pathlist] if multipath: path = os.pathsep.join(pathlist) else: path = pathlist[0] return path def user_cache_dir(appname=None, appauthor=None, version=None, opinion=True): r"""Return full path to the user-specific cache dir for this application. "appname" is the name of application. If None, just the system directory is returned. "appauthor" (only used on Windows) is the name of the appauthor or distributing body for this application. Typically it is the owning company name. This falls back to appname. You may pass False to disable it. "version" is an optional version path element to append to the path. You might want to use this if you want multiple versions of your app to be able to run independently. If used, this would typically be ".". Only applied when appname is present. "opinion" (boolean) can be False to disable the appending of "Cache" to the base app data dir for Windows. See discussion below. Typical user cache directories are: Mac OS X: ~/Library/Caches/ Unix: ~/.cache/ (XDG default) Win XP: C:\Documents and Settings\\Local Settings\Application Data\\\Cache Vista: C:\Users\\AppData\Local\\\Cache On Windows the only suggestion in the MSDN docs is that local settings go in the `CSIDL_LOCAL_APPDATA` directory. This is identical to the non-roaming app data dir (the default returned by `user_data_dir` above). Apps typically put cache data somewhere *under* the given dir here. Some examples: ...\Mozilla\Firefox\Profiles\\Cache ...\Acme\SuperApp\Cache\1.0 OPINION: This function appends "Cache" to the `CSIDL_LOCAL_APPDATA` value. This can be disabled with the `opinion=False` option. """ if system == "win32": if appauthor is None: appauthor = appname path = os.path.normpath(_get_win_folder("CSIDL_LOCAL_APPDATA")) if appname: if appauthor is not False: path = os.path.join(path, appauthor, appname) else: path = os.path.join(path, appname) if opinion: path = os.path.join(path, "Cache") elif system == 'darwin': path = os.path.expanduser('~/Library/Caches') if appname: path = os.path.join(path, appname) else: path = os.getenv('XDG_CACHE_HOME', os.path.expanduser('~/.cache')) if appname: path = os.path.join(path, appname) if appname and version: path = os.path.join(path, version) return path def user_log_dir(appname=None, appauthor=None, version=None, opinion=True): r"""Return full path to the user-specific log dir for this application. "appname" is the name of application. If None, just the system directory is returned. "appauthor" (only used on Windows) is the name of the appauthor or distributing body for this application. 
Typically it is the owning company name. This falls back to appname. You may pass False to disable it. "version" is an optional version path element to append to the path. You might want to use this if you want multiple versions of your app to be able to run independently. If used, this would typically be ".". Only applied when appname is present. "opinion" (boolean) can be False to disable the appending of "Logs" to the base app data dir for Windows, and "log" to the base cache dir for Unix. See discussion below. Typical user cache directories are: Mac OS X: ~/Library/Logs/ Unix: ~/.cache//log # or under $XDG_CACHE_HOME if defined Win XP: C:\Documents and Settings\\Local Settings\Application Data\\\Logs Vista: C:\Users\\AppData\Local\\\Logs On Windows the only suggestion in the MSDN docs is that local settings go in the `CSIDL_LOCAL_APPDATA` directory. (Note: I'm interested in examples of what some windows apps use for a logs dir.) OPINION: This function appends "Logs" to the `CSIDL_LOCAL_APPDATA` value for Windows and appends "log" to the user cache dir for Unix. This can be disabled with the `opinion=False` option. """ if system == "darwin": path = os.path.join( os.path.expanduser('~/Library/Logs'), appname) elif system == "win32": path = user_data_dir(appname, appauthor, version) version = False if opinion: path = os.path.join(path, "Logs") else: path = user_cache_dir(appname, appauthor, version) version = False if opinion: path = os.path.join(path, "log") if appname and version: path = os.path.join(path, version) return path class AppDirs(object): """Convenience wrapper for getting application dirs.""" def __init__(self, appname, appauthor=None, version=None, roaming=False, multipath=False): self.appname = appname self.appauthor = appauthor self.version = version self.roaming = roaming self.multipath = multipath @property def user_data_dir(self): return user_data_dir(self.appname, self.appauthor, version=self.version, roaming=self.roaming) @property def site_data_dir(self): return site_data_dir(self.appname, self.appauthor, version=self.version, multipath=self.multipath) @property def user_config_dir(self): return user_config_dir(self.appname, self.appauthor, version=self.version, roaming=self.roaming) @property def site_config_dir(self): return site_config_dir(self.appname, self.appauthor, version=self.version, multipath=self.multipath) @property def user_cache_dir(self): return user_cache_dir(self.appname, self.appauthor, version=self.version) @property def user_log_dir(self): return user_log_dir(self.appname, self.appauthor, version=self.version) #---- internal support stuff def _get_win_folder_from_registry(csidl_name): """This is a fallback technique at best. I'm not sure if using the registry for this guarantees us the correct answer for all CSIDL_* names. """ import _winreg shell_folder_name = { "CSIDL_APPDATA": "AppData", "CSIDL_COMMON_APPDATA": "Common AppData", "CSIDL_LOCAL_APPDATA": "Local AppData", }[csidl_name] key = _winreg.OpenKey( _winreg.HKEY_CURRENT_USER, r"Software\Microsoft\Windows\CurrentVersion\Explorer\Shell Folders" ) dir, type = _winreg.QueryValueEx(key, shell_folder_name) return dir def _get_win_folder_with_pywin32(csidl_name): from win32com.shell import shellcon, shell dir = shell.SHGetFolderPath(0, getattr(shellcon, csidl_name), 0, 0) # Try to make this a unicode path because SHGetFolderPath does # not return unicode strings when there is unicode data in the # path. try: dir = unicode(dir) # Downgrade to short path name if have highbit chars. See # . 
has_high_char = False for c in dir: if ord(c) > 255: has_high_char = True break if has_high_char: try: import win32api dir = win32api.GetShortPathName(dir) except ImportError: pass except UnicodeError: pass return dir def _get_win_folder_with_ctypes(csidl_name): import ctypes csidl_const = { "CSIDL_APPDATA": 26, "CSIDL_COMMON_APPDATA": 35, "CSIDL_LOCAL_APPDATA": 28, }[csidl_name] buf = ctypes.create_unicode_buffer(1024) ctypes.windll.shell32.SHGetFolderPathW(None, csidl_const, None, 0, buf) # Downgrade to short path name if have highbit chars. See # . has_high_char = False for c in buf: if ord(c) > 255: has_high_char = True break if has_high_char: buf2 = ctypes.create_unicode_buffer(1024) if ctypes.windll.kernel32.GetShortPathNameW(buf.value, buf2, 1024): buf = buf2 return buf.value def _get_win_folder_with_jna(csidl_name): import array from com.sun import jna from com.sun.jna.platform import win32 buf_size = win32.WinDef.MAX_PATH * 2 buf = array.zeros('c', buf_size) shell = win32.Shell32.INSTANCE shell.SHGetFolderPath(None, getattr(win32.ShlObj, csidl_name), None, win32.ShlObj.SHGFP_TYPE_CURRENT, buf) dir = jna.Native.toString(buf.tostring()).rstrip("\0") # Downgrade to short path name if have highbit chars. See # . has_high_char = False for c in dir: if ord(c) > 255: has_high_char = True break if has_high_char: buf = array.zeros('c', buf_size) kernel = win32.Kernel32.INSTANCE if kernal.GetShortPathName(dir, buf, buf_size): dir = jna.Native.toString(buf.tostring()).rstrip("\0") return dir if system == "win32": try: import win32com.shell _get_win_folder = _get_win_folder_with_pywin32 except ImportError: try: from ctypes import windll _get_win_folder = _get_win_folder_with_ctypes except ImportError: try: import com.sun.jna _get_win_folder = _get_win_folder_with_jna except ImportError: _get_win_folder = _get_win_folder_from_registry #---- self test code if __name__ == "__main__": appname = "MyApp" appauthor = "MyCompany" props = ("user_data_dir", "site_data_dir", "user_config_dir", "site_config_dir", "user_cache_dir", "user_log_dir") print("-- app dirs (with optional 'version')") dirs = AppDirs(appname, appauthor, version="1.0") for prop in props: print("%s: %s" % (prop, getattr(dirs, prop))) print("\n-- app dirs (without optional 'version')") dirs = AppDirs(appname, appauthor) for prop in props: print("%s: %s" % (prop, getattr(dirs, prop))) print("\n-- app dirs (without optional 'appauthor')") dirs = AppDirs(appname) for prop in props: print("%s: %s" % (prop, getattr(dirs, prop))) print("\n-- app dirs (with disabled 'appauthor')") dirs = AppDirs(appname, appauthor=False) for prop in props: print("%s: %s" % (prop, getattr(dirs, prop))) PK Z5B\|\|+site-packages/pkg_resources/_vendor/six.pycnu[ fcA@@sKdZddlmZddlZddlZddlZddlZddlZdZdZ ej ddkZ ej ddkZ ej dd!dakZ e refZefZefZeZeZejZnefZeefZeejfZeZeZejjd r$edcZnVd efd YZ ye!e Wne"k rjedeZn XedgZ[ dZ#dZ$defdYZ%de%fdYZ&dej'fdYZ(de%fdYZ)defdYZ*e*e+Z,de(fdYZ-e)dddde)d d!d"d#d e)d$d!d!d%d$e)d&d'd"d(d&e)d)d'd*e)d+d!d"d,d+e)d-d.d.d/d-e)d0d.d.d-d0e)d1d'd"d2d1e)d3d'e 
t__str__sY@python_2_unicode_compatible cannot be applied to %s because it doesn't define __str__().cS@s|jjdS(Nsutf-8(t __unicode__R9(R((s=/usr/lib/python2.7/site-packages/pkg_resources/_vendor/six.pytJt(tPY2R t ValueErrorRRR(R((s=/usr/lib/python2.7/site-packages/pkg_resources/_vendor/six.pytpython_2_unicode_compatible<s t__spec__(iiIiIill(ii(ii(ii(ii(Rt __future__RRbRLtoperatorR R&t __author__t __version__t version_infoRRtPY34Ret string_typestintt integer_typesRt class_typest text_typetbytest binary_typetmaxsizetMAXSIZERdtlongt ClassTypeRCtplatformt startswithtobjectRtlent OverflowErrorR RRRt ModuleTypeR'R+R1RRRGR(R#RRR?R7RRt_urllib_parse_moved_attributesRt_urllib_error_moved_attributesRt _urllib_request_moved_attributesRt!_urllib_response_moved_attributesRt$_urllib_robotparser_moved_attributesRR R t _meth_funct _meth_selft _func_closuret _func_codet_func_defaultst _func_globalsRRt NameErrorR!R#R'R(R%R)t attrgettertget_method_functiontget_method_selftget_function_closuretget_function_codetget_function_defaultstget_function_globalsR/R1R3R5t methodcallerR6R7R8R;R<tchrtunichrtstructtStructtpacktint2bytet itemgetterRGtgetitemRJR+t iterbytesRIRJtBytesIORLRORPtpartialRVRER=R@RAR"RMR]RRUR_RwRztWRAPPER_ASSIGNMENTStWRAPPER_UPDATESR{RRRRBt __package__tglobalsRyRtsubmodule_search_locationst meta_pathRrRItimportertappend(((s=/usr/lib/python2.7/site-packages/pkg_resources/_vendor/six.pyts               >                                                                                 5         PK ZD\M0site-packages/pkg_resources/_vendor/__init__.pycnu[ fc@sdS(N((((sB/usr/lib/python2.7/site-packages/pkg_resources/_vendor/__init__.pyttPK ZXMZuu*site-packages/pkg_resources/_vendor/six.pynu["""Utilities for writing code that runs on Python 2 and 3""" # Copyright (c) 2010-2015 Benjamin Peterson # # Permission is hereby granted, free of charge, to any person obtaining a copy # of this software and associated documentation files (the "Software"), to deal # in the Software without restriction, including without limitation the rights # to use, copy, modify, merge, publish, distribute, sublicense, and/or sell # copies of the Software, and to permit persons to whom the Software is # furnished to do so, subject to the following conditions: # # The above copyright notice and this permission notice shall be included in all # copies or substantial portions of the Software. # # THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR # IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, # FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE # AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER # LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, # OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE # SOFTWARE. from __future__ import absolute_import import functools import itertools import operator import sys import types __author__ = "Benjamin Peterson " __version__ = "1.10.0" # Useful for very coarse version differentiation. PY2 = sys.version_info[0] == 2 PY3 = sys.version_info[0] == 3 PY34 = sys.version_info[0:2] >= (3, 4) if PY3: string_types = str, integer_types = int, class_types = type, text_type = str binary_type = bytes MAXSIZE = sys.maxsize else: string_types = basestring, integer_types = (int, long) class_types = (type, types.ClassType) text_type = unicode binary_type = str if sys.platform.startswith("java"): # Jython always uses 32 bits. 
MAXSIZE = int((1 << 31) - 1) else: # It's possible to have sizeof(long) != sizeof(Py_ssize_t). class X(object): def __len__(self): return 1 << 31 try: len(X()) except OverflowError: # 32-bit MAXSIZE = int((1 << 31) - 1) else: # 64-bit MAXSIZE = int((1 << 63) - 1) del X def _add_doc(func, doc): """Add documentation to a function.""" func.__doc__ = doc def _import_module(name): """Import module, returning the module after the last dot.""" __import__(name) return sys.modules[name] class _LazyDescr(object): def __init__(self, name): self.name = name def __get__(self, obj, tp): result = self._resolve() setattr(obj, self.name, result) # Invokes __set__. try: # This is a bit ugly, but it avoids running this again by # removing this descriptor. delattr(obj.__class__, self.name) except AttributeError: pass return result class MovedModule(_LazyDescr): def __init__(self, name, old, new=None): super(MovedModule, self).__init__(name) if PY3: if new is None: new = name self.mod = new else: self.mod = old def _resolve(self): return _import_module(self.mod) def __getattr__(self, attr): _module = self._resolve() value = getattr(_module, attr) setattr(self, attr, value) return value class _LazyModule(types.ModuleType): def __init__(self, name): super(_LazyModule, self).__init__(name) self.__doc__ = self.__class__.__doc__ def __dir__(self): attrs = ["__doc__", "__name__"] attrs += [attr.name for attr in self._moved_attributes] return attrs # Subclasses should override this _moved_attributes = [] class MovedAttribute(_LazyDescr): def __init__(self, name, old_mod, new_mod, old_attr=None, new_attr=None): super(MovedAttribute, self).__init__(name) if PY3: if new_mod is None: new_mod = name self.mod = new_mod if new_attr is None: if old_attr is None: new_attr = name else: new_attr = old_attr self.attr = new_attr else: self.mod = old_mod if old_attr is None: old_attr = name self.attr = old_attr def _resolve(self): module = _import_module(self.mod) return getattr(module, self.attr) class _SixMetaPathImporter(object): """ A meta path importer to import six.moves and its submodules. This class implements a PEP302 finder and loader. It should be compatible with Python 2.5 and all existing versions of Python3 """ def __init__(self, six_module_name): self.name = six_module_name self.known_modules = {} def _add_module(self, mod, *fullnames): for fullname in fullnames: self.known_modules[self.name + "." + fullname] = mod def _get_module(self, fullname): return self.known_modules[self.name + "." + fullname] def find_module(self, fullname, path=None): if fullname in self.known_modules: return self return None def __get_module(self, fullname): try: return self.known_modules[fullname] except KeyError: raise ImportError("This loader does not know module " + fullname) def load_module(self, fullname): try: # in case of a reload return sys.modules[fullname] except KeyError: pass mod = self.__get_module(fullname) if isinstance(mod, MovedModule): mod = mod._resolve() else: mod.__loader__ = self sys.modules[fullname] = mod return mod def is_package(self, fullname): """ Return true, if the named module is a package. 
We need this method to get correct spec objects with Python 3.4 (see PEP451) """ return hasattr(self.__get_module(fullname), "__path__") def get_code(self, fullname): """Return None Required, if is_package is implemented""" self.__get_module(fullname) # eventually raises ImportError return None get_source = get_code # same as get_code _importer = _SixMetaPathImporter(__name__) class _MovedItems(_LazyModule): """Lazy loading of moved objects""" __path__ = [] # mark as package _moved_attributes = [ MovedAttribute("cStringIO", "cStringIO", "io", "StringIO"), MovedAttribute("filter", "itertools", "builtins", "ifilter", "filter"), MovedAttribute("filterfalse", "itertools", "itertools", "ifilterfalse", "filterfalse"), MovedAttribute("input", "__builtin__", "builtins", "raw_input", "input"), MovedAttribute("intern", "__builtin__", "sys"), MovedAttribute("map", "itertools", "builtins", "imap", "map"), MovedAttribute("getcwd", "os", "os", "getcwdu", "getcwd"), MovedAttribute("getcwdb", "os", "os", "getcwd", "getcwdb"), MovedAttribute("range", "__builtin__", "builtins", "xrange", "range"), MovedAttribute("reload_module", "__builtin__", "importlib" if PY34 else "imp", "reload"), MovedAttribute("reduce", "__builtin__", "functools"), MovedAttribute("shlex_quote", "pipes", "shlex", "quote"), MovedAttribute("StringIO", "StringIO", "io"), MovedAttribute("UserDict", "UserDict", "collections"), MovedAttribute("UserList", "UserList", "collections"), MovedAttribute("UserString", "UserString", "collections"), MovedAttribute("xrange", "__builtin__", "builtins", "xrange", "range"), MovedAttribute("zip", "itertools", "builtins", "izip", "zip"), MovedAttribute("zip_longest", "itertools", "itertools", "izip_longest", "zip_longest"), MovedModule("builtins", "__builtin__"), MovedModule("configparser", "ConfigParser"), MovedModule("copyreg", "copy_reg"), MovedModule("dbm_gnu", "gdbm", "dbm.gnu"), MovedModule("_dummy_thread", "dummy_thread", "_dummy_thread"), MovedModule("http_cookiejar", "cookielib", "http.cookiejar"), MovedModule("http_cookies", "Cookie", "http.cookies"), MovedModule("html_entities", "htmlentitydefs", "html.entities"), MovedModule("html_parser", "HTMLParser", "html.parser"), MovedModule("http_client", "httplib", "http.client"), MovedModule("email_mime_multipart", "email.MIMEMultipart", "email.mime.multipart"), MovedModule("email_mime_nonmultipart", "email.MIMENonMultipart", "email.mime.nonmultipart"), MovedModule("email_mime_text", "email.MIMEText", "email.mime.text"), MovedModule("email_mime_base", "email.MIMEBase", "email.mime.base"), MovedModule("BaseHTTPServer", "BaseHTTPServer", "http.server"), MovedModule("CGIHTTPServer", "CGIHTTPServer", "http.server"), MovedModule("SimpleHTTPServer", "SimpleHTTPServer", "http.server"), MovedModule("cPickle", "cPickle", "pickle"), MovedModule("queue", "Queue"), MovedModule("reprlib", "repr"), MovedModule("socketserver", "SocketServer"), MovedModule("_thread", "thread", "_thread"), MovedModule("tkinter", "Tkinter"), MovedModule("tkinter_dialog", "Dialog", "tkinter.dialog"), MovedModule("tkinter_filedialog", "FileDialog", "tkinter.filedialog"), MovedModule("tkinter_scrolledtext", "ScrolledText", "tkinter.scrolledtext"), MovedModule("tkinter_simpledialog", "SimpleDialog", "tkinter.simpledialog"), MovedModule("tkinter_tix", "Tix", "tkinter.tix"), MovedModule("tkinter_ttk", "ttk", "tkinter.ttk"), MovedModule("tkinter_constants", "Tkconstants", "tkinter.constants"), MovedModule("tkinter_dnd", "Tkdnd", "tkinter.dnd"), MovedModule("tkinter_colorchooser", 
"tkColorChooser", "tkinter.colorchooser"), MovedModule("tkinter_commondialog", "tkCommonDialog", "tkinter.commondialog"), MovedModule("tkinter_tkfiledialog", "tkFileDialog", "tkinter.filedialog"), MovedModule("tkinter_font", "tkFont", "tkinter.font"), MovedModule("tkinter_messagebox", "tkMessageBox", "tkinter.messagebox"), MovedModule("tkinter_tksimpledialog", "tkSimpleDialog", "tkinter.simpledialog"), MovedModule("urllib_parse", __name__ + ".moves.urllib_parse", "urllib.parse"), MovedModule("urllib_error", __name__ + ".moves.urllib_error", "urllib.error"), MovedModule("urllib", __name__ + ".moves.urllib", __name__ + ".moves.urllib"), MovedModule("urllib_robotparser", "robotparser", "urllib.robotparser"), MovedModule("xmlrpc_client", "xmlrpclib", "xmlrpc.client"), MovedModule("xmlrpc_server", "SimpleXMLRPCServer", "xmlrpc.server"), ] # Add windows specific modules. if sys.platform == "win32": _moved_attributes += [ MovedModule("winreg", "_winreg"), ] for attr in _moved_attributes: setattr(_MovedItems, attr.name, attr) if isinstance(attr, MovedModule): _importer._add_module(attr, "moves." + attr.name) del attr _MovedItems._moved_attributes = _moved_attributes moves = _MovedItems(__name__ + ".moves") _importer._add_module(moves, "moves") class Module_six_moves_urllib_parse(_LazyModule): """Lazy loading of moved objects in six.moves.urllib_parse""" _urllib_parse_moved_attributes = [ MovedAttribute("ParseResult", "urlparse", "urllib.parse"), MovedAttribute("SplitResult", "urlparse", "urllib.parse"), MovedAttribute("parse_qs", "urlparse", "urllib.parse"), MovedAttribute("parse_qsl", "urlparse", "urllib.parse"), MovedAttribute("urldefrag", "urlparse", "urllib.parse"), MovedAttribute("urljoin", "urlparse", "urllib.parse"), MovedAttribute("urlparse", "urlparse", "urllib.parse"), MovedAttribute("urlsplit", "urlparse", "urllib.parse"), MovedAttribute("urlunparse", "urlparse", "urllib.parse"), MovedAttribute("urlunsplit", "urlparse", "urllib.parse"), MovedAttribute("quote", "urllib", "urllib.parse"), MovedAttribute("quote_plus", "urllib", "urllib.parse"), MovedAttribute("unquote", "urllib", "urllib.parse"), MovedAttribute("unquote_plus", "urllib", "urllib.parse"), MovedAttribute("urlencode", "urllib", "urllib.parse"), MovedAttribute("splitquery", "urllib", "urllib.parse"), MovedAttribute("splittag", "urllib", "urllib.parse"), MovedAttribute("splituser", "urllib", "urllib.parse"), MovedAttribute("uses_fragment", "urlparse", "urllib.parse"), MovedAttribute("uses_netloc", "urlparse", "urllib.parse"), MovedAttribute("uses_params", "urlparse", "urllib.parse"), MovedAttribute("uses_query", "urlparse", "urllib.parse"), MovedAttribute("uses_relative", "urlparse", "urllib.parse"), ] for attr in _urllib_parse_moved_attributes: setattr(Module_six_moves_urllib_parse, attr.name, attr) del attr Module_six_moves_urllib_parse._moved_attributes = _urllib_parse_moved_attributes _importer._add_module(Module_six_moves_urllib_parse(__name__ + ".moves.urllib_parse"), "moves.urllib_parse", "moves.urllib.parse") class Module_six_moves_urllib_error(_LazyModule): """Lazy loading of moved objects in six.moves.urllib_error""" _urllib_error_moved_attributes = [ MovedAttribute("URLError", "urllib2", "urllib.error"), MovedAttribute("HTTPError", "urllib2", "urllib.error"), MovedAttribute("ContentTooShortError", "urllib", "urllib.error"), ] for attr in _urllib_error_moved_attributes: setattr(Module_six_moves_urllib_error, attr.name, attr) del attr Module_six_moves_urllib_error._moved_attributes = _urllib_error_moved_attributes 
_importer._add_module(Module_six_moves_urllib_error(__name__ + ".moves.urllib.error"), "moves.urllib_error", "moves.urllib.error") class Module_six_moves_urllib_request(_LazyModule): """Lazy loading of moved objects in six.moves.urllib_request""" _urllib_request_moved_attributes = [ MovedAttribute("urlopen", "urllib2", "urllib.request"), MovedAttribute("install_opener", "urllib2", "urllib.request"), MovedAttribute("build_opener", "urllib2", "urllib.request"), MovedAttribute("pathname2url", "urllib", "urllib.request"), MovedAttribute("url2pathname", "urllib", "urllib.request"), MovedAttribute("getproxies", "urllib", "urllib.request"), MovedAttribute("Request", "urllib2", "urllib.request"), MovedAttribute("OpenerDirector", "urllib2", "urllib.request"), MovedAttribute("HTTPDefaultErrorHandler", "urllib2", "urllib.request"), MovedAttribute("HTTPRedirectHandler", "urllib2", "urllib.request"), MovedAttribute("HTTPCookieProcessor", "urllib2", "urllib.request"), MovedAttribute("ProxyHandler", "urllib2", "urllib.request"), MovedAttribute("BaseHandler", "urllib2", "urllib.request"), MovedAttribute("HTTPPasswordMgr", "urllib2", "urllib.request"), MovedAttribute("HTTPPasswordMgrWithDefaultRealm", "urllib2", "urllib.request"), MovedAttribute("AbstractBasicAuthHandler", "urllib2", "urllib.request"), MovedAttribute("HTTPBasicAuthHandler", "urllib2", "urllib.request"), MovedAttribute("ProxyBasicAuthHandler", "urllib2", "urllib.request"), MovedAttribute("AbstractDigestAuthHandler", "urllib2", "urllib.request"), MovedAttribute("HTTPDigestAuthHandler", "urllib2", "urllib.request"), MovedAttribute("ProxyDigestAuthHandler", "urllib2", "urllib.request"), MovedAttribute("HTTPHandler", "urllib2", "urllib.request"), MovedAttribute("HTTPSHandler", "urllib2", "urllib.request"), MovedAttribute("FileHandler", "urllib2", "urllib.request"), MovedAttribute("FTPHandler", "urllib2", "urllib.request"), MovedAttribute("CacheFTPHandler", "urllib2", "urllib.request"), MovedAttribute("UnknownHandler", "urllib2", "urllib.request"), MovedAttribute("HTTPErrorProcessor", "urllib2", "urllib.request"), MovedAttribute("urlretrieve", "urllib", "urllib.request"), MovedAttribute("urlcleanup", "urllib", "urllib.request"), MovedAttribute("URLopener", "urllib", "urllib.request"), MovedAttribute("FancyURLopener", "urllib", "urllib.request"), MovedAttribute("proxy_bypass", "urllib", "urllib.request"), ] for attr in _urllib_request_moved_attributes: setattr(Module_six_moves_urllib_request, attr.name, attr) del attr Module_six_moves_urllib_request._moved_attributes = _urllib_request_moved_attributes _importer._add_module(Module_six_moves_urllib_request(__name__ + ".moves.urllib.request"), "moves.urllib_request", "moves.urllib.request") class Module_six_moves_urllib_response(_LazyModule): """Lazy loading of moved objects in six.moves.urllib_response""" _urllib_response_moved_attributes = [ MovedAttribute("addbase", "urllib", "urllib.response"), MovedAttribute("addclosehook", "urllib", "urllib.response"), MovedAttribute("addinfo", "urllib", "urllib.response"), MovedAttribute("addinfourl", "urllib", "urllib.response"), ] for attr in _urllib_response_moved_attributes: setattr(Module_six_moves_urllib_response, attr.name, attr) del attr Module_six_moves_urllib_response._moved_attributes = _urllib_response_moved_attributes _importer._add_module(Module_six_moves_urllib_response(__name__ + ".moves.urllib.response"), "moves.urllib_response", "moves.urllib.response") class Module_six_moves_urllib_robotparser(_LazyModule): """Lazy loading of moved objects 
in six.moves.urllib_robotparser""" _urllib_robotparser_moved_attributes = [ MovedAttribute("RobotFileParser", "robotparser", "urllib.robotparser"), ] for attr in _urllib_robotparser_moved_attributes: setattr(Module_six_moves_urllib_robotparser, attr.name, attr) del attr Module_six_moves_urllib_robotparser._moved_attributes = _urllib_robotparser_moved_attributes _importer._add_module(Module_six_moves_urllib_robotparser(__name__ + ".moves.urllib.robotparser"), "moves.urllib_robotparser", "moves.urllib.robotparser") class Module_six_moves_urllib(types.ModuleType): """Create a six.moves.urllib namespace that resembles the Python 3 namespace""" __path__ = [] # mark as package parse = _importer._get_module("moves.urllib_parse") error = _importer._get_module("moves.urllib_error") request = _importer._get_module("moves.urllib_request") response = _importer._get_module("moves.urllib_response") robotparser = _importer._get_module("moves.urllib_robotparser") def __dir__(self): return ['parse', 'error', 'request', 'response', 'robotparser'] _importer._add_module(Module_six_moves_urllib(__name__ + ".moves.urllib"), "moves.urllib") def add_move(move): """Add an item to six.moves.""" setattr(_MovedItems, move.name, move) def remove_move(name): """Remove item from six.moves.""" try: delattr(_MovedItems, name) except AttributeError: try: del moves.__dict__[name] except KeyError: raise AttributeError("no such move, %r" % (name,)) if PY3: _meth_func = "__func__" _meth_self = "__self__" _func_closure = "__closure__" _func_code = "__code__" _func_defaults = "__defaults__" _func_globals = "__globals__" else: _meth_func = "im_func" _meth_self = "im_self" _func_closure = "func_closure" _func_code = "func_code" _func_defaults = "func_defaults" _func_globals = "func_globals" try: advance_iterator = next except NameError: def advance_iterator(it): return it.next() next = advance_iterator try: callable = callable except NameError: def callable(obj): return any("__call__" in klass.__dict__ for klass in type(obj).__mro__) if PY3: def get_unbound_function(unbound): return unbound create_bound_method = types.MethodType def create_unbound_method(func, cls): return func Iterator = object else: def get_unbound_function(unbound): return unbound.im_func def create_bound_method(func, obj): return types.MethodType(func, obj, obj.__class__) def create_unbound_method(func, cls): return types.MethodType(func, None, cls) class Iterator(object): def next(self): return type(self).__next__(self) callable = callable _add_doc(get_unbound_function, """Get the function out of a possibly unbound function""") get_method_function = operator.attrgetter(_meth_func) get_method_self = operator.attrgetter(_meth_self) get_function_closure = operator.attrgetter(_func_closure) get_function_code = operator.attrgetter(_func_code) get_function_defaults = operator.attrgetter(_func_defaults) get_function_globals = operator.attrgetter(_func_globals) if PY3: def iterkeys(d, **kw): return iter(d.keys(**kw)) def itervalues(d, **kw): return iter(d.values(**kw)) def iteritems(d, **kw): return iter(d.items(**kw)) def iterlists(d, **kw): return iter(d.lists(**kw)) viewkeys = operator.methodcaller("keys") viewvalues = operator.methodcaller("values") viewitems = operator.methodcaller("items") else: def iterkeys(d, **kw): return d.iterkeys(**kw) def itervalues(d, **kw): return d.itervalues(**kw) def iteritems(d, **kw): return d.iteritems(**kw) def iterlists(d, **kw): return d.iterlists(**kw) viewkeys = operator.methodcaller("viewkeys") viewvalues = 
operator.methodcaller("viewvalues") viewitems = operator.methodcaller("viewitems") _add_doc(iterkeys, "Return an iterator over the keys of a dictionary.") _add_doc(itervalues, "Return an iterator over the values of a dictionary.") _add_doc(iteritems, "Return an iterator over the (key, value) pairs of a dictionary.") _add_doc(iterlists, "Return an iterator over the (key, [values]) pairs of a dictionary.") if PY3: def b(s): return s.encode("latin-1") def u(s): return s unichr = chr import struct int2byte = struct.Struct(">B").pack del struct byte2int = operator.itemgetter(0) indexbytes = operator.getitem iterbytes = iter import io StringIO = io.StringIO BytesIO = io.BytesIO _assertCountEqual = "assertCountEqual" if sys.version_info[1] <= 1: _assertRaisesRegex = "assertRaisesRegexp" _assertRegex = "assertRegexpMatches" else: _assertRaisesRegex = "assertRaisesRegex" _assertRegex = "assertRegex" else: def b(s): return s # Workaround for standalone backslash def u(s): return unicode(s.replace(r'\\', r'\\\\'), "unicode_escape") unichr = unichr int2byte = chr def byte2int(bs): return ord(bs[0]) def indexbytes(buf, i): return ord(buf[i]) iterbytes = functools.partial(itertools.imap, ord) import StringIO StringIO = BytesIO = StringIO.StringIO _assertCountEqual = "assertItemsEqual" _assertRaisesRegex = "assertRaisesRegexp" _assertRegex = "assertRegexpMatches" _add_doc(b, """Byte literal""") _add_doc(u, """Text literal""") def assertCountEqual(self, *args, **kwargs): return getattr(self, _assertCountEqual)(*args, **kwargs) def assertRaisesRegex(self, *args, **kwargs): return getattr(self, _assertRaisesRegex)(*args, **kwargs) def assertRegex(self, *args, **kwargs): return getattr(self, _assertRegex)(*args, **kwargs) if PY3: exec_ = getattr(moves.builtins, "exec") def reraise(tp, value, tb=None): if value is None: value = tp() if value.__traceback__ is not tb: raise value.with_traceback(tb) raise value else: def exec_(_code_, _globs_=None, _locs_=None): """Execute code in a namespace.""" if _globs_ is None: frame = sys._getframe(1) _globs_ = frame.f_globals if _locs_ is None: _locs_ = frame.f_locals del frame elif _locs_ is None: _locs_ = _globs_ exec("""exec _code_ in _globs_, _locs_""") exec_("""def reraise(tp, value, tb=None): raise tp, value, tb """) if sys.version_info[:2] == (3, 2): exec_("""def raise_from(value, from_value): if from_value is None: raise value raise value from from_value """) elif sys.version_info[:2] > (3, 2): exec_("""def raise_from(value, from_value): raise value from from_value """) else: def raise_from(value, from_value): raise value print_ = getattr(moves.builtins, "print", None) if print_ is None: def print_(*args, **kwargs): """The new-style print function for Python 2.4 and 2.5.""" fp = kwargs.pop("file", sys.stdout) if fp is None: return def write(data): if not isinstance(data, basestring): data = str(data) # If the file has an encoding, encode unicode with it. 
if (isinstance(fp, file) and isinstance(data, unicode) and fp.encoding is not None): errors = getattr(fp, "errors", None) if errors is None: errors = "strict" data = data.encode(fp.encoding, errors) fp.write(data) want_unicode = False sep = kwargs.pop("sep", None) if sep is not None: if isinstance(sep, unicode): want_unicode = True elif not isinstance(sep, str): raise TypeError("sep must be None or a string") end = kwargs.pop("end", None) if end is not None: if isinstance(end, unicode): want_unicode = True elif not isinstance(end, str): raise TypeError("end must be None or a string") if kwargs: raise TypeError("invalid keyword arguments to print()") if not want_unicode: for arg in args: if isinstance(arg, unicode): want_unicode = True break if want_unicode: newline = unicode("\n") space = unicode(" ") else: newline = "\n" space = " " if sep is None: sep = space if end is None: end = newline for i, arg in enumerate(args): if i: write(sep) write(arg) write(end) if sys.version_info[:2] < (3, 3): _print = print_ def print_(*args, **kwargs): fp = kwargs.get("file", sys.stdout) flush = kwargs.pop("flush", False) _print(*args, **kwargs) if flush and fp is not None: fp.flush() _add_doc(reraise, """Reraise an exception.""") if sys.version_info[0:2] < (3, 4): def wraps(wrapped, assigned=functools.WRAPPER_ASSIGNMENTS, updated=functools.WRAPPER_UPDATES): def wrapper(f): f = functools.wraps(wrapped, assigned, updated)(f) f.__wrapped__ = wrapped return f return wrapper else: wraps = functools.wraps def with_metaclass(meta, *bases): """Create a base class with a metaclass.""" # This requires a bit of explanation: the basic idea is to make a dummy # metaclass for one level of class instantiation that replaces itself with # the actual metaclass. class metaclass(meta): def __new__(cls, name, this_bases, d): return meta(name, bases, d) return type.__new__(metaclass, 'temporary_class', (), {}) def add_metaclass(metaclass): """Class decorator for creating a class with a metaclass.""" def wrapper(cls): orig_vars = cls.__dict__.copy() slots = orig_vars.get('__slots__') if slots is not None: if isinstance(slots, str): slots = [slots] for slots_var in slots: orig_vars.pop(slots_var) orig_vars.pop('__dict__', None) orig_vars.pop('__weakref__', None) return metaclass(cls.__name__, cls.__bases__, orig_vars) return wrapper def python_2_unicode_compatible(klass): """ A decorator that defines __unicode__ and __str__ methods under Python 2. Under Python 3 it does nothing. To support Python 2 and 3 with a single code base, define a __str__ method returning text and apply this decorator to the class. """ if PY2: if '__str__' not in klass.__dict__: raise ValueError("@python_2_unicode_compatible cannot be applied " "to %s because it doesn't define __str__()." % klass.__name__) klass.__unicode__ = klass.__str__ klass.__str__ = lambda self: self.__unicode__().encode('utf-8') return klass # Complete the moves implementation. # This code is at the end of this module to speed up module loading. # Turn this module into a package. __path__ = [] # required for PEP 302 and PEP 451 __package__ = __name__ # see PEP 366 @ReservedAssignment if globals().get("__spec__") is not None: __spec__.submodule_search_locations = [] # PEP 451 @UndefinedVariable # Remove other six meta path importers, since they cause problems. This can # happen if six is removed from sys.modules and then reloaded. (Setuptools does # this for some reason.) 
if sys.meta_path: for i, importer in enumerate(sys.meta_path): # Here's some real nastiness: Another "instance" of the six module might # be floating around. Therefore, we can't use isinstance() to check for # the six meta path importer, since the other six instance will have # inserted an importer with different class. if (type(importer).__name__ == "_SixMetaPathImporter" and importer.name == __name__): del sys.meta_path[i] break del i, importer # Finally, add the importer to the meta path import hook. sys.meta_path.append(_importer) PK ZD\M0site-packages/pkg_resources/_vendor/__init__.pyonu[ fc@sdS(N((((sB/usr/lib/python2.7/site-packages/pkg_resources/_vendor/__init__.pyttPK Z] dQdQ/site-packages/pkg_resources/_vendor/appdirs.pyonu[ fc@s@dZd,ZdjeeeZddlZddlZejddkZ e r^eZ nej j drddl Z e j ddZej d rd Zqej d rd Zqd Zn ej ZdddedZdddedZdddedZdddedZdddedZdddedZdefdYZdZdZdZdZed kr!yddlZ eZ!Wq!e"k ryddl#m$Z$eZ!Wqe"k ryddl%Z&eZ!Wqe"k reZ!qXqXq!Xne'dkr<dZ(dZ)d-Z*d$GHee(e)d%d&Z+x&e*D]Z,d'e,e-e+e,fGHq`Wd(GHee(e)Z+x&e*D]Z,d'e,e-e+e,fGHqWd)GHee(Z+x&e*D]Z,d'e,e-e+e,fGHqWd*GHee(d+eZ+x)e*D]Z,d'e,e-e+e,fGHqWndS(.syUtilities for determining application-specific dirs. See for details and usage. iiit.iNitjavatWindowstwin32tMactdarwintlinux2cCs6tdkr|dkr!|}n|r-dp0d}tjjt|}|r|tk rxtjj|||}qtjj||}qn{tdkrtjjd}|rtjj||}qn<tj dtjjd}|rtjj||}n|r2|r2tjj||}n|S( sJReturn full path to the user-specific data dir for this application. "appname" is the name of application. If None, just the system directory is returned. "appauthor" (only used on Windows) is the name of the appauthor or distributing body for this application. Typically it is the owning company name. This falls back to appname. You may pass False to disable it. "version" is an optional version path element to append to the path. You might want to use this if you want multiple versions of your app to be able to run independently. If used, this would typically be ".". Only applied when appname is present. "roaming" (boolean, default False) can be set True to use the Windows roaming appdata directory. That means that for users on a Windows network setup for roaming profiles, this user data will be sync'd on login. See for a discussion of issues. Typical user data directories are: Mac OS X: ~/Library/Application Support/ Unix: ~/.local/share/ # or in $XDG_DATA_HOME, if defined Win XP (not roaming): C:\Documents and Settings\\Application Data\\ Win XP (roaming): C:\Documents and Settings\\Local Settings\Application Data\\ Win 7 (not roaming): C:\Users\\AppData\Local\\ Win 7 (roaming): C:\Users\\AppData\Roaming\\ For Unix, we follow the XDG spec and support $XDG_DATA_HOME. That means, by default "~/.local/share/". Rt CSIDL_APPDATAtCSIDL_LOCAL_APPDATARs~/Library/Application Support/t XDG_DATA_HOMEs~/.local/shareN( tsystemtNonetostpathtnormpatht_get_win_foldertFalsetjoint expandusertgetenv(tappnamet appauthortversiontroamingtconstR ((sA/usr/lib/python2.7/site-packages/pkg_resources/_vendor/appdirs.pyt user_data_dir-s&      cCstdkr|d kr!|}ntjjtd}|r|tk rftjj|||}q~tjj||}qntdkrtjjd}|rtjj||}qntj dtj jddg}g|j tj D]$}tjj|j tj ^q}|rs|rEtjj||}ng|D]}tj j||g^qL}n|rtj j|}n |d}|S|r|rtjj||}n|S( siReturn full path to the user-shared data dir for this application. "appname" is the name of application. If None, just the system directory is returned. "appauthor" (only used on Windows) is the name of the appauthor or distributing body for this application. Typically it is the owning company name. This falls back to appname. You may pass False to disable it. 
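The six.py source embedded just above documents the compatibility layer it provides: the lazily resolved six.moves namespace (via _SixMetaPathImporter), dict-iteration shims such as iteritems, the b()/u() literal helpers, and the python_2_unicode_compatible decorator. A minimal usage sketch of that API, relying only on helpers defined in the source above (the Greeting class is an invented illustration, not part of the library):

    from __future__ import print_function
    import six
    from six.moves.urllib.parse import urlencode   # resolved lazily by the six.moves importer

    @six.python_2_unicode_compatible
    class Greeting(object):
        def __str__(self):
            return u"hello"                 # return text; six wires up __unicode__ on Python 2

    print(urlencode({"q": "six"}))          # 'q=six' under both Python 2 and 3
    for key, value in six.iteritems({"answer": 42}):
        print(key, value)                   # no .items()/.iteritems() branching needed
    print(six.b("raw"), six.u("text"))      # byte literal vs. text literal helpers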
"version" is an optional version path element to append to the path. You might want to use this if you want multiple versions of your app to be able to run independently. If used, this would typically be ".". Only applied when appname is present. "multipath" is an optional parameter only applicable to *nix which indicates that the entire list of data dirs should be returned. By default, the first item from XDG_DATA_DIRS is returned, or '/usr/local/share/', if XDG_DATA_DIRS is not set Typical user data directories are: Mac OS X: /Library/Application Support/ Unix: /usr/local/share/ or /usr/share/ Win XP: C:\Documents and Settings\All Users\Application Data\\ Vista: (Fail! "C:\ProgramData" is a hidden *system* directory on Vista.) Win 7: C:\ProgramData\\ # Hidden, but writeable on Win 7. For Unix, this is using the $XDG_DATA_DIRS[0] default. WARNING: Do not use this on Windows. See the Vista-Fail note above for why. RtCSIDL_COMMON_APPDATARs/Library/Application Supportt XDG_DATA_DIRSs/usr/local/shares /usr/shareiN(R R R R RRRRRRtpathseptsplittrstriptsep(RRRt multipathR txtpathlist((sA/usr/lib/python2.7/site-packages/pkg_resources/_vendor/appdirs.pyt site_data_dirds4      =.  cCstdkr$t||d|}n<tjdtjjd}|r`tjj||}n|r|rtjj||}n|S(sReturn full path to the user-specific config dir for this application. "appname" is the name of application. If None, just the system directory is returned. "appauthor" (only used on Windows) is the name of the appauthor or distributing body for this application. Typically it is the owning company name. This falls back to appname. You may pass False to disable it. "version" is an optional version path element to append to the path. You might want to use this if you want multiple versions of your app to be able to run independently. If used, this would typically be ".". Only applied when appname is present. "roaming" (boolean, default False) can be set True to use the Windows roaming appdata directory. That means that for users on a Windows network setup for roaming profiles, this user data will be sync'd on login. See for a discussion of issues. Typical user data directories are: Mac OS X: same as user_data_dir Unix: ~/.config/ # or in $XDG_CONFIG_HOME, if defined Win *: same as user_data_dir For Unix, we follow the XDG spec and support $XDG_CONFIG_HOME. That means, by deafult "~/.config/". RRtXDG_CONFIG_HOMEs ~/.config(RRN(R RR R RR RR(RRRRR ((sA/usr/lib/python2.7/site-packages/pkg_resources/_vendor/appdirs.pytuser_config_dirs  cCs tdkrBt||}|r|rtjj||}qntjdd}g|jtjD]$}tjj|j tj ^qg}|r|rtjj||}ng|D]}tj j||g^q}n|rtjj|}n |d}|S(sReturn full path to the user-shared data dir for this application. "appname" is the name of application. If None, just the system directory is returned. "appauthor" (only used on Windows) is the name of the appauthor or distributing body for this application. Typically it is the owning company name. This falls back to appname. You may pass False to disable it. "version" is an optional version path element to append to the path. You might want to use this if you want multiple versions of your app to be able to run independently. If used, this would typically be ".". Only applied when appname is present. "multipath" is an optional parameter only applicable to *nix which indicates that the entire list of config dirs should be returned. 
By default, the first item from XDG_CONFIG_DIRS is returned, or '/etc/xdg/', if XDG_CONFIG_DIRS is not set Typical user data directories are: Mac OS X: same as site_data_dir Unix: /etc/xdg/ or $XDG_CONFIG_DIRS[i]/ for each value in $XDG_CONFIG_DIRS Win *: same as site_data_dir Vista: (Fail! "C:\ProgramData" is a hidden *system* directory on Vista.) For Unix, this is using the $XDG_CONFIG_DIRS[0] default, if multipath=False WARNING: Do not use this on Windows. See the Vista-Fail note above for why. RRtXDG_CONFIG_DIRSs/etc/xdgi(RR( R R#R R RRRRRRR(RRRR R R!R"((sA/usr/lib/python2.7/site-packages/pkg_resources/_vendor/appdirs.pytsite_config_dirs  =. cCsBtdkr|dkr!|}ntjjtd}|r|tk rftjj|||}ntjj||}|rtjj|d}qqn{tdkrtjjd}|rtjj||}qn<tj dtjjd}|rtjj||}n|r>|r>tjj||}n|S( sReturn full path to the user-specific cache dir for this application. "appname" is the name of application. If None, just the system directory is returned. "appauthor" (only used on Windows) is the name of the appauthor or distributing body for this application. Typically it is the owning company name. This falls back to appname. You may pass False to disable it. "version" is an optional version path element to append to the path. You might want to use this if you want multiple versions of your app to be able to run independently. If used, this would typically be ".". Only applied when appname is present. "opinion" (boolean) can be False to disable the appending of "Cache" to the base app data dir for Windows. See discussion below. Typical user cache directories are: Mac OS X: ~/Library/Caches/ Unix: ~/.cache/ (XDG default) Win XP: C:\Documents and Settings\\Local Settings\Application Data\\\Cache Vista: C:\Users\\AppData\Local\\\Cache On Windows the only suggestion in the MSDN docs is that local settings go in the `CSIDL_LOCAL_APPDATA` directory. This is identical to the non-roaming app data dir (the default returned by `user_data_dir` above). Apps typically put cache data somewhere *under* the given dir here. Some examples: ...\Mozilla\Firefox\Profiles\\Cache ...\Acme\SuperApp\Cache\1.0 OPINION: This function appends "Cache" to the `CSIDL_LOCAL_APPDATA` value. This can be disabled with the `opinion=False` option. RRtCacheRs~/Library/CachestXDG_CACHE_HOMEs~/.cacheN( R R R R RRRRRR(RRRtopinionR ((sA/usr/lib/python2.7/site-packages/pkg_resources/_vendor/appdirs.pytuser_cache_dirs(!      cCstdkr0tjjtjjd|}n{tdkrut|||}t}|rtjj|d}qn6t|||}t}|rtjj|d}n|r|rtjj||}n|S(sReturn full path to the user-specific log dir for this application. "appname" is the name of application. If None, just the system directory is returned. "appauthor" (only used on Windows) is the name of the appauthor or distributing body for this application. Typically it is the owning company name. This falls back to appname. You may pass False to disable it. "version" is an optional version path element to append to the path. You might want to use this if you want multiple versions of your app to be able to run independently. If used, this would typically be ".". Only applied when appname is present. "opinion" (boolean) can be False to disable the appending of "Logs" to the base app data dir for Windows, and "log" to the base cache dir for Unix. See discussion below. 
Typical user cache directories are: Mac OS X: ~/Library/Logs/ Unix: ~/.cache//log # or under $XDG_CACHE_HOME if defined Win XP: C:\Documents and Settings\\Local Settings\Application Data\\\Logs Vista: C:\Users\\AppData\Local\\\Logs On Windows the only suggestion in the MSDN docs is that local settings go in the `CSIDL_LOCAL_APPDATA` directory. (Note: I'm interested in examples of what some windows apps use for a logs dir.) OPINION: This function appends "Logs" to the `CSIDL_LOCAL_APPDATA` value for Windows and appends "log" to the user cache dir for Unix. This can be disabled with the `opinion=False` option. Rs~/Library/LogsRtLogstlog(R R R RRRRR+(RRRR*R ((sA/usr/lib/python2.7/site-packages/pkg_resources/_vendor/appdirs.pyt user_log_dir:s     tAppDirscBs}eZdZddeedZedZedZedZ edZ edZ edZ RS( s1Convenience wrapper for getting application dirs.cCs1||_||_||_||_||_dS(N(RRRRR (tselfRRRRR ((sA/usr/lib/python2.7/site-packages/pkg_resources/_vendor/appdirs.pyt__init__os     cCs%t|j|jd|jd|jS(NRR(RRRRR(R0((sA/usr/lib/python2.7/site-packages/pkg_resources/_vendor/appdirs.pyRwscCs%t|j|jd|jd|jS(NRR (R#RRRR (R0((sA/usr/lib/python2.7/site-packages/pkg_resources/_vendor/appdirs.pyR#|scCs%t|j|jd|jd|jS(NRR(R%RRRR(R0((sA/usr/lib/python2.7/site-packages/pkg_resources/_vendor/appdirs.pyR%scCs%t|j|jd|jd|jS(NRR (R'RRRR (R0((sA/usr/lib/python2.7/site-packages/pkg_resources/_vendor/appdirs.pyR'scCst|j|jd|jS(NR(R+RRR(R0((sA/usr/lib/python2.7/site-packages/pkg_resources/_vendor/appdirs.pyR+scCst|j|jd|jS(NR(R.RRR(R0((sA/usr/lib/python2.7/site-packages/pkg_resources/_vendor/appdirs.pyR.sN( t__name__t __module__t__doc__R RR1tpropertyRR#R%R'R+R.(((sA/usr/lib/python2.7/site-packages/pkg_resources/_vendor/appdirs.pyR/ms  cCs\ddl}idd6dd6dd6|}|j|jd }|j||\}}|S( sThis is a fallback technique at best. I'm not sure if using the registry for this guarantees us the correct answer for all CSIDL_* names. iNtAppDataRsCommon AppDataRs Local AppDataRs@Software\Microsoft\Windows\CurrentVersion\Explorer\Shell Folders(t_winregtOpenKeytHKEY_CURRENT_USERt QueryValueEx(t csidl_nameR7tshell_folder_nametkeytdirttype((sA/usr/lib/python2.7/site-packages/pkg_resources/_vendor/appdirs.pyt_get_win_folder_from_registrys  cCsddlm}m}|jdt||dd}yt|}t}x*|D]"}t|dkrSt}PqSqSW|ryddl }|j |}Wqt k rqXnWnt k rnX|S(Ni(tshellcontshellii( twin32com.shellRARBtSHGetFolderPathtgetattrtunicodeRtordtTruetwin32apitGetShortPathNamet ImportErrort UnicodeError(R;RARBR>t has_high_chartcRI((sA/usr/lib/python2.7/site-packages/pkg_resources/_vendor/appdirs.pyt_get_win_folder_with_pywin32s$!      cCsddl}idd6dd6dd6|}|jd}|jjjd|dd |t}x*|D]"}t|d krft}PqfqfW|r|jd}|jj j |j |dr|}qn|j S( NiiRi#RiRiii( tctypestcreate_unicode_buffertwindlltshell32tSHGetFolderPathWR RRGRHtkernel32tGetShortPathNameWtvalue(R;RPt csidl_consttbufRMRNtbuf2((sA/usr/lib/python2.7/site-packages/pkg_resources/_vendor/appdirs.pyt_get_win_folder_with_ctypess$   c Cs=ddl}ddlm}ddlm}|jjd}|jd|}|jj }|j dt |j |d|j j||jj|jjd}t}x*|D]"} t| dkrt}PqqW|r9|jd|}|jj } tj|||r9|jj|jjd}q9n|S(Ni(tjna(RiRNsi(tarraytcom.sunR\tcom.sun.jna.platformRtWinDeftMAX_PATHtzerostShell32tINSTANCERDR REtShlObjtSHGFP_TYPE_CURRENTtNativettoStringttostringRRRGRHtKernel32tkernalRJ( R;R]R\Rtbuf_sizeRYRBR>RMRNtkernel((sA/usr/lib/python2.7/site-packages/pkg_resources/_vendor/appdirs.pyt_get_win_folder_with_jnas&  +!  
'(RRt__main__tMyAppt MyCompanyRR#R%R'R+R.s%-- app dirs (with optional 'version')Rs1.0s%s: %ss) -- app dirs (without optional 'version')s+ -- app dirs (without optional 'appauthor')s( -- app dirs (with disabled 'appauthor')R(iii(RR#R%R'R+R.(.R4t__version_info__Rtmaptstrt __version__tsysR t version_infotPY3RFtplatformt startswithtjava_vertos_nameR R RRR#R%R'RHR+R.tobjectR/R@ROR[RnRCtwin32comRRKRPRRt com.sun.jnatcomR2RRtpropstdirstpropRE(((sA/usr/lib/python2.7/site-packages/pkg_resources/_vendor/appdirs.pyt s~        7B(393+                   PK Z/site-packages/pkg_resources/_vendor/__init__.pynu[PK Z5B\|\|+site-packages/pkg_resources/_vendor/six.pyonu[ fcA@@sKdZddlmZddlZddlZddlZddlZddlZdZdZ ej ddkZ ej ddkZ ej dd!dakZ e refZefZefZeZeZejZnefZeefZeejfZeZeZejjd r$edcZnVd efd YZ ye!e Wne"k rjedeZn XedgZ[ dZ#dZ$defdYZ%de%fdYZ&dej'fdYZ(de%fdYZ)defdYZ*e*e+Z,de(fdYZ-e)dddde)d d!d"d#d e)d$d!d!d%d$e)d&d'd"d(d&e)d)d'd*e)d+d!d"d,d+e)d-d.d.d/d-e)d0d.d.d-d0e)d1d'd"d2d1e)d3d'e rd4nd5d6e)d7d'd8e)d9d:d;d<e)ddde)d=d=d>e)d?d?d>e)d@d@d>e)d2d'd"d2d1e)dAd!d"dBdAe)dCd!d!dDdCe&d"d'e&dEdFe&dGdHe&dIdJdKe&dLdMdLe&dNdOdPe&dQdRdSe&dTdUdVe&dWdXdYe&dZd[d\e&d]d^d_e&d`dadbe&dcdddee&dfdgdhe&dididje&dkdkdje&dldldje&dmdmdne&dodpe&dqdre&dsdte&dudvdue&dwdxe&dydzd{e&d|d}d~e&ddde&ddde&ddde&ddde&ddde&ddde&ddde&ddde&ddd~e&ddde&ddde&ddde&de+dde&de+dde&de+de+de&ddde&ddde&dddg>Z.ejdkr;e.e&ddg7Z.nxJe.D]BZ/e0e-e/j1e/e2e/e&rBe,j3e/de/j1qBqBW[/e.e-_.e-e+dZ4e,j3e4dde(fdYZ5e)ddde)ddde)ddde)ddde)ddde)ddde)ddde)ddde)ddde)ddde)d<dde)ddde)ddde)ddde)ddde)ddde)ddde)ddde)ddde)ddde)ddde)ddde)dddgZ6x!e6D]Z/e0e5e/j1e/q0W[/e6e5_.e,j3e5e+dddde(fdYZ7e)ddde)ddde)dddgZ8x!e8D]Z/e0e7e/j1e/qW[/e8e7_.e,j3e7e+dddde(fdYZ9e)ddde)ddde)ddde)ddde)ddde)ddde)ddde)ddde)ddde)ddde)ddde)ddde)ddde)ddde)ddde)ddde)ddde)ddde)ddde)ddde)ddde)ddde)ddde)ddde)ddde)ddde)ddde)ddde)ddde)ddde)ddde)ddde)dddg!Z:x!e:D]Z/e0e9e/j1e/q W[/e:e9_.e,j3e9e+dddde(fdYZ;e)ddde)ddde)ddde)dddgZ<x!e<D]Z/e0e;e/j1e/q W[/e<e;_.e,j3e;e+d d d d e(fd YZ=e)dddgZ>x!e>D]Z/e0e=e/j1e/q; W[/e>e=_.e,j3e=e+ddddej'fdYZ?e,j3e?e+dddZ@dZAe r dZBdZCdZDdZEdZFdZGn$dZBdZCdZDd ZEd!ZFd"ZGy eHZIWneJk r= d#ZInXeIZHy eKZKWneJk rj d$ZKnXe r d%ZLejMZNd&ZOeZPn7d'ZLd(ZNd)ZOd*efd+YZPeKZKe#eLd,ejQeBZRejQeCZSejQeDZTejQeEZUejQeFZVejQeGZWe rd-ZXd.ZYd/ZZd0Z[ej\d1Z]ej\d2Z^ej\d3Z_nQd4ZXd5ZYd6ZZd7Z[ej\d8Z]ej\d9Z^ej\d:Z_e#eXd;e#eYd<e#eZd=e#e[d>e rd?Z`d@ZaebZcddldZdedjedAjfZg[dejhdZiejjZkelZmddlnZnenjoZoenjpZpdBZqej d d krdCZrdDZsq4dEZrdFZsnpdGZ`dHZaecZcebZgdIZidJZkejtejuevZmddloZoeojoZoZpdKZqdCZrdDZse#e`dLe#eadMdNZwdOZxdPZye reze4j{dQZ|ddRZ~ndddSZ|e|dTej d dhkre|dUn)ej d dikre|dVn dWZeze4j{dXdZedkrdYZnej d djkrDeZdZZne#e~d[ej dd!dkkrejejd\Zn ejZd]Zd^Zd_ZgZe+Zejd`dk rge_nejr7xOeejD]>\ZZeej+dkrej1e+kreje=PqqW[[nejje,dS(ls6Utilities for writing code that runs on Python 2 and 3i(tabsolute_importNs'Benjamin Peterson s1.10.0iiitjavaiitXcB@seZdZRS(cC@sdS(NiiI((tself((s=/usr/lib/python2.7/site-packages/pkg_resources/_vendor/six.pyt__len__>s(t__name__t __module__R(((s=/usr/lib/python2.7/site-packages/pkg_resources/_vendor/six.pyR<si?cC@s ||_dS(s Add documentation to a function.N(t__doc__(tfunctdoc((s=/usr/lib/python2.7/site-packages/pkg_resources/_vendor/six.pyt_add_docKscC@st|tj|S(s7Import module, returning the module after the last dot.(t __import__tsystmodules(tname((s=/usr/lib/python2.7/site-packages/pkg_resources/_vendor/six.pyt_import_modulePs t _LazyDescrcB@seZdZdZRS(cC@s ||_dS(N(R(RR((s=/usr/lib/python2.7/site-packages/pkg_resources/_vendor/six.pyt__init__XscC@sN|j}t||j|yt|j|jWntk rInX|S(N(t_resolvetsetattrRtdelattrt 
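Only the byte-compiled appdirs.pyo is embedded in this archive, but its docstrings (readable above) spell out the module's API: user_data_dir/site_data_dir, user_config_dir/site_config_dir, user_cache_dir, user_log_dir, and the AppDirs convenience wrapper, with optional version, roaming, multipath and opinion arguments. A minimal sketch of that API, assuming the standalone appdirs package is importable rather than this vendored copy; the "MyApp"/"MyCompany" names mirror the demo values in the module's own __main__ block:

    from __future__ import print_function
    from appdirs import AppDirs, user_data_dir, user_cache_dir

    dirs = AppDirs("MyApp", "MyCompany", version="1.0")
    print(dirs.user_data_dir)    # e.g. ~/.local/share/MyApp/1.0 on Linux
    print(dirs.user_cache_dir)   # e.g. ~/.cache/MyApp/1.0
    print(dirs.user_log_dir)     # cache dir plus "log" on Unix, "Logs" on Windows

    print(user_data_dir("MyApp", "MyCompany", roaming=True))    # roaming AppData on Windows
    print(user_cache_dir("MyApp", "MyCompany", opinion=False))  # suppress the trailing "Cache"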
site-packages/pkg_resources/_vendor/pyparsing.py

# module pyparsing.py
#
# Copyright (c) 2003-2016 Paul T. McGuire
#
# Permission is hereby granted, free of charge, to any person obtaining
# a copy of this software and associated documentation files (the
# "Software"), to deal in the Software without restriction, including
# without limitation the rights to use, copy, modify, merge, publish,
# distribute, sublicense, and/or sell copies of the Software, and to
# permit persons to whom the Software is furnished to do so, subject to
# the following conditions:
#
# The above copyright notice and this permission notice shall be
# included in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.
# IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY
# CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT,
# TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE
# SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
#

__doc__ = \
"""
pyparsing module - Classes and methods to define and execute parsing grammars

The pyparsing module is an alternative approach to creating and executing simple grammars,
vs. the traditional lex/yacc approach, or the use of regular expressions.  With pyparsing, you
don't need to learn a new syntax for defining grammars or matching expressions - the parsing
module provides a library of classes that you use to construct the grammar directly in Python.

Here is a program to parse "Hello, World!" (or any greeting of the form
C{"<salutation>, <addressee>!"}), built up using L{Word}, L{Literal}, and L{And} elements
(L{'+'} operator gives L{And} expressions, strings are auto-converted to
L{Literal} expressions)::

    from pyparsing import Word, alphas

    # define grammar of a greeting
    greet = Word(alphas) + "," + Word(alphas) + "!"

    hello = "Hello, World!"
print (hello, "->", greet.parseString(hello)) The program outputs the following:: Hello, World! -> ['Hello', ',', 'World', '!'] The Python representation of the grammar is quite readable, owing to the self-explanatory class names, and the use of '+', '|' and '^' operators. The L{ParseResults} object returned from L{ParserElement.parseString} can be accessed as a nested list, a dictionary, or an object with named attributes. The pyparsing module handles some of the problems that are typically vexing when writing text parsers: - extra or missing whitespace (the above program will also handle "Hello,World!", "Hello , World !", etc.) - quoted strings - embedded comments """ __version__ = "2.1.10" __versionTime__ = "07 Oct 2016 01:31 UTC" __author__ = "Paul McGuire " import string from weakref import ref as wkref import copy import sys import warnings import re import sre_constants import collections import pprint import traceback import types from datetime import datetime try: from _thread import RLock except ImportError: from threading import RLock try: from collections import OrderedDict as _OrderedDict except ImportError: try: from ordereddict import OrderedDict as _OrderedDict except ImportError: _OrderedDict = None #~ sys.stderr.write( "testing pyparsing module, version %s, %s\n" % (__version__,__versionTime__ ) ) __all__ = [ 'And', 'CaselessKeyword', 'CaselessLiteral', 'CharsNotIn', 'Combine', 'Dict', 'Each', 'Empty', 'FollowedBy', 'Forward', 'GoToColumn', 'Group', 'Keyword', 'LineEnd', 'LineStart', 'Literal', 'MatchFirst', 'NoMatch', 'NotAny', 'OneOrMore', 'OnlyOnce', 'Optional', 'Or', 'ParseBaseException', 'ParseElementEnhance', 'ParseException', 'ParseExpression', 'ParseFatalException', 'ParseResults', 'ParseSyntaxException', 'ParserElement', 'QuotedString', 'RecursiveGrammarException', 'Regex', 'SkipTo', 'StringEnd', 'StringStart', 'Suppress', 'Token', 'TokenConverter', 'White', 'Word', 'WordEnd', 'WordStart', 'ZeroOrMore', 'alphanums', 'alphas', 'alphas8bit', 'anyCloseTag', 'anyOpenTag', 'cStyleComment', 'col', 'commaSeparatedList', 'commonHTMLEntity', 'countedArray', 'cppStyleComment', 'dblQuotedString', 'dblSlashComment', 'delimitedList', 'dictOf', 'downcaseTokens', 'empty', 'hexnums', 'htmlComment', 'javaStyleComment', 'line', 'lineEnd', 'lineStart', 'lineno', 'makeHTMLTags', 'makeXMLTags', 'matchOnlyAtCol', 'matchPreviousExpr', 'matchPreviousLiteral', 'nestedExpr', 'nullDebugAction', 'nums', 'oneOf', 'opAssoc', 'operatorPrecedence', 'printables', 'punc8bit', 'pythonStyleComment', 'quotedString', 'removeQuotes', 'replaceHTMLEntity', 'replaceWith', 'restOfLine', 'sglQuotedString', 'srange', 'stringEnd', 'stringStart', 'traceParseAction', 'unicodeString', 'upcaseTokens', 'withAttribute', 'indentedBlock', 'originalTextFor', 'ungroup', 'infixNotation','locatedExpr', 'withClass', 'CloseMatch', 'tokenMap', 'pyparsing_common', ] system_version = tuple(sys.version_info)[:3] PY_3 = system_version[0] == 3 if PY_3: _MAX_INT = sys.maxsize basestring = str unichr = chr _ustr = str # build list of single arg builtins, that can be used as parse actions singleArgBuiltins = [sum, len, sorted, reversed, list, tuple, set, any, all, min, max] else: _MAX_INT = sys.maxint range = xrange def _ustr(obj): """Drop-in replacement for str(obj) that tries to be Unicode friendly. It first tries str(obj). If that fails with a UnicodeEncodeError, then it tries unicode(obj). It then < returns the unicode object | encodes it with the default encoding | ... >. 
""" if isinstance(obj,unicode): return obj try: # If this works, then _ustr(obj) has the same behaviour as str(obj), so # it won't break any existing code. return str(obj) except UnicodeEncodeError: # Else encode it ret = unicode(obj).encode(sys.getdefaultencoding(), 'xmlcharrefreplace') xmlcharref = Regex('&#\d+;') xmlcharref.setParseAction(lambda t: '\\u' + hex(int(t[0][2:-1]))[2:]) return xmlcharref.transformString(ret) # build list of single arg builtins, tolerant of Python version, that can be used as parse actions singleArgBuiltins = [] import __builtin__ for fname in "sum len sorted reversed list tuple set any all min max".split(): try: singleArgBuiltins.append(getattr(__builtin__,fname)) except AttributeError: continue _generatorType = type((y for y in range(1))) def _xml_escape(data): """Escape &, <, >, ", ', etc. in a string of data.""" # ampersand must be replaced first from_symbols = '&><"\'' to_symbols = ('&'+s+';' for s in "amp gt lt quot apos".split()) for from_,to_ in zip(from_symbols, to_symbols): data = data.replace(from_, to_) return data class _Constants(object): pass alphas = string.ascii_uppercase + string.ascii_lowercase nums = "0123456789" hexnums = nums + "ABCDEFabcdef" alphanums = alphas + nums _bslash = chr(92) printables = "".join(c for c in string.printable if c not in string.whitespace) class ParseBaseException(Exception): """base exception class for all parsing runtime exceptions""" # Performance tuning: we construct a *lot* of these, so keep this # constructor as small and fast as possible def __init__( self, pstr, loc=0, msg=None, elem=None ): self.loc = loc if msg is None: self.msg = pstr self.pstr = "" else: self.msg = msg self.pstr = pstr self.parserElement = elem self.args = (pstr, loc, msg) @classmethod def _from_exception(cls, pe): """ internal factory method to simplify creating one type of ParseException from another - avoids having __init__ signature conflicts among subclasses """ return cls(pe.pstr, pe.loc, pe.msg, pe.parserElement) def __getattr__( self, aname ): """supported attributes by name are: - lineno - returns the line number of the exception text - col - returns the column number of the exception text - line - returns the line containing the exception text """ if( aname == "lineno" ): return lineno( self.loc, self.pstr ) elif( aname in ("col", "column") ): return col( self.loc, self.pstr ) elif( aname == "line" ): return line( self.loc, self.pstr ) else: raise AttributeError(aname) def __str__( self ): return "%s (at char %d), (line:%d, col:%d)" % \ ( self.msg, self.loc, self.lineno, self.column ) def __repr__( self ): return _ustr(self) def markInputline( self, markerString = ">!<" ): """Extracts the exception line from the input string, and marks the location of the exception with a special symbol. 
""" line_str = self.line line_column = self.column - 1 if markerString: line_str = "".join((line_str[:line_column], markerString, line_str[line_column:])) return line_str.strip() def __dir__(self): return "lineno col line".split() + dir(type(self)) class ParseException(ParseBaseException): """ Exception thrown when parse expressions don't match class; supported attributes by name are: - lineno - returns the line number of the exception text - col - returns the column number of the exception text - line - returns the line containing the exception text Example:: try: Word(nums).setName("integer").parseString("ABC") except ParseException as pe: print(pe) print("column: {}".format(pe.col)) prints:: Expected integer (at char 0), (line:1, col:1) column: 1 """ pass class ParseFatalException(ParseBaseException): """user-throwable exception thrown when inconsistent parse content is found; stops all parsing immediately""" pass class ParseSyntaxException(ParseFatalException): """just like L{ParseFatalException}, but thrown internally when an L{ErrorStop} ('-' operator) indicates that parsing is to stop immediately because an unbacktrackable syntax error has been found""" pass #~ class ReparseException(ParseBaseException): #~ """Experimental class - parse actions can raise this exception to cause #~ pyparsing to reparse the input string: #~ - with a modified input string, and/or #~ - with a modified start location #~ Set the values of the ReparseException in the constructor, and raise the #~ exception in a parse action to cause pyparsing to use the new string/location. #~ Setting the values as None causes no change to be made. #~ """ #~ def __init_( self, newstring, restartLoc ): #~ self.newParseText = newstring #~ self.reparseLoc = restartLoc class RecursiveGrammarException(Exception): """exception thrown by L{ParserElement.validate} if the grammar could be improperly recursive""" def __init__( self, parseElementList ): self.parseElementTrace = parseElementList def __str__( self ): return "RecursiveGrammarException: %s" % self.parseElementTrace class _ParseResultsWithOffset(object): def __init__(self,p1,p2): self.tup = (p1,p2) def __getitem__(self,i): return self.tup[i] def __repr__(self): return repr(self.tup[0]) def setOffset(self,i): self.tup = (self.tup[0],i) class ParseResults(object): """ Structured parse results, to provide multiple means of access to the parsed data: - as a list (C{len(results)}) - by list index (C{results[0], results[1]}, etc.) 
- by attribute (C{results.} - see L{ParserElement.setResultsName}) Example:: integer = Word(nums) date_str = (integer.setResultsName("year") + '/' + integer.setResultsName("month") + '/' + integer.setResultsName("day")) # equivalent form: # date_str = integer("year") + '/' + integer("month") + '/' + integer("day") # parseString returns a ParseResults object result = date_str.parseString("1999/12/31") def test(s, fn=repr): print("%s -> %s" % (s, fn(eval(s)))) test("list(result)") test("result[0]") test("result['month']") test("result.day") test("'month' in result") test("'minutes' in result") test("result.dump()", str) prints:: list(result) -> ['1999', '/', '12', '/', '31'] result[0] -> '1999' result['month'] -> '12' result.day -> '31' 'month' in result -> True 'minutes' in result -> False result.dump() -> ['1999', '/', '12', '/', '31'] - day: 31 - month: 12 - year: 1999 """ def __new__(cls, toklist=None, name=None, asList=True, modal=True ): if isinstance(toklist, cls): return toklist retobj = object.__new__(cls) retobj.__doinit = True return retobj # Performance tuning: we construct a *lot* of these, so keep this # constructor as small and fast as possible def __init__( self, toklist=None, name=None, asList=True, modal=True, isinstance=isinstance ): if self.__doinit: self.__doinit = False self.__name = None self.__parent = None self.__accumNames = {} self.__asList = asList self.__modal = modal if toklist is None: toklist = [] if isinstance(toklist, list): self.__toklist = toklist[:] elif isinstance(toklist, _generatorType): self.__toklist = list(toklist) else: self.__toklist = [toklist] self.__tokdict = dict() if name is not None and name: if not modal: self.__accumNames[name] = 0 if isinstance(name,int): name = _ustr(name) # will always return a str, but use _ustr for consistency self.__name = name if not (isinstance(toklist, (type(None), basestring, list)) and toklist in (None,'',[])): if isinstance(toklist,basestring): toklist = [ toklist ] if asList: if isinstance(toklist,ParseResults): self[name] = _ParseResultsWithOffset(toklist.copy(),0) else: self[name] = _ParseResultsWithOffset(ParseResults(toklist[0]),0) self[name].__name = name else: try: self[name] = toklist[0] except (KeyError,TypeError,IndexError): self[name] = toklist def __getitem__( self, i ): if isinstance( i, (int,slice) ): return self.__toklist[i] else: if i not in self.__accumNames: return self.__tokdict[i][-1][0] else: return ParseResults([ v[0] for v in self.__tokdict[i] ]) def __setitem__( self, k, v, isinstance=isinstance ): if isinstance(v,_ParseResultsWithOffset): self.__tokdict[k] = self.__tokdict.get(k,list()) + [v] sub = v[0] elif isinstance(k,(int,slice)): self.__toklist[k] = v sub = v else: self.__tokdict[k] = self.__tokdict.get(k,list()) + [_ParseResultsWithOffset(v,0)] sub = v if isinstance(sub,ParseResults): sub.__parent = wkref(self) def __delitem__( self, i ): if isinstance(i,(int,slice)): mylen = len( self.__toklist ) del self.__toklist[i] # convert int to slice if isinstance(i, int): if i < 0: i += mylen i = slice(i, i+1) # get removed indices removed = list(range(*i.indices(mylen))) removed.reverse() # fixup indices in token dictionary for name,occurrences in self.__tokdict.items(): for j in removed: for k, (value, position) in enumerate(occurrences): occurrences[k] = _ParseResultsWithOffset(value, position - (position > j)) else: del self.__tokdict[i] def __contains__( self, k ): return k in self.__tokdict def __len__( self ): return len( self.__toklist ) def __bool__(self): return ( not not 
self.__toklist ) __nonzero__ = __bool__ def __iter__( self ): return iter( self.__toklist ) def __reversed__( self ): return iter( self.__toklist[::-1] ) def _iterkeys( self ): if hasattr(self.__tokdict, "iterkeys"): return self.__tokdict.iterkeys() else: return iter(self.__tokdict) def _itervalues( self ): return (self[k] for k in self._iterkeys()) def _iteritems( self ): return ((k, self[k]) for k in self._iterkeys()) if PY_3: keys = _iterkeys """Returns an iterator of all named result keys (Python 3.x only).""" values = _itervalues """Returns an iterator of all named result values (Python 3.x only).""" items = _iteritems """Returns an iterator of all named result key-value tuples (Python 3.x only).""" else: iterkeys = _iterkeys """Returns an iterator of all named result keys (Python 2.x only).""" itervalues = _itervalues """Returns an iterator of all named result values (Python 2.x only).""" iteritems = _iteritems """Returns an iterator of all named result key-value tuples (Python 2.x only).""" def keys( self ): """Returns all named result keys (as a list in Python 2.x, as an iterator in Python 3.x).""" return list(self.iterkeys()) def values( self ): """Returns all named result values (as a list in Python 2.x, as an iterator in Python 3.x).""" return list(self.itervalues()) def items( self ): """Returns all named result key-values (as a list of tuples in Python 2.x, as an iterator in Python 3.x).""" return list(self.iteritems()) def haskeys( self ): """Since keys() returns an iterator, this method is helpful in bypassing code that looks for the existence of any defined results names.""" return bool(self.__tokdict) def pop( self, *args, **kwargs): """ Removes and returns item at specified index (default=C{last}). Supports both C{list} and C{dict} semantics for C{pop()}. If passed no argument or an integer argument, it will use C{list} semantics and pop tokens from the list of parsed tokens. If passed a non-integer argument (most likely a string), it will use C{dict} semantics and pop the corresponding value from any defined results names. A second default return value argument is supported, just as in C{dict.pop()}. Example:: def remove_first(tokens): tokens.pop(0) print(OneOrMore(Word(nums)).parseString("0 123 321")) # -> ['0', '123', '321'] print(OneOrMore(Word(nums)).addParseAction(remove_first).parseString("0 123 321")) # -> ['123', '321'] label = Word(alphas) patt = label("LABEL") + OneOrMore(Word(nums)) print(patt.parseString("AAB 123 321").dump()) # Use pop() in a parse action to remove named result (note that corresponding value is not # removed from list form of results) def remove_LABEL(tokens): tokens.pop("LABEL") return tokens patt.addParseAction(remove_LABEL) print(patt.parseString("AAB 123 321").dump()) prints:: ['AAB', '123', '321'] - LABEL: AAB ['AAB', '123', '321'] """ if not args: args = [-1] for k,v in kwargs.items(): if k == 'default': args = (args[0], v) else: raise TypeError("pop() got an unexpected keyword argument '%s'" % k) if (isinstance(args[0], int) or len(args) == 1 or args[0] in self): index = args[0] ret = self[index] del self[index] return ret else: defaultvalue = args[1] return defaultvalue def get(self, key, defaultValue=None): """ Returns named result matching the given key, or if there is no such name, then returns the given C{defaultValue} or C{None} if no C{defaultValue} is specified. Similar to C{dict.get()}. 
Example:: integer = Word(nums) date_str = integer("year") + '/' + integer("month") + '/' + integer("day") result = date_str.parseString("1999/12/31") print(result.get("year")) # -> '1999' print(result.get("hour", "not specified")) # -> 'not specified' print(result.get("hour")) # -> None """ if key in self: return self[key] else: return defaultValue def insert( self, index, insStr ): """ Inserts new element at location index in the list of parsed tokens. Similar to C{list.insert()}. Example:: print(OneOrMore(Word(nums)).parseString("0 123 321")) # -> ['0', '123', '321'] # use a parse action to insert the parse location in the front of the parsed results def insert_locn(locn, tokens): tokens.insert(0, locn) print(OneOrMore(Word(nums)).addParseAction(insert_locn).parseString("0 123 321")) # -> [0, '0', '123', '321'] """ self.__toklist.insert(index, insStr) # fixup indices in token dictionary for name,occurrences in self.__tokdict.items(): for k, (value, position) in enumerate(occurrences): occurrences[k] = _ParseResultsWithOffset(value, position + (position > index)) def append( self, item ): """ Add single element to end of ParseResults list of elements. Example:: print(OneOrMore(Word(nums)).parseString("0 123 321")) # -> ['0', '123', '321'] # use a parse action to compute the sum of the parsed integers, and add it to the end def append_sum(tokens): tokens.append(sum(map(int, tokens))) print(OneOrMore(Word(nums)).addParseAction(append_sum).parseString("0 123 321")) # -> ['0', '123', '321', 444] """ self.__toklist.append(item) def extend( self, itemseq ): """ Add sequence of elements to end of ParseResults list of elements. Example:: patt = OneOrMore(Word(alphas)) # use a parse action to append the reverse of the matched strings, to make a palindrome def make_palindrome(tokens): tokens.extend(reversed([t[::-1] for t in tokens])) return ''.join(tokens) print(patt.addParseAction(make_palindrome).parseString("lskdj sdlkjf lksd")) # -> 'lskdjsdlkjflksddsklfjkldsjdksl' """ if isinstance(itemseq, ParseResults): self += itemseq else: self.__toklist.extend(itemseq) def clear( self ): """ Clear all elements and results names. 
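
        Illustrative example (a minimal sketch; commented values are the expected output)::

            patt = Word(alphas)("label") + OneOrMore(Word(nums))
            result = patt.parseString("AAB 123 321")
            print(result)                              # expected -> ['AAB', '123', '321']
            result.clear()
            print(result)                              # expected -> []
            print(result.get("label", "no label"))     # expected -> no label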
""" del self.__toklist[:] self.__tokdict.clear() def __getattr__( self, name ): try: return self[name] except KeyError: return "" if name in self.__tokdict: if name not in self.__accumNames: return self.__tokdict[name][-1][0] else: return ParseResults([ v[0] for v in self.__tokdict[name] ]) else: return "" def __add__( self, other ): ret = self.copy() ret += other return ret def __iadd__( self, other ): if other.__tokdict: offset = len(self.__toklist) addoffset = lambda a: offset if a<0 else a+offset otheritems = other.__tokdict.items() otherdictitems = [(k, _ParseResultsWithOffset(v[0],addoffset(v[1])) ) for (k,vlist) in otheritems for v in vlist] for k,v in otherdictitems: self[k] = v if isinstance(v[0],ParseResults): v[0].__parent = wkref(self) self.__toklist += other.__toklist self.__accumNames.update( other.__accumNames ) return self def __radd__(self, other): if isinstance(other,int) and other == 0: # useful for merging many ParseResults using sum() builtin return self.copy() else: # this may raise a TypeError - so be it return other + self def __repr__( self ): return "(%s, %s)" % ( repr( self.__toklist ), repr( self.__tokdict ) ) def __str__( self ): return '[' + ', '.join(_ustr(i) if isinstance(i, ParseResults) else repr(i) for i in self.__toklist) + ']' def _asStringList( self, sep='' ): out = [] for item in self.__toklist: if out and sep: out.append(sep) if isinstance( item, ParseResults ): out += item._asStringList() else: out.append( _ustr(item) ) return out def asList( self ): """ Returns the parse results as a nested list of matching tokens, all converted to strings. Example:: patt = OneOrMore(Word(alphas)) result = patt.parseString("sldkj lsdkj sldkj") # even though the result prints in string-like form, it is actually a pyparsing ParseResults print(type(result), result) # -> ['sldkj', 'lsdkj', 'sldkj'] # Use asList() to create an actual list result_list = result.asList() print(type(result_list), result_list) # -> ['sldkj', 'lsdkj', 'sldkj'] """ return [res.asList() if isinstance(res,ParseResults) else res for res in self.__toklist] def asDict( self ): """ Returns the named parse results as a nested dictionary. Example:: integer = Word(nums) date_str = integer("year") + '/' + integer("month") + '/' + integer("day") result = date_str.parseString('12/31/1999') print(type(result), repr(result)) # -> (['12', '/', '31', '/', '1999'], {'day': [('1999', 4)], 'year': [('12', 0)], 'month': [('31', 2)]}) result_dict = result.asDict() print(type(result_dict), repr(result_dict)) # -> {'day': '1999', 'year': '12', 'month': '31'} # even though a ParseResults supports dict-like access, sometime you just need to have a dict import json print(json.dumps(result)) # -> Exception: TypeError: ... is not JSON serializable print(json.dumps(result.asDict())) # -> {"month": "31", "day": "1999", "year": "12"} """ if PY_3: item_fn = self.items else: item_fn = self.iteritems def toItem(obj): if isinstance(obj, ParseResults): if obj.haskeys(): return obj.asDict() else: return [toItem(v) for v in obj] else: return obj return dict((k,toItem(v)) for k,v in item_fn()) def copy( self ): """ Returns a new copy of a C{ParseResults} object. """ ret = ParseResults( self.__toklist ) ret.__tokdict = self.__tokdict.copy() ret.__parent = self.__parent ret.__accumNames.update( self.__accumNames ) ret.__name = self.__name return ret def asXML( self, doctag=None, namedItemsOnly=False, indent="", formatted=True ): """ (Deprecated) Returns the parse results as XML. 
Tags are created for tokens and lists that have defined results names. """ nl = "\n" out = [] namedItems = dict((v[1],k) for (k,vlist) in self.__tokdict.items() for v in vlist) nextLevelIndent = indent + " " # collapse out indents if formatting is not desired if not formatted: indent = "" nextLevelIndent = "" nl = "" selfTag = None if doctag is not None: selfTag = doctag else: if self.__name: selfTag = self.__name if not selfTag: if namedItemsOnly: return "" else: selfTag = "ITEM" out += [ nl, indent, "<", selfTag, ">" ] for i,res in enumerate(self.__toklist): if isinstance(res,ParseResults): if i in namedItems: out += [ res.asXML(namedItems[i], namedItemsOnly and doctag is None, nextLevelIndent, formatted)] else: out += [ res.asXML(None, namedItemsOnly and doctag is None, nextLevelIndent, formatted)] else: # individual token, see if there is a name for it resTag = None if i in namedItems: resTag = namedItems[i] if not resTag: if namedItemsOnly: continue else: resTag = "ITEM" xmlBodyText = _xml_escape(_ustr(res)) out += [ nl, nextLevelIndent, "<", resTag, ">", xmlBodyText, "" ] out += [ nl, indent, "" ] return "".join(out) def __lookup(self,sub): for k,vlist in self.__tokdict.items(): for v,loc in vlist: if sub is v: return k return None def getName(self): """ Returns the results name for this token expression. Useful when several different expressions might match at a particular location. Example:: integer = Word(nums) ssn_expr = Regex(r"\d\d\d-\d\d-\d\d\d\d") house_number_expr = Suppress('#') + Word(nums, alphanums) user_data = (Group(house_number_expr)("house_number") | Group(ssn_expr)("ssn") | Group(integer)("age")) user_info = OneOrMore(user_data) result = user_info.parseString("22 111-22-3333 #221B") for item in result: print(item.getName(), ':', item[0]) prints:: age : 22 ssn : 111-22-3333 house_number : 221B """ if self.__name: return self.__name elif self.__parent: par = self.__parent() if par: return par.__lookup(self) else: return None elif (len(self) == 1 and len(self.__tokdict) == 1 and next(iter(self.__tokdict.values()))[0][1] in (0,-1)): return next(iter(self.__tokdict.keys())) else: return None def dump(self, indent='', depth=0, full=True): """ Diagnostic method for listing out the contents of a C{ParseResults}. Accepts an optional C{indent} argument so that this string can be embedded in a nested display of other data. Example:: integer = Word(nums) date_str = integer("year") + '/' + integer("month") + '/' + integer("day") result = date_str.parseString('12/31/1999') print(result.dump()) prints:: ['12', '/', '31', '/', '1999'] - day: 1999 - month: 31 - year: 12 """ out = [] NL = '\n' out.append( indent+_ustr(self.asList()) ) if full: if self.haskeys(): items = sorted((str(k), v) for k,v in self.items()) for k,v in items: if out: out.append(NL) out.append( "%s%s- %s: " % (indent,(' '*depth), k) ) if isinstance(v,ParseResults): if v: out.append( v.dump(indent,depth+1) ) else: out.append(_ustr(v)) else: out.append(repr(v)) elif any(isinstance(vv,ParseResults) for vv in self): v = self for i,vv in enumerate(v): if isinstance(vv,ParseResults): out.append("\n%s%s[%d]:\n%s%s%s" % (indent,(' '*(depth)),i,indent,(' '*(depth+1)),vv.dump(indent,depth+1) )) else: out.append("\n%s%s[%d]:\n%s%s%s" % (indent,(' '*(depth)),i,indent,(' '*(depth+1)),_ustr(vv))) return "".join(out) def pprint(self, *args, **kwargs): """ Pretty-printer for parsed results as a list, using the C{pprint} module. Accepts additional positional or keyword args as defined for the C{pprint.pprint} method. 
(U{http://docs.python.org/3/library/pprint.html#pprint.pprint}) Example:: ident = Word(alphas, alphanums) num = Word(nums) func = Forward() term = ident | num | Group('(' + func + ')') func <<= ident + Group(Optional(delimitedList(term))) result = func.parseString("fna a,b,(fnb c,d,200),100") result.pprint(width=40) prints:: ['fna', ['a', 'b', ['(', 'fnb', ['c', 'd', '200'], ')'], '100']] """ pprint.pprint(self.asList(), *args, **kwargs) # add support for pickle protocol def __getstate__(self): return ( self.__toklist, ( self.__tokdict.copy(), self.__parent is not None and self.__parent() or None, self.__accumNames, self.__name ) ) def __setstate__(self,state): self.__toklist = state[0] (self.__tokdict, par, inAccumNames, self.__name) = state[1] self.__accumNames = {} self.__accumNames.update(inAccumNames) if par is not None: self.__parent = wkref(par) else: self.__parent = None def __getnewargs__(self): return self.__toklist, self.__name, self.__asList, self.__modal def __dir__(self): return (dir(type(self)) + list(self.keys())) collections.MutableMapping.register(ParseResults) def col (loc,strg): """Returns current column within a string, counting newlines as line separators. The first column is number 1. Note: the default parsing behavior is to expand tabs in the input string before starting the parsing process. See L{I{ParserElement.parseString}} for more information on parsing strings containing C{}s, and suggested methods to maintain a consistent view of the parsed string, the parse location, and line and column positions within the parsed string. """ s = strg return 1 if 0} for more information on parsing strings containing C{}s, and suggested methods to maintain a consistent view of the parsed string, the parse location, and line and column positions within the parsed string. """ return strg.count("\n",0,loc) + 1 def line( loc, strg ): """Returns the line of text containing loc within a string, counting newlines as line separators. 
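
    Illustrative example (a minimal sketch; the commented value is the expected result)::

        s = "abc\ndef\nghi"
        line(5, s)   # expected -> 'def'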
""" lastCR = strg.rfind("\n", 0, loc) nextCR = strg.find("\n", loc) if nextCR >= 0: return strg[lastCR+1:nextCR] else: return strg[lastCR+1:] def _defaultStartDebugAction( instring, loc, expr ): print (("Match " + _ustr(expr) + " at loc " + _ustr(loc) + "(%d,%d)" % ( lineno(loc,instring), col(loc,instring) ))) def _defaultSuccessDebugAction( instring, startloc, endloc, expr, toks ): print ("Matched " + _ustr(expr) + " -> " + str(toks.asList())) def _defaultExceptionDebugAction( instring, loc, expr, exc ): print ("Exception raised:" + _ustr(exc)) def nullDebugAction(*args): """'Do-nothing' debug action, to suppress debugging output during parsing.""" pass # Only works on Python 3.x - nonlocal is toxic to Python 2 installs #~ 'decorator to trim function calls to match the arity of the target' #~ def _trim_arity(func, maxargs=3): #~ if func in singleArgBuiltins: #~ return lambda s,l,t: func(t) #~ limit = 0 #~ foundArity = False #~ def wrapper(*args): #~ nonlocal limit,foundArity #~ while 1: #~ try: #~ ret = func(*args[limit:]) #~ foundArity = True #~ return ret #~ except TypeError: #~ if limit == maxargs or foundArity: #~ raise #~ limit += 1 #~ continue #~ return wrapper # this version is Python 2.x-3.x cross-compatible 'decorator to trim function calls to match the arity of the target' def _trim_arity(func, maxargs=2): if func in singleArgBuiltins: return lambda s,l,t: func(t) limit = [0] foundArity = [False] # traceback return data structure changed in Py3.5 - normalize back to plain tuples if system_version[:2] >= (3,5): def extract_stack(limit=0): # special handling for Python 3.5.0 - extra deep call stack by 1 offset = -3 if system_version == (3,5,0) else -2 frame_summary = traceback.extract_stack(limit=-offset+limit-1)[offset] return [(frame_summary.filename, frame_summary.lineno)] def extract_tb(tb, limit=0): frames = traceback.extract_tb(tb, limit=limit) frame_summary = frames[-1] return [(frame_summary.filename, frame_summary.lineno)] else: extract_stack = traceback.extract_stack extract_tb = traceback.extract_tb # synthesize what would be returned by traceback.extract_stack at the call to # user's parse action 'func', so that we don't incur call penalty at parse time LINE_DIFF = 6 # IF ANY CODE CHANGES, EVEN JUST COMMENTS OR BLANK LINES, BETWEEN THE NEXT LINE AND # THE CALL TO FUNC INSIDE WRAPPER, LINE_DIFF MUST BE MODIFIED!!!! 
this_line = extract_stack(limit=2)[-1] pa_call_line_synth = (this_line[0], this_line[1]+LINE_DIFF) def wrapper(*args): while 1: try: ret = func(*args[limit[0]:]) foundArity[0] = True return ret except TypeError: # re-raise TypeErrors if they did not come from our arity testing if foundArity[0]: raise else: try: tb = sys.exc_info()[-1] if not extract_tb(tb, limit=2)[-1][:2] == pa_call_line_synth: raise finally: del tb if limit[0] <= maxargs: limit[0] += 1 continue raise # copy func name to wrapper for sensible debug output func_name = "" try: func_name = getattr(func, '__name__', getattr(func, '__class__').__name__) except Exception: func_name = str(func) wrapper.__name__ = func_name return wrapper class ParserElement(object): """Abstract base level parser element class.""" DEFAULT_WHITE_CHARS = " \n\t\r" verbose_stacktrace = False @staticmethod def setDefaultWhitespaceChars( chars ): r""" Overrides the default whitespace chars Example:: # default whitespace chars are space, and newline OneOrMore(Word(alphas)).parseString("abc def\nghi jkl") # -> ['abc', 'def', 'ghi', 'jkl'] # change to just treat newline as significant ParserElement.setDefaultWhitespaceChars(" \t") OneOrMore(Word(alphas)).parseString("abc def\nghi jkl") # -> ['abc', 'def'] """ ParserElement.DEFAULT_WHITE_CHARS = chars @staticmethod def inlineLiteralsUsing(cls): """ Set class to be used for inclusion of string literals into a parser. Example:: # default literal class used is Literal integer = Word(nums) date_str = integer("year") + '/' + integer("month") + '/' + integer("day") date_str.parseString("1999/12/31") # -> ['1999', '/', '12', '/', '31'] # change to Suppress ParserElement.inlineLiteralsUsing(Suppress) date_str = integer("year") + '/' + integer("month") + '/' + integer("day") date_str.parseString("1999/12/31") # -> ['1999', '12', '31'] """ ParserElement._literalStringClass = cls def __init__( self, savelist=False ): self.parseAction = list() self.failAction = None #~ self.name = "" # don't define self.name, let subclasses try/except upcall self.strRepr = None self.resultsName = None self.saveAsList = savelist self.skipWhitespace = True self.whiteChars = ParserElement.DEFAULT_WHITE_CHARS self.copyDefaultWhiteChars = True self.mayReturnEmpty = False # used when checking for left-recursion self.keepTabs = False self.ignoreExprs = list() self.debug = False self.streamlined = False self.mayIndexError = True # used to optimize exception handling for subclasses that don't advance parse index self.errmsg = "" self.modalResults = True # used to mark results names as modal (report only last) or cumulative (list all) self.debugActions = ( None, None, None ) #custom debug actions self.re = None self.callPreparse = True # used to avoid redundant calls to preParse self.callDuringTry = False def copy( self ): """ Make a copy of this C{ParserElement}. Useful for defining different parse actions for the same parsing pattern, using copies of the original parse element. 
Example:: integer = Word(nums).setParseAction(lambda toks: int(toks[0])) integerK = integer.copy().addParseAction(lambda toks: toks[0]*1024) + Suppress("K") integerM = integer.copy().addParseAction(lambda toks: toks[0]*1024*1024) + Suppress("M") print(OneOrMore(integerK | integerM | integer).parseString("5K 100 640K 256M")) prints:: [5120, 100, 655360, 268435456] Equivalent form of C{expr.copy()} is just C{expr()}:: integerM = integer().addParseAction(lambda toks: toks[0]*1024*1024) + Suppress("M") """ cpy = copy.copy( self ) cpy.parseAction = self.parseAction[:] cpy.ignoreExprs = self.ignoreExprs[:] if self.copyDefaultWhiteChars: cpy.whiteChars = ParserElement.DEFAULT_WHITE_CHARS return cpy def setName( self, name ): """ Define name for this expression, makes debugging and exception messages clearer. Example:: Word(nums).parseString("ABC") # -> Exception: Expected W:(0123...) (at char 0), (line:1, col:1) Word(nums).setName("integer").parseString("ABC") # -> Exception: Expected integer (at char 0), (line:1, col:1) """ self.name = name self.errmsg = "Expected " + self.name if hasattr(self,"exception"): self.exception.msg = self.errmsg return self def setResultsName( self, name, listAllMatches=False ): """ Define name for referencing matching tokens as a nested attribute of the returned parse results. NOTE: this returns a *copy* of the original C{ParserElement} object; this is so that the client can define a basic element, such as an integer, and reference it in multiple places with different names. You can also set results names using the abbreviated syntax, C{expr("name")} in place of C{expr.setResultsName("name")} - see L{I{__call__}<__call__>}. Example:: date_str = (integer.setResultsName("year") + '/' + integer.setResultsName("month") + '/' + integer.setResultsName("day")) # equivalent form: date_str = integer("year") + '/' + integer("month") + '/' + integer("day") """ newself = self.copy() if name.endswith("*"): name = name[:-1] listAllMatches=True newself.resultsName = name newself.modalResults = not listAllMatches return newself def setBreak(self,breakFlag = True): """Method to invoke the Python pdb debugger when this element is about to be parsed. Set C{breakFlag} to True to enable, False to disable. """ if breakFlag: _parseMethod = self._parse def breaker(instring, loc, doActions=True, callPreParse=True): import pdb pdb.set_trace() return _parseMethod( instring, loc, doActions, callPreParse ) breaker._originalParseMethod = _parseMethod self._parse = breaker else: if hasattr(self._parse,"_originalParseMethod"): self._parse = self._parse._originalParseMethod return self def setParseAction( self, *fns, **kwargs ): """ Define action to perform when successfully matching parse element definition. Parse action fn is a callable method with 0-3 arguments, called as C{fn(s,loc,toks)}, C{fn(loc,toks)}, C{fn(toks)}, or just C{fn()}, where: - s = the original string being parsed (see note below) - loc = the location of the matching substring - toks = a list of the matched tokens, packaged as a C{L{ParseResults}} object If the functions in fns modify the tokens, they can return them as the return value from fn, and the modified list of tokens will replace the original. Otherwise, fn does not need to return any value. Optional keyword arguments: - callDuringTry = (default=C{False}) indicate if parse action should be run during lookaheads and alternate testing Note: the default parsing behavior is to expand tabs in the input string before starting the parsing process. 
See L{I{parseString}} for more information on parsing strings containing C{}s, and suggested methods to maintain a consistent view of the parsed string, the parse location, and line and column positions within the parsed string. Example:: integer = Word(nums) date_str = integer + '/' + integer + '/' + integer date_str.parseString("1999/12/31") # -> ['1999', '/', '12', '/', '31'] # use parse action to convert to ints at parse time integer = Word(nums).setParseAction(lambda toks: int(toks[0])) date_str = integer + '/' + integer + '/' + integer # note that integer fields are now ints, not strings date_str.parseString("1999/12/31") # -> [1999, '/', 12, '/', 31] """ self.parseAction = list(map(_trim_arity, list(fns))) self.callDuringTry = kwargs.get("callDuringTry", False) return self def addParseAction( self, *fns, **kwargs ): """ Add parse action to expression's list of parse actions. See L{I{setParseAction}}. See examples in L{I{copy}}. """ self.parseAction += list(map(_trim_arity, list(fns))) self.callDuringTry = self.callDuringTry or kwargs.get("callDuringTry", False) return self def addCondition(self, *fns, **kwargs): """Add a boolean predicate function to expression's list of parse actions. See L{I{setParseAction}} for function call signatures. Unlike C{setParseAction}, functions passed to C{addCondition} need to return boolean success/fail of the condition. Optional keyword arguments: - message = define a custom message to be used in the raised exception - fatal = if True, will raise ParseFatalException to stop parsing immediately; otherwise will raise ParseException Example:: integer = Word(nums).setParseAction(lambda toks: int(toks[0])) year_int = integer.copy() year_int.addCondition(lambda toks: toks[0] >= 2000, message="Only support years 2000 and later") date_str = year_int + '/' + integer + '/' + integer result = date_str.parseString("1999/12/31") # -> Exception: Only support years 2000 and later (at char 0), (line:1, col:1) """ msg = kwargs.get("message", "failed user-defined condition") exc_type = ParseFatalException if kwargs.get("fatal", False) else ParseException for fn in fns: def pa(s,l,t): if not bool(_trim_arity(fn)(s,l,t)): raise exc_type(s,l,msg) self.parseAction.append(pa) self.callDuringTry = self.callDuringTry or kwargs.get("callDuringTry", False) return self def setFailAction( self, fn ): """Define action to perform if parsing fails at this expression. Fail acton fn is a callable function that takes the arguments C{fn(s,loc,expr,err)} where: - s = string being parsed - loc = location where expression match was attempted and failed - expr = the parse expression that failed - err = the exception thrown The function returns no value. 
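
        Illustrative example (a minimal sketch; C{report_failure} is a hypothetical user
        callback, not part of pyparsing)::

            def report_failure(s, loc, expr, err):
                # called with the string being parsed, the location where the match was
                # attempted, the failing expression, and the exception that was raised
                print("failed to match %s at loc %d" % (expr, loc))

            integer = Word(nums).setName("integer").setFailAction(report_failure)
            OneOrMore(integer).parseString("123 456 A789")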
It may throw C{L{ParseFatalException}} if it is desired to stop parsing immediately.""" self.failAction = fn return self def _skipIgnorables( self, instring, loc ): exprsFound = True while exprsFound: exprsFound = False for e in self.ignoreExprs: try: while 1: loc,dummy = e._parse( instring, loc ) exprsFound = True except ParseException: pass return loc def preParse( self, instring, loc ): if self.ignoreExprs: loc = self._skipIgnorables( instring, loc ) if self.skipWhitespace: wt = self.whiteChars instrlen = len(instring) while loc < instrlen and instring[loc] in wt: loc += 1 return loc def parseImpl( self, instring, loc, doActions=True ): return loc, [] def postParse( self, instring, loc, tokenlist ): return tokenlist #~ @profile def _parseNoCache( self, instring, loc, doActions=True, callPreParse=True ): debugging = ( self.debug ) #and doActions ) if debugging or self.failAction: #~ print ("Match",self,"at loc",loc,"(%d,%d)" % ( lineno(loc,instring), col(loc,instring) )) if (self.debugActions[0] ): self.debugActions[0]( instring, loc, self ) if callPreParse and self.callPreparse: preloc = self.preParse( instring, loc ) else: preloc = loc tokensStart = preloc try: try: loc,tokens = self.parseImpl( instring, preloc, doActions ) except IndexError: raise ParseException( instring, len(instring), self.errmsg, self ) except ParseBaseException as err: #~ print ("Exception raised:", err) if self.debugActions[2]: self.debugActions[2]( instring, tokensStart, self, err ) if self.failAction: self.failAction( instring, tokensStart, self, err ) raise else: if callPreParse and self.callPreparse: preloc = self.preParse( instring, loc ) else: preloc = loc tokensStart = preloc if self.mayIndexError or loc >= len(instring): try: loc,tokens = self.parseImpl( instring, preloc, doActions ) except IndexError: raise ParseException( instring, len(instring), self.errmsg, self ) else: loc,tokens = self.parseImpl( instring, preloc, doActions ) tokens = self.postParse( instring, loc, tokens ) retTokens = ParseResults( tokens, self.resultsName, asList=self.saveAsList, modal=self.modalResults ) if self.parseAction and (doActions or self.callDuringTry): if debugging: try: for fn in self.parseAction: tokens = fn( instring, tokensStart, retTokens ) if tokens is not None: retTokens = ParseResults( tokens, self.resultsName, asList=self.saveAsList and isinstance(tokens,(ParseResults,list)), modal=self.modalResults ) except ParseBaseException as err: #~ print "Exception raised in user parse action:", err if (self.debugActions[2] ): self.debugActions[2]( instring, tokensStart, self, err ) raise else: for fn in self.parseAction: tokens = fn( instring, tokensStart, retTokens ) if tokens is not None: retTokens = ParseResults( tokens, self.resultsName, asList=self.saveAsList and isinstance(tokens,(ParseResults,list)), modal=self.modalResults ) if debugging: #~ print ("Matched",self,"->",retTokens.asList()) if (self.debugActions[1] ): self.debugActions[1]( instring, tokensStart, loc, self, retTokens ) return loc, retTokens def tryParse( self, instring, loc ): try: return self._parse( instring, loc, doActions=False )[0] except ParseFatalException: raise ParseException( instring, loc, self.errmsg, self) def canParseNext(self, instring, loc): try: self.tryParse(instring, loc) except (ParseException, IndexError): return False else: return True class _UnboundedCache(object): def __init__(self): cache = {} self.not_in_cache = not_in_cache = object() def get(self, key): return cache.get(key, not_in_cache) def set(self, key, value): 
cache[key] = value def clear(self): cache.clear() self.get = types.MethodType(get, self) self.set = types.MethodType(set, self) self.clear = types.MethodType(clear, self) if _OrderedDict is not None: class _FifoCache(object): def __init__(self, size): self.not_in_cache = not_in_cache = object() cache = _OrderedDict() def get(self, key): return cache.get(key, not_in_cache) def set(self, key, value): cache[key] = value if len(cache) > size: cache.popitem(False) def clear(self): cache.clear() self.get = types.MethodType(get, self) self.set = types.MethodType(set, self) self.clear = types.MethodType(clear, self) else: class _FifoCache(object): def __init__(self, size): self.not_in_cache = not_in_cache = object() cache = {} key_fifo = collections.deque([], size) def get(self, key): return cache.get(key, not_in_cache) def set(self, key, value): cache[key] = value if len(cache) > size: cache.pop(key_fifo.popleft(), None) key_fifo.append(key) def clear(self): cache.clear() key_fifo.clear() self.get = types.MethodType(get, self) self.set = types.MethodType(set, self) self.clear = types.MethodType(clear, self) # argument cache for optimizing repeated calls when backtracking through recursive expressions packrat_cache = {} # this is set later by enabledPackrat(); this is here so that resetCache() doesn't fail packrat_cache_lock = RLock() packrat_cache_stats = [0, 0] # this method gets repeatedly called during backtracking with the same arguments - # we can cache these arguments and save ourselves the trouble of re-parsing the contained expression def _parseCache( self, instring, loc, doActions=True, callPreParse=True ): HIT, MISS = 0, 1 lookup = (self, instring, loc, callPreParse, doActions) with ParserElement.packrat_cache_lock: cache = ParserElement.packrat_cache value = cache.get(lookup) if value is cache.not_in_cache: ParserElement.packrat_cache_stats[MISS] += 1 try: value = self._parseNoCache(instring, loc, doActions, callPreParse) except ParseBaseException as pe: # cache a copy of the exception, without the traceback cache.set(lookup, pe.__class__(*pe.args)) raise else: cache.set(lookup, (value[0], value[1].copy())) return value else: ParserElement.packrat_cache_stats[HIT] += 1 if isinstance(value, Exception): raise value return (value[0], value[1].copy()) _parse = _parseNoCache @staticmethod def resetCache(): ParserElement.packrat_cache.clear() ParserElement.packrat_cache_stats[:] = [0] * len(ParserElement.packrat_cache_stats) _packratEnabled = False @staticmethod def enablePackrat(cache_size_limit=128): """Enables "packrat" parsing, which adds memoizing to the parsing logic. Repeated parse attempts at the same string location (which happens often in many complex grammars) can immediately return a cached value, instead of re-executing parsing/validating code. Memoizing is done of both valid results and parsing exceptions. Parameters: - cache_size_limit - (default=C{128}) - if an integer value is provided will limit the size of the packrat cache; if None is passed, then the cache size will be unbounded; if 0 is passed, the cache will be effectively disabled. This speedup may break existing programs that use parse actions that have side-effects. For this reason, packrat parsing is disabled when you first import pyparsing. To activate the packrat feature, your program must call the class method C{ParserElement.enablePackrat()}. If your program uses C{psyco} to "compile as you go", you must call C{enablePackrat} before calling C{psyco.full()}. If you do not do this, Python will crash. 
For best results, call C{enablePackrat()} immediately after importing pyparsing. Example:: import pyparsing pyparsing.ParserElement.enablePackrat() """ if not ParserElement._packratEnabled: ParserElement._packratEnabled = True if cache_size_limit is None: ParserElement.packrat_cache = ParserElement._UnboundedCache() else: ParserElement.packrat_cache = ParserElement._FifoCache(cache_size_limit) ParserElement._parse = ParserElement._parseCache def parseString( self, instring, parseAll=False ): """ Execute the parse expression with the given string. This is the main interface to the client code, once the complete expression has been built. If you want the grammar to require that the entire input string be successfully parsed, then set C{parseAll} to True (equivalent to ending the grammar with C{L{StringEnd()}}). Note: C{parseString} implicitly calls C{expandtabs()} on the input string, in order to report proper column numbers in parse actions. If the input string contains tabs and the grammar uses parse actions that use the C{loc} argument to index into the string being parsed, you can ensure you have a consistent view of the input string by: - calling C{parseWithTabs} on your grammar before calling C{parseString} (see L{I{parseWithTabs}}) - define your parse action using the full C{(s,loc,toks)} signature, and reference the input string using the parse action's C{s} argument - explictly expand the tabs in your input string before calling C{parseString} Example:: Word('a').parseString('aaaaabaaa') # -> ['aaaaa'] Word('a').parseString('aaaaabaaa', parseAll=True) # -> Exception: Expected end of text """ ParserElement.resetCache() if not self.streamlined: self.streamline() #~ self.saveAsList = True for e in self.ignoreExprs: e.streamline() if not self.keepTabs: instring = instring.expandtabs() try: loc, tokens = self._parse( instring, 0 ) if parseAll: loc = self.preParse( instring, loc ) se = Empty() + StringEnd() se._parse( instring, loc ) except ParseBaseException as exc: if ParserElement.verbose_stacktrace: raise else: # catch and re-raise exception from here, clears out pyparsing internal stack trace raise exc else: return tokens def scanString( self, instring, maxMatches=_MAX_INT, overlap=False ): """ Scan the input string for expression matches. Each match will return the matching tokens, start location, and end location. May be called with optional C{maxMatches} argument, to clip scanning after 'n' matches are found. If C{overlap} is specified, then overlapping matches will be reported. Note that the start and end locations are reported relative to the string being parsed. See L{I{parseString}} for more information on parsing strings with embedded tabs. 
Example:: source = "sldjf123lsdjjkf345sldkjf879lkjsfd987" print(source) for tokens,start,end in Word(alphas).scanString(source): print(' '*start + '^'*(end-start)) print(' '*start + tokens[0]) prints:: sldjf123lsdjjkf345sldkjf879lkjsfd987 ^^^^^ sldjf ^^^^^^^ lsdjjkf ^^^^^^ sldkjf ^^^^^^ lkjsfd """ if not self.streamlined: self.streamline() for e in self.ignoreExprs: e.streamline() if not self.keepTabs: instring = _ustr(instring).expandtabs() instrlen = len(instring) loc = 0 preparseFn = self.preParse parseFn = self._parse ParserElement.resetCache() matches = 0 try: while loc <= instrlen and matches < maxMatches: try: preloc = preparseFn( instring, loc ) nextLoc,tokens = parseFn( instring, preloc, callPreParse=False ) except ParseException: loc = preloc+1 else: if nextLoc > loc: matches += 1 yield tokens, preloc, nextLoc if overlap: nextloc = preparseFn( instring, loc ) if nextloc > loc: loc = nextLoc else: loc += 1 else: loc = nextLoc else: loc = preloc+1 except ParseBaseException as exc: if ParserElement.verbose_stacktrace: raise else: # catch and re-raise exception from here, clears out pyparsing internal stack trace raise exc def transformString( self, instring ): """ Extension to C{L{scanString}}, to modify matching text with modified tokens that may be returned from a parse action. To use C{transformString}, define a grammar and attach a parse action to it that modifies the returned token list. Invoking C{transformString()} on a target string will then scan for matches, and replace the matched text patterns according to the logic in the parse action. C{transformString()} returns the resulting transformed string. Example:: wd = Word(alphas) wd.setParseAction(lambda toks: toks[0].title()) print(wd.transformString("now is the winter of our discontent made glorious summer by this sun of york.")) Prints:: Now Is The Winter Of Our Discontent Made Glorious Summer By This Sun Of York. """ out = [] lastE = 0 # force preservation of s, to minimize unwanted transformation of string, and to # keep string locs straight between transformString and scanString self.keepTabs = True try: for t,s,e in self.scanString( instring ): out.append( instring[lastE:s] ) if t: if isinstance(t,ParseResults): out += t.asList() elif isinstance(t,list): out += t else: out.append(t) lastE = e out.append(instring[lastE:]) out = [o for o in out if o] return "".join(map(_ustr,_flatten(out))) except ParseBaseException as exc: if ParserElement.verbose_stacktrace: raise else: # catch and re-raise exception from here, clears out pyparsing internal stack trace raise exc def searchString( self, instring, maxMatches=_MAX_INT ): """ Another extension to C{L{scanString}}, simplifying the access to the tokens found to match the given parse expression. May be called with optional C{maxMatches} argument, to clip searching after 'n' matches are found. 
Example:: # a capitalized word starts with an uppercase letter, followed by zero or more lowercase letters cap_word = Word(alphas.upper(), alphas.lower()) print(cap_word.searchString("More than Iron, more than Lead, more than Gold I need Electricity")) prints:: ['More', 'Iron', 'Lead', 'Gold', 'I'] """ try: return ParseResults([ t for t,s,e in self.scanString( instring, maxMatches ) ]) except ParseBaseException as exc: if ParserElement.verbose_stacktrace: raise else: # catch and re-raise exception from here, clears out pyparsing internal stack trace raise exc def split(self, instring, maxsplit=_MAX_INT, includeSeparators=False): """ Generator method to split a string using the given expression as a separator. May be called with optional C{maxsplit} argument, to limit the number of splits; and the optional C{includeSeparators} argument (default=C{False}), if the separating matching text should be included in the split results. Example:: punc = oneOf(list(".,;:/-!?")) print(list(punc.split("This, this?, this sentence, is badly punctuated!"))) prints:: ['This', ' this', '', ' this sentence', ' is badly punctuated', ''] """ splits = 0 last = 0 for t,s,e in self.scanString(instring, maxMatches=maxsplit): yield instring[last:s] if includeSeparators: yield t[0] last = e yield instring[last:] def __add__(self, other ): """ Implementation of + operator - returns C{L{And}}. Adding strings to a ParserElement converts them to L{Literal}s by default. Example:: greet = Word(alphas) + "," + Word(alphas) + "!" hello = "Hello, World!" print (hello, "->", greet.parseString(hello)) Prints:: Hello, World! -> ['Hello', ',', 'World', '!'] """ if isinstance( other, basestring ): other = ParserElement._literalStringClass( other ) if not isinstance( other, ParserElement ): warnings.warn("Cannot combine element of type %s with ParserElement" % type(other), SyntaxWarning, stacklevel=2) return None return And( [ self, other ] ) def __radd__(self, other ): """ Implementation of + operator when left operand is not a C{L{ParserElement}} """ if isinstance( other, basestring ): other = ParserElement._literalStringClass( other ) if not isinstance( other, ParserElement ): warnings.warn("Cannot combine element of type %s with ParserElement" % type(other), SyntaxWarning, stacklevel=2) return None return other + self def __sub__(self, other): """ Implementation of - operator, returns C{L{And}} with error stop """ if isinstance( other, basestring ): other = ParserElement._literalStringClass( other ) if not isinstance( other, ParserElement ): warnings.warn("Cannot combine element of type %s with ParserElement" % type(other), SyntaxWarning, stacklevel=2) return None return And( [ self, And._ErrorStop(), other ] ) def __rsub__(self, other ): """ Implementation of - operator when left operand is not a C{L{ParserElement}} """ if isinstance( other, basestring ): other = ParserElement._literalStringClass( other ) if not isinstance( other, ParserElement ): warnings.warn("Cannot combine element of type %s with ParserElement" % type(other), SyntaxWarning, stacklevel=2) return None return other - self def __mul__(self,other): """ Implementation of * operator, allows use of C{expr * 3} in place of C{expr + expr + expr}. Expressions may also me multiplied by a 2-integer tuple, similar to C{{min,max}} multipliers in regular expressions. 
Tuples may also include C{None} as in: - C{expr*(n,None)} or C{expr*(n,)} is equivalent to C{expr*n + L{ZeroOrMore}(expr)} (read as "at least n instances of C{expr}") - C{expr*(None,n)} is equivalent to C{expr*(0,n)} (read as "0 to n instances of C{expr}") - C{expr*(None,None)} is equivalent to C{L{ZeroOrMore}(expr)} - C{expr*(1,None)} is equivalent to C{L{OneOrMore}(expr)} Note that C{expr*(None,n)} does not raise an exception if more than n exprs exist in the input stream; that is, C{expr*(None,n)} does not enforce a maximum number of expr occurrences. If this behavior is desired, then write C{expr*(None,n) + ~expr} """ if isinstance(other,int): minElements, optElements = other,0 elif isinstance(other,tuple): other = (other + (None, None))[:2] if other[0] is None: other = (0, other[1]) if isinstance(other[0],int) and other[1] is None: if other[0] == 0: return ZeroOrMore(self) if other[0] == 1: return OneOrMore(self) else: return self*other[0] + ZeroOrMore(self) elif isinstance(other[0],int) and isinstance(other[1],int): minElements, optElements = other optElements -= minElements else: raise TypeError("cannot multiply 'ParserElement' and ('%s','%s') objects", type(other[0]),type(other[1])) else: raise TypeError("cannot multiply 'ParserElement' and '%s' objects", type(other)) if minElements < 0: raise ValueError("cannot multiply ParserElement by negative value") if optElements < 0: raise ValueError("second tuple value must be greater or equal to first tuple value") if minElements == optElements == 0: raise ValueError("cannot multiply ParserElement by 0 or (0,0)") if (optElements): def makeOptionalList(n): if n>1: return Optional(self + makeOptionalList(n-1)) else: return Optional(self) if minElements: if minElements == 1: ret = self + makeOptionalList(optElements) else: ret = And([self]*minElements) + makeOptionalList(optElements) else: ret = makeOptionalList(optElements) else: if minElements == 1: ret = self else: ret = And([self]*minElements) return ret def __rmul__(self, other): return self.__mul__(other) def __or__(self, other ): """ Implementation of | operator - returns C{L{MatchFirst}} """ if isinstance( other, basestring ): other = ParserElement._literalStringClass( other ) if not isinstance( other, ParserElement ): warnings.warn("Cannot combine element of type %s with ParserElement" % type(other), SyntaxWarning, stacklevel=2) return None return MatchFirst( [ self, other ] ) def __ror__(self, other ): """ Implementation of | operator when left operand is not a C{L{ParserElement}} """ if isinstance( other, basestring ): other = ParserElement._literalStringClass( other ) if not isinstance( other, ParserElement ): warnings.warn("Cannot combine element of type %s with ParserElement" % type(other), SyntaxWarning, stacklevel=2) return None return other | self def __xor__(self, other ): """ Implementation of ^ operator - returns C{L{Or}} """ if isinstance( other, basestring ): other = ParserElement._literalStringClass( other ) if not isinstance( other, ParserElement ): warnings.warn("Cannot combine element of type %s with ParserElement" % type(other), SyntaxWarning, stacklevel=2) return None return Or( [ self, other ] ) def __rxor__(self, other ): """ Implementation of ^ operator when left operand is not a C{L{ParserElement}} """ if isinstance( other, basestring ): other = ParserElement._literalStringClass( other ) if not isinstance( other, ParserElement ): warnings.warn("Cannot combine element of type %s with ParserElement" % type(other), SyntaxWarning, stacklevel=2) return None return 
other ^ self def __and__(self, other ): """ Implementation of & operator - returns C{L{Each}} """ if isinstance( other, basestring ): other = ParserElement._literalStringClass( other ) if not isinstance( other, ParserElement ): warnings.warn("Cannot combine element of type %s with ParserElement" % type(other), SyntaxWarning, stacklevel=2) return None return Each( [ self, other ] ) def __rand__(self, other ): """ Implementation of & operator when left operand is not a C{L{ParserElement}} """ if isinstance( other, basestring ): other = ParserElement._literalStringClass( other ) if not isinstance( other, ParserElement ): warnings.warn("Cannot combine element of type %s with ParserElement" % type(other), SyntaxWarning, stacklevel=2) return None return other & self def __invert__( self ): """ Implementation of ~ operator - returns C{L{NotAny}} """ return NotAny( self ) def __call__(self, name=None): """ Shortcut for C{L{setResultsName}}, with C{listAllMatches=False}. If C{name} is given with a trailing C{'*'} character, then C{listAllMatches} will be passed as C{True}. If C{name} is omitted, same as calling C{L{copy}}. Example:: # these are equivalent userdata = Word(alphas).setResultsName("name") + Word(nums+"-").setResultsName("socsecno") userdata = Word(alphas)("name") + Word(nums+"-")("socsecno") """ if name is not None: return self.setResultsName(name) else: return self.copy() def suppress( self ): """ Suppresses the output of this C{ParserElement}; useful to keep punctuation from cluttering up returned output. """ return Suppress( self ) def leaveWhitespace( self ): """ Disables the skipping of whitespace before matching the characters in the C{ParserElement}'s defined pattern. This is normally only used internally by the pyparsing module, but may be needed in some whitespace-sensitive grammars. """ self.skipWhitespace = False return self def setWhitespaceChars( self, chars ): """ Overrides the default whitespace chars """ self.skipWhitespace = True self.whiteChars = chars self.copyDefaultWhiteChars = False return self def parseWithTabs( self ): """ Overrides default behavior to expand C{}s to spaces before parsing the input string. Must be called before C{parseString} when the input grammar contains elements that match C{} characters. """ self.keepTabs = True return self def ignore( self, other ): """ Define expression to be ignored (e.g., comments) while doing pattern matching; may be called repeatedly, to define multiple comment or other ignorable patterns. Example:: patt = OneOrMore(Word(alphas)) patt.parseString('ablaj /* comment */ lskjd') # -> ['ablaj'] patt.ignore(cStyleComment) patt.parseString('ablaj /* comment */ lskjd') # -> ['ablaj', 'lskjd'] """ if isinstance(other, basestring): other = Suppress(other) if isinstance( other, Suppress ): if other not in self.ignoreExprs: self.ignoreExprs.append(other) else: self.ignoreExprs.append( Suppress( other.copy() ) ) return self def setDebugActions( self, startAction, successAction, exceptionAction ): """ Enable display of debugging messages while doing pattern matching. """ self.debugActions = (startAction or _defaultStartDebugAction, successAction or _defaultSuccessDebugAction, exceptionAction or _defaultExceptionDebugAction) self.debug = True return self def setDebug( self, flag=True ): """ Enable display of debugging messages while doing pattern matching. Set C{flag} to True to enable, False to disable. 
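# --- Sketch combining the operator overloads described above ('+' -> And,
# --- '|' -> MatchFirst, '*' -> repetition, call -> results name) and ignore();
# --- names and sample input are illustrative only.
from pyparsing import Word, alphas, nums, cStyleComment

ident, number = Word(alphas), Word(nums)
value = (ident | number)("value")          # '|' builds a MatchFirst; ("value") names the result
pair  = ident("key") + "=" + value         # '+' builds an And; plain strings become Literals
words = ident * (1, 3)                     # between 1 and 3 repetitions
print(words.parseString("a b c d"))        # -> ['a', 'b', 'c']

pair.ignore(cStyleComment)                 # skip C-style comments while matching
print(pair.parseString("x /* key */ = 42"))    # -> ['x', '=', '42']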
Example:: wd = Word(alphas).setName("alphaword") integer = Word(nums).setName("numword") term = wd | integer # turn on debugging for wd wd.setDebug() OneOrMore(term).parseString("abc 123 xyz 890") prints:: Match alphaword at loc 0(1,1) Matched alphaword -> ['abc'] Match alphaword at loc 3(1,4) Exception raised:Expected alphaword (at char 4), (line:1, col:5) Match alphaword at loc 7(1,8) Matched alphaword -> ['xyz'] Match alphaword at loc 11(1,12) Exception raised:Expected alphaword (at char 12), (line:1, col:13) Match alphaword at loc 15(1,16) Exception raised:Expected alphaword (at char 15), (line:1, col:16) The output shown is that produced by the default debug actions - custom debug actions can be specified using L{setDebugActions}. Prior to attempting to match the C{wd} expression, the debugging message C{"Match at loc (,)"} is shown. Then if the parse succeeds, a C{"Matched"} message is shown, or an C{"Exception raised"} message is shown. Also note the use of L{setName} to assign a human-readable name to the expression, which makes debugging and exception messages easier to understand - for instance, the default name created for the C{Word} expression without calling C{setName} is C{"W:(ABCD...)"}. """ if flag: self.setDebugActions( _defaultStartDebugAction, _defaultSuccessDebugAction, _defaultExceptionDebugAction ) else: self.debug = False return self def __str__( self ): return self.name def __repr__( self ): return _ustr(self) def streamline( self ): self.streamlined = True self.strRepr = None return self def checkRecursion( self, parseElementList ): pass def validate( self, validateTrace=[] ): """ Check defined expressions for valid structure, check for infinite recursive definitions. """ self.checkRecursion( [] ) def parseFile( self, file_or_filename, parseAll=False ): """ Execute the parse expression on the given file or filename. If a filename is specified (instead of a file object), the entire file is opened, read, and closed before parsing. """ try: file_contents = file_or_filename.read() except AttributeError: with open(file_or_filename, "r") as f: file_contents = f.read() try: return self.parseString(file_contents, parseAll) except ParseBaseException as exc: if ParserElement.verbose_stacktrace: raise else: # catch and re-raise exception from here, clears out pyparsing internal stack trace raise exc def __eq__(self,other): if isinstance(other, ParserElement): return self is other or vars(self) == vars(other) elif isinstance(other, basestring): return self.matches(other) else: return super(ParserElement,self)==other def __ne__(self,other): return not (self == other) def __hash__(self): return hash(id(self)) def __req__(self,other): return self == other def __rne__(self,other): return not (self == other) def matches(self, testString, parseAll=True): """ Method for quick testing of a parser against a test string. Good for simple inline microtests of sub expressions while building up larger parser. Parameters: - testString - to test against this expression for a match - parseAll - (default=C{True}) - flag to pass to C{L{parseString}} when running tests Example:: expr = Word(nums) assert expr.matches("100") """ try: self.parseString(_ustr(testString), parseAll=parseAll) return True except ParseBaseException: return False def runTests(self, tests, parseAll=True, comment='#', fullDump=True, printResults=True, failureTests=False): """ Execute the parse expression on a series of test strings, showing each test, the parsed results or where the parse failed. 
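# --- Sketch of matches() and setDebug(), per the docstrings above; the debug
# --- trace format is shown in the setDebug example earlier in this section.
from pyparsing import Word, alphas, nums, OneOrMore

expr = Word(nums)
assert expr.matches("100")                 # quick inline test, returns True/False
assert not expr.matches("abc")

wd = Word(alphas).setName("alphaword")
wd.setDebug()                              # emit Match/Matched/Exception messages
OneOrMore(wd | Word(nums)).parseString("abc 123 xyz")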
Quick and easy way to run a parse expression against a list of sample strings. Parameters: - tests - a list of separate test strings, or a multiline string of test strings - parseAll - (default=C{True}) - flag to pass to C{L{parseString}} when running tests - comment - (default=C{'#'}) - expression for indicating embedded comments in the test string; pass None to disable comment filtering - fullDump - (default=C{True}) - dump results as list followed by results names in nested outline; if False, only dump nested list - printResults - (default=C{True}) prints test output to stdout - failureTests - (default=C{False}) indicates if these tests are expected to fail parsing Returns: a (success, results) tuple, where success indicates that all tests succeeded (or failed if C{failureTests} is True), and the results contain a list of lines of each test's output Example:: number_expr = pyparsing_common.number.copy() result = number_expr.runTests(''' # unsigned integer 100 # negative integer -100 # float with scientific notation 6.02e23 # integer with scientific notation 1e-12 ''') print("Success" if result[0] else "Failed!") result = number_expr.runTests(''' # stray character 100Z # missing leading digit before '.' -.100 # too many '.' 3.14.159 ''', failureTests=True) print("Success" if result[0] else "Failed!") prints:: # unsigned integer 100 [100] # negative integer -100 [-100] # float with scientific notation 6.02e23 [6.02e+23] # integer with scientific notation 1e-12 [1e-12] Success # stray character 100Z ^ FAIL: Expected end of text (at char 3), (line:1, col:4) # missing leading digit before '.' -.100 ^ FAIL: Expected {real number with scientific notation | real number | signed integer} (at char 0), (line:1, col:1) # too many '.' 3.14.159 ^ FAIL: Expected end of text (at char 4), (line:1, col:5) Success Each test string must be on a single line. If you want to test a string that spans multiple lines, create a test like this:: expr.runTest(r"this is a test\\n of strings that spans \\n 3 lines") (Note that this is a raw string literal, you must include the leading 'r'.) """ if isinstance(tests, basestring): tests = list(map(str.strip, tests.rstrip().splitlines())) if isinstance(comment, basestring): comment = Literal(comment) allResults = [] comments = [] success = True for t in tests: if comment is not None and comment.matches(t, False) or comments and not t: comments.append(t) continue if not t: continue out = ['\n'.join(comments), t] comments = [] try: t = t.replace(r'\n','\n') result = self.parseString(t, parseAll=parseAll) out.append(result.dump(full=fullDump)) success = success and not failureTests except ParseBaseException as pe: fatal = "(FATAL)" if isinstance(pe, ParseFatalException) else "" if '\n' in t: out.append(line(pe.loc, t)) out.append(' '*(col(pe.loc,t)-1) + '^' + fatal) else: out.append(' '*pe.loc + '^' + fatal) out.append("FAIL: " + str(pe)) success = success and failureTests result = pe except Exception as exc: out.append("FAIL-EXCEPTION: " + str(exc)) success = success and failureTests result = exc if printResults: if fullDump: out.append('') print('\n'.join(out)) allResults.append((t, result)) return success, allResults class Token(ParserElement): """ Abstract C{ParserElement} subclass, for defining atomic matching patterns. """ def __init__( self ): super(Token,self).__init__( savelist=False ) class Empty(Token): """ An empty token, will always match. 
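# --- Sketch of runTests(), condensed from the docstring above; uses the
# --- pyparsing_common.number helper that the docstring references.
from pyparsing import pyparsing_common

number_expr = pyparsing_common.number.copy()
success, results = number_expr.runTests('''
    # unsigned integer
    100
    # float with scientific notation
    6.02e23
    ''')
print("Success" if success else "Failed!")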
""" def __init__( self ): super(Empty,self).__init__() self.name = "Empty" self.mayReturnEmpty = True self.mayIndexError = False class NoMatch(Token): """ A token that will never match. """ def __init__( self ): super(NoMatch,self).__init__() self.name = "NoMatch" self.mayReturnEmpty = True self.mayIndexError = False self.errmsg = "Unmatchable token" def parseImpl( self, instring, loc, doActions=True ): raise ParseException(instring, loc, self.errmsg, self) class Literal(Token): """ Token to exactly match a specified string. Example:: Literal('blah').parseString('blah') # -> ['blah'] Literal('blah').parseString('blahfooblah') # -> ['blah'] Literal('blah').parseString('bla') # -> Exception: Expected "blah" For case-insensitive matching, use L{CaselessLiteral}. For keyword matching (force word break before and after the matched string), use L{Keyword} or L{CaselessKeyword}. """ def __init__( self, matchString ): super(Literal,self).__init__() self.match = matchString self.matchLen = len(matchString) try: self.firstMatchChar = matchString[0] except IndexError: warnings.warn("null string passed to Literal; use Empty() instead", SyntaxWarning, stacklevel=2) self.__class__ = Empty self.name = '"%s"' % _ustr(self.match) self.errmsg = "Expected " + self.name self.mayReturnEmpty = False self.mayIndexError = False # Performance tuning: this routine gets called a *lot* # if this is a single character match string and the first character matches, # short-circuit as quickly as possible, and avoid calling startswith #~ @profile def parseImpl( self, instring, loc, doActions=True ): if (instring[loc] == self.firstMatchChar and (self.matchLen==1 or instring.startswith(self.match,loc)) ): return loc+self.matchLen, self.match raise ParseException(instring, loc, self.errmsg, self) _L = Literal ParserElement._literalStringClass = Literal class Keyword(Token): """ Token to exactly match a specified string as a keyword, that is, it must be immediately followed by a non-keyword character. Compare with C{L{Literal}}: - C{Literal("if")} will match the leading C{'if'} in C{'ifAndOnlyIf'}. - C{Keyword("if")} will not; it will only match the leading C{'if'} in C{'if x=1'}, or C{'if(y==2)'} Accepts two optional constructor arguments in addition to the keyword string: - C{identChars} is a string of characters that would be valid identifier characters, defaulting to all alphanumerics + "_" and "$" - C{caseless} allows case-insensitive matching, default is C{False}. Example:: Keyword("start").parseString("start") # -> ['start'] Keyword("start").parseString("starting") # -> Exception For case-insensitive matching, use L{CaselessKeyword}. 
""" DEFAULT_KEYWORD_CHARS = alphanums+"_$" def __init__( self, matchString, identChars=None, caseless=False ): super(Keyword,self).__init__() if identChars is None: identChars = Keyword.DEFAULT_KEYWORD_CHARS self.match = matchString self.matchLen = len(matchString) try: self.firstMatchChar = matchString[0] except IndexError: warnings.warn("null string passed to Keyword; use Empty() instead", SyntaxWarning, stacklevel=2) self.name = '"%s"' % self.match self.errmsg = "Expected " + self.name self.mayReturnEmpty = False self.mayIndexError = False self.caseless = caseless if caseless: self.caselessmatch = matchString.upper() identChars = identChars.upper() self.identChars = set(identChars) def parseImpl( self, instring, loc, doActions=True ): if self.caseless: if ( (instring[ loc:loc+self.matchLen ].upper() == self.caselessmatch) and (loc >= len(instring)-self.matchLen or instring[loc+self.matchLen].upper() not in self.identChars) and (loc == 0 or instring[loc-1].upper() not in self.identChars) ): return loc+self.matchLen, self.match else: if (instring[loc] == self.firstMatchChar and (self.matchLen==1 or instring.startswith(self.match,loc)) and (loc >= len(instring)-self.matchLen or instring[loc+self.matchLen] not in self.identChars) and (loc == 0 or instring[loc-1] not in self.identChars) ): return loc+self.matchLen, self.match raise ParseException(instring, loc, self.errmsg, self) def copy(self): c = super(Keyword,self).copy() c.identChars = Keyword.DEFAULT_KEYWORD_CHARS return c @staticmethod def setDefaultKeywordChars( chars ): """Overrides the default Keyword chars """ Keyword.DEFAULT_KEYWORD_CHARS = chars class CaselessLiteral(Literal): """ Token to match a specified string, ignoring case of letters. Note: the matched results will always be in the case of the given match string, NOT the case of the input text. Example:: OneOrMore(CaselessLiteral("CMD")).parseString("cmd CMD Cmd10") # -> ['CMD', 'CMD', 'CMD'] (Contrast with example for L{CaselessKeyword}.) """ def __init__( self, matchString ): super(CaselessLiteral,self).__init__( matchString.upper() ) # Preserve the defining literal. self.returnString = matchString self.name = "'%s'" % self.returnString self.errmsg = "Expected " + self.name def parseImpl( self, instring, loc, doActions=True ): if instring[ loc:loc+self.matchLen ].upper() == self.match: return loc+self.matchLen, self.returnString raise ParseException(instring, loc, self.errmsg, self) class CaselessKeyword(Keyword): """ Caseless version of L{Keyword}. Example:: OneOrMore(CaselessKeyword("CMD")).parseString("cmd CMD Cmd10") # -> ['CMD', 'CMD'] (Contrast with example for L{CaselessLiteral}.) """ def __init__( self, matchString, identChars=None ): super(CaselessKeyword,self).__init__( matchString, identChars, caseless=True ) def parseImpl( self, instring, loc, doActions=True ): if ( (instring[ loc:loc+self.matchLen ].upper() == self.caselessmatch) and (loc >= len(instring)-self.matchLen or instring[loc+self.matchLen].upper() not in self.identChars) ): return loc+self.matchLen, self.match raise ParseException(instring, loc, self.errmsg, self) class CloseMatch(Token): """ A variation on L{Literal} which matches "close" matches, that is, strings with at most 'n' mismatching characters. 
C{CloseMatch} takes parameters: - C{match_string} - string to be matched - C{maxMismatches} - (C{default=1}) maximum number of mismatches allowed to count as a match The results from a successful parse will contain the matched text from the input string and the following named results: - C{mismatches} - a list of the positions within the match_string where mismatches were found - C{original} - the original match_string used to compare against the input string If C{mismatches} is an empty list, then the match was an exact match. Example:: patt = CloseMatch("ATCATCGAATGGA") patt.parseString("ATCATCGAAXGGA") # -> (['ATCATCGAAXGGA'], {'mismatches': [[9]], 'original': ['ATCATCGAATGGA']}) patt.parseString("ATCAXCGAAXGGA") # -> Exception: Expected 'ATCATCGAATGGA' (with up to 1 mismatches) (at char 0), (line:1, col:1) # exact match patt.parseString("ATCATCGAATGGA") # -> (['ATCATCGAATGGA'], {'mismatches': [[]], 'original': ['ATCATCGAATGGA']}) # close match allowing up to 2 mismatches patt = CloseMatch("ATCATCGAATGGA", maxMismatches=2) patt.parseString("ATCAXCGAAXGGA") # -> (['ATCAXCGAAXGGA'], {'mismatches': [[4, 9]], 'original': ['ATCATCGAATGGA']}) """ def __init__(self, match_string, maxMismatches=1): super(CloseMatch,self).__init__() self.name = match_string self.match_string = match_string self.maxMismatches = maxMismatches self.errmsg = "Expected %r (with up to %d mismatches)" % (self.match_string, self.maxMismatches) self.mayIndexError = False self.mayReturnEmpty = False def parseImpl( self, instring, loc, doActions=True ): start = loc instrlen = len(instring) maxloc = start + len(self.match_string) if maxloc <= instrlen: match_string = self.match_string match_stringloc = 0 mismatches = [] maxMismatches = self.maxMismatches for match_stringloc,s_m in enumerate(zip(instring[loc:maxloc], self.match_string)): src,mat = s_m if src != mat: mismatches.append(match_stringloc) if len(mismatches) > maxMismatches: break else: loc = match_stringloc + 1 results = ParseResults([instring[start:loc]]) results['original'] = self.match_string results['mismatches'] = mismatches return loc, results raise ParseException(instring, loc, self.errmsg, self) class Word(Token): """ Token for matching words composed of allowed character sets. Defined with string containing all allowed initial characters, an optional string containing allowed body characters (if omitted, defaults to the initial character set), and an optional minimum, maximum, and/or exact length. The default value for C{min} is 1 (a minimum value < 1 is not valid); the default values for C{max} and C{exact} are 0, meaning no maximum or exact length restriction. An optional C{excludeChars} parameter can list characters that might be found in the input C{bodyChars} string; useful to define a word of all printables except for one or two characters, for instance. L{srange} is useful for defining custom character set strings for defining C{Word} expressions, using range notation from regular expression character sets. A common mistake is to use C{Word} to match a specific literal string, as in C{Word("Address")}. Remember that C{Word} uses the string argument to define I{sets} of matchable characters. This expression would match "Add", "AAA", "dAred", or any other word made up of the characters 'A', 'd', 'r', 'e', and 's'. To match an exact literal string, use L{Literal} or L{Keyword}. 
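# --- Sketch of CloseMatch, per the docstring above: match a fixed string while
# --- tolerating up to maxMismatches differing characters.
from pyparsing import CloseMatch

patt = CloseMatch("ATCATCGAATGGA", maxMismatches=2)
result = patt.parseString("ATCAXCGAAXGGA")
print(result[0])                  # the matched input text
print(result['mismatches'])       # positions of the mismatched characters (4 and 9)
print(result['original'])         # the reference string given to CloseMatch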
pyparsing includes helper strings for building Words: - L{alphas} - L{nums} - L{alphanums} - L{hexnums} - L{alphas8bit} (alphabetic characters in ASCII range 128-255 - accented, tilded, umlauted, etc.) - L{punc8bit} (non-alphabetic characters in ASCII range 128-255 - currency, symbols, superscripts, diacriticals, etc.) - L{printables} (any non-whitespace character) Example:: # a word composed of digits integer = Word(nums) # equivalent to Word("0123456789") or Word(srange("0-9")) # a word with a leading capital, and zero or more lowercase capital_word = Word(alphas.upper(), alphas.lower()) # hostnames are alphanumeric, with leading alpha, and '-' hostname = Word(alphas, alphanums+'-') # roman numeral (not a strict parser, accepts invalid mix of characters) roman = Word("IVXLCDM") # any string of non-whitespace characters, except for ',' csv_value = Word(printables, excludeChars=",") """ def __init__( self, initChars, bodyChars=None, min=1, max=0, exact=0, asKeyword=False, excludeChars=None ): super(Word,self).__init__() if excludeChars: initChars = ''.join(c for c in initChars if c not in excludeChars) if bodyChars: bodyChars = ''.join(c for c in bodyChars if c not in excludeChars) self.initCharsOrig = initChars self.initChars = set(initChars) if bodyChars : self.bodyCharsOrig = bodyChars self.bodyChars = set(bodyChars) else: self.bodyCharsOrig = initChars self.bodyChars = set(initChars) self.maxSpecified = max > 0 if min < 1: raise ValueError("cannot specify a minimum length < 1; use Optional(Word()) if zero-length word is permitted") self.minLen = min if max > 0: self.maxLen = max else: self.maxLen = _MAX_INT if exact > 0: self.maxLen = exact self.minLen = exact self.name = _ustr(self) self.errmsg = "Expected " + self.name self.mayIndexError = False self.asKeyword = asKeyword if ' ' not in self.initCharsOrig+self.bodyCharsOrig and (min==1 and max==0 and exact==0): if self.bodyCharsOrig == self.initCharsOrig: self.reString = "[%s]+" % _escapeRegexRangeChars(self.initCharsOrig) elif len(self.initCharsOrig) == 1: self.reString = "%s[%s]*" % \ (re.escape(self.initCharsOrig), _escapeRegexRangeChars(self.bodyCharsOrig),) else: self.reString = "[%s][%s]*" % \ (_escapeRegexRangeChars(self.initCharsOrig), _escapeRegexRangeChars(self.bodyCharsOrig),) if self.asKeyword: self.reString = r"\b"+self.reString+r"\b" try: self.re = re.compile( self.reString ) except Exception: self.re = None def parseImpl( self, instring, loc, doActions=True ): if self.re: result = self.re.match(instring,loc) if not result: raise ParseException(instring, loc, self.errmsg, self) loc = result.end() return loc, result.group() if not(instring[ loc ] in self.initChars): raise ParseException(instring, loc, self.errmsg, self) start = loc loc += 1 instrlen = len(instring) bodychars = self.bodyChars maxloc = start + self.maxLen maxloc = min( maxloc, instrlen ) while loc < maxloc and instring[loc] in bodychars: loc += 1 throwException = False if loc - start < self.minLen: throwException = True if self.maxSpecified and loc < instrlen and instring[loc] in bodychars: throwException = True if self.asKeyword: if (start>0 and instring[start-1] in bodychars) or (loc4: return s[:4]+"..." else: return s if ( self.initCharsOrig != self.bodyCharsOrig ): self.strRepr = "W:(%s,%s)" % ( charsAsStr(self.initCharsOrig), charsAsStr(self.bodyCharsOrig) ) else: self.strRepr = "W:(%s)" % charsAsStr(self.initCharsOrig) return self.strRepr class Regex(Token): """ Token for matching strings that match a given regular expression. 
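# --- Sketch of Word, per the docstring above: set-based matching with separate
# --- initial/body character sets and an excludeChars filter.
from pyparsing import Word, alphas, alphanums, nums, printables, delimitedList

integer      = Word(nums)                          # one or more digits
hostname     = Word(alphas, alphanums + '-')       # leading alpha, then alphanumerics or '-'
capital_word = Word(alphas.upper(), alphas.lower())
csv_value    = Word(printables, excludeChars=",")  # any non-whitespace except ','

print(delimitedList(csv_value).parseString("abc,def,123"))   # -> ['abc', 'def', '123']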
Defined with string specifying the regular expression in a form recognized by the inbuilt Python re module. If the given regex contains named groups (defined using C{(?P...)}), these will be preserved as named parse results. Example:: realnum = Regex(r"[+-]?\d+\.\d*") date = Regex(r'(?P\d{4})-(?P\d\d?)-(?P\d\d?)') # ref: http://stackoverflow.com/questions/267399/how-do-you-match-only-valid-roman-numerals-with-a-regular-expression roman = Regex(r"M{0,4}(CM|CD|D?C{0,3})(XC|XL|L?X{0,3})(IX|IV|V?I{0,3})") """ compiledREtype = type(re.compile("[A-Z]")) def __init__( self, pattern, flags=0): """The parameters C{pattern} and C{flags} are passed to the C{re.compile()} function as-is. See the Python C{re} module for an explanation of the acceptable patterns and flags.""" super(Regex,self).__init__() if isinstance(pattern, basestring): if not pattern: warnings.warn("null string passed to Regex; use Empty() instead", SyntaxWarning, stacklevel=2) self.pattern = pattern self.flags = flags try: self.re = re.compile(self.pattern, self.flags) self.reString = self.pattern except sre_constants.error: warnings.warn("invalid pattern (%s) passed to Regex" % pattern, SyntaxWarning, stacklevel=2) raise elif isinstance(pattern, Regex.compiledREtype): self.re = pattern self.pattern = \ self.reString = str(pattern) self.flags = flags else: raise ValueError("Regex may only be constructed with a string or a compiled RE object") self.name = _ustr(self) self.errmsg = "Expected " + self.name self.mayIndexError = False self.mayReturnEmpty = True def parseImpl( self, instring, loc, doActions=True ): result = self.re.match(instring,loc) if not result: raise ParseException(instring, loc, self.errmsg, self) loc = result.end() d = result.groupdict() ret = ParseResults(result.group()) if d: for k in d: ret[k] = d[k] return loc,ret def __str__( self ): try: return super(Regex,self).__str__() except Exception: pass if self.strRepr is None: self.strRepr = "Re:(%s)" % repr(self.pattern) return self.strRepr class QuotedString(Token): r""" Token for matching strings that are delimited by quoting characters. Defined with the following parameters: - quoteChar - string of one or more characters defining the quote delimiting string - escChar - character to escape quotes, typically backslash (default=C{None}) - escQuote - special quote sequence to escape an embedded quote string (such as SQL's "" to escape an embedded ") (default=C{None}) - multiline - boolean indicating whether quotes can span multiple lines (default=C{False}) - unquoteResults - boolean indicating whether the matched text should be unquoted (default=C{True}) - endQuoteChar - string of one or more characters defining the end of the quote delimited string (default=C{None} => same as quoteChar) - convertWhitespaceEscapes - convert escaped whitespace (C{'\t'}, C{'\n'}, etc.) 
to actual whitespace (default=C{True}) Example:: qs = QuotedString('"') print(qs.searchString('lsjdf "This is the quote" sldjf')) complex_qs = QuotedString('{{', endQuoteChar='}}') print(complex_qs.searchString('lsjdf {{This is the "quote"}} sldjf')) sql_qs = QuotedString('"', escQuote='""') print(sql_qs.searchString('lsjdf "This is the quote with ""embedded"" quotes" sldjf')) prints:: [['This is the quote']] [['This is the "quote"']] [['This is the quote with "embedded" quotes']] """ def __init__( self, quoteChar, escChar=None, escQuote=None, multiline=False, unquoteResults=True, endQuoteChar=None, convertWhitespaceEscapes=True): super(QuotedString,self).__init__() # remove white space from quote chars - wont work anyway quoteChar = quoteChar.strip() if not quoteChar: warnings.warn("quoteChar cannot be the empty string",SyntaxWarning,stacklevel=2) raise SyntaxError() if endQuoteChar is None: endQuoteChar = quoteChar else: endQuoteChar = endQuoteChar.strip() if not endQuoteChar: warnings.warn("endQuoteChar cannot be the empty string",SyntaxWarning,stacklevel=2) raise SyntaxError() self.quoteChar = quoteChar self.quoteCharLen = len(quoteChar) self.firstQuoteChar = quoteChar[0] self.endQuoteChar = endQuoteChar self.endQuoteCharLen = len(endQuoteChar) self.escChar = escChar self.escQuote = escQuote self.unquoteResults = unquoteResults self.convertWhitespaceEscapes = convertWhitespaceEscapes if multiline: self.flags = re.MULTILINE | re.DOTALL self.pattern = r'%s(?:[^%s%s]' % \ ( re.escape(self.quoteChar), _escapeRegexRangeChars(self.endQuoteChar[0]), (escChar is not None and _escapeRegexRangeChars(escChar) or '') ) else: self.flags = 0 self.pattern = r'%s(?:[^%s\n\r%s]' % \ ( re.escape(self.quoteChar), _escapeRegexRangeChars(self.endQuoteChar[0]), (escChar is not None and _escapeRegexRangeChars(escChar) or '') ) if len(self.endQuoteChar) > 1: self.pattern += ( '|(?:' + ')|(?:'.join("%s[^%s]" % (re.escape(self.endQuoteChar[:i]), _escapeRegexRangeChars(self.endQuoteChar[i])) for i in range(len(self.endQuoteChar)-1,0,-1)) + ')' ) if escQuote: self.pattern += (r'|(?:%s)' % re.escape(escQuote)) if escChar: self.pattern += (r'|(?:%s.)' % re.escape(escChar)) self.escCharReplacePattern = re.escape(self.escChar)+"(.)" self.pattern += (r')*%s' % re.escape(self.endQuoteChar)) try: self.re = re.compile(self.pattern, self.flags) self.reString = self.pattern except sre_constants.error: warnings.warn("invalid pattern (%s) passed to Regex" % self.pattern, SyntaxWarning, stacklevel=2) raise self.name = _ustr(self) self.errmsg = "Expected " + self.name self.mayIndexError = False self.mayReturnEmpty = True def parseImpl( self, instring, loc, doActions=True ): result = instring[loc] == self.firstQuoteChar and self.re.match(instring,loc) or None if not result: raise ParseException(instring, loc, self.errmsg, self) loc = result.end() ret = result.group() if self.unquoteResults: # strip off quotes ret = ret[self.quoteCharLen:-self.endQuoteCharLen] if isinstance(ret,basestring): # replace escaped whitespace if '\\' in ret and self.convertWhitespaceEscapes: ws_map = { r'\t' : '\t', r'\n' : '\n', r'\f' : '\f', r'\r' : '\r', } for wslit,wschar in ws_map.items(): ret = ret.replace(wslit, wschar) # replace escaped characters if self.escChar: ret = re.sub(self.escCharReplacePattern,"\g<1>",ret) # replace escaped quotes if self.escQuote: ret = ret.replace(self.escQuote, self.endQuoteChar) return loc, ret def __str__( self ): try: return super(QuotedString,self).__str__() except Exception: pass if self.strRepr is None: 
self.strRepr = "quoted string, starting with %s ending with %s" % (self.quoteChar, self.endQuoteChar) return self.strRepr class CharsNotIn(Token): """ Token for matching words composed of characters I{not} in a given set (will include whitespace in matched characters if not listed in the provided exclusion set - see example). Defined with string containing all disallowed characters, and an optional minimum, maximum, and/or exact length. The default value for C{min} is 1 (a minimum value < 1 is not valid); the default values for C{max} and C{exact} are 0, meaning no maximum or exact length restriction. Example:: # define a comma-separated-value as anything that is not a ',' csv_value = CharsNotIn(',') print(delimitedList(csv_value).parseString("dkls,lsdkjf,s12 34,@!#,213")) prints:: ['dkls', 'lsdkjf', 's12 34', '@!#', '213'] """ def __init__( self, notChars, min=1, max=0, exact=0 ): super(CharsNotIn,self).__init__() self.skipWhitespace = False self.notChars = notChars if min < 1: raise ValueError("cannot specify a minimum length < 1; use Optional(CharsNotIn()) if zero-length char group is permitted") self.minLen = min if max > 0: self.maxLen = max else: self.maxLen = _MAX_INT if exact > 0: self.maxLen = exact self.minLen = exact self.name = _ustr(self) self.errmsg = "Expected " + self.name self.mayReturnEmpty = ( self.minLen == 0 ) self.mayIndexError = False def parseImpl( self, instring, loc, doActions=True ): if instring[loc] in self.notChars: raise ParseException(instring, loc, self.errmsg, self) start = loc loc += 1 notchars = self.notChars maxlen = min( start+self.maxLen, len(instring) ) while loc < maxlen and \ (instring[loc] not in notchars): loc += 1 if loc - start < self.minLen: raise ParseException(instring, loc, self.errmsg, self) return loc, instring[start:loc] def __str__( self ): try: return super(CharsNotIn, self).__str__() except Exception: pass if self.strRepr is None: if len(self.notChars) > 4: self.strRepr = "!W:(%s...)" % self.notChars[:4] else: self.strRepr = "!W:(%s)" % self.notChars return self.strRepr class White(Token): """ Special matching class for matching whitespace. Normally, whitespace is ignored by pyparsing grammars. This class is included when some whitespace structures are significant. Define with a string containing the whitespace characters to be matched; default is C{" \\t\\r\\n"}. Also takes optional C{min}, C{max}, and C{exact} arguments, as defined for the C{L{Word}} class. 
""" whiteStrs = { " " : "", "\t": "", "\n": "", "\r": "", "\f": "", } def __init__(self, ws=" \t\r\n", min=1, max=0, exact=0): super(White,self).__init__() self.matchWhite = ws self.setWhitespaceChars( "".join(c for c in self.whiteChars if c not in self.matchWhite) ) #~ self.leaveWhitespace() self.name = ("".join(White.whiteStrs[c] for c in self.matchWhite)) self.mayReturnEmpty = True self.errmsg = "Expected " + self.name self.minLen = min if max > 0: self.maxLen = max else: self.maxLen = _MAX_INT if exact > 0: self.maxLen = exact self.minLen = exact def parseImpl( self, instring, loc, doActions=True ): if not(instring[ loc ] in self.matchWhite): raise ParseException(instring, loc, self.errmsg, self) start = loc loc += 1 maxloc = start + self.maxLen maxloc = min( maxloc, len(instring) ) while loc < maxloc and instring[loc] in self.matchWhite: loc += 1 if loc - start < self.minLen: raise ParseException(instring, loc, self.errmsg, self) return loc, instring[start:loc] class _PositionToken(Token): def __init__( self ): super(_PositionToken,self).__init__() self.name=self.__class__.__name__ self.mayReturnEmpty = True self.mayIndexError = False class GoToColumn(_PositionToken): """ Token to advance to a specific column of input text; useful for tabular report scraping. """ def __init__( self, colno ): super(GoToColumn,self).__init__() self.col = colno def preParse( self, instring, loc ): if col(loc,instring) != self.col: instrlen = len(instring) if self.ignoreExprs: loc = self._skipIgnorables( instring, loc ) while loc < instrlen and instring[loc].isspace() and col( loc, instring ) != self.col : loc += 1 return loc def parseImpl( self, instring, loc, doActions=True ): thiscol = col( loc, instring ) if thiscol > self.col: raise ParseException( instring, loc, "Text not in expected column", self ) newloc = loc + self.col - thiscol ret = instring[ loc: newloc ] return newloc, ret class LineStart(_PositionToken): """ Matches if current position is at the beginning of a line within the parse string Example:: test = '''\ AAA this line AAA and this line AAA but not this one B AAA and definitely not this one ''' for t in (LineStart() + 'AAA' + restOfLine).searchString(test): print(t) Prints:: ['AAA', ' this line'] ['AAA', ' and this line'] """ def __init__( self ): super(LineStart,self).__init__() self.errmsg = "Expected start of line" def parseImpl( self, instring, loc, doActions=True ): if col(loc, instring) == 1: return loc, [] raise ParseException(instring, loc, self.errmsg, self) class LineEnd(_PositionToken): """ Matches if current position is at the end of a line within the parse string """ def __init__( self ): super(LineEnd,self).__init__() self.setWhitespaceChars( ParserElement.DEFAULT_WHITE_CHARS.replace("\n","") ) self.errmsg = "Expected end of line" def parseImpl( self, instring, loc, doActions=True ): if loc len(instring): return loc, [] else: raise ParseException(instring, loc, self.errmsg, self) class WordStart(_PositionToken): """ Matches if the current position is at the beginning of a Word, and is not preceded by any character in a given set of C{wordChars} (default=C{printables}). To emulate the C{\b} behavior of regular expressions, use C{WordStart(alphanums)}. C{WordStart} will also match at the beginning of the string being parsed, or at the beginning of a line. 
""" def __init__(self, wordChars = printables): super(WordStart,self).__init__() self.wordChars = set(wordChars) self.errmsg = "Not at the start of a word" def parseImpl(self, instring, loc, doActions=True ): if loc != 0: if (instring[loc-1] in self.wordChars or instring[loc] not in self.wordChars): raise ParseException(instring, loc, self.errmsg, self) return loc, [] class WordEnd(_PositionToken): """ Matches if the current position is at the end of a Word, and is not followed by any character in a given set of C{wordChars} (default=C{printables}). To emulate the C{\b} behavior of regular expressions, use C{WordEnd(alphanums)}. C{WordEnd} will also match at the end of the string being parsed, or at the end of a line. """ def __init__(self, wordChars = printables): super(WordEnd,self).__init__() self.wordChars = set(wordChars) self.skipWhitespace = False self.errmsg = "Not at the end of a word" def parseImpl(self, instring, loc, doActions=True ): instrlen = len(instring) if instrlen>0 and loc maxExcLoc: maxException = err maxExcLoc = err.loc except IndexError: if len(instring) > maxExcLoc: maxException = ParseException(instring,len(instring),e.errmsg,self) maxExcLoc = len(instring) else: # save match among all matches, to retry longest to shortest matches.append((loc2, e)) if matches: matches.sort(key=lambda x: -x[0]) for _,e in matches: try: return e._parse( instring, loc, doActions ) except ParseException as err: err.__traceback__ = None if err.loc > maxExcLoc: maxException = err maxExcLoc = err.loc if maxException is not None: maxException.msg = self.errmsg raise maxException else: raise ParseException(instring, loc, "no defined alternatives to match", self) def __ixor__(self, other ): if isinstance( other, basestring ): other = ParserElement._literalStringClass( other ) return self.append( other ) #Or( [ self, other ] ) def __str__( self ): if hasattr(self,"name"): return self.name if self.strRepr is None: self.strRepr = "{" + " ^ ".join(_ustr(e) for e in self.exprs) + "}" return self.strRepr def checkRecursion( self, parseElementList ): subRecCheckList = parseElementList[:] + [ self ] for e in self.exprs: e.checkRecursion( subRecCheckList ) class MatchFirst(ParseExpression): """ Requires that at least one C{ParseExpression} is found. If two expressions match, the first one listed is the one that will match. May be constructed using the C{'|'} operator. Example:: # construct MatchFirst using '|' operator # watch the order of expressions to match number = Word(nums) | Combine(Word(nums) + '.' + Word(nums)) print(number.searchString("123 3.1416 789")) # Fail! -> [['123'], ['3'], ['1416'], ['789']] # put more selective expression first number = Combine(Word(nums) + '.' 
+ Word(nums)) | Word(nums) print(number.searchString("123 3.1416 789")) # Better -> [['123'], ['3.1416'], ['789']] """ def __init__( self, exprs, savelist = False ): super(MatchFirst,self).__init__(exprs, savelist) if self.exprs: self.mayReturnEmpty = any(e.mayReturnEmpty for e in self.exprs) else: self.mayReturnEmpty = True def parseImpl( self, instring, loc, doActions=True ): maxExcLoc = -1 maxException = None for e in self.exprs: try: ret = e._parse( instring, loc, doActions ) return ret except ParseException as err: if err.loc > maxExcLoc: maxException = err maxExcLoc = err.loc except IndexError: if len(instring) > maxExcLoc: maxException = ParseException(instring,len(instring),e.errmsg,self) maxExcLoc = len(instring) # only got here if no expression matched, raise exception for match that made it the furthest else: if maxException is not None: maxException.msg = self.errmsg raise maxException else: raise ParseException(instring, loc, "no defined alternatives to match", self) def __ior__(self, other ): if isinstance( other, basestring ): other = ParserElement._literalStringClass( other ) return self.append( other ) #MatchFirst( [ self, other ] ) def __str__( self ): if hasattr(self,"name"): return self.name if self.strRepr is None: self.strRepr = "{" + " | ".join(_ustr(e) for e in self.exprs) + "}" return self.strRepr def checkRecursion( self, parseElementList ): subRecCheckList = parseElementList[:] + [ self ] for e in self.exprs: e.checkRecursion( subRecCheckList ) class Each(ParseExpression): """ Requires all given C{ParseExpression}s to be found, but in any order. Expressions may be separated by whitespace. May be constructed using the C{'&'} operator. Example:: color = oneOf("RED ORANGE YELLOW GREEN BLUE PURPLE BLACK WHITE BROWN") shape_type = oneOf("SQUARE CIRCLE TRIANGLE STAR HEXAGON OCTAGON") integer = Word(nums) shape_attr = "shape:" + shape_type("shape") posn_attr = "posn:" + Group(integer("x") + ',' + integer("y"))("posn") color_attr = "color:" + color("color") size_attr = "size:" + integer("size") # use Each (using operator '&') to accept attributes in any order # (shape and posn are required, color and size are optional) shape_spec = shape_attr & posn_attr & Optional(color_attr) & Optional(size_attr) shape_spec.runTests(''' shape: SQUARE color: BLACK posn: 100, 120 shape: CIRCLE size: 50 color: BLUE posn: 50,80 color:GREEN size:20 shape:TRIANGLE posn:20,40 ''' ) prints:: shape: SQUARE color: BLACK posn: 100, 120 ['shape:', 'SQUARE', 'color:', 'BLACK', 'posn:', ['100', ',', '120']] - color: BLACK - posn: ['100', ',', '120'] - x: 100 - y: 120 - shape: SQUARE shape: CIRCLE size: 50 color: BLUE posn: 50,80 ['shape:', 'CIRCLE', 'size:', '50', 'color:', 'BLUE', 'posn:', ['50', ',', '80']] - color: BLUE - posn: ['50', ',', '80'] - x: 50 - y: 80 - shape: CIRCLE - size: 50 color: GREEN size: 20 shape: TRIANGLE posn: 20,40 ['color:', 'GREEN', 'size:', '20', 'shape:', 'TRIANGLE', 'posn:', ['20', ',', '40']] - color: GREEN - posn: ['20', ',', '40'] - x: 20 - y: 40 - shape: TRIANGLE - size: 20 """ def __init__( self, exprs, savelist = True ): super(Each,self).__init__(exprs, savelist) self.mayReturnEmpty = all(e.mayReturnEmpty for e in self.exprs) self.skipWhitespace = True self.initExprGroups = True def parseImpl( self, instring, loc, doActions=True ): if self.initExprGroups: self.opt1map = dict((id(e.expr),e) for e in self.exprs if isinstance(e,Optional)) opt1 = [ e.expr for e in self.exprs if isinstance(e,Optional) ] opt2 = [ e for e in self.exprs if e.mayReturnEmpty and not 
isinstance(e,Optional)] self.optionals = opt1 + opt2 self.multioptionals = [ e.expr for e in self.exprs if isinstance(e,ZeroOrMore) ] self.multirequired = [ e.expr for e in self.exprs if isinstance(e,OneOrMore) ] self.required = [ e for e in self.exprs if not isinstance(e,(Optional,ZeroOrMore,OneOrMore)) ] self.required += self.multirequired self.initExprGroups = False tmpLoc = loc tmpReqd = self.required[:] tmpOpt = self.optionals[:] matchOrder = [] keepMatching = True while keepMatching: tmpExprs = tmpReqd + tmpOpt + self.multioptionals + self.multirequired failed = [] for e in tmpExprs: try: tmpLoc = e.tryParse( instring, tmpLoc ) except ParseException: failed.append(e) else: matchOrder.append(self.opt1map.get(id(e),e)) if e in tmpReqd: tmpReqd.remove(e) elif e in tmpOpt: tmpOpt.remove(e) if len(failed) == len(tmpExprs): keepMatching = False if tmpReqd: missing = ", ".join(_ustr(e) for e in tmpReqd) raise ParseException(instring,loc,"Missing one or more required elements (%s)" % missing ) # add any unmatched Optionals, in case they have default values defined matchOrder += [e for e in self.exprs if isinstance(e,Optional) and e.expr in tmpOpt] resultlist = [] for e in matchOrder: loc,results = e._parse(instring,loc,doActions) resultlist.append(results) finalResults = sum(resultlist, ParseResults([])) return loc, finalResults def __str__( self ): if hasattr(self,"name"): return self.name if self.strRepr is None: self.strRepr = "{" + " & ".join(_ustr(e) for e in self.exprs) + "}" return self.strRepr def checkRecursion( self, parseElementList ): subRecCheckList = parseElementList[:] + [ self ] for e in self.exprs: e.checkRecursion( subRecCheckList ) class ParseElementEnhance(ParserElement): """ Abstract subclass of C{ParserElement}, for combining and post-processing parsed tokens. 
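# --- Sketch of Each ('&' operator), simplified from the docstring above:
# --- all required expressions must appear, in any order.
from pyparsing import Word, nums, oneOf, Group, Optional

integer    = Word(nums)
shape_attr = "shape:" + oneOf("SQUARE CIRCLE TRIANGLE")("shape")
posn_attr  = "posn:" + Group(integer("x") + ',' + integer("y"))("posn")
color_attr = "color:" + oneOf("RED GREEN BLUE BLACK")("color")

shape_spec = shape_attr & posn_attr & Optional(color_attr)
print(shape_spec.parseString("color: BLACK posn: 100, 120 shape: SQUARE").dump())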
""" def __init__( self, expr, savelist=False ): super(ParseElementEnhance,self).__init__(savelist) if isinstance( expr, basestring ): if issubclass(ParserElement._literalStringClass, Token): expr = ParserElement._literalStringClass(expr) else: expr = ParserElement._literalStringClass(Literal(expr)) self.expr = expr self.strRepr = None if expr is not None: self.mayIndexError = expr.mayIndexError self.mayReturnEmpty = expr.mayReturnEmpty self.setWhitespaceChars( expr.whiteChars ) self.skipWhitespace = expr.skipWhitespace self.saveAsList = expr.saveAsList self.callPreparse = expr.callPreparse self.ignoreExprs.extend(expr.ignoreExprs) def parseImpl( self, instring, loc, doActions=True ): if self.expr is not None: return self.expr._parse( instring, loc, doActions, callPreParse=False ) else: raise ParseException("",loc,self.errmsg,self) def leaveWhitespace( self ): self.skipWhitespace = False self.expr = self.expr.copy() if self.expr is not None: self.expr.leaveWhitespace() return self def ignore( self, other ): if isinstance( other, Suppress ): if other not in self.ignoreExprs: super( ParseElementEnhance, self).ignore( other ) if self.expr is not None: self.expr.ignore( self.ignoreExprs[-1] ) else: super( ParseElementEnhance, self).ignore( other ) if self.expr is not None: self.expr.ignore( self.ignoreExprs[-1] ) return self def streamline( self ): super(ParseElementEnhance,self).streamline() if self.expr is not None: self.expr.streamline() return self def checkRecursion( self, parseElementList ): if self in parseElementList: raise RecursiveGrammarException( parseElementList+[self] ) subRecCheckList = parseElementList[:] + [ self ] if self.expr is not None: self.expr.checkRecursion( subRecCheckList ) def validate( self, validateTrace=[] ): tmp = validateTrace[:]+[self] if self.expr is not None: self.expr.validate(tmp) self.checkRecursion( [] ) def __str__( self ): try: return super(ParseElementEnhance,self).__str__() except Exception: pass if self.strRepr is None and self.expr is not None: self.strRepr = "%s:(%s)" % ( self.__class__.__name__, _ustr(self.expr) ) return self.strRepr class FollowedBy(ParseElementEnhance): """ Lookahead matching of the given parse expression. C{FollowedBy} does I{not} advance the parsing position within the input string, it only verifies that the specified parse expression matches at the current position. C{FollowedBy} always returns a null token list. Example:: # use FollowedBy to match a label only if it is followed by a ':' data_word = Word(alphas) label = data_word + FollowedBy(':') attr_expr = Group(label + Suppress(':') + OneOrMore(data_word, stopOn=label).setParseAction(' '.join)) OneOrMore(attr_expr).parseString("shape: SQUARE color: BLACK posn: upper left").pprint() prints:: [['shape', 'SQUARE'], ['color', 'BLACK'], ['posn', 'upper left']] """ def __init__( self, expr ): super(FollowedBy,self).__init__(expr) self.mayReturnEmpty = True def parseImpl( self, instring, loc, doActions=True ): self.expr.tryParse( instring, loc ) return loc, [] class NotAny(ParseElementEnhance): """ Lookahead to disallow matching with the given parse expression. C{NotAny} does I{not} advance the parsing position within the input string, it only verifies that the specified parse expression does I{not} match at the current position. Also, C{NotAny} does I{not} skip over leading whitespace. C{NotAny} always returns a null token list. May be constructed using the '~' operator. 
Example:: """ def __init__( self, expr ): super(NotAny,self).__init__(expr) #~ self.leaveWhitespace() self.skipWhitespace = False # do NOT use self.leaveWhitespace(), don't want to propagate to exprs self.mayReturnEmpty = True self.errmsg = "Found unwanted token, "+_ustr(self.expr) def parseImpl( self, instring, loc, doActions=True ): if self.expr.canParseNext(instring, loc): raise ParseException(instring, loc, self.errmsg, self) return loc, [] def __str__( self ): if hasattr(self,"name"): return self.name if self.strRepr is None: self.strRepr = "~{" + _ustr(self.expr) + "}" return self.strRepr class _MultipleMatch(ParseElementEnhance): def __init__( self, expr, stopOn=None): super(_MultipleMatch, self).__init__(expr) self.saveAsList = True ender = stopOn if isinstance(ender, basestring): ender = ParserElement._literalStringClass(ender) self.not_ender = ~ender if ender is not None else None def parseImpl( self, instring, loc, doActions=True ): self_expr_parse = self.expr._parse self_skip_ignorables = self._skipIgnorables check_ender = self.not_ender is not None if check_ender: try_not_ender = self.not_ender.tryParse # must be at least one (but first see if we are the stopOn sentinel; # if so, fail) if check_ender: try_not_ender(instring, loc) loc, tokens = self_expr_parse( instring, loc, doActions, callPreParse=False ) try: hasIgnoreExprs = (not not self.ignoreExprs) while 1: if check_ender: try_not_ender(instring, loc) if hasIgnoreExprs: preloc = self_skip_ignorables( instring, loc ) else: preloc = loc loc, tmptokens = self_expr_parse( instring, preloc, doActions ) if tmptokens or tmptokens.haskeys(): tokens += tmptokens except (ParseException,IndexError): pass return loc, tokens class OneOrMore(_MultipleMatch): """ Repetition of one or more of the given expression. Parameters: - expr - expression that must match one or more times - stopOn - (default=C{None}) - expression for a terminating sentinel (only required if the sentinel would ordinarily match the repetition expression) Example:: data_word = Word(alphas) label = data_word + FollowedBy(':') attr_expr = Group(label + Suppress(':') + OneOrMore(data_word).setParseAction(' '.join)) text = "shape: SQUARE posn: upper left color: BLACK" OneOrMore(attr_expr).parseString(text).pprint() # Fail! read 'color' as data instead of next label -> [['shape', 'SQUARE color']] # use stopOn attribute for OneOrMore to avoid reading label string as part of the data attr_expr = Group(label + Suppress(':') + OneOrMore(data_word, stopOn=label).setParseAction(' '.join)) OneOrMore(attr_expr).parseString(text).pprint() # Better -> [['shape', 'SQUARE'], ['posn', 'upper left'], ['color', 'BLACK']] # could also be written as (attr_expr * (1,)).parseString(text).pprint() """ def __str__( self ): if hasattr(self,"name"): return self.name if self.strRepr is None: self.strRepr = "{" + _ustr(self.expr) + "}..." return self.strRepr class ZeroOrMore(_MultipleMatch): """ Optional repetition of zero or more of the given expression. 
Parameters: - expr - expression that must match zero or more times - stopOn - (default=C{None}) - expression for a terminating sentinel (only required if the sentinel would ordinarily match the repetition expression) Example: similar to L{OneOrMore} """ def __init__( self, expr, stopOn=None): super(ZeroOrMore,self).__init__(expr, stopOn=stopOn) self.mayReturnEmpty = True def parseImpl( self, instring, loc, doActions=True ): try: return super(ZeroOrMore, self).parseImpl(instring, loc, doActions) except (ParseException,IndexError): return loc, [] def __str__( self ): if hasattr(self,"name"): return self.name if self.strRepr is None: self.strRepr = "[" + _ustr(self.expr) + "]..." return self.strRepr class _NullToken(object): def __bool__(self): return False __nonzero__ = __bool__ def __str__(self): return "" _optionalNotMatched = _NullToken() class Optional(ParseElementEnhance): """ Optional matching of the given expression. Parameters: - expr - expression that must match zero or more times - default (optional) - value to be returned if the optional expression is not found. Example:: # US postal code can be a 5-digit zip, plus optional 4-digit qualifier zip = Combine(Word(nums, exact=5) + Optional('-' + Word(nums, exact=4))) zip.runTests(''' # traditional ZIP code 12345 # ZIP+4 form 12101-0001 # invalid ZIP 98765- ''') prints:: # traditional ZIP code 12345 ['12345'] # ZIP+4 form 12101-0001 ['12101-0001'] # invalid ZIP 98765- ^ FAIL: Expected end of text (at char 5), (line:1, col:6) """ def __init__( self, expr, default=_optionalNotMatched ): super(Optional,self).__init__( expr, savelist=False ) self.saveAsList = self.expr.saveAsList self.defaultValue = default self.mayReturnEmpty = True def parseImpl( self, instring, loc, doActions=True ): try: loc, tokens = self.expr._parse( instring, loc, doActions, callPreParse=False ) except (ParseException,IndexError): if self.defaultValue is not _optionalNotMatched: if self.expr.resultsName: tokens = ParseResults([ self.defaultValue ]) tokens[self.expr.resultsName] = self.defaultValue else: tokens = [ self.defaultValue ] else: tokens = [] return loc, tokens def __str__( self ): if hasattr(self,"name"): return self.name if self.strRepr is None: self.strRepr = "[" + _ustr(self.expr) + "]" return self.strRepr class SkipTo(ParseElementEnhance): """ Token for skipping over all undefined text until the matched expression is found. Parameters: - expr - target expression marking the end of the data to be skipped - include - (default=C{False}) if True, the target expression is also parsed (the skipped text and target expression are returned as a 2-element list). 
- ignore - (default=C{None}) used to define grammars (typically quoted strings and comments) that might contain false matches to the target expression - failOn - (default=C{None}) define expressions that are not allowed to be included in the skipped test; if found before the target expression is found, the SkipTo is not a match Example:: report = ''' Outstanding Issues Report - 1 Jan 2000 # | Severity | Description | Days Open -----+----------+-------------------------------------------+----------- 101 | Critical | Intermittent system crash | 6 94 | Cosmetic | Spelling error on Login ('log|n') | 14 79 | Minor | System slow when running too many reports | 47 ''' integer = Word(nums) SEP = Suppress('|') # use SkipTo to simply match everything up until the next SEP # - ignore quoted strings, so that a '|' character inside a quoted string does not match # - parse action will call token.strip() for each matched token, i.e., the description body string_data = SkipTo(SEP, ignore=quotedString) string_data.setParseAction(tokenMap(str.strip)) ticket_expr = (integer("issue_num") + SEP + string_data("sev") + SEP + string_data("desc") + SEP + integer("days_open")) for tkt in ticket_expr.searchString(report): print tkt.dump() prints:: ['101', 'Critical', 'Intermittent system crash', '6'] - days_open: 6 - desc: Intermittent system crash - issue_num: 101 - sev: Critical ['94', 'Cosmetic', "Spelling error on Login ('log|n')", '14'] - days_open: 14 - desc: Spelling error on Login ('log|n') - issue_num: 94 - sev: Cosmetic ['79', 'Minor', 'System slow when running too many reports', '47'] - days_open: 47 - desc: System slow when running too many reports - issue_num: 79 - sev: Minor """ def __init__( self, other, include=False, ignore=None, failOn=None ): super( SkipTo, self ).__init__( other ) self.ignoreExpr = ignore self.mayReturnEmpty = True self.mayIndexError = False self.includeMatch = include self.asList = False if isinstance(failOn, basestring): self.failOn = ParserElement._literalStringClass(failOn) else: self.failOn = failOn self.errmsg = "No match found for "+_ustr(self.expr) def parseImpl( self, instring, loc, doActions=True ): startloc = loc instrlen = len(instring) expr = self.expr expr_parse = self.expr._parse self_failOn_canParseNext = self.failOn.canParseNext if self.failOn is not None else None self_ignoreExpr_tryParse = self.ignoreExpr.tryParse if self.ignoreExpr is not None else None tmploc = loc while tmploc <= instrlen: if self_failOn_canParseNext is not None: # break if failOn expression matches if self_failOn_canParseNext(instring, tmploc): break if self_ignoreExpr_tryParse is not None: # advance past ignore expressions while 1: try: tmploc = self_ignoreExpr_tryParse(instring, tmploc) except ParseBaseException: break try: expr_parse(instring, tmploc, doActions=False, callPreParse=False) except (ParseException, IndexError): # no match, advance loc in string tmploc += 1 else: # matched skipto expr, done break else: # ran off the end of the input string without matching skipto expr, fail raise ParseException(instring, loc, self.errmsg, self) # build up return values loc = tmploc skiptext = instring[startloc:loc] skipresult = ParseResults(skiptext) if self.includeMatch: loc, mat = expr_parse(instring,loc,doActions,callPreParse=False) skipresult += mat return loc, skipresult class Forward(ParseElementEnhance): """ Forward declaration of an expression to be defined later - used for recursive grammars, such as algebraic infix notation. 
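# --- Sketch of SkipTo, condensed from the docstring above: skip arbitrary text
# --- up to a separator, ignoring quoted strings that could hide the separator.
from pyparsing import Word, nums, Suppress, SkipTo, quotedString, tokenMap

integer = Word(nums)
SEP = Suppress('|')
string_data = SkipTo(SEP, ignore=quotedString).setParseAction(tokenMap(str.strip))
ticket_expr = (integer("issue_num") + SEP + string_data("sev")
               + SEP + string_data("desc") + SEP + integer("days_open"))

print(ticket_expr.parseString(
    "101 | Critical | Intermittent system crash | 6").dump())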
When the expression is known, it is assigned to the C{Forward} variable using the '<<' operator. Note: take care when assigning to C{Forward} not to overlook precedence of operators. Specifically, '|' has a lower precedence than '<<', so that:: fwdExpr << a | b | c will actually be evaluated as:: (fwdExpr << a) | b | c thereby leaving b and c out as parseable alternatives. It is recommended that you explicitly group the values inserted into the C{Forward}:: fwdExpr << (a | b | c) Converting to use the '<<=' operator instead will avoid this problem. See L{ParseResults.pprint} for an example of a recursive parser created using C{Forward}. """ def __init__( self, other=None ): super(Forward,self).__init__( other, savelist=False ) def __lshift__( self, other ): if isinstance( other, basestring ): other = ParserElement._literalStringClass(other) self.expr = other self.strRepr = None self.mayIndexError = self.expr.mayIndexError self.mayReturnEmpty = self.expr.mayReturnEmpty self.setWhitespaceChars( self.expr.whiteChars ) self.skipWhitespace = self.expr.skipWhitespace self.saveAsList = self.expr.saveAsList self.ignoreExprs.extend(self.expr.ignoreExprs) return self def __ilshift__(self, other): return self << other def leaveWhitespace( self ): self.skipWhitespace = False return self def streamline( self ): if not self.streamlined: self.streamlined = True if self.expr is not None: self.expr.streamline() return self def validate( self, validateTrace=[] ): if self not in validateTrace: tmp = validateTrace[:]+[self] if self.expr is not None: self.expr.validate(tmp) self.checkRecursion([]) def __str__( self ): if hasattr(self,"name"): return self.name return self.__class__.__name__ + ": ..." # stubbed out for now - creates awful memory and perf issues self._revertClass = self.__class__ self.__class__ = _ForwardNoRecurse try: if self.expr is not None: retString = _ustr(self.expr) else: retString = "None" finally: self.__class__ = self._revertClass return self.__class__.__name__ + ": " + retString def copy(self): if self.expr is not None: return super(Forward,self).copy() else: ret = Forward() ret <<= self return ret class _ForwardNoRecurse(Forward): def __str__( self ): return "..." class TokenConverter(ParseElementEnhance): """ Abstract subclass of C{ParseExpression}, for converting parsed results. """ def __init__( self, expr, savelist=False ): super(TokenConverter,self).__init__( expr )#, savelist ) self.saveAsList = False class Combine(TokenConverter): """ Converter to concatenate all matching tokens to a single string. By default, the matching patterns must also be contiguous in the input string; this can be disabled by specifying C{'adjacent=False'} in the constructor. Example:: real = Word(nums) + '.' + Word(nums) print(real.parseString('3.1416')) # -> ['3', '.', '1416'] # will also erroneously match the following print(real.parseString('3. 1416')) # -> ['3', '.', '1416'] real = Combine(Word(nums) + '.' + Word(nums)) print(real.parseString('3.1416')) # -> ['3.1416'] # no match when there are internal spaces print(real.parseString('3. 1416')) # -> Exception: Expected W:(0123...) 
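# --- Illustrative usage sketch (editor addition, not part of the original pyparsing
# --- source): the Combine docstring notes that contiguity checking can be disabled
# --- with adjacent=False, but gives no example.  Assumes pyparsing is importable.
from pyparsing import Combine, Word, nums

iso_date = Combine(Word(nums, exact=4) + '-' + Word(nums, exact=2) + '-' + Word(nums, exact=2))
print(iso_date.parseString("1999-12-31"))           # -> ['1999-12-31']

loose = Combine(Word(nums) + Word(nums), adjacent=False, joinString=" ")
print(loose.parseString("12 34"))                   # -> ['12 34']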
""" def __init__( self, expr, joinString="", adjacent=True ): super(Combine,self).__init__( expr ) # suppress whitespace-stripping in contained parse expressions, but re-enable it on the Combine itself if adjacent: self.leaveWhitespace() self.adjacent = adjacent self.skipWhitespace = True self.joinString = joinString self.callPreparse = True def ignore( self, other ): if self.adjacent: ParserElement.ignore(self, other) else: super( Combine, self).ignore( other ) return self def postParse( self, instring, loc, tokenlist ): retToks = tokenlist.copy() del retToks[:] retToks += ParseResults([ "".join(tokenlist._asStringList(self.joinString)) ], modal=self.modalResults) if self.resultsName and retToks.haskeys(): return [ retToks ] else: return retToks class Group(TokenConverter): """ Converter to return the matched tokens as a list - useful for returning tokens of C{L{ZeroOrMore}} and C{L{OneOrMore}} expressions. Example:: ident = Word(alphas) num = Word(nums) term = ident | num func = ident + Optional(delimitedList(term)) print(func.parseString("fn a,b,100")) # -> ['fn', 'a', 'b', '100'] func = ident + Group(Optional(delimitedList(term))) print(func.parseString("fn a,b,100")) # -> ['fn', ['a', 'b', '100']] """ def __init__( self, expr ): super(Group,self).__init__( expr ) self.saveAsList = True def postParse( self, instring, loc, tokenlist ): return [ tokenlist ] class Dict(TokenConverter): """ Converter to return a repetitive expression as a list, but also as a dictionary. Each element can also be referenced using the first token in the expression as its key. Useful for tabular report scraping when the first column can be used as a item key. Example:: data_word = Word(alphas) label = data_word + FollowedBy(':') attr_expr = Group(label + Suppress(':') + OneOrMore(data_word).setParseAction(' '.join)) text = "shape: SQUARE posn: upper left color: light blue texture: burlap" attr_expr = (label + Suppress(':') + OneOrMore(data_word, stopOn=label).setParseAction(' '.join)) # print attributes as plain groups print(OneOrMore(attr_expr).parseString(text).dump()) # instead of OneOrMore(expr), parse using Dict(OneOrMore(Group(expr))) - Dict will auto-assign names result = Dict(OneOrMore(Group(attr_expr))).parseString(text) print(result.dump()) # access named fields as dict entries, or output as dict print(result['shape']) print(result.asDict()) prints:: ['shape', 'SQUARE', 'posn', 'upper left', 'color', 'light blue', 'texture', 'burlap'] [['shape', 'SQUARE'], ['posn', 'upper left'], ['color', 'light blue'], ['texture', 'burlap']] - color: light blue - posn: upper left - shape: SQUARE - texture: burlap SQUARE {'color': 'light blue', 'posn': 'upper left', 'texture': 'burlap', 'shape': 'SQUARE'} See more examples at L{ParseResults} of accessing fields by results name. 
""" def __init__( self, expr ): super(Dict,self).__init__( expr ) self.saveAsList = True def postParse( self, instring, loc, tokenlist ): for i,tok in enumerate(tokenlist): if len(tok) == 0: continue ikey = tok[0] if isinstance(ikey,int): ikey = _ustr(tok[0]).strip() if len(tok)==1: tokenlist[ikey] = _ParseResultsWithOffset("",i) elif len(tok)==2 and not isinstance(tok[1],ParseResults): tokenlist[ikey] = _ParseResultsWithOffset(tok[1],i) else: dictvalue = tok.copy() #ParseResults(i) del dictvalue[0] if len(dictvalue)!= 1 or (isinstance(dictvalue,ParseResults) and dictvalue.haskeys()): tokenlist[ikey] = _ParseResultsWithOffset(dictvalue,i) else: tokenlist[ikey] = _ParseResultsWithOffset(dictvalue[0],i) if self.resultsName: return [ tokenlist ] else: return tokenlist class Suppress(TokenConverter): """ Converter for ignoring the results of a parsed expression. Example:: source = "a, b, c,d" wd = Word(alphas) wd_list1 = wd + ZeroOrMore(',' + wd) print(wd_list1.parseString(source)) # often, delimiters that are useful during parsing are just in the # way afterward - use Suppress to keep them out of the parsed output wd_list2 = wd + ZeroOrMore(Suppress(',') + wd) print(wd_list2.parseString(source)) prints:: ['a', ',', 'b', ',', 'c', ',', 'd'] ['a', 'b', 'c', 'd'] (See also L{delimitedList}.) """ def postParse( self, instring, loc, tokenlist ): return [] def suppress( self ): return self class OnlyOnce(object): """ Wrapper for parse actions, to ensure they are only called once. """ def __init__(self, methodCall): self.callable = _trim_arity(methodCall) self.called = False def __call__(self,s,l,t): if not self.called: results = self.callable(s,l,t) self.called = True return results raise ParseException(s,l,"") def reset(self): self.called = False def traceParseAction(f): """ Decorator for debugging parse actions. When the parse action is called, this decorator will print C{">> entering I{method-name}(line:I{current_source_line}, I{parse_location}, I{matched_tokens})".} When the parse action completes, the decorator will print C{"<<"} followed by the returned value, or any exception that the parse action raised. Example:: wd = Word(alphas) @traceParseAction def remove_duplicate_chars(tokens): return ''.join(sorted(set(''.join(tokens))) wds = OneOrMore(wd).setParseAction(remove_duplicate_chars) print(wds.parseString("slkdjs sld sldd sdlf sdljf")) prints:: >>entering remove_duplicate_chars(line: 'slkdjs sld sldd sdlf sdljf', 0, (['slkdjs', 'sld', 'sldd', 'sdlf', 'sdljf'], {})) <3: thisFunc = paArgs[0].__class__.__name__ + '.' + thisFunc sys.stderr.write( ">>entering %s(line: '%s', %d, %r)\n" % (thisFunc,line(l,s),l,t) ) try: ret = f(*paArgs) except Exception as exc: sys.stderr.write( "< ['aa', 'bb', 'cc'] delimitedList(Word(hexnums), delim=':', combine=True).parseString("AA:BB:CC:DD:EE") # -> ['AA:BB:CC:DD:EE'] """ dlName = _ustr(expr)+" ["+_ustr(delim)+" "+_ustr(expr)+"]..." if combine: return Combine( expr + ZeroOrMore( delim + expr ) ).setName(dlName) else: return ( expr + ZeroOrMore( Suppress( delim ) + expr ) ).setName(dlName) def countedArray( expr, intExpr=None ): """ Helper to define a counted list of expressions. This helper defines a pattern of the form:: integer expr expr expr... where the leading integer tells how many expr expressions follow. The matched tokens returns the array of expr tokens as a list - the leading count token is suppressed. If C{intExpr} is specified, it should be a pyparsing expression that produces an integer value. 
Example:: countedArray(Word(alphas)).parseString('2 ab cd ef') # -> ['ab', 'cd'] # in this parser, the leading integer value is given in binary, # '10' indicating that 2 values are in the array binaryConstant = Word('01').setParseAction(lambda t: int(t[0], 2)) countedArray(Word(alphas), intExpr=binaryConstant).parseString('10 ab cd ef') # -> ['ab', 'cd'] """ arrayExpr = Forward() def countFieldParseAction(s,l,t): n = t[0] arrayExpr << (n and Group(And([expr]*n)) or Group(empty)) return [] if intExpr is None: intExpr = Word(nums).setParseAction(lambda t:int(t[0])) else: intExpr = intExpr.copy() intExpr.setName("arrayLen") intExpr.addParseAction(countFieldParseAction, callDuringTry=True) return ( intExpr + arrayExpr ).setName('(len) ' + _ustr(expr) + '...') def _flatten(L): ret = [] for i in L: if isinstance(i,list): ret.extend(_flatten(i)) else: ret.append(i) return ret def matchPreviousLiteral(expr): """ Helper to define an expression that is indirectly defined from the tokens matched in a previous expression, that is, it looks for a 'repeat' of a previous expression. For example:: first = Word(nums) second = matchPreviousLiteral(first) matchExpr = first + ":" + second will match C{"1:1"}, but not C{"1:2"}. Because this matches a previous literal, will also match the leading C{"1:1"} in C{"1:10"}. If this is not desired, use C{matchPreviousExpr}. Do I{not} use with packrat parsing enabled. """ rep = Forward() def copyTokenToRepeater(s,l,t): if t: if len(t) == 1: rep << t[0] else: # flatten t tokens tflat = _flatten(t.asList()) rep << And(Literal(tt) for tt in tflat) else: rep << Empty() expr.addParseAction(copyTokenToRepeater, callDuringTry=True) rep.setName('(prev) ' + _ustr(expr)) return rep def matchPreviousExpr(expr): """ Helper to define an expression that is indirectly defined from the tokens matched in a previous expression, that is, it looks for a 'repeat' of a previous expression. For example:: first = Word(nums) second = matchPreviousExpr(first) matchExpr = first + ":" + second will match C{"1:1"}, but not C{"1:2"}. Because this matches by expressions, will I{not} match the leading C{"1:1"} in C{"1:10"}; the expressions are evaluated first, and then compared, so C{"1"} is compared with C{"10"}. Do I{not} use with packrat parsing enabled. """ rep = Forward() e2 = expr.copy() rep <<= e2 def copyTokenToRepeater(s,l,t): matchTokens = _flatten(t.asList()) def mustMatchTheseTokens(s,l,t): theseTokens = _flatten(t.asList()) if theseTokens != matchTokens: raise ParseException("",0,"") rep.setParseAction( mustMatchTheseTokens, callDuringTry=True ) expr.addParseAction(copyTokenToRepeater, callDuringTry=True) rep.setName('(prev) ' + _ustr(expr)) return rep def _escapeRegexRangeChars(s): #~ escape these chars: ^-] for c in r"\^-]": s = s.replace(c,_bslash+c) s = s.replace("\n",r"\n") s = s.replace("\t",r"\t") return _ustr(s) def oneOf( strs, caseless=False, useRegex=True ): """ Helper to quickly define a set of alternative Literals, and makes sure to do longest-first testing when there is a conflict, regardless of the input order, but returns a C{L{MatchFirst}} for best performance. 
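# --- Illustrative usage sketches (editor addition, not part of the original pyparsing
# --- source) for two helpers defined above: OnlyOnce (which has no docstring example)
# --- and matchPreviousLiteral (a runnable variant of its docstring sketch).
# --- Assumes pyparsing is importable.
from pyparsing import OnlyOnce, ParseException, Word, alphas, nums, matchPreviousLiteral

# OnlyOnce: the wrapped parse action may fire only once until reset() is called
def announce(tokens):
    print("matched %s" % tokens[0])

wd = Word(alphas).setParseAction(OnlyOnce(announce))
wd.parseString("first")                    # parse action fires
try:
    wd.parseString("second")               # second use raises ParseException
except ParseException:
    print("parse action already used - call reset() to re-arm it")

# matchPreviousLiteral: the second occurrence must repeat the literal text of the first
first = Word(nums)
pair = first + ":" + matchPreviousLiteral(first)
print(pair.parseString("12:12"))           # -> ['12', ':', '12']
# pair.parseString("12:34") raises ParseException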
Parameters: - strs - a string of space-delimited literals, or a collection of string literals - caseless - (default=C{False}) - treat all literals as caseless - useRegex - (default=C{True}) - as an optimization, will generate a Regex object; otherwise, will generate a C{MatchFirst} object (if C{caseless=True}, or if creating a C{Regex} raises an exception) Example:: comp_oper = oneOf("< = > <= >= !=") var = Word(alphas) number = Word(nums) term = var | number comparison_expr = term + comp_oper + term print(comparison_expr.searchString("B = 12 AA=23 B<=AA AA>12")) prints:: [['B', '=', '12'], ['AA', '=', '23'], ['B', '<=', 'AA'], ['AA', '>', '12']] """ if caseless: isequal = ( lambda a,b: a.upper() == b.upper() ) masks = ( lambda a,b: b.upper().startswith(a.upper()) ) parseElementClass = CaselessLiteral else: isequal = ( lambda a,b: a == b ) masks = ( lambda a,b: b.startswith(a) ) parseElementClass = Literal symbols = [] if isinstance(strs,basestring): symbols = strs.split() elif isinstance(strs, collections.Iterable): symbols = list(strs) else: warnings.warn("Invalid argument to oneOf, expected string or iterable", SyntaxWarning, stacklevel=2) if not symbols: return NoMatch() i = 0 while i < len(symbols)-1: cur = symbols[i] for j,other in enumerate(symbols[i+1:]): if ( isequal(other, cur) ): del symbols[i+j+1] break elif ( masks(cur, other) ): del symbols[i+j+1] symbols.insert(i,other) cur = other break else: i += 1 if not caseless and useRegex: #~ print (strs,"->", "|".join( [ _escapeRegexChars(sym) for sym in symbols] )) try: if len(symbols)==len("".join(symbols)): return Regex( "[%s]" % "".join(_escapeRegexRangeChars(sym) for sym in symbols) ).setName(' | '.join(symbols)) else: return Regex( "|".join(re.escape(sym) for sym in symbols) ).setName(' | '.join(symbols)) except Exception: warnings.warn("Exception creating Regex for oneOf, building MatchFirst", SyntaxWarning, stacklevel=2) # last resort, just use MatchFirst return MatchFirst(parseElementClass(sym) for sym in symbols).setName(' | '.join(symbols)) def dictOf( key, value ): """ Helper to easily and clearly define a dictionary by specifying the respective patterns for the key and value. Takes care of defining the C{L{Dict}}, C{L{ZeroOrMore}}, and C{L{Group}} tokens in the proper order. The key pattern can include delimiting markers or punctuation, as long as they are suppressed, thereby leaving the significant key text. The value pattern can include named results, so that the C{Dict} results can include named token fields. Example:: text = "shape: SQUARE posn: upper left color: light blue texture: burlap" attr_expr = (label + Suppress(':') + OneOrMore(data_word, stopOn=label).setParseAction(' '.join)) print(OneOrMore(attr_expr).parseString(text).dump()) attr_label = label attr_value = Suppress(':') + OneOrMore(data_word, stopOn=label).setParseAction(' '.join) # similar to Dict, but simpler call format result = dictOf(attr_label, attr_value).parseString(text) print(result.dump()) print(result['shape']) print(result.shape) # object attribute access works too print(result.asDict()) prints:: [['shape', 'SQUARE'], ['posn', 'upper left'], ['color', 'light blue'], ['texture', 'burlap']] - color: light blue - posn: upper left - shape: SQUARE - texture: burlap SQUARE SQUARE {'color': 'light blue', 'shape': 'SQUARE', 'posn': 'upper left', 'texture': 'burlap'} """ return Dict( ZeroOrMore( Group ( key + value ) ) ) def originalTextFor(expr, asString=True): """ Helper to return the original, untokenized text for a given expression. 
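# --- Illustrative usage sketch (editor addition, not part of the original pyparsing
# --- source): a self-contained variant of the dictOf example above, which reuses
# --- names defined in an earlier docstring.  Assumes pyparsing is importable.
from pyparsing import Word, alphas, alphanums, Suppress, dictOf

key = Word(alphas)
value = Suppress('=') + Word(alphanums)
settings = dictOf(key, value)
result = settings.parseString("shape=SQUARE color=BLUE size=20")
print(result['color'])       # -> 'BLUE'
print(result.asDict())       # -> {'shape': 'SQUARE', 'color': 'BLUE', 'size': '20'} (key order may vary)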
Useful to restore the parsed fields of an HTML start tag into the raw tag text itself, or to revert separate tokens with intervening whitespace back to the original matching input text. By default, returns astring containing the original parsed text. If the optional C{asString} argument is passed as C{False}, then the return value is a C{L{ParseResults}} containing any results names that were originally matched, and a single token containing the original matched text from the input string. So if the expression passed to C{L{originalTextFor}} contains expressions with defined results names, you must set C{asString} to C{False} if you want to preserve those results name values. Example:: src = "this is test bold text normal text " for tag in ("b","i"): opener,closer = makeHTMLTags(tag) patt = originalTextFor(opener + SkipTo(closer) + closer) print(patt.searchString(src)[0]) prints:: [' bold text '] ['text'] """ locMarker = Empty().setParseAction(lambda s,loc,t: loc) endlocMarker = locMarker.copy() endlocMarker.callPreparse = False matchExpr = locMarker("_original_start") + expr + endlocMarker("_original_end") if asString: extractText = lambda s,l,t: s[t._original_start:t._original_end] else: def extractText(s,l,t): t[:] = [s[t.pop('_original_start'):t.pop('_original_end')]] matchExpr.setParseAction(extractText) matchExpr.ignoreExprs = expr.ignoreExprs return matchExpr def ungroup(expr): """ Helper to undo pyparsing's default grouping of And expressions, even if all but one are non-empty. """ return TokenConverter(expr).setParseAction(lambda t:t[0]) def locatedExpr(expr): """ Helper to decorate a returned token with its starting and ending locations in the input string. This helper adds the following results names: - locn_start = location where matched expression begins - locn_end = location where matched expression ends - value = the actual parsed results Be careful if the input text contains C{} characters, you may want to call C{L{ParserElement.parseWithTabs}} Example:: wd = Word(alphas) for match in locatedExpr(wd).searchString("ljsdf123lksdjjf123lkkjj1222"): print(match) prints:: [[0, 'ljsdf', 5]] [[8, 'lksdjjf', 15]] [[18, 'lkkjj', 23]] """ locator = Empty().setParseAction(lambda s,l,t: l) return Group(locator("locn_start") + expr("value") + locator.copy().leaveWhitespace()("locn_end")) # convenience constants for positional expressions empty = Empty().setName("empty") lineStart = LineStart().setName("lineStart") lineEnd = LineEnd().setName("lineEnd") stringStart = StringStart().setName("stringStart") stringEnd = StringEnd().setName("stringEnd") _escapedPunc = Word( _bslash, r"\[]-*.$+^?()~ ", exact=2 ).setParseAction(lambda s,l,t:t[0][1]) _escapedHexChar = Regex(r"\\0?[xX][0-9a-fA-F]+").setParseAction(lambda s,l,t:unichr(int(t[0].lstrip(r'\0x'),16))) _escapedOctChar = Regex(r"\\0[0-7]+").setParseAction(lambda s,l,t:unichr(int(t[0][1:],8))) _singleChar = _escapedPunc | _escapedHexChar | _escapedOctChar | Word(printables, excludeChars=r'\]', exact=1) | Regex(r"\w", re.UNICODE) _charRange = Group(_singleChar + Suppress("-") + _singleChar) _reBracketExpr = Literal("[") + Optional("^").setResultsName("negate") + Group( OneOrMore( _charRange | _singleChar ) ).setResultsName("body") + "]" def srange(s): r""" Helper to easily define string ranges for use in Word construction. 
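# --- Illustrative usage sketch (editor addition, not part of the original pyparsing
# --- source): srange is typically used to build the character set handed to a Word
# --- constructor.  Assumes pyparsing is importable.
from pyparsing import Word, srange

hex_word = Word(srange("[0-9a-fA-F]"))
print(hex_word.parseString("deadBEEF42"))   # -> ['deadBEEF42']
print(srange("[a-c_]"))                     # -> 'abc_'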
Borrows syntax from regexp '[]' string range definitions:: srange("[0-9]") -> "0123456789" srange("[a-z]") -> "abcdefghijklmnopqrstuvwxyz" srange("[a-z$_]") -> "abcdefghijklmnopqrstuvwxyz$_" The input string must be enclosed in []'s, and the returned string is the expanded character set joined into a single string. The values enclosed in the []'s may be: - a single character - an escaped character with a leading backslash (such as C{\-} or C{\]}) - an escaped hex character with a leading C{'\x'} (C{\x21}, which is a C{'!'} character) (C{\0x##} is also supported for backwards compatibility) - an escaped octal character with a leading C{'\0'} (C{\041}, which is a C{'!'} character) - a range of any of the above, separated by a dash (C{'a-z'}, etc.) - any combination of the above (C{'aeiouy'}, C{'a-zA-Z0-9_$'}, etc.) """ _expanded = lambda p: p if not isinstance(p,ParseResults) else ''.join(unichr(c) for c in range(ord(p[0]),ord(p[1])+1)) try: return "".join(_expanded(part) for part in _reBracketExpr.parseString(s).body) except Exception: return "" def matchOnlyAtCol(n): """ Helper method for defining parse actions that require matching at a specific column in the input text. """ def verifyCol(strg,locn,toks): if col(locn,strg) != n: raise ParseException(strg,locn,"matched token not at column %d" % n) return verifyCol def replaceWith(replStr): """ Helper method for common parse actions that simply return a literal value. Especially useful when used with C{L{transformString}()}. Example:: num = Word(nums).setParseAction(lambda toks: int(toks[0])) na = oneOf("N/A NA").setParseAction(replaceWith(math.nan)) term = na | num OneOrMore(term).parseString("324 234 N/A 234") # -> [324, 234, nan, 234] """ return lambda s,l,t: [replStr] def removeQuotes(s,l,t): """ Helper parse action for removing quotation marks from parsed quoted strings. Example:: # by default, quotation marks are included in parsed results quotedString.parseString("'Now is the Winter of our Discontent'") # -> ["'Now is the Winter of our Discontent'"] # use removeQuotes to strip quotation marks from parsed results quotedString.setParseAction(removeQuotes) quotedString.parseString("'Now is the Winter of our Discontent'") # -> ["Now is the Winter of our Discontent"] """ return t[0][1:-1] def tokenMap(func, *args): """ Helper to define a parse action by mapping a function to all elements of a ParseResults list.If any additional args are passed, they are forwarded to the given function as additional arguments after the token, as in C{hex_integer = Word(hexnums).setParseAction(tokenMap(int, 16))}, which will convert the parsed data to an integer using base 16. 
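# --- Illustrative usage sketch (editor addition, not part of the original pyparsing
# --- source): matchOnlyAtCol, defined above, has no docstring example; it builds a
# --- parse action that rejects matches starting in any other column.  Assumes
# --- pyparsing is importable.
from pyparsing import Word, nums, matchOnlyAtCol

value_in_col7 = Word(nums).setParseAction(matchOnlyAtCol(7))
print(value_in_col7.parseString("      42"))   # -> ['42']  (the '4' sits in column 7)
# parsing "42" at column 1 instead raises ParseException("matched token not at column 7")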
Example (compare the last to example in L{ParserElement.transformString}:: hex_ints = OneOrMore(Word(hexnums)).setParseAction(tokenMap(int, 16)) hex_ints.runTests(''' 00 11 22 aa FF 0a 0d 1a ''') upperword = Word(alphas).setParseAction(tokenMap(str.upper)) OneOrMore(upperword).runTests(''' my kingdom for a horse ''') wd = Word(alphas).setParseAction(tokenMap(str.title)) OneOrMore(wd).setParseAction(' '.join).runTests(''' now is the winter of our discontent made glorious summer by this sun of york ''') prints:: 00 11 22 aa FF 0a 0d 1a [0, 17, 34, 170, 255, 10, 13, 26] my kingdom for a horse ['MY', 'KINGDOM', 'FOR', 'A', 'HORSE'] now is the winter of our discontent made glorious summer by this sun of york ['Now Is The Winter Of Our Discontent Made Glorious Summer By This Sun Of York'] """ def pa(s,l,t): return [func(tokn, *args) for tokn in t] try: func_name = getattr(func, '__name__', getattr(func, '__class__').__name__) except Exception: func_name = str(func) pa.__name__ = func_name return pa upcaseTokens = tokenMap(lambda t: _ustr(t).upper()) """(Deprecated) Helper parse action to convert tokens to upper case. Deprecated in favor of L{pyparsing_common.upcaseTokens}""" downcaseTokens = tokenMap(lambda t: _ustr(t).lower()) """(Deprecated) Helper parse action to convert tokens to lower case. Deprecated in favor of L{pyparsing_common.downcaseTokens}""" def _makeTags(tagStr, xml): """Internal helper to construct opening and closing tag expressions, given a tag name""" if isinstance(tagStr,basestring): resname = tagStr tagStr = Keyword(tagStr, caseless=not xml) else: resname = tagStr.name tagAttrName = Word(alphas,alphanums+"_-:") if (xml): tagAttrValue = dblQuotedString.copy().setParseAction( removeQuotes ) openTag = Suppress("<") + tagStr("tag") + \ Dict(ZeroOrMore(Group( tagAttrName + Suppress("=") + tagAttrValue ))) + \ Optional("/",default=[False]).setResultsName("empty").setParseAction(lambda s,l,t:t[0]=='/') + Suppress(">") else: printablesLessRAbrack = "".join(c for c in printables if c not in ">") tagAttrValue = quotedString.copy().setParseAction( removeQuotes ) | Word(printablesLessRAbrack) openTag = Suppress("<") + tagStr("tag") + \ Dict(ZeroOrMore(Group( tagAttrName.setParseAction(downcaseTokens) + \ Optional( Suppress("=") + tagAttrValue ) ))) + \ Optional("/",default=[False]).setResultsName("empty").setParseAction(lambda s,l,t:t[0]=='/') + Suppress(">") closeTag = Combine(_L("") openTag = openTag.setResultsName("start"+"".join(resname.replace(":"," ").title().split())).setName("<%s>" % resname) closeTag = closeTag.setResultsName("end"+"".join(resname.replace(":"," ").title().split())).setName("" % resname) openTag.tag = resname closeTag.tag = resname return openTag, closeTag def makeHTMLTags(tagStr): """ Helper to construct opening and closing tag expressions for HTML, given a tag name. Matches tags in either upper or lower case, attributes with namespaces and with quoted or unquoted values. Example:: text = 'More info at the pyparsing wiki page' # makeHTMLTags returns pyparsing expressions for the opening and closing tags as a 2-tuple a,a_end = makeHTMLTags("A") link_expr = a + SkipTo(a_end)("link_text") + a_end for link in link_expr.searchString(text): # attributes in the tag (like "href" shown here) are also accessible as named results print(link.link_text, '->', link.href) prints:: pyparsing -> http://pyparsing.wikispaces.com """ return _makeTags( tagStr, False ) def makeXMLTags(tagStr): """ Helper to construct opening and closing tag expressions for XML, given a tag name. 
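# --- Illustrative usage sketch (editor addition, not part of the original pyparsing
# --- source): the makeXMLTags docstring defers to the makeHTMLTags example, so a
# --- concrete XML version is shown here.  Assumes pyparsing is importable.
from pyparsing import makeXMLTags, SkipTo

item, item_end = makeXMLTags("item")
entry = item + SkipTo(item_end)("contents") + item_end
result = entry.parseString('<item id="123">Some value</item>')
print(result.contents)   # -> 'Some value'
print(result.id)         # -> '123'   (tag attributes become named results)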
Matches tags only in the given upper/lower case. Example: similar to L{makeHTMLTags} """ return _makeTags( tagStr, True ) def withAttribute(*args,**attrDict): """ Helper to create a validating parse action to be used with start tags created with C{L{makeXMLTags}} or C{L{makeHTMLTags}}. Use C{withAttribute} to qualify a starting tag with a required attribute value, to avoid false matches on common tags such as C{} or C{
}. Call C{withAttribute} with a series of attribute names and values. Specify the list of filter attributes names and values as: - keyword arguments, as in C{(align="right")}, or - as an explicit dict with C{**} operator, when an attribute name is also a Python reserved word, as in C{**{"class":"Customer", "align":"right"}} - a list of name-value tuples, as in ( ("ns1:class", "Customer"), ("ns2:align","right") ) For attribute names with a namespace prefix, you must use the second form. Attribute names are matched insensitive to upper/lower case. If just testing for C{class} (with or without a namespace), use C{L{withClass}}. To verify that the attribute exists, but without specifying a value, pass C{withAttribute.ANY_VALUE} as the value. Example:: html = '''
<div>
Some text
<div type="grid">1 4 0 1 0</div>
<div type="graph">1,3 2,3 1,1</div>
<div>this has no type</div>
</div>
''' div,div_end = makeHTMLTags("div") # only match div tag having a type attribute with value "grid" div_grid = div().setParseAction(withAttribute(type="grid")) grid_expr = div_grid + SkipTo(div | div_end)("body") for grid_header in grid_expr.searchString(html): print(grid_header.body) # construct a match with any div tag having a type attribute, regardless of the value div_any_type = div().setParseAction(withAttribute(type=withAttribute.ANY_VALUE)) div_expr = div_any_type + SkipTo(div | div_end)("body") for div_header in div_expr.searchString(html): print(div_header.body) prints:: 1 4 0 1 0 1 4 0 1 0 1,3 2,3 1,1 """ if args: attrs = args[:] else: attrs = attrDict.items() attrs = [(k,v) for k,v in attrs] def pa(s,l,tokens): for attrName,attrValue in attrs: if attrName not in tokens: raise ParseException(s,l,"no matching attribute " + attrName) if attrValue != withAttribute.ANY_VALUE and tokens[attrName] != attrValue: raise ParseException(s,l,"attribute '%s' has value '%s', must be '%s'" % (attrName, tokens[attrName], attrValue)) return pa withAttribute.ANY_VALUE = object() def withClass(classname, namespace=''): """ Simplified version of C{L{withAttribute}} when matching on a div class - made difficult because C{class} is a reserved word in Python. Example:: html = '''
<div>
Some text
<div class="grid">1 4 0 1 0</div>
<div class="graph">1,3 2,3 1,1</div>
<div>this &lt;div&gt; has no class</div>
</div>
''' div,div_end = makeHTMLTags("div") div_grid = div().setParseAction(withClass("grid")) grid_expr = div_grid + SkipTo(div | div_end)("body") for grid_header in grid_expr.searchString(html): print(grid_header.body) div_any_type = div().setParseAction(withClass(withAttribute.ANY_VALUE)) div_expr = div_any_type + SkipTo(div | div_end)("body") for div_header in div_expr.searchString(html): print(div_header.body) prints:: 1 4 0 1 0 1 4 0 1 0 1,3 2,3 1,1 """ classattr = "%s:class" % namespace if namespace else "class" return withAttribute(**{classattr : classname}) opAssoc = _Constants() opAssoc.LEFT = object() opAssoc.RIGHT = object() def infixNotation( baseExpr, opList, lpar=Suppress('('), rpar=Suppress(')') ): """ Helper method for constructing grammars of expressions made up of operators working in a precedence hierarchy. Operators may be unary or binary, left- or right-associative. Parse actions can also be attached to operator expressions. The generated parser will also recognize the use of parentheses to override operator precedences (see example below). Note: if you define a deep operator list, you may see performance issues when using infixNotation. See L{ParserElement.enablePackrat} for a mechanism to potentially improve your parser performance. Parameters: - baseExpr - expression representing the most basic element for the nested - opList - list of tuples, one for each operator precedence level in the expression grammar; each tuple is of the form (opExpr, numTerms, rightLeftAssoc, parseAction), where: - opExpr is the pyparsing expression for the operator; may also be a string, which will be converted to a Literal; if numTerms is 3, opExpr is a tuple of two expressions, for the two operators separating the 3 terms - numTerms is the number of terms for this operator (must be 1, 2, or 3) - rightLeftAssoc is the indicator whether the operator is right or left associative, using the pyparsing-defined constants C{opAssoc.RIGHT} and C{opAssoc.LEFT}. 
- parseAction is the parse action to be associated with expressions matching this operator expression (the parse action tuple member may be omitted) - lpar - expression for matching left-parentheses (default=C{Suppress('(')}) - rpar - expression for matching right-parentheses (default=C{Suppress(')')}) Example:: # simple example of four-function arithmetic with ints and variable names integer = pyparsing_common.signed_integer varname = pyparsing_common.identifier arith_expr = infixNotation(integer | varname, [ ('-', 1, opAssoc.RIGHT), (oneOf('* /'), 2, opAssoc.LEFT), (oneOf('+ -'), 2, opAssoc.LEFT), ]) arith_expr.runTests(''' 5+3*6 (5+3)*6 -2--11 ''', fullDump=False) prints:: 5+3*6 [[5, '+', [3, '*', 6]]] (5+3)*6 [[[5, '+', 3], '*', 6]] -2--11 [[['-', 2], '-', ['-', 11]]] """ ret = Forward() lastExpr = baseExpr | ( lpar + ret + rpar ) for i,operDef in enumerate(opList): opExpr,arity,rightLeftAssoc,pa = (operDef + (None,))[:4] termName = "%s term" % opExpr if arity < 3 else "%s%s term" % opExpr if arity == 3: if opExpr is None or len(opExpr) != 2: raise ValueError("if numterms=3, opExpr must be a tuple or list of two expressions") opExpr1, opExpr2 = opExpr thisExpr = Forward().setName(termName) if rightLeftAssoc == opAssoc.LEFT: if arity == 1: matchExpr = FollowedBy(lastExpr + opExpr) + Group( lastExpr + OneOrMore( opExpr ) ) elif arity == 2: if opExpr is not None: matchExpr = FollowedBy(lastExpr + opExpr + lastExpr) + Group( lastExpr + OneOrMore( opExpr + lastExpr ) ) else: matchExpr = FollowedBy(lastExpr+lastExpr) + Group( lastExpr + OneOrMore(lastExpr) ) elif arity == 3: matchExpr = FollowedBy(lastExpr + opExpr1 + lastExpr + opExpr2 + lastExpr) + \ Group( lastExpr + opExpr1 + lastExpr + opExpr2 + lastExpr ) else: raise ValueError("operator must be unary (1), binary (2), or ternary (3)") elif rightLeftAssoc == opAssoc.RIGHT: if arity == 1: # try to avoid LR with this extra test if not isinstance(opExpr, Optional): opExpr = Optional(opExpr) matchExpr = FollowedBy(opExpr.expr + thisExpr) + Group( opExpr + thisExpr ) elif arity == 2: if opExpr is not None: matchExpr = FollowedBy(lastExpr + opExpr + thisExpr) + Group( lastExpr + OneOrMore( opExpr + thisExpr ) ) else: matchExpr = FollowedBy(lastExpr + thisExpr) + Group( lastExpr + OneOrMore( thisExpr ) ) elif arity == 3: matchExpr = FollowedBy(lastExpr + opExpr1 + thisExpr + opExpr2 + thisExpr) + \ Group( lastExpr + opExpr1 + thisExpr + opExpr2 + thisExpr ) else: raise ValueError("operator must be unary (1), binary (2), or ternary (3)") else: raise ValueError("operator must indicate right or left associativity") if pa: matchExpr.setParseAction( pa ) thisExpr <<= ( matchExpr.setName(termName) | lastExpr ) lastExpr = thisExpr ret <<= lastExpr return ret operatorPrecedence = infixNotation """(Deprecated) Former name of C{L{infixNotation}}, will be dropped in a future release.""" dblQuotedString = Combine(Regex(r'"(?:[^"\n\r\\]|(?:"")|(?:\\(?:[^x]|x[0-9a-fA-F]+)))*')+'"').setName("string enclosed in double quotes") sglQuotedString = Combine(Regex(r"'(?:[^'\n\r\\]|(?:'')|(?:\\(?:[^x]|x[0-9a-fA-F]+)))*")+"'").setName("string enclosed in single quotes") quotedString = Combine(Regex(r'"(?:[^"\n\r\\]|(?:"")|(?:\\(?:[^x]|x[0-9a-fA-F]+)))*')+'"'| Regex(r"'(?:[^'\n\r\\]|(?:'')|(?:\\(?:[^x]|x[0-9a-fA-F]+)))*")+"'").setName("quotedString using single or double quotes") unicodeString = Combine(_L('u') + quotedString.copy()).setName("unicode string literal") def nestedExpr(opener="(", closer=")", content=None, ignoreExpr=quotedString.copy()): """ Helper 
method for defining nested lists enclosed in opening and closing delimiters ("(" and ")" are the default). Parameters: - opener - opening character for a nested list (default=C{"("}); can also be a pyparsing expression - closer - closing character for a nested list (default=C{")"}); can also be a pyparsing expression - content - expression for items within the nested lists (default=C{None}) - ignoreExpr - expression for ignoring opening and closing delimiters (default=C{quotedString}) If an expression is not provided for the content argument, the nested expression will capture all whitespace-delimited content between delimiters as a list of separate values. Use the C{ignoreExpr} argument to define expressions that may contain opening or closing characters that should not be treated as opening or closing characters for nesting, such as quotedString or a comment expression. Specify multiple expressions using an C{L{Or}} or C{L{MatchFirst}}. The default is L{quotedString}, but if no expressions are to be ignored, then pass C{None} for this argument. Example:: data_type = oneOf("void int short long char float double") decl_data_type = Combine(data_type + Optional(Word('*'))) ident = Word(alphas+'_', alphanums+'_') number = pyparsing_common.number arg = Group(decl_data_type + ident) LPAR,RPAR = map(Suppress, "()") code_body = nestedExpr('{', '}', ignoreExpr=(quotedString | cStyleComment)) c_function = (decl_data_type("type") + ident("name") + LPAR + Optional(delimitedList(arg), [])("args") + RPAR + code_body("body")) c_function.ignore(cStyleComment) source_code = ''' int is_odd(int x) { return (x%2); } int dec_to_hex(char hchar) { if (hchar >= '0' && hchar <= '9') { return (ord(hchar)-ord('0')); } else { return (10+ord(hchar)-ord('A')); } } ''' for func in c_function.searchString(source_code): print("%(name)s (%(type)s) args: %(args)s" % func) prints:: is_odd (int) args: [['int', 'x']] dec_to_hex (int) args: [['char', 'hchar']] """ if opener == closer: raise ValueError("opening and closing strings cannot be the same") if content is None: if isinstance(opener,basestring) and isinstance(closer,basestring): if len(opener) == 1 and len(closer)==1: if ignoreExpr is not None: content = (Combine(OneOrMore(~ignoreExpr + CharsNotIn(opener+closer+ParserElement.DEFAULT_WHITE_CHARS,exact=1)) ).setParseAction(lambda t:t[0].strip())) else: content = (empty.copy()+CharsNotIn(opener+closer+ParserElement.DEFAULT_WHITE_CHARS ).setParseAction(lambda t:t[0].strip())) else: if ignoreExpr is not None: content = (Combine(OneOrMore(~ignoreExpr + ~Literal(opener) + ~Literal(closer) + CharsNotIn(ParserElement.DEFAULT_WHITE_CHARS,exact=1)) ).setParseAction(lambda t:t[0].strip())) else: content = (Combine(OneOrMore(~Literal(opener) + ~Literal(closer) + CharsNotIn(ParserElement.DEFAULT_WHITE_CHARS,exact=1)) ).setParseAction(lambda t:t[0].strip())) else: raise ValueError("opening and closing arguments must be strings if no content expression is given") ret = Forward() if ignoreExpr is not None: ret <<= Group( Suppress(opener) + ZeroOrMore( ignoreExpr | ret | content ) + Suppress(closer) ) else: ret <<= Group( Suppress(opener) + ZeroOrMore( ret | content ) + Suppress(closer) ) ret.setName('nested %s%s expression' % (opener,closer)) return ret def indentedBlock(blockStatementExpr, indentStack, indent=True): """ Helper method for defining space-delimited indentation blocks, such as those used to define block statements in Python source code. 
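# --- Illustrative usage sketch (editor addition, not part of the original pyparsing
# --- source): with no content expression, nestedExpr returns whitespace-separated
# --- tokens nested to the same depth as the bracketing.  Assumes pyparsing is importable.
from pyparsing import nestedExpr

expr = nestedExpr('(', ')')
print(expr.parseString("(a (b c) (d (e f)))").asList())
# -> [['a', ['b', 'c'], ['d', ['e', 'f']]]]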
Parameters: - blockStatementExpr - expression defining syntax of statement that is repeated within the indented block - indentStack - list created by caller to manage indentation stack (multiple statementWithIndentedBlock expressions within a single grammar should share a common indentStack) - indent - boolean indicating whether block must be indented beyond the the current level; set to False for block of left-most statements (default=C{True}) A valid block must contain at least one C{blockStatement}. Example:: data = ''' def A(z): A1 B = 100 G = A2 A2 A3 B def BB(a,b,c): BB1 def BBA(): bba1 bba2 bba3 C D def spam(x,y): def eggs(z): pass ''' indentStack = [1] stmt = Forward() identifier = Word(alphas, alphanums) funcDecl = ("def" + identifier + Group( "(" + Optional( delimitedList(identifier) ) + ")" ) + ":") func_body = indentedBlock(stmt, indentStack) funcDef = Group( funcDecl + func_body ) rvalue = Forward() funcCall = Group(identifier + "(" + Optional(delimitedList(rvalue)) + ")") rvalue << (funcCall | identifier | Word(nums)) assignment = Group(identifier + "=" + rvalue) stmt << ( funcDef | assignment | identifier ) module_body = OneOrMore(stmt) parseTree = module_body.parseString(data) parseTree.pprint() prints:: [['def', 'A', ['(', 'z', ')'], ':', [['A1'], [['B', '=', '100']], [['G', '=', 'A2']], ['A2'], ['A3']]], 'B', ['def', 'BB', ['(', 'a', 'b', 'c', ')'], ':', [['BB1'], [['def', 'BBA', ['(', ')'], ':', [['bba1'], ['bba2'], ['bba3']]]]]], 'C', 'D', ['def', 'spam', ['(', 'x', 'y', ')'], ':', [[['def', 'eggs', ['(', 'z', ')'], ':', [['pass']]]]]]] """ def checkPeerIndent(s,l,t): if l >= len(s): return curCol = col(l,s) if curCol != indentStack[-1]: if curCol > indentStack[-1]: raise ParseFatalException(s,l,"illegal nesting") raise ParseException(s,l,"not a peer entry") def checkSubIndent(s,l,t): curCol = col(l,s) if curCol > indentStack[-1]: indentStack.append( curCol ) else: raise ParseException(s,l,"not a subentry") def checkUnindent(s,l,t): if l >= len(s): return curCol = col(l,s) if not(indentStack and curCol < indentStack[-1] and curCol <= indentStack[-2]): raise ParseException(s,l,"not an unindent") indentStack.pop() NL = OneOrMore(LineEnd().setWhitespaceChars("\t ").suppress()) INDENT = (Empty() + Empty().setParseAction(checkSubIndent)).setName('INDENT') PEER = Empty().setParseAction(checkPeerIndent).setName('') UNDENT = Empty().setParseAction(checkUnindent).setName('UNINDENT') if indent: smExpr = Group( Optional(NL) + #~ FollowedBy(blockStatementExpr) + INDENT + (OneOrMore( PEER + Group(blockStatementExpr) + Optional(NL) )) + UNDENT) else: smExpr = Group( Optional(NL) + (OneOrMore( PEER + Group(blockStatementExpr) + Optional(NL) )) ) blockStatementExpr.ignore(_bslash + LineEnd()) return smExpr.setName('indented block') alphas8bit = srange(r"[\0xc0-\0xd6\0xd8-\0xf6\0xf8-\0xff]") punc8bit = srange(r"[\0xa1-\0xbf\0xd7\0xf7]") anyOpenTag,anyCloseTag = makeHTMLTags(Word(alphas,alphanums+"_:").setName('any tag')) _htmlEntityMap = dict(zip("gt lt amp nbsp quot apos".split(),'><& "\'')) commonHTMLEntity = Regex('&(?P' + '|'.join(_htmlEntityMap.keys()) +");").setName("common HTML entity") def replaceHTMLEntity(t): """Helper parser action to replace common HTML entities with their special characters""" return _htmlEntityMap.get(t.entity) # it's easy to get these comment structures wrong - they're very common, so may as well make them available cStyleComment = Combine(Regex(r"/\*(?:[^*]|\*(?!/))*") + '*/').setName("C style comment") "Comment of the form C{/* ... 
*/}" htmlComment = Regex(r"").setName("HTML comment") "Comment of the form C{}" restOfLine = Regex(r".*").leaveWhitespace().setName("rest of line") dblSlashComment = Regex(r"//(?:\\\n|[^\n])*").setName("// comment") "Comment of the form C{// ... (to end of line)}" cppStyleComment = Combine(Regex(r"/\*(?:[^*]|\*(?!/))*") + '*/'| dblSlashComment).setName("C++ style comment") "Comment of either form C{L{cStyleComment}} or C{L{dblSlashComment}}" javaStyleComment = cppStyleComment "Same as C{L{cppStyleComment}}" pythonStyleComment = Regex(r"#.*").setName("Python style comment") "Comment of the form C{# ... (to end of line)}" _commasepitem = Combine(OneOrMore(Word(printables, excludeChars=',') + Optional( Word(" \t") + ~Literal(",") + ~LineEnd() ) ) ).streamline().setName("commaItem") commaSeparatedList = delimitedList( Optional( quotedString.copy() | _commasepitem, default="") ).setName("commaSeparatedList") """(Deprecated) Predefined expression of 1 or more printable words or quoted strings, separated by commas. This expression is deprecated in favor of L{pyparsing_common.comma_separated_list}.""" # some other useful expressions - using lower-case class name since we are really using this as a namespace class pyparsing_common: """ Here are some common low-level expressions that may be useful in jump-starting parser development: - numeric forms (L{integers}, L{reals}, L{scientific notation}) - common L{programming identifiers} - network addresses (L{MAC}, L{IPv4}, L{IPv6}) - ISO8601 L{dates} and L{datetime} - L{UUID} - L{comma-separated list} Parse actions: - C{L{convertToInteger}} - C{L{convertToFloat}} - C{L{convertToDate}} - C{L{convertToDatetime}} - C{L{stripHTMLTags}} - C{L{upcaseTokens}} - C{L{downcaseTokens}} Example:: pyparsing_common.number.runTests(''' # any int or real number, returned as the appropriate type 100 -100 +100 3.14159 6.02e23 1e-12 ''') pyparsing_common.fnumber.runTests(''' # any int or real number, returned as float 100 -100 +100 3.14159 6.02e23 1e-12 ''') pyparsing_common.hex_integer.runTests(''' # hex numbers 100 FF ''') pyparsing_common.fraction.runTests(''' # fractions 1/2 -3/4 ''') pyparsing_common.mixed_integer.runTests(''' # mixed fractions 1 1/2 -3/4 1-3/4 ''') import uuid pyparsing_common.uuid.setParseAction(tokenMap(uuid.UUID)) pyparsing_common.uuid.runTests(''' # uuid 12345678-1234-5678-1234-567812345678 ''') prints:: # any int or real number, returned as the appropriate type 100 [100] -100 [-100] +100 [100] 3.14159 [3.14159] 6.02e23 [6.02e+23] 1e-12 [1e-12] # any int or real number, returned as float 100 [100.0] -100 [-100.0] +100 [100.0] 3.14159 [3.14159] 6.02e23 [6.02e+23] 1e-12 [1e-12] # hex numbers 100 [256] FF [255] # fractions 1/2 [0.5] -3/4 [-0.75] # mixed fractions 1 [1] 1/2 [0.5] -3/4 [-0.75] 1-3/4 [1.75] # uuid 12345678-1234-5678-1234-567812345678 [UUID('12345678-1234-5678-1234-567812345678')] """ convertToInteger = tokenMap(int) """ Parse action for converting parsed integers to Python int """ convertToFloat = tokenMap(float) """ Parse action for converting parsed numbers to Python float """ integer = Word(nums).setName("integer").setParseAction(convertToInteger) """expression that parses an unsigned integer, returns an int""" hex_integer = Word(hexnums).setName("hex integer").setParseAction(tokenMap(int,16)) """expression that parses a hexadecimal integer, returns an int""" signed_integer = Regex(r'[+-]?\d+').setName("signed integer").setParseAction(convertToInteger) """expression that parses an integer with optional leading sign, returns an 
int""" fraction = (signed_integer().setParseAction(convertToFloat) + '/' + signed_integer().setParseAction(convertToFloat)).setName("fraction") """fractional expression of an integer divided by an integer, returns a float""" fraction.addParseAction(lambda t: t[0]/t[-1]) mixed_integer = (fraction | signed_integer + Optional(Optional('-').suppress() + fraction)).setName("fraction or mixed integer-fraction") """mixed integer of the form 'integer - fraction', with optional leading integer, returns float""" mixed_integer.addParseAction(sum) real = Regex(r'[+-]?\d+\.\d*').setName("real number").setParseAction(convertToFloat) """expression that parses a floating point number and returns a float""" sci_real = Regex(r'[+-]?\d+([eE][+-]?\d+|\.\d*([eE][+-]?\d+)?)').setName("real number with scientific notation").setParseAction(convertToFloat) """expression that parses a floating point number with optional scientific notation and returns a float""" # streamlining this expression makes the docs nicer-looking number = (sci_real | real | signed_integer).streamline() """any numeric expression, returns the corresponding Python type""" fnumber = Regex(r'[+-]?\d+\.?\d*([eE][+-]?\d+)?').setName("fnumber").setParseAction(convertToFloat) """any int or real number, returned as float""" identifier = Word(alphas+'_', alphanums+'_').setName("identifier") """typical code identifier (leading alpha or '_', followed by 0 or more alphas, nums, or '_')""" ipv4_address = Regex(r'(25[0-5]|2[0-4][0-9]|1?[0-9]{1,2})(\.(25[0-5]|2[0-4][0-9]|1?[0-9]{1,2})){3}').setName("IPv4 address") "IPv4 address (C{0.0.0.0 - 255.255.255.255})" _ipv6_part = Regex(r'[0-9a-fA-F]{1,4}').setName("hex_integer") _full_ipv6_address = (_ipv6_part + (':' + _ipv6_part)*7).setName("full IPv6 address") _short_ipv6_address = (Optional(_ipv6_part + (':' + _ipv6_part)*(0,6)) + "::" + Optional(_ipv6_part + (':' + _ipv6_part)*(0,6))).setName("short IPv6 address") _short_ipv6_address.addCondition(lambda t: sum(1 for tt in t if pyparsing_common._ipv6_part.matches(tt)) < 8) _mixed_ipv6_address = ("::ffff:" + ipv4_address).setName("mixed IPv6 address") ipv6_address = Combine((_full_ipv6_address | _mixed_ipv6_address | _short_ipv6_address).setName("IPv6 address")).setName("IPv6 address") "IPv6 address (long, short, or mixed form)" mac_address = Regex(r'[0-9a-fA-F]{2}([:.-])[0-9a-fA-F]{2}(?:\1[0-9a-fA-F]{2}){4}').setName("MAC address") "MAC address xx:xx:xx:xx:xx (may also have '-' or '.' 
delimiters)" @staticmethod def convertToDate(fmt="%Y-%m-%d"): """ Helper to create a parse action for converting parsed date string to Python datetime.date Params - - fmt - format to be passed to datetime.strptime (default=C{"%Y-%m-%d"}) Example:: date_expr = pyparsing_common.iso8601_date.copy() date_expr.setParseAction(pyparsing_common.convertToDate()) print(date_expr.parseString("1999-12-31")) prints:: [datetime.date(1999, 12, 31)] """ def cvt_fn(s,l,t): try: return datetime.strptime(t[0], fmt).date() except ValueError as ve: raise ParseException(s, l, str(ve)) return cvt_fn @staticmethod def convertToDatetime(fmt="%Y-%m-%dT%H:%M:%S.%f"): """ Helper to create a parse action for converting parsed datetime string to Python datetime.datetime Params - - fmt - format to be passed to datetime.strptime (default=C{"%Y-%m-%dT%H:%M:%S.%f"}) Example:: dt_expr = pyparsing_common.iso8601_datetime.copy() dt_expr.setParseAction(pyparsing_common.convertToDatetime()) print(dt_expr.parseString("1999-12-31T23:59:59.999")) prints:: [datetime.datetime(1999, 12, 31, 23, 59, 59, 999000)] """ def cvt_fn(s,l,t): try: return datetime.strptime(t[0], fmt) except ValueError as ve: raise ParseException(s, l, str(ve)) return cvt_fn iso8601_date = Regex(r'(?P\d{4})(?:-(?P\d\d)(?:-(?P\d\d))?)?').setName("ISO8601 date") "ISO8601 date (C{yyyy-mm-dd})" iso8601_datetime = Regex(r'(?P\d{4})-(?P\d\d)-(?P\d\d)[T ](?P\d\d):(?P\d\d)(:(?P\d\d(\.\d*)?)?)?(?PZ|[+-]\d\d:?\d\d)?').setName("ISO8601 datetime") "ISO8601 datetime (C{yyyy-mm-ddThh:mm:ss.s(Z|+-00:00)}) - trailing seconds, milliseconds, and timezone optional; accepts separating C{'T'} or C{' '}" uuid = Regex(r'[0-9a-fA-F]{8}(-[0-9a-fA-F]{4}){3}-[0-9a-fA-F]{12}').setName("UUID") "UUID (C{xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx})" _html_stripper = anyOpenTag.suppress() | anyCloseTag.suppress() @staticmethod def stripHTMLTags(s, l, tokens): """ Parse action to remove HTML tags from web page HTML source Example:: # strip HTML links from normal text text = 'More info at the
pyparsing wiki page' td,td_end = makeHTMLTags("TD") table_text = td + SkipTo(td_end).setParseAction(pyparsing_common.stripHTMLTags)("body") + td_end print(table_text.parseString(text).body) # -> 'More info at the pyparsing wiki page' """ return pyparsing_common._html_stripper.transformString(tokens[0]) _commasepitem = Combine(OneOrMore(~Literal(",") + ~LineEnd() + Word(printables, excludeChars=',') + Optional( White(" \t") ) ) ).streamline().setName("commaItem") comma_separated_list = delimitedList( Optional( quotedString.copy() | _commasepitem, default="") ).setName("comma separated list") """Predefined expression of 1 or more printable words or quoted strings, separated by commas.""" upcaseTokens = staticmethod(tokenMap(lambda t: _ustr(t).upper())) """Parse action to convert tokens to upper case.""" downcaseTokens = staticmethod(tokenMap(lambda t: _ustr(t).lower())) """Parse action to convert tokens to lower case.""" if __name__ == "__main__": selectToken = CaselessLiteral("select") fromToken = CaselessLiteral("from") ident = Word(alphas, alphanums + "_$") columnName = delimitedList(ident, ".", combine=True).setParseAction(upcaseTokens) columnNameList = Group(delimitedList(columnName)).setName("columns") columnSpec = ('*' | columnNameList) tableName = delimitedList(ident, ".", combine=True).setParseAction(upcaseTokens) tableNameList = Group(delimitedList(tableName)).setName("tables") simpleSQL = selectToken("command") + columnSpec("columns") + fromToken + tableNameList("tables") # demo runTests method, including embedded comments in test string simpleSQL.runTests(""" # '*' as column list and dotted table name select * from SYS.XYZZY # caseless match on "SELECT", and casts back to "select" SELECT * from XYZZY, ABC # list of column names, and mixed case SELECT keyword Select AA,BB,CC from Sys.dual # multiple tables Select A, B, C from Sys.dual, Table2 # invalid SELECT keyword - should fail Xelect A, B, C from Sys.dual # incomplete command - should fail Select # invalid column name - should fail Select ^^^ frox Sys.dual """) pyparsing_common.number.runTests(""" 100 -100 +100 3.14159 6.02e23 1e-12 """) # any int or real number, returned as float pyparsing_common.fnumber.runTests(""" 100 -100 +100 3.14159 6.02e23 1e-12 """) pyparsing_common.hex_integer.runTests(""" 100 FF """) import uuid pyparsing_common.uuid.setParseAction(tokenMap(uuid.UUID)) pyparsing_common.uuid.runTests(""" 12345678-1234-5678-1234-567812345678 """) PK Zc+̓̓1site-packages/pkg_resources/_vendor/pyparsing.pycnu[ fci@sdZdZdZdZddlZddlmZddlZddl Z ddl Z ddl Z ddl Z ddl Z ddlZddlZddlZddlmZyddlmZWn!ek rddlmZnXydd l mZWn?ek r=ydd lmZWnek r9eZnXnXd d d d ddddddddddddddddddd d!d"d#d$d%d&d'd(d)d*d+d,d-d.d/d0d1d2d3d4d5d6d7d8d9d:d;d<d=d>d?d@dAdBdCdDdEdFdGdHdIdJdKdLdMdNdOdPdQdRdSdTdUdVdWdXdYdZd[d\d]d^d_d`dadbdcdddedfdgdhdidjdkdldmdndodpdqdrgiZee jds ZedtdskZere jZ e!Z"e#Z$e!Z%e&e'e(e)e*ee+e,e-e.e/g Z0nre j1Z e2Z3duZ%gZ0ddl4Z4xEdvj5D]7Z6ye0j7e8e4e6Wne9k rZq$nXq$We:dwe3dxDZ;dyZ<dze=fd{YZ>ej?ej@ZAd|ZBeBd}ZCeAeBZDe#d~ZEdjFdejGDZHd!eIfdYZJd#eJfdYZKd%eJfdYZLd'eLfdYZMd*eIfdYZNde=fdYZOd&e=fdYZPe jQjRePdZSdZTdZUdZVdZWdZXdZYddZZd(e=fdYZ[d0e[fdYZ\de\fdYZ]de\fdYZ^de\fdYZ_e_Z`e_e[_ade\fdYZbd e_fdYZcd ebfdYZddpe\fdYZed3e\fdYZfd+e\fdYZgd)e\fdYZhd e\fdYZid2e\fdYZjde\fdYZkdekfdYZldekfdYZmdekfdYZnd.ekfdYZod-ekfdYZpd5ekfdYZqd4ekfdYZrd$e[fdYZsd esfdYZtd 
N(RRtclear(R((sC/usr/lib/python2.7/site-packages/pkg_resources/_vendor/pyparsing.pyRfscCsy ||SWntk r dSX||jkr}||jkrR|j|ddStg|j|D]}|d^qcSndSdS(NRrii(RRRR (RRR((sC/usr/lib/python2.7/site-packages/pkg_resources/_vendor/pyparsing.pyRms  +cCs|j}||7}|S(N(R(RtotherR}((sC/usr/lib/python2.7/site-packages/pkg_resources/_vendor/pyparsing.pyt__add__{s  c s|jrt|jfd}|jj}g|D]<\}}|D])}|t|d||df^qMq=}xJ|D]?\}}|||st](RR(R((sC/usr/lib/python2.7/site-packages/pkg_resources/_vendor/pyparsing.pyRsRrcCsog}xb|jD]W}|r2|r2|j|nt|trT||j7}q|jt|qW|S(N(RRRsR t _asStringListR(RtseptoutR((sC/usr/lib/python2.7/site-packages/pkg_resources/_vendor/pyparsing.pyRs cCs5g|jD]'}t|tr+|jn|^q S(s Returns the parse results as a nested list of matching tokens, all converted to strings. Example:: patt = OneOrMore(Word(alphas)) result = patt.parseString("sldkj lsdkj sldkj") # even though the result prints in string-like form, it is actually a pyparsing ParseResults print(type(result), result) # -> ['sldkj', 'lsdkj', 'sldkj'] # Use asList() to create an actual list result_list = result.asList() print(type(result_list), result_list) # -> ['sldkj', 'lsdkj', 'sldkj'] (RRsR R(Rtres((sC/usr/lib/python2.7/site-packages/pkg_resources/_vendor/pyparsing.pyRscsGtr|j}n |j}fdtfd|DS(s Returns the named parse results as a nested dictionary. Example:: integer = Word(nums) date_str = integer("year") + '/' + integer("month") + '/' + integer("day") result = date_str.parseString('12/31/1999') print(type(result), repr(result)) # -> (['12', '/', '31', '/', '1999'], {'day': [('1999', 4)], 'year': [('12', 0)], 'month': [('31', 2)]}) result_dict = result.asDict() print(type(result_dict), repr(result_dict)) # -> {'day': '1999', 'year': '12', 'month': '31'} # even though a ParseResults supports dict-like access, sometime you just need to have a dict import json print(json.dumps(result)) # -> Exception: TypeError: ... is not JSON serializable print(json.dumps(result.asDict())) # -> {"month": "31", "day": "1999", "year": "12"} csMt|trE|jr%|jSg|D]}|^q,Sn|SdS(N(RsR RtasDict(R|R(ttoItem(sC/usr/lib/python2.7/site-packages/pkg_resources/_vendor/pyparsing.pyRs    c3s'|]\}}||fVqdS(N((RRR(R(sC/usr/lib/python2.7/site-packages/pkg_resources/_vendor/pyparsing.pys s(tPY_3RRR(Rtitem_fn((RsC/usr/lib/python2.7/site-packages/pkg_resources/_vendor/pyparsing.pyRs    cCsPt|j}|jj|_|j|_|jj|j|j|_|S(sA Returns a new copy of a C{ParseResults} object. (R RRRRRR R(RR}((sC/usr/lib/python2.7/site-packages/pkg_resources/_vendor/pyparsing.pyRs   c Csd}g}td|jjD}|d}|sPd}d}d}nd } |d k rk|} n|jr|j} n| s|rdSd} n|||d| dg7}x t|jD]\} } t| trI| |kr|| j || |o|d k||g7}q|| j d |o6|d k||g7}qd } | |krh|| } n| s|rzqqd} nt t | } |||d| d| d| dg 7}qW|||d| dg7}dj |S( s (Deprecated) Returns the parse results as XML. Tags are created for tokens and lists that have defined results names. s css2|](\}}|D]}|d|fVqqdS(iN((RRRR((sC/usr/lib/python2.7/site-packages/pkg_resources/_vendor/pyparsing.pys s s RrtITEMtsgss %s%s- %s: s icss|]}t|tVqdS(N(RsR (Rtvv((sC/usr/lib/python2.7/site-packages/pkg_resources/_vendor/pyparsing.pys sss %s%s[%d]: %s%s%sRr( RRRRtsortedRRsR tdumpRtanyRR( RR$tdepthtfullRtNLRRRRR1((sC/usr/lib/python2.7/site-packages/pkg_resources/_vendor/pyparsing.pyR3Ps,  B?cOstj|j||dS(s Pretty-printer for parsed results as a list, using the C{pprint} module. Accepts additional positional or keyword args as defined for the C{pprint.pprint} method. 
(U{http://docs.python.org/3/library/pprint.html#pprint.pprint}) Example:: ident = Word(alphas, alphanums) num = Word(nums) func = Forward() term = ident | num | Group('(' + func + ')') func <<= ident + Group(Optional(delimitedList(term))) result = func.parseString("fna a,b,(fnb c,d,200),100") result.pprint(width=40) prints:: ['fna', ['a', 'b', ['(', 'fnb', ['c', 'd', '200'], ')'], '100']] N(tpprintR(RRR((sC/usr/lib/python2.7/site-packages/pkg_resources/_vendor/pyparsing.pyR8}scCsC|j|jj|jdk r-|jp0d|j|jffS(N(RRRRRRR(R((sC/usr/lib/python2.7/site-packages/pkg_resources/_vendor/pyparsing.pyt __getstate__s  cCsm|d|_|d\|_}}|_i|_|jj||dk r`t||_n d|_dS(Nii(RRRRR RRR(RtstateR/t inAccumNames((sC/usr/lib/python2.7/site-packages/pkg_resources/_vendor/pyparsing.pyt __setstate__s   cCs|j|j|j|jfS(N(RRRR(R((sC/usr/lib/python2.7/site-packages/pkg_resources/_vendor/pyparsing.pyt__getnewargs__scCs tt|t|jS(N(RRRR(R((sC/usr/lib/python2.7/site-packages/pkg_resources/_vendor/pyparsing.pyRsN(6RRRRRRRsRRRRRRRt __nonzero__RRRRRRRRRRRRRRRRRRRRR RRRRRRRRRR!R-R0R3R8R9R<R=R(((sC/usr/lib/python2.7/site-packages/pkg_resources/_vendor/pyparsing.pyR -sh& '              4             # =  %-   cCsW|}d|ko#t|knr@||ddkr@dS||jdd|S(sReturns current column within a string, counting newlines as line separators. The first column is number 1. Note: the default parsing behavior is to expand tabs in the input string before starting the parsing process. See L{I{ParserElement.parseString}} for more information on parsing strings containing C{}s, and suggested methods to maintain a consistent view of the parsed string, the parse location, and line and column positions within the parsed string. iis (Rtrfind(RtstrgR((sC/usr/lib/python2.7/site-packages/pkg_resources/_vendor/pyparsing.pyR7s cCs|jdd|dS(sReturns current line number within a string, counting newlines as line separators. The first line is number 1. Note: the default parsing behavior is to expand tabs in the input string before starting the parsing process. See L{I{ParserElement.parseString}} for more information on parsing strings containing C{}s, and suggested methods to maintain a consistent view of the parsed string, the parse location, and line and column positions within the parsed string. s ii(tcount(RR@((sC/usr/lib/python2.7/site-packages/pkg_resources/_vendor/pyparsing.pyRHs cCsR|jdd|}|jd|}|dkrB||d|!S||dSdS(sfReturns the line of text containing loc within a string, counting newlines as line separators. 
s iiN(R?tfind(RR@tlastCRtnextCR((sC/usr/lib/python2.7/site-packages/pkg_resources/_vendor/pyparsing.pyREs  cCsAdt|dt|dt||t||fGHdS(NsMatch s at loc s(%d,%d)(RRHR7(tinstringRtexpr((sC/usr/lib/python2.7/site-packages/pkg_resources/_vendor/pyparsing.pyt_defaultStartDebugActionscCs'dt|dt|jGHdS(NsMatched s -> (RRuR(REtstartloctendlocRFttoks((sC/usr/lib/python2.7/site-packages/pkg_resources/_vendor/pyparsing.pyt_defaultSuccessDebugActionscCsdt|GHdS(NsException raised:(R(RERRFtexc((sC/usr/lib/python2.7/site-packages/pkg_resources/_vendor/pyparsing.pyt_defaultExceptionDebugActionscGsdS(sG'Do-nothing' debug action, to suppress debugging output during parsing.N((R((sC/usr/lib/python2.7/site-packages/pkg_resources/_vendor/pyparsing.pyROsics tkrfdSdgtgtd dkrVdd}ddntj}tjd}|d dd }|d|d |ffd }d }y"tdtdj}Wntk rt }nX||_|S(Ncs |S(N((RtlRp(tfunc(sC/usr/lib/python2.7/site-packages/pkg_resources/_vendor/pyparsing.pyRqRriiiicSsJtdkrdnd}tjd| |d|}|j|jfgS( Niiiiitlimiti(iii(tsystem_versiont tracebackt extract_stacktfilenameRH(RPR t frame_summary((sC/usr/lib/python2.7/site-packages/pkg_resources/_vendor/pyparsing.pyRSscSs2tj|d|}|d}|j|jfgS(NRPi(RRt extract_tbRTRH(ttbRPtframesRU((sC/usr/lib/python2.7/site-packages/pkg_resources/_vendor/pyparsing.pyRVs iRPiicsxy&|d}td<|SWqtk rdrInAz:tjd}|dddd ksnWd~Xdkrdcd7Rt __class__(ii( tsingleArgBuiltinsRRQRRRSRVtgetattrRt ExceptionRu(ROR[RSt LINE_DIFFt this_lineR]t func_name((RVRZRORPR[R\sC/usr/lib/python2.7/site-packages/pkg_resources/_vendor/pyparsing.pyt _trim_aritys*          cBseZdZdZeZedZedZedZ dZ dZ edZ e dZd Zd Zd Zd Zd ZdZe dZdZe e dZdZdZdefdYZedFk rdefdYZndefdYZiZe Z!ddgZ"e e dZ#eZ$edZ%eZ&eddZ'edZ(e)edZ*d Z+e)d!Z,e)ed"Z-d#Z.d$Z/d%Z0d&Z1d'Z2d(Z3d)Z4d*Z5d+Z6d,Z7d-Z8d.Z9d/Z:dFd0Z;d1Z<d2Z=d3Z>d4Z?d5Z@d6ZAe d7ZBd8ZCd9ZDd:ZEd;ZFgd<ZGed=ZHd>ZId?ZJd@ZKdAZLdBZMe dCZNe dDe e edEZORS(Gs)Abstract base level parser element class.s cCs |t_dS(s Overrides the default whitespace chars Example:: # default whitespace chars are space, and newline OneOrMore(Word(alphas)).parseString("abc def\nghi jkl") # -> ['abc', 'def', 'ghi', 'jkl'] # change to just treat newline as significant ParserElement.setDefaultWhitespaceChars(" \t") OneOrMore(Word(alphas)).parseString("abc def\nghi jkl") # -> ['abc', 'def'] N(R"tDEFAULT_WHITE_CHARS(tchars((sC/usr/lib/python2.7/site-packages/pkg_resources/_vendor/pyparsing.pytsetDefaultWhitespaceChars=s cCs |t_dS(s Set class to be used for inclusion of string literals into a parser. Example:: # default literal class used is Literal integer = Word(nums) date_str = integer("year") + '/' + integer("month") + '/' + integer("day") date_str.parseString("1999/12/31") # -> ['1999', '/', '12', '/', '31'] # change to Suppress ParserElement.inlineLiteralsUsing(Suppress) date_str = integer("year") + '/' + integer("month") + '/' + integer("day") date_str.parseString("1999/12/31") # -> ['1999', '12', '31'] N(R"t_literalStringClass(R((sC/usr/lib/python2.7/site-packages/pkg_resources/_vendor/pyparsing.pytinlineLiteralsUsingLscCst|_d|_d|_d|_||_t|_t j |_ t|_ t |_t |_t|_t |_t |_t|_d|_t|_d|_d|_t|_t |_dS(NRr(NNN(Rt parseActionRt failActiontstrReprt resultsNamet saveAsListRtskipWhitespaceR"Rft whiteCharstcopyDefaultWhiteCharsRtmayReturnEmptytkeepTabst ignoreExprstdebugt streamlinedt mayIndexErrorterrmsgt modalResultst debugActionstret callPreparset callDuringTry(Rtsavelist((sC/usr/lib/python2.7/site-packages/pkg_resources/_vendor/pyparsing.pyRas(                   cCsEtj|}|j|_|j|_|jrAtj|_n|S(s$ Make a copy of this C{ParserElement}. 
Useful for defining different parse actions for the same parsing pattern, using copies of the original parse element. Example:: integer = Word(nums).setParseAction(lambda toks: int(toks[0])) integerK = integer.copy().addParseAction(lambda toks: toks[0]*1024) + Suppress("K") integerM = integer.copy().addParseAction(lambda toks: toks[0]*1024*1024) + Suppress("M") print(OneOrMore(integerK | integerM | integer).parseString("5K 100 640K 256M")) prints:: [5120, 100, 655360, 268435456] Equivalent form of C{expr.copy()} is just C{expr()}:: integerM = integer().addParseAction(lambda toks: toks[0]*1024*1024) + Suppress("M") (RRkRuRrR"RfRq(Rtcpy((sC/usr/lib/python2.7/site-packages/pkg_resources/_vendor/pyparsing.pyRxs    cCs>||_d|j|_t|dr:|j|j_n|S(sf Define name for this expression, makes debugging and exception messages clearer. Example:: Word(nums).parseString("ABC") # -> Exception: Expected W:(0123...) (at char 0), (line:1, col:1) Word(nums).setName("integer").parseString("ABC") # -> Exception: Expected integer (at char 0), (line:1, col:1) s Expected t exception(RRyRRR(RR((sC/usr/lib/python2.7/site-packages/pkg_resources/_vendor/pyparsing.pytsetNames  cCsE|j}|jdr.|d }t}n||_| |_|S(sP Define name for referencing matching tokens as a nested attribute of the returned parse results. NOTE: this returns a *copy* of the original C{ParserElement} object; this is so that the client can define a basic element, such as an integer, and reference it in multiple places with different names. You can also set results names using the abbreviated syntax, C{expr("name")} in place of C{expr.setResultsName("name")} - see L{I{__call__}<__call__>}. Example:: date_str = (integer.setResultsName("year") + '/' + integer.setResultsName("month") + '/' + integer.setResultsName("day")) # equivalent form: date_str = integer("year") + '/' + integer("month") + '/' + integer("day") t*i(RtendswithRRnRz(RRtlistAllMatchestnewself((sC/usr/lib/python2.7/site-packages/pkg_resources/_vendor/pyparsing.pytsetResultsNames     csa|r9|jttfd}|_||_n$t|jdr]|jj|_n|S(sMethod to invoke the Python pdb debugger when this element is about to be parsed. Set C{breakFlag} to True to enable, False to disable. cs)ddl}|j||||S(Ni(tpdbt set_trace(RERt doActionst callPreParseR(t _parseMethod(sC/usr/lib/python2.7/site-packages/pkg_resources/_vendor/pyparsing.pytbreakers  t_originalParseMethod(t_parseRRR(Rt breakFlagR((RsC/usr/lib/python2.7/site-packages/pkg_resources/_vendor/pyparsing.pytsetBreaks   cOs7tttt||_|jdt|_|S(s  Define action to perform when successfully matching parse element definition. Parse action fn is a callable method with 0-3 arguments, called as C{fn(s,loc,toks)}, C{fn(loc,toks)}, C{fn(toks)}, or just C{fn()}, where: - s = the original string being parsed (see note below) - loc = the location of the matching substring - toks = a list of the matched tokens, packaged as a C{L{ParseResults}} object If the functions in fns modify the tokens, they can return them as the return value from fn, and the modified list of tokens will replace the original. Otherwise, fn does not need to return any value. Optional keyword arguments: - callDuringTry = (default=C{False}) indicate if parse action should be run during lookaheads and alternate testing Note: the default parsing behavior is to expand tabs in the input string before starting the parsing process. 
See L{I{parseString}} for more information on parsing strings containing C{}s, and suggested methods to maintain a consistent view of the parsed string, the parse location, and line and column positions within the parsed string. Example:: integer = Word(nums) date_str = integer + '/' + integer + '/' + integer date_str.parseString("1999/12/31") # -> ['1999', '/', '12', '/', '31'] # use parse action to convert to ints at parse time integer = Word(nums).setParseAction(lambda toks: int(toks[0])) date_str = integer + '/' + integer + '/' + integer # note that integer fields are now ints, not strings date_str.parseString("1999/12/31") # -> [1999, '/', 12, '/', 31] R~(RtmapReRkRRR~(RtfnsR((sC/usr/lib/python2.7/site-packages/pkg_resources/_vendor/pyparsing.pyRzs"cOsF|jtttt|7_|jp<|jdt|_|S(s Add parse action to expression's list of parse actions. See L{I{setParseAction}}. See examples in L{I{copy}}. R~(RkRRReR~RR(RRR((sC/usr/lib/python2.7/site-packages/pkg_resources/_vendor/pyparsing.pytaddParseActions$cs|jdd|jdtr*tntx3|D]+fd}|jj|q7W|jp~|jdt|_|S(sAdd a boolean predicate function to expression's list of parse actions. See L{I{setParseAction}} for function call signatures. Unlike C{setParseAction}, functions passed to C{addCondition} need to return boolean success/fail of the condition. Optional keyword arguments: - message = define a custom message to be used in the raised exception - fatal = if True, will raise ParseFatalException to stop parsing immediately; otherwise will raise ParseException Example:: integer = Word(nums).setParseAction(lambda toks: int(toks[0])) year_int = integer.copy() year_int.addCondition(lambda toks: toks[0] >= 2000, message="Only support years 2000 and later") date_str = year_int + '/' + integer + '/' + integer result = date_str.parseString("1999/12/31") # -> Exception: Only support years 2000 and later (at char 0), (line:1, col:1) tmessagesfailed user-defined conditiontfatalcs7tt|||s3||ndS(N(RRe(RRNRp(texc_typetfnR(sC/usr/lib/python2.7/site-packages/pkg_resources/_vendor/pyparsing.pytpasR~(RRRRRkRR~(RRRR((RRRsC/usr/lib/python2.7/site-packages/pkg_resources/_vendor/pyparsing.pyt addConditions cCs ||_|S(s Define action to perform if parsing fails at this expression. Fail acton fn is a callable function that takes the arguments C{fn(s,loc,expr,err)} where: - s = string being parsed - loc = location where expression match was attempted and failed - expr = the parse expression that failed - err = the exception thrown The function returns no value. 
It may throw C{L{ParseFatalException}} if it is desired to stop parsing immediately.(Rl(RR((sC/usr/lib/python2.7/site-packages/pkg_resources/_vendor/pyparsing.pyt setFailActions cCsnt}xa|rit}xN|jD]C}y)x"|j||\}}t}q+WWqtk raqXqWq W|S(N(RRRuRR(RRERt exprsFoundtetdummy((sC/usr/lib/python2.7/site-packages/pkg_resources/_vendor/pyparsing.pyt_skipIgnorables#s   cCsp|jr|j||}n|jrl|j}t|}x-||krh|||krh|d7}q?Wn|S(Ni(RuRRpRqR(RRERtwttinstrlen((sC/usr/lib/python2.7/site-packages/pkg_resources/_vendor/pyparsing.pytpreParse0s    cCs |gfS(N((RRERR((sC/usr/lib/python2.7/site-packages/pkg_resources/_vendor/pyparsing.pyt parseImpl<scCs|S(N((RRERt tokenlist((sC/usr/lib/python2.7/site-packages/pkg_resources/_vendor/pyparsing.pyt postParse?sc Cs|j}|s|jr,|jdr?|jd|||n|rc|jrc|j||}n|}|}yUy|j|||\}}Wn/tk rt|t||j |nXWqt k r(} |jdr|jd|||| n|jr"|j|||| nqXn|rP|jrP|j||}n|}|}|j sw|t|kry|j|||\}}Wqtk rt|t||j |qXn|j|||\}}|j |||}t ||jd|jd|j} |jrf|s7|jrf|ryrxk|jD]`} | ||| }|dk rJt ||jd|jot|t tfd|j} qJqJWWqct k r} |jdr|jd|||| nqcXqfxn|jD]`} | ||| }|dk rt ||jd|joMt|t tfd|j} qqWn|r|jdr|jd||||| qn|| fS(NiiRRi(RvRlR{R}RRRRRRyRRxRR RnRoRzRkR~RRsR( RRERRRt debuggingtpreloct tokensStartttokensterrt retTokensR((sC/usr/lib/python2.7/site-packages/pkg_resources/_vendor/pyparsing.pyt _parseNoCacheCsp   &    %$       #cCsNy|j||dtdSWn)tk rIt|||j|nXdS(NRi(RRRRRy(RRER((sC/usr/lib/python2.7/site-packages/pkg_resources/_vendor/pyparsing.pyttryParses cCs7y|j||Wnttfk r.tSXtSdS(N(RRRRR(RRER((sC/usr/lib/python2.7/site-packages/pkg_resources/_vendor/pyparsing.pyt canParseNexts t_UnboundedCachecBseZdZRS(csit|_fd}fd}fd}tj|||_tj|||_tj|||_dS(Ncsj|S(N(R(RR(tcachet not_in_cache(sC/usr/lib/python2.7/site-packages/pkg_resources/_vendor/pyparsing.pyRscs||}) - define your parse action using the full C{(s,loc,toks)} signature, and reference the input string using the parse action's C{s} argument - explictly expand the tabs in your input string before calling C{parseString} Example:: Word('a').parseString('aaaaabaaa') # -> ['aaaaa'] Word('a').parseString('aaaaabaaa', parseAll=True) # -> Exception: Expected end of text iN( R"RRwt streamlineRuRtt expandtabsRRR R'Rtverbose_stacktrace(RREtparseAllRRRtseRL((sC/usr/lib/python2.7/site-packages/pkg_resources/_vendor/pyparsing.pyt parseString#s$      ccs|js|jnx|jD]}|jq W|jsRt|j}nt|}d}|j}|j}t j d} yx||kra| |kray.|||} ||| dt \} } Wnt k r| d}qX| |krT| d7} | | | fV|rK|||} | |kr>| }qQ|d7}q^| }q| d}qWWn(t k r}t jrq|nXdS(s Scan the input string for expression matches. Each match will return the matching tokens, start location, and end location. May be called with optional C{maxMatches} argument, to clip scanning after 'n' matches are found. If C{overlap} is specified, then overlapping matches will be reported. Note that the start and end locations are reported relative to the string being parsed. See L{I{parseString}} for more information on parsing strings with embedded tabs. 
Example:: source = "sldjf123lsdjjkf345sldkjf879lkjsfd987" print(source) for tokens,start,end in Word(alphas).scanString(source): print(' '*start + '^'*(end-start)) print(' '*start + tokens[0]) prints:: sldjf123lsdjjkf345sldkjf879lkjsfd987 ^^^^^ sldjf ^^^^^^^ lsdjjkf ^^^^^^ sldkjf ^^^^^^ lkjsfd iRiN(RwRRuRtRRRRRR"RRRRR(RREt maxMatchestoverlapRRRt preparseFntparseFntmatchesRtnextLocRtnextlocRL((sC/usr/lib/python2.7/site-packages/pkg_resources/_vendor/pyparsing.pyt scanStringUsB               c Cs%g}d}t|_yx|j|D]}\}}}|j|||!|rt|trs||j7}qt|tr||7}q|j|n|}q(W|j||g|D]}|r|^q}djt t t |SWn(t k r }t jrq!|nXdS(sf Extension to C{L{scanString}}, to modify matching text with modified tokens that may be returned from a parse action. To use C{transformString}, define a grammar and attach a parse action to it that modifies the returned token list. Invoking C{transformString()} on a target string will then scan for matches, and replace the matched text patterns according to the logic in the parse action. C{transformString()} returns the resulting transformed string. Example:: wd = Word(alphas) wd.setParseAction(lambda toks: toks[0].title()) print(wd.transformString("now is the winter of our discontent made glorious summer by this sun of york.")) Prints:: Now Is The Winter Of Our Discontent Made Glorious Summer By This Sun Of York. iRrN(RRtRRRsR RRRRRt_flattenRR"R( RRERtlastERpRRtoRL((sC/usr/lib/python2.7/site-packages/pkg_resources/_vendor/pyparsing.pyR{s(     cCsey6tg|j||D]\}}}|^qSWn(tk r`}tjrWqa|nXdS(s~ Another extension to C{L{scanString}}, simplifying the access to the tokens found to match the given parse expression. May be called with optional C{maxMatches} argument, to clip searching after 'n' matches are found. Example:: # a capitalized word starts with an uppercase letter, followed by zero or more lowercase letters cap_word = Word(alphas.upper(), alphas.lower()) print(cap_word.searchString("More than Iron, more than Lead, more than Gold I need Electricity")) prints:: ['More', 'Iron', 'Lead', 'Gold', 'I'] N(R RRR"R(RRERRpRRRL((sC/usr/lib/python2.7/site-packages/pkg_resources/_vendor/pyparsing.pyt searchStrings 6 c csfd}d}xJ|j|d|D]3\}}}|||!V|rO|dVn|}q"W||VdS(s[ Generator method to split a string using the given expression as a separator. May be called with optional C{maxsplit} argument, to limit the number of splits; and the optional C{includeSeparators} argument (default=C{False}), if the separating matching text should be included in the split results. Example:: punc = oneOf(list(".,;:/-!?")) print(list(punc.split("This, this?, this sentence, is badly punctuated!"))) prints:: ['This', ' this', '', ' this sentence', ' is badly punctuated', ''] iRN(R( RREtmaxsplittincludeSeparatorstsplitstlastRpRR((sC/usr/lib/python2.7/site-packages/pkg_resources/_vendor/pyparsing.pyRs %   cCsdt|tr!tj|}nt|tsTtjdt|tdddSt ||gS(s Implementation of + operator - returns C{L{And}}. Adding strings to a ParserElement converts them to L{Literal}s by default. Example:: greet = Word(alphas) + "," + Word(alphas) + "!" hello = "Hello, World!" print (hello, "->", greet.parseString(hello)) Prints:: Hello, World! 
-> ['Hello', ',', 'World', '!'] s4Cannot combine element of type %s with ParserElementt stackleveliN( RsRR"RitwarningstwarnRt SyntaxWarningRR(RR ((sC/usr/lib/python2.7/site-packages/pkg_resources/_vendor/pyparsing.pyR s  cCs\t|tr!tj|}nt|tsTtjdt|tdddS||S(s] Implementation of + operator when left operand is not a C{L{ParserElement}} s4Cannot combine element of type %s with ParserElementRiN( RsRR"RiRRRRR(RR ((sC/usr/lib/python2.7/site-packages/pkg_resources/_vendor/pyparsing.pyRs cCsmt|tr!tj|}nt|tsTtjdt|tdddSt |t j |gS(sQ Implementation of - operator, returns C{L{And}} with error stop s4Cannot combine element of type %s with ParserElementRiN( RsRR"RiRRRRRRt _ErrorStop(RR ((sC/usr/lib/python2.7/site-packages/pkg_resources/_vendor/pyparsing.pyt__sub__s cCs\t|tr!tj|}nt|tsTtjdt|tdddS||S(s] Implementation of - operator when left operand is not a C{L{ParserElement}} s4Cannot combine element of type %s with ParserElementRiN( RsRR"RiRRRRR(RR ((sC/usr/lib/python2.7/site-packages/pkg_resources/_vendor/pyparsing.pyt__rsub__ s csEt|tr|d}}n-t|tr7|d d }|dd kr_d|df}nt|dtr|dd kr|ddkrtS|ddkrtS|dtSqLt|dtrt|dtr|\}}||8}qLtdt|dt|dntdt||dkrgtdn|dkrtdn||kodknrtdn|rfd |r |dkr|}qt g||}qA|}n(|dkr.}nt g|}|S( s Implementation of * operator, allows use of C{expr * 3} in place of C{expr + expr + expr}. Expressions may also me multiplied by a 2-integer tuple, similar to C{{min,max}} multipliers in regular expressions. Tuples may also include C{None} as in: - C{expr*(n,None)} or C{expr*(n,)} is equivalent to C{expr*n + L{ZeroOrMore}(expr)} (read as "at least n instances of C{expr}") - C{expr*(None,n)} is equivalent to C{expr*(0,n)} (read as "0 to n instances of C{expr}") - C{expr*(None,None)} is equivalent to C{L{ZeroOrMore}(expr)} - C{expr*(1,None)} is equivalent to C{L{OneOrMore}(expr)} Note that C{expr*(None,n)} does not raise an exception if more than n exprs exist in the input stream; that is, C{expr*(None,n)} does not enforce a maximum number of expr occurrences. 
If this behavior is desired, then write C{expr*(None,n) + ~expr} iiis7cannot multiply 'ParserElement' and ('%s','%s') objectss0cannot multiply 'ParserElement' and '%s' objectss/cannot multiply ParserElement by negative values@second tuple value must be greater or equal to first tuple values+cannot multiply ParserElement by 0 or (0,0)cs2|dkr$t|dStSdS(Ni(R(tn(tmakeOptionalListR(sC/usr/lib/python2.7/site-packages/pkg_resources/_vendor/pyparsing.pyR]s N(NN( RsRottupleRR0RRRt ValueErrorR(RR t minElementst optElementsR}((RRsC/usr/lib/python2.7/site-packages/pkg_resources/_vendor/pyparsing.pyt__mul__,sD#  &  )      cCs |j|S(N(R(RR ((sC/usr/lib/python2.7/site-packages/pkg_resources/_vendor/pyparsing.pyt__rmul__pscCsdt|tr!tj|}nt|tsTtjdt|tdddSt ||gS(sI Implementation of | operator - returns C{L{MatchFirst}} s4Cannot combine element of type %s with ParserElementRiN( RsRR"RiRRRRRR(RR ((sC/usr/lib/python2.7/site-packages/pkg_resources/_vendor/pyparsing.pyt__or__ss cCs\t|tr!tj|}nt|tsTtjdt|tdddS||BS(s] Implementation of | operator when left operand is not a C{L{ParserElement}} s4Cannot combine element of type %s with ParserElementRiN( RsRR"RiRRRRR(RR ((sC/usr/lib/python2.7/site-packages/pkg_resources/_vendor/pyparsing.pyt__ror__s cCsdt|tr!tj|}nt|tsTtjdt|tdddSt ||gS(sA Implementation of ^ operator - returns C{L{Or}} s4Cannot combine element of type %s with ParserElementRiN( RsRR"RiRRRRRR(RR ((sC/usr/lib/python2.7/site-packages/pkg_resources/_vendor/pyparsing.pyt__xor__s cCs\t|tr!tj|}nt|tsTtjdt|tdddS||AS(s] Implementation of ^ operator when left operand is not a C{L{ParserElement}} s4Cannot combine element of type %s with ParserElementRiN( RsRR"RiRRRRR(RR ((sC/usr/lib/python2.7/site-packages/pkg_resources/_vendor/pyparsing.pyt__rxor__s cCsdt|tr!tj|}nt|tsTtjdt|tdddSt ||gS(sC Implementation of & operator - returns C{L{Each}} s4Cannot combine element of type %s with ParserElementRiN( RsRR"RiRRRRRR (RR ((sC/usr/lib/python2.7/site-packages/pkg_resources/_vendor/pyparsing.pyt__and__s cCs\t|tr!tj|}nt|tsTtjdt|tdddS||@S(s] Implementation of & operator when left operand is not a C{L{ParserElement}} s4Cannot combine element of type %s with ParserElementRiN( RsRR"RiRRRRR(RR ((sC/usr/lib/python2.7/site-packages/pkg_resources/_vendor/pyparsing.pyt__rand__s cCs t|S(sE Implementation of ~ operator - returns C{L{NotAny}} (R(R((sC/usr/lib/python2.7/site-packages/pkg_resources/_vendor/pyparsing.pyt __invert__scCs'|dk r|j|S|jSdS(s  Shortcut for C{L{setResultsName}}, with C{listAllMatches=False}. If C{name} is given with a trailing C{'*'} character, then C{listAllMatches} will be passed as C{True}. If C{name} is omitted, same as calling C{L{copy}}. Example:: # these are equivalent userdata = Word(alphas).setResultsName("name") + Word(nums+"-").setResultsName("socsecno") userdata = Word(alphas)("name") + Word(nums+"-")("socsecno") N(RRR(RR((sC/usr/lib/python2.7/site-packages/pkg_resources/_vendor/pyparsing.pyt__call__s  cCs t|S(s Suppresses the output of this C{ParserElement}; useful to keep punctuation from cluttering up returned output. (R)(R((sC/usr/lib/python2.7/site-packages/pkg_resources/_vendor/pyparsing.pytsuppressscCs t|_|S(s Disables the skipping of whitespace before matching the characters in the C{ParserElement}'s defined pattern. This is normally only used internally by the pyparsing module, but may be needed in some whitespace-sensitive grammars. 
(RRp(R((sC/usr/lib/python2.7/site-packages/pkg_resources/_vendor/pyparsing.pytleaveWhitespaces cCst|_||_t|_|S(s8 Overrides the default whitespace chars (RRpRqRRr(RRg((sC/usr/lib/python2.7/site-packages/pkg_resources/_vendor/pyparsing.pytsetWhitespaceCharss   cCs t|_|S(s Overrides default behavior to expand C{}s to spaces before parsing the input string. Must be called before C{parseString} when the input grammar contains elements that match C{} characters. (RRt(R((sC/usr/lib/python2.7/site-packages/pkg_resources/_vendor/pyparsing.pyt parseWithTabss cCsrt|trt|}nt|trR||jkrn|jj|qnn|jjt|j|S(s Define expression to be ignored (e.g., comments) while doing pattern matching; may be called repeatedly, to define multiple comment or other ignorable patterns. Example:: patt = OneOrMore(Word(alphas)) patt.parseString('ablaj /* comment */ lskjd') # -> ['ablaj'] patt.ignore(cStyleComment) patt.parseString('ablaj /* comment */ lskjd') # -> ['ablaj', 'lskjd'] (RsRR)RuRR(RR ((sC/usr/lib/python2.7/site-packages/pkg_resources/_vendor/pyparsing.pytignores cCs1|p t|pt|ptf|_t|_|S(sT Enable display of debugging messages while doing pattern matching. (RGRKRMR{RRv(Rt startActiont successActiontexceptionAction((sC/usr/lib/python2.7/site-packages/pkg_resources/_vendor/pyparsing.pytsetDebugActions s    cCs)|r|jtttn t|_|S(s Enable display of debugging messages while doing pattern matching. Set C{flag} to True to enable, False to disable. Example:: wd = Word(alphas).setName("alphaword") integer = Word(nums).setName("numword") term = wd | integer # turn on debugging for wd wd.setDebug() OneOrMore(term).parseString("abc 123 xyz 890") prints:: Match alphaword at loc 0(1,1) Matched alphaword -> ['abc'] Match alphaword at loc 3(1,4) Exception raised:Expected alphaword (at char 4), (line:1, col:5) Match alphaword at loc 7(1,8) Matched alphaword -> ['xyz'] Match alphaword at loc 11(1,12) Exception raised:Expected alphaword (at char 12), (line:1, col:13) Match alphaword at loc 15(1,16) Exception raised:Expected alphaword (at char 15), (line:1, col:16) The output shown is that produced by the default debug actions - custom debug actions can be specified using L{setDebugActions}. Prior to attempting to match the C{wd} expression, the debugging message C{"Match at loc (,)"} is shown. Then if the parse succeeds, a C{"Matched"} message is shown, or an C{"Exception raised"} message is shown. Also note the use of L{setName} to assign a human-readable name to the expression, which makes debugging and exception messages easier to understand - for instance, the default name created for the C{Word} expression without calling C{setName} is C{"W:(ABCD...)"}. (RRGRKRMRRv(Rtflag((sC/usr/lib/python2.7/site-packages/pkg_resources/_vendor/pyparsing.pytsetDebugs# cCs|jS(N(R(R((sC/usr/lib/python2.7/site-packages/pkg_resources/_vendor/pyparsing.pyR@scCs t|S(N(R(R((sC/usr/lib/python2.7/site-packages/pkg_resources/_vendor/pyparsing.pyRCscCst|_d|_|S(N(RRwRRm(R((sC/usr/lib/python2.7/site-packages/pkg_resources/_vendor/pyparsing.pyRFs  cCsdS(N((RR((sC/usr/lib/python2.7/site-packages/pkg_resources/_vendor/pyparsing.pytcheckRecursionKscCs|jgdS(sj Check defined expressions for valid structure, check for infinite recursive definitions. N(R(Rt validateTrace((sC/usr/lib/python2.7/site-packages/pkg_resources/_vendor/pyparsing.pytvalidateNscCsy|j}Wn5tk rGt|d}|j}WdQXnXy|j||SWn(tk r}tjr}q|nXdS(s Execute the parse expression on the given file or filename. 
If a filename is specified (instead of a file object), the entire file is opened, read, and closed before parsing. trN(treadRtopenRRR"R(Rtfile_or_filenameRt file_contentstfRL((sC/usr/lib/python2.7/site-packages/pkg_resources/_vendor/pyparsing.pyt parseFileTs  cCsdt|tr1||kp0t|t|kSt|trM|j|Stt||kSdS(N(RsR"tvarsRRtsuper(RR ((sC/usr/lib/python2.7/site-packages/pkg_resources/_vendor/pyparsing.pyt__eq__hs " cCs ||k S(N((RR ((sC/usr/lib/python2.7/site-packages/pkg_resources/_vendor/pyparsing.pyt__ne__pscCstt|S(N(thashtid(R((sC/usr/lib/python2.7/site-packages/pkg_resources/_vendor/pyparsing.pyt__hash__sscCs ||kS(N((RR ((sC/usr/lib/python2.7/site-packages/pkg_resources/_vendor/pyparsing.pyt__req__vscCs ||k S(N((RR ((sC/usr/lib/python2.7/site-packages/pkg_resources/_vendor/pyparsing.pyt__rne__yscCs:y!|jt|d|tSWntk r5tSXdS(s Method for quick testing of a parser against a test string. Good for simple inline microtests of sub expressions while building up larger parser. Parameters: - testString - to test against this expression for a match - parseAll - (default=C{True}) - flag to pass to C{L{parseString}} when running tests Example:: expr = Word(nums) assert expr.matches("100") RN(RRRRR(Rt testStringR((sC/usr/lib/python2.7/site-packages/pkg_resources/_vendor/pyparsing.pyR|s  t#cCsyt|tr6tttj|jj}nt|trTt|}ng}g}t } x|D]} |d k r|j | t s|r| r|j | qmn| sqmndj|| g} g}yQ| jdd} |j| d|} | j | jd|| o%| } Wntk r} t| trPdnd}d| kr| j t| j| | j dt| j| dd |n| j d| jd || j d t| | o|} | } n<tk r*}| j d t|| o|} |} nX|rX|rG| j dndj| GHn|j | | fqmW| |fS( s3 Execute the parse expression on a series of test strings, showing each test, the parsed results or where the parse failed. Quick and easy way to run a parse expression against a list of sample strings. Parameters: - tests - a list of separate test strings, or a multiline string of test strings - parseAll - (default=C{True}) - flag to pass to C{L{parseString}} when running tests - comment - (default=C{'#'}) - expression for indicating embedded comments in the test string; pass None to disable comment filtering - fullDump - (default=C{True}) - dump results as list followed by results names in nested outline; if False, only dump nested list - printResults - (default=C{True}) prints test output to stdout - failureTests - (default=C{False}) indicates if these tests are expected to fail parsing Returns: a (success, results) tuple, where success indicates that all tests succeeded (or failed if C{failureTests} is True), and the results contain a list of lines of each test's output Example:: number_expr = pyparsing_common.number.copy() result = number_expr.runTests(''' # unsigned integer 100 # negative integer -100 # float with scientific notation 6.02e23 # integer with scientific notation 1e-12 ''') print("Success" if result[0] else "Failed!") result = number_expr.runTests(''' # stray character 100Z # missing leading digit before '.' -.100 # too many '.' 3.14.159 ''', failureTests=True) print("Success" if result[0] else "Failed!") prints:: # unsigned integer 100 [100] # negative integer -100 [-100] # float with scientific notation 6.02e23 [6.02e+23] # integer with scientific notation 1e-12 [1e-12] Success # stray character 100Z ^ FAIL: Expected end of text (at char 3), (line:1, col:4) # missing leading digit before '.' -.100 ^ FAIL: Expected {real number with scientific notation | real number | signed integer} (at char 0), (line:1, col:1) # too many '.' 3.14.159 ^ FAIL: Expected end of text (at char 4), (line:1, col:5) Success Each test string must be on a single line. 
If you want to test a string that spans multiple lines, create a test like this:: expr.runTest(r"this is a test\n of strings that spans \n 3 lines") (Note that this is a raw string literal, you must include the leading 'r'.) s s\nRR6s(FATAL)Rrt it^sFAIL: sFAIL-EXCEPTION: N(RsRRRRuRtrstript splitlinesRRRRRRRRRR3RRRERR7Ra(RttestsRtcommenttfullDumpt printResultst failureTestst allResultstcommentstsuccessRpRtresultRRRL((sC/usr/lib/python2.7/site-packages/pkg_resources/_vendor/pyparsing.pytrunTestssNW' +  ,    N(PRRRRfRRt staticmethodRhRjRRRRRRRzRRRRRRRRRRRRRRRRRRRRRRRRRt_MAX_INTRR{RRR RRRRRRRRRRRRRRRRRRRRRRRRRR R R RRRRR"(((sC/usr/lib/python2.7/site-packages/pkg_resources/_vendor/pyparsing.pyR"8s      &   H     " 2G +   D      )            cBseZdZdZRS(sT Abstract C{ParserElement} subclass, for defining atomic matching patterns. cCstt|jdtdS(NR(R R*RR(R((sC/usr/lib/python2.7/site-packages/pkg_resources/_vendor/pyparsing.pyR s(RRRR(((sC/usr/lib/python2.7/site-packages/pkg_resources/_vendor/pyparsing.pyR* scBseZdZdZRS(s, An empty token, will always match. cCs2tt|jd|_t|_t|_dS(NR (R R RRRRsRRx(R((sC/usr/lib/python2.7/site-packages/pkg_resources/_vendor/pyparsing.pyR s  (RRRR(((sC/usr/lib/python2.7/site-packages/pkg_resources/_vendor/pyparsing.pyR  scBs#eZdZdZedZRS(s( A token that will never match. cCs;tt|jd|_t|_t|_d|_dS(NRsUnmatchable token( R RRRRRsRRxRy(R((sC/usr/lib/python2.7/site-packages/pkg_resources/_vendor/pyparsing.pyR* s    cCst|||j|dS(N(RRy(RRERR((sC/usr/lib/python2.7/site-packages/pkg_resources/_vendor/pyparsing.pyR1 s(RRRRRR(((sC/usr/lib/python2.7/site-packages/pkg_resources/_vendor/pyparsing.pyR& s cBs#eZdZdZedZRS(s Token to exactly match a specified string. Example:: Literal('blah').parseString('blah') # -> ['blah'] Literal('blah').parseString('blahfooblah') # -> ['blah'] Literal('blah').parseString('bla') # -> Exception: Expected "blah" For case-insensitive matching, use L{CaselessLiteral}. For keyword matching (force word break before and after the matched string), use L{Keyword} or L{CaselessKeyword}. cCstt|j||_t||_y|d|_Wn0tk rntj dt ddt |_ nXdt |j|_d|j|_t|_t|_dS(Nis2null string passed to Literal; use Empty() insteadRis"%s"s Expected (R RRtmatchRtmatchLentfirstMatchCharRRRRR R^RRRyRRsRx(Rt matchString((sC/usr/lib/python2.7/site-packages/pkg_resources/_vendor/pyparsing.pyRC s      cCsg|||jkrK|jdks7|j|j|rK||j|jfSt|||j|dS(Ni(R'R&t startswithR%RRy(RRERR((sC/usr/lib/python2.7/site-packages/pkg_resources/_vendor/pyparsing.pyRV s$(RRRRRR(((sC/usr/lib/python2.7/site-packages/pkg_resources/_vendor/pyparsing.pyR5 s  cBsKeZdZedZdedZedZ dZ e dZ RS(s\ Token to exactly match a specified string as a keyword, that is, it must be immediately followed by a non-keyword character. Compare with C{L{Literal}}: - C{Literal("if")} will match the leading C{'if'} in C{'ifAndOnlyIf'}. - C{Keyword("if")} will not; it will only match the leading C{'if'} in C{'if x=1'}, or C{'if(y==2)'} Accepts two optional constructor arguments in addition to the keyword string: - C{identChars} is a string of characters that would be valid identifier characters, defaulting to all alphanumerics + "_" and "$" - C{caseless} allows case-insensitive matching, default is C{False}. Example:: Keyword("start").parseString("start") # -> ['start'] Keyword("start").parseString("starting") # -> Exception For case-insensitive matching, use L{CaselessKeyword}. 
s_$cCstt|j|dkr+tj}n||_t||_y|d|_Wn't k r}t j dt ddnXd|j|_ d|j |_t|_t|_||_|r|j|_|j}nt||_dS(Nis2null string passed to Keyword; use Empty() insteadRis"%s"s Expected (R RRRtDEFAULT_KEYWORD_CHARSR%RR&R'RRRRRRyRRsRxtcaselesstuppert caselessmatchRt identChars(RR(R.R+((sC/usr/lib/python2.7/site-packages/pkg_resources/_vendor/pyparsing.pyRq s&        cCsb|jr||||j!j|jkrF|t||jkse|||jj|jkrF|dks||dj|jkrF||j|jfSn|||jkrF|jdks|j|j|rF|t||jks|||j|jkrF|dks2||d|jkrF||j|jfSt |||j |dS(Nii( R+R&R,R-RR.R%R'R)RRy(RRERR((sC/usr/lib/python2.7/site-packages/pkg_resources/_vendor/pyparsing.pyR s #9)$3#cCs%tt|j}tj|_|S(N(R RRR*R.(RR((sC/usr/lib/python2.7/site-packages/pkg_resources/_vendor/pyparsing.pyR s cCs |t_dS(s,Overrides the default Keyword chars N(RR*(Rg((sC/usr/lib/python2.7/site-packages/pkg_resources/_vendor/pyparsing.pytsetDefaultKeywordChars sN( RRRR1R*RRRRRRR#R/(((sC/usr/lib/python2.7/site-packages/pkg_resources/_vendor/pyparsing.pyR^ s    cBs#eZdZdZedZRS(sl Token to match a specified string, ignoring case of letters. Note: the matched results will always be in the case of the given match string, NOT the case of the input text. Example:: OneOrMore(CaselessLiteral("CMD")).parseString("cmd CMD Cmd10") # -> ['CMD', 'CMD', 'CMD'] (Contrast with example for L{CaselessKeyword}.) cCsItt|j|j||_d|j|_d|j|_dS(Ns'%s's Expected (R RRR,t returnStringRRy(RR(((sC/usr/lib/python2.7/site-packages/pkg_resources/_vendor/pyparsing.pyR s cCsS||||j!j|jkr7||j|jfSt|||j|dS(N(R&R,R%R0RRy(RRERR((sC/usr/lib/python2.7/site-packages/pkg_resources/_vendor/pyparsing.pyR s#(RRRRRR(((sC/usr/lib/python2.7/site-packages/pkg_resources/_vendor/pyparsing.pyR s  cBs&eZdZddZedZRS(s Caseless version of L{Keyword}. Example:: OneOrMore(CaselessKeyword("CMD")).parseString("cmd CMD Cmd10") # -> ['CMD', 'CMD'] (Contrast with example for L{CaselessLiteral}.) cCs#tt|j||dtdS(NR+(R RRR(RR(R.((sC/usr/lib/python2.7/site-packages/pkg_resources/_vendor/pyparsing.pyR scCs||||j!j|jkrp|t||jks\|||jj|jkrp||j|jfSt|||j|dS(N(R&R,R-RR.R%RRy(RRERR((sC/usr/lib/python2.7/site-packages/pkg_resources/_vendor/pyparsing.pyR s#9N(RRRRRRR(((sC/usr/lib/python2.7/site-packages/pkg_resources/_vendor/pyparsing.pyR s cBs&eZdZddZedZRS(sx A variation on L{Literal} which matches "close" matches, that is, strings with at most 'n' mismatching characters. C{CloseMatch} takes parameters: - C{match_string} - string to be matched - C{maxMismatches} - (C{default=1}) maximum number of mismatches allowed to count as a match The results from a successful parse will contain the matched text from the input string and the following named results: - C{mismatches} - a list of the positions within the match_string where mismatches were found - C{original} - the original match_string used to compare against the input string If C{mismatches} is an empty list, then the match was an exact match. 
Example:: patt = CloseMatch("ATCATCGAATGGA") patt.parseString("ATCATCGAAXGGA") # -> (['ATCATCGAAXGGA'], {'mismatches': [[9]], 'original': ['ATCATCGAATGGA']}) patt.parseString("ATCAXCGAAXGGA") # -> Exception: Expected 'ATCATCGAATGGA' (with up to 1 mismatches) (at char 0), (line:1, col:1) # exact match patt.parseString("ATCATCGAATGGA") # -> (['ATCATCGAATGGA'], {'mismatches': [[]], 'original': ['ATCATCGAATGGA']}) # close match allowing up to 2 mismatches patt = CloseMatch("ATCATCGAATGGA", maxMismatches=2) patt.parseString("ATCAXCGAAXGGA") # -> (['ATCAXCGAAXGGA'], {'mismatches': [[4, 9]], 'original': ['ATCATCGAATGGA']}) icCs]tt|j||_||_||_d|j|jf|_t|_t|_ dS(Ns&Expected %r (with up to %d mismatches)( R RjRRt match_stringt maxMismatchesRyRRxRs(RR1R2((sC/usr/lib/python2.7/site-packages/pkg_resources/_vendor/pyparsing.pyR s    cCs|}t|}|t|j}||kr|j}d}g} |j} xtt|||!|jD]J\}} | \} } | | kro| j|t| | krPqqoqoW|d}t|||!g}|j|d<| |d<||fSnt|||j|dS(Niitoriginalt mismatches( RR1R2RRRR RRy(RRERRtstartRtmaxlocR1tmatch_stringlocR4R2ts_mtsrctmattresults((sC/usr/lib/python2.7/site-packages/pkg_resources/_vendor/pyparsing.pyR s(    ,        (RRRRRR(((sC/usr/lib/python2.7/site-packages/pkg_resources/_vendor/pyparsing.pyRj s cBs>eZdZddddeddZedZdZRS(s Token for matching words composed of allowed character sets. Defined with string containing all allowed initial characters, an optional string containing allowed body characters (if omitted, defaults to the initial character set), and an optional minimum, maximum, and/or exact length. The default value for C{min} is 1 (a minimum value < 1 is not valid); the default values for C{max} and C{exact} are 0, meaning no maximum or exact length restriction. An optional C{excludeChars} parameter can list characters that might be found in the input C{bodyChars} string; useful to define a word of all printables except for one or two characters, for instance. L{srange} is useful for defining custom character set strings for defining C{Word} expressions, using range notation from regular expression character sets. A common mistake is to use C{Word} to match a specific literal string, as in C{Word("Address")}. Remember that C{Word} uses the string argument to define I{sets} of matchable characters. This expression would match "Add", "AAA", "dAred", or any other word made up of the characters 'A', 'd', 'r', 'e', and 's'. To match an exact literal string, use L{Literal} or L{Keyword}. pyparsing includes helper strings for building Words: - L{alphas} - L{nums} - L{alphanums} - L{hexnums} - L{alphas8bit} (alphabetic characters in ASCII range 128-255 - accented, tilded, umlauted, etc.) - L{punc8bit} (non-alphabetic characters in ASCII range 128-255 - currency, symbols, superscripts, diacriticals, etc.) 
- L{printables} (any non-whitespace character) Example:: # a word composed of digits integer = Word(nums) # equivalent to Word("0123456789") or Word(srange("0-9")) # a word with a leading capital, and zero or more lowercase capital_word = Word(alphas.upper(), alphas.lower()) # hostnames are alphanumeric, with leading alpha, and '-' hostname = Word(alphas, alphanums+'-') # roman numeral (not a strict parser, accepts invalid mix of characters) roman = Word("IVXLCDM") # any string of non-whitespace characters, except for ',' csv_value = Word(printables, excludeChars=",") iicstt|jrcdjfd|D}|rcdjfd|D}qcn||_t||_|r||_t||_n||_t||_|dk|_ |dkrt dn||_ |dkr||_ n t |_ |dkr)||_ ||_ nt||_d|j|_t|_||_d|j|jkr}|dkr}|dkr}|dkr}|j|jkrd t|j|_net|jdkrd tj|jt|jf|_n%d t|jt|jf|_|jrDd |jd |_nytj|j|_Wq}tk ryd|_q}XndS( NRrc3s!|]}|kr|VqdS(N((RR(t excludeChars(sC/usr/lib/python2.7/site-packages/pkg_resources/_vendor/pyparsing.pys 7 sc3s!|]}|kr|VqdS(N((RR(R<(sC/usr/lib/python2.7/site-packages/pkg_resources/_vendor/pyparsing.pys 9 siisZcannot specify a minimum length < 1; use Optional(Word()) if zero-length word is permitteds Expected Rs[%s]+s%s[%s]*s [%s][%s]*s\b(R R-RRt initCharsOrigRt initCharst bodyCharsOrigt bodyCharst maxSpecifiedRtminLentmaxLenR$RRRyRRxt asKeywordt_escapeRegexRangeCharstreStringRR|tescapetcompileRaR(RR>R@tmintmaxtexactRDR<((R<sC/usr/lib/python2.7/site-packages/pkg_resources/_vendor/pyparsing.pyR4 sT%             :   c Cs|jr[|jj||}|s?t|||j|n|j}||jfS|||jkrt|||j|n|}|d7}t|}|j}||j }t ||}x*||kr|||kr|d7}qWt } |||j krt } n|jrG||krG|||krGt } n|jr|dkrp||d|ks||kr|||krt } qn| rt|||j|n||||!fS(Nii(R|R%RRytendtgroupR>RR@RCRIRRBRRARD( RRERRR!R5Rt bodycharsR6tthrowException((sC/usr/lib/python2.7/site-packages/pkg_resources/_vendor/pyparsing.pyRj s6       %  < cCsytt|jSWntk r*nX|jdkrd}|j|jkr}d||j||jf|_qd||j|_n|jS(NcSs&t|dkr|d dS|SdS(Nis...(R(R((sC/usr/lib/python2.7/site-packages/pkg_resources/_vendor/pyparsing.pyt charsAsStr s s W:(%s,%s)sW:(%s)(R R-RRaRmRR=R?(RRP((sC/usr/lib/python2.7/site-packages/pkg_resources/_vendor/pyparsing.pyR s  (N( RRRRRRRRR(((sC/usr/lib/python2.7/site-packages/pkg_resources/_vendor/pyparsing.pyR- s.6 #cBsDeZdZeejdZddZedZ dZ RS(s Token for matching strings that match a given regular expression. Defined with string specifying the regular expression in a form recognized by the inbuilt Python re module. If the given regex contains named groups (defined using C{(?P...)}), these will be preserved as named parse results. Example:: realnum = Regex(r"[+-]?\d+\.\d*") date = Regex(r'(?P\d{4})-(?P\d\d?)-(?P\d\d?)') # ref: http://stackoverflow.com/questions/267399/how-do-you-match-only-valid-roman-numerals-with-a-regular-expression roman = Regex(r"M{0,4}(CM|CD|D?C{0,3})(XC|XL|L?X{0,3})(IX|IV|V?I{0,3})") s[A-Z]icCs3tt|jt|tr|sAtjdtddn||_||_ y+t j |j|j |_ |j|_ Wqt jk rtjd|tddqXnIt|tjr||_ t||_|_ ||_ n tdt||_d|j|_t|_t|_dS(sThe parameters C{pattern} and C{flags} are passed to the C{re.compile()} function as-is. See the Python C{re} module for an explanation of the acceptable patterns and flags.s0null string passed to Regex; use Empty() insteadRis$invalid pattern (%s) passed to RegexsCRegex may only be constructed with a string or a compiled RE objects Expected N(R R%RRsRRRRtpatterntflagsR|RHRFt sre_constantsterrortcompiledREtypeRuRRRRyRRxRRs(RRQRR((sC/usr/lib/python2.7/site-packages/pkg_resources/_vendor/pyparsing.pyR s.          cCs|jj||}|s6t|||j|n|j}|j}t|j}|rx|D]}||||eZdZddeededZedZdZRS(s Token for matching strings that are delimited by quoting characters. 
Defined with the following parameters: - quoteChar - string of one or more characters defining the quote delimiting string - escChar - character to escape quotes, typically backslash (default=C{None}) - escQuote - special quote sequence to escape an embedded quote string (such as SQL's "" to escape an embedded ") (default=C{None}) - multiline - boolean indicating whether quotes can span multiple lines (default=C{False}) - unquoteResults - boolean indicating whether the matched text should be unquoted (default=C{True}) - endQuoteChar - string of one or more characters defining the end of the quote delimited string (default=C{None} => same as quoteChar) - convertWhitespaceEscapes - convert escaped whitespace (C{'\t'}, C{'\n'}, etc.) to actual whitespace (default=C{True}) Example:: qs = QuotedString('"') print(qs.searchString('lsjdf "This is the quote" sldjf')) complex_qs = QuotedString('{{', endQuoteChar='}}') print(complex_qs.searchString('lsjdf {{This is the "quote"}} sldjf')) sql_qs = QuotedString('"', escQuote='""') print(sql_qs.searchString('lsjdf "This is the quote with ""embedded"" quotes" sldjf')) prints:: [['This is the quote']] [['This is the "quote"']] [['This is the quote with "embedded" quotes']] c sttj|j}|sGtjdtddtn|dkr\|}n4|j}|stjdtddtn|_ t |_ |d_ |_ t |_|_|_|_|_|rTtjtjB_dtjj tj d|dk rDt|pGdf_nPd_dtjj tj d|dk rt|pdf_t j d krjd d jfd tt j d dd Dd7_n|r*jdtj|7_n|rhjdtj|7_tjjd_njdtjj 7_y+tjjj_j_Wn4tj k rtjdjtddnXt!_"dj"_#t$_%t&_'dS(Ns$quoteChar cannot be the empty stringRis'endQuoteChar cannot be the empty stringis %s(?:[^%s%s]Rrs%s(?:[^%s\n\r%s]is|(?:s)|(?:c3s<|]2}dtjj| tj|fVqdS(s%s[^%s]N(R|RGt endQuoteCharRE(RR(R(sC/usr/lib/python2.7/site-packages/pkg_resources/_vendor/pyparsing.pys / sit)s|(?:%s)s|(?:%s.)s(.)s)*%ss$invalid pattern (%s) passed to Regexs Expected ((R R#RRRRRt SyntaxErrorRt quoteCharRt quoteCharLentfirstQuoteCharRXtendQuoteCharLentescChartescQuotetunquoteResultstconvertWhitespaceEscapesR|t MULTILINEtDOTALLRRRGRERQRRtescCharReplacePatternRHRFRSRTRRRyRRxRRs(RR[R_R`t multilineRaRXRb((RsC/usr/lib/python2.7/site-packages/pkg_resources/_vendor/pyparsing.pyR sf             ( %E  c CsT|||jkr(|jj||p+d}|sOt|||j|n|j}|j}|jrJ||j |j !}t |t rJd|kr|j ridd6dd6dd6dd 6}x/|jD]\}}|j||}qWn|jr tj|jd |}n|jrG|j|j|j}qGqJn||fS( Ns\s s\ts s\ns s\fs s\rs\g<1>(R]R|R%RRRyRLRMRaR\R^RsRRbRRR_RReR`RX( RRERRR!R}tws_maptwslittwschar((sC/usr/lib/python2.7/site-packages/pkg_resources/_vendor/pyparsing.pyRG s*.      !cCs]ytt|jSWntk r*nX|jdkrVd|j|jf|_n|jS(Ns.quoted string, starting with %s ending with %s(R R#RRaRmRR[RX(R((sC/usr/lib/python2.7/site-packages/pkg_resources/_vendor/pyparsing.pyRj s N( RRRRRRRRR(((sC/usr/lib/python2.7/site-packages/pkg_resources/_vendor/pyparsing.pyR# sA #cBs5eZdZddddZedZdZRS(s Token for matching words composed of characters I{not} in a given set (will include whitespace in matched characters if not listed in the provided exclusion set - see example). Defined with string containing all disallowed characters, and an optional minimum, maximum, and/or exact length. The default value for C{min} is 1 (a minimum value < 1 is not valid); the default values for C{max} and C{exact} are 0, meaning no maximum or exact length restriction. 
Example:: # define a comma-separated-value as anything that is not a ',' csv_value = CharsNotIn(',') print(delimitedList(csv_value).parseString("dkls,lsdkjf,s12 34,@!#,213")) prints:: ['dkls', 'lsdkjf', 's12 34', '@!#', '213'] iicCstt|jt|_||_|dkr@tdn||_|dkra||_n t |_|dkr||_||_nt ||_ d|j |_ |jdk|_ t|_dS(Nisfcannot specify a minimum length < 1; use Optional(CharsNotIn()) if zero-length char group is permittedis Expected (R RRRRptnotCharsRRBRCR$RRRyRsRx(RRjRIRJRK((sC/usr/lib/python2.7/site-packages/pkg_resources/_vendor/pyparsing.pyR s           cCs|||jkr.t|||j|n|}|d7}|j}t||jt|}x*||kr|||kr|d7}qfW|||jkrt|||j|n||||!fS(Ni(RjRRyRIRCRRB(RRERRR5tnotcharstmaxlen((sC/usr/lib/python2.7/site-packages/pkg_resources/_vendor/pyparsing.pyR s  cCsytt|jSWntk r*nX|jdkryt|jdkrfd|jd |_qyd|j|_n|jS(Nis !W:(%s...)s!W:(%s)(R RRRaRmRRRj(R((sC/usr/lib/python2.7/site-packages/pkg_resources/_vendor/pyparsing.pyR s (RRRRRRR(((sC/usr/lib/python2.7/site-packages/pkg_resources/_vendor/pyparsing.pyRv s cBsXeZdZidd6dd6dd6dd6d d 6Zd d d d dZedZRS(s Special matching class for matching whitespace. Normally, whitespace is ignored by pyparsing grammars. This class is included when some whitespace structures are significant. Define with a string containing the whitespace characters to be matched; default is C{" \t\r\n"}. Also takes optional C{min}, C{max}, and C{exact} arguments, as defined for the C{L{Word}} class. sRss ss ss ss s iicsttj|_jdjfdjDdjdjD_t_ dj_ |_ |dkr|_ n t _ |dkr|_ |_ ndS(NRrc3s$|]}|jkr|VqdS(N(t matchWhite(RR(R(sC/usr/lib/python2.7/site-packages/pkg_resources/_vendor/pyparsing.pys scss|]}tj|VqdS(N(R,t whiteStrs(RR((sC/usr/lib/python2.7/site-packages/pkg_resources/_vendor/pyparsing.pys ss Expected i(R R,RRmRRRqRRRsRyRBRCR$(RtwsRIRJRK((RsC/usr/lib/python2.7/site-packages/pkg_resources/_vendor/pyparsing.pyR s )       cCs|||jkr.t|||j|n|}|d7}||j}t|t|}x-||kr|||jkr|d7}qcW|||jkrt|||j|n||||!fS(Ni(RmRRyRCRIRRB(RRERRR5R6((sC/usr/lib/python2.7/site-packages/pkg_resources/_vendor/pyparsing.pyR s  "(RRRRnRRR(((sC/usr/lib/python2.7/site-packages/pkg_resources/_vendor/pyparsing.pyR, s t_PositionTokencBseZdZRS(cCs8tt|j|jj|_t|_t|_ dS(N( R RpRR^RRRRsRRx(R((sC/usr/lib/python2.7/site-packages/pkg_resources/_vendor/pyparsing.pyR s (RRR(((sC/usr/lib/python2.7/site-packages/pkg_resources/_vendor/pyparsing.pyRp scBs,eZdZdZdZedZRS(sb Token to advance to a specific column of input text; useful for tabular report scraping. 
cCs tt|j||_dS(N(R RRR7(Rtcolno((sC/usr/lib/python2.7/site-packages/pkg_resources/_vendor/pyparsing.pyR scCst|||jkrt|}|jrB|j||}nxE||kr||jrt|||jkr|d7}qEWn|S(Ni(R7RRuRtisspace(RRERR((sC/usr/lib/python2.7/site-packages/pkg_resources/_vendor/pyparsing.pyR s  7cCs^t||}||jkr6t||d|n||j|}|||!}||fS(NsText not in expected column(R7R(RRERRtthiscoltnewlocR}((sC/usr/lib/python2.7/site-packages/pkg_resources/_vendor/pyparsing.pyR s  (RRRRRRR(((sC/usr/lib/python2.7/site-packages/pkg_resources/_vendor/pyparsing.pyR s  cBs#eZdZdZedZRS(s Matches if current position is at the beginning of a line within the parse string Example:: test = ''' AAA this line AAA and this line AAA but not this one B AAA and definitely not this one ''' for t in (LineStart() + 'AAA' + restOfLine).searchString(test): print(t) Prints:: ['AAA', ' this line'] ['AAA', ' and this line'] cCs tt|jd|_dS(NsExpected start of line(R RRRy(R((sC/usr/lib/python2.7/site-packages/pkg_resources/_vendor/pyparsing.pyR& scCs;t||dkr|gfSt|||j|dS(Ni(R7RRy(RRERR((sC/usr/lib/python2.7/site-packages/pkg_resources/_vendor/pyparsing.pyR* s (RRRRRR(((sC/usr/lib/python2.7/site-packages/pkg_resources/_vendor/pyparsing.pyR s cBs#eZdZdZedZRS(sU Matches if current position is at the end of a line within the parse string cCs<tt|j|jtjjddd|_dS(Ns RrsExpected end of line(R RRRR"RfRRy(R((sC/usr/lib/python2.7/site-packages/pkg_resources/_vendor/pyparsing.pyR3 scCs|t|krK||dkr0|ddfSt|||j|n8|t|krk|dgfSt|||j|dS(Ns i(RRRy(RRERR((sC/usr/lib/python2.7/site-packages/pkg_resources/_vendor/pyparsing.pyR8 s(RRRRRR(((sC/usr/lib/python2.7/site-packages/pkg_resources/_vendor/pyparsing.pyR/ s cBs#eZdZdZedZRS(sM Matches if current position is at the beginning of the parse string cCs tt|jd|_dS(NsExpected start of text(R R(RRy(R((sC/usr/lib/python2.7/site-packages/pkg_resources/_vendor/pyparsing.pyRG scCsL|dkrB||j|dkrBt|||j|qBn|gfS(Ni(RRRy(RRERR((sC/usr/lib/python2.7/site-packages/pkg_resources/_vendor/pyparsing.pyRK s (RRRRRR(((sC/usr/lib/python2.7/site-packages/pkg_resources/_vendor/pyparsing.pyR(C s cBs#eZdZdZedZRS(sG Matches if current position is at the end of the parse string cCs tt|jd|_dS(NsExpected end of text(R R'RRy(R((sC/usr/lib/python2.7/site-packages/pkg_resources/_vendor/pyparsing.pyRV scCs|t|kr-t|||j|nT|t|krM|dgfS|t|kri|gfSt|||j|dS(Ni(RRRy(RRERR((sC/usr/lib/python2.7/site-packages/pkg_resources/_vendor/pyparsing.pyRZ s (RRRRRR(((sC/usr/lib/python2.7/site-packages/pkg_resources/_vendor/pyparsing.pyR'R s cBs&eZdZedZedZRS(sp Matches if the current position is at the beginning of a Word, and is not preceded by any character in a given set of C{wordChars} (default=C{printables}). To emulate the C{} behavior of regular expressions, use C{WordStart(alphanums)}. C{WordStart} will also match at the beginning of the string being parsed, or at the beginning of a line. cCs/tt|jt||_d|_dS(NsNot at the start of a word(R R/RRt wordCharsRy(RRu((sC/usr/lib/python2.7/site-packages/pkg_resources/_vendor/pyparsing.pyRl scCs^|dkrT||d|jks6|||jkrTt|||j|qTn|gfS(Nii(RuRRy(RRERR((sC/usr/lib/python2.7/site-packages/pkg_resources/_vendor/pyparsing.pyRq s  (RRRRTRRR(((sC/usr/lib/python2.7/site-packages/pkg_resources/_vendor/pyparsing.pyR/d s cBs&eZdZedZedZRS(sZ Matches if the current position is at the end of a Word, and is not followed by any character in a given set of C{wordChars} (default=C{printables}). To emulate the C{} behavior of regular expressions, use C{WordEnd(alphanums)}. C{WordEnd} will also match at the end of the string being parsed, or at the end of a line. 
cCs8tt|jt||_t|_d|_dS(NsNot at the end of a word(R R.RRRuRRpRy(RRu((sC/usr/lib/python2.7/site-packages/pkg_resources/_vendor/pyparsing.pyR s cCsvt|}|dkrl||krl|||jksN||d|jkrlt|||j|qln|gfS(Nii(RRuRRy(RRERRR((sC/usr/lib/python2.7/site-packages/pkg_resources/_vendor/pyparsing.pyR s  (RRRRTRRR(((sC/usr/lib/python2.7/site-packages/pkg_resources/_vendor/pyparsing.pyR.x s cBsqeZdZedZdZdZdZdZdZ dZ edZ gd Z d Z RS( s^ Abstract subclass of ParserElement, for combining and post-processing parsed tokens. cCstt|j|t|tr4t|}nt|tr[tj|g|_ nt|t j rt|}t d|Drt tj|}nt||_ n3yt||_ Wntk r|g|_ nXt|_dS(Ncss|]}t|tVqdS(N(RsR(RRF((sC/usr/lib/python2.7/site-packages/pkg_resources/_vendor/pyparsing.pys s(R RRRsRRRR"RitexprsRtIterabletallRRRR}(RRvR((sC/usr/lib/python2.7/site-packages/pkg_resources/_vendor/pyparsing.pyR s  cCs |j|S(N(Rv(RR((sC/usr/lib/python2.7/site-packages/pkg_resources/_vendor/pyparsing.pyR scCs|jj|d|_|S(N(RvRRRm(RR ((sC/usr/lib/python2.7/site-packages/pkg_resources/_vendor/pyparsing.pyR s cCsPt|_g|jD]}|j^q|_x|jD]}|jq8W|S(s~Extends C{leaveWhitespace} defined in base class, and also invokes C{leaveWhitespace} on all contained expressions.(RRpRvRR(RR((sC/usr/lib/python2.7/site-packages/pkg_resources/_vendor/pyparsing.pyR s  %cCst|trb||jkrtt|j|x(|jD]}|j|jdq>Wqn>tt|j|x%|jD]}|j|jdqW|S(Ni(RsR)RuR RRRv(RR R((sC/usr/lib/python2.7/site-packages/pkg_resources/_vendor/pyparsing.pyR scCsfytt|jSWntk r*nX|jdkr_d|jjt|j f|_n|jS(Ns%s:(%s)( R RRRaRmRR^RRRv(R((sC/usr/lib/python2.7/site-packages/pkg_resources/_vendor/pyparsing.pyR s %cCswtt|jx|jD]}|jqWt|jdkr`|jd}t||jr|j r|jdkr|j r|j|jdg|_d|_ |j |j O_ |j |j O_ n|jd}t||jr`|j r`|jdkr`|j r`|jd |j|_d|_ |j |j O_ |j |j O_ q`ndt||_|S(Niiiis Expected (R RRRvRRsR^RkRnRRvRmRsRxRRy(RRR ((sC/usr/lib/python2.7/site-packages/pkg_resources/_vendor/pyparsing.pyR s0        cCstt|j||}|S(N(R RR(RRRR}((sC/usr/lib/python2.7/site-packages/pkg_resources/_vendor/pyparsing.pyR scCs@||g}x|jD]}|j|qW|jgdS(N(RvRR(RRttmpR((sC/usr/lib/python2.7/site-packages/pkg_resources/_vendor/pyparsing.pyR scCs>tt|j}g|jD]}|j^q|_|S(N(R RRRv(RR}R((sC/usr/lib/python2.7/site-packages/pkg_resources/_vendor/pyparsing.pyR s%(RRRRRRRRRRRRRR(((sC/usr/lib/python2.7/site-packages/pkg_resources/_vendor/pyparsing.pyR s    "  cBsWeZdZdefdYZedZedZdZdZ dZ RS(s  Requires all given C{ParseExpression}s to be found in the given order. Expressions may be separated by whitespace. May be constructed using the C{'+'} operator. May also be constructed using the C{'-'} operator, which will suppress backtracking. 
Example:: integer = Word(nums) name_expr = OneOrMore(Word(alphas)) expr = And([integer("id"),name_expr("name"),integer("age")]) # more easily written as: expr = integer("id") + name_expr("name") + integer("age") RcBseZdZRS(cOs3ttj|j||d|_|jdS(Nt-(R RRRRR(RRR((sC/usr/lib/python2.7/site-packages/pkg_resources/_vendor/pyparsing.pyR s (RRR(((sC/usr/lib/python2.7/site-packages/pkg_resources/_vendor/pyparsing.pyR scCsltt|j||td|jD|_|j|jdj|jdj|_t |_ dS(Ncss|]}|jVqdS(N(Rs(RR((sC/usr/lib/python2.7/site-packages/pkg_resources/_vendor/pyparsing.pys  si( R RRRxRvRsRRqRpRR}(RRvR((sC/usr/lib/python2.7/site-packages/pkg_resources/_vendor/pyparsing.pyR s c Cs?|jdj|||dt\}}t}x|jdD]}t|tjr`t}q<n|ry|j|||\}}Wqtk rqtk r}d|_ tj |qt k rt|t ||j|qXn|j|||\}}|s$|jr<||7}q<q<W||fS(NiRi(RvRRRsRRRR!RRt __traceback__RRRRyR( RRERRt resultlistt errorStopRt exprtokensR((sC/usr/lib/python2.7/site-packages/pkg_resources/_vendor/pyparsing.pyR s((   %cCs.t|tr!tj|}n|j|S(N(RsRR"RiR(RR ((sC/usr/lib/python2.7/site-packages/pkg_resources/_vendor/pyparsing.pyR5 scCs@||g}x+|jD] }|j||jsPqqWdS(N(RvRRs(RRtsubRecCheckListR((sC/usr/lib/python2.7/site-packages/pkg_resources/_vendor/pyparsing.pyR: s   cCsVt|dr|jS|jdkrOddjd|jDd|_n|jS(NRt{Rcss|]}t|VqdS(N(R(RR((sC/usr/lib/python2.7/site-packages/pkg_resources/_vendor/pyparsing.pys F st}(RRRmRRRv(R((sC/usr/lib/python2.7/site-packages/pkg_resources/_vendor/pyparsing.pyRA s *( RRRR RRRRRRR(((sC/usr/lib/python2.7/site-packages/pkg_resources/_vendor/pyparsing.pyR s    cBsAeZdZedZedZdZdZdZ RS(s Requires that at least one C{ParseExpression} is found. If two expressions match, the expression that matches the longest string will be used. May be constructed using the C{'^'} operator. Example:: # construct Or using '^' operator number = Word(nums) ^ Combine(Word(nums) + '.' + Word(nums)) print(number.searchString("123 3.1416 789")) prints:: [['123'], ['3.1416'], ['789']] cCsNtt|j|||jrAtd|jD|_n t|_dS(Ncss|]}|jVqdS(N(Rs(RR((sC/usr/lib/python2.7/site-packages/pkg_resources/_vendor/pyparsing.pys \ s(R RRRvR4RsR(RRvR((sC/usr/lib/python2.7/site-packages/pkg_resources/_vendor/pyparsing.pyRY s c Csd}d}g}x|jD]}y|j||}Wntk rw} d| _| j|kr| }| j}qqtk rt||krt|t||j|}t|}qqX|j ||fqW|rh|j ddxn|D]c\} }y|j |||SWqtk r`} d| _| j|kra| }| j}qaqXqWn|dk r|j|_ |nt||d|dS(NiRcSs |d S(Ni((tx((sC/usr/lib/python2.7/site-packages/pkg_resources/_vendor/pyparsing.pyRqu Rrs no defined alternatives to match( RRvRRR{RRRRyRtsortRR( RRERRt maxExcLoct maxExceptionRRtloc2Rt_((sC/usr/lib/python2.7/site-packages/pkg_resources/_vendor/pyparsing.pyR` s<      cCs.t|tr!tj|}n|j|S(N(RsRR"RiR(RR ((sC/usr/lib/python2.7/site-packages/pkg_resources/_vendor/pyparsing.pyt__ixor__ scCsVt|dr|jS|jdkrOddjd|jDd|_n|jS(NRRs ^ css|]}t|VqdS(N(R(RR((sC/usr/lib/python2.7/site-packages/pkg_resources/_vendor/pyparsing.pys sR(RRRmRRRv(R((sC/usr/lib/python2.7/site-packages/pkg_resources/_vendor/pyparsing.pyR s *cCs3||g}x|jD]}|j|qWdS(N(RvR(RRRR((sC/usr/lib/python2.7/site-packages/pkg_resources/_vendor/pyparsing.pyR s( RRRRRRRRRR(((sC/usr/lib/python2.7/site-packages/pkg_resources/_vendor/pyparsing.pyRK s    &  cBsAeZdZedZedZdZdZdZ RS(s Requires that at least one C{ParseExpression} is found. If two expressions match, the first one listed is the one that will match. May be constructed using the C{'|'} operator. Example:: # construct MatchFirst using '|' operator # watch the order of expressions to match number = Word(nums) | Combine(Word(nums) + '.' + Word(nums)) print(number.searchString("123 3.1416 789")) # Fail! 
-> [['123'], ['3'], ['1416'], ['789']] # put more selective expression first number = Combine(Word(nums) + '.' + Word(nums)) | Word(nums) print(number.searchString("123 3.1416 789")) # Better -> [['123'], ['3.1416'], ['789']] cCsNtt|j|||jrAtd|jD|_n t|_dS(Ncss|]}|jVqdS(N(Rs(RR((sC/usr/lib/python2.7/site-packages/pkg_resources/_vendor/pyparsing.pys s(R RRRvR4RsR(RRvR((sC/usr/lib/python2.7/site-packages/pkg_resources/_vendor/pyparsing.pyR s c Csd}d}x|jD]}y|j|||}|SWqtk ro}|j|kr|}|j}qqtk rt||krt|t||j|}t|}qqXqW|dk r|j|_|nt||d|dS(Nis no defined alternatives to match( RRvRRRRRRyR( RRERRRRRR}R((sC/usr/lib/python2.7/site-packages/pkg_resources/_vendor/pyparsing.pyR s$    cCs.t|tr!tj|}n|j|S(N(RsRR"RiR(RR ((sC/usr/lib/python2.7/site-packages/pkg_resources/_vendor/pyparsing.pyt__ior__ scCsVt|dr|jS|jdkrOddjd|jDd|_n|jS(NRRs | css|]}t|VqdS(N(R(RR((sC/usr/lib/python2.7/site-packages/pkg_resources/_vendor/pyparsing.pys sR(RRRmRRRv(R((sC/usr/lib/python2.7/site-packages/pkg_resources/_vendor/pyparsing.pyR s *cCs3||g}x|jD]}|j|qWdS(N(RvR(RRRR((sC/usr/lib/python2.7/site-packages/pkg_resources/_vendor/pyparsing.pyR s( RRRRRRRRRR(((sC/usr/lib/python2.7/site-packages/pkg_resources/_vendor/pyparsing.pyR s     cBs8eZdZedZedZdZdZRS(sm Requires all given C{ParseExpression}s to be found, but in any order. Expressions may be separated by whitespace. May be constructed using the C{'&'} operator. Example:: color = oneOf("RED ORANGE YELLOW GREEN BLUE PURPLE BLACK WHITE BROWN") shape_type = oneOf("SQUARE CIRCLE TRIANGLE STAR HEXAGON OCTAGON") integer = Word(nums) shape_attr = "shape:" + shape_type("shape") posn_attr = "posn:" + Group(integer("x") + ',' + integer("y"))("posn") color_attr = "color:" + color("color") size_attr = "size:" + integer("size") # use Each (using operator '&') to accept attributes in any order # (shape and posn are required, color and size are optional) shape_spec = shape_attr & posn_attr & Optional(color_attr) & Optional(size_attr) shape_spec.runTests(''' shape: SQUARE color: BLACK posn: 100, 120 shape: CIRCLE size: 50 color: BLUE posn: 50,80 color:GREEN size:20 shape:TRIANGLE posn:20,40 ''' ) prints:: shape: SQUARE color: BLACK posn: 100, 120 ['shape:', 'SQUARE', 'color:', 'BLACK', 'posn:', ['100', ',', '120']] - color: BLACK - posn: ['100', ',', '120'] - x: 100 - y: 120 - shape: SQUARE shape: CIRCLE size: 50 color: BLUE posn: 50,80 ['shape:', 'CIRCLE', 'size:', '50', 'color:', 'BLUE', 'posn:', ['50', ',', '80']] - color: BLUE - posn: ['50', ',', '80'] - x: 50 - y: 80 - shape: CIRCLE - size: 50 color: GREEN size: 20 shape: TRIANGLE posn: 20,40 ['color:', 'GREEN', 'size:', '20', 'shape:', 'TRIANGLE', 'posn:', ['20', ',', '40']] - color: GREEN - posn: ['20', ',', '40'] - x: 20 - y: 40 - shape: TRIANGLE - size: 20 cCsKtt|j||td|jD|_t|_t|_dS(Ncss|]}|jVqdS(N(Rs(RR((sC/usr/lib/python2.7/site-packages/pkg_resources/_vendor/pyparsing.pys s( R R RRxRvRsRRptinitExprGroups(RRvR((sC/usr/lib/python2.7/site-packages/pkg_resources/_vendor/pyparsing.pyRs cCs4|jrLtd|jD|_g|jD]}t|tr/|j^q/}g|jD]%}|jr]t|t r]|^q]}|||_g|jD]}t|t r|j^q|_ g|jD]}t|t r|j^q|_ g|jD]$}t|tt t fs|^q|_ |j |j 7_ t|_n|}|j }|j} g} t} x| r_|| |j |j } g} x| D]}y|j||}Wntk r| j|qX| j|jjt||||kr|j|q|| kr| j|qqWt| t| krut} ququW|rdjd|D}t||d|n| g|jD]*}t|tr|j| kr|^q7} g}x6| D].}|j|||\}}|j|qWt|tg}||fS(Ncss3|])}t|trt|j|fVqdS(N(RsRRRF(RR((sC/usr/lib/python2.7/site-packages/pkg_resources/_vendor/pyparsing.pys ss, css|]}t|VqdS(N(R(RR((sC/usr/lib/python2.7/site-packages/pkg_resources/_vendor/pyparsing.pys =ss*Missing one or more required elements 
(%s)(RRRvtopt1mapRsRRFRst optionalsR0tmultioptionalsRt multirequiredtrequiredRRRRRRRtremoveRRRtsumR (RRERRRtopt1topt2ttmpLocttmpReqdttmpOptt matchOrdert keepMatchingttmpExprstfailedtmissingR|R;t finalResults((sC/usr/lib/python2.7/site-packages/pkg_resources/_vendor/pyparsing.pyRsP .5 117      "   > cCsVt|dr|jS|jdkrOddjd|jDd|_n|jS(NRRs & css|]}t|VqdS(N(R(RR((sC/usr/lib/python2.7/site-packages/pkg_resources/_vendor/pyparsing.pys PsR(RRRmRRRv(R((sC/usr/lib/python2.7/site-packages/pkg_resources/_vendor/pyparsing.pyRKs *cCs3||g}x|jD]}|j|qWdS(N(RvR(RRRR((sC/usr/lib/python2.7/site-packages/pkg_resources/_vendor/pyparsing.pyRTs(RRRRRRRR(((sC/usr/lib/python2.7/site-packages/pkg_resources/_vendor/pyparsing.pyR s 5  1 cBs_eZdZedZedZdZdZdZ dZ gdZ dZ RS( sa Abstract subclass of C{ParserElement}, for combining and post-processing parsed tokens. cCstt|j|t|trattjtrItj|}qatjt |}n||_ d|_ |dk r|j |_ |j|_|j|j|j|_|j|_|j|_|jj|jndS(N(R RRRsRt issubclassR"RiR*RRFRRmRxRsRRqRpRoR}RuR(RRFR((sC/usr/lib/python2.7/site-packages/pkg_resources/_vendor/pyparsing.pyR^s        cCsG|jdk r+|jj|||dtStd||j|dS(NRRr(RFRRRRRy(RRERR((sC/usr/lib/python2.7/site-packages/pkg_resources/_vendor/pyparsing.pyRpscCs>t|_|jj|_|jdk r:|jjn|S(N(RRpRFRRR(R((sC/usr/lib/python2.7/site-packages/pkg_resources/_vendor/pyparsing.pyRvs  cCst|trc||jkrtt|j||jdk r`|jj|jdq`qn?tt|j||jdk r|jj|jdn|S(Ni(RsR)RuR RRRFR(RR ((sC/usr/lib/python2.7/site-packages/pkg_resources/_vendor/pyparsing.pyR}s cCs6tt|j|jdk r2|jjn|S(N(R RRRFR(R((sC/usr/lib/python2.7/site-packages/pkg_resources/_vendor/pyparsing.pyRscCsV||kr"t||gn||g}|jdk rR|jj|ndS(N(R$RFRR(RRR((sC/usr/lib/python2.7/site-packages/pkg_resources/_vendor/pyparsing.pyRs  cCsA||g}|jdk r0|jj|n|jgdS(N(RFRRR(RRRy((sC/usr/lib/python2.7/site-packages/pkg_resources/_vendor/pyparsing.pyRscCsuytt|jSWntk r*nX|jdkrn|jdk rnd|jjt |jf|_n|jS(Ns%s:(%s)( R RRRaRmRRFR^RR(R((sC/usr/lib/python2.7/site-packages/pkg_resources/_vendor/pyparsing.pyRs %( RRRRRRRRRRRRR(((sC/usr/lib/python2.7/site-packages/pkg_resources/_vendor/pyparsing.pyRZs      cBs#eZdZdZedZRS(s Lookahead matching of the given parse expression. C{FollowedBy} does I{not} advance the parsing position within the input string, it only verifies that the specified parse expression matches at the current position. C{FollowedBy} always returns a null token list. Example:: # use FollowedBy to match a label only if it is followed by a ':' data_word = Word(alphas) label = data_word + FollowedBy(':') attr_expr = Group(label + Suppress(':') + OneOrMore(data_word, stopOn=label).setParseAction(' '.join)) OneOrMore(attr_expr).parseString("shape: SQUARE color: BLACK posn: upper left").pprint() prints:: [['shape', 'SQUARE'], ['color', 'BLACK'], ['posn', 'upper left']] cCs#tt|j|t|_dS(N(R R RRRs(RRF((sC/usr/lib/python2.7/site-packages/pkg_resources/_vendor/pyparsing.pyRscCs|jj|||gfS(N(RFR(RRERR((sC/usr/lib/python2.7/site-packages/pkg_resources/_vendor/pyparsing.pyRs(RRRRRR(((sC/usr/lib/python2.7/site-packages/pkg_resources/_vendor/pyparsing.pyR s cBs,eZdZdZedZdZRS(s Lookahead to disallow matching with the given parse expression. C{NotAny} does I{not} advance the parsing position within the input string, it only verifies that the specified parse expression does I{not} match at the current position. Also, C{NotAny} does I{not} skip over leading whitespace. C{NotAny} always returns a null token list. May be constructed using the '~' operator. 
Example:: cCsBtt|j|t|_t|_dt|j|_ dS(NsFound unwanted token, ( R RRRRpRRsRRFRy(RRF((sC/usr/lib/python2.7/site-packages/pkg_resources/_vendor/pyparsing.pyRs  cCs:|jj||r0t|||j|n|gfS(N(RFRRRy(RRERR((sC/usr/lib/python2.7/site-packages/pkg_resources/_vendor/pyparsing.pyRscCsIt|dr|jS|jdkrBdt|jd|_n|jS(NRs~{R(RRRmRRRF(R((sC/usr/lib/python2.7/site-packages/pkg_resources/_vendor/pyparsing.pyRs (RRRRRRR(((sC/usr/lib/python2.7/site-packages/pkg_resources/_vendor/pyparsing.pyRs   t_MultipleMatchcBs eZddZedZRS(cCsftt|j|t|_|}t|trFtj|}n|dk rY|nd|_ dS(N( R RRRRoRsRR"RiRt not_ender(RRFtstopOntender((sC/usr/lib/python2.7/site-packages/pkg_resources/_vendor/pyparsing.pyRs  c Cs|jj}|j}|jdk }|r9|jj}n|rO|||n||||dt\}}y|j } xo|r|||n| r|||} n|} ||| |\}} | s| jr~|| 7}q~q~WWnt t fk rnX||fS(NR( RFRRRRRRRuRRR( RRERRtself_expr_parsetself_skip_ignorablest check_endert try_not_enderRthasIgnoreExprsRt tmptokens((sC/usr/lib/python2.7/site-packages/pkg_resources/_vendor/pyparsing.pyRs,   N(RRRRRR(((sC/usr/lib/python2.7/site-packages/pkg_resources/_vendor/pyparsing.pyRs cBseZdZdZRS(s Repetition of one or more of the given expression. Parameters: - expr - expression that must match one or more times - stopOn - (default=C{None}) - expression for a terminating sentinel (only required if the sentinel would ordinarily match the repetition expression) Example:: data_word = Word(alphas) label = data_word + FollowedBy(':') attr_expr = Group(label + Suppress(':') + OneOrMore(data_word).setParseAction(' '.join)) text = "shape: SQUARE posn: upper left color: BLACK" OneOrMore(attr_expr).parseString(text).pprint() # Fail! read 'color' as data instead of next label -> [['shape', 'SQUARE color']] # use stopOn attribute for OneOrMore to avoid reading label string as part of the data attr_expr = Group(label + Suppress(':') + OneOrMore(data_word, stopOn=label).setParseAction(' '.join)) OneOrMore(attr_expr).parseString(text).pprint() # Better -> [['shape', 'SQUARE'], ['posn', 'upper left'], ['color', 'BLACK']] # could also be written as (attr_expr * (1,)).parseString(text).pprint() cCsIt|dr|jS|jdkrBdt|jd|_n|jS(NRRs}...(RRRmRRRF(R((sC/usr/lib/python2.7/site-packages/pkg_resources/_vendor/pyparsing.pyR!s (RRRR(((sC/usr/lib/python2.7/site-packages/pkg_resources/_vendor/pyparsing.pyRscBs/eZdZddZedZdZRS(sw Optional repetition of zero or more of the given expression. Parameters: - expr - expression that must match zero or more times - stopOn - (default=C{None}) - expression for a terminating sentinel (only required if the sentinel would ordinarily match the repetition expression) Example: similar to L{OneOrMore} cCs)tt|j|d|t|_dS(NR(R R0RRRs(RRFR((sC/usr/lib/python2.7/site-packages/pkg_resources/_vendor/pyparsing.pyR6scCsEy tt|j|||SWnttfk r@|gfSXdS(N(R R0RRR(RRERR((sC/usr/lib/python2.7/site-packages/pkg_resources/_vendor/pyparsing.pyR:s cCsIt|dr|jS|jdkrBdt|jd|_n|jS(NRRs]...(RRRmRRRF(R((sC/usr/lib/python2.7/site-packages/pkg_resources/_vendor/pyparsing.pyR@s N(RRRRRRRR(((sC/usr/lib/python2.7/site-packages/pkg_resources/_vendor/pyparsing.pyR0*s   t _NullTokencBs eZdZeZdZRS(cCstS(N(R(R((sC/usr/lib/python2.7/site-packages/pkg_resources/_vendor/pyparsing.pyRJscCsdS(NRr((R((sC/usr/lib/python2.7/site-packages/pkg_resources/_vendor/pyparsing.pyRMs(RRRR>R(((sC/usr/lib/python2.7/site-packages/pkg_resources/_vendor/pyparsing.pyRIs cBs/eZdZedZedZdZRS(sa Optional matching of the given expression. Parameters: - expr - expression that must match zero or more times - default (optional) - value to be returned if the optional expression is not found. 
Example:: # US postal code can be a 5-digit zip, plus optional 4-digit qualifier zip = Combine(Word(nums, exact=5) + Optional('-' + Word(nums, exact=4))) zip.runTests(''' # traditional ZIP code 12345 # ZIP+4 form 12101-0001 # invalid ZIP 98765- ''') prints:: # traditional ZIP code 12345 ['12345'] # ZIP+4 form 12101-0001 ['12101-0001'] # invalid ZIP 98765- ^ FAIL: Expected end of text (at char 5), (line:1, col:6) cCsAtt|j|dt|jj|_||_t|_dS(NR( R RRRRFRoRRRs(RRFR((sC/usr/lib/python2.7/site-packages/pkg_resources/_vendor/pyparsing.pyRts cCsy(|jj|||dt\}}Wnottfk r|jtk r|jjrt|jg}|j||jj ['3', '.', '1416'] # will also erroneously match the following print(real.parseString('3. 1416')) # -> ['3', '.', '1416'] real = Combine(Word(nums) + '.' + Word(nums)) print(real.parseString('3.1416')) # -> ['3.1416'] # no match when there are internal spaces print(real.parseString('3. 1416')) # -> Exception: Expected W:(0123...) RrcCsQtt|j||r)|jn||_t|_||_t|_dS(N( R RRRtadjacentRRpt joinStringR}(RRFRR((sC/usr/lib/python2.7/site-packages/pkg_resources/_vendor/pyparsing.pyRrs    cCs6|jrtj||ntt|j||S(N(RR"RR R(RR ((sC/usr/lib/python2.7/site-packages/pkg_resources/_vendor/pyparsing.pyR|s cCse|j}|2|tdj|j|jgd|j7}|jr]|jr]|gS|SdS(NRrR(RR RRRRzRnR(RRERRtretToks((sC/usr/lib/python2.7/site-packages/pkg_resources/_vendor/pyparsing.pyRs  1(RRRRRRR(((sC/usr/lib/python2.7/site-packages/pkg_resources/_vendor/pyparsing.pyRas cBs eZdZdZdZRS(s Converter to return the matched tokens as a list - useful for returning tokens of C{L{ZeroOrMore}} and C{L{OneOrMore}} expressions. Example:: ident = Word(alphas) num = Word(nums) term = ident | num func = ident + Optional(delimitedList(term)) print(func.parseString("fn a,b,100")) # -> ['fn', 'a', 'b', '100'] func = ident + Group(Optional(delimitedList(term))) print(func.parseString("fn a,b,100")) # -> ['fn', ['a', 'b', '100']] cCs#tt|j|t|_dS(N(R RRRRo(RRF((sC/usr/lib/python2.7/site-packages/pkg_resources/_vendor/pyparsing.pyRscCs|gS(N((RRERR((sC/usr/lib/python2.7/site-packages/pkg_resources/_vendor/pyparsing.pyRs(RRRRR(((sC/usr/lib/python2.7/site-packages/pkg_resources/_vendor/pyparsing.pyRs  cBs eZdZdZdZRS(sW Converter to return a repetitive expression as a list, but also as a dictionary. Each element can also be referenced using the first token in the expression as its key. Useful for tabular report scraping when the first column can be used as a item key. Example:: data_word = Word(alphas) label = data_word + FollowedBy(':') attr_expr = Group(label + Suppress(':') + OneOrMore(data_word).setParseAction(' '.join)) text = "shape: SQUARE posn: upper left color: light blue texture: burlap" attr_expr = (label + Suppress(':') + OneOrMore(data_word, stopOn=label).setParseAction(' '.join)) # print attributes as plain groups print(OneOrMore(attr_expr).parseString(text).dump()) # instead of OneOrMore(expr), parse using Dict(OneOrMore(Group(expr))) - Dict will auto-assign names result = Dict(OneOrMore(Group(attr_expr))).parseString(text) print(result.dump()) # access named fields as dict entries, or output as dict print(result['shape']) print(result.asDict()) prints:: ['shape', 'SQUARE', 'posn', 'upper left', 'color', 'light blue', 'texture', 'burlap'] [['shape', 'SQUARE'], ['posn', 'upper left'], ['color', 'light blue'], ['texture', 'burlap']] - color: light blue - posn: upper left - shape: SQUARE - texture: burlap SQUARE {'color': 'light blue', 'posn': 'upper left', 'texture': 'burlap', 'shape': 'SQUARE'} See more examples at L{ParseResults} of accessing fields by results name. 
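# --- Editor's note: hedged sketch restating the Dict docstring example above as
# --- runnable Python (assumes the standalone pyparsing package). Dict auto-assigns
# --- the first token of each group as a results name, giving dict-style access.
from pyparsing import Word, alphas, FollowedBy, Group, Suppress, OneOrMore, Dict

data_word = Word(alphas)
label = data_word + FollowedBy(':')
attr_expr = (label + Suppress(':')
             + OneOrMore(data_word, stopOn=label).setParseAction(' '.join))

text = "shape: SQUARE posn: upper left color: light blue texture: burlap"
result = Dict(OneOrMore(Group(attr_expr))).parseString(text)
print(result['shape'])     # -> SQUARE
print(result.asDict())     # -> {'shape': 'SQUARE', 'posn': 'upper left', ...}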
cCs#tt|j|t|_dS(N(R R RRRo(RRF((sC/usr/lib/python2.7/site-packages/pkg_resources/_vendor/pyparsing.pyRscCsTx9t|D]+\}}t|dkr1q n|d}t|trct|dj}nt|dkrtd|||nX|S(ss Decorator for debugging parse actions. When the parse action is called, this decorator will print C{">> entering I{method-name}(line:I{current_source_line}, I{parse_location}, I{matched_tokens})".} When the parse action completes, the decorator will print C{"<<"} followed by the returned value, or any exception that the parse action raised. Example:: wd = Word(alphas) @traceParseAction def remove_duplicate_chars(tokens): return ''.join(sorted(set(''.join(tokens))) wds = OneOrMore(wd).setParseAction(remove_duplicate_chars) print(wds.parseString("slkdjs sld sldd sdlf sdljf")) prints:: >>entering remove_duplicate_chars(line: 'slkdjs sld sldd sdlf sdljf', 0, (['slkdjs', 'sld', 'sldd', 'sdlf', 'sdljf'], {})) <>entering %s(line: '%s', %d, %r) s< ['aa', 'bb', 'cc'] delimitedList(Word(hexnums), delim=':', combine=True).parseString("AA:BB:CC:DD:EE") # -> ['AA:BB:CC:DD:EE'] s [Rs]...N(RRR0RR)(RFtdelimtcombinetdlName((sC/usr/lib/python2.7/site-packages/pkg_resources/_vendor/pyparsing.pyR>9s ,!cstfd}|dkrBttjd}n |j}|jd|j|dt|jdt dS(s: Helper to define a counted list of expressions. This helper defines a pattern of the form:: integer expr expr expr... where the leading integer tells how many expr expressions follow. The matched tokens returns the array of expr tokens as a list - the leading count token is suppressed. If C{intExpr} is specified, it should be a pyparsing expression that produces an integer value. Example:: countedArray(Word(alphas)).parseString('2 ab cd ef') # -> ['ab', 'cd'] # in this parser, the leading integer value is given in binary, # '10' indicating that 2 values are in the array binaryConstant = Word('01').setParseAction(lambda t: int(t[0], 2)) countedArray(Word(alphas), intExpr=binaryConstant).parseString('10 ab cd ef') # -> ['ab', 'cd'] cs;|d}|r,ttg|p5tt>gS(Ni(RRRA(RRNRpR(t arrayExprRF(sC/usr/lib/python2.7/site-packages/pkg_resources/_vendor/pyparsing.pytcountFieldParseAction_s -cSst|dS(Ni(Ro(Rp((sC/usr/lib/python2.7/site-packages/pkg_resources/_vendor/pyparsing.pyRqdRrtarrayLenR~s(len) s...N( R RR-RPRzRRRRR(RFtintExprR((RRFsC/usr/lib/python2.7/site-packages/pkg_resources/_vendor/pyparsing.pyR:Ls    cCsMg}x@|D]8}t|tr8|jt|q |j|q W|S(N(RsRRRR(tLR}R((sC/usr/lib/python2.7/site-packages/pkg_resources/_vendor/pyparsing.pyRks  csFtfd}|j|dtjdt|S(s* Helper to define an expression that is indirectly defined from the tokens matched in a previous expression, that is, it looks for a 'repeat' of a previous expression. For example:: first = Word(nums) second = matchPreviousLiteral(first) matchExpr = first + ":" + second will match C{"1:1"}, but not C{"1:2"}. Because this matches a previous literal, will also match the leading C{"1:1"} in C{"1:10"}. If this is not desired, use C{matchPreviousExpr}. Do I{not} use with packrat parsing enabled. csc|rTt|dkr'|d>q_t|j}td|D>n t>dS(Niicss|]}t|VqdS(N(R(Rttt((sC/usr/lib/python2.7/site-packages/pkg_resources/_vendor/pyparsing.pys s(RRRRR (RRNRpttflat(trep(sC/usr/lib/python2.7/site-packages/pkg_resources/_vendor/pyparsing.pytcopyTokenToRepeaters R~s(prev) (R RRRR(RFR((RsC/usr/lib/python2.7/site-packages/pkg_resources/_vendor/pyparsing.pyRMts  cs\t|j}|Kfd}|j|dtjdt|S(sS Helper to define an expression that is indirectly defined from the tokens matched in a previous expression, that is, it looks for a 'repeat' of a previous expression. 
For example:: first = Word(nums) second = matchPreviousExpr(first) matchExpr = first + ":" + second will match C{"1:1"}, but not C{"1:2"}. Because this matches by expressions, will I{not} match the leading C{"1:1"} in C{"1:10"}; the expressions are evaluated first, and then compared, so C{"1"} is compared with C{"10"}. Do I{not} use with packrat parsing enabled. cs8t|jfd}j|dtdS(Ncs7t|j}|kr3tdddndS(NRri(RRR(RRNRpt theseTokens(t matchTokens(sC/usr/lib/python2.7/site-packages/pkg_resources/_vendor/pyparsing.pytmustMatchTheseTokenss R~(RRRzR(RRNRpR(R(RsC/usr/lib/python2.7/site-packages/pkg_resources/_vendor/pyparsing.pyRsR~s(prev) (R RRRRR(RFte2R((RsC/usr/lib/python2.7/site-packages/pkg_resources/_vendor/pyparsing.pyRLs   cCsUx$dD]}|j|t|}qW|jdd}|jdd}t|S(Ns\^-]s s\ns s\t(Rt_bslashR(RR((sC/usr/lib/python2.7/site-packages/pkg_resources/_vendor/pyparsing.pyREs  c sD|r!d}d}tnd}d}tg}t|tr]|j}n7t|tjr~t|}ntj dt dd|st Sd}x|t |d krV||}xt ||d D]f\}} || |r |||d =Pq||| r|||d =|j|| | }PqqW|d 7}qW| r|ryt |t d j|krtd d jd |Djd j|Stdjd|Djd j|SWqtk rtj dt ddqXntfd|Djd j|S(s Helper to quickly define a set of alternative Literals, and makes sure to do longest-first testing when there is a conflict, regardless of the input order, but returns a C{L{MatchFirst}} for best performance. Parameters: - strs - a string of space-delimited literals, or a collection of string literals - caseless - (default=C{False}) - treat all literals as caseless - useRegex - (default=C{True}) - as an optimization, will generate a Regex object; otherwise, will generate a C{MatchFirst} object (if C{caseless=True}, or if creating a C{Regex} raises an exception) Example:: comp_oper = oneOf("< = > <= >= !=") var = Word(alphas) number = Word(nums) term = var | number comparison_expr = term + comp_oper + term print(comparison_expr.searchString("B = 12 AA=23 B<=AA AA>12")) prints:: [['B', '=', '12'], ['AA', '=', '23'], ['B', '<=', 'AA'], ['AA', '>', '12']] cSs|j|jkS(N(R,(R tb((sC/usr/lib/python2.7/site-packages/pkg_resources/_vendor/pyparsing.pyRqRrcSs|jj|jS(N(R,R)(R R((sC/usr/lib/python2.7/site-packages/pkg_resources/_vendor/pyparsing.pyRqRrcSs ||kS(N((R R((sC/usr/lib/python2.7/site-packages/pkg_resources/_vendor/pyparsing.pyRqRrcSs |j|S(N(R)(R R((sC/usr/lib/python2.7/site-packages/pkg_resources/_vendor/pyparsing.pyRqRrs6Invalid argument to oneOf, expected string or iterableRiiiRrs[%s]css|]}t|VqdS(N(RE(Rtsym((sC/usr/lib/python2.7/site-packages/pkg_resources/_vendor/pyparsing.pys ss | t|css|]}tj|VqdS(N(R|RG(RR((sC/usr/lib/python2.7/site-packages/pkg_resources/_vendor/pyparsing.pys ss7Exception creating Regex for oneOf, building MatchFirstc3s|]}|VqdS(N((RR(tparseElementClass(sC/usr/lib/python2.7/site-packages/pkg_resources/_vendor/pyparsing.pys s(RRRsRRRRwRRRRRRRRRR%RRaR( tstrsR+tuseRegextisequaltmaskstsymbolsRtcurRR ((RsC/usr/lib/python2.7/site-packages/pkg_resources/_vendor/pyparsing.pyRQsL        ! !33  cCsttt||S(s Helper to easily and clearly define a dictionary by specifying the respective patterns for the key and value. Takes care of defining the C{L{Dict}}, C{L{ZeroOrMore}}, and C{L{Group}} tokens in the proper order. The key pattern can include delimiting markers or punctuation, as long as they are suppressed, thereby leaving the significant key text. The value pattern can include named results, so that the C{Dict} results can include named token fields. 
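# --- Editor's note: hedged sketch restating the countedArray and oneOf docstring
# --- examples above as runnable Python (assumes the standalone pyparsing package).
from pyparsing import Word, alphas, nums, countedArray, oneOf

# countedArray: a leading integer says how many expressions follow; the count token is suppressed
print(countedArray(Word(alphas)).parseString('2 ab cd ef'))   # -> ['ab', 'cd']

# oneOf: longest-first matching of alternative literals
comp_oper = oneOf("< = > <= >= !=")
term = Word(alphas) | Word(nums)
comparison_expr = term + comp_oper + term
print(comparison_expr.searchString("B = 12 AA=23 B<=AA AA>12"))
# -> [['B', '=', '12'], ['AA', '=', '23'], ['B', '<=', 'AA'], ['AA', '>', '12']]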
Example:: text = "shape: SQUARE posn: upper left color: light blue texture: burlap" attr_expr = (label + Suppress(':') + OneOrMore(data_word, stopOn=label).setParseAction(' '.join)) print(OneOrMore(attr_expr).parseString(text).dump()) attr_label = label attr_value = Suppress(':') + OneOrMore(data_word, stopOn=label).setParseAction(' '.join) # similar to Dict, but simpler call format result = dictOf(attr_label, attr_value).parseString(text) print(result.dump()) print(result['shape']) print(result.shape) # object attribute access works too print(result.asDict()) prints:: [['shape', 'SQUARE'], ['posn', 'upper left'], ['color', 'light blue'], ['texture', 'burlap']] - color: light blue - posn: upper left - shape: SQUARE - texture: burlap SQUARE SQUARE {'color': 'light blue', 'shape': 'SQUARE', 'posn': 'upper left', 'texture': 'burlap'} (R R0R(RR((sC/usr/lib/python2.7/site-packages/pkg_resources/_vendor/pyparsing.pyR?s!cCs|tjd}|j}t|_|d||d}|rVd}n d}|j||j|_|S(s Helper to return the original, untokenized text for a given expression. Useful to restore the parsed fields of an HTML start tag into the raw tag text itself, or to revert separate tokens with intervening whitespace back to the original matching input text. By default, returns astring containing the original parsed text. If the optional C{asString} argument is passed as C{False}, then the return value is a C{L{ParseResults}} containing any results names that were originally matched, and a single token containing the original matched text from the input string. So if the expression passed to C{L{originalTextFor}} contains expressions with defined results names, you must set C{asString} to C{False} if you want to preserve those results name values. Example:: src = "this is test bold text normal text " for tag in ("b","i"): opener,closer = makeHTMLTags(tag) patt = originalTextFor(opener + SkipTo(closer) + closer) print(patt.searchString(src)[0]) prints:: [' bold text '] ['text'] cSs|S(N((RRRp((sC/usr/lib/python2.7/site-packages/pkg_resources/_vendor/pyparsing.pyRq8Rrt_original_startt _original_endcSs||j|j!S(N(RR(RRNRp((sC/usr/lib/python2.7/site-packages/pkg_resources/_vendor/pyparsing.pyRq=RrcSs'||jd|jd!g|(dS(NRR(R(RRNRp((sC/usr/lib/python2.7/site-packages/pkg_resources/_vendor/pyparsing.pyt extractText?s(R RzRRR}Ru(RFtasStringt locMarkert endlocMarkert matchExprR((sC/usr/lib/python2.7/site-packages/pkg_resources/_vendor/pyparsing.pyRe s      cCst|jdS(sp Helper to undo pyparsing's default grouping of And expressions, even if all but one are non-empty. cSs|dS(Ni((Rp((sC/usr/lib/python2.7/site-packages/pkg_resources/_vendor/pyparsing.pyRqJRr(R+Rz(RF((sC/usr/lib/python2.7/site-packages/pkg_resources/_vendor/pyparsing.pyRfEscCsEtjd}t|d|d|jjdS(s Helper to decorate a returned token with its starting and ending locations in the input string. 
This helper adds the following results names: - locn_start = location where matched expression begins - locn_end = location where matched expression ends - value = the actual parsed results Be careful if the input text contains C{} characters, you may want to call C{L{ParserElement.parseWithTabs}} Example:: wd = Word(alphas) for match in locatedExpr(wd).searchString("ljsdf123lksdjjf123lkkjj1222"): print(match) prints:: [[0, 'ljsdf', 5]] [[8, 'lksdjjf', 15]] [[18, 'lkkjj', 23]] cSs|S(N((RRNRp((sC/usr/lib/python2.7/site-packages/pkg_resources/_vendor/pyparsing.pyRq`Rrt locn_startRtlocn_end(R RzRRR(RFtlocator((sC/usr/lib/python2.7/site-packages/pkg_resources/_vendor/pyparsing.pyRhLss\[]-*.$+^?()~ RKcCs |ddS(Nii((RRNRp((sC/usr/lib/python2.7/site-packages/pkg_resources/_vendor/pyparsing.pyRqkRrs\\0?[xX][0-9a-fA-F]+cCs tt|djddS(Nis\0xi(tunichrRotlstrip(RRNRp((sC/usr/lib/python2.7/site-packages/pkg_resources/_vendor/pyparsing.pyRqlRrs \\0[0-7]+cCstt|dddS(Niii(RRo(RRNRp((sC/usr/lib/python2.7/site-packages/pkg_resources/_vendor/pyparsing.pyRqmRrR<s\]s\wRzRRtnegatetbodyRcsOdy-djfdtj|jDSWntk rJdSXdS(s Helper to easily define string ranges for use in Word construction. Borrows syntax from regexp '[]' string range definitions:: srange("[0-9]") -> "0123456789" srange("[a-z]") -> "abcdefghijklmnopqrstuvwxyz" srange("[a-z$_]") -> "abcdefghijklmnopqrstuvwxyz$_" The input string must be enclosed in []'s, and the returned string is the expanded character set joined into a single string. The values enclosed in the []'s may be: - a single character - an escaped character with a leading backslash (such as C{\-} or C{\]}) - an escaped hex character with a leading C{'\x'} (C{\x21}, which is a C{'!'} character) (C{\0x##} is also supported for backwards compatibility) - an escaped octal character with a leading C{'\0'} (C{\041}, which is a C{'!'} character) - a range of any of the above, separated by a dash (C{'a-z'}, etc.) - any combination of the above (C{'aeiouy'}, C{'a-zA-Z0-9_$'}, etc.) cSsKt|ts|Sdjdtt|dt|ddDS(NRrcss|]}t|VqdS(N(R(RR((sC/usr/lib/python2.7/site-packages/pkg_resources/_vendor/pyparsing.pys sii(RsR RRtord(tp((sC/usr/lib/python2.7/site-packages/pkg_resources/_vendor/pyparsing.pyRqRrRrc3s|]}|VqdS(N((Rtpart(t _expanded(sC/usr/lib/python2.7/site-packages/pkg_resources/_vendor/pyparsing.pys sN(Rt_reBracketExprRRRa(R((RsC/usr/lib/python2.7/site-packages/pkg_resources/_vendor/pyparsing.pyR]rs  - csfd}|S(st Helper method for defining parse actions that require matching at a specific column in the input text. cs2t||kr.t||dndS(Nsmatched token not at column %d(R7R(R@tlocnRJ(R(sC/usr/lib/python2.7/site-packages/pkg_resources/_vendor/pyparsing.pyt verifyCols((RR((RsC/usr/lib/python2.7/site-packages/pkg_resources/_vendor/pyparsing.pyRKscs fdS(s Helper method for common parse actions that simply return a literal value. Especially useful when used with C{L{transformString}()}. Example:: num = Word(nums).setParseAction(lambda toks: int(toks[0])) na = oneOf("N/A NA").setParseAction(replaceWith(math.nan)) term = na | num OneOrMore(term).parseString("324 234 N/A 234") # -> [324, 234, nan, 234] csgS(N((RRNRp(treplStr(sC/usr/lib/python2.7/site-packages/pkg_resources/_vendor/pyparsing.pyRqRr((R((RsC/usr/lib/python2.7/site-packages/pkg_resources/_vendor/pyparsing.pyRZs cCs|ddd!S(s Helper parse action for removing quotation marks from parsed quoted strings. 
Example:: # by default, quotation marks are included in parsed results quotedString.parseString("'Now is the Winter of our Discontent'") # -> ["'Now is the Winter of our Discontent'"] # use removeQuotes to strip quotation marks from parsed results quotedString.setParseAction(removeQuotes) quotedString.parseString("'Now is the Winter of our Discontent'") # -> ["Now is the Winter of our Discontent"] iii((RRNRp((sC/usr/lib/python2.7/site-packages/pkg_resources/_vendor/pyparsing.pyRXs csafd}y"tdtdj}Wntk rSt}nX||_|S(sG Helper to define a parse action by mapping a function to all elements of a ParseResults list.If any additional args are passed, they are forwarded to the given function as additional arguments after the token, as in C{hex_integer = Word(hexnums).setParseAction(tokenMap(int, 16))}, which will convert the parsed data to an integer using base 16. Example (compare the last to example in L{ParserElement.transformString}:: hex_ints = OneOrMore(Word(hexnums)).setParseAction(tokenMap(int, 16)) hex_ints.runTests(''' 00 11 22 aa FF 0a 0d 1a ''') upperword = Word(alphas).setParseAction(tokenMap(str.upper)) OneOrMore(upperword).runTests(''' my kingdom for a horse ''') wd = Word(alphas).setParseAction(tokenMap(str.title)) OneOrMore(wd).setParseAction(' '.join).runTests(''' now is the winter of our discontent made glorious summer by this sun of york ''') prints:: 00 11 22 aa FF 0a 0d 1a [0, 17, 34, 170, 255, 10, 13, 26] my kingdom for a horse ['MY', 'KINGDOM', 'FOR', 'A', 'HORSE'] now is the winter of our discontent made glorious summer by this sun of york ['Now Is The Winter Of Our Discontent Made Glorious Summer By This Sun Of York'] cs g|D]}|^qS(N((RRNRpttokn(RRO(sC/usr/lib/python2.7/site-packages/pkg_resources/_vendor/pyparsing.pyRsRR^(R`RRaRu(RORRRd((RROsC/usr/lib/python2.7/site-packages/pkg_resources/_vendor/pyparsing.pyRks    cCst|jS(N(RR,(Rp((sC/usr/lib/python2.7/site-packages/pkg_resources/_vendor/pyparsing.pyRqRrcCst|jS(N(Rtlower(Rp((sC/usr/lib/python2.7/site-packages/pkg_resources/_vendor/pyparsing.pyRqRrcCs<t|tr+|}t|d| }n |j}tttd}|rtjj t }t d|dt t t|t d|tddtgjdj d t d }nd jd tD}tjj t t|B}t d|dt t t|j ttt d|tddtgjdj d t d }ttd|d }|jdd j|jddjjjd|}|jdd j|jddjjjd|}||_||_||fS(sRInternal helper to construct opening and closing tag expressions, given a tag nameR+s_-:Rttagt=t/RRAcSs|ddkS(NiR((RRNRp((sC/usr/lib/python2.7/site-packages/pkg_resources/_vendor/pyparsing.pyRqRrR Rrcss!|]}|dkr|VqdS(R N((RR((sC/usr/lib/python2.7/site-packages/pkg_resources/_vendor/pyparsing.pys scSs|ddkS(NiR((RRNRp((sC/usr/lib/python2.7/site-packages/pkg_resources/_vendor/pyparsing.pyRqRrsRLs(RsRRRR-R2R1R<RRzRXR)R R0RRRRRRTRWR@Rt_LRttitleRRR(ttagStrtxmltresnamet tagAttrNamet tagAttrValuetopenTagtprintablesLessRAbracktcloseTag((sC/usr/lib/python2.7/site-packages/pkg_resources/_vendor/pyparsing.pyt _makeTagss" o{AA  cCs t|tS(s  Helper to construct opening and closing tag expressions for HTML, given a tag name. Matches tags in either upper or lower case, attributes with namespaces and with quoted or unquoted values. 
Example:: text = 'More info at the pyparsing wiki page' # makeHTMLTags returns pyparsing expressions for the opening and closing tags as a 2-tuple a,a_end = makeHTMLTags("A") link_expr = a + SkipTo(a_end)("link_text") + a_end for link in link_expr.searchString(text): # attributes in the tag (like "href" shown here) are also accessible as named results print(link.link_text, '->', link.href) prints:: pyparsing -> http://pyparsing.wikispaces.com (R R(R((sC/usr/lib/python2.7/site-packages/pkg_resources/_vendor/pyparsing.pyRIscCs t|tS(s Helper to construct opening and closing tag expressions for XML, given a tag name. Matches tags only in the given upper/lower case. Example: similar to L{makeHTMLTags} (R R(R((sC/usr/lib/python2.7/site-packages/pkg_resources/_vendor/pyparsing.pyRJscsT|r|n |jgD]\}}||f^q#fd}|S(s< Helper to create a validating parse action to be used with start tags created with C{L{makeXMLTags}} or C{L{makeHTMLTags}}. Use C{withAttribute} to qualify a starting tag with a required attribute value, to avoid false matches on common tags such as C{} or C{
}. Call C{withAttribute} with a series of attribute names and values. Specify the list of filter attributes names and values as: - keyword arguments, as in C{(align="right")}, or - as an explicit dict with C{**} operator, when an attribute name is also a Python reserved word, as in C{**{"class":"Customer", "align":"right"}} - a list of name-value tuples, as in ( ("ns1:class", "Customer"), ("ns2:align","right") ) For attribute names with a namespace prefix, you must use the second form. Attribute names are matched insensitive to upper/lower case. If just testing for C{class} (with or without a namespace), use C{L{withClass}}. To verify that the attribute exists, but without specifying a value, pass C{withAttribute.ANY_VALUE} as the value. Example:: html = '''
<div>
Some text
<div type="grid">1 4 0 1 0</div>
<div type="graph">1,3 2,3 1,1</div>
<div>this has no type</div>
</div>
''' div,div_end = makeHTMLTags("div") # only match div tag having a type attribute with value "grid" div_grid = div().setParseAction(withAttribute(type="grid")) grid_expr = div_grid + SkipTo(div | div_end)("body") for grid_header in grid_expr.searchString(html): print(grid_header.body) # construct a match with any div tag having a type attribute, regardless of the value div_any_type = div().setParseAction(withAttribute(type=withAttribute.ANY_VALUE)) div_expr = div_any_type + SkipTo(div | div_end)("body") for div_header in div_expr.searchString(html): print(div_header.body) prints:: 1 4 0 1 0 1 4 0 1 0 1,3 2,3 1,1 csx~D]v\}}||kr8t||d|n|tjkr|||krt||d||||fqqWdS(Nsno matching attribute s+attribute '%s' has value '%s', must be '%s'(RRct ANY_VALUE(RRNRtattrNamet attrValue(tattrs(sC/usr/lib/python2.7/site-packages/pkg_resources/_vendor/pyparsing.pyRRs   (R(RtattrDictRRR((RsC/usr/lib/python2.7/site-packages/pkg_resources/_vendor/pyparsing.pyRcs 2  %cCs'|rd|nd}ti||6S(s Simplified version of C{L{withAttribute}} when matching on a div class - made difficult because C{class} is a reserved word in Python. Example:: html = '''
<div>
Some text
<div class="grid">1 4 0 1 0</div>
<div class="graph">1,3 2,3 1,1</div>
<div>this <div> has no class</div>
</div>
''' div,div_end = makeHTMLTags("div") div_grid = div().setParseAction(withClass("grid")) grid_expr = div_grid + SkipTo(div | div_end)("body") for grid_header in grid_expr.searchString(html): print(grid_header.body) div_any_type = div().setParseAction(withClass(withAttribute.ANY_VALUE)) div_expr = div_any_type + SkipTo(div | div_end)("body") for div_header in div_expr.searchString(html): print(div_header.body) prints:: 1 4 0 1 0 1 4 0 1 0 1,3 2,3 1,1 s%s:classtclass(Rc(t classnamet namespacet classattr((sC/usr/lib/python2.7/site-packages/pkg_resources/_vendor/pyparsing.pyRi\s t(RYcCs<t}||||B}xt|D]\}}|d d \}} } } | dkrdd|nd|} | dkr|d kst|dkrtdn|\} }ntj| }| tjkr| dkr t||t |t |}q| dkrx|d k rQt|||t |t ||}qt||t |t |}q| dkrt|| |||t || |||}qtdn+| tj kr| dkr)t |t st |}nt|j|t ||}q| dkr|d k rpt|||t |t ||}qt||t |t |}q| dkrt|| |||t || |||}qtdn td | r |j| n||j| |BK}|}q(W||K}|S( s Helper method for constructing grammars of expressions made up of operators working in a precedence hierarchy. Operators may be unary or binary, left- or right-associative. Parse actions can also be attached to operator expressions. The generated parser will also recognize the use of parentheses to override operator precedences (see example below). Note: if you define a deep operator list, you may see performance issues when using infixNotation. See L{ParserElement.enablePackrat} for a mechanism to potentially improve your parser performance. Parameters: - baseExpr - expression representing the most basic element for the nested - opList - list of tuples, one for each operator precedence level in the expression grammar; each tuple is of the form (opExpr, numTerms, rightLeftAssoc, parseAction), where: - opExpr is the pyparsing expression for the operator; may also be a string, which will be converted to a Literal; if numTerms is 3, opExpr is a tuple of two expressions, for the two operators separating the 3 terms - numTerms is the number of terms for this operator (must be 1, 2, or 3) - rightLeftAssoc is the indicator whether the operator is right or left associative, using the pyparsing-defined constants C{opAssoc.RIGHT} and C{opAssoc.LEFT}. 
- parseAction is the parse action to be associated with expressions matching this operator expression (the parse action tuple member may be omitted) - lpar - expression for matching left-parentheses (default=C{Suppress('(')}) - rpar - expression for matching right-parentheses (default=C{Suppress(')')}) Example:: # simple example of four-function arithmetic with ints and variable names integer = pyparsing_common.signed_integer varname = pyparsing_common.identifier arith_expr = infixNotation(integer | varname, [ ('-', 1, opAssoc.RIGHT), (oneOf('* /'), 2, opAssoc.LEFT), (oneOf('+ -'), 2, opAssoc.LEFT), ]) arith_expr.runTests(''' 5+3*6 (5+3)*6 -2--11 ''', fullDump=False) prints:: 5+3*6 [[5, '+', [3, '*', 6]]] (5+3)*6 [[[5, '+', 3], '*', 6]] -2--11 [[['-', 2], '-', ['-', 11]]] iis%s terms %s%s termis@if numterms=3, opExpr must be a tuple or list of two expressionsis6operator must be unary (1), binary (2), or ternary (3)s2operator must indicate right or left associativityN(N(R RRRRRRRtLEFTR RRtRIGHTRsRRFRz(tbaseExprtopListtlpartrparR}tlastExprRtoperDeftopExprtaritytrightLeftAssocRttermNametopExpr1topExpr2tthisExprR((sC/usr/lib/python2.7/site-packages/pkg_resources/_vendor/pyparsing.pyRgsR;    '  /'   $  /'     s4"(?:[^"\n\r\\]|(?:"")|(?:\\(?:[^x]|x[0-9a-fA-F]+)))*t"s string enclosed in double quotess4'(?:[^'\n\r\\]|(?:'')|(?:\\(?:[^x]|x[0-9a-fA-F]+)))*t's string enclosed in single quotess*quotedString using single or double quotestusunicode string literalcCs!||krtdn|d krt|trt|trt|dkrt|dkr|d k rtt|t||tj ddj d}q|t j t||tj j d}q|d k r9tt|t |t |ttj ddj d}qttt |t |ttj ddj d}qtdnt}|d k r|tt|t||B|Bt|K}n.|tt|t||Bt|K}|jd ||f|S( s~ Helper method for defining nested lists enclosed in opening and closing delimiters ("(" and ")" are the default). Parameters: - opener - opening character for a nested list (default=C{"("}); can also be a pyparsing expression - closer - closing character for a nested list (default=C{")"}); can also be a pyparsing expression - content - expression for items within the nested lists (default=C{None}) - ignoreExpr - expression for ignoring opening and closing delimiters (default=C{quotedString}) If an expression is not provided for the content argument, the nested expression will capture all whitespace-delimited content between delimiters as a list of separate values. Use the C{ignoreExpr} argument to define expressions that may contain opening or closing characters that should not be treated as opening or closing characters for nesting, such as quotedString or a comment expression. Specify multiple expressions using an C{L{Or}} or C{L{MatchFirst}}. The default is L{quotedString}, but if no expressions are to be ignored, then pass C{None} for this argument. 
Example:: data_type = oneOf("void int short long char float double") decl_data_type = Combine(data_type + Optional(Word('*'))) ident = Word(alphas+'_', alphanums+'_') number = pyparsing_common.number arg = Group(decl_data_type + ident) LPAR,RPAR = map(Suppress, "()") code_body = nestedExpr('{', '}', ignoreExpr=(quotedString | cStyleComment)) c_function = (decl_data_type("type") + ident("name") + LPAR + Optional(delimitedList(arg), [])("args") + RPAR + code_body("body")) c_function.ignore(cStyleComment) source_code = ''' int is_odd(int x) { return (x%2); } int dec_to_hex(char hchar) { if (hchar >= '0' && hchar <= '9') { return (ord(hchar)-ord('0')); } else { return (10+ord(hchar)-ord('A')); } } ''' for func in c_function.searchString(source_code): print("%(name)s (%(type)s) args: %(args)s" % func) prints:: is_odd (int) args: [['int', 'x']] dec_to_hex (int) args: [['char', 'hchar']] s.opening and closing strings cannot be the sameiRKcSs|djS(Ni(R(Rp((sC/usr/lib/python2.7/site-packages/pkg_resources/_vendor/pyparsing.pyRq9RrcSs|djS(Ni(R(Rp((sC/usr/lib/python2.7/site-packages/pkg_resources/_vendor/pyparsing.pyRq<RrcSs|djS(Ni(R(Rp((sC/usr/lib/python2.7/site-packages/pkg_resources/_vendor/pyparsing.pyRqBRrcSs|djS(Ni(R(Rp((sC/usr/lib/python2.7/site-packages/pkg_resources/_vendor/pyparsing.pyRqFRrsOopening and closing arguments must be strings if no content expression is givensnested %s%s expressionN(RRRsRRRRRR"RfRzRARRR RR)R0R(topenertclosertcontentRR}((sC/usr/lib/python2.7/site-packages/pkg_resources/_vendor/pyparsing.pyRNs4:  $  $    5.c s5fd}fd}fd}ttjdj}ttj|jd}tj|jd}tj|jd} |rtt||t|t|t|| } n0tt|t|t|t|} |j t t| jdS( s Helper method for defining space-delimited indentation blocks, such as those used to define block statements in Python source code. Parameters: - blockStatementExpr - expression defining syntax of statement that is repeated within the indented block - indentStack - list created by caller to manage indentation stack (multiple statementWithIndentedBlock expressions within a single grammar should share a common indentStack) - indent - boolean indicating whether block must be indented beyond the the current level; set to False for block of left-most statements (default=C{True}) A valid block must contain at least one C{blockStatement}. 
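# --- Editor's note: hedged sketch of the infixNotation and nestedExpr helpers
# --- documented above, restated as runnable Python (assumes the standalone
# --- pyparsing package; the arithmetic grammar follows the docstring example,
# --- the nestedExpr call uses the helper's default whitespace-delimited content).
from pyparsing import infixNotation, nestedExpr, oneOf, opAssoc, pyparsing_common

integer = pyparsing_common.signed_integer
varname = pyparsing_common.identifier
arith_expr = infixNotation(integer | varname, [
    ('-', 1, opAssoc.RIGHT),              # unary minus
    (oneOf('* /'), 2, opAssoc.LEFT),      # multiplication / division
    (oneOf('+ -'), 2, opAssoc.LEFT),      # addition / subtraction
])
print(arith_expr.parseString("(5+3)*6"))  # -> [[[5, '+', 3], '*', 6]]

# nestedExpr: items inside matching delimiters become nested lists
print(nestedExpr('(', ')').parseString("(a (b c) d)").asList())
# -> [['a', ['b', 'c'], 'd']]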
Example:: data = ''' def A(z): A1 B = 100 G = A2 A2 A3 B def BB(a,b,c): BB1 def BBA(): bba1 bba2 bba3 C D def spam(x,y): def eggs(z): pass ''' indentStack = [1] stmt = Forward() identifier = Word(alphas, alphanums) funcDecl = ("def" + identifier + Group( "(" + Optional( delimitedList(identifier) ) + ")" ) + ":") func_body = indentedBlock(stmt, indentStack) funcDef = Group( funcDecl + func_body ) rvalue = Forward() funcCall = Group(identifier + "(" + Optional(delimitedList(rvalue)) + ")") rvalue << (funcCall | identifier | Word(nums)) assignment = Group(identifier + "=" + rvalue) stmt << ( funcDef | assignment | identifier ) module_body = OneOrMore(stmt) parseTree = module_body.parseString(data) parseTree.pprint() prints:: [['def', 'A', ['(', 'z', ')'], ':', [['A1'], [['B', '=', '100']], [['G', '=', 'A2']], ['A2'], ['A3']]], 'B', ['def', 'BB', ['(', 'a', 'b', 'c', ')'], ':', [['BB1'], [['def', 'BBA', ['(', ')'], ':', [['bba1'], ['bba2'], ['bba3']]]]]], 'C', 'D', ['def', 'spam', ['(', 'x', 'y', ')'], ':', [[['def', 'eggs', ['(', 'z', ')'], ':', [['pass']]]]]]] css|t|krdSt||}|dkro|dkrZt||dnt||dndS(Nisillegal nestingsnot a peer entry(RR7RR(RRNRptcurCol(t indentStack(sC/usr/lib/python2.7/site-packages/pkg_resources/_vendor/pyparsing.pytcheckPeerIndentscsEt||}|dkr/j|nt||ddS(Nisnot a subentry(R7RR(RRNRpR+(R,(sC/usr/lib/python2.7/site-packages/pkg_resources/_vendor/pyparsing.pytcheckSubIndentscsn|t|krdSt||}oH|dkoH|dks`t||dnjdS(Niisnot an unindent(RR7RR(RRNRpR+(R,(sC/usr/lib/python2.7/site-packages/pkg_resources/_vendor/pyparsing.pyt checkUnindents &s tINDENTRrtUNINDENTsindented block( RRRRR RzRRRRR( tblockStatementExprR,R$R-R.R/R7R0tPEERtUNDENTtsmExpr((R,sC/usr/lib/python2.7/site-packages/pkg_resources/_vendor/pyparsing.pyRdQsN"8 $s#[\0xc0-\0xd6\0xd8-\0xf6\0xf8-\0xff]s[\0xa1-\0xbf\0xd7\0xf7]s_:sany tagsgt lt amp nbsp quot aposs><& "'s &(?PRs);scommon HTML entitycCstj|jS(sRHelper parser action to replace common HTML entities with their special characters(t_htmlEntityMapRtentity(Rp((sC/usr/lib/python2.7/site-packages/pkg_resources/_vendor/pyparsing.pyRYss/\*(?:[^*]|\*(?!/))*s*/sC style commentss HTML comments.*s rest of lines//(?:\\\n|[^\n])*s // commentsC++ style comments#.*sPython style comments t commaItemRcBseZdZeeZeeZee j dj eZ ee j dj eedZedj dj eZej edej ej dZejdeeeed jeBj d Zejeed j d j eZed j dj eZeeBeBjZedj dj eZeededj dZedj dZedj dZ e de dj dZ!ee de d8dee de d9j dZ"e"j#ddej d Z$e%e!e$Be"Bj d!j d!Z&ed"j d#Z'e(d$d%Z)e(d&d'Z*ed(j d)Z+ed*j d+Z,ed,j d-Z-e.je/jBZ0e(d.Z1e%e2e3d/e4ee5d0d/ee6d1jj d2Z7e8ee9j:e7Bd3d4j d5Z;e(ed6Z<e(ed7Z=RS(:s Here are some common low-level expressions that may be useful in jump-starting parser development: - numeric forms (L{integers}, L{reals}, L{scientific notation}) - common L{programming identifiers} - network addresses (L{MAC}, L{IPv4}, L{IPv6}) - ISO8601 L{dates} and L{datetime} - L{UUID} - L{comma-separated list} Parse actions: - C{L{convertToInteger}} - C{L{convertToFloat}} - C{L{convertToDate}} - C{L{convertToDatetime}} - C{L{stripHTMLTags}} - C{L{upcaseTokens}} - C{L{downcaseTokens}} Example:: pyparsing_common.number.runTests(''' # any int or real number, returned as the appropriate type 100 -100 +100 3.14159 6.02e23 1e-12 ''') pyparsing_common.fnumber.runTests(''' # any int or real number, returned as float 100 -100 +100 3.14159 6.02e23 1e-12 ''') pyparsing_common.hex_integer.runTests(''' # hex numbers 100 FF ''') pyparsing_common.fraction.runTests(''' # fractions 1/2 -3/4 ''') pyparsing_common.mixed_integer.runTests(''' # mixed fractions 1 1/2 -3/4 1-3/4 
''') import uuid pyparsing_common.uuid.setParseAction(tokenMap(uuid.UUID)) pyparsing_common.uuid.runTests(''' # uuid 12345678-1234-5678-1234-567812345678 ''') prints:: # any int or real number, returned as the appropriate type 100 [100] -100 [-100] +100 [100] 3.14159 [3.14159] 6.02e23 [6.02e+23] 1e-12 [1e-12] # any int or real number, returned as float 100 [100.0] -100 [-100.0] +100 [100.0] 3.14159 [3.14159] 6.02e23 [6.02e+23] 1e-12 [1e-12] # hex numbers 100 [256] FF [255] # fractions 1/2 [0.5] -3/4 [-0.75] # mixed fractions 1 [1] 1/2 [0.5] -3/4 [-0.75] 1-3/4 [1.75] # uuid 12345678-1234-5678-1234-567812345678 [UUID('12345678-1234-5678-1234-567812345678')] tintegers hex integeris[+-]?\d+ssigned integerRtfractioncCs|d|dS(Nii((Rp((sC/usr/lib/python2.7/site-packages/pkg_resources/_vendor/pyparsing.pyRqRrRzs"fraction or mixed integer-fractions [+-]?\d+\.\d*s real numbers+[+-]?\d+([eE][+-]?\d+|\.\d*([eE][+-]?\d+)?)s$real number with scientific notations[+-]?\d+\.?\d*([eE][+-]?\d+)?tfnumberRt identifiersK(25[0-5]|2[0-4][0-9]|1?[0-9]{1,2})(\.(25[0-5]|2[0-4][0-9]|1?[0-9]{1,2})){3}s IPv4 addresss[0-9a-fA-F]{1,4}t hex_integerRisfull IPv6 addressiis::sshort IPv6 addresscCstd|DdkS(Ncss'|]}tjj|rdVqdS(iN(Rlt _ipv6_partR(RR((sC/usr/lib/python2.7/site-packages/pkg_resources/_vendor/pyparsing.pys si(R(Rp((sC/usr/lib/python2.7/site-packages/pkg_resources/_vendor/pyparsing.pyRqRrs::ffff:smixed IPv6 addresss IPv6 addresss:[0-9a-fA-F]{2}([:.-])[0-9a-fA-F]{2}(?:\1[0-9a-fA-F]{2}){4}s MAC addresss%Y-%m-%dcsfd}|S(s Helper to create a parse action for converting parsed date string to Python datetime.date Params - - fmt - format to be passed to datetime.strptime (default=C{"%Y-%m-%d"}) Example:: date_expr = pyparsing_common.iso8601_date.copy() date_expr.setParseAction(pyparsing_common.convertToDate()) print(date_expr.parseString("1999-12-31")) prints:: [datetime.date(1999, 12, 31)] csPytj|djSWn+tk rK}t||t|nXdS(Ni(RtstrptimetdateRRRu(RRNRptve(tfmt(sC/usr/lib/python2.7/site-packages/pkg_resources/_vendor/pyparsing.pytcvt_fns((RBRC((RBsC/usr/lib/python2.7/site-packages/pkg_resources/_vendor/pyparsing.pyt convertToDatess%Y-%m-%dT%H:%M:%S.%fcsfd}|S(s Helper to create a parse action for converting parsed datetime string to Python datetime.datetime Params - - fmt - format to be passed to datetime.strptime (default=C{"%Y-%m-%dT%H:%M:%S.%f"}) Example:: dt_expr = pyparsing_common.iso8601_datetime.copy() dt_expr.setParseAction(pyparsing_common.convertToDatetime()) print(dt_expr.parseString("1999-12-31T23:59:59.999")) prints:: [datetime.datetime(1999, 12, 31, 23, 59, 59, 999000)] csJytj|dSWn+tk rE}t||t|nXdS(Ni(RR?RRRu(RRNRpRA(RB(sC/usr/lib/python2.7/site-packages/pkg_resources/_vendor/pyparsing.pyRCs((RBRC((RBsC/usr/lib/python2.7/site-packages/pkg_resources/_vendor/pyparsing.pytconvertToDatetimess7(?P\d{4})(?:-(?P\d\d)(?:-(?P\d\d))?)?s ISO8601 dates(?P\d{4})-(?P\d\d)-(?P\d\d)[T ](?P\d\d):(?P\d\d)(:(?P\d\d(\.\d*)?)?)?(?PZ|[+-]\d\d:?\d\d)?sISO8601 datetimes2[0-9a-fA-F]{8}(-[0-9a-fA-F]{4}){3}-[0-9a-fA-F]{12}tUUIDcCstjj|dS(s Parse action to remove HTML tags from web page HTML source Example:: # strip HTML links from normal text text = 'More info at the
pyparsing wiki page' td,td_end = makeHTMLTags("TD") table_text = td + SkipTo(td_end).setParseAction(pyparsing_common.stripHTMLTags)("body") + td_end print(table_text.parseString(text).body) # -> 'More info at the pyparsing wiki page' i(Rlt_html_stripperR{(RRNR((sC/usr/lib/python2.7/site-packages/pkg_resources/_vendor/pyparsing.pyt stripHTMLTagss RR<s R8RRrscomma separated listcCst|jS(N(RR,(Rp((sC/usr/lib/python2.7/site-packages/pkg_resources/_vendor/pyparsing.pyRqRrcCst|jS(N(RR(Rp((sC/usr/lib/python2.7/site-packages/pkg_resources/_vendor/pyparsing.pyRqRr(ii(ii(>RRRRkRotconvertToIntegertfloattconvertToFloatR-RPRRzR9RBR=R%tsigned_integerR:RRRt mixed_integerRtrealtsci_realRtnumberR;R2R1R<t ipv4_addressR>t_full_ipv6_addresst_short_ipv6_addressRt_mixed_ipv6_addressRt ipv6_addresst mac_addressR#RDREt iso8601_datetiso8601_datetimetuuidR5R4RGRHRRRRTR,t _commasepitemR>RWRtcomma_separated_listRbR@(((sC/usr/lib/python2.7/site-packages/pkg_resources/_vendor/pyparsing.pyRlsL  '/-  ;&J+t__main__tselecttfroms_$RRtcolumnsRttablestcommandsK # '*' as column list and dotted table name select * from SYS.XYZZY # caseless match on "SELECT", and casts back to "select" SELECT * from XYZZY, ABC # list of column names, and mixed case SELECT keyword Select AA,BB,CC from Sys.dual # multiple tables Select A, B, C from Sys.dual, Table2 # invalid SELECT keyword - should fail Xelect A, B, C from Sys.dual # incomplete command - should fail Select # invalid column name - should fail Select ^^^ frox Sys.dual s] 100 -100 +100 3.14159 6.02e23 1e-12 s 100 FF s6 12345678-1234-5678-1234-567812345678 (Rt __version__t__versionTime__t __author__RtweakrefRRRRxRR|RSRR8RRRRt_threadRt ImportErrort threadingRRt ordereddictRt__all__Rt version_infoRQRtmaxsizeR$RuRtchrRRRRR2treversedRRR4RxRIRJR_tmaxinttxrangeRt __builtin__RtfnameRR`RRRRRRtascii_uppercasetascii_lowercaseR2RPRBR1RRt printableRTRaRRRR!R$RR tMutableMappingtregisterR7RHRERGRKRMROReR"R*R RRRRiRRRRjR-R%R#RR,RpRRRR(R'R/R.RRRRR RR RRRR0RRRR&R RR+RRR R)RR`RR>R:RRMRLRERRQR?ReRfRhRRARGRFR_R^Rzt _escapedPunct_escapedHexChart_escapedOctChartUNICODEt _singleChart _charRangeRRR]RKRZRXRkRbR@R RIRJRcR RiRRRRRgRSR<R\RWRaRNRdR3RUR5R4RRR6RR9RYR6RCRR[R=R;RDRVRRZR8RlRt selectTokent fromTokentidentt columnNametcolumnNameListt columnSpect tableNamet tableNameListt simpleSQLR"RPR;R=RYRF(((sC/usr/lib/python2.7/site-packages/pkg_resources/_vendor/pyparsing.pyt=s              *         8      @ & A=IG3pLOD|M &# @sQ,A ,    I # %  !4@    ,   ?  #   k%Z r  (, #8+    $     PK Z] dQdQ/site-packages/pkg_resources/_vendor/appdirs.pycnu[ fc@s@dZd,ZdjeeeZddlZddlZejddkZ e r^eZ nej j drddl Z e j ddZej d rd Zqej d rd Zqd Zn ej ZdddedZdddedZdddedZdddedZdddedZdddedZdefdYZdZdZdZdZed kr!yddlZ eZ!Wq!e"k ryddl#m$Z$eZ!Wqe"k ryddl%Z&eZ!Wqe"k reZ!qXqXq!Xne'dkr<dZ(dZ)d-Z*d$GHee(e)d%d&Z+x&e*D]Z,d'e,e-e+e,fGHq`Wd(GHee(e)Z+x&e*D]Z,d'e,e-e+e,fGHqWd)GHee(Z+x&e*D]Z,d'e,e-e+e,fGHqWd*GHee(d+eZ+x)e*D]Z,d'e,e-e+e,fGHqWndS(.syUtilities for determining application-specific dirs. See for details and usage. iiit.iNitjavatWindowstwin32tMactdarwintlinux2cCs6tdkr|dkr!|}n|r-dp0d}tjjt|}|r|tk rxtjj|||}qtjj||}qn{tdkrtjjd}|rtjj||}qn<tj dtjjd}|rtjj||}n|r2|r2tjj||}n|S( sJReturn full path to the user-specific data dir for this application. "appname" is the name of application. If None, just the system directory is returned. "appauthor" (only used on Windows) is the name of the appauthor or distributing body for this application. Typically it is the owning company name. This falls back to appname. You may pass False to disable it. 
"version" is an optional version path element to append to the path. You might want to use this if you want multiple versions of your app to be able to run independently. If used, this would typically be ".". Only applied when appname is present. "roaming" (boolean, default False) can be set True to use the Windows roaming appdata directory. That means that for users on a Windows network setup for roaming profiles, this user data will be sync'd on login. See for a discussion of issues. Typical user data directories are: Mac OS X: ~/Library/Application Support/ Unix: ~/.local/share/ # or in $XDG_DATA_HOME, if defined Win XP (not roaming): C:\Documents and Settings\\Application Data\\ Win XP (roaming): C:\Documents and Settings\\Local Settings\Application Data\\ Win 7 (not roaming): C:\Users\\AppData\Local\\ Win 7 (roaming): C:\Users\\AppData\Roaming\\ For Unix, we follow the XDG spec and support $XDG_DATA_HOME. That means, by default "~/.local/share/". Rt CSIDL_APPDATAtCSIDL_LOCAL_APPDATARs~/Library/Application Support/t XDG_DATA_HOMEs~/.local/shareN( tsystemtNonetostpathtnormpatht_get_win_foldertFalsetjoint expandusertgetenv(tappnamet appauthortversiontroamingtconstR ((sA/usr/lib/python2.7/site-packages/pkg_resources/_vendor/appdirs.pyt user_data_dir-s&      cCstdkr|d kr!|}ntjjtd}|r|tk rftjj|||}q~tjj||}qntdkrtjjd}|rtjj||}qntj dtj jddg}g|j tj D]$}tjj|j tj ^q}|rs|rEtjj||}ng|D]}tj j||g^qL}n|rtj j|}n |d}|S|r|rtjj||}n|S( siReturn full path to the user-shared data dir for this application. "appname" is the name of application. If None, just the system directory is returned. "appauthor" (only used on Windows) is the name of the appauthor or distributing body for this application. Typically it is the owning company name. This falls back to appname. You may pass False to disable it. "version" is an optional version path element to append to the path. You might want to use this if you want multiple versions of your app to be able to run independently. If used, this would typically be ".". Only applied when appname is present. "multipath" is an optional parameter only applicable to *nix which indicates that the entire list of data dirs should be returned. By default, the first item from XDG_DATA_DIRS is returned, or '/usr/local/share/', if XDG_DATA_DIRS is not set Typical user data directories are: Mac OS X: /Library/Application Support/ Unix: /usr/local/share/ or /usr/share/ Win XP: C:\Documents and Settings\All Users\Application Data\\ Vista: (Fail! "C:\ProgramData" is a hidden *system* directory on Vista.) Win 7: C:\ProgramData\\ # Hidden, but writeable on Win 7. For Unix, this is using the $XDG_DATA_DIRS[0] default. WARNING: Do not use this on Windows. See the Vista-Fail note above for why. RtCSIDL_COMMON_APPDATARs/Library/Application Supportt XDG_DATA_DIRSs/usr/local/shares /usr/shareiN(R R R R RRRRRRtpathseptsplittrstriptsep(RRRt multipathR txtpathlist((sA/usr/lib/python2.7/site-packages/pkg_resources/_vendor/appdirs.pyt site_data_dirds4      =.  cCstdkr$t||d|}n<tjdtjjd}|r`tjj||}n|r|rtjj||}n|S(sReturn full path to the user-specific config dir for this application. "appname" is the name of application. If None, just the system directory is returned. "appauthor" (only used on Windows) is the name of the appauthor or distributing body for this application. Typically it is the owning company name. This falls back to appname. You may pass False to disable it. "version" is an optional version path element to append to the path. 
You might want to use this if you want multiple versions of your app to be able to run independently. If used, this would typically be ".". Only applied when appname is present. "roaming" (boolean, default False) can be set True to use the Windows roaming appdata directory. That means that for users on a Windows network setup for roaming profiles, this user data will be sync'd on login. See for a discussion of issues. Typical user data directories are: Mac OS X: same as user_data_dir Unix: ~/.config/ # or in $XDG_CONFIG_HOME, if defined Win *: same as user_data_dir For Unix, we follow the XDG spec and support $XDG_CONFIG_HOME. That means, by deafult "~/.config/". RRtXDG_CONFIG_HOMEs ~/.config(RRN(R RR R RR RR(RRRRR ((sA/usr/lib/python2.7/site-packages/pkg_resources/_vendor/appdirs.pytuser_config_dirs  cCs tdkrBt||}|r|rtjj||}qntjdd}g|jtjD]$}tjj|j tj ^qg}|r|rtjj||}ng|D]}tj j||g^q}n|rtjj|}n |d}|S(sReturn full path to the user-shared data dir for this application. "appname" is the name of application. If None, just the system directory is returned. "appauthor" (only used on Windows) is the name of the appauthor or distributing body for this application. Typically it is the owning company name. This falls back to appname. You may pass False to disable it. "version" is an optional version path element to append to the path. You might want to use this if you want multiple versions of your app to be able to run independently. If used, this would typically be ".". Only applied when appname is present. "multipath" is an optional parameter only applicable to *nix which indicates that the entire list of config dirs should be returned. By default, the first item from XDG_CONFIG_DIRS is returned, or '/etc/xdg/', if XDG_CONFIG_DIRS is not set Typical user data directories are: Mac OS X: same as site_data_dir Unix: /etc/xdg/ or $XDG_CONFIG_DIRS[i]/ for each value in $XDG_CONFIG_DIRS Win *: same as site_data_dir Vista: (Fail! "C:\ProgramData" is a hidden *system* directory on Vista.) For Unix, this is using the $XDG_CONFIG_DIRS[0] default, if multipath=False WARNING: Do not use this on Windows. See the Vista-Fail note above for why. RRtXDG_CONFIG_DIRSs/etc/xdgi(RR( R R#R R RRRRRRR(RRRR R R!R"((sA/usr/lib/python2.7/site-packages/pkg_resources/_vendor/appdirs.pytsite_config_dirs  =. cCsBtdkr|dkr!|}ntjjtd}|r|tk rftjj|||}ntjj||}|rtjj|d}qqn{tdkrtjjd}|rtjj||}qn<tj dtjjd}|rtjj||}n|r>|r>tjj||}n|S( sReturn full path to the user-specific cache dir for this application. "appname" is the name of application. If None, just the system directory is returned. "appauthor" (only used on Windows) is the name of the appauthor or distributing body for this application. Typically it is the owning company name. This falls back to appname. You may pass False to disable it. "version" is an optional version path element to append to the path. You might want to use this if you want multiple versions of your app to be able to run independently. If used, this would typically be ".". Only applied when appname is present. "opinion" (boolean) can be False to disable the appending of "Cache" to the base app data dir for Windows. See discussion below. Typical user cache directories are: Mac OS X: ~/Library/Caches/ Unix: ~/.cache/ (XDG default) Win XP: C:\Documents and Settings\\Local Settings\Application Data\\\Cache Vista: C:\Users\\AppData\Local\\\Cache On Windows the only suggestion in the MSDN docs is that local settings go in the `CSIDL_LOCAL_APPDATA` directory. 
This is identical to the non-roaming app data dir (the default returned by `user_data_dir` above). Apps typically put cache data somewhere *under* the given dir here. Some examples: ...\Mozilla\Firefox\Profiles\\Cache ...\Acme\SuperApp\Cache\1.0 OPINION: This function appends "Cache" to the `CSIDL_LOCAL_APPDATA` value. This can be disabled with the `opinion=False` option. RRtCacheRs~/Library/CachestXDG_CACHE_HOMEs~/.cacheN( R R R R RRRRRR(RRRtopinionR ((sA/usr/lib/python2.7/site-packages/pkg_resources/_vendor/appdirs.pytuser_cache_dirs(!      cCstdkr0tjjtjjd|}n{tdkrut|||}t}|rtjj|d}qn6t|||}t}|rtjj|d}n|r|rtjj||}n|S(sReturn full path to the user-specific log dir for this application. "appname" is the name of application. If None, just the system directory is returned. "appauthor" (only used on Windows) is the name of the appauthor or distributing body for this application. Typically it is the owning company name. This falls back to appname. You may pass False to disable it. "version" is an optional version path element to append to the path. You might want to use this if you want multiple versions of your app to be able to run independently. If used, this would typically be ".". Only applied when appname is present. "opinion" (boolean) can be False to disable the appending of "Logs" to the base app data dir for Windows, and "log" to the base cache dir for Unix. See discussion below. Typical user cache directories are: Mac OS X: ~/Library/Logs/ Unix: ~/.cache//log # or under $XDG_CACHE_HOME if defined Win XP: C:\Documents and Settings\\Local Settings\Application Data\\\Logs Vista: C:\Users\\AppData\Local\\\Logs On Windows the only suggestion in the MSDN docs is that local settings go in the `CSIDL_LOCAL_APPDATA` directory. (Note: I'm interested in examples of what some windows apps use for a logs dir.) OPINION: This function appends "Logs" to the `CSIDL_LOCAL_APPDATA` value for Windows and appends "log" to the user cache dir for Unix. This can be disabled with the `opinion=False` option. Rs~/Library/LogsRtLogstlog(R R R RRRRR+(RRRR*R ((sA/usr/lib/python2.7/site-packages/pkg_resources/_vendor/appdirs.pyt user_log_dir:s     tAppDirscBs}eZdZddeedZedZedZedZ edZ edZ edZ RS( s1Convenience wrapper for getting application dirs.cCs1||_||_||_||_||_dS(N(RRRRR (tselfRRRRR ((sA/usr/lib/python2.7/site-packages/pkg_resources/_vendor/appdirs.pyt__init__os     cCs%t|j|jd|jd|jS(NRR(RRRRR(R0((sA/usr/lib/python2.7/site-packages/pkg_resources/_vendor/appdirs.pyRwscCs%t|j|jd|jd|jS(NRR (R#RRRR (R0((sA/usr/lib/python2.7/site-packages/pkg_resources/_vendor/appdirs.pyR#|scCs%t|j|jd|jd|jS(NRR(R%RRRR(R0((sA/usr/lib/python2.7/site-packages/pkg_resources/_vendor/appdirs.pyR%scCs%t|j|jd|jd|jS(NRR (R'RRRR (R0((sA/usr/lib/python2.7/site-packages/pkg_resources/_vendor/appdirs.pyR'scCst|j|jd|jS(NR(R+RRR(R0((sA/usr/lib/python2.7/site-packages/pkg_resources/_vendor/appdirs.pyR+scCst|j|jd|jS(NR(R.RRR(R0((sA/usr/lib/python2.7/site-packages/pkg_resources/_vendor/appdirs.pyR.sN( t__name__t __module__t__doc__R RR1tpropertyRR#R%R'R+R.(((sA/usr/lib/python2.7/site-packages/pkg_resources/_vendor/appdirs.pyR/ms  cCs\ddl}idd6dd6dd6|}|j|jd }|j||\}}|S( sThis is a fallback technique at best. I'm not sure if using the registry for this guarantees us the correct answer for all CSIDL_* names. 
iNtAppDataRsCommon AppDataRs Local AppDataRs@Software\Microsoft\Windows\CurrentVersion\Explorer\Shell Folders(t_winregtOpenKeytHKEY_CURRENT_USERt QueryValueEx(t csidl_nameR7tshell_folder_nametkeytdirttype((sA/usr/lib/python2.7/site-packages/pkg_resources/_vendor/appdirs.pyt_get_win_folder_from_registrys  cCsddlm}m}|jdt||dd}yt|}t}x*|D]"}t|dkrSt}PqSqSW|ryddl }|j |}Wqt k rqXnWnt k rnX|S(Ni(tshellcontshellii( twin32com.shellRARBtSHGetFolderPathtgetattrtunicodeRtordtTruetwin32apitGetShortPathNamet ImportErrort UnicodeError(R;RARBR>t has_high_chartcRI((sA/usr/lib/python2.7/site-packages/pkg_resources/_vendor/appdirs.pyt_get_win_folder_with_pywin32s$!      cCsddl}idd6dd6dd6|}|jd}|jjjd|dd |t}x*|D]"}t|d krft}PqfqfW|r|jd}|jj j |j |dr|}qn|j S( NiiRi#RiRiii( tctypestcreate_unicode_buffertwindlltshell32tSHGetFolderPathWR RRGRHtkernel32tGetShortPathNameWtvalue(R;RPt csidl_consttbufRMRNtbuf2((sA/usr/lib/python2.7/site-packages/pkg_resources/_vendor/appdirs.pyt_get_win_folder_with_ctypess$   c Cs=ddl}ddlm}ddlm}|jjd}|jd|}|jj }|j dt |j |d|j j||jj|jjd}t}x*|D]"} t| dkrt}PqqW|r9|jd|}|jj } tj|||r9|jj|jjd}q9n|S(Ni(tjna(RiRNsi(tarraytcom.sunR\tcom.sun.jna.platformRtWinDeftMAX_PATHtzerostShell32tINSTANCERDR REtShlObjtSHGFP_TYPE_CURRENTtNativettoStringttostringRRRGRHtKernel32tkernalRJ( R;R]R\Rtbuf_sizeRYRBR>RMRNtkernel((sA/usr/lib/python2.7/site-packages/pkg_resources/_vendor/appdirs.pyt_get_win_folder_with_jnas&  +!  '(RRt__main__tMyAppt MyCompanyRR#R%R'R+R.s%-- app dirs (with optional 'version')Rs1.0s%s: %ss) -- app dirs (without optional 'version')s+ -- app dirs (without optional 'appauthor')s( -- app dirs (with disabled 'appauthor')R(iii(RR#R%R'R+R.(.R4t__version_info__Rtmaptstrt __version__tsysR t version_infotPY3RFtplatformt startswithtjava_vertos_nameR R RRR#R%R'RHR+R.tobjectR/R@ROR[RnRCtwin32comRRKRPRRt com.sun.jnatcomR2RRtpropstdirstpropRE(((sA/usr/lib/python2.7/site-packages/pkg_resources/_vendor/appdirs.pyt s~        7B(393+                   PK Z889site-packages/pkg_resources/_vendor/packaging/version.pyonu[ fc@`snddlmZmZmZddlZddlZddlZddlmZddddd gZ ej d d d d dddgZ dZ de fdYZdefdYZdefdYZejdejZidd6dd6dd6dd6dd 6ZdZdZdZdefd YZd!Zejd"Zd#Zd$ZdS(%i(tabsolute_importtdivisiontprint_functionNi(tInfinitytparsetVersiont LegacyVersiontInvalidVersiontVERSION_PATTERNt_VersiontepochtreleasetdevtpretposttlocalcC`s-yt|SWntk r(t|SXdS(s Parse the given version string and return either a :class:`Version` object or a :class:`LegacyVersion` object depending on if the given version is a valid PEP 440 version or a legacy version. N(RRR(tversion((sK/usr/lib/python2.7/site-packages/pkg_resources/_vendor/packaging/version.pyRs cB`seZdZRS(sF An invalid version was found, users should refer to PEP 440. 
(t__name__t __module__t__doc__(((sK/usr/lib/python2.7/site-packages/pkg_resources/_vendor/packaging/version.pyR$st _BaseVersioncB`sPeZdZdZdZdZdZdZdZdZ RS(cC`s t|jS(N(thasht_key(tself((sK/usr/lib/python2.7/site-packages/pkg_resources/_vendor/packaging/version.pyt__hash__,scC`s|j|dS(NcS`s ||kS(N((tsto((sK/usr/lib/python2.7/site-packages/pkg_resources/_vendor/packaging/version.pyt0t(t_compare(Rtother((sK/usr/lib/python2.7/site-packages/pkg_resources/_vendor/packaging/version.pyt__lt__/scC`s|j|dS(NcS`s ||kS(N((RR((sK/usr/lib/python2.7/site-packages/pkg_resources/_vendor/packaging/version.pyR3R(R(RR((sK/usr/lib/python2.7/site-packages/pkg_resources/_vendor/packaging/version.pyt__le__2scC`s|j|dS(NcS`s ||kS(N((RR((sK/usr/lib/python2.7/site-packages/pkg_resources/_vendor/packaging/version.pyR6R(R(RR((sK/usr/lib/python2.7/site-packages/pkg_resources/_vendor/packaging/version.pyt__eq__5scC`s|j|dS(NcS`s ||kS(N((RR((sK/usr/lib/python2.7/site-packages/pkg_resources/_vendor/packaging/version.pyR9R(R(RR((sK/usr/lib/python2.7/site-packages/pkg_resources/_vendor/packaging/version.pyt__ge__8scC`s|j|dS(NcS`s ||kS(N((RR((sK/usr/lib/python2.7/site-packages/pkg_resources/_vendor/packaging/version.pyR<R(R(RR((sK/usr/lib/python2.7/site-packages/pkg_resources/_vendor/packaging/version.pyt__gt__;scC`s|j|dS(NcS`s ||kS(N((RR((sK/usr/lib/python2.7/site-packages/pkg_resources/_vendor/packaging/version.pyR?R(R(RR((sK/usr/lib/python2.7/site-packages/pkg_resources/_vendor/packaging/version.pyt__ne__>scC`s&t|tstS||j|jS(N(t isinstanceRtNotImplementedR(RRtmethod((sK/usr/lib/python2.7/site-packages/pkg_resources/_vendor/packaging/version.pyRAs( RRRRR R!R"R#R$R(((sK/usr/lib/python2.7/site-packages/pkg_resources/_vendor/packaging/version.pyR*s       cB`sneZdZdZdZedZedZedZedZ edZ RS(cC`s%t||_t|j|_dS(N(tstrt_versiont_legacy_cmpkeyR(RR((sK/usr/lib/python2.7/site-packages/pkg_resources/_vendor/packaging/version.pyt__init__JscC`s|jS(N(R)(R((sK/usr/lib/python2.7/site-packages/pkg_resources/_vendor/packaging/version.pyt__str__NscC`sdjtt|S(Ns(tformattreprR((R((sK/usr/lib/python2.7/site-packages/pkg_resources/_vendor/packaging/version.pyt__repr__QscC`s|jS(N(R)(R((sK/usr/lib/python2.7/site-packages/pkg_resources/_vendor/packaging/version.pytpublicTscC`s|jS(N(R)(R((sK/usr/lib/python2.7/site-packages/pkg_resources/_vendor/packaging/version.pyt base_versionXscC`sdS(N(tNone(R((sK/usr/lib/python2.7/site-packages/pkg_resources/_vendor/packaging/version.pyR\scC`stS(N(tFalse(R((sK/usr/lib/python2.7/site-packages/pkg_resources/_vendor/packaging/version.pyt is_prerelease`scC`stS(N(R3(R((sK/usr/lib/python2.7/site-packages/pkg_resources/_vendor/packaging/version.pytis_postreleaseds( RRR+R,R/tpropertyR0R1RR4R5(((sK/usr/lib/python2.7/site-packages/pkg_resources/_vendor/packaging/version.pyRHs   s(\d+ | [a-z]+ | \.| -)tctpreviewsfinal-t-trct@cc`sxxltj|D][}tj||}| s|dkrAqn|d dkrb|jdVqd|VqWdVdS(Nt.it 0123456789it*s*final(t_legacy_version_component_retsplitt_legacy_version_replacement_maptgettzfill(Rtpart((sK/usr/lib/python2.7/site-packages/pkg_resources/_vendor/packaging/version.pyt_parse_version_partsrs cC`sd}g}xt|jD]}|jdr|dkrjx'|rf|ddkrf|jqCWnx'|r|ddkr|jqmWn|j|qWt|}||fS(NiR>s*finals*final-t00000000(REtlowert startswithtpoptappendttuple(RR tpartsRD((sK/usr/lib/python2.7/site-packages/pkg_resources/_vendor/packaging/version.pyR*s  s v? (?: (?:(?P[0-9]+)!)? # epoch (?P[0-9]+(?:\.[0-9]+)*) # release segment (?P
                                          # pre-release
            [-_\.]?
            (?P(a|b|c|rc|alpha|beta|pre|preview))
            [-_\.]?
            (?P[0-9]+)?
        )?
        (?P                                         # post release
            (?:-(?P[0-9]+))
            |
            (?:
                [-_\.]?
                (?Ppost|rev|r)
                [-_\.]?
                (?P[0-9]+)?
            )
        )?
        (?P                                          # dev release
            [-_\.]?
            (?Pdev)
            [-_\.]?
            (?P[0-9]+)?
        )?
    )
    (?:\+(?P[a-z0-9]+(?:[-_\.][a-z0-9]+)*))?       # local version
cB`seZejdedejejBZdZdZ	dZ
edZedZ
edZedZed	ZRS(
s^\s*s\s*$cC`s[|jj|}|s0tdj|ntd|jdrZt|jdnddtd|jdjdDdt	|jd|jd	d
t	|jd|jdp|jd
dt	|jd|jddt
|jd|_t|jj
|jj|jj|jj|jj|jj|_dS(NsInvalid version: '{0}'R
iRcs`s|]}t|VqdS(N(tint(t.0ti((sK/usr/lib/python2.7/site-packages/pkg_resources/_vendor/packaging/version.pys	sR<R
tpre_ltpre_nRtpost_ltpost_n1tpost_n2Rtdev_ltdev_nR(t_regextsearchRR-R	tgroupRMRKR@t_parse_letter_versiont_parse_local_versionR)t_cmpkeyR
RR
RRRR(RRtmatch((sK/usr/lib/python2.7/site-packages/pkg_resources/_vendor/packaging/version.pyR+s.*(!					cC`sdjtt|S(Ns(R-R.R((R((sK/usr/lib/python2.7/site-packages/pkg_resources/_vendor/packaging/version.pyR/scC`sSg}|jjdkr7|jdj|jjn|jdjd|jjD|jjdk	r|jdjd|jjDn|jjdk	r|jdj|jjdn|jj	dk	r|jd	j|jj	dn|jj
dk	rF|jd
jdjd|jj
Dndj|S(Nis{0}!R<cs`s|]}t|VqdS(N(R((RNtx((sK/usr/lib/python2.7/site-packages/pkg_resources/_vendor/packaging/version.pys	sRcs`s|]}t|VqdS(N(R((RNR^((sK/usr/lib/python2.7/site-packages/pkg_resources/_vendor/packaging/version.pys	ss.post{0}is.dev{0}s+{0}cs`s|]}t|VqdS(N(R((RNR^((sK/usr/lib/python2.7/site-packages/pkg_resources/_vendor/packaging/version.pys	s(R)R
RJR-tjoinRR
R2RRR(RRL((sK/usr/lib/python2.7/site-packages/pkg_resources/_vendor/packaging/version.pyR,s&)##,cC`st|jdddS(Nt+ii(R(R@(R((sK/usr/lib/python2.7/site-packages/pkg_resources/_vendor/packaging/version.pyR0
scC`sjg}|jjdkr7|jdj|jjn|jdjd|jjDdj|S(Nis{0}!R<cs`s|]}t|VqdS(N(R((RNR^((sK/usr/lib/python2.7/site-packages/pkg_resources/_vendor/packaging/version.pys	sR(R)R
RJR-R_R(RRL((sK/usr/lib/python2.7/site-packages/pkg_resources/_vendor/packaging/version.pyR1s
&cC`s0t|}d|kr,|jdddSdS(NR`i(R(R@(Rtversion_string((sK/usr/lib/python2.7/site-packages/pkg_resources/_vendor/packaging/version.pyRscC`st|jjp|jjS(N(tboolR)RR
(R((sK/usr/lib/python2.7/site-packages/pkg_resources/_vendor/packaging/version.pyR4!scC`st|jjS(N(RbR)R(R((sK/usr/lib/python2.7/site-packages/pkg_resources/_vendor/packaging/version.pyR5%s(RRtretcompileRtVERBOSEt
IGNORECASERWR+R/R,R6R0R1RR4R5(((sK/usr/lib/python2.7/site-packages/pkg_resources/_vendor/packaging/version.pyRs	#		
cC`s|r|dkrd}n|j}|dkr<d}n?|dkrQd}n*|d
krfd	}n|dkr{d}n|t|fS|r|rd}|t|fSdS(NitalphatatbetatbR7R
R8R:trevtrR(R7R
R8(RkRl(R2RGRM(tlettertnumber((sK/usr/lib/python2.7/site-packages/pkg_resources/_vendor/packaging/version.pyRZ*s 					
s[\._-]cC`s-|dk	r)tdtj|DSdS(sR
    Takes a string like abc.1.twelve and turns it into ("abc", 1, "twelve").
    cs`s3|])}|js!|jn	t|VqdS(N(tisdigitRGRM(RNRD((sK/usr/lib/python2.7/site-packages/pkg_resources/_vendor/packaging/version.pys	RsN(R2RKt_local_version_seperatorsR@(R((sK/usr/lib/python2.7/site-packages/pkg_resources/_vendor/packaging/version.pyR[LscC`sttttjdt|}|dkr[|dkr[|dk	r[t}n|dkrpt}n|dkrt}n|dkrt}n|dkrt}ntd|D}||||||fS(NcS`s
|dkS(Ni((R^((sK/usr/lib/python2.7/site-packages/pkg_resources/_vendor/packaging/version.pyR`Rcs`s7|]-}t|tr$|dfn
t|fVqdS(RN(R%RMR(RNRO((sK/usr/lib/python2.7/site-packages/pkg_resources/_vendor/packaging/version.pys	s(RKtreversedtlistt	itertoolst	dropwhileR2R(R
RR
RRR((sK/usr/lib/python2.7/site-packages/pkg_resources/_vendor/packaging/version.pyR\Ws&	$
	
	
	
(t
__future__RRRtcollectionsRsRct_structuresRt__all__t
namedtupleR	Rt
ValueErrorRtobjectRRRdReR?RARER*RRRZRpR[R\(((sK/usr/lib/python2.7/site-packages/pkg_resources/_vendor/packaging/version.pyts0	!&		9k		PKZ[9cc<site-packages/pkg_resources/_vendor/packaging/specifiers.pycnu[
fc@`s<ddlmZmZmZddlZddlZddlZddlZddlm	Z	m
Z
ddlmZm
Z
mZdefdYZde
ejefd	YZd
efdYZdefd
YZdZdefdYZejdZdZdZdefdYZdS(i(tabsolute_importtdivisiontprint_functionNi(tstring_typestwith_metaclass(tVersiont
LegacyVersiontparsetInvalidSpecifiercB`seZdZRS(sH
    An invalid specifier was found, users should refer to PEP 440.
    (t__name__t
__module__t__doc__(((sN/usr/lib/python2.7/site-packages/pkg_resources/_vendor/packaging/specifiers.pyRst
BaseSpecifiercB`seZejdZejdZejdZejdZejdZ	e	j
dZ	ejddZejddZ
RS(	cC`sdS(s
        Returns the str representation of this Specifier like object. This
        should be representative of the Specifier itself.
        N((tself((sN/usr/lib/python2.7/site-packages/pkg_resources/_vendor/packaging/specifiers.pyt__str__tcC`sdS(sF
        Returns a hash value for this Specifier like object.
        N((R
((sN/usr/lib/python2.7/site-packages/pkg_resources/_vendor/packaging/specifiers.pyt__hash__RcC`sdS(sq
        Returns a boolean representing whether or not the two Specifier like
        objects are equal.
        N((R
tother((sN/usr/lib/python2.7/site-packages/pkg_resources/_vendor/packaging/specifiers.pyt__eq__$RcC`sdS(su
        Returns a boolean representing whether or not the two Specifier like
        objects are not equal.
        N((R
R((sN/usr/lib/python2.7/site-packages/pkg_resources/_vendor/packaging/specifiers.pyt__ne__+RcC`sdS(sg
        Returns whether or not pre-releases as a whole are allowed by this
        specifier.
        N((R
((sN/usr/lib/python2.7/site-packages/pkg_resources/_vendor/packaging/specifiers.pytprereleases2RcC`sdS(sd
        Sets whether or not pre-releases as a whole are allowed by this
        specifier.
        N((R
tvalue((sN/usr/lib/python2.7/site-packages/pkg_resources/_vendor/packaging/specifiers.pyR9RcC`sdS(sR
        Determines if the given item is contained within this specifier.
        N((R
titemR((sN/usr/lib/python2.7/site-packages/pkg_resources/_vendor/packaging/specifiers.pytcontains@RcC`sdS(s
        Takes an iterable of items and filters them so that only items which
        are contained within this specifier are allowed in it.
        N((R
titerableR((sN/usr/lib/python2.7/site-packages/pkg_resources/_vendor/packaging/specifiers.pytfilterFRN(R	R
tabctabstractmethodRRRRtabstractpropertyRtsettertNoneRR(((sN/usr/lib/python2.7/site-packages/pkg_resources/_vendor/packaging/specifiers.pyRst_IndividualSpecifiercB`seZiZdddZdZdZdZdZdZ	dZ
dZed	Z
ed
ZedZejdZd
ZddZddZRS(RcC`sj|jj|}|s0tdj|n|jdj|jdjf|_||_dS(NsInvalid specifier: '{0}'toperatortversion(t_regextsearchRtformattgrouptstript_spect_prereleases(R
tspecRtmatch((sN/usr/lib/python2.7/site-packages/pkg_resources/_vendor/packaging/specifiers.pyt__init__RscC`sF|jdk	r!dj|jnd}dj|jjt||S(Ns, prereleases={0!r}Rs<{0}({1!r}{2})>(R(RR$Rt	__class__R	tstr(R
tpre((sN/usr/lib/python2.7/site-packages/pkg_resources/_vendor/packaging/specifiers.pyt__repr___s!		cC`sdj|jS(Ns{0}{1}(R$R'(R
((sN/usr/lib/python2.7/site-packages/pkg_resources/_vendor/packaging/specifiers.pyRlscC`s
t|jS(N(thashR'(R
((sN/usr/lib/python2.7/site-packages/pkg_resources/_vendor/packaging/specifiers.pyRoscC`s`t|tr:y|j|}WqPtk
r6tSXnt||jsPtS|j|jkS(N(t
isinstanceRR,RtNotImplementedR'(R
R((sN/usr/lib/python2.7/site-packages/pkg_resources/_vendor/packaging/specifiers.pyRrs
cC`s`t|tr:y|j|}WqPtk
r6tSXnt||jsPtS|j|jkS(N(R1RR,RR2R'(R
R((sN/usr/lib/python2.7/site-packages/pkg_resources/_vendor/packaging/specifiers.pyR}s
cC`st|dj|j|S(Ns_compare_{0}(tgetattrR$t
_operators(R
top((sN/usr/lib/python2.7/site-packages/pkg_resources/_vendor/packaging/specifiers.pyt
_get_operatorscC`s(t|ttfs$t|}n|S(N(R1RRR(R
R!((sN/usr/lib/python2.7/site-packages/pkg_resources/_vendor/packaging/specifiers.pyt_coerce_versionscC`s|jdS(Ni(R'(R
((sN/usr/lib/python2.7/site-packages/pkg_resources/_vendor/packaging/specifiers.pyR scC`s|jdS(Ni(R'(R
((sN/usr/lib/python2.7/site-packages/pkg_resources/_vendor/packaging/specifiers.pyR!scC`s|jS(N(R((R
((sN/usr/lib/python2.7/site-packages/pkg_resources/_vendor/packaging/specifiers.pyRscC`s
||_dS(N(R((R
R((sN/usr/lib/python2.7/site-packages/pkg_resources/_vendor/packaging/specifiers.pyRscC`s
|j|S(N(R(R
R((sN/usr/lib/python2.7/site-packages/pkg_resources/_vendor/packaging/specifiers.pyt__contains__scC`sW|dkr|j}n|j|}|jr;|r;tS|j|j||jS(N(RRR7t
is_prereleasetFalseR6R R!(R
RR((sN/usr/lib/python2.7/site-packages/pkg_resources/_vendor/packaging/specifiers.pyRscc`st}g}i|dk	r!|ntd6}xf|D]^}|j|}|j||r2|jr|pn|jr|j|qt}|Vq2q2W|r|rx|D]}|VqWndS(NR(R:RtTrueR7RR9Rtappend(R
RRtyieldedtfound_prereleasestkwR!tparsed_version((sN/usr/lib/python2.7/site-packages/pkg_resources/_vendor/packaging/specifiers.pyRs
	

N(R	R
R4RR+R/RRRRR6R7tpropertyR R!RRR8RR(((sN/usr/lib/python2.7/site-packages/pkg_resources/_vendor/packaging/specifiers.pyRNs 
	
							tLegacySpecifiercB`seZdZejdedejejBZidd6dd6dd6d	d
6dd6d
d6ZdZ	dZ
dZdZdZ
dZdZRS(s
        (?P(==|!=|<=|>=|<|>))
        \s*
        (?P
            [^,;\s)]* # Since this is a "legacy" specifier, and the version
                      # string can be just about anything, we match everything
                      # except for whitespace, a semi-colon for marker support,
                      # a closing paren since versions can be enclosed in
                      # them, and a comma since it's a version separator.
        )
        s^\s*s\s*$tequals==t	not_equals!=tless_than_equals<=tgreater_than_equals>=t	less_thantcC`s(t|ts$tt|}n|S(N(R1RR-(R
R!((sN/usr/lib/python2.7/site-packages/pkg_resources/_vendor/packaging/specifiers.pyR7scC`s||j|kS(N(R7(R
tprospectiveR)((sN/usr/lib/python2.7/site-packages/pkg_resources/_vendor/packaging/specifiers.pyt_compare_equalscC`s||j|kS(N(R7(R
RKR)((sN/usr/lib/python2.7/site-packages/pkg_resources/_vendor/packaging/specifiers.pyt_compare_not_equalscC`s||j|kS(N(R7(R
RKR)((sN/usr/lib/python2.7/site-packages/pkg_resources/_vendor/packaging/specifiers.pyt_compare_less_than_equalscC`s||j|kS(N(R7(R
RKR)((sN/usr/lib/python2.7/site-packages/pkg_resources/_vendor/packaging/specifiers.pyt_compare_greater_than_equalscC`s||j|kS(N(R7(R
RKR)((sN/usr/lib/python2.7/site-packages/pkg_resources/_vendor/packaging/specifiers.pyt_compare_less_thanscC`s||j|kS(N(R7(R
RKR)((sN/usr/lib/python2.7/site-packages/pkg_resources/_vendor/packaging/specifiers.pyt_compare_greater_thans(R	R
t
_regex_strtretcompiletVERBOSEt
IGNORECASER"R4R7RLRMRNRORPRQ(((sN/usr/lib/python2.7/site-packages/pkg_resources/_vendor/packaging/specifiers.pyRBs"

						c`s"tjfd}|S(Nc`s#t|tstS|||S(N(R1RR:(R
RKR)(tfn(sN/usr/lib/python2.7/site-packages/pkg_resources/_vendor/packaging/specifiers.pytwrappeds(t	functoolstwraps(RWRX((RWsN/usr/lib/python2.7/site-packages/pkg_resources/_vendor/packaging/specifiers.pyt_require_version_compare
st	SpecifiercB`seZdZejdedejejBZidd6dd6dd6d	d
6dd6d
d6dd6dd6Ze	dZ
e	dZe	dZe	dZ
e	dZe	dZe	dZdZedZejdZRS(s
        (?P(~=|==|!=|<=|>=|<|>|===))
        (?P
            (?:
                # The identity operators allow for an escape hatch that will
                # do an exact string match of the version you wish to install.
                # This will not be parsed by PEP 440 and we cannot determine
                # any semantic meaning from it. This operator is discouraged
                # but included entirely as an escape hatch.
                (?<====)  # Only match for the identity operator
                \s*
                [^\s]*    # We just match everything, except for whitespace
                          # since we are only testing for strict identity.
            )
            |
            (?:
                # The (non)equality operators allow for wild card and local
                # versions to be specified so we have to define these two
                # operators separately to enable that.
                (?<===|!=)            # Only match for equals and not equals

                \s*
                v?
                (?:[0-9]+!)?          # epoch
                [0-9]+(?:\.[0-9]+)*   # release
                (?:                   # pre release
                    [-_\.]?
                    (a|b|c|rc|alpha|beta|pre|preview)
                    [-_\.]?
                    [0-9]*
                )?
                (?:                   # post release
                    (?:-[0-9]+)|(?:[-_\.]?(post|rev|r)[-_\.]?[0-9]*)
                )?

                # You cannot use a wild card and a dev or local version
                # together so group them with a | and make them optional.
                (?:
                    (?:[-_\.]?dev[-_\.]?[0-9]*)?         # dev release
                    (?:\+[a-z0-9]+(?:[-_\.][a-z0-9]+)*)? # local
                    |
                    \.\*  # Wild card syntax of .*
                )?
            )
            |
            (?:
                # The compatible operator requires at least two digits in the
                # release segment.
                (?<=~=)               # Only match for the compatible operator

                \s*
                v?
                (?:[0-9]+!)?          # epoch
                [0-9]+(?:\.[0-9]+)+   # release  (We have a + instead of a *)
                (?:                   # pre release
                    [-_\.]?
                    (a|b|c|rc|alpha|beta|pre|preview)
                    [-_\.]?
                    [0-9]*
                )?
                (?:                                   # post release
                    (?:-[0-9]+)|(?:[-_\.]?(post|rev|r)[-_\.]?[0-9]*)
                )?
                (?:[-_\.]?dev[-_\.]?[0-9]*)?          # dev release
            )
            |
            (?:
                # All other operators only allow a sub set of what the
                # (non)equality operators do. Specifically they do not allow
                # local versions to be specified nor do they allow the prefix
                # matching wild cards.
                (?=RGRHRIRJt	arbitrarys===cC`sfdjttjdt|d }|d7}|jd||oe|jd||S(Nt.cS`s|jdo|jdS(Ntposttdev(t
startswith(tx((sN/usr/lib/python2.7/site-packages/pkg_resources/_vendor/packaging/specifiers.pytsis.*s>=s==(tjointlistt	itertoolst	takewhilet_version_splitR6(R
RKR)tprefix((sN/usr/lib/python2.7/site-packages/pkg_resources/_vendor/packaging/specifiers.pyt_compare_compatibles

cC`s|jdrht|j}t|d }tt|}|t| }t||\}}n't|}|jst|j}n||kS(Ns.*i(tendswithRtpublicRiR-tlent_pad_versiontlocal(R
RKR)((sN/usr/lib/python2.7/site-packages/pkg_resources/_vendor/packaging/specifiers.pyRLs	cC`s|j||S(N(RL(R
RKR)((sN/usr/lib/python2.7/site-packages/pkg_resources/_vendor/packaging/specifiers.pyRMscC`s|t|kS(N(R(R
RKR)((sN/usr/lib/python2.7/site-packages/pkg_resources/_vendor/packaging/specifiers.pyRNscC`s|t|kS(N(R(R
RKR)((sN/usr/lib/python2.7/site-packages/pkg_resources/_vendor/packaging/specifiers.pyROscC`sXt|}||kstS|jrT|jrTt|jt|jkrTtSntS(N(RR:R9tbase_versionR;(R
RKR)((sN/usr/lib/python2.7/site-packages/pkg_resources/_vendor/packaging/specifiers.pyRPscC`st|}||kstS|jrT|jrTt|jt|jkrTtSn|jdk	rt|jt|jkrtSntS(N(RR:tis_postreleaseRqRpRR;(R
RKR)((sN/usr/lib/python2.7/site-packages/pkg_resources/_vendor/packaging/specifiers.pyRQscC`s"t|jt|jkS(N(R-tlower(R
RKR)((sN/usr/lib/python2.7/site-packages/pkg_resources/_vendor/packaging/specifiers.pyt_compare_arbitraryscC`ss|jdk	r|jS|j\}}|dkro|dkrY|jdrY|d }nt|jrotSntS(	Ns==s>=s<=s~=s===s.*i(s==s>=s<=s~=s===(R(RR'RlRR9R;R:(R
R R!((sN/usr/lib/python2.7/site-packages/pkg_resources/_vendor/packaging/specifiers.pyRs
cC`s
||_dS(N(R((R
R((sN/usr/lib/python2.7/site-packages/pkg_resources/_vendor/packaging/specifiers.pyRs(R	R
RRRSRTRURVR"R4R[RkRLRMRNRORPRQRtRARR(((sN/usr/lib/python2.7/site-packages/pkg_resources/_vendor/packaging/specifiers.pyR\s,^
#	s^([0-9]+)((?:a|b|c|rc)[0-9]+)$cC`s\g}xO|jdD]>}tj|}|rG|j|jq|j|qW|S(NR_(tsplitt
_prefix_regexR#textendtgroupsR<(R!tresultRR*((sN/usr/lib/python2.7/site-packages/pkg_resources/_vendor/packaging/specifiers.pyRi'sc	C`sgg}}|jttjd||jttjd||j|t|d|j|t|d|jddgtdt|dt|d|jddgtdt|dt|dttj|ttj|fS(NcS`s
|jS(N(tisdigit(Rc((sN/usr/lib/python2.7/site-packages/pkg_resources/_vendor/packaging/specifiers.pyRd6RcS`s
|jS(N(Rz(Rc((sN/usr/lib/python2.7/site-packages/pkg_resources/_vendor/packaging/specifiers.pyRd7Riit0(R<RfRgRhRntinserttmaxtchain(tlefttrightt
left_splittright_split((sN/usr/lib/python2.7/site-packages/pkg_resources/_vendor/packaging/specifiers.pyRo2s
""//tSpecifierSetcB`seZdddZdZdZdZdZdZdZ	dZ
d	Zed
Z
e
jdZ
dZdd
ZddZRS(RcC`sg|jdD]}|jr|j^q}t}xL|D]D}y|jt|WqDtk
r|jt|qDXqDWt||_||_	dS(Nt,(
RuR&tsettaddR\RRBt	frozensett_specsR((R
t
specifiersRtstparsedt	specifier((sN/usr/lib/python2.7/site-packages/pkg_resources/_vendor/packaging/specifiers.pyR+Os4	

cC`s=|jdk	r!dj|jnd}djt||S(Ns, prereleases={0!r}Rs(R(RR$RR-(R
R.((sN/usr/lib/python2.7/site-packages/pkg_resources/_vendor/packaging/specifiers.pyR/ds!cC`s djtd|jDS(NRcs`s|]}t|VqdS(N(R-(t.0R((sN/usr/lib/python2.7/site-packages/pkg_resources/_vendor/packaging/specifiers.pys	ns(RetsortedR(R
((sN/usr/lib/python2.7/site-packages/pkg_resources/_vendor/packaging/specifiers.pyRmscC`s
t|jS(N(R0R(R
((sN/usr/lib/python2.7/site-packages/pkg_resources/_vendor/packaging/specifiers.pyRpscC`st|trt|}nt|ts1tSt}t|j|jB|_|jdkr|jdk	r|j|_nZ|jdk	r|jdkr|j|_n-|j|jkr|j|_ntd|S(NsFCannot combine SpecifierSets with True and False prerelease overrides.(	R1RRR2RRR(Rt
ValueError(R
RR((sN/usr/lib/python2.7/site-packages/pkg_resources/_vendor/packaging/specifiers.pyt__and__ss		cC`set|trt|}n7t|trBtt|}nt|tsUtS|j|jkS(N(R1RRRR-R2R(R
R((sN/usr/lib/python2.7/site-packages/pkg_resources/_vendor/packaging/specifiers.pyRscC`set|trt|}n7t|trBtt|}nt|tsUtS|j|jkS(N(R1RRRR-R2R(R
R((sN/usr/lib/python2.7/site-packages/pkg_resources/_vendor/packaging/specifiers.pyRscC`s
t|jS(N(RnR(R
((sN/usr/lib/python2.7/site-packages/pkg_resources/_vendor/packaging/specifiers.pyt__len__scC`s
t|jS(N(titerR(R
((sN/usr/lib/python2.7/site-packages/pkg_resources/_vendor/packaging/specifiers.pyt__iter__scC`s:|jdk	r|jS|js#dStd|jDS(Ncs`s|]}|jVqdS(N(R(RR((sN/usr/lib/python2.7/site-packages/pkg_resources/_vendor/packaging/specifiers.pys	s(R(RRtany(R
((sN/usr/lib/python2.7/site-packages/pkg_resources/_vendor/packaging/specifiers.pyRs
	cC`s
||_dS(N(R((R
R((sN/usr/lib/python2.7/site-packages/pkg_resources/_vendor/packaging/specifiers.pyRscC`s
|j|S(N(R(R
R((sN/usr/lib/python2.7/site-packages/pkg_resources/_vendor/packaging/specifiers.pyR8sc`sptttfs$tndkr<|jnrPjrPtStfd|j	DS(Nc3`s$|]}|jdVqdS(RN(R(RR(RR(sN/usr/lib/python2.7/site-packages/pkg_resources/_vendor/packaging/specifiers.pys	s(
R1RRRRRR9R:tallR(R
RR((RRsN/usr/lib/python2.7/site-packages/pkg_resources/_vendor/packaging/specifiers.pyRscC`s|dkr|j}n|jrTx,|jD]!}|j|dt|}q+W|Sg}g}x|D]{}t|ttfst|}n|}t|trqgn|j	r|r|s|j
|qqg|j
|qgW|r|r|dkr|S|SdS(NR(RRRRtboolR1RRRR9R<(R
RRR)tfilteredR>RR@((sN/usr/lib/python2.7/site-packages/pkg_resources/_vendor/packaging/specifiers.pyRs*	
N(R	R
RR+R/RRRRRRRRARRR8RR(((sN/usr/lib/python2.7/site-packages/pkg_resources/_vendor/packaging/specifiers.pyRMs						
	
			(t
__future__RRRRRYRgRSt_compatRRR!RRRRRtABCMetatobjectRRRBR[R\RTRvRiRoR(((sN/usr/lib/python2.7/site-packages/pkg_resources/_vendor/packaging/specifiers.pyts""94				PKZ889site-packages/pkg_resources/_vendor/packaging/version.pycnu[
fc@`snddlmZmZmZddlZddlZddlZddlmZddddd	gZ	ej
d
ddd
dddgZdZde
fdYZdefdYZdefdYZejdejZidd6dd6dd6dd6dd
6ZdZdZdZdefd YZd!Zejd"Zd#Zd$ZdS(%i(tabsolute_importtdivisiontprint_functionNi(tInfinitytparsetVersiont
LegacyVersiontInvalidVersiontVERSION_PATTERNt_VersiontepochtreleasetdevtpretposttlocalcC`s-yt|SWntk
r(t|SXdS(s
    Parse the given version string and return either a :class:`Version` object
    or a :class:`LegacyVersion` object depending on if the given version is
    a valid PEP 440 version or a legacy version.
    N(RRR(tversion((sK/usr/lib/python2.7/site-packages/pkg_resources/_vendor/packaging/version.pyRs
cB`seZdZRS(sF
    An invalid version was found, users should refer to PEP 440.
    (t__name__t
__module__t__doc__(((sK/usr/lib/python2.7/site-packages/pkg_resources/_vendor/packaging/version.pyR$st_BaseVersioncB`sPeZdZdZdZdZdZdZdZdZ	RS(cC`s
t|jS(N(thasht_key(tself((sK/usr/lib/python2.7/site-packages/pkg_resources/_vendor/packaging/version.pyt__hash__,scC`s|j|dS(NcS`s
||kS(N((tsto((sK/usr/lib/python2.7/site-packages/pkg_resources/_vendor/packaging/version.pyt0t(t_compare(Rtother((sK/usr/lib/python2.7/site-packages/pkg_resources/_vendor/packaging/version.pyt__lt__/scC`s|j|dS(NcS`s
||kS(N((RR((sK/usr/lib/python2.7/site-packages/pkg_resources/_vendor/packaging/version.pyR3R(R(RR((sK/usr/lib/python2.7/site-packages/pkg_resources/_vendor/packaging/version.pyt__le__2scC`s|j|dS(NcS`s
||kS(N((RR((sK/usr/lib/python2.7/site-packages/pkg_resources/_vendor/packaging/version.pyR6R(R(RR((sK/usr/lib/python2.7/site-packages/pkg_resources/_vendor/packaging/version.pyt__eq__5scC`s|j|dS(NcS`s
||kS(N((RR((sK/usr/lib/python2.7/site-packages/pkg_resources/_vendor/packaging/version.pyR9R(R(RR((sK/usr/lib/python2.7/site-packages/pkg_resources/_vendor/packaging/version.pyt__ge__8scC`s|j|dS(NcS`s
||kS(N((RR((sK/usr/lib/python2.7/site-packages/pkg_resources/_vendor/packaging/version.pyR<R(R(RR((sK/usr/lib/python2.7/site-packages/pkg_resources/_vendor/packaging/version.pyt__gt__;scC`s|j|dS(NcS`s
||kS(N((RR((sK/usr/lib/python2.7/site-packages/pkg_resources/_vendor/packaging/version.pyR?R(R(RR((sK/usr/lib/python2.7/site-packages/pkg_resources/_vendor/packaging/version.pyt__ne__>scC`s&t|tstS||j|jS(N(t
isinstanceRtNotImplementedR(RRtmethod((sK/usr/lib/python2.7/site-packages/pkg_resources/_vendor/packaging/version.pyRAs(
RRRRR R!R"R#R$R(((sK/usr/lib/python2.7/site-packages/pkg_resources/_vendor/packaging/version.pyR*s							cB`sneZdZdZdZedZedZedZedZ	edZ
RS(cC`s%t||_t|j|_dS(N(tstrt_versiont_legacy_cmpkeyR(RR((sK/usr/lib/python2.7/site-packages/pkg_resources/_vendor/packaging/version.pyt__init__JscC`s|jS(N(R)(R((sK/usr/lib/python2.7/site-packages/pkg_resources/_vendor/packaging/version.pyt__str__NscC`sdjtt|S(Ns(tformattreprR((R((sK/usr/lib/python2.7/site-packages/pkg_resources/_vendor/packaging/version.pyt__repr__QscC`s|jS(N(R)(R((sK/usr/lib/python2.7/site-packages/pkg_resources/_vendor/packaging/version.pytpublicTscC`s|jS(N(R)(R((sK/usr/lib/python2.7/site-packages/pkg_resources/_vendor/packaging/version.pytbase_versionXscC`sdS(N(tNone(R((sK/usr/lib/python2.7/site-packages/pkg_resources/_vendor/packaging/version.pyR\scC`stS(N(tFalse(R((sK/usr/lib/python2.7/site-packages/pkg_resources/_vendor/packaging/version.pyt
is_prerelease`scC`stS(N(R3(R((sK/usr/lib/python2.7/site-packages/pkg_resources/_vendor/packaging/version.pytis_postreleaseds(RRR+R,R/tpropertyR0R1RR4R5(((sK/usr/lib/python2.7/site-packages/pkg_resources/_vendor/packaging/version.pyRHs			s(\d+ | [a-z]+ | \.| -)tctpreviewsfinal-t-trct@cc`sxxltj|D][}tj||}|s|dkrAqn|d dkrb|jdVqd|VqWdVdS(Nt.it
0123456789it*s*final(t_legacy_version_component_retsplitt_legacy_version_replacement_maptgettzfill(Rtpart((sK/usr/lib/python2.7/site-packages/pkg_resources/_vendor/packaging/version.pyt_parse_version_partsrs
cC`sd}g}xt|jD]}|jdr|dkrjx'|rf|ddkrf|jqCWnx'|r|ddkr|jqmWn|j|qWt|}||fS(NiR>s*finals*final-t00000000(REtlowert
startswithtpoptappendttuple(RR
tpartsRD((sK/usr/lib/python2.7/site-packages/pkg_resources/_vendor/packaging/version.pyR*ss
    v?
    (?:
        (?:(?P[0-9]+)!)?                           # epoch
        (?P[0-9]+(?:\.[0-9]+)*)                  # release segment
        (?P
                                          # pre-release
            [-_\.]?
            (?P(a|b|c|rc|alpha|beta|pre|preview))
            [-_\.]?
            (?P[0-9]+)?
        )?
        (?P                                         # post release
            (?:-(?P[0-9]+))
            |
            (?:
                [-_\.]?
                (?Ppost|rev|r)
                [-_\.]?
                (?P[0-9]+)?
            )
        )?
        (?P                                          # dev release
            [-_\.]?
            (?Pdev)
            [-_\.]?
            (?P[0-9]+)?
        )?
    )
    (?:\+(?P[a-z0-9]+(?:[-_\.][a-z0-9]+)*))?       # local version
cB`seZejdedejejBZdZdZ	dZ
edZedZ
edZedZed	ZRS(
s^\s*s\s*$cC`s[|jj|}|s0tdj|ntd|jdrZt|jdnddtd|jdjdDdt	|jd|jd	d
t	|jd|jdp|jd
dt	|jd|jddt
|jd|_t|jj
|jj|jj|jj|jj|jj|_dS(NsInvalid version: '{0}'R
iRcs`s|]}t|VqdS(N(tint(t.0ti((sK/usr/lib/python2.7/site-packages/pkg_resources/_vendor/packaging/version.pys	sR<R
tpre_ltpre_nRtpost_ltpost_n1tpost_n2Rtdev_ltdev_nR(t_regextsearchRR-R	tgroupRMRKR@t_parse_letter_versiont_parse_local_versionR)t_cmpkeyR
RR
RRRR(RRtmatch((sK/usr/lib/python2.7/site-packages/pkg_resources/_vendor/packaging/version.pyR+s.*(!					cC`sdjtt|S(Ns(R-R.R((R((sK/usr/lib/python2.7/site-packages/pkg_resources/_vendor/packaging/version.pyR/scC`sSg}|jjdkr7|jdj|jjn|jdjd|jjD|jjdk	r|jdjd|jjDn|jjdk	r|jdj|jjdn|jj	dk	r|jd	j|jj	dn|jj
dk	rF|jd
jdjd|jj
Dndj|S(Nis{0}!R<cs`s|]}t|VqdS(N(R((RNtx((sK/usr/lib/python2.7/site-packages/pkg_resources/_vendor/packaging/version.pys	sRcs`s|]}t|VqdS(N(R((RNR^((sK/usr/lib/python2.7/site-packages/pkg_resources/_vendor/packaging/version.pys	ss.post{0}is.dev{0}s+{0}cs`s|]}t|VqdS(N(R((RNR^((sK/usr/lib/python2.7/site-packages/pkg_resources/_vendor/packaging/version.pys	s(R)R
RJR-tjoinRR
R2RRR(RRL((sK/usr/lib/python2.7/site-packages/pkg_resources/_vendor/packaging/version.pyR,s&)##,cC`st|jdddS(Nt+ii(R(R@(R((sK/usr/lib/python2.7/site-packages/pkg_resources/_vendor/packaging/version.pyR0
scC`sjg}|jjdkr7|jdj|jjn|jdjd|jjDdj|S(Nis{0}!R<cs`s|]}t|VqdS(N(R((RNR^((sK/usr/lib/python2.7/site-packages/pkg_resources/_vendor/packaging/version.pys	sR(R)R
RJR-R_R(RRL((sK/usr/lib/python2.7/site-packages/pkg_resources/_vendor/packaging/version.pyR1s
&cC`s0t|}d|kr,|jdddSdS(NR`i(R(R@(Rtversion_string((sK/usr/lib/python2.7/site-packages/pkg_resources/_vendor/packaging/version.pyRscC`st|jjp|jjS(N(tboolR)RR
(R((sK/usr/lib/python2.7/site-packages/pkg_resources/_vendor/packaging/version.pyR4!scC`st|jjS(N(RbR)R(R((sK/usr/lib/python2.7/site-packages/pkg_resources/_vendor/packaging/version.pyR5%s(RRtretcompileRtVERBOSEt
IGNORECASERWR+R/R,R6R0R1RR4R5(((sK/usr/lib/python2.7/site-packages/pkg_resources/_vendor/packaging/version.pyRs	#		
cC`s|r|dkrd}n|j}|dkr<d}n?|dkrQd}n*|d
krfd	}n|dkr{d}n|t|fS|r|rd}|t|fSdS(NitalphatatbetatbR7R
R8R:trevtrR(R7R
R8(RkRl(R2RGRM(tlettertnumber((sK/usr/lib/python2.7/site-packages/pkg_resources/_vendor/packaging/version.pyRZ*s 					
s[\._-]cC`s-|dk	r)tdtj|DSdS(sR
    Takes a string like abc.1.twelve and turns it into ("abc", 1, "twelve").
    cs`s3|])}|js!|jn	t|VqdS(N(tisdigitRGRM(RNRD((sK/usr/lib/python2.7/site-packages/pkg_resources/_vendor/packaging/version.pys	RsN(R2RKt_local_version_seperatorsR@(R((sK/usr/lib/python2.7/site-packages/pkg_resources/_vendor/packaging/version.pyR[LscC`sttttjdt|}|dkr[|dkr[|dk	r[t}n|dkrpt}n|dkrt}n|dkrt}n|dkrt}ntd|D}||||||fS(NcS`s
|dkS(Ni((R^((sK/usr/lib/python2.7/site-packages/pkg_resources/_vendor/packaging/version.pyR`Rcs`s7|]-}t|tr$|dfn
t|fVqdS(RN(R%RMR(RNRO((sK/usr/lib/python2.7/site-packages/pkg_resources/_vendor/packaging/version.pys	s(RKtreversedtlistt	itertoolst	dropwhileR2R(R
RR
RRR((sK/usr/lib/python2.7/site-packages/pkg_resources/_vendor/packaging/version.pyR\Ws&	$
	
	
	
(t
__future__RRRtcollectionsRsRct_structuresRt__all__t
namedtupleR	Rt
ValueErrorRtobjectRRRdReR?RARER*RRRZRpR[R\(((sK/usr/lib/python2.7/site-packages/pkg_resources/_vendor/packaging/version.pyts0	!&		9k		PKZh =site-packages/pkg_resources/_vendor/packaging/_structures.pycnu[
fc@`s^ddlmZmZmZdefdYZeZdefdYZeZdS(i(tabsolute_importtdivisiontprint_functiontInfinitycB`sYeZdZdZdZdZdZdZdZdZ	dZ
RS(	cC`sdS(NR((tself((sO/usr/lib/python2.7/site-packages/pkg_resources/_vendor/packaging/_structures.pyt__repr__	scC`stt|S(N(thashtrepr(R((sO/usr/lib/python2.7/site-packages/pkg_resources/_vendor/packaging/_structures.pyt__hash__scC`stS(N(tFalse(Rtother((sO/usr/lib/python2.7/site-packages/pkg_resources/_vendor/packaging/_structures.pyt__lt__scC`stS(N(R	(RR
((sO/usr/lib/python2.7/site-packages/pkg_resources/_vendor/packaging/_structures.pyt__le__scC`st||jS(N(t
isinstancet	__class__(RR
((sO/usr/lib/python2.7/site-packages/pkg_resources/_vendor/packaging/_structures.pyt__eq__scC`st||jS(N(R
R(RR
((sO/usr/lib/python2.7/site-packages/pkg_resources/_vendor/packaging/_structures.pyt__ne__scC`stS(N(tTrue(RR
((sO/usr/lib/python2.7/site-packages/pkg_resources/_vendor/packaging/_structures.pyt__gt__scC`stS(N(R(RR
((sO/usr/lib/python2.7/site-packages/pkg_resources/_vendor/packaging/_structures.pyt__ge__scC`stS(N(tNegativeInfinity(R((sO/usr/lib/python2.7/site-packages/pkg_resources/_vendor/packaging/_structures.pyt__neg__!s(t__name__t
__module__RRRRRRRRR(((sO/usr/lib/python2.7/site-packages/pkg_resources/_vendor/packaging/_structures.pyRs								RcB`sYeZdZdZdZdZdZdZdZdZ	dZ
RS(	cC`sdS(Ns	-Infinity((R((sO/usr/lib/python2.7/site-packages/pkg_resources/_vendor/packaging/_structures.pyR)scC`stt|S(N(RR(R((sO/usr/lib/python2.7/site-packages/pkg_resources/_vendor/packaging/_structures.pyR,scC`stS(N(R(RR
((sO/usr/lib/python2.7/site-packages/pkg_resources/_vendor/packaging/_structures.pyR/scC`stS(N(R(RR
((sO/usr/lib/python2.7/site-packages/pkg_resources/_vendor/packaging/_structures.pyR2scC`st||jS(N(R
R(RR
((sO/usr/lib/python2.7/site-packages/pkg_resources/_vendor/packaging/_structures.pyR5scC`st||jS(N(R
R(RR
((sO/usr/lib/python2.7/site-packages/pkg_resources/_vendor/packaging/_structures.pyR8scC`stS(N(R	(RR
((sO/usr/lib/python2.7/site-packages/pkg_resources/_vendor/packaging/_structures.pyR;scC`stS(N(R	(RR
((sO/usr/lib/python2.7/site-packages/pkg_resources/_vendor/packaging/_structures.pyR>scC`stS(N(R(R((sO/usr/lib/python2.7/site-packages/pkg_resources/_vendor/packaging/_structures.pyRAs(RRRRRRRRRRR(((sO/usr/lib/python2.7/site-packages/pkg_resources/_vendor/packaging/_structures.pyR's								N(t
__future__RRRtobjectRR(((sO/usr/lib/python2.7/site-packages/pkg_resources/_vendor/packaging/_structures.pyts	PKZiJ\\8site-packages/pkg_resources/_vendor/packaging/_compat.pynu[# This file is dual licensed under the terms of the Apache License, Version
# 2.0, and the BSD License. See the LICENSE file in the root of this repository
# for complete details.
from __future__ import absolute_import, division, print_function

import sys


PY2 = sys.version_info[0] == 2
PY3 = sys.version_info[0] == 3

# flake8: noqa

if PY3:
    string_types = str,
else:
    string_types = basestring,


def with_metaclass(meta, *bases):
    """
    Create a base class with a metaclass.
    """
    # This requires a bit of explanation: the basic idea is to make a dummy
    # metaclass for one level of class instantiation that replaces itself with
    # the actual metaclass.
    class metaclass(meta):
        def __new__(cls, name, this_bases, d):
            return meta(name, bases, d)
    return type.__new__(metaclass, 'temporary_class', (), {})
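
# --- Editorial usage sketch (not part of the vendored module).  The names
# _ExampleMeta, _ExampleBase and created_by_meta below are hypothetical and
# exist only to show how with_metaclass() is typically applied.
class _ExampleMeta(type):
    # Mark every class this metaclass creates, so we can see that it ran.
    def __new__(cls, name, this_bases, d):
        d.setdefault("created_by_meta", True)
        return super(_ExampleMeta, cls).__new__(cls, name, this_bases, d)


class _ExampleBase(with_metaclass(_ExampleMeta, object)):
    pass


# _ExampleBase was built by _ExampleMeta exactly once, without using the
# Python-2-only or Python-3-only metaclass declaration syntax directly.
assert _ExampleBase.created_by_meta is True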
PKZ08 8 8site-packages/pkg_resources/_vendor/packaging/markers.pynu[# This file is dual licensed under the terms of the Apache License, Version
# 2.0, and the BSD License. See the LICENSE file in the root of this repository
# for complete details.
from __future__ import absolute_import, division, print_function

import operator
import os
import platform
import sys

from pkg_resources.extern.pyparsing import ParseException, ParseResults, stringStart, stringEnd
from pkg_resources.extern.pyparsing import ZeroOrMore, Group, Forward, QuotedString
from pkg_resources.extern.pyparsing import Literal as L  # noqa

from ._compat import string_types
from .specifiers import Specifier, InvalidSpecifier


__all__ = [
    "InvalidMarker", "UndefinedComparison", "UndefinedEnvironmentName",
    "Marker", "default_environment",
]


class InvalidMarker(ValueError):
    """
    An invalid marker was found, users should refer to PEP 508.
    """


class UndefinedComparison(ValueError):
    """
    An invalid operation was attempted on a value that doesn't support it.
    """


class UndefinedEnvironmentName(ValueError):
    """
    A name was attempted to be used that does not exist inside of the
    environment.
    """


class Node(object):

    def __init__(self, value):
        self.value = value

    def __str__(self):
        return str(self.value)

    def __repr__(self):
        return "<{0}({1!r})>".format(self.__class__.__name__, str(self))

    def serialize(self):
        raise NotImplementedError


class Variable(Node):

    def serialize(self):
        return str(self)


class Value(Node):

    def serialize(self):
        return '"{0}"'.format(self)


class Op(Node):

    def serialize(self):
        return str(self)


VARIABLE = (
    L("implementation_version") |
    L("platform_python_implementation") |
    L("implementation_name") |
    L("python_full_version") |
    L("platform_release") |
    L("platform_version") |
    L("platform_machine") |
    L("platform_system") |
    L("python_version") |
    L("sys_platform") |
    L("os_name") |
    L("os.name") |  # PEP-345
    L("sys.platform") |  # PEP-345
    L("platform.version") |  # PEP-345
    L("platform.machine") |  # PEP-345
    L("platform.python_implementation") |  # PEP-345
    L("python_implementation") |  # undocumented setuptools legacy
    L("extra")
)
ALIASES = {
    'os.name': 'os_name',
    'sys.platform': 'sys_platform',
    'platform.version': 'platform_version',
    'platform.machine': 'platform_machine',
    'platform.python_implementation': 'platform_python_implementation',
    'python_implementation': 'platform_python_implementation'
}
VARIABLE.setParseAction(lambda s, l, t: Variable(ALIASES.get(t[0], t[0])))
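# Editorial note (illustrative, not part of the original module): the parse
# action above normalises the PEP-345 spellings through ALIASES, so an input
# token such as "os.name" is expected to come back as Variable("os_name"),
# while names without an alias (e.g. "python_version") pass through unchanged.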

VERSION_CMP = (
    L("===") |
    L("==") |
    L(">=") |
    L("<=") |
    L("!=") |
    L("~=") |
    L(">") |
    L("<")
)

MARKER_OP = VERSION_CMP | L("not in") | L("in")
MARKER_OP.setParseAction(lambda s, l, t: Op(t[0]))

MARKER_VALUE = QuotedString("'") | QuotedString('"')
MARKER_VALUE.setParseAction(lambda s, l, t: Value(t[0]))

BOOLOP = L("and") | L("or")

MARKER_VAR = VARIABLE | MARKER_VALUE

MARKER_ITEM = Group(MARKER_VAR + MARKER_OP + MARKER_VAR)
MARKER_ITEM.setParseAction(lambda s, l, t: tuple(t[0]))

LPAREN = L("(").suppress()
RPAREN = L(")").suppress()

MARKER_EXPR = Forward()
MARKER_ATOM = MARKER_ITEM | Group(LPAREN + MARKER_EXPR + RPAREN)
MARKER_EXPR << MARKER_ATOM + ZeroOrMore(BOOLOP + MARKER_EXPR)

MARKER = stringStart + MARKER_EXPR + stringEnd
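
# Editorial sketch (illustrative, not part of the original module): parsing a
# marker such as
#     "python_version >= '2.7' and os_name == 'posix'"
# with MARKER is expected to yield, once _coerce_parse_result() below strips
# the ParseResults wrappers, roughly
#     [(Variable('python_version'), Op('>='), Value('2.7')),
#      'and',
#      (Variable('os_name'), Op('=='), Value('posix'))]
# which is the nested list/tuple structure the evaluation helpers walk.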


def _coerce_parse_result(results):
    if isinstance(results, ParseResults):
        return [_coerce_parse_result(i) for i in results]
    else:
        return results


def _format_marker(marker, first=True):
    assert isinstance(marker, (list, tuple, string_types))

    # Sometimes we have a structure like [[...]] which is a single item list
    # where the single item is itself it's own list. In that case we want skip
    # the rest of this function so that we don't get extraneous () on the
    # outside.
    if (isinstance(marker, list) and len(marker) == 1 and
            isinstance(marker[0], (list, tuple))):
        return _format_marker(marker[0])

    if isinstance(marker, list):
        inner = (_format_marker(m, first=False) for m in marker)
        if first:
            return " ".join(inner)
        else:
            return "(" + " ".join(inner) + ")"
    elif isinstance(marker, tuple):
        return " ".join([m.serialize() for m in marker])
    else:
        return marker


_operators = {
    "in": lambda lhs, rhs: lhs in rhs,
    "not in": lambda lhs, rhs: lhs not in rhs,
    "<": operator.lt,
    "<=": operator.le,
    "==": operator.eq,
    "!=": operator.ne,
    ">=": operator.ge,
    ">": operator.gt,
}


def _eval_op(lhs, op, rhs):
    try:
        spec = Specifier("".join([op.serialize(), rhs]))
    except InvalidSpecifier:
        pass
    else:
        return spec.contains(lhs)

    oper = _operators.get(op.serialize())
    if oper is None:
        raise UndefinedComparison(
            "Undefined {0!r} on {1!r} and {2!r}.".format(op, lhs, rhs)
        )

    return oper(lhs, rhs)
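# Editorial note (illustrative, not part of the original module): version-style
# comparisons are delegated to Specifier, e.g. lhs "2.7.10" with op ">=" and
# rhs "2.7" becomes Specifier(">=2.7").contains("2.7.10"); operators that do
# not form a valid specifier (such as "in" / "not in" on plain strings) fall
# back to the _operators table above.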


_undefined = object()


def _get_env(environment, name):
    value = environment.get(name, _undefined)

    if value is _undefined:
        raise UndefinedEnvironmentName(
            "{0!r} does not exist in evaluation environment.".format(name)
        )

    return value


def _evaluate_markers(markers, environment):
    groups = [[]]

    for marker in markers:
        assert isinstance(marker, (list, tuple, string_types))

        if isinstance(marker, list):
            groups[-1].append(_evaluate_markers(marker, environment))
        elif isinstance(marker, tuple):
            lhs, op, rhs = marker

            if isinstance(lhs, Variable):
                lhs_value = _get_env(environment, lhs.value)
                rhs_value = rhs.value
            else:
                lhs_value = lhs.value
                rhs_value = _get_env(environment, rhs.value)

            groups[-1].append(_eval_op(lhs_value, op, rhs_value))
        else:
            assert marker in ["and", "or"]
            if marker == "or":
                groups.append([])

    return any(all(item) for item in groups)
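# Editorial note (illustrative, not part of the original module): results are
# grouped between "or" tokens, so a marker equivalent to  A and B or C  builds
# groups == [[A, B], [C]] and the final line above evaluates it as
# (A and B) or (C).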


def format_full_version(info):
    version = '{0.major}.{0.minor}.{0.micro}'.format(info)
    kind = info.releaselevel
    if kind != 'final':
        version += kind[0] + str(info.serial)
    return version


def default_environment():
    if hasattr(sys, 'implementation'):
        iver = format_full_version(sys.implementation.version)
        implementation_name = sys.implementation.name
    else:
        iver = '0'
        implementation_name = ''

    return {
        "implementation_name": implementation_name,
        "implementation_version": iver,
        "os_name": os.name,
        "platform_machine": platform.machine(),
        "platform_release": platform.release(),
        "platform_system": platform.system(),
        "platform_version": platform.version(),
        "python_full_version": platform.python_version(),
        "platform_python_implementation": platform.python_implementation(),
        "python_version": platform.python_version()[:3],
        "sys_platform": sys.platform,
    }
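# Editorial sketch (hypothetical values, not emitted by this module): on a
# CPython 2.7 / Linux host the mapping above would look something like
#     {"implementation_name": "", "implementation_version": "0",
#      "os_name": "posix", "platform_machine": "x86_64",
#      "platform_system": "Linux", "python_version": "2.7",
#      "python_full_version": "2.7.5", "sys_platform": "linux2", ...}
# and it is the environment Marker.evaluate() consults below.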


class Marker(object):

    def __init__(self, marker):
        try:
            self._markers = _coerce_parse_result(MARKER.parseString(marker))
        except ParseException as e:
            err_str = "Invalid marker: {0!r}, parse error at {1!r}".format(
                marker, marker[e.loc:e.loc + 8])
            raise InvalidMarker(err_str)

    def __str__(self):
        return _format_marker(self._markers)

    def __repr__(self):
        return "".format(str(self))

    def evaluate(self, environment=None):
        """Evaluate a marker.

        Return the boolean from evaluating the given marker against the
        environment. environment is an optional argument to override all or
        part of the determined environment.

        The environment is determined from the current Python process.
        """
        current_environment = default_environment()
        if environment is not None:
            current_environment.update(environment)

        return _evaluate_markers(self._markers, current_environment)
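
# --- Editorial usage sketch (not part of the vendored module).  Assuming the
# pyparsing/packaging imports above resolve, Marker is typically exercised as:
#
#     m = Marker("python_version >= '2.7' and os_name == 'posix'")
#     str(m)    # -> 'python_version >= "2.7" and os_name == "posix"'
#     m.evaluate()                           # against the running interpreter
#     m.evaluate({"python_version": "2.6"})  # override -> False
#
# A string that does not fit the grammar (for example an unknown, unquoted
# variable name) raises InvalidMarker at construction time, and evaluating a
# marker that references "extra" without supplying it in the environment
# raises UndefinedEnvironmentName.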
PKZSff:site-packages/pkg_resources/_vendor/packaging/__init__.pycnu[
fc@`sxddlmZmZmZddlmZmZmZmZm	Z	m
Z
mZmZdddddd	d
dgZ
dS(
i(tabsolute_importtdivisiontprint_functioni(t
__author__t
__copyright__t	__email__t__license__t__summary__t	__title__t__uri__t__version__RRR	R
RRRRN(t
__future__RRRt	__about__RRRRRRR	R
t__all__(((sL/usr/lib/python2.7/site-packages/pkg_resources/_vendor/packaging/__init__.pyts:PKZb:cXX7site-packages/pkg_resources/_vendor/packaging/utils.pyonu[
fc@`sDddlmZmZmZddlZejdZdZdS(i(tabsolute_importtdivisiontprint_functionNs[-_.]+cC`stjd|jS(Nt-(t_canonicalize_regextsubtlower(tname((sI/usr/lib/python2.7/site-packages/pkg_resources/_vendor/packaging/utils.pytcanonicalize_names(t
__future__RRRtretcompileRR(((sI/usr/lib/python2.7/site-packages/pkg_resources/_vendor/packaging/utils.pytsPKZ<site-packages/pkg_resources/_vendor/packaging/_structures.pynu[# This file is dual licensed under the terms of the Apache License, Version
# 2.0, and the BSD License. See the LICENSE file in the root of this repository
# for complete details.
from __future__ import absolute_import, division, print_function


class Infinity(object):

    def __repr__(self):
        return "Infinity"

    def __hash__(self):
        return hash(repr(self))

    def __lt__(self, other):
        return False

    def __le__(self, other):
        return False

    def __eq__(self, other):
        return isinstance(other, self.__class__)

    def __ne__(self, other):
        return not isinstance(other, self.__class__)

    def __gt__(self, other):
        return True

    def __ge__(self, other):
        return True

    def __neg__(self):
        return NegativeInfinity

Infinity = Infinity()


class NegativeInfinity(object):

    def __repr__(self):
        return "-Infinity"

    def __hash__(self):
        return hash(repr(self))

    def __lt__(self, other):
        return True

    def __le__(self, other):
        return True

    def __eq__(self, other):
        return isinstance(other, self.__class__)

    def __ne__(self, other):
        return not isinstance(other, self.__class__)

    def __gt__(self, other):
        return False

    def __ge__(self, other):
        return False

    def __neg__(self):
        return Infinity

NegativeInfinity = NegativeInfinity()
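# Illustrative behaviour of these sentinel objects (a sketch, not part of the
# vendored source): they order above or below any other value, which is why
# packaging uses them elsewhere when building version sort keys for absent
# segments such as pre-releases.
#     >>> Infinity > 99999
#     True
#     >>> NegativeInfinity < ("dev", 0)
#     True
#     >>> -Infinity is NegativeInfinity
#     True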
PKZ[9cc<site-packages/pkg_resources/_vendor/packaging/specifiers.pyonu[
fc@`s<ddlmZmZmZddlZddlZddlZddlZddlm	Z	m
Z
ddlmZm
Z
mZdefdYZde
ejefd	YZd
efdYZdefd
YZdZdefdYZejdZdZdZdefdYZdS(i(tabsolute_importtdivisiontprint_functionNi(tstring_typestwith_metaclass(tVersiont
LegacyVersiontparsetInvalidSpecifiercB`seZdZRS(sH
    An invalid specifier was found, users should refer to PEP 440.
    (t__name__t
__module__t__doc__(((sN/usr/lib/python2.7/site-packages/pkg_resources/_vendor/packaging/specifiers.pyRst
BaseSpecifiercB`seZejdZejdZejdZejdZejdZ	e	j
dZ	ejddZejddZ
RS(	cC`sdS(s
        Returns the str representation of this Specifier like object. This
        should be representative of the Specifier itself.
        N((tself((sN/usr/lib/python2.7/site-packages/pkg_resources/_vendor/packaging/specifiers.pyt__str__tcC`sdS(sF
        Returns a hash value for this Specifier like object.
        N((R
((sN/usr/lib/python2.7/site-packages/pkg_resources/_vendor/packaging/specifiers.pyt__hash__RcC`sdS(sq
        Returns a boolean representing whether or not the two Specifier like
        objects are equal.
        N((R
tother((sN/usr/lib/python2.7/site-packages/pkg_resources/_vendor/packaging/specifiers.pyt__eq__$RcC`sdS(su
        Returns a boolean representing whether or not the two Specifier like
        objects are not equal.
        N((R
R((sN/usr/lib/python2.7/site-packages/pkg_resources/_vendor/packaging/specifiers.pyt__ne__+RcC`sdS(sg
        Returns whether or not pre-releases as a whole are allowed by this
        specifier.
        N((R
((sN/usr/lib/python2.7/site-packages/pkg_resources/_vendor/packaging/specifiers.pytprereleases2RcC`sdS(sd
        Sets whether or not pre-releases as a whole are allowed by this
        specifier.
        N((R
tvalue((sN/usr/lib/python2.7/site-packages/pkg_resources/_vendor/packaging/specifiers.pyR9RcC`sdS(sR
        Determines if the given item is contained within this specifier.
        N((R
titemR((sN/usr/lib/python2.7/site-packages/pkg_resources/_vendor/packaging/specifiers.pytcontains@RcC`sdS(s
        Takes an iterable of items and filters them so that only items which
        are contained within this specifier are allowed in it.
        N((R
titerableR((sN/usr/lib/python2.7/site-packages/pkg_resources/_vendor/packaging/specifiers.pytfilterFRN(R	R
tabctabstractmethodRRRRtabstractpropertyRtsettertNoneRR(((sN/usr/lib/python2.7/site-packages/pkg_resources/_vendor/packaging/specifiers.pyRst_IndividualSpecifiercB`seZiZdddZdZdZdZdZdZ	dZ
dZed	Z
ed
ZedZejdZd
ZddZddZRS(RcC`sj|jj|}|s0tdj|n|jdj|jdjf|_||_dS(NsInvalid specifier: '{0}'toperatortversion(t_regextsearchRtformattgrouptstript_spect_prereleases(R
tspecRtmatch((sN/usr/lib/python2.7/site-packages/pkg_resources/_vendor/packaging/specifiers.pyt__init__RscC`sF|jdk	r!dj|jnd}dj|jjt||S(Ns, prereleases={0!r}Rs<{0}({1!r}{2})>(R(RR$Rt	__class__R	tstr(R
tpre((sN/usr/lib/python2.7/site-packages/pkg_resources/_vendor/packaging/specifiers.pyt__repr___s!		cC`sdj|jS(Ns{0}{1}(R$R'(R
((sN/usr/lib/python2.7/site-packages/pkg_resources/_vendor/packaging/specifiers.pyRlscC`s
t|jS(N(thashR'(R
((sN/usr/lib/python2.7/site-packages/pkg_resources/_vendor/packaging/specifiers.pyRoscC`s`t|tr:y|j|}WqPtk
r6tSXnt||jsPtS|j|jkS(N(t
isinstanceRR,RtNotImplementedR'(R
R((sN/usr/lib/python2.7/site-packages/pkg_resources/_vendor/packaging/specifiers.pyRrs
cC`s`t|tr:y|j|}WqPtk
r6tSXnt||jsPtS|j|jkS(N(R1RR,RR2R'(R
R((sN/usr/lib/python2.7/site-packages/pkg_resources/_vendor/packaging/specifiers.pyR}s
cC`st|dj|j|S(Ns_compare_{0}(tgetattrR$t
_operators(R
top((sN/usr/lib/python2.7/site-packages/pkg_resources/_vendor/packaging/specifiers.pyt
_get_operatorscC`s(t|ttfs$t|}n|S(N(R1RRR(R
R!((sN/usr/lib/python2.7/site-packages/pkg_resources/_vendor/packaging/specifiers.pyt_coerce_versionscC`s|jdS(Ni(R'(R
((sN/usr/lib/python2.7/site-packages/pkg_resources/_vendor/packaging/specifiers.pyR scC`s|jdS(Ni(R'(R
((sN/usr/lib/python2.7/site-packages/pkg_resources/_vendor/packaging/specifiers.pyR!scC`s|jS(N(R((R
((sN/usr/lib/python2.7/site-packages/pkg_resources/_vendor/packaging/specifiers.pyRscC`s
||_dS(N(R((R
R((sN/usr/lib/python2.7/site-packages/pkg_resources/_vendor/packaging/specifiers.pyRscC`s
|j|S(N(R(R
R((sN/usr/lib/python2.7/site-packages/pkg_resources/_vendor/packaging/specifiers.pyt__contains__scC`sW|dkr|j}n|j|}|jr;|r;tS|j|j||jS(N(RRR7t
is_prereleasetFalseR6R R!(R
RR((sN/usr/lib/python2.7/site-packages/pkg_resources/_vendor/packaging/specifiers.pyRscc`st}g}i|dk	r!|ntd6}xf|D]^}|j|}|j||r2|jr|pn|jr|j|qt}|Vq2q2W|r|rx|D]}|VqWndS(NR(R:RtTrueR7RR9Rtappend(R
RRtyieldedtfound_prereleasestkwR!tparsed_version((sN/usr/lib/python2.7/site-packages/pkg_resources/_vendor/packaging/specifiers.pyRs
	

N(R	R
R4RR+R/RRRRR6R7tpropertyR R!RRR8RR(((sN/usr/lib/python2.7/site-packages/pkg_resources/_vendor/packaging/specifiers.pyRNs 
	
							tLegacySpecifiercB`seZdZejdedejejBZidd6dd6dd6d	d
6dd6d
d6ZdZ	dZ
dZdZdZ
dZdZRS(s
        (?P(==|!=|<=|>=|<|>))
        \s*
        (?P
            [^,;\s)]* # Since this is a "legacy" specifier, and the version
                      # string can be just about anything, we match everything
                      # except for whitespace, a semi-colon for marker support,
                      # a closing paren since versions can be enclosed in
                      # them, and a comma since it's a version separator.
        )
        s^\s*s\s*$tequals==t	not_equals!=tless_than_equals<=tgreater_than_equals>=t	less_thantcC`s(t|ts$tt|}n|S(N(R1RR-(R
R!((sN/usr/lib/python2.7/site-packages/pkg_resources/_vendor/packaging/specifiers.pyR7scC`s||j|kS(N(R7(R
tprospectiveR)((sN/usr/lib/python2.7/site-packages/pkg_resources/_vendor/packaging/specifiers.pyt_compare_equalscC`s||j|kS(N(R7(R
RKR)((sN/usr/lib/python2.7/site-packages/pkg_resources/_vendor/packaging/specifiers.pyt_compare_not_equalscC`s||j|kS(N(R7(R
RKR)((sN/usr/lib/python2.7/site-packages/pkg_resources/_vendor/packaging/specifiers.pyt_compare_less_than_equalscC`s||j|kS(N(R7(R
RKR)((sN/usr/lib/python2.7/site-packages/pkg_resources/_vendor/packaging/specifiers.pyt_compare_greater_than_equalscC`s||j|kS(N(R7(R
RKR)((sN/usr/lib/python2.7/site-packages/pkg_resources/_vendor/packaging/specifiers.pyt_compare_less_thanscC`s||j|kS(N(R7(R
RKR)((sN/usr/lib/python2.7/site-packages/pkg_resources/_vendor/packaging/specifiers.pyt_compare_greater_thans(R	R
t
_regex_strtretcompiletVERBOSEt
IGNORECASER"R4R7RLRMRNRORPRQ(((sN/usr/lib/python2.7/site-packages/pkg_resources/_vendor/packaging/specifiers.pyRBs"

						c`s"tjfd}|S(Nc`s#t|tstS|||S(N(R1RR:(R
RKR)(tfn(sN/usr/lib/python2.7/site-packages/pkg_resources/_vendor/packaging/specifiers.pytwrappeds(t	functoolstwraps(RWRX((RWsN/usr/lib/python2.7/site-packages/pkg_resources/_vendor/packaging/specifiers.pyt_require_version_compare
st	SpecifiercB`seZdZejdedejejBZidd6dd6dd6d	d
6dd6d
d6dd6dd6Ze	dZ
e	dZe	dZe	dZ
e	dZe	dZe	dZdZedZejdZRS(s
        (?P(~=|==|!=|<=|>=|<|>|===))
        (?P
            (?:
                # The identity operators allow for an escape hatch that will
                # do an exact string match of the version you wish to install.
                # This will not be parsed by PEP 440 and we cannot determine
                # any semantic meaning from it. This operator is discouraged
                # but included entirely as an escape hatch.
                (?<====)  # Only match for the identity operator
                \s*
                [^\s]*    # We just match everything, except for whitespace
                          # since we are only testing for strict identity.
            )
            |
            (?:
                # The (non)equality operators allow for wild card and local
                # versions to be specified so we have to define these two
                # operators separately to enable that.
                (?<===|!=)            # Only match for equals and not equals

                \s*
                v?
                (?:[0-9]+!)?          # epoch
                [0-9]+(?:\.[0-9]+)*   # release
                (?:                   # pre release
                    [-_\.]?
                    (a|b|c|rc|alpha|beta|pre|preview)
                    [-_\.]?
                    [0-9]*
                )?
                (?:                   # post release
                    (?:-[0-9]+)|(?:[-_\.]?(post|rev|r)[-_\.]?[0-9]*)
                )?

                # You cannot use a wild card and a dev or local version
                # together so group them with a | and make them optional.
                (?:
                    (?:[-_\.]?dev[-_\.]?[0-9]*)?         # dev release
                    (?:\+[a-z0-9]+(?:[-_\.][a-z0-9]+)*)? # local
                    |
                    \.\*  # Wild card syntax of .*
                )?
            )
            |
            (?:
                # The compatible operator requires at least two digits in the
                # release segment.
                (?<=~=)               # Only match for the compatible operator

                \s*
                v?
                (?:[0-9]+!)?          # epoch
                [0-9]+(?:\.[0-9]+)+   # release  (We have a + instead of a *)
                (?:                   # pre release
                    [-_\.]?
                    (a|b|c|rc|alpha|beta|pre|preview)
                    [-_\.]?
                    [0-9]*
                )?
                (?:                                   # post release
                    (?:-[0-9]+)|(?:[-_\.]?(post|rev|r)[-_\.]?[0-9]*)
                )?
                (?:[-_\.]?dev[-_\.]?[0-9]*)?          # dev release
            )
            |
            (?:
                # All other operators only allow a sub set of what the
                # (non)equality operators do. Specifically they do not allow
                # local versions to be specified nor do they allow the prefix
                # matching wild cards.
                (?=RGRHRIRJt	arbitrarys===cC`sfdjttjdt|d }|d7}|jd||oe|jd||S(Nt.cS`s|jdo|jdS(Ntposttdev(t
startswith(tx((sN/usr/lib/python2.7/site-packages/pkg_resources/_vendor/packaging/specifiers.pytsis.*s>=s==(tjointlistt	itertoolst	takewhilet_version_splitR6(R
RKR)tprefix((sN/usr/lib/python2.7/site-packages/pkg_resources/_vendor/packaging/specifiers.pyt_compare_compatibles

cC`s|jdrht|j}t|d }tt|}|t| }t||\}}n't|}|jst|j}n||kS(Ns.*i(tendswithRtpublicRiR-tlent_pad_versiontlocal(R
RKR)((sN/usr/lib/python2.7/site-packages/pkg_resources/_vendor/packaging/specifiers.pyRLs	cC`s|j||S(N(RL(R
RKR)((sN/usr/lib/python2.7/site-packages/pkg_resources/_vendor/packaging/specifiers.pyRMscC`s|t|kS(N(R(R
RKR)((sN/usr/lib/python2.7/site-packages/pkg_resources/_vendor/packaging/specifiers.pyRNscC`s|t|kS(N(R(R
RKR)((sN/usr/lib/python2.7/site-packages/pkg_resources/_vendor/packaging/specifiers.pyROscC`sXt|}||kstS|jrT|jrTt|jt|jkrTtSntS(N(RR:R9tbase_versionR;(R
RKR)((sN/usr/lib/python2.7/site-packages/pkg_resources/_vendor/packaging/specifiers.pyRPscC`st|}||kstS|jrT|jrTt|jt|jkrTtSn|jdk	rt|jt|jkrtSntS(N(RR:tis_postreleaseRqRpRR;(R
RKR)((sN/usr/lib/python2.7/site-packages/pkg_resources/_vendor/packaging/specifiers.pyRQscC`s"t|jt|jkS(N(R-tlower(R
RKR)((sN/usr/lib/python2.7/site-packages/pkg_resources/_vendor/packaging/specifiers.pyt_compare_arbitraryscC`ss|jdk	r|jS|j\}}|dkro|dkrY|jdrY|d }nt|jrotSntS(	Ns==s>=s<=s~=s===s.*i(s==s>=s<=s~=s===(R(RR'RlRR9R;R:(R
R R!((sN/usr/lib/python2.7/site-packages/pkg_resources/_vendor/packaging/specifiers.pyRs
cC`s
||_dS(N(R((R
R((sN/usr/lib/python2.7/site-packages/pkg_resources/_vendor/packaging/specifiers.pyRs(R	R
RRRSRTRURVR"R4R[RkRLRMRNRORPRQRtRARR(((sN/usr/lib/python2.7/site-packages/pkg_resources/_vendor/packaging/specifiers.pyR\s,^
#	s^([0-9]+)((?:a|b|c|rc)[0-9]+)$cC`s\g}xO|jdD]>}tj|}|rG|j|jq|j|qW|S(NR_(tsplitt
_prefix_regexR#textendtgroupsR<(R!tresultRR*((sN/usr/lib/python2.7/site-packages/pkg_resources/_vendor/packaging/specifiers.pyRi'sc	C`sgg}}|jttjd||jttjd||j|t|d|j|t|d|jddgtdt|dt|d|jddgtdt|dt|dttj|ttj|fS(NcS`s
|jS(N(tisdigit(Rc((sN/usr/lib/python2.7/site-packages/pkg_resources/_vendor/packaging/specifiers.pyRd6RcS`s
|jS(N(Rz(Rc((sN/usr/lib/python2.7/site-packages/pkg_resources/_vendor/packaging/specifiers.pyRd7Riit0(R<RfRgRhRntinserttmaxtchain(tlefttrightt
left_splittright_split((sN/usr/lib/python2.7/site-packages/pkg_resources/_vendor/packaging/specifiers.pyRo2s
""//tSpecifierSetcB`seZdddZdZdZdZdZdZdZ	dZ
d	Zed
Z
e
jdZ
dZdd
ZddZRS(RcC`sg|jdD]}|jr|j^q}t}xL|D]D}y|jt|WqDtk
r|jt|qDXqDWt||_||_	dS(Nt,(
RuR&tsettaddR\RRBt	frozensett_specsR((R
t
specifiersRtstparsedt	specifier((sN/usr/lib/python2.7/site-packages/pkg_resources/_vendor/packaging/specifiers.pyR+Os4	

cC`s=|jdk	r!dj|jnd}djt||S(Ns, prereleases={0!r}Rs(R(RR$RR-(R
R.((sN/usr/lib/python2.7/site-packages/pkg_resources/_vendor/packaging/specifiers.pyR/ds!cC`s djtd|jDS(NRcs`s|]}t|VqdS(N(R-(t.0R((sN/usr/lib/python2.7/site-packages/pkg_resources/_vendor/packaging/specifiers.pys	ns(RetsortedR(R
((sN/usr/lib/python2.7/site-packages/pkg_resources/_vendor/packaging/specifiers.pyRmscC`s
t|jS(N(R0R(R
((sN/usr/lib/python2.7/site-packages/pkg_resources/_vendor/packaging/specifiers.pyRpscC`st|trt|}nt|ts1tSt}t|j|jB|_|jdkr|jdk	r|j|_nZ|jdk	r|jdkr|j|_n-|j|jkr|j|_ntd|S(NsFCannot combine SpecifierSets with True and False prerelease overrides.(	R1RRR2RRR(Rt
ValueError(R
RR((sN/usr/lib/python2.7/site-packages/pkg_resources/_vendor/packaging/specifiers.pyt__and__ss		cC`set|trt|}n7t|trBtt|}nt|tsUtS|j|jkS(N(R1RRRR-R2R(R
R((sN/usr/lib/python2.7/site-packages/pkg_resources/_vendor/packaging/specifiers.pyRscC`set|trt|}n7t|trBtt|}nt|tsUtS|j|jkS(N(R1RRRR-R2R(R
R((sN/usr/lib/python2.7/site-packages/pkg_resources/_vendor/packaging/specifiers.pyRscC`s
t|jS(N(RnR(R
((sN/usr/lib/python2.7/site-packages/pkg_resources/_vendor/packaging/specifiers.pyt__len__scC`s
t|jS(N(titerR(R
((sN/usr/lib/python2.7/site-packages/pkg_resources/_vendor/packaging/specifiers.pyt__iter__scC`s:|jdk	r|jS|js#dStd|jDS(Ncs`s|]}|jVqdS(N(R(RR((sN/usr/lib/python2.7/site-packages/pkg_resources/_vendor/packaging/specifiers.pys	s(R(RRtany(R
((sN/usr/lib/python2.7/site-packages/pkg_resources/_vendor/packaging/specifiers.pyRs
	cC`s
||_dS(N(R((R
R((sN/usr/lib/python2.7/site-packages/pkg_resources/_vendor/packaging/specifiers.pyRscC`s
|j|S(N(R(R
R((sN/usr/lib/python2.7/site-packages/pkg_resources/_vendor/packaging/specifiers.pyR8sc`sptttfs$tndkr<|jnrPjrPtStfd|j	DS(Nc3`s$|]}|jdVqdS(RN(R(RR(RR(sN/usr/lib/python2.7/site-packages/pkg_resources/_vendor/packaging/specifiers.pys	s(
R1RRRRRR9R:tallR(R
RR((RRsN/usr/lib/python2.7/site-packages/pkg_resources/_vendor/packaging/specifiers.pyRscC`s|dkr|j}n|jrTx,|jD]!}|j|dt|}q+W|Sg}g}x|D]{}t|ttfst|}n|}t|trqgn|j	r|r|s|j
|qqg|j
|qgW|r|r|dkr|S|SdS(NR(RRRRtboolR1RRRR9R<(R
RRR)tfilteredR>RR@((sN/usr/lib/python2.7/site-packages/pkg_resources/_vendor/packaging/specifiers.pyRs*	
N(R	R
RR+R/RRRRRRRRARRR8RR(((sN/usr/lib/python2.7/site-packages/pkg_resources/_vendor/packaging/specifiers.pyRMs						
	
			(t
__future__RRRRRYRgRSt_compatRRR!RRRRRtABCMetatobjectRRRBR[R\RTRvRiRoR(((sN/usr/lib/python2.7/site-packages/pkg_resources/_vendor/packaging/specifiers.pyts""94				PKZSff:site-packages/pkg_resources/_vendor/packaging/__init__.pyonu[
fc@`sxddlmZmZmZddlmZmZmZmZm	Z	m
Z
mZmZdddddd	d
dgZ
dS(
i(tabsolute_importtdivisiontprint_functioni(t
__author__t
__copyright__t	__email__t__license__t__summary__t	__title__t__uri__t__version__RRR	R
RRRRN(t
__future__RRRt	__about__RRRRRRR	R
t__all__(((sL/usr/lib/python2.7/site-packages/pkg_resources/_vendor/packaging/__init__.pyts:PKZoMM>site-packages/pkg_resources/_vendor/packaging/requirements.pyonu[
fc@`sYddlmZmZmZddlZddlZddlmZmZm	Z	m
Z
ddlmZmZm
Z
mZmZddlmZddlmZddlmZmZdd	lmZmZmZd
efdYZeejejZ edj!Z"ed
j!Z#edj!Z$edj!Z%edj!Z&edj!Z'edj!Z(edZ)e ee)e BZ*ee ee*Z+e+dZ,e+Z-eddZ.e(e.Z/e-ee&e-Z0e"e
e0e#dZ1eej2ej3ej4BZ5eej2ej3ej4BZ6e5e6AZ7ee7ee&e7ddde8dZ9e
e$e9e%e9BZ:e:j;de	e:dZ<e<j;de	edZej;de'Z=e=eZ>e<e
e>Z?e/e
e>Z@e,e
e1e@e?BZAeeAeZBd eCfd!YZDdS("i(tabsolute_importtdivisiontprint_functionN(tstringStartt	stringEndtoriginalTextFortParseException(t
ZeroOrMoretWordtOptionaltRegextCombine(tLiteral(tparsei(tMARKER_EXPRtMarker(tLegacySpecifiert	SpecifiertSpecifierSettInvalidRequirementcB`seZdZRS(sJ
    An invalid requirement was found, users should refer to PEP 508.
    (t__name__t
__module__t__doc__(((sP/usr/lib/python2.7/site-packages/pkg_resources/_vendor/packaging/requirements.pyRst[t]t(t)t,t;t@s-_.tnames[^ ]+turltextrast
joinStringtadjacentt	_raw_speccC`s
|jpdS(Nt(R#(tstltt((sP/usr/lib/python2.7/site-packages/pkg_resources/_vendor/packaging/requirements.pyt6R$t	specifiercC`s|dS(Ni((R%R&R'((sP/usr/lib/python2.7/site-packages/pkg_resources/_vendor/packaging/requirements.pyR(9R$tmarkercC`st||j|j!S(N(Rt_original_startt
_original_end(R%R&R'((sP/usr/lib/python2.7/site-packages/pkg_resources/_vendor/packaging/requirements.pyR(=R$tRequirementcB`s)eZdZdZdZdZRS(sParse a requirement.

    Parse a given requirement string into its parts, such as name, specifier,
    URL, and extras. Raises InvalidRequirement on a badly-formed requirement
    string.
    cC`sytj|}Wn9tk
rN}tdj||j|jd!nX|j|_|jrtj|j}|j	o|j
s|j	r|j
rtdn|j|_n	d|_t|j
r|j
jng|_
t|j|_|jr|jnd|_dS(Ns+Invalid requirement, parse error at "{0!r}"isInvalid URL given(tREQUIREMENTtparseStringRRtformattlocRRturlparsetschemetnetloctNonetsetR tasListRR)R*(tselftrequirement_stringtreqtet
parsed_url((sP/usr/lib/python2.7/site-packages/pkg_resources/_vendor/packaging/requirements.pyt__init__Xs"!		'cC`s|jg}|jr@|jdjdjt|jn|jrb|jt|jn|jr|jdj|jn|j	r|jdj|j	ndj|S(Ns[{0}]Rs@ {0}s; {0}R$(
RR tappendR0tjointsortedR)tstrRR*(R8tparts((sP/usr/lib/python2.7/site-packages/pkg_resources/_vendor/packaging/requirements.pyt__str__ms	+			cC`sdjt|S(Ns(R0RA(R8((sP/usr/lib/python2.7/site-packages/pkg_resources/_vendor/packaging/requirements.pyt__repr__~s(RRRR=RCRD(((sP/usr/lib/python2.7/site-packages/pkg_resources/_vendor/packaging/requirements.pyR-Ks		(Et
__future__RRRtstringtretpkg_resources.extern.pyparsingRRRRRRR	R
RRtLt%pkg_resources.extern.six.moves.urllibR
R2tmarkersRRt
specifiersRRRt
ValueErrorRt
ascii_letterstdigitstALPHANUMtsuppresstLBRACKETtRBRACKETtLPARENtRPARENtCOMMAt	SEMICOLONtATtPUNCTUATIONtIDENTIFIER_ENDt
IDENTIFIERtNAMEtEXTRAtURItURLtEXTRAS_LISTtEXTRASt
_regex_strtVERBOSEt
IGNORECASEtVERSION_PEP440tVERSION_LEGACYtVERSION_ONEtFalsetVERSION_MANYt
_VERSION_SPECtsetParseActiontVERSION_SPECtMARKER_SEPERATORtMARKERtVERSION_AND_MARKERtURL_AND_MARKERtNAMED_REQUIREMENTR.tobjectR-(((sP/usr/lib/python2.7/site-packages/pkg_resources/_vendor/packaging/requirements.pytsZ"(



PKZoMM>site-packages/pkg_resources/_vendor/packaging/requirements.pycnu[
fc@`sYddlmZmZmZddlZddlZddlmZmZm	Z	m
Z
ddlmZmZm
Z
mZmZddlmZddlmZddlmZmZdd	lmZmZmZd
efdYZeejejZ edj!Z"ed
j!Z#edj!Z$edj!Z%edj!Z&edj!Z'edj!Z(edZ)e ee)e BZ*ee ee*Z+e+dZ,e+Z-eddZ.e(e.Z/e-ee&e-Z0e"e
e0e#dZ1eej2ej3ej4BZ5eej2ej3ej4BZ6e5e6AZ7ee7ee&e7ddde8dZ9e
e$e9e%e9BZ:e:j;de	e:dZ<e<j;de	edZej;de'Z=e=eZ>e<e
e>Z?e/e
e>Z@e,e
e1e@e?BZAeeAeZBd eCfd!YZDdS("i(tabsolute_importtdivisiontprint_functionN(tstringStartt	stringEndtoriginalTextFortParseException(t
ZeroOrMoretWordtOptionaltRegextCombine(tLiteral(tparsei(tMARKER_EXPRtMarker(tLegacySpecifiert	SpecifiertSpecifierSettInvalidRequirementcB`seZdZRS(sJ
    An invalid requirement was found, users should refer to PEP 508.
    (t__name__t
__module__t__doc__(((sP/usr/lib/python2.7/site-packages/pkg_resources/_vendor/packaging/requirements.pyRst[t]t(t)t,t;t@s-_.tnames[^ ]+turltextrast
joinStringtadjacentt	_raw_speccC`s
|jpdS(Nt(R#(tstltt((sP/usr/lib/python2.7/site-packages/pkg_resources/_vendor/packaging/requirements.pyt6R$t	specifiercC`s|dS(Ni((R%R&R'((sP/usr/lib/python2.7/site-packages/pkg_resources/_vendor/packaging/requirements.pyR(9R$tmarkercC`st||j|j!S(N(Rt_original_startt
_original_end(R%R&R'((sP/usr/lib/python2.7/site-packages/pkg_resources/_vendor/packaging/requirements.pyR(=R$tRequirementcB`s)eZdZdZdZdZRS(sParse a requirement.

    Parse a given requirement string into its parts, such as name, specifier,
    URL, and extras. Raises InvalidRequirement on a badly-formed requirement
    string.
    cC`sytj|}Wn9tk
rN}tdj||j|jd!nX|j|_|jrtj|j}|j	o|j
s|j	r|j
rtdn|j|_n	d|_t|j
r|j
jng|_
t|j|_|jr|jnd|_dS(Ns+Invalid requirement, parse error at "{0!r}"isInvalid URL given(tREQUIREMENTtparseStringRRtformattlocRRturlparsetschemetnetloctNonetsetR tasListRR)R*(tselftrequirement_stringtreqtet
parsed_url((sP/usr/lib/python2.7/site-packages/pkg_resources/_vendor/packaging/requirements.pyt__init__Xs"!		'cC`s|jg}|jr@|jdjdjt|jn|jrb|jt|jn|jr|jdj|jn|j	r|jdj|j	ndj|S(Ns[{0}]Rs@ {0}s; {0}R$(
RR tappendR0tjointsortedR)tstrRR*(R8tparts((sP/usr/lib/python2.7/site-packages/pkg_resources/_vendor/packaging/requirements.pyt__str__ms	+			cC`sdjt|S(Ns(R0RA(R8((sP/usr/lib/python2.7/site-packages/pkg_resources/_vendor/packaging/requirements.pyt__repr__~s(RRRR=RCRD(((sP/usr/lib/python2.7/site-packages/pkg_resources/_vendor/packaging/requirements.pyR-Ks		(Et
__future__RRRtstringtretpkg_resources.extern.pyparsingRRRRRRR	R
RRtLt%pkg_resources.extern.six.moves.urllibR
R2tmarkersRRt
specifiersRRRt
ValueErrorRt
ascii_letterstdigitstALPHANUMtsuppresstLBRACKETtRBRACKETtLPARENtRPARENtCOMMAt	SEMICOLONtATtPUNCTUATIONtIDENTIFIER_ENDt
IDENTIFIERtNAMEtEXTRAtURItURLtEXTRAS_LISTtEXTRASt
_regex_strtVERBOSEt
IGNORECASEtVERSION_PEP440tVERSION_LEGACYtVERSION_ONEtFalsetVERSION_MANYt
_VERSION_SPECtsetParseActiontVERSION_SPECtMARKER_SEPERATORtMARKERtVERSION_AND_MARKERtURL_AND_MARKERtNAMED_REQUIREMENTR.tobjectR-(((sP/usr/lib/python2.7/site-packages/pkg_resources/_vendor/packaging/requirements.pytsZ"(



PKZpxؙ..9site-packages/pkg_resources/_vendor/packaging/markers.pyonu[
fc@`suddlmZmZmZddlZddlZddlZddlZddlm	Z	m
Z
mZmZddlm
Z
mZmZmZddlmZddlmZddlmZmZd	d
ddd
gZd	efdYZd
efdYZdefdYZdefdYZdefdYZdefdYZ defdYZ!ededBedBedBedBedBedBed Bed!Bed"Bed#Bed$Bed%Bed&Bed'Bed(Bed)Bed*BZ"id#d$6d"d%6dd&6dd'6dd(6dd)6Z#e"j$d+ed,ed-Bed.Bed/Bed0Bed1Bed2Bed3BZ%e%ed4Bed5BZ&e&j$d6ed7ed8BZ'e'j$d9ed:ed;BZ(e"e'BZ)ee)e&e)Z*e*j$d<ed=j+Z,ed>j+Z-eZ.e*ee,e.e-BZ/e.e/e
e(e.>ee.eZ0d?Z1e2d@Z3idAd56dBd46ej4d36ej5d/6ej6d-6ej7d06ej8d.6ej9d26Z:dCZ;eZ<dDZ=dEZ>dFZ?dGZ@defdHYZAdS(Ii(tabsolute_importtdivisiontprint_functionN(tParseExceptiontParseResultststringStartt	stringEnd(t
ZeroOrMoretGrouptForwardtQuotedString(tLiterali(tstring_types(t	SpecifiertInvalidSpecifiert
InvalidMarkertUndefinedComparisontUndefinedEnvironmentNametMarkertdefault_environmentcB`seZdZRS(sE
    An invalid marker was found, users should refer to PEP 508.
    (t__name__t
__module__t__doc__(((sK/usr/lib/python2.7/site-packages/pkg_resources/_vendor/packaging/markers.pyRscB`seZdZRS(sP
    An invalid operation was attempted on a value that doesn't support it.
    (RRR(((sK/usr/lib/python2.7/site-packages/pkg_resources/_vendor/packaging/markers.pyRscB`seZdZRS(s\
    A name was attempted to be used that does not exist inside of the
    environment.
    (RRR(((sK/usr/lib/python2.7/site-packages/pkg_resources/_vendor/packaging/markers.pyR%stNodecB`s,eZdZdZdZdZRS(cC`s
||_dS(N(tvalue(tselfR((sK/usr/lib/python2.7/site-packages/pkg_resources/_vendor/packaging/markers.pyt__init__.scC`s
t|jS(N(tstrR(R((sK/usr/lib/python2.7/site-packages/pkg_resources/_vendor/packaging/markers.pyt__str__1scC`sdj|jjt|S(Ns<{0}({1!r})>(tformatt	__class__RR(R((sK/usr/lib/python2.7/site-packages/pkg_resources/_vendor/packaging/markers.pyt__repr__4scC`s
tdS(N(tNotImplementedError(R((sK/usr/lib/python2.7/site-packages/pkg_resources/_vendor/packaging/markers.pyt	serialize7s(RRRRRR!(((sK/usr/lib/python2.7/site-packages/pkg_resources/_vendor/packaging/markers.pyR,s			tVariablecB`seZdZRS(cC`s
t|S(N(R(R((sK/usr/lib/python2.7/site-packages/pkg_resources/_vendor/packaging/markers.pyR!=s(RRR!(((sK/usr/lib/python2.7/site-packages/pkg_resources/_vendor/packaging/markers.pyR";stValuecB`seZdZRS(cC`s
dj|S(Ns"{0}"(R(R((sK/usr/lib/python2.7/site-packages/pkg_resources/_vendor/packaging/markers.pyR!Cs(RRR!(((sK/usr/lib/python2.7/site-packages/pkg_resources/_vendor/packaging/markers.pyR#AstOpcB`seZdZRS(cC`s
t|S(N(R(R((sK/usr/lib/python2.7/site-packages/pkg_resources/_vendor/packaging/markers.pyR!Is(RRR!(((sK/usr/lib/python2.7/site-packages/pkg_resources/_vendor/packaging/markers.pyR$Gstimplementation_versiontplatform_python_implementationtimplementation_nametpython_full_versiontplatform_releasetplatform_versiontplatform_machinetplatform_systemtpython_versiontsys_platformtos_namesos.namessys.platformsplatform.versionsplatform.machinesplatform.python_implementationtpython_implementationtextracC`sttj|d|dS(Ni(R"tALIASEStget(tstltt((sK/usr/lib/python2.7/site-packages/pkg_resources/_vendor/packaging/markers.pytits===s==s>=s<=s!=s~=t>tst RARB(RCtlisttlenR@RHtjoinR!(tmarkerRGtinnerRK((sK/usr/lib/python2.7/site-packages/pkg_resources/_vendor/packaging/markers.pyRHs!
&cC`s
||kS(N((tlhstrhs((sK/usr/lib/python2.7/site-packages/pkg_resources/_vendor/packaging/markers.pyR7R8cC`s
||kS(N((RRRS((sK/usr/lib/python2.7/site-packages/pkg_resources/_vendor/packaging/markers.pyR7R8cC`sy%tdj|j|g}Wntk
r8nX|j|Stj|j}|dkrtdj	|||n|||S(NR8s#Undefined {0!r} on {1!r} and {2!r}.(
R
ROR!Rtcontainst
_operatorsR3tNoneRR(RRtopRStspectoper((sK/usr/lib/python2.7/site-packages/pkg_resources/_vendor/packaging/markers.pyt_eval_ops%

cC`s:|j|t}|tkr6tdj|n|S(Ns/{0!r} does not exist in evaluation environment.(R3t
_undefinedRR(tenvironmenttnameR((sK/usr/lib/python2.7/site-packages/pkg_resources/_vendor/packaging/markers.pyt_get_envs
c	C`sgg}x|D]}t|trB|djt||qt|tr|\}}}t|trt||j}|j}n|j}t||j}|djt|||q|dkr|jgqqWt	d|DS(NiR?cs`s|]}t|VqdS(N(tall(RJtitem((sK/usr/lib/python2.7/site-packages/pkg_resources/_vendor/packaging/markers.pys	s(
RCRMtappendt_evaluate_markersR@R"R^RRZtany(	tmarkersR\tgroupsRPRRRWRSt	lhs_valuet	rhs_value((sK/usr/lib/python2.7/site-packages/pkg_resources/_vendor/packaging/markers.pyRbs	
	 cC`sFdj|}|j}|dkrB||dt|j7}n|S(Ns{0.major}.{0.minor}.{0.micro}tfinali(RtreleaselevelRtserial(tinfotversiontkind((sK/usr/lib/python2.7/site-packages/pkg_resources/_vendor/packaging/markers.pytformat_full_versions
	cC`sttdr0ttjj}tjj}nd}d}i|d6|d6tjd6tjd6tj	d6tj
d	6tjd
6tjd6tjd6tjd
 d6tjd6S(Ntimplementationt0R8R'R%R/R+R)R,R*R(R&iR-R.(
thasattrtsysRnRoRlR]tostplatformtmachinetreleasetsystemR-R0(tiverR'((sK/usr/lib/python2.7/site-packages/pkg_resources/_vendor/packaging/markers.pyRs"






cB`s/eZdZdZdZddZRS(cC`seyttj||_WnBtk
r`}dj|||j|jd!}t|nXdS(Ns+Invalid marker: {0!r}, parse error at {1!r}i(RDtMARKERtparseStringt_markersRRtlocR(RRPteterr_str((sK/usr/lib/python2.7/site-packages/pkg_resources/_vendor/packaging/markers.pyRscC`s
t|jS(N(RHR{(R((sK/usr/lib/python2.7/site-packages/pkg_resources/_vendor/packaging/markers.pyRscC`sdjt|S(Ns(RR(R((sK/usr/lib/python2.7/site-packages/pkg_resources/_vendor/packaging/markers.pyRscC`s5t}|dk	r%|j|nt|j|S(s$Evaluate a marker.

        Return the boolean from evaluating the given marker against the
        environment. environment is an optional argument to override all or
        part of the determined environment.

        The environment is determined from the current Python process.
        N(RRVtupdateRbR{(RR\tcurrent_environment((sK/usr/lib/python2.7/site-packages/pkg_resources/_vendor/packaging/markers.pytevaluate s		N(RRRRRRVR(((sK/usr/lib/python2.7/site-packages/pkg_resources/_vendor/packaging/markers.pyRs			(Bt
__future__RRRtoperatorRsRtRrtpkg_resources.extern.pyparsingRRRRRRR	R
RtLt_compatRt
specifiersR
Rt__all__t
ValueErrorRRRtobjectRR"R#R$tVARIABLER2tsetParseActiontVERSION_CMPt	MARKER_OPtMARKER_VALUEtBOOLOPt
MARKER_VARtMARKER_ITEMtsuppresstLPARENtRPARENtMARKER_EXPRtMARKER_ATOMRyRDtTrueRHtlttleteqtnetgetgtRURZR[R^RbRnRR(((sK/usr/lib/python2.7/site-packages/pkg_resources/_vendor/packaging/markers.pyts|""	

	E

		







						PKZ1;site-packages/pkg_resources/_vendor/packaging/__about__.pycnu[
fc@`srddlmZmZmZdddddddd	gZd
ZdZdZd
ZdZ	dZ
dZde	ZdS(i(tabsolute_importtdivisiontprint_functiont	__title__t__summary__t__uri__t__version__t
__author__t	__email__t__license__t
__copyright__t	packagings"Core utilities for Python packagess!https://github.com/pypa/packagings16.8s)Donald Stufft and individual contributorssdonald@stufft.ios"BSD or Apache License, Version 2.0sCopyright 2014-2016 %sN(
t
__future__RRRt__all__RRRRRRR	R
(((sM/usr/lib/python2.7/site-packages/pkg_resources/_vendor/packaging/__about__.pytsPKZ@Fk9site-packages/pkg_resources/_vendor/packaging/_compat.pyonu[
fc@`svddlmZmZmZddlZejddkZejddkZer`efZ	n	e
fZ	dZdS(i(tabsolute_importtdivisiontprint_functionNiic`s5dffdY}tj|ddiS(s/
    Create a base class with a metaclass.
    t	metaclassc`seZfdZRS(c`s||S(N((tclstnamet
this_basestd(tbasestmeta(sK/usr/lib/python2.7/site-packages/pkg_resources/_vendor/packaging/_compat.pyt__new__s(t__name__t
__module__R
((RR	(sK/usr/lib/python2.7/site-packages/pkg_resources/_vendor/packaging/_compat.pyRsttemporary_class((ttypeR
(R	RR((RR	sK/usr/lib/python2.7/site-packages/pkg_resources/_vendor/packaging/_compat.pytwith_metaclasss(t
__future__RRRtsystversion_infotPY2tPY3tstrtstring_typest
basestringR(((sK/usr/lib/python2.7/site-packages/pkg_resources/_vendor/packaging/_compat.pyts	PKZb:cXX7site-packages/pkg_resources/_vendor/packaging/utils.pycnu[
fc@`sDddlmZmZmZddlZejdZdZdS(i(tabsolute_importtdivisiontprint_functionNs[-_.]+cC`stjd|jS(Nt-(t_canonicalize_regextsubtlower(tname((sI/usr/lib/python2.7/site-packages/pkg_resources/_vendor/packaging/utils.pytcanonicalize_names(t
__future__RRRtretcompileRR(((sI/usr/lib/python2.7/site-packages/pkg_resources/_vendor/packaging/utils.pytsPKZv9site-packages/pkg_resources/_vendor/packaging/__init__.pynu[# This file is dual licensed under the terms of the Apache License, Version
# 2.0, and the BSD License. See the LICENSE file in the root of this repository
# for complete details.
from __future__ import absolute_import, division, print_function

from .__about__ import (
    __author__, __copyright__, __email__, __license__, __summary__, __title__,
    __uri__, __version__
)

__all__ = [
    "__title__", "__summary__", "__uri__", "__version__", "__author__",
    "__email__", "__license__", "__copyright__",
]
PKZh =site-packages/pkg_resources/_vendor/packaging/_structures.pyonu[
fc@`s^ddlmZmZmZdefdYZeZdefdYZeZdS(i(tabsolute_importtdivisiontprint_functiontInfinitycB`sYeZdZdZdZdZdZdZdZdZ	dZ
RS(	cC`sdS(NR((tself((sO/usr/lib/python2.7/site-packages/pkg_resources/_vendor/packaging/_structures.pyt__repr__	scC`stt|S(N(thashtrepr(R((sO/usr/lib/python2.7/site-packages/pkg_resources/_vendor/packaging/_structures.pyt__hash__scC`stS(N(tFalse(Rtother((sO/usr/lib/python2.7/site-packages/pkg_resources/_vendor/packaging/_structures.pyt__lt__scC`stS(N(R	(RR
((sO/usr/lib/python2.7/site-packages/pkg_resources/_vendor/packaging/_structures.pyt__le__scC`st||jS(N(t
isinstancet	__class__(RR
((sO/usr/lib/python2.7/site-packages/pkg_resources/_vendor/packaging/_structures.pyt__eq__scC`st||jS(N(R
R(RR
((sO/usr/lib/python2.7/site-packages/pkg_resources/_vendor/packaging/_structures.pyt__ne__scC`stS(N(tTrue(RR
((sO/usr/lib/python2.7/site-packages/pkg_resources/_vendor/packaging/_structures.pyt__gt__scC`stS(N(R(RR
((sO/usr/lib/python2.7/site-packages/pkg_resources/_vendor/packaging/_structures.pyt__ge__scC`stS(N(tNegativeInfinity(R((sO/usr/lib/python2.7/site-packages/pkg_resources/_vendor/packaging/_structures.pyt__neg__!s(t__name__t
__module__RRRRRRRRR(((sO/usr/lib/python2.7/site-packages/pkg_resources/_vendor/packaging/_structures.pyRs								RcB`sYeZdZdZdZdZdZdZdZdZ	dZ
RS(	cC`sdS(Ns	-Infinity((R((sO/usr/lib/python2.7/site-packages/pkg_resources/_vendor/packaging/_structures.pyR)scC`stt|S(N(RR(R((sO/usr/lib/python2.7/site-packages/pkg_resources/_vendor/packaging/_structures.pyR,scC`stS(N(R(RR
((sO/usr/lib/python2.7/site-packages/pkg_resources/_vendor/packaging/_structures.pyR/scC`stS(N(R(RR
((sO/usr/lib/python2.7/site-packages/pkg_resources/_vendor/packaging/_structures.pyR2scC`st||jS(N(R
R(RR
((sO/usr/lib/python2.7/site-packages/pkg_resources/_vendor/packaging/_structures.pyR5scC`st||jS(N(R
R(RR
((sO/usr/lib/python2.7/site-packages/pkg_resources/_vendor/packaging/_structures.pyR8scC`stS(N(R	(RR
((sO/usr/lib/python2.7/site-packages/pkg_resources/_vendor/packaging/_structures.pyR;scC`stS(N(R	(RR
((sO/usr/lib/python2.7/site-packages/pkg_resources/_vendor/packaging/_structures.pyR>scC`stS(N(R(R((sO/usr/lib/python2.7/site-packages/pkg_resources/_vendor/packaging/_structures.pyRAs(RRRRRRRRRRR(((sO/usr/lib/python2.7/site-packages/pkg_resources/_vendor/packaging/_structures.pyR's								N(t
__future__RRRtobjectRR(((sO/usr/lib/python2.7/site-packages/pkg_resources/_vendor/packaging/_structures.pyts	PKZ|Eymym;site-packages/pkg_resources/_vendor/packaging/specifiers.pynu[# This file is dual licensed under the terms of the Apache License, Version
# 2.0, and the BSD License. See the LICENSE file in the root of this repository
# for complete details.
from __future__ import absolute_import, division, print_function

import abc
import functools
import itertools
import re

from ._compat import string_types, with_metaclass
from .version import Version, LegacyVersion, parse


class InvalidSpecifier(ValueError):
    """
    An invalid specifier was found, users should refer to PEP 440.
    """


class BaseSpecifier(with_metaclass(abc.ABCMeta, object)):

    @abc.abstractmethod
    def __str__(self):
        """
        Returns the str representation of this Specifier like object. This
        should be representative of the Specifier itself.
        """

    @abc.abstractmethod
    def __hash__(self):
        """
        Returns a hash value for this Specifier like object.
        """

    @abc.abstractmethod
    def __eq__(self, other):
        """
        Returns a boolean representing whether or not the two Specifier like
        objects are equal.
        """

    @abc.abstractmethod
    def __ne__(self, other):
        """
        Returns a boolean representing whether or not the two Specifier like
        objects are not equal.
        """

    @abc.abstractproperty
    def prereleases(self):
        """
        Returns whether or not pre-releases as a whole are allowed by this
        specifier.
        """

    @prereleases.setter
    def prereleases(self, value):
        """
        Sets whether or not pre-releases as a whole are allowed by this
        specifier.
        """

    @abc.abstractmethod
    def contains(self, item, prereleases=None):
        """
        Determines if the given item is contained within this specifier.
        """

    @abc.abstractmethod
    def filter(self, iterable, prereleases=None):
        """
        Takes an iterable of items and filters them so that only items which
        are contained within this specifier are allowed in it.
        """


class _IndividualSpecifier(BaseSpecifier):

    _operators = {}

    def __init__(self, spec="", prereleases=None):
        match = self._regex.search(spec)
        if not match:
            raise InvalidSpecifier("Invalid specifier: '{0}'".format(spec))

        self._spec = (
            match.group("operator").strip(),
            match.group("version").strip(),
        )

        # Store whether or not this Specifier should accept prereleases
        self._prereleases = prereleases

    def __repr__(self):
        pre = (
            ", prereleases={0!r}".format(self.prereleases)
            if self._prereleases is not None
            else ""
        )

        return "<{0}({1!r}{2})>".format(
            self.__class__.__name__,
            str(self),
            pre,
        )

    def __str__(self):
        return "{0}{1}".format(*self._spec)

    def __hash__(self):
        return hash(self._spec)

    def __eq__(self, other):
        if isinstance(other, string_types):
            try:
                other = self.__class__(other)
            except InvalidSpecifier:
                return NotImplemented
        elif not isinstance(other, self.__class__):
            return NotImplemented

        return self._spec == other._spec

    def __ne__(self, other):
        if isinstance(other, string_types):
            try:
                other = self.__class__(other)
            except InvalidSpecifier:
                return NotImplemented
        elif not isinstance(other, self.__class__):
            return NotImplemented

        return self._spec != other._spec

    def _get_operator(self, op):
        return getattr(self, "_compare_{0}".format(self._operators[op]))

    def _coerce_version(self, version):
        if not isinstance(version, (LegacyVersion, Version)):
            version = parse(version)
        return version

    @property
    def operator(self):
        return self._spec[0]

    @property
    def version(self):
        return self._spec[1]

    @property
    def prereleases(self):
        return self._prereleases

    @prereleases.setter
    def prereleases(self, value):
        self._prereleases = value

    def __contains__(self, item):
        return self.contains(item)

    def contains(self, item, prereleases=None):
        # Determine if prereleases are to be allowed or not.
        if prereleases is None:
            prereleases = self.prereleases

        # Normalize item to a Version or LegacyVersion, this allows us to have
        # a shortcut for ``"2.0" in Specifier(">=2")
        item = self._coerce_version(item)

        # Determine if we should be supporting prereleases in this specifier
        # or not, if we do not support prereleases than we can short circuit
        # logic if this version is a prereleases.
        if item.is_prerelease and not prereleases:
            return False

        # Actually do the comparison to determine if this item is contained
        # within this Specifier or not.
        return self._get_operator(self.operator)(item, self.version)

    def filter(self, iterable, prereleases=None):
        yielded = False
        found_prereleases = []

        kw = {"prereleases": prereleases if prereleases is not None else True}

        # Attempt to iterate over all the values in the iterable and if any of
        # them match, yield them.
        for version in iterable:
            parsed_version = self._coerce_version(version)

            if self.contains(parsed_version, **kw):
                # If our version is a prerelease, and we were not set to allow
                # prereleases, then we'll store it for later in case nothing
                # else matches this specifier.
                if (parsed_version.is_prerelease and not
                        (prereleases or self.prereleases)):
                    found_prereleases.append(version)
                # Either this is not a prerelease, or we should have been
                # accepting prereleases from the beginning.
                else:
                    yielded = True
                    yield version

        # Now that we've iterated over everything, determine if we've yielded
        # any values, and if we have not and we have any prereleases stored up
        # then we will go ahead and yield the prereleases.
        if not yielded and found_prereleases:
            for version in found_prereleases:
                yield version
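    # Illustrative behaviour of the pre-release fallback in filter() above
    # (a sketch, not part of the vendored source): pre-releases are only
    # yielded when nothing else matched.
    #     >>> list(Specifier(">=1.0").filter(["1.0", "2.0a1"]))
    #     ['1.0']
    #     >>> list(Specifier(">=1.0").filter(["2.0a1"]))
    #     ['2.0a1']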


class LegacySpecifier(_IndividualSpecifier):

    _regex_str = (
        r"""
        (?P<operator>(==|!=|<=|>=|<|>))
        \s*
        (?P<version>
            [^,;\s)]* # Since this is a "legacy" specifier, and the version
                      # string can be just about anything, we match everything
                      # except for whitespace, a semi-colon for marker support,
                      # a closing paren since versions can be enclosed in
                      # them, and a comma since it's a version separator.
        )
        """
    )

    _regex = re.compile(
        r"^\s*" + _regex_str + r"\s*$", re.VERBOSE | re.IGNORECASE)

    _operators = {
        "==": "equal",
        "!=": "not_equal",
        "<=": "less_than_equal",
        ">=": "greater_than_equal",
        "<": "less_than",
        ">": "greater_than",
    }

    def _coerce_version(self, version):
        if not isinstance(version, LegacyVersion):
            version = LegacyVersion(str(version))
        return version

    def _compare_equal(self, prospective, spec):
        return prospective == self._coerce_version(spec)

    def _compare_not_equal(self, prospective, spec):
        return prospective != self._coerce_version(spec)

    def _compare_less_than_equal(self, prospective, spec):
        return prospective <= self._coerce_version(spec)

    def _compare_greater_than_equal(self, prospective, spec):
        return prospective >= self._coerce_version(spec)

    def _compare_less_than(self, prospective, spec):
        return prospective < self._coerce_version(spec)

    def _compare_greater_than(self, prospective, spec):
        return prospective > self._coerce_version(spec)


def _require_version_compare(fn):
    @functools.wraps(fn)
    def wrapped(self, prospective, spec):
        if not isinstance(prospective, Version):
            return False
        return fn(self, prospective, spec)
    return wrapped


class Specifier(_IndividualSpecifier):

    _regex_str = (
        r"""
        (?P<operator>(~=|==|!=|<=|>=|<|>|===))
        (?P<version>
            (?:
                # The identity operators allow for an escape hatch that will
                # do an exact string match of the version you wish to install.
                # This will not be parsed by PEP 440 and we cannot determine
                # any semantic meaning from it. This operator is discouraged
                # but included entirely as an escape hatch.
                (?<====)  # Only match for the identity operator
                \s*
                [^\s]*    # We just match everything, except for whitespace
                          # since we are only testing for strict identity.
            )
            |
            (?:
                # The (non)equality operators allow for wild card and local
                # versions to be specified so we have to define these two
                # operators separately to enable that.
                (?<===|!=)            # Only match for equals and not equals

                \s*
                v?
                (?:[0-9]+!)?          # epoch
                [0-9]+(?:\.[0-9]+)*   # release
                (?:                   # pre release
                    [-_\.]?
                    (a|b|c|rc|alpha|beta|pre|preview)
                    [-_\.]?
                    [0-9]*
                )?
                (?:                   # post release
                    (?:-[0-9]+)|(?:[-_\.]?(post|rev|r)[-_\.]?[0-9]*)
                )?

                # You cannot use a wild card and a dev or local version
                # together so group them with a | and make them optional.
                (?:
                    (?:[-_\.]?dev[-_\.]?[0-9]*)?         # dev release
                    (?:\+[a-z0-9]+(?:[-_\.][a-z0-9]+)*)? # local
                    |
                    \.\*  # Wild card syntax of .*
                )?
            )
            |
            (?:
                # The compatible operator requires at least two digits in the
                # release segment.
                (?<=~=)               # Only match for the compatible operator

                \s*
                v?
                (?:[0-9]+!)?          # epoch
                [0-9]+(?:\.[0-9]+)+   # release  (We have a + instead of a *)
                (?:                   # pre release
                    [-_\.]?
                    (a|b|c|rc|alpha|beta|pre|preview)
                    [-_\.]?
                    [0-9]*
                )?
                (?:                                   # post release
                    (?:-[0-9]+)|(?:[-_\.]?(post|rev|r)[-_\.]?[0-9]*)
                )?
                (?:[-_\.]?dev[-_\.]?[0-9]*)?          # dev release
            )
            |
            (?:
                # All other operators only allow a sub set of what the
                # (non)equality operators do. Specifically they do not allow
                # local versions to be specified nor do they allow the prefix
                # matching wild cards.
                (?=": "greater_than_equal",
        "<": "less_than",
        ">": "greater_than",
        "===": "arbitrary",
    }

    @_require_version_compare
    def _compare_compatible(self, prospective, spec):
        # Compatible releases have an equivalent combination of >= and ==. That
        # is that ~=2.2 is equivalent to >=2.2,==2.*. This allows us to
        # implement this in terms of the other specifiers instead of
        # implementing it ourselves. The only thing we need to do is construct
        # the other specifiers.

        # We want everything but the last item in the version, but we want to
        # ignore post and dev releases and we want to treat the pre-release as
        # its own separate segment.
        prefix = ".".join(
            list(
                itertools.takewhile(
                    lambda x: (not x.startswith("post") and not
                               x.startswith("dev")),
                    _version_split(spec),
                )
            )[:-1]
        )

        # Add the prefix notation to the end of our string
        prefix += ".*"

        return (self._get_operator(">=")(prospective, spec) and
                self._get_operator("==")(prospective, prefix))

    @_require_version_compare
    def _compare_equal(self, prospective, spec):
        # We need special logic to handle prefix matching
        if spec.endswith(".*"):
            # In the case of prefix matching we want to ignore local segment.
            prospective = Version(prospective.public)
            # Split the spec out by dots, and pretend that there is an implicit
            # dot in between a release segment and a pre-release segment.
            spec = _version_split(spec[:-2])  # Remove the trailing .*

            # Split the prospective version out by dots, and pretend that there
            # is an implicit dot in between a release segment and a pre-release
            # segment.
            prospective = _version_split(str(prospective))

            # Shorten the prospective version to be the same length as the spec
            # so that we can determine if the specifier is a prefix of the
            # prospective version or not.
            prospective = prospective[:len(spec)]

            # Pad out our two sides with zeros so that they both equal the same
            # length.
            spec, prospective = _pad_version(spec, prospective)
        else:
            # Convert our spec string into a Version
            spec = Version(spec)

            # If the specifier does not have a local segment, then we want to
            # act as if the prospective version also does not have a local
            # segment.
            if not spec.local:
                prospective = Version(prospective.public)

        return prospective == spec

    @_require_version_compare
    def _compare_not_equal(self, prospective, spec):
        return not self._compare_equal(prospective, spec)

    @_require_version_compare
    def _compare_less_than_equal(self, prospective, spec):
        return prospective <= Version(spec)

    @_require_version_compare
    def _compare_greater_than_equal(self, prospective, spec):
        return prospective >= Version(spec)

    @_require_version_compare
    def _compare_less_than(self, prospective, spec):
        # Convert our spec to a Version instance, since we'll want to work with
        # it as a version.
        spec = Version(spec)

        # Check to see if the prospective version is less than the spec
        # version. If it's not we can short circuit and just return False now
        # instead of doing extra unneeded work.
        if not prospective < spec:
            return False

        # This special case is here so that, unless the specifier itself
        # includes a pre-release version, we do not accept pre-release
        # versions for the version mentioned in the specifier (e.g. <3.1 should
        # not match 3.1.dev0, but should match 3.0.dev0).
        if not spec.is_prerelease and prospective.is_prerelease:
            if Version(prospective.base_version) == Version(spec.base_version):
                return False

        # If we've gotten to here, it means that prospective version is both
        # less than the spec version *and* it's not a pre-release of the same
        # version in the spec.
        return True

    @_require_version_compare
    def _compare_greater_than(self, prospective, spec):
        # Convert our spec to a Version instance, since we'll want to work with
        # it as a version.
        spec = Version(spec)

        # Check to see if the prospective version is greater than the spec
        # version. If it's not we can short circuit and just return False now
        # instead of doing extra unneeded work.
        if not prospective > spec:
            return False

        # This special case is here so that, unless the specifier itself
        # includes a post-release version, we do not accept
        # post-release versions for the version mentioned in the specifier
        # (e.g. >3.1 should not match 3.0.post0, but should match 3.2.post0).
        if not spec.is_postrelease and prospective.is_postrelease:
            if Version(prospective.base_version) == Version(spec.base_version):
                return False

        # Ensure that we do not allow a local version of the version mentioned
        # in the specifier, which is technically greater than, to match.
        if prospective.local is not None:
            if Version(prospective.base_version) == Version(spec.base_version):
                return False

        # If we've gotten to here, it means that prospective version is both
        # greater than the spec version *and* it's not a pre-release of the
        # same version in the spec.
        return True

    def _compare_arbitrary(self, prospective, spec):
        return str(prospective).lower() == str(spec).lower()

    @property
    def prereleases(self):
        # If there is an explicit prereleases set for this, then we'll just
        # blindly use that.
        if self._prereleases is not None:
            return self._prereleases

        # Look at all of our specifiers and determine if they are inclusive
        # operators, and if they are if they are including an explicit
        # prerelease.
        operator, version = self._spec
        if operator in ["==", ">=", "<=", "~=", "==="]:
            # The == specifier can include a trailing .*, if it does we
            # want to remove before parsing.
            if operator == "==" and version.endswith(".*"):
                version = version[:-2]

            # Parse the version, and if it is a pre-release than this
            # specifier allows pre-releases.
            if parse(version).is_prerelease:
                return True

        return False

    @prereleases.setter
    def prereleases(self, value):
        self._prereleases = value


_prefix_regex = re.compile(r"^([0-9]+)((?:a|b|c|rc)[0-9]+)$")


def _version_split(version):
    result = []
    for item in version.split("."):
        match = _prefix_regex.search(item)
        if match:
            result.extend(match.groups())
        else:
            result.append(item)
    return result
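# Worked example for _version_split (a sketch, not part of the vendored
# source): pre-release suffixes are split into their own segment.
#     _version_split("1.2rc3")  ->  ["1", "2", "rc3"]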


def _pad_version(left, right):
    left_split, right_split = [], []

    # Get the release segment of our versions
    left_split.append(list(itertools.takewhile(lambda x: x.isdigit(), left)))
    right_split.append(list(itertools.takewhile(lambda x: x.isdigit(), right)))

    # Get the rest of our versions
    left_split.append(left[len(left_split[0]):])
    right_split.append(right[len(right_split[0]):])

    # Insert our padding
    left_split.insert(
        1,
        ["0"] * max(0, len(right_split[0]) - len(left_split[0])),
    )
    right_split.insert(
        1,
        ["0"] * max(0, len(left_split[0]) - len(right_split[0])),
    )

    return (
        list(itertools.chain(*left_split)),
        list(itertools.chain(*right_split)),
    )
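# Worked example for _pad_version (a sketch, not part of the vendored source):
# the shorter release segment is right-padded with "0" entries so both sides
# compare over the same number of components.
#     _pad_version(["1", "2"], ["1", "2", "3"])
#         ->  (["1", "2", "0"], ["1", "2", "3"])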


class SpecifierSet(BaseSpecifier):

    def __init__(self, specifiers="", prereleases=None):
        # Split on , to break each individual specifier into its own item, and
        # strip each item to remove leading/trailing whitespace.
        specifiers = [s.strip() for s in specifiers.split(",") if s.strip()]

        # Parse each individual specifier, attempting first to make it a
        # Specifier and falling back to a LegacySpecifier.
        parsed = set()
        for specifier in specifiers:
            try:
                parsed.add(Specifier(specifier))
            except InvalidSpecifier:
                parsed.add(LegacySpecifier(specifier))

        # Turn our parsed specifiers into a frozen set and save them for later.
        self._specs = frozenset(parsed)

        # Store our prereleases value so we can use it later to determine if
        # we accept prereleases or not.
        self._prereleases = prereleases

    def __repr__(self):
        pre = (
            ", prereleases={0!r}".format(self.prereleases)
            if self._prereleases is not None
            else ""
        )

        return "".format(str(self), pre)

    def __str__(self):
        return ",".join(sorted(str(s) for s in self._specs))

    def __hash__(self):
        return hash(self._specs)

    def __and__(self, other):
        if isinstance(other, string_types):
            other = SpecifierSet(other)
        elif not isinstance(other, SpecifierSet):
            return NotImplemented

        specifier = SpecifierSet()
        specifier._specs = frozenset(self._specs | other._specs)

        if self._prereleases is None and other._prereleases is not None:
            specifier._prereleases = other._prereleases
        elif self._prereleases is not None and other._prereleases is None:
            specifier._prereleases = self._prereleases
        elif self._prereleases == other._prereleases:
            specifier._prereleases = self._prereleases
        else:
            raise ValueError(
                "Cannot combine SpecifierSets with True and False prerelease "
                "overrides."
            )

        return specifier

    def __eq__(self, other):
        if isinstance(other, string_types):
            other = SpecifierSet(other)
        elif isinstance(other, _IndividualSpecifier):
            other = SpecifierSet(str(other))
        elif not isinstance(other, SpecifierSet):
            return NotImplemented

        return self._specs == other._specs

    def __ne__(self, other):
        if isinstance(other, string_types):
            other = SpecifierSet(other)
        elif isinstance(other, _IndividualSpecifier):
            other = SpecifierSet(str(other))
        elif not isinstance(other, SpecifierSet):
            return NotImplemented

        return self._specs != other._specs

    def __len__(self):
        return len(self._specs)

    def __iter__(self):
        return iter(self._specs)

    @property
    def prereleases(self):
        # If we have been given an explicit prerelease modifier, then we'll
        # pass that through here.
        if self._prereleases is not None:
            return self._prereleases

        # If we don't have any specifiers, and we don't have a forced value,
        # then we'll just return None since we don't know if this should have
        # pre-releases or not.
        if not self._specs:
            return None

        # Otherwise we'll see if any of the given specifiers accept
        # prereleases, if any of them do we'll return True, otherwise False.
        return any(s.prereleases for s in self._specs)

    @prereleases.setter
    def prereleases(self, value):
        self._prereleases = value

    def __contains__(self, item):
        return self.contains(item)

    def contains(self, item, prereleases=None):
        # Ensure that our item is a Version or LegacyVersion instance.
        if not isinstance(item, (LegacyVersion, Version)):
            item = parse(item)

        # Determine if we're forcing a prerelease or not, if we're not forcing
        # one for this particular filter call, then we'll use whatever the
        # SpecifierSet thinks for whether or not we should support prereleases.
        if prereleases is None:
            prereleases = self.prereleases

        # We can determine if we're going to allow pre-releases by looking to
        # see if any of the underlying items supports them. If none of them do
        # and this item is a pre-release then we do not allow it and we can
        # short circuit that here.
        # Note: This means that 1.0.dev1 would not be contained in something
        #       like >=1.0.devabc however it would be in >=1.0.debabc,>0.0.dev0
        if not prereleases and item.is_prerelease:
            return False

        # We simply dispatch to the underlying specs here to make sure that the
        # given version is contained within all of them.
        # Note: This use of all() here means that an empty set of specifiers
        #       will always return True, this is an explicit design decision.
        return all(
            s.contains(item, prereleases=prereleases)
            for s in self._specs
        )

    def filter(self, iterable, prereleases=None):
        # Determine if we're forcing a prerelease or not, if we're not forcing
        # one for this particular filter call, then we'll use whatever the
        # SpecifierSet thinks for whether or not we should support prereleases.
        if prereleases is None:
            prereleases = self.prereleases

        # If we have any specifiers, then we want to wrap our iterable in the
        # filter method for each one, this will act as a logical AND amongst
        # each specifier.
        if self._specs:
            for spec in self._specs:
                iterable = spec.filter(iterable, prereleases=bool(prereleases))
            return iterable
        # If we do not have any specifiers, then we need to have a rough filter
        # which will filter out any pre-releases, unless there are no final
        # releases, and which will filter out LegacyVersion in general.
        else:
            filtered = []
            found_prereleases = []

            for item in iterable:
                # Ensure that we have some kind of Version class for this item.
                if not isinstance(item, (LegacyVersion, Version)):
                    parsed_version = parse(item)
                else:
                    parsed_version = item

                # Filter out any item which is parsed as a LegacyVersion
                if isinstance(parsed_version, LegacyVersion):
                    continue

                # Store any item which is a pre-release for later unless we've
                # already found a final version or we are accepting prereleases
                if parsed_version.is_prerelease and not prereleases:
                    if not filtered:
                        found_prereleases.append(item)
                else:
                    filtered.append(item)

            # If we've found no items except for pre-releases, then we'll go
            # ahead and use the pre-releases
            if not filtered and found_prereleases and prereleases is None:
                return found_prereleases

            return filtered
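
# Illustrative usage sketch: the same API is exposed by the stand-alone
# ``packaging`` distribution, of which this module is a vendored copy, so an
# interactive session could look like this (a hedged example, not a test):
#
#     >>> from packaging.specifiers import SpecifierSet
#     >>> spec = SpecifierSet(">=1.0,<2.0")
#     >>> "1.5" in spec                      # strings are parsed into Version
#     True
#     >>> spec.contains("1.5rc1")            # pre-releases rejected by default
#     False
#     >>> spec.contains("1.5rc1", prereleases=True)
#     True
#     >>> list(spec.filter(["0.9", "1.2", "1.5rc1", "2.1"]))
#     ['1.2']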
PKZơ$-$-8site-packages/pkg_resources/_vendor/packaging/version.pynu[# This file is dual licensed under the terms of the Apache License, Version
# 2.0, and the BSD License. See the LICENSE file in the root of this repository
# for complete details.
from __future__ import absolute_import, division, print_function

import collections
import itertools
import re

from ._structures import Infinity


__all__ = [
    "parse", "Version", "LegacyVersion", "InvalidVersion", "VERSION_PATTERN"
]


_Version = collections.namedtuple(
    "_Version",
    ["epoch", "release", "dev", "pre", "post", "local"],
)


def parse(version):
    """
    Parse the given version string and return either a :class:`Version` object
    or a :class:`LegacyVersion` object depending on if the given version is
    a valid PEP 440 version or a legacy version.
    """
    try:
        return Version(version)
    except InvalidVersion:
        return LegacyVersion(version)
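
# A doctest-style illustration of the dispatch above (assuming this module is
# importable as ``packaging.version``; the reprs are defined further below):
#
#     >>> parse("1.0.post1")            # valid PEP 440 -> Version
#     <Version('1.0.post1')>
#     >>> parse("2.0-alpha-extra")      # not PEP 440   -> LegacyVersion
#     <LegacyVersion('2.0-alpha-extra')>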


class InvalidVersion(ValueError):
    """
    An invalid version was found, users should refer to PEP 440.
    """


class _BaseVersion(object):

    def __hash__(self):
        return hash(self._key)

    def __lt__(self, other):
        return self._compare(other, lambda s, o: s < o)

    def __le__(self, other):
        return self._compare(other, lambda s, o: s <= o)

    def __eq__(self, other):
        return self._compare(other, lambda s, o: s == o)

    def __ge__(self, other):
        return self._compare(other, lambda s, o: s >= o)

    def __gt__(self, other):
        return self._compare(other, lambda s, o: s > o)

    def __ne__(self, other):
        return self._compare(other, lambda s, o: s != o)

    def _compare(self, other, method):
        if not isinstance(other, _BaseVersion):
            return NotImplemented

        return method(self._key, other._key)


class LegacyVersion(_BaseVersion):

    def __init__(self, version):
        self._version = str(version)
        self._key = _legacy_cmpkey(self._version)

    def __str__(self):
        return self._version

    def __repr__(self):
        return "".format(repr(str(self)))

    @property
    def public(self):
        return self._version

    @property
    def base_version(self):
        return self._version

    @property
    def local(self):
        return None

    @property
    def is_prerelease(self):
        return False

    @property
    def is_postrelease(self):
        return False


_legacy_version_component_re = re.compile(
    r"(\d+ | [a-z]+ | \.| -)", re.VERBOSE,
)

_legacy_version_replacement_map = {
    "pre": "c", "preview": "c", "-": "final-", "rc": "c", "dev": "@",
}


def _parse_version_parts(s):
    for part in _legacy_version_component_re.split(s):
        part = _legacy_version_replacement_map.get(part, part)

        if not part or part == ".":
            continue

        if part[:1] in "0123456789":
            # pad for numeric comparison
            yield part.zfill(8)
        else:
            yield "*" + part

    # ensure that alpha/beta/candidate are before final
    yield "*final"


def _legacy_cmpkey(version):
    # We hardcode an epoch of -1 here. A PEP 440 version can only have an epoch
    # greater than or equal to 0. This will effectively put the LegacyVersion,
    # which uses the defacto standard originally implemented by setuptools,
    # as before all PEP 440 versions.
    epoch = -1

    # This scheme is taken from pkg_resources.parse_version of setuptools prior
    # to its adoption of the packaging library.
    parts = []
    for part in _parse_version_parts(version.lower()):
        if part.startswith("*"):
            # remove "-" before a prerelease tag
            if part < "*final":
                while parts and parts[-1] == "*final-":
                    parts.pop()

            # remove trailing zeros from each series of numeric parts
            while parts and parts[-1] == "00000000":
                parts.pop()

        parts.append(part)
    parts = tuple(parts)

    return epoch, parts

# Deliberately not anchored to the start and end of the string, to make it
# easier for 3rd party code to reuse
VERSION_PATTERN = r"""
    v?
    (?:
        (?:(?P<epoch>[0-9]+)!)?                           # epoch
        (?P<release>[0-9]+(?:\.[0-9]+)*)                  # release segment
        (?P<pre>                                          # pre-release
            [-_\.]?
            (?P<pre_l>(a|b|c|rc|alpha|beta|pre|preview))
            [-_\.]?
            (?P<pre_n>[0-9]+)?
        )?
        (?P<post>                                         # post release
            (?:-(?P<post_n1>[0-9]+))
            |
            (?:
                [-_\.]?
                (?P<post_l>post|rev|r)
                [-_\.]?
                (?P<post_n2>[0-9]+)?
            )
        )?
        (?P<dev>                                          # dev release
            [-_\.]?
            (?P<dev_l>dev)
            [-_\.]?
            (?P<dev_n>[0-9]+)?
        )?
    )
    (?:\+(?P<local>[a-z0-9]+(?:[-_\.][a-z0-9]+)*))?       # local version
"""


class Version(_BaseVersion):

    _regex = re.compile(
        r"^\s*" + VERSION_PATTERN + r"\s*$",
        re.VERBOSE | re.IGNORECASE,
    )

    def __init__(self, version):
        # Validate the version and parse it into pieces
        match = self._regex.search(version)
        if not match:
            raise InvalidVersion("Invalid version: '{0}'".format(version))

        # Store the parsed out pieces of the version
        self._version = _Version(
            epoch=int(match.group("epoch")) if match.group("epoch") else 0,
            release=tuple(int(i) for i in match.group("release").split(".")),
            pre=_parse_letter_version(
                match.group("pre_l"),
                match.group("pre_n"),
            ),
            post=_parse_letter_version(
                match.group("post_l"),
                match.group("post_n1") or match.group("post_n2"),
            ),
            dev=_parse_letter_version(
                match.group("dev_l"),
                match.group("dev_n"),
            ),
            local=_parse_local_version(match.group("local")),
        )

        # Generate a key which will be used for sorting
        self._key = _cmpkey(
            self._version.epoch,
            self._version.release,
            self._version.pre,
            self._version.post,
            self._version.dev,
            self._version.local,
        )

    def __repr__(self):
        return "".format(repr(str(self)))

    def __str__(self):
        parts = []

        # Epoch
        if self._version.epoch != 0:
            parts.append("{0}!".format(self._version.epoch))

        # Release segment
        parts.append(".".join(str(x) for x in self._version.release))

        # Pre-release
        if self._version.pre is not None:
            parts.append("".join(str(x) for x in self._version.pre))

        # Post-release
        if self._version.post is not None:
            parts.append(".post{0}".format(self._version.post[1]))

        # Development release
        if self._version.dev is not None:
            parts.append(".dev{0}".format(self._version.dev[1]))

        # Local version segment
        if self._version.local is not None:
            parts.append(
                "+{0}".format(".".join(str(x) for x in self._version.local))
            )

        return "".join(parts)

    @property
    def public(self):
        return str(self).split("+", 1)[0]

    @property
    def base_version(self):
        parts = []

        # Epoch
        if self._version.epoch != 0:
            parts.append("{0}!".format(self._version.epoch))

        # Release segment
        parts.append(".".join(str(x) for x in self._version.release))

        return "".join(parts)

    @property
    def local(self):
        version_string = str(self)
        if "+" in version_string:
            return version_string.split("+", 1)[1]

    @property
    def is_prerelease(self):
        return bool(self._version.dev or self._version.pre)

    @property
    def is_postrelease(self):
        return bool(self._version.post)


def _parse_letter_version(letter, number):
    if letter:
        # We consider there to be an implicit 0 in a pre-release if there is
        # not a numeral associated with it.
        if number is None:
            number = 0

        # We normalize any letters to their lower case form
        letter = letter.lower()

        # We consider some words to be alternate spellings of other words and
        # in those cases we want to normalize the spellings to our preferred
        # spelling.
        if letter == "alpha":
            letter = "a"
        elif letter == "beta":
            letter = "b"
        elif letter in ["c", "pre", "preview"]:
            letter = "rc"
        elif letter in ["rev", "r"]:
            letter = "post"

        return letter, int(number)
    if not letter and number:
        # We assume that if we are given a number but not a letter, then this
        # is using the implicit post release syntax (e.g. 1.0-1).
        letter = "post"

        return letter, int(number)
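
# For example: _parse_letter_version("alpha", None) == ("a", 0),
# _parse_letter_version("RC", "2") == ("rc", 2),
# _parse_letter_version("rev", "3") == ("post", 3), and
# _parse_letter_version(None, "1") == ("post", 1)  (the "1.0-1" spelling).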


_local_version_seperators = re.compile(r"[\._-]")


def _parse_local_version(local):
    """
    Takes a string like abc.1.twelve and turns it into ("abc", 1, "twelve").
    """
    if local is not None:
        return tuple(
            part.lower() if not part.isdigit() else int(part)
            for part in _local_version_seperators.split(local)
        )


def _cmpkey(epoch, release, pre, post, dev, local):
    # When we compare a release version, we want to compare it with all of the
    # trailing zeros removed. So we'll reverse the list, drop all of the now
    # leading zeros until we come to something non-zero, then take the rest,
    # re-reverse it back into the correct order, make it a tuple, and use
    # that for our sorting key.
    release = tuple(
        reversed(list(
            itertools.dropwhile(
                lambda x: x == 0,
                reversed(release),
            )
        ))
    )

    # We need to "trick" the sorting algorithm to put 1.0.dev0 before 1.0a0.
    # We'll do this by abusing the pre segment, but we _only_ want to do this
    # if there is not a pre or a post segment. If we have one of those then
    # the normal sorting rules will handle this case correctly.
    if pre is None and post is None and dev is not None:
        pre = -Infinity
    # Versions without a pre-release (except as noted above) should sort after
    # those with one.
    elif pre is None:
        pre = Infinity

    # Versions without a post segment should sort before those with one.
    if post is None:
        post = -Infinity

    # Versions without a development segment should sort after those with one.
    if dev is None:
        dev = Infinity

    if local is None:
        # Versions without a local segment should sort before those with one.
        local = -Infinity
    else:
        # Versions with a local segment need that segment parsed to implement
        # the sorting rules in PEP440.
        # - Alpha numeric segments sort before numeric segments
        # - Alpha numeric segments sort lexicographically
        # - Numeric segments sort numerically
        # - Shorter versions sort before longer versions when the prefixes
        #   match exactly
        local = tuple(
            (i, "") if isinstance(i, int) else (-Infinity, i)
            for i in local
        )

    return epoch, release, pre, post, dev, local
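
# Illustrative ordering sketch (the same behaviour is observable through the
# stand-alone ``packaging.version`` module, of which this file is a vendored
# copy):
#
#     >>> vs = [Version("1.0.post1"), Version("1.0"), Version("1.0a1"),
#     ...       Version("1.0.dev0")]
#     >>> [str(v) for v in sorted(vs)]
#     ['1.0.dev0', '1.0a1', '1.0', '1.0.post1']
#     >>> Version("1.0+local.1") > Version("1.0")   # local segment sorts last
#     True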
PKZ<)X:site-packages/pkg_resources/_vendor/packaging/__about__.pynu[# This file is dual licensed under the terms of the Apache License, Version
# 2.0, and the BSD License. See the LICENSE file in the root of this repository
# for complete details.
from __future__ import absolute_import, division, print_function

__all__ = [
    "__title__", "__summary__", "__uri__", "__version__", "__author__",
    "__email__", "__license__", "__copyright__",
]

__title__ = "packaging"
__summary__ = "Core utilities for Python packages"
__uri__ = "https://github.com/pypa/packaging"

__version__ = "16.8"

__author__ = "Donald Stufft and individual contributors"
__email__ = "donald@stufft.io"

__license__ = "BSD or Apache License, Version 2.0"
__copyright__ = "Copyright 2014-2016 %s" % __author__
PKZ=site-packages/pkg_resources/_vendor/packaging/requirements.pynu[# This file is dual licensed under the terms of the Apache License, Version
# 2.0, and the BSD License. See the LICENSE file in the root of this repository
# for complete details.
from __future__ import absolute_import, division, print_function

import string
import re

from pkg_resources.extern.pyparsing import stringStart, stringEnd, originalTextFor, ParseException
from pkg_resources.extern.pyparsing import ZeroOrMore, Word, Optional, Regex, Combine
from pkg_resources.extern.pyparsing import Literal as L  # noqa
from pkg_resources.extern.six.moves.urllib import parse as urlparse

from .markers import MARKER_EXPR, Marker
from .specifiers import LegacySpecifier, Specifier, SpecifierSet


class InvalidRequirement(ValueError):
    """
    An invalid requirement was found, users should refer to PEP 508.
    """


ALPHANUM = Word(string.ascii_letters + string.digits)

LBRACKET = L("[").suppress()
RBRACKET = L("]").suppress()
LPAREN = L("(").suppress()
RPAREN = L(")").suppress()
COMMA = L(",").suppress()
SEMICOLON = L(";").suppress()
AT = L("@").suppress()

PUNCTUATION = Word("-_.")
IDENTIFIER_END = ALPHANUM | (ZeroOrMore(PUNCTUATION) + ALPHANUM)
IDENTIFIER = Combine(ALPHANUM + ZeroOrMore(IDENTIFIER_END))

NAME = IDENTIFIER("name")
EXTRA = IDENTIFIER

URI = Regex(r'[^ ]+')("url")
URL = (AT + URI)

EXTRAS_LIST = EXTRA + ZeroOrMore(COMMA + EXTRA)
EXTRAS = (LBRACKET + Optional(EXTRAS_LIST) + RBRACKET)("extras")

VERSION_PEP440 = Regex(Specifier._regex_str, re.VERBOSE | re.IGNORECASE)
VERSION_LEGACY = Regex(LegacySpecifier._regex_str, re.VERBOSE | re.IGNORECASE)

VERSION_ONE = VERSION_PEP440 ^ VERSION_LEGACY
VERSION_MANY = Combine(VERSION_ONE + ZeroOrMore(COMMA + VERSION_ONE),
                       joinString=",", adjacent=False)("_raw_spec")
_VERSION_SPEC = Optional(((LPAREN + VERSION_MANY + RPAREN) | VERSION_MANY))
_VERSION_SPEC.setParseAction(lambda s, l, t: t._raw_spec or '')

VERSION_SPEC = originalTextFor(_VERSION_SPEC)("specifier")
VERSION_SPEC.setParseAction(lambda s, l, t: t[1])

MARKER_EXPR = originalTextFor(MARKER_EXPR())("marker")
MARKER_EXPR.setParseAction(
    lambda s, l, t: Marker(s[t._original_start:t._original_end])
)
MARKER_SEPERATOR = SEMICOLON
MARKER = MARKER_SEPERATOR + MARKER_EXPR

VERSION_AND_MARKER = VERSION_SPEC + Optional(MARKER)
URL_AND_MARKER = URL + Optional(MARKER)

NAMED_REQUIREMENT = \
    NAME + Optional(EXTRAS) + (URL_AND_MARKER | VERSION_AND_MARKER)

REQUIREMENT = stringStart + NAMED_REQUIREMENT + stringEnd


class Requirement(object):
    """Parse a requirement.

    Parse a given requirement string into its parts, such as name, specifier,
    URL, and extras. Raises InvalidRequirement on a badly-formed requirement
    string.
    """

    # TODO: Can we test whether something is contained within a requirement?
    #       If so how do we do that? Do we need to test against the _name_ of
    #       the thing as well as the version? What about the markers?
    # TODO: Can we normalize the name and extra name?

    def __init__(self, requirement_string):
        try:
            req = REQUIREMENT.parseString(requirement_string)
        except ParseException as e:
            raise InvalidRequirement(
                "Invalid requirement, parse error at \"{0!r}\"".format(
                    requirement_string[e.loc:e.loc + 8]))

        self.name = req.name
        if req.url:
            parsed_url = urlparse.urlparse(req.url)
            if not (parsed_url.scheme and parsed_url.netloc) or (
                    not parsed_url.scheme and not parsed_url.netloc):
                raise InvalidRequirement("Invalid URL given")
            self.url = req.url
        else:
            self.url = None
        self.extras = set(req.extras.asList() if req.extras else [])
        self.specifier = SpecifierSet(req.specifier)
        self.marker = req.marker if req.marker else None

    def __str__(self):
        parts = [self.name]

        if self.extras:
            parts.append("[{0}]".format(",".join(sorted(self.extras))))

        if self.specifier:
            parts.append(str(self.specifier))

        if self.url:
            parts.append("@ {0}".format(self.url))

        if self.marker:
            parts.append("; {0}".format(self.marker))

        return "".join(parts)

    def __repr__(self):
        return "".format(str(self))
PKZR#/#/9site-packages/pkg_resources/_vendor/packaging/markers.pycnu[
fc@`suddlmZmZmZddlZddlZddlZddlZddlm	Z	m
Z
mZmZddlm
Z
mZmZmZddlmZddlmZddlmZmZd	d
ddd
gZd	efdYZd
efdYZdefdYZdefdYZdefdYZdefdYZ defdYZ!ededBedBedBedBedBedBed Bed!Bed"Bed#Bed$Bed%Bed&Bed'Bed(Bed)Bed*BZ"id#d$6d"d%6dd&6dd'6dd(6dd)6Z#e"j$d+ed,ed-Bed.Bed/Bed0Bed1Bed2Bed3BZ%e%ed4Bed5BZ&e&j$d6ed7ed8BZ'e'j$d9ed:ed;BZ(e"e'BZ)ee)e&e)Z*e*j$d<ed=j+Z,ed>j+Z-eZ.e*ee,e.e-BZ/e.e/e
e(e.>ee.eZ0d?Z1e2d@Z3idAd56dBd46ej4d36ej5d/6ej6d-6ej7d06ej8d.6ej9d26Z:dCZ;eZ<dDZ=dEZ>dFZ?dGZ@defdHYZAdS(Ii(tabsolute_importtdivisiontprint_functionN(tParseExceptiontParseResultststringStartt	stringEnd(t
ZeroOrMoretGrouptForwardtQuotedString(tLiterali(tstring_types(t	SpecifiertInvalidSpecifiert
InvalidMarkertUndefinedComparisontUndefinedEnvironmentNametMarkertdefault_environmentcB`seZdZRS(sE
    An invalid marker was found, users should refer to PEP 508.
    (t__name__t
__module__t__doc__(((sK/usr/lib/python2.7/site-packages/pkg_resources/_vendor/packaging/markers.pyRscB`seZdZRS(sP
    An invalid operation was attempted on a value that doesn't support it.
    (RRR(((sK/usr/lib/python2.7/site-packages/pkg_resources/_vendor/packaging/markers.pyRscB`seZdZRS(s\
    A name was attempted to be used that does not exist inside of the
    environment.
    (RRR(((sK/usr/lib/python2.7/site-packages/pkg_resources/_vendor/packaging/markers.pyR%stNodecB`s,eZdZdZdZdZRS(cC`s
||_dS(N(tvalue(tselfR((sK/usr/lib/python2.7/site-packages/pkg_resources/_vendor/packaging/markers.pyt__init__.scC`s
t|jS(N(tstrR(R((sK/usr/lib/python2.7/site-packages/pkg_resources/_vendor/packaging/markers.pyt__str__1scC`sdj|jjt|S(Ns<{0}({1!r})>(tformatt	__class__RR(R((sK/usr/lib/python2.7/site-packages/pkg_resources/_vendor/packaging/markers.pyt__repr__4scC`s
tdS(N(tNotImplementedError(R((sK/usr/lib/python2.7/site-packages/pkg_resources/_vendor/packaging/markers.pyt	serialize7s(RRRRRR!(((sK/usr/lib/python2.7/site-packages/pkg_resources/_vendor/packaging/markers.pyR,s			tVariablecB`seZdZRS(cC`s
t|S(N(R(R((sK/usr/lib/python2.7/site-packages/pkg_resources/_vendor/packaging/markers.pyR!=s(RRR!(((sK/usr/lib/python2.7/site-packages/pkg_resources/_vendor/packaging/markers.pyR";stValuecB`seZdZRS(cC`s
dj|S(Ns"{0}"(R(R((sK/usr/lib/python2.7/site-packages/pkg_resources/_vendor/packaging/markers.pyR!Cs(RRR!(((sK/usr/lib/python2.7/site-packages/pkg_resources/_vendor/packaging/markers.pyR#AstOpcB`seZdZRS(cC`s
t|S(N(R(R((sK/usr/lib/python2.7/site-packages/pkg_resources/_vendor/packaging/markers.pyR!Is(RRR!(((sK/usr/lib/python2.7/site-packages/pkg_resources/_vendor/packaging/markers.pyR$Gstimplementation_versiontplatform_python_implementationtimplementation_nametpython_full_versiontplatform_releasetplatform_versiontplatform_machinetplatform_systemtpython_versiontsys_platformtos_namesos.namessys.platformsplatform.versionsplatform.machinesplatform.python_implementationtpython_implementationtextracC`sttj|d|dS(Ni(R"tALIASEStget(tstltt((sK/usr/lib/python2.7/site-packages/pkg_resources/_vendor/packaging/markers.pytits===s==s>=s<=s!=s~=t>tst RARB(	RCtlistR@RtAssertionErrortlenRHtjoinR!(tmarkerRGtinnerRK((sK/usr/lib/python2.7/site-packages/pkg_resources/_vendor/packaging/markers.pyRHs!
&cC`s
||kS(N((tlhstrhs((sK/usr/lib/python2.7/site-packages/pkg_resources/_vendor/packaging/markers.pyR7R8cC`s
||kS(N((RSRT((sK/usr/lib/python2.7/site-packages/pkg_resources/_vendor/packaging/markers.pyR7R8cC`sy%tdj|j|g}Wntk
r8nX|j|Stj|j}|dkrtdj	|||n|||S(NR8s#Undefined {0!r} on {1!r} and {2!r}.(
R
RPR!Rtcontainst
_operatorsR3tNoneRR(RStopRTtspectoper((sK/usr/lib/python2.7/site-packages/pkg_resources/_vendor/packaging/markers.pyt_eval_ops%

cC`s:|j|t}|tkr6tdj|n|S(Ns/{0!r} does not exist in evaluation environment.(R3t
_undefinedRR(tenvironmenttnameR((sK/usr/lib/python2.7/site-packages/pkg_resources/_vendor/packaging/markers.pyt_get_envs
c	C`s,gg}x|D]}t|tttfs4tt|tr`|djt||qt|tr|\}}}t|trt||j	}|j	}n|j	}t||j	}|djt
|||q|dkst|dkr|jgqqWtd|DS(NiR>R?cs`s|]}t|VqdS(N(tall(RJtitem((sK/usr/lib/python2.7/site-packages/pkg_resources/_vendor/packaging/markers.pys	s(R>R?(RCRMR@RRNtappendt_evaluate_markersR"R_RR[tany(	tmarkersR]tgroupsRQRSRXRTt	lhs_valuet	rhs_value((sK/usr/lib/python2.7/site-packages/pkg_resources/_vendor/packaging/markers.pyRcs"	
	 cC`sFdj|}|j}|dkrB||dt|j7}n|S(Ns{0.major}.{0.minor}.{0.micro}tfinali(RtreleaselevelRtserial(tinfotversiontkind((sK/usr/lib/python2.7/site-packages/pkg_resources/_vendor/packaging/markers.pytformat_full_versions
	cC`sttdr0ttjj}tjj}nd}d}i|d6|d6tjd6tjd6tj	d6tj
d	6tjd
6tjd6tjd6tjd
 d6tjd6S(Ntimplementationt0R8R'R%R/R+R)R,R*R(R&iR-R.(
thasattrtsysRoRpRmR^tostplatformtmachinetreleasetsystemR-R0(tiverR'((sK/usr/lib/python2.7/site-packages/pkg_resources/_vendor/packaging/markers.pyRs"






cB`s/eZdZdZdZddZRS(cC`seyttj||_WnBtk
r`}dj|||j|jd!}t|nXdS(Ns+Invalid marker: {0!r}, parse error at {1!r}i(RDtMARKERtparseStringt_markersRRtlocR(RRQteterr_str((sK/usr/lib/python2.7/site-packages/pkg_resources/_vendor/packaging/markers.pyRscC`s
t|jS(N(RHR|(R((sK/usr/lib/python2.7/site-packages/pkg_resources/_vendor/packaging/markers.pyRscC`sdjt|S(Ns(RR(R((sK/usr/lib/python2.7/site-packages/pkg_resources/_vendor/packaging/markers.pyRscC`s5t}|dk	r%|j|nt|j|S(s$Evaluate a marker.

        Return the boolean from evaluating the given marker against the
        environment. environment is an optional argument to override all or
        part of the determined environment.

        The environment is determined from the current Python process.
        N(RRWtupdateRcR|(RR]tcurrent_environment((sK/usr/lib/python2.7/site-packages/pkg_resources/_vendor/packaging/markers.pytevaluate s		N(RRRRRRWR(((sK/usr/lib/python2.7/site-packages/pkg_resources/_vendor/packaging/markers.pyRs			(Bt
__future__RRRtoperatorRtRuRstpkg_resources.extern.pyparsingRRRRRRR	R
RtLt_compatRt
specifiersR
Rt__all__t
ValueErrorRRRtobjectRR"R#R$tVARIABLER2tsetParseActiontVERSION_CMPt	MARKER_OPtMARKER_VALUEtBOOLOPt
MARKER_VARtMARKER_ITEMtsuppresstLPARENtRPARENtMARKER_EXPRtMARKER_ATOMRzRDtTrueRHtlttleteqtnetgetgtRVR[R\R_RcRoRR(((sK/usr/lib/python2.7/site-packages/pkg_resources/_vendor/packaging/markers.pyts|""	

	E

		







						PKZ1;site-packages/pkg_resources/_vendor/packaging/__about__.pyonu[
fc@`srddlmZmZmZdddddddd	gZd
ZdZdZd
ZdZ	dZ
dZde	ZdS(i(tabsolute_importtdivisiontprint_functiont	__title__t__summary__t__uri__t__version__t
__author__t	__email__t__license__t
__copyright__t	packagings"Core utilities for Python packagess!https://github.com/pypa/packagings16.8s)Donald Stufft and individual contributorssdonald@stufft.ios"BSD or Apache License, Version 2.0sCopyright 2014-2016 %sN(
t
__future__RRRt__all__RRRRRRR	R
(((sM/usr/lib/python2.7/site-packages/pkg_resources/_vendor/packaging/__about__.pytsPKZ'6site-packages/pkg_resources/_vendor/packaging/utils.pynu[# This file is dual licensed under the terms of the Apache License, Version
# 2.0, and the BSD License. See the LICENSE file in the root of this repository
# for complete details.
from __future__ import absolute_import, division, print_function

import re


_canonicalize_regex = re.compile(r"[-_.]+")


def canonicalize_name(name):
    # This is taken from PEP 503.
    return _canonicalize_regex.sub("-", name).lower()
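
# For example (PEP 503 normalization):
#
#     >>> canonicalize_name("Django_REST.framework")
#     'django-rest-framework'
#     >>> canonicalize_name("pip") == canonicalize_name("Pip")
#     True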
PKZ@Fk9site-packages/pkg_resources/_vendor/packaging/_compat.pycnu[
fc@`svddlmZmZmZddlZejddkZejddkZer`efZ	n	e
fZ	dZdS(i(tabsolute_importtdivisiontprint_functionNiic`s5dffdY}tj|ddiS(s/
    Create a base class with a metaclass.
    t	metaclassc`seZfdZRS(c`s||S(N((tclstnamet
this_basestd(tbasestmeta(sK/usr/lib/python2.7/site-packages/pkg_resources/_vendor/packaging/_compat.pyt__new__s(t__name__t
__module__R
((RR	(sK/usr/lib/python2.7/site-packages/pkg_resources/_vendor/packaging/_compat.pyRsttemporary_class((ttypeR
(R	RR((RR	sK/usr/lib/python2.7/site-packages/pkg_resources/_vendor/packaging/_compat.pytwith_metaclasss(t
__future__RRRtsystversion_infotPY2tPY3tstrtstring_typest
basestringR(((sK/usr/lib/python2.7/site-packages/pkg_resources/_vendor/packaging/_compat.pyts	PKZ%,,*site-packages/pkg_resources/py31compat.pycnu[
fc@sddlZddlZddlZedZejd	kpzd
ejkoYdknpzdejkoxd
knZerenejZdS(iNcCsJytj|Wn2tk
rE}|s<|jtjkrFqFnXdS(N(tostmakedirstOSErrorterrnotEEXIST(tpathtexist_oktexc((s</usr/lib/python2.7/site-packages/pkg_resources/py31compat.pyt_makedirs_31s
iiiiii(iii(ii(iii(ii(iii(RRtsystFalseRtversion_infotneeds_makedirsR(((s</usr/lib/python2.7/site-packages/pkg_resources/py31compat.pytsPKZgg}}/site-packages/pkg_resources/extern/__init__.pycnu[
fc@s<ddlZdddYZd	ZeeejdS(
iNtVendorImportercBsJeZdZdddZedZddZdZdZ	RS(s
    A PEP 302 meta path importer for finding optionally-vendored
    or otherwise naturally-installed packages from root_name.
    cCs7||_t||_|p-|jdd|_dS(Ntexternt_vendor(t	root_nametsettvendored_namestreplacet
vendor_pkg(tselfRRR((sA/usr/lib/python2.7/site-packages/pkg_resources/extern/__init__.pyt__init__
s	ccs|jdVdVdS(sL
        Search first the vendor package then as a natural package.
        t.tN(R(R((sA/usr/lib/python2.7/site-packages/pkg_resources/extern/__init__.pytsearch_pathscCsL|j|jd\}}}|r)dStt|j|jsHdS|S(s
        Return self when fullname starts with root_name and the
        target module is one vendored through this importer.
        R
N(t	partitionRtanytmapt
startswithR(Rtfullnametpathtroottbasettarget((sA/usr/lib/python2.7/site-packages/pkg_resources/extern/__init__.pytfind_modulescCs|j|jd\}}}x|jD]l}yR||}t|tj|}|tj|sDPKZgg}}/site-packages/pkg_resources/extern/__init__.pyonu[
fc@s<ddlZdddYZd	ZeeejdS(
iNtVendorImportercBsJeZdZdddZedZddZdZdZ	RS(s
    A PEP 302 meta path importer for finding optionally-vendored
    or otherwise naturally-installed packages from root_name.
    cCs7||_t||_|p-|jdd|_dS(Ntexternt_vendor(t	root_nametsettvendored_namestreplacet
vendor_pkg(tselfRRR((sA/usr/lib/python2.7/site-packages/pkg_resources/extern/__init__.pyt__init__
s	ccs|jdVdVdS(sL
        Search first the vendor package then as a natural package.
        t.tN(R(R((sA/usr/lib/python2.7/site-packages/pkg_resources/extern/__init__.pytsearch_pathscCsL|j|jd\}}}|r)dStt|j|jsHdS|S(s
        Return self when fullname starts with root_name and the
        target module is one vendored through this importer.
        R
N(t	partitionRtanytmapt
startswithR(Rtfullnametpathtroottbasettarget((sA/usr/lib/python2.7/site-packages/pkg_resources/extern/__init__.pytfind_modulescCs|j|jd\}}}x|jD]l}yR||}t|tj|}|tj|sDPKZuy		.site-packages/pkg_resources/extern/__init__.pynu[import sys


class VendorImporter:
    """
    A PEP 302 meta path importer for finding optionally-vendored
    or otherwise naturally-installed packages from root_name.
    """

    def __init__(self, root_name, vendored_names=(), vendor_pkg=None):
        self.root_name = root_name
        self.vendored_names = set(vendored_names)
        self.vendor_pkg = vendor_pkg or root_name.replace('extern', '_vendor')

    @property
    def search_path(self):
        """
        Search first the vendor package then as a natural package.
        """
        yield self.vendor_pkg + '.'
        yield ''

    def find_module(self, fullname, path=None):
        """
        Return self when fullname starts with root_name and the
        target module is one vendored through this importer.
        """
        root, base, target = fullname.partition(self.root_name + '.')
        if root:
            return
        if not any(map(target.startswith, self.vendored_names)):
            return
        return self

    def load_module(self, fullname):
        """
        Iterate over the search path to locate and load fullname.
        """
        root, base, target = fullname.partition(self.root_name + '.')
        for prefix in self.search_path:
            try:
                extant = prefix + target
                __import__(extant)
                mod = sys.modules[extant]
                sys.modules[fullname] = mod
                # mysterious hack:
                # Remove the reference to the extant package/module
                # on later Python versions to cause relative imports
                # in the vendor package to resolve the same modules
                # as those going through this importer.
                if sys.version_info > (3, 3):
                    del sys.modules[extant]
                return mod
            except ImportError:
                pass
        else:
            raise ImportError(
                "The '{target}' package is required; "
                "normally this is bundled with this package so if you get "
                "this warning, consult the packager of your "
                "distribution.".format(**locals())
            )

    def install(self):
        """
        Install this importer into sys.meta_path if not already present.
        """
        if self not in sys.meta_path:
            sys.meta_path.append(self)


names = 'packaging', 'pyparsing', 'six', 'appdirs'
VendorImporter(__name__, names).install()
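
# Illustrative usage sketch (assuming pkg_resources is importable): with the
# importer installed above, vendored packages resolve through this namespace,
# falling back to normally installed copies when the _vendor tree is absent.
#
#     >>> from pkg_resources.extern.packaging.version import Version
#     >>> Version("2.0") > Version("1.0")
#     True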
PKZ!Ί;;site-packages/easy_install.pycnu[
fc@s0dZedkr,ddlmZendS(sRun the EasyInstall commandt__main__i(tmainN(t__doc__t__name__tsetuptools.command.easy_installR(((s0/usr/lib/python2.7/site-packages/easy_install.pytsPKZa"site-packages/pip/status_codes.pyonu[
abc@@s8ddlmZdZdZdZdZdZdZdS(i(tabsolute_importiiiiiN(t
__future__RtSUCCESStERRORt
UNKNOWN_ERRORtVIRTUALENV_NOT_FOUNDtPREVIOUS_BUILD_DIR_ERRORtNO_MATCHES_FOUND(((s4/usr/lib/python2.7/site-packages/pip/status_codes.pytsPKZRYcYcsite-packages/pip/wheel.pycnu[
abc	@@s
dZddlmZddlZddlZddlZddlZddlZddlZddl	Z	ddl
Z	ddlZddlZddl
Z
ddlZddlZddlZddlmZddlmZddlmZddlZddlmZddlmZmZdd	lmZmZm Z dd
l!m"Z"m#Z#ddlm$Z$ddl%m&Z&m'Z'm(Z(m)Z)m*Z*dd
l+m,Z,ddl-m.Z.ddl/m0Z0ddl1m2Z2ddl3m4Z4ddl5m6Z6ddl7m8Z8dZ9ddfZ:ej;e<Z=de>fdYZ?dZ@dZAddd>dZBdZCdZDejEdejFZGd ZHd!ZIeJeKeKeLeKeJeKeKd"ZMd#ZNeNd$ZOd%ZPd&ZQd'e>fd(YZRd)e>fd*YZSdS(+sH
Support for installing and building the "wheel" binary package format.
i(tabsolute_importN(turlsafe_b64encode(tParser(tStringIO(t
expanduser(tpath_to_urlt
unpack_url(tInstallationErrortInvalidWheelFilenametUnsupportedWheel(tdistutils_schemetPIP_DELETE_MARKER_FILENAME(t
pep425tags(tcall_subprocesst
ensure_dirtcaptured_stdouttrmtreetread_chunks(topen_spinner(t
indent_log(tSETUPTOOLS_SHIM(tScriptMaker(t
pkg_resources(tcanonicalize_name(tconfigparsers.whlit
WheelCachecB@s eZdZdZdZRS(s&A cache of wheels for future installs.cC@s(|rt|nd|_||_dS(sCreate a wheel cache.

        :param cache_dir: The root of the cache.
        :param format_control: A pip.index.FormatControl object to limit
            binaries being read from the cache.
        N(RtNonet
_cache_dirt_format_control(tselft	cache_dirtformat_control((s-/usr/lib/python2.7/site-packages/pip/wheel.pyt__init__8scC@st|j||j|S(N(tcached_wheelRR(Rtlinktpackage_name((s-/usr/lib/python2.7/site-packages/pip/wheel.pyR!Bs(t__name__t
__module__t__doc__R R!(((s-/usr/lib/python2.7/site-packages/pip/wheel.pyR5s	
cC@s|jg}|jdk	rO|jdk	rO|jdj|j|jgndj|}tj|jj	}|d |dd!|dd!|dg}t
jj|d|S(s
    Return a directory to store cached wheels in for link.

    Because there are M wheels for any one sdist, we provide a directory
    to cache them in, and then consult that directory when looking up
    cache hits.

    We only insert things into the cache if they have plausible version
    numbers, so that we don't contaminate the cache with things that were not
    unique. E.g. ./package might have dozens of installs done for it and build
    a version of 0.0...and if we built and cached a wheel, we'd end up using
    the same wheel even if the source has been edited.

    :param cache_dir: The cache_dir being used by pip.
    :param link: The link of the sdist for which this will cache wheels.
    t=t#iiitwheelsN(turl_without_fragmentt	hash_nameRthashtappendtjointhashlibtsha224tencodet	hexdigesttostpath(RR"t	key_partstkey_urlthashedtparts((s-/usr/lib/python2.7/site-packages/pip/wheel.pyt_cache_for_linkGs%(c
C@ss|s
|S|s|S|jr!|S|js.|S|s8|St|}tjj||}d|kri|St||}ytj|}Wn5t	k
r}|j
t
jt
jfkr|SnXg}	x`|D]X}
yt
|
}Wntk
rqnX|jsqn|	j|j|
fqW|	s6|S|	jtjj||	dd}tjjt|S(Ntbinaryii(tis_wheeltis_artifactRtpiptindextfmt_ctl_formatsR9R3tlistdirtOSErrorterrnotENOENTtENOTDIRtWheelRt	supportedR-tsupport_index_mintsortR4R.tLinkR(
RR"RR#tcanonical_nametformatstroottwheel_namestet
candidatest
wheel_nametwheelR4((s-/usr/lib/python2.7/site-packages/pip/wheel.pyR!psF		


tsha256icC@stj|}d}t|dA}x7t|d|D]#}|t|7}|j|q:WWdQXdt|jjdj	d}||fS(s6Return (hash, length) for path using hashlib.new(algo)itrbtsizeNssha256=tlatin1R'(
R/tnewtopenRtlentupdateRtdigesttdecodetrstrip(R4talgot	blocksizethtlengthtftblockRZ((s-/usr/lib/python2.7/site-packages/pip/wheel.pytrehashscC@sItjddkr"i}d}nidd6}d}t||||S(Niitbttnewline(tsystversion_infoRW(tnametmodetnltbin((s-/usr/lib/python2.7/site-packages/pip/wheel.pytopen_for_csvs	
cC@stjj|rt|dd}|j}|jdsCtStjj	tj
}d|tjj	d}|j}WdQXt|d!}|j
||j
|WdQXtSdS(sLReplace #!python with #!/path/to/python
    Return True if file was changed.RSs#!pythons#!tasciiNtwb(R3R4tisfileRWtreadlinet
startswithtFalseRgt
executableR1tgetfilesystemencodingtlineseptreadtwritetTrue(R4tscriptt	firstlinetexenametrest((s-/usr/lib/python2.7/site-packages/pip/wheel.pyt
fix_scripts
sZ^(?P(?P.+?)(-(?P\d.+?))?)
                                \.dist-info$cC@s|jdd}xtj|D]}tj|}|r"|jd|kr"ttjj||d:}x0|D](}|j	j
}|dkrztSqzWWdQXq"q"WtS(sP
    Return True if the extracted wheel in wheeldir should go into purelib.
    t-t_RitWHEELsroot-is-purelib: trueN(
treplaceR3R@tdist_info_retmatchtgroupRWR4R.tlowerR\RyRs(Ritwheeldirtname_foldedtitemRRQtline((s-/usr/lib/python2.7/site-packages/pip/wheel.pytroot_is_purelibs!
cC@stjj|siifSt|N}t}x.|D]&}|j|j|jdq;W|jdWdQXtj	}d|_
|j|i}i}|jdrt
|jd}n|jdrt
|jd}n||fS(Ns
icS@s|S(N((toption((s-/usr/lib/python2.7/site-packages/pip/wheel.pytRetconsole_scriptstgui_scripts(R3R4texistsRWRRxtstriptseekRtRawConfigParsertoptionxformtreadfpthas_sectiontdicttitems(tfilenametfptdataRtcptconsoletgui((s-/usr/lib/python2.7/site-packages/pip/wheel.pytget_entrypointss$
	

c,@s+|s3t|d|d|d|d|d|	}nt|rO|dn
|dggjtjjtjj}itg}|rt@}
tj	+tj
dtj|d	t
d
t
WdQXWdQXtj|
jndtfd
	dd	
fd}||t
sktd
tjjdd}t|\fd}xD]}d}d}xtjtjj|D]e}d}|dkrt}|}ntjj||}||}|||td|d|qWqWtd|dt
_td0_t
_
fd}|_d_jdd}|rdtjkrd|}|j j!|ntjj"dddkr:dt#j$d |f}|j j!|ndt#j$d |f}|j j!|gD]}t%j&d |rn|^qn}x|D]
}|=qWnjd!d}|rjdtjkrd"|}|j j!|nd#t#j$d |f}|j j!|gD]}t%j&d$|r+|^q+}x|D]
}|=qVWnt'dkr|j j(gj)D]}d%|^qnt'dkr|j j(gj)D]}d%|^qit
d&6ntjjdd'}tjjdd(}t*|d)} | j+d*WdQXt,j-|||j.|tjjdd+}!tjjdd,}"t/|!d-\}#t/|"d.D}$t0j1|#}%t0j2|$}&xj|%D]b}'j|'d|'d|'d<|'dkrHt3|'d\|'d<|'d/j|ndS(s6Map archive RECORD paths to installation RECORD paths.N(tadd(tsrcfiletdestfiletmodifiedtoldpathtnewpath(tchangedt	installedtlib_dirRR(s-/usr/lib/python2.7/site-packages/pip/wheel.pytrecord_installeds

c@st|xztj|D]i\}}}|t|jtjj}tjj||}	|r|jtjjddj	drqnx|D]}
tjj|||
}|r|dkr|j	drj
|
qq|r|
j	drt|
jtj
rsBtd|ddjj
|qqWx*|D]"}|r{||r{q]ntjj||}
tjj|||}t|	tj|
|tj|
}ttdrtj||j|jfntj|
tjrTtj|
}|jtjBtjBtjB}tj||nt}|ro||}n|
||q]WqWdS(	Niis.dataRes
.dist-infos!Multiple .dist-info directories: s, tutime(RR3twalkRXtlstripR4RR.tsplittendswithR-RRrRitAssertionErrortshutiltcopyfiletstatthasattrRtst_atimetst_mtimetaccesstX_OKtst_modetS_IXUSRtS_IXGRPtS_IXOTHtchmodRs(tsourcetdesttis_basetfixertfiltertdirtsubdirstfilestbasedirtdestdirtst
destsubdirRaRRtsttpermissionsR(t	data_dirstinfo_dirRtreq(s-/usr/lib/python2.7/site-packages/pip/wheel.pytclobbersJ
+
!



s!%s .dist-info directory not foundisentry_points.txtc@s|jjdr"|d }nJ|jjdrD|d }n(|jjdrf|d }n|}|kp|kS(Ns.exeis
-script.pyis.pya(RR(Rit	matchname(RR(s-/usr/lib/python2.7/site-packages/pip/wheel.pytis_entrypoint_wrapperas


tscriptsRRRec@s^|jdkr(td|fnji|jd6|jjddd6|jd6S(NsInvalid script entry point: %s for req: %s - A callable suffix is required. Cf https://packaging.python.org/en/latest/distributing.html#console-scripts for more information.tmodulet.itimport_nametfunc(tsuffixRRtscript_templateRR(tentry(tmakerR(s-/usr/lib/python2.7/site-packages/pip/wheel.pyt_get_script_texts	
s# -*- coding: utf-8 -*-
import re
import sys

from %(module)s import %(import_name)s

if __name__ == '__main__':
    sys.argv[0] = re.sub(r'(-script\.pyw?|\.exe)?$', '', sys.argv[0])
    sys.exit(%(func)s())
R=tENSUREPIP_OPTIONSspip = t
altinstalls
pip%s = %siispip(\d(\.\d)?)?$teasy_installseasy_install = seasy_install-%s = %sseasy_install(-\d\.\d)?$s%s = %sRt	INSTALLERs
INSTALLER.pipRospip
tRECORDs
RECORD.piptrsw+i(Re(7R
RR\R3R4RtsetRtwarningstcatch_warningstfilterwarningst
compilealltcompile_dirRytloggertdebugtgetvalueRsRRR.RR@R~RRtvariantstset_modeRRtpoptenvirontextendtmaketgetRgtversiontreRRXt
make_multipleRRWRxRtmoveR-RmtcsvtreadertwriterRctwriterowRrR(,RiRRRRRLt	pycompiletschemeRRtstrip_file_prefixRt	generatedtstdoutRtep_fileRtdatadirRRtsubdirRRt
pip_scripttspectktpip_epteasy_install_scriptteasy_install_eptkvt	installerttemp_installertinstaller_filetrecordttemp_recordt	record_int
record_outRRtrowRaR_tlt
final_path((RRRRRRRRRRRRs-/usr/lib/python2.7/site-packages/pip/wheel.pytmove_wheel_filess

	

%	;
%	
$			
	#
+

+
0 

!

*c@s"tjfd}|S(Nc?@sHt}x8||D]'}||kr|j||VqqWdS(N(RR(targstkwtseenR(tfn(s-/usr/lib/python2.7/site-packages/pip/wheel.pytuniques
	
(t	functoolstwraps(R%R&((R%s-/usr/lib/python2.7/site-packages/pip/wheel.pyt_uniquescc@sddlm}tj||jd}x|D]y}tjj|j|d}|V|j	dr5tjj
|\}}|d }tjj||d}|Vq5q5WdS(s
    Yield all the uninstallation paths for dist based on RECORD-without-.pyc

    Yield paths to all the files in RECORD. For each .py file in RECORD, add
    the .pyc in the same directory.

    UninstallPathSet.add() takes care of the __pycache__ .pyc.
    i(tFakeFileRs.pyis.pycN(t	pip.utilsR*RRtget_metadata_linesR3R4R.tlocationRR(tdistR*RRR4tdnR%tbase((s-/usr/lib/python2.7/site-packages/pip/wheel.pytuninstallation_paths"s


cC@sygtjd|D]}|^qd}|jd}tj|}|dj}ttt	|j
d}|SWntSXdS(s
    Return the Wheel-Version of an extracted wheel, if possible.

    Otherwise, return False if we couldn't parse / extract it.
    iRs
Wheel-VersionRN(Rtfind_on_pathRtget_metadataRtparsestrRttupletmaptintRRs(t
source_dirtdR.t
wheel_dataR((s-/usr/lib/python2.7/site-packages/pip/wheel.pyt
wheel_version8s)cC@s|std|n|dtdkrXtd|djtt|fn1|tkrtjddjtt|ndS(s
    Raises errors or warns if called with an incompatible Wheel-Version.

    Pip should refuse to install a Wheel-Version that's a major series
    ahead of what it's compatible with (e.g 2.0 > 1.1); and warn when
    installing a version only minor version ahead (e.g 1.2 > 1.1).

    version: a 2-tuple representing a Wheel-Version (Major, Minor)
    name: name of wheel or package to raise exception about

    :raises UnsupportedWheel: when an incompatible Wheel-Version is given
    s(%s is in an unsupported or invalid wheelisB%s's Wheel-Version (%s) is not compatible with this version of pipRs*Installing from a newer Wheel-Version (%s)N(R	tVERSION_COMPATIBLER.R6tstrRtwarning(RRi((s-/usr/lib/python2.7/site-packages/pip/wheel.pytcheck_compatibilityKs
%REcB@sDeZdZejdejZdZddZ	ddZ
RS(sA wheel files^(?P(?P.+?)-(?P\d.*?))
        ((-(?P\d.*?))?-(?P.+?)-(?P.+?)-(?P.+?)
        \.whl|\.dist-info)$c@sjj|}|s+td|n|_|jdjdd_|jdjdd_|jdjd_	|jdjd_
|jd	jd_tfd
j	D_
dS(sX
        :raises InvalidWheelFilename: when the filename is invalid for a wheel
        s!%s is not a valid wheel filename.RiRRtvertpyverRtabitplatc3@s>|]4}jD]$}jD]}|||fVqqqdS(N(tabistplats(t.0txtytz(R(s-/usr/lib/python2.7/site-packages/pip/wheel.pys	sN(t
wheel_file_reRRRRRRiRRt
pyversionsRDRERt	file_tags(RRt
wheel_info((Rs-/usr/lib/python2.7/site-packages/pip/wheel.pyR ts	cC@s]|dkrtj}ng|jD]!}||kr"|j|^q"}|rYt|SdS(s"
        Return the lowest index that one of the wheel's file_tag combinations
        achieves in the supported_tags list e.g. if there are 8 supported tags,
        and one of the file tags is first in the list, then return 0.  Returns
        None is the wheel is not supported.
        N(RRtsupported_tagsRLR>tmin(Rttagstctindexes((s-/usr/lib/python2.7/site-packages/pip/wheel.pyRGs1cC@s4|dkrtj}ntt|j|jS(s'Is this wheel supported on this system?N(RRRNtboolRtintersectionRL(RRP((s-/usr/lib/python2.7/site-packages/pip/wheel.pyRFsN(R$R%R&RtcompiletVERBOSERJR RRGRF(((s-/usr/lib/python2.7/site-packages/pip/wheel.pyREhs	tWheelBuildercB@sSeZdZdddZddZdZddZdZe	dZ
RS(s#Build wheels from a RequirementSet.cC@sO||_||_|jj|_|j|_|p6g|_|pEg|_dS(N(	trequirement_settfindert_wheel_cacheRt_cache_roottwheel_download_dirt
_wheel_dirt
build_optionstglobal_options(RRXRYR^R_((s-/usr/lib/python2.7/site-packages/pip/wheel.pyR s		cC@stjd}z|j||d|ry_tj|d}tjj||}tjtjj|||t	j
d||SWqqXn|j|dSWdt
|XdS(siBuild one wheel.

        :return: The filename of the built wheel, or None if the build failed.
        s
pip-wheel-t
python_tagisStored in directory: %sN(ttempfiletmkdtempt_WheelBuilder__build_oneR3R@R4R.RRRtinfot
_clean_oneRR(RRt
output_dirR`ttempdRPt
wheel_path((s-/usr/lib/python2.7/site-packages/pip/wheel.pyt
_build_ones
cC@s'tjddt|jgt|jS(Ns-us-c(RgRtRtsetup_pytlistR_(RR((s-/usr/lib/python2.7/site-packages/pip/wheel.pyt_base_setup_argss
cC@s|j|}d|jf}t|}tjd||dd|g|j}|dk	rw|d|g7}ny't|d|jdt	d|t
SWn(|jd	tjd
|jt	SXWdQXdS(Ns#Running setup.py bdist_wheel for %ssDestination directory: %stbdist_wheels-ds--python-tagtcwdtshow_stdouttspinnerterrorsFailed building wheel for %s(
RlRiRRRR^RR
tsetup_py_dirRsRytfinishRq(RRRgR`t	base_argstspin_messageRpt
wheel_args((s-/usr/lib/python2.7/site-packages/pip/wheel.pyt__build_ones

cC@su|j|}tjd|j|ddg}y!t|d|jdttSWntjd|jtSXdS(NsRunning setup.py clean for %stcleans--allRnRos Failed cleaning build dir for %s(	RlRRdRiR
R8RsRyRq(RRRtt
clean_args((s-/usr/lib/python2.7/site-packages/pip/wheel.pyRescC@s|js|r|jst|jj|j|jjj}g}x!|D]}|jreqPn|j	r|sit
jd|jqiqP|r|j
rqP|r|jr|jjrqP|r|jrqP|r\|j}|j\}}tjj|d
|d
krqPndtjj|jjt|jkr\t
jd|jqPq\n|j|qPW|swtSt
jddjg|D]}|j^qtgg}}	x|D]{}d
}
|rAtj}
t|j|j}yt |WqJt!k
r=}t
j"d|j||	j|qqJXn	|j}|j#||d|
}
|
r2|j||r?|jrt$j%j&t$j%j|jt'rtdn|j(|j)|jj*|_tjj+t,|
|_|jj	stt-|j|jd
t.d	|jj/q?q|	j|qWWd
QX|rt
jddjg|D]}|j^qen|	rt
jd
djg|	D]}|j^qnt0|	dkS(sBuild wheels.

        :param unpack: If True, replace the sdist we built from with the
            newly built wheel, in preparation for installation.
        :return: True if all the wheels built correctly.
        s(Skipping %s, due to already being wheel.R:sCSkipping bdist_wheel for %s, due to binaries being disabled for it.s*Building wheels for collected packages: %ss, s Building wheel for %s failed: %sR`sbad source dir - missing markertsessionNsSuccessfully built %st sFailed to build %si(1R]R[RRXt
prepare_filesRYtrequirementstvaluest
constraintR;RRdRiteditableR"R<R8tsplitextR=R>tegg_info_matchesRR?RRR-RyR.RRtimplementation_tagR9RRAR>RiR3R4RRtremove_temporary_sourcetbuild_locationt	build_dirRIRRRsRzRX(RtautobuildingtreqsettbuildsetRR"R0textt
build_successt
build_failureR`RfRNt
wheel_file((s-/usr/lib/python2.7/site-packages/pip/wheel.pytbuilds
				
	&


		


			

	))N(R$R%R&RR RiRlRcReRsR(((s-/usr/lib/python2.7/site-packages/pip/wheel.pyRWs		(TR&t
__future__RRRRBR'R/tloggingR3tos.pathRRRRgRaRtbase64Rtemail.parserRtpip._vendor.sixRR=t
pip.compatRtpip.downloadRRtpip.exceptionsRRR	t
pip.locationsR
RRR+R
RRRRtpip.utils.uiRtpip.utils.loggingRtpip.utils.setuptools_buildRtpip._vendor.distlib.scriptsRtpip._vendorRtpip._vendor.packaging.utilsRtpip._vendor.six.movesRt	wheel_extR<t	getLoggerR$RtobjectRR9R!RcRmR~RURVRRRRsRRyR!R)R1R;R?RERW(((s-/usr/lib/python2.7/site-packages/pip/wheel.pytsl(	)	'	
				&			7PKZ:[EEsite-packages/pip/index.pycnu[
abc@@sdZddlmZddlZddlZddlmZddlZddlZddl	Z	ddl
Z
ddlZddlZddl
Z
ddlmZddlmZddlmZddlmZmZmZmZmZdd	lmZdd
lmZddlm Z ddl!m"Z"m#Z#m$Z$m%Z%dd
l&m'Z'm(Z(m)Z)m*Z*ddl+m,Z,m-Z-ddl.m/Z/ddl0m1Z1m2Z2m3Z3ddl4mZ5ddl6m7Z7ddl8m9Z9ddl:m;Z;ddl<m=Z=dddgZ>dddfdddfdddfdddfdde?fdddfgZ@ejAeBZCdeDfd YZEdeDfd!YZFe
jGd"e
jHd#ZId$eDfd%YZJd&eDfd'YZKedd(ZLd)ZMd*ZNd+ZOd,ZPed-d.ZQdS(/s!Routines related to PyPI, indexesi(tabsolute_importN(t
namedtuple(tparse(trequest(t	ipaddress(tcached_propertytsplitexttnormalize_pathtARCHIVE_EXTENSIONStSUPPORTED_EXTENSIONS(tRemovedInPip10Warning(t
indent_log(tcheck_requires_python(tDistributionNotFoundtBestVersionAlreadyInstalledtInvalidWheelFilenametUnsupportedWheel(tHAS_TLStis_urltpath_to_urlturl_to_path(tWheelt	wheel_ext(t
get_supported(thtml5libtrequeststsix(tcanonicalize_name(t
specifiers(tSSLError(tunescapet
FormatControltfmt_ctl_handle_mutual_excludet
PackageFinderthttpst*t	localhosts127.0.0.0/8s::1/128tfiletsshtInstallationCandidatecB@sbeZdZdZdZdZdZdZdZdZ	dZ
d	ZRS(
cC@s@||_t||_||_|j|j|jf|_dS(N(tprojectt
parse_versiontversiontlocationt_key(tselfR(R*R+((s-/usr/lib/python2.7/site-packages/pip/index.pyt__init__>s		cC@sdj|j|j|jS(Ns,(tformatR(R*R+(R-((s-/usr/lib/python2.7/site-packages/pip/index.pyt__repr__DscC@s
t|jS(N(thashR,(R-((s-/usr/lib/python2.7/site-packages/pip/index.pyt__hash__IscC@s|j|dS(NcS@s
||kS(N((tsto((s-/usr/lib/python2.7/site-packages/pip/index.pytMt(t_compare(R-tother((s-/usr/lib/python2.7/site-packages/pip/index.pyt__lt__LscC@s|j|dS(NcS@s
||kS(N((R3R4((s-/usr/lib/python2.7/site-packages/pip/index.pyR5PR6(R7(R-R8((s-/usr/lib/python2.7/site-packages/pip/index.pyt__le__OscC@s|j|dS(NcS@s
||kS(N((R3R4((s-/usr/lib/python2.7/site-packages/pip/index.pyR5SR6(R7(R-R8((s-/usr/lib/python2.7/site-packages/pip/index.pyt__eq__RscC@s|j|dS(NcS@s
||kS(N((R3R4((s-/usr/lib/python2.7/site-packages/pip/index.pyR5VR6(R7(R-R8((s-/usr/lib/python2.7/site-packages/pip/index.pyt__ge__UscC@s|j|dS(NcS@s
||kS(N((R3R4((s-/usr/lib/python2.7/site-packages/pip/index.pyR5YR6(R7(R-R8((s-/usr/lib/python2.7/site-packages/pip/index.pyt__gt__XscC@s|j|dS(NcS@s
||kS(N((R3R4((s-/usr/lib/python2.7/site-packages/pip/index.pyR5\R6(R7(R-R8((s-/usr/lib/python2.7/site-packages/pip/index.pyt__ne__[scC@s&t|tstS||j|jS(N(t
isinstanceR'tNotImplementedR,(R-R8tmethod((s-/usr/lib/python2.7/site-packages/pip/index.pyR7^s(t__name__t
__module__R.R0R2R9R:R;R<R=R>R7(((s-/usr/lib/python2.7/site-packages/pip/index.pyR'<s									c
B@seZdZededdddddd	ZdZeedZdZ	dZ
dZdZdZ
d	Zejd
ZdZdZd
ZdZdZRS(sThis finds packages.

    This is meant to match easy_install's technique for looking for
    packages, by reading pages and looking for appropriate links.
    c	C@s|d
krtdng|_xW|D]O}|jdrjt|}
tjj|
rj|
}qjn|jj|q+W||_	g|_
t|_|pt
tt|_g|r|ngD]}d|df^q|_||_||_||_td|	d|d|
d||_tsxRtj|j	|jD]5}tj|}|jdkrItjd	PqIqIWnd
S(sCreate a PackageFinder.

        :param format_control: A FormatControl object or None. Used to control
            the selection of source packages / binary packages when consulting
            the index and links.
        :param platform: A string or None. If None, searches for packages
            that are supported by the current system. Otherwise, will find
            packages that can be built on the platform passed in. These
            packages will only be downloaded for distribution: they will
            not be built locally.
        :param versions: A list of strings or None. This is passed directly
            to pep425tags.py in the get_supported() method.
        :param abi: A string or None. This is passed directly
            to pep425tags.py in the get_supported() method.
        :param implementation: A string or None. This is passed directly
            to pep425tags.py in the get_supported() method.
        s>PackageFinder() missing 1 required keyword argument: 'session't~R#tversionstplatformtabitimplR"sipip is configured with locations that require TLS/SSL, however the ssl module in Python is not available.N(tNonet	TypeErrort
find_linkst
startswithRtostpathtexiststappendt
index_urlstdependency_linkstsettlogged_linksRtformat_controltsecure_originstallow_all_prereleasestprocess_dependency_linkstsessionRt
valid_tagsRt	itertoolstchainturllib_parseturlparsetschemetloggertwarning(R-RKRQRWt
trusted_hostsRXRYRURFRERGtimplementationtlinktnew_linkthosttparsed((s-/usr/lib/python2.7/site-packages/pip/index.pyR.ls>		
		.			cC@s0|jr,tjdt|jj|ndS(NsXDependency Links processing has been deprecated and will be removed in a future release.(RXtwarningstwarnR
RRtextend(R-tlinks((s-/usr/lib/python2.7/site-packages/pip/index.pytadd_dependency_linkss
	c@sXggfd}x-|D]%}tjj|}|jd}|sX|r|rg|}nt|}tjj|r|rtjj|}xItj|D]}|tjj||qWq|rj	|qqJtjj
|r||qJtjd|q%t
|r:j	|q%tjd|q%WfS(st
        Sort locations into "files" (archives) and "urls", and return
        a pair of lists (files,urls)
        c@sLt|}tj|dtddkr;j|n
j|dS(Ntstrictis	text/html(Rt	mimetypest
guess_typetFalseRP(RNturl(tfilesturls(s-/usr/lib/python2.7/site-packages/pip/index.pyt	sort_pathssfile:s:Url '%s' is ignored: it is neither a file nor a directory.sQUrl '%s' is ignored. It is either a non-existing path or lacks a specific scheme.(RMRNRORLRtisdirtrealpathtlistdirtjoinRPtisfileR`RaR(t	locationst
expand_dirRtRqt
is_local_pathtis_file_urlRNtitem((RrRss-/usr/lib/python2.7/site-packages/pip/index.pyt_sort_locationss8
	 

cC@st|j}|jjrkt|jj}|j|jsUtd|jn|j|j}n|}|j	|fS(s[
        Function used to generate link sort key for link tuples.
        The greater the return value, the more preferred it is.
        If not finding wheels, then sorted by version only.
        If finding wheels, then the sort order is by version, then:
          1. existing installs
          2. wheels ordered via Wheel.support_index_min(self.valid_tags)
          3. source archives
        Note: it was considered to embed this logic into the Link
              comparison operators, but then different sdist links
              with the same version, would have to be considered equal
        sB%s is not a supported wheel for this platform. It can't be sorted.(
tlenRZR+tis_wheelRtfilenamet	supportedRtsupport_index_minR*(R-t	candidatetsupport_numtwheeltpri((s-/usr/lib/python2.7/site-packages/pip/index.pyt_candidate_sort_keys
c	C@stjt|}|j|j|jf}|djddd}xht|jD]Y}||dkr|ddkrqUnyt	j
t|dtj
s|ddkr|dn|djd}t	jt|dtj
r|dn|djd}WnQtk
r`|drs|dj|djkrs|ddkrsqUqsnX||krsqUn|d|dkr|ddkr|ddk	rqUntSW|jd|j|jtS(	Nit+iiR#tutf8isThe repository located at %s is not a trusted or secure host and is being ignored. If this repository is available via HTTPS it is recommended to use HTTPS instead, otherwise you may silence this warning and allow it anyways with '--trusted-host %s'.(R]R^tstrR_thostnametporttrsplittSECURE_ORIGINSRVRt
ip_addressR?Rt	text_typeRItdecodet
ip_networkt
ValueErrortlowertTrueRaRp(	R-R`R+Rgtorigintprotocolt
secure_origintaddrtnetwork((s-/usr/lib/python2.7/site-packages/pip/index.pyt_validate_secure_origins>  

 

c@s/fd}g|jD]}||^qS(sReturns the locations found via self.index_urls

        Checks the url_name on the main (first in the list) index and
        use this url_name to produce all locations
        c@sAtj|tjt}|jds=|d}n|S(Nt/(t	posixpathRxR]tquoteRtendswith(Rqtloc(tproject_name(s-/usr/lib/python2.7/site-packages/pip/index.pytmkurl_pypi_urlhs
(RQ(R-RRRq((Rs-/usr/lib/python2.7/site-packages/pip/index.pyt_get_index_urls_locationsas
c	C@s|j|}|j|\}}|j|jdt\}}|j|j\}}dtj|||D}	gtjd|Dd|Dd|DD]}
|jt|
r|
^q}tj	dt
||x|D]}tj	d|qWt|}
t|j
|
}t||
|}|jd|jD|}g}xY|j||D]E}tj	d	|jt!|j|j|j|Wd
QXqsW|jd|jD|}|rtj	dd
jg|D]}|jj^qn|j|	|}|ry|jdttj	dd
jg|D]}t|jj^qTn||||S(sFind all available InstallationCandidate for project_name

        This checks index_urls, find_links and dependency_links.
        All versions found are returned as an InstallationCandidate list.

        See _link_package_versions for details on which files are accepted
        R{cs@s|]}t|VqdS(N(tLink(t.0Rq((s-/usr/lib/python2.7/site-packages/pip/index.pys	scs@s|]}t|VqdS(N(R(RRq((s-/usr/lib/python2.7/site-packages/pip/index.pys	scs@s|]}t|VqdS(N(R(RRq((s-/usr/lib/python2.7/site-packages/pip/index.pys	scs@s|]}t|VqdS(N(R(RRq((s-/usr/lib/python2.7/site-packages/pip/index.pys	ss,%d location(s) to search for versions of %s:s* %scs@s|]}t|dVqdS(s-fN(R(RRq((s-/usr/lib/python2.7/site-packages/pip/index.pys	ssAnalyzing links from page %sNcs@s|]}t|VqdS(N(R(RRq((s-/usr/lib/python2.7/site-packages/pip/index.pys	ssdependency_links found: %ss, treversesLocal files found: %s(RRRKRRRR[R\RR`tdebugRRtfmt_ctl_formatsRUtSearcht_package_versionst
_get_pagesRqRRjRkRxR+tsortR(R-Rtindex_locationstindex_file_loct
index_url_loctfl_file_loct
fl_url_loctdep_file_loctdep_url_loctfile_locationsRdt
url_locationsR+tcanonical_nametformatstsearchtfind_links_versionst
page_versionstpagetdependency_versionsR*t
file_versionsR((s-/usr/lib/python2.7/site-packages/pip/index.pytfind_all_candidateswsZ	

	
	
 &)c
C@s"|j|j}t|jjg|D]}t|j^q%d|jrR|jnd}g|D]!}t|j|kre|^qe}|rt	|d|j
}nd}|jdk	rt|jj}nd}|dkr=|dkr=t
jd|djttd|Ddttd|nt}	|rm|dksd|j|krmt}	n|r|dk	r|	rt
jd|nt
jd||jdS|	rt
jd	|djt|dtpd
tnt
jd|jdjt|dt|jS(
sTry to find a Link matching req

        Expects req, an InstallRequirement and upgrade, a boolean
        Returns a Link if found,
        Raises DistributionNotFound or BestVersionAlreadyInstalled otherwise
        tprereleasestkeysNCould not find a version that satisfies the requirement %s (from versions: %s)s, cs@s|]}t|jVqdS(N(RR*(Rtc((s-/usr/lib/python2.7/site-packages/pip/index.pys	ss%No matching distribution found for %ssLExisting installed version (%s) is most up-to-date and satisfies requirementsUExisting installed version (%s) satisfies requirement (most up-to-date version is %s)s=Installed version (%s) is most up-to-date (past versions: %s)tnones)Using version %s (newest of versions: %s)N(RtnameRSt	specifiertfilterRR*RWRItmaxRtsatisfied_byR)R`tcriticalRxtsortedR
RpRRRR+(
R-treqtupgradetall_candidatesRtcompatible_versionstapplicable_candidatestbest_candidatetinstalled_versiontbest_installed((s-/usr/lib/python2.7/site-packages/pip/index.pytfind_requirementsd	".	
	

	cc@sct}xS|D]K}||kr(qn|j||j|}|dkrVqn|VqWdS(sp
        Yields (page, page_url) from the given locations, skipping
        locations that have errors.
        N(RStaddt	_get_pageRI(R-RzRtseenR+R((s-/usr/lib/python2.7/site-packages/pip/index.pyR-s	

s-py([123]\.?[0-9]?)$cC@sqgg}}t}xP|D]H}||kr|j||jrU|j|qe|j|qqW||S(s
        Returns elements of links in order, non-egg links first, egg links
        second, while eliminating duplicates
        (RSRtegg_fragmentRP(R-Rkteggstno_eggsRRd((s-/usr/lib/python2.7/site-packages/pip/index.pyt_sort_links@s
	

	cC@sRg}xE|j|D]4}|j||}|dk	r|j|qqW|S(N(Rt_link_package_versionsRIRP(R-RkRtresultRdtv((s-/usr/lib/python2.7/site-packages/pip/index.pyRPscC@s9||jkr5tjd|||jj|ndS(NsSkipping link %s; %s(RTR`RR(R-Rdtreason((s-/usr/lib/python2.7/site-packages/pip/index.pyt_log_skipped_linkXsc
C@sd}|jr$|j}|j}n^|j\}}|sP|j|ddS|tkrt|j|d|dSd|jkr|tkr|j|d|jdSd|j	kr|dkr|j|ddS|tkryt
|j}Wn"tk
r|j|d	dSXt
|j|jkrP|j|d
|jdS|j|jsv|j|ddS|j}nd|jkr|tkr|j|d
|jdS|st||j|}n|dkr|j|d
|jdS|jj|}|r^||j }|jd}|tjd kr^|j|ddSnyt|j}	Wn3tjk
rtjd|j|jt}	nX|	stjd||jdStjd||t |j||S(s'Return an InstallationCandidate or Nones
not a fileNsunsupported archive format: %stbinarysNo binaries permitted for %stmacosx10s.zipsmacosx10 onesinvalid wheel filenameswrong project name (not %s)s%it is not compatible with this PythontsourcesNo sources permitted for %siisPython version is incorrects3Package %s has an invalid Requires-Python entry: %ss_The package %s is incompatible with the pythonversion in use. Acceptable python versions are:%ssFound link %s, version: %s(!RIRtextRRR	RRtsuppliedRNRRRRRt	canonicalRRZR*tegg_info_matchest_py_version_reRtstarttgrouptsysRtrequires_pythonRtInvalidSpecifierR`RRR'(
R-RdRR*tegg_infoRRtmatcht
py_versiontsupport_this_python((s-/usr/lib/python2.7/site-packages/pip/index.pyR]s		


	
	
cC@stj|d|jS(NRY(tHTMLPagetget_pageRY(R-Rd((s-/usr/lib/python2.7/site-packages/pip/index.pyRsN(RBRCt__doc__RpRIR.RltstaticmethodRRRRRRRtretcompileRRRRRR(((s-/usr/lib/python2.7/site-packages/pip/index.pyR!es(	Q	
1		G		S	c					Ms([a-z0-9_.]+)-([a-z0-9_.!+-]+)cC@s|j|}|s)tjd|dS|dkrU|jd}||jdS|jdj}|jdd}|jd}|j|r|jdt	|SdSdS(sxPull the version part out of a string.

    :param egg_info: The string to parse. E.g. foo-2.1
    :param search_name: The name of the package this belongs to. None to
        infer the name. Note that this cannot unambiguously parse strings
        like foo-2-2 which might be foo, 2-2 or foo-2, 2.
    :param link: The link the string came from, for logging on failure.
    s%Could not parse version from link: %sit-t_N(
RR`RRIRtindexRtreplaceRLR(Rtsearch_nameRdt_egg_info_reRt
full_matchRtlook_for((s-/usr/lib/python2.7/site-packages/pip/index.pyRsRcB@seZdZd
dZdZeed
dZe	d
dZ
e	dZedZ
edZejdejZd	ZRS(s'Represents one page, along with its URLcC@sd}|rMd|krMtj|d\}}d|krM|d}qMn||_tj|jd|dt|_||_||_	dS(NsContent-Typetcharsetttransport_encodingtnamespaceHTMLElements(
RItcgitparse_headertcontentRRRpRgRqtheaders(R-RRqRtencodingtcontent_typetparams((s-/usr/lib/python2.7/site-packages/pip/index.pyR.s			cC@s|jS(N(Rq(R-((s-/usr/lib/python2.7/site-packages/pip/index.pyt__str__scC@s|dkrtdn|j}|jddd}ddlm}xS|jD]H}|jj|rT|t	|dkrTt
jd||dSqTWy|r|j}xht
D]]}|j|r|j|d|}	|	jjd	rPqt
jd
||	dSqqWnt
jd|tj|\}}
}}}
}|dkrtjjtj|r|jd
s|d
7}ntj|d}t
jd|n|j|did	d6dd6}|j|jjdd}	|	jjd	s$t
jd
||	dS||j|j|j}Wntjk
rk}|j|||ntk
r}d|}|j|||dt
j nWtj!k
r}|j|d||n+tj"k
r|j|d|nX|SdS(Ns9get_page() missing 1 required keyword argument: 'session't#ii(t
VcsSupports+:sCannot look at %s URL %sRYs	text/htmls,Skipping page %s because of Content-Type: %ssGetting page %sR%Rs
index.htmls# file: URL is directory, getting %sRtAcceptsmax-age=600s
Cache-ControlsContent-Typetunknowns6There was a problem confirming the ssl certificate: %stmethsconnection error: %ss	timed out(#RIRJRqtsplittpip.vcsRtschemesRRLRR`RRRRt_get_content_typeR]R^RMRNRuturllib_requestturl2pathnameturljointgettraise_for_statusRRRt	HTTPErrort_handle_failRtinfotConnectionErrortTimeout(tclsRdt
skip_archivesRYRqRR_Rtbad_extRtnetlocRNR	tquerytfragmenttresptinsttexcR((s-/usr/lib/python2.7/site-packages/pip/index.pyRsr	+	
!


cC@s,|dkrtj}n|d||dS(Ns%Could not fetch URL %s: %s - skipping(RIR`R(RdRRqR((s-/usr/lib/python2.7/site-packages/pip/index.pyR9scC@s`tj|\}}}}}|dkr.dS|j|dt}|j|jjddS(s;Get the Content-Type of the given url, using a HEAD requestthttpR"R6tallow_redirectssContent-Type(R'R"(R]turlsplittheadRRRR(RqRYR_R!RNR"R#R$((s-/usr/lib/python2.7/site-packages/pip/index.pyR@s
cC@sog|jjdD]!}|jddk	r|^q}|rd|djdrd|djdS|jSdS(Ns.//basethrefi(RgtfindallRRIRq(R-txtbases((s-/usr/lib/python2.7/site-packages/pip/index.pytbase_urlNs
!cc@sx|jjdD]}|jdr|jd}|jtj|j|}|jd}|rvt|nd}t	||d|VqqWdS(sYields all links in the pages.//aR+sdata-requires-pythonRN(
RgR,Rt
clean_linkR]RR/RRIR(R-tanchorR+Rqt	pyrequire((s-/usr/lib/python2.7/site-packages/pip/index.pyRkYss[^a-z0-9$&+,/:;=?@.#%_\\|-]cC@s|jjd|S(sMakes sure a link is fully encoded.  That is, if a ' ' shows up in
        the link, it will be rewritten to %20 (while not over-quoting
        % or other characters).cS@sdt|jdS(Ns%%%2xi(tordR(R((s-/usr/lib/python2.7/site-packages/pip/index.pyR5mR6(t	_clean_retsub(R-Rq((s-/usr/lib/python2.7/site-packages/pip/index.pyR0hs	N(RBRCRRIR.R
tclassmethodRRRRRRR/tpropertyRkRRtIR4R0(((s-/usr/lib/python2.7/site-packages/pip/index.pyRs	U
RcB@saeZdddZdZdZdZdZdZdZ	dZ
dZd	Ze
d
Ze
dZe
dZe
d
ZdZe
dZe
dZejdZe
dZejdZe
dZejdZe
dZe
dZe
dZe
dZe
dZ RS(cC@sI|jdrt|}n||_||_|r<|nd|_dS(s
        Object representing a parsed link from https://pypi.python.org/simple/*

        url:
            url of the resource pointed to (href of the link)
        comes_from:
            instance of HTMLPage where the link was found, or string.
        requires_python:
            String containing the `Requires-Python` metadata field, specified
            in PEP 345. This may be specified by a data-requires-python
            attribute in the HTML link tag, as described in PEP 503.
        s\\N(RLRRqt
comes_fromRIR(R-RqR9R((s-/usr/lib/python2.7/site-packages/pip/index.pyR.rs
		cC@sP|jrd|j}nd}|jr?d|j|j|fSt|jSdS(Ns (requires-python:%s)R6s%s (from %s)%s(RR9RqR(R-trp((s-/usr/lib/python2.7/site-packages/pip/index.pyR
s		cC@sd|S(Ns	((R-((s-/usr/lib/python2.7/site-packages/pip/index.pyR0scC@s#t|tstS|j|jkS(N(R?RR@Rq(R-R8((s-/usr/lib/python2.7/site-packages/pip/index.pyR;scC@s#t|tstS|j|jkS(N(R?RR@Rq(R-R8((s-/usr/lib/python2.7/site-packages/pip/index.pyR>scC@s#t|tstS|j|jkS(N(R?RR@Rq(R-R8((s-/usr/lib/python2.7/site-packages/pip/index.pyR9scC@s#t|tstS|j|jkS(N(R?RR@Rq(R-R8((s-/usr/lib/python2.7/site-packages/pip/index.pyR:scC@s#t|tstS|j|jkS(N(R?RR@Rq(R-R8((s-/usr/lib/python2.7/site-packages/pip/index.pyR=scC@s#t|tstS|j|jkS(N(R?RR@Rq(R-R8((s-/usr/lib/python2.7/site-packages/pip/index.pyR<scC@s
t|jS(N(R1Rq(R-((s-/usr/lib/python2.7/site-packages/pip/index.pyR2scC@sktj|j\}}}}}tj|jdp<|}tj|}|sgtd|j|S(NRsURL %r produced no filename(R]R)RqRtbasenametrstriptunquotetAssertionError(R-RR!RNR((s-/usr/lib/python2.7/site-packages/pip/index.pyRs
!cC@stj|jdS(Ni(R]R)Rq(R-((s-/usr/lib/python2.7/site-packages/pip/index.pyR_scC@stj|jdS(Ni(R]R)Rq(R-((s-/usr/lib/python2.7/site-packages/pip/index.pyR!scC@stjtj|jdS(Ni(R]R=R)Rq(R-((s-/usr/lib/python2.7/site-packages/pip/index.pyRNscC@sttj|jjdS(NR(RRR;RNR<(R-((s-/usr/lib/python2.7/site-packages/pip/index.pyRscC@s|jdS(Ni(R(R-((s-/usr/lib/python2.7/site-packages/pip/index.pyRscC@s=tj|j\}}}}}tj||||dfS(N(R]R)Rqt
urlunsplitRI(R-R_R!RNR"R#((s-/usr/lib/python2.7/site-packages/pip/index.pyturl_without_fragments!s[#&]egg=([^&]*)cC@s,|jj|j}|sdS|jdS(Ni(t_egg_fragment_reRRqRIR(R-R((s-/usr/lib/python2.7/site-packages/pip/index.pyRss[#&]subdirectory=([^&]*)cC@s,|jj|j}|sdS|jdS(Ni(t_subdirectory_fragment_reRRqRIR(R-R((s-/usr/lib/python2.7/site-packages/pip/index.pytsubdirectory_fragmentss2(sha1|sha224|sha384|sha256|sha512|md5)=([a-f0-9]+)cC@s,|jj|j}|r(|jdSdS(Ni(t_hash_reRRqRRI(R-R((s-/usr/lib/python2.7/site-packages/pip/index.pyR1s
cC@s,|jj|j}|r(|jdSdS(Ni(RDRRqRRI(R-R((s-/usr/lib/python2.7/site-packages/pip/index.pyt	hash_names
cC@s0tj|jjdddjdddS(NRiit?(RR;RqR(R-((s-/usr/lib/python2.7/site-packages/pip/index.pytshow_urlscC@s
|jtkS(N(RR(R-((s-/usr/lib/python2.7/site-packages/pip/index.pyRscC@s*ddlm}|j|jkr&tStS(s
        Determines if this points to an actual artifact (e.g. a tarball) or if
        it points to an "abstract" thing like a path or a VCS location.
        i(tvcs(RRHR_tall_schemesRpR(R-RH((s-/usr/lib/python2.7/site-packages/pip/index.pytis_artifactsN(!RBRCRIR.R
R0R;R>R9R:R=R<R2R7RR_R!RNRRR@RRRARRBRCRDR1RERGRRJ(((s-/usr/lib/python2.7/site-packages/pip/index.pyRps8	
										sno_binary only_binarycC@s|jd}xUd|krf|j|j|jd||jdd4d|krdSqWxP|D]H}|dkr|jqnnt|}|j||j|qnWdS(Nt,s:all:is:none:(RtclearRRRtdiscard(tvaluettargetR8tnewR((s-/usr/lib/python2.7/site-packages/pip/index.pyR s





cC@stddg}||jkr1|jdn]||jkrP|jdn>d|jkro|jdnd|jkr|jdnt|S(NRRs:all:(RStonly_binaryRMt	no_binaryt	frozenset(tfmt_ctlRR((s-/usr/lib/python2.7/site-packages/pip/index.pyR-scC@std|j|jdS(Ns:all:(R RRRQ(RT((s-/usr/lib/python2.7/site-packages/pip/index.pytfmt_ctl_no_binary:scC@s$t|tjdtdddS(Nsf--no-use-wheel is deprecated and will be removed in the future.  Please use --no-binary :all: instead.t
stackleveli(RURhRiR
(RT((s-/usr/lib/python2.7/site-packages/pip/index.pytfmt_ctl_no_use_wheel?s

Rssupplied canonical formats(RRt
__future__RtloggingRtcollectionsRR[RRMRRnRRhtpip._vendor.six.moves.urllibRR]RRt
pip.compatRt	pip.utilsRRRRR	tpip.utils.deprecationR
tpip.utils.loggingRtpip.utils.packagingRtpip.exceptionsR
RRRtpip.downloadRRRRt	pip.wheelRRtpip.pep425tagsRtpip._vendorRRRtpip._vendor.packaging.versionR)tpip._vendor.packaging.utilsRtpip._vendor.packagingRtpip._vendor.requests.exceptionsRtpip._vendor.distlib.compatRt__all__RIRt	getLoggerRBR`tobjectR'R!RR8RRRRR RRURWR(((s-/usr/lib/python2.7/site-packages/pip/index.pytsd("")M
		
		PKZB)00 site-packages/pip/exceptions.pycnu[
abc@@sdZddlmZddlmZmZmZddlmZde	fdYZ
de
fdYZd	e
fd
YZdefdYZ
d
efdYZde
fdYZde
fdYZde
fdYZde
fdYZdefdYZdefdYZdefdYZdefdYZdefd YZd!efd"YZd#efd$YZd%efd&YZd'efd(YZd)efd*YZd+S(,s"Exceptions used throughout packagei(tabsolute_import(tchaintgroupbytrepeat(t	iteritemstPipErrorcB@seZdZRS(sBase pip exception(t__name__t
__module__t__doc__(((s2/usr/lib/python2.7/site-packages/pip/exceptions.pyR	stInstallationErrorcB@seZdZRS(s%General exception during installation(RRR(((s2/usr/lib/python2.7/site-packages/pip/exceptions.pyR	
stUninstallationErrorcB@seZdZRS(s'General exception during uninstallation(RRR(((s2/usr/lib/python2.7/site-packages/pip/exceptions.pyR
stDistributionNotFoundcB@seZdZRS(sCRaised when a distribution cannot be found to satisfy a requirement(RRR(((s2/usr/lib/python2.7/site-packages/pip/exceptions.pyRstRequirementsFileParseErrorcB@seZdZRS(sDRaised when a general error occurs parsing a requirements file line.(RRR(((s2/usr/lib/python2.7/site-packages/pip/exceptions.pyRstBestVersionAlreadyInstalledcB@seZdZRS(sNRaised when the most up-to-date version of a package is already
    installed.(RRR(((s2/usr/lib/python2.7/site-packages/pip/exceptions.pyR
st
BadCommandcB@seZdZRS(s0Raised when virtualenv or a command is not found(RRR(((s2/usr/lib/python2.7/site-packages/pip/exceptions.pyR"stCommandErrorcB@seZdZRS(s7Raised when there is an error in command-line arguments(RRR(((s2/usr/lib/python2.7/site-packages/pip/exceptions.pyR&stPreviousBuildDirErrorcB@seZdZRS(s:Raised when there's a previous conflicting build directory(RRR(((s2/usr/lib/python2.7/site-packages/pip/exceptions.pyR*stInvalidWheelFilenamecB@seZdZRS(sInvalid wheel filename.(RRR(((s2/usr/lib/python2.7/site-packages/pip/exceptions.pyR.stUnsupportedWheelcB@seZdZRS(sUnsupported wheel.(RRR(((s2/usr/lib/python2.7/site-packages/pip/exceptions.pyR2st
HashErrorscB@s;eZdZdZdZdZdZdZRS(s:Multiple HashError instances rolled into one for reportingcC@s
g|_dS(N(terrors(tself((s2/usr/lib/python2.7/site-packages/pip/exceptions.pyt__init__9scC@s|jj|dS(N(Rtappend(Rterror((s2/usr/lib/python2.7/site-packages/pip/exceptions.pyR<scC@sg}|jjddxJt|jdD]3\}}|j|j|jd|Dq2W|r|dj|SdS(NtkeycS@s|jS(N(torder(te((s2/usr/lib/python2.7/site-packages/pip/exceptions.pytAtcS@s|jS(N(t	__class__(R((s2/usr/lib/python2.7/site-packages/pip/exceptions.pyRBRcs@s|]}|jVqdS(N(tbody(t.0R((s2/usr/lib/python2.7/site-packages/pip/exceptions.pys	Dss
(RtsortRRtheadtextendtjoin(Rtlinestclst
errors_of_cls((s2/usr/lib/python2.7/site-packages/pip/exceptions.pyt__str__?s"cC@s
t|jS(N(tboolR(R((s2/usr/lib/python2.7/site-packages/pip/exceptions.pyt__nonzero__HscC@s
|jS(N(R*(R((s2/usr/lib/python2.7/site-packages/pip/exceptions.pyt__bool__Ks(RRRRRR(R*R+(((s2/usr/lib/python2.7/site-packages/pip/exceptions.pyR6s					t	HashErrorcB@s5eZdZdZdZdZdZdZRS(s
    A failure to verify a package against known-good hashes

    :cvar order: An int sorting hash exception classes by difficulty of
        recovery (lower being harder), so the user doesn't bother fretting
        about unpinned packages when he has deeper issues, like VCS
        dependencies, to deal with. Also keeps error reports in a
        deterministic order.
    :cvar head: A section heading for display above potentially many
        exceptions of this kind
    :ivar req: The InstallRequirement that triggered this error. This is
        pasted on after the exception is instantiated, because it's not
        typically available earlier.

    RcC@sd|jS(s)Return a summary of me for display under the heading.

        This default implementation simply prints a description of the
        triggering requirement.

        :param req: The InstallRequirement that provoked this error, with
            populate_link() having already been called

        s    %s(t_requirement_name(R((s2/usr/lib/python2.7/site-packages/pip/exceptions.pyRbs
cC@sd|j|jfS(Ns%s
%s(R"R(R((s2/usr/lib/python2.7/site-packages/pip/exceptions.pyR(nscC@s|jrt|jSdS(sReturn a description of the requirement that triggered me.

        This default implementation returns long description of the req, with
        line numbers

        sunknown package(treqtstr(R((s2/usr/lib/python2.7/site-packages/pip/exceptions.pyR-qsN(	RRRtNoneR.R"RR(R-(((s2/usr/lib/python2.7/site-packages/pip/exceptions.pyR,Os		tVcsHashUnsupportedcB@seZdZdZdZRS(suA hash was provided for a version-control-system-based requirement, but
    we don't have a method for hashing those.islCan't verify hashes for these requirements because we don't have a way to hash version control repositories:(RRRRR"(((s2/usr/lib/python2.7/site-packages/pip/exceptions.pyR1{stDirectoryUrlHashUnsupportedcB@seZdZdZdZRS(suA hash was provided for a version-control-system-based requirement, but
    we don't have a method for hashing those.isUCan't verify hashes for these file:// requirements because they point to directories:(RRRRR"(((s2/usr/lib/python2.7/site-packages/pip/exceptions.pyR2stHashMissingcB@s,eZdZdZdZdZdZRS(s2A hash was needed for a requirement but is absent.iswHashes are required in --require-hashes mode, but they are missing from some requirements. Here is a list of those requirements along with the hashes their downloaded archives actually had. Add lines like these to your requirements files to prevent tampering. (If you did not enable --require-hashes manually, note that it turns on automatically when any package has a hash.)cC@s
||_dS(sq
        :param gotten_hash: The hash of the (possibly malicious) archive we
            just downloaded
        N(tgotten_hash(RR4((s2/usr/lib/python2.7/site-packages/pip/exceptions.pyRscC@siddlm}d}|jrO|jjr7|jjnt|jdd}nd|p[d||jfS(Ni(t
FAVORITE_HASHR.s    %s --hash=%s:%ssunknown package(tpip.utils.hashesR5R0R.t
original_linktgetattrR4(RR5tpackage((s2/usr/lib/python2.7/site-packages/pip/exceptions.pyRs	(RRRRR"RR(((s2/usr/lib/python2.7/site-packages/pip/exceptions.pyR3s
	tHashUnpinnedcB@seZdZdZdZRS(sPA requirement had a hash specified but was not pinned to a specific
    version.isaIn --require-hashes mode, all requirements must have their versions pinned with ==. These do not:(RRRRR"(((s2/usr/lib/python2.7/site-packages/pip/exceptions.pyR:stHashMismatchcB@s5eZdZdZdZdZdZdZRS(s
    Distribution file hash values don't match.

    :ivar package_name: The name of the package that triggered the hash
        mismatch. Feel free to write to this after the exception is raise to
        improve its error message.

    isTHESE PACKAGES DO NOT MATCH THE HASHES FROM THE REQUIREMENTS FILE. If you have updated the package versions, please update the hashes. Otherwise, examine the package contents carefully; someone may have tampered with them.cC@s||_||_dS(s
        :param allowed: A dict of algorithm names pointing to lists of allowed
            hex digests
        :param gots: A dict of algorithm names pointing to hashes we
            actually got from the files under suspicion
        N(tallowedtgots(RR<R=((s2/usr/lib/python2.7/site-packages/pip/exceptions.pyRs	cC@sd|j|jfS(Ns
    %s:
%s(R-t_hash_comparison(R((s2/usr/lib/python2.7/site-packages/pip/exceptions.pyRsc@sd}g}xjt|jD]Y\}}|||jfd|D|jd|j|jdqWdj|S(sE
        Return a comparison of actual and expected hash values.

        Example::

               Expected sha256 abcdeabcdeabcdeabcdeabcdeabcdeabcdeabcdeabcde
                            or 123451234512345123451234512345123451234512345
                    Got        bcdefbcdefbcdefbcdefbcdefbcdefbcdefbcdefbcdef

        cS@st|gtdS(Ns    or(RR(t	hash_name((s2/usr/lib/python2.7/site-packages/pip/exceptions.pythash_then_orsc3@s%|]}dt|fVqdS(s        Expected %s %sN(tnext(R R(tprefix(s2/usr/lib/python2.7/site-packages/pip/exceptions.pys	ss             Got        %s
s    ors
(RR<R#RR=t	hexdigestR$(RR@R%R?t	expecteds((RBs2/usr/lib/python2.7/site-packages/pip/exceptions.pyR>s		
(RRRRR"RRR>(((s2/usr/lib/python2.7/site-packages/pip/exceptions.pyR;s	
	tUnsupportedPythonVersioncB@seZdZRS(sMUnsupported python version according to Requires-Python package
    metadata.(RRR(((s2/usr/lib/python2.7/site-packages/pip/exceptions.pyREsN(Rt
__future__Rt	itertoolsRRRtpip._vendor.sixRt	ExceptionRR	R
RRR
RRRRRRR,R1R2R3R:R;RE(((s2/usr/lib/python2.7/site-packages/pip/exceptions.pyts,,		$	8PKZV.F~~site-packages/pip/wheel.pynu["""
Support for installing and building the "wheel" binary package format.
"""
from __future__ import absolute_import

import compileall
import csv
import errno
import functools
import hashlib
import logging
import os
import os.path
import re
import shutil
import stat
import sys
import tempfile
import warnings

from base64 import urlsafe_b64encode
from email.parser import Parser

from pip._vendor.six import StringIO

import pip
from pip.compat import expanduser
from pip.download import path_to_url, unpack_url
from pip.exceptions import (
    InstallationError, InvalidWheelFilename, UnsupportedWheel)
from pip.locations import distutils_scheme, PIP_DELETE_MARKER_FILENAME
from pip import pep425tags
from pip.utils import (
    call_subprocess, ensure_dir, captured_stdout, rmtree, read_chunks,
)
from pip.utils.ui import open_spinner
from pip.utils.logging import indent_log
from pip.utils.setuptools_build import SETUPTOOLS_SHIM
from pip._vendor.distlib.scripts import ScriptMaker
from pip._vendor import pkg_resources
from pip._vendor.packaging.utils import canonicalize_name
from pip._vendor.six.moves import configparser


wheel_ext = '.whl'

VERSION_COMPATIBLE = (1, 0)


logger = logging.getLogger(__name__)


class WheelCache(object):
    """A cache of wheels for future installs."""

    def __init__(self, cache_dir, format_control):
        """Create a wheel cache.

        :param cache_dir: The root of the cache.
        :param format_control: A pip.index.FormatControl object to limit
            binaries being read from the cache.
        """
        self._cache_dir = expanduser(cache_dir) if cache_dir else None
        self._format_control = format_control

    def cached_wheel(self, link, package_name):
        return cached_wheel(
            self._cache_dir, link, self._format_control, package_name)


def _cache_for_link(cache_dir, link):
    """
    Return a directory to store cached wheels in for link.

    Because there are M wheels for any one sdist, we provide a directory
    to cache them in, and then consult that directory when looking up
    cache hits.

    We only insert things into the cache if they have plausible version
    numbers, so that we don't contaminate the cache with things that were not
    unique. E.g. ./package might have dozens of installs done for it and build
    a version of 0.0...and if we built and cached a wheel, we'd end up using
    the same wheel even if the source has been edited.

    :param cache_dir: The cache_dir being used by pip.
    :param link: The link of the sdist for which this will cache wheels.
    """

    # We want to generate a URL to use as our cache key. We don't want to just
    # re-use the link's URL, because it might have other items in the fragment
    # that we don't care about.
    key_parts = [link.url_without_fragment]
    if link.hash_name is not None and link.hash is not None:
        key_parts.append("=".join([link.hash_name, link.hash]))
    key_url = "#".join(key_parts)

    # Encode our key URL with sha224; it has similar security properties to
    # sha256 but a shorter total output (and is thus less secure). However,
    # the difference doesn't matter for our use case here.
    hashed = hashlib.sha224(key_url.encode()).hexdigest()

    # We want to nest the directories to avoid creating a huge number of
    # top-level entries, which could exceed per-directory limits on some
    # filesystems.
    parts = [hashed[:2], hashed[2:4], hashed[4:6], hashed[6:]]

    # Inside of the base location for cached wheels, expand our parts and join
    # them all together.
    return os.path.join(cache_dir, "wheels", *parts)


def cached_wheel(cache_dir, link, format_control, package_name):
    if not cache_dir:
        return link
    if not link:
        return link
    if link.is_wheel:
        return link
    if not link.is_artifact:
        return link
    if not package_name:
        return link
    canonical_name = canonicalize_name(package_name)
    formats = pip.index.fmt_ctl_formats(format_control, canonical_name)
    if "binary" not in formats:
        return link
    root = _cache_for_link(cache_dir, link)
    try:
        wheel_names = os.listdir(root)
    except OSError as e:
        if e.errno in (errno.ENOENT, errno.ENOTDIR):
            return link
        raise
    candidates = []
    for wheel_name in wheel_names:
        try:
            wheel = Wheel(wheel_name)
        except InvalidWheelFilename:
            continue
        if not wheel.supported():
            # Built for a different python/arch/etc
            continue
        candidates.append((wheel.support_index_min(), wheel_name))
    if not candidates:
        return link
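    # Sort by support index so the wheel whose tags best match this
    # interpreter (lowest index in supported_tags) comes first.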
    candidates.sort()
    path = os.path.join(root, candidates[0][1])
    return pip.index.Link(path_to_url(path))


def rehash(path, algo='sha256', blocksize=1 << 20):
    """Return (hash, length) for path using hashlib.new(algo)"""
    h = hashlib.new(algo)
    length = 0
    with open(path, 'rb') as f:
        for block in read_chunks(f, size=blocksize):
            length += len(block)
            h.update(block)
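    # RECORD stores hashes as "<algo>=<urlsafe-base64>" with the trailing
    # '=' padding stripped.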
    digest = algo + '=' + urlsafe_b64encode(
        h.digest()
    ).decode('latin1').rstrip('=')
    return (digest, length)


def open_for_csv(name, mode):
    if sys.version_info[0] < 3:
        nl = {}
        bin = 'b'
    else:
        nl = {'newline': ''}
        bin = ''
    return open(name, mode + bin, **nl)


def fix_script(path):
    """Replace #!python with #!/path/to/python
    Return True if file was changed."""
    # XXX RECORD hashes will need to be updated
    if os.path.isfile(path):
        with open(path, 'rb') as script:
            firstline = script.readline()
            if not firstline.startswith(b'#!python'):
                return False
            exename = sys.executable.encode(sys.getfilesystemencoding())
            firstline = b'#!' + exename + os.linesep.encode("ascii")
            rest = script.read()
        with open(path, 'wb') as script:
            script.write(firstline)
            script.write(rest)
        return True

dist_info_re = re.compile(r"""^(?P<namever>(?P<name>.+?)(-(?P<ver>\d.+?))?)
                                \.dist-info$""", re.VERBOSE)


def root_is_purelib(name, wheeldir):
    """
    Return True if the extracted wheel in wheeldir should go into purelib.
    """
    name_folded = name.replace("-", "_")
    for item in os.listdir(wheeldir):
        match = dist_info_re.match(item)
        if match and match.group('name') == name_folded:
            with open(os.path.join(wheeldir, item, 'WHEEL')) as wheel:
                for line in wheel:
                    line = line.lower().rstrip()
                    if line == "root-is-purelib: true":
                        return True
    return False


def get_entrypoints(filename):
    if not os.path.exists(filename):
        return {}, {}

    # This is done because you can pass a string to entry_points wrappers which
    # means that they may or may not be valid INI files. The attempt here is to
    # strip leading and trailing whitespace in order to make them valid INI
    # files.
    with open(filename) as fp:
        data = StringIO()
        for line in fp:
            data.write(line.strip())
            data.write("\n")
        data.seek(0)

    cp = configparser.RawConfigParser()
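    # Preserve the case of entry point names; RawConfigParser lowercases
    # option names by default.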
    cp.optionxform = lambda option: option
    cp.readfp(data)

    console = {}
    gui = {}
    if cp.has_section('console_scripts'):
        console = dict(cp.items('console_scripts'))
    if cp.has_section('gui_scripts'):
        gui = dict(cp.items('gui_scripts'))
    return console, gui


def move_wheel_files(name, req, wheeldir, user=False, home=None, root=None,
                     pycompile=True, scheme=None, isolated=False, prefix=None,
                     strip_file_prefix=None):
    """Install a wheel"""

    if not scheme:
        scheme = distutils_scheme(
            name, user=user, home=home, root=root, isolated=isolated,
            prefix=prefix,
        )

    if root_is_purelib(name, wheeldir):
        lib_dir = scheme['purelib']
    else:
        lib_dir = scheme['platlib']

    info_dir = []
    data_dirs = []
    source = wheeldir.rstrip(os.path.sep) + os.path.sep

    # Record details of the files moved
    #   installed = files copied from the wheel to the destination
    #   changed = files changed while installing (scripts #! line typically)
    #   generated = files newly generated during the install (script wrappers)
    installed = {}
    changed = set()
    generated = []

    # Compile all of the pyc files that we're going to be installing
    if pycompile:
        with captured_stdout() as stdout:
            with warnings.catch_warnings():
                warnings.filterwarnings('ignore')
                compileall.compile_dir(source, force=True, quiet=True)
        logger.debug(stdout.getvalue())

    def normpath(src, p):
        return os.path.relpath(src, p).replace(os.path.sep, '/')

    def record_installed(srcfile, destfile, modified=False):
        """Map archive RECORD paths to installation RECORD paths."""
        oldpath = normpath(srcfile, wheeldir)
        newpath = normpath(destfile, lib_dir)
        installed[oldpath] = newpath
        if modified:
            changed.add(destfile)

    def clobber(source, dest, is_base, fixer=None, filter=None):
        ensure_dir(dest)  # common for the 'include' path

        for dir, subdirs, files in os.walk(source):
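            # basedir is this directory's path relative to the root of the
            # extracted wheel.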
            basedir = dir[len(source):].lstrip(os.path.sep)
            destdir = os.path.join(dest, basedir)
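            # When copying the wheel root (is_base), skip anything under a
            # top-level <pkg>.data directory; its contents are installed
            # per-scheme further below.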
            if is_base and basedir.split(os.path.sep, 1)[0].endswith('.data'):
                continue
            for s in subdirs:
                destsubdir = os.path.join(dest, basedir, s)
                if is_base and basedir == '' and destsubdir.endswith('.data'):
                    data_dirs.append(s)
                    continue
                elif (is_base and
                        s.endswith('.dist-info') and
                        canonicalize_name(s).startswith(
                            canonicalize_name(req.name))):
                    assert not info_dir, ('Multiple .dist-info directories: ' +
                                          destsubdir + ', ' +
                                          ', '.join(info_dir))
                    info_dir.append(destsubdir)
            for f in files:
                # Skip unwanted files
                if filter and filter(f):
                    continue
                srcfile = os.path.join(dir, f)
                destfile = os.path.join(dest, basedir, f)
                # directory creation is lazy and after the file filtering above
                # to ensure we don't install empty dirs; empty dirs can't be
                # uninstalled.
                ensure_dir(destdir)

                # We use copyfile (not move, copy, or copy2) to be extra sure
                # that we are not moving directories over (copyfile fails for
                # directories) as well as to ensure that we are not copying
                # over any metadata because we want more control over what
                # metadata we actually copy over.
                shutil.copyfile(srcfile, destfile)

                # Copy over the metadata for the file, currently this only
                # includes the atime and mtime.
                st = os.stat(srcfile)
                if hasattr(os, "utime"):
                    os.utime(destfile, (st.st_atime, st.st_mtime))

                # If our file is executable, then make our destination file
                # executable.
                if os.access(srcfile, os.X_OK):
                    st = os.stat(srcfile)
                    permissions = (
                        st.st_mode | stat.S_IXUSR | stat.S_IXGRP | stat.S_IXOTH
                    )
                    os.chmod(destfile, permissions)

                changed = False
                if fixer:
                    changed = fixer(destfile)
                record_installed(srcfile, destfile, changed)

    clobber(source, lib_dir, True)

    assert info_dir, "%s .dist-info directory not found" % req

    # Get the defined entry points
    ep_file = os.path.join(info_dir[0], 'entry_points.txt')
    console, gui = get_entrypoints(ep_file)

    def is_entrypoint_wrapper(name):
        # EP, EP.exe and EP-script.py are scripts generated for
        # entry point EP by setuptools
        if name.lower().endswith('.exe'):
            matchname = name[:-4]
        elif name.lower().endswith('-script.py'):
            matchname = name[:-10]
        elif name.lower().endswith(".pya"):
            matchname = name[:-4]
        else:
            matchname = name
        # Ignore setuptools-generated scripts
        return (matchname in console or matchname in gui)

    for datadir in data_dirs:
        fixer = None
        filter = None
        for subdir in os.listdir(os.path.join(wheeldir, datadir)):
            fixer = None
            if subdir == 'scripts':
                fixer = fix_script
                filter = is_entrypoint_wrapper
            source = os.path.join(wheeldir, datadir, subdir)
            dest = scheme[subdir]
            clobber(source, dest, False, fixer=fixer, filter=filter)

    maker = ScriptMaker(None, scheme['scripts'])

    # Ensure old scripts are overwritten.
    # See https://github.com/pypa/pip/issues/1800
    maker.clobber = True

    # Ensure we don't generate any variants for scripts because this is almost
    # never what somebody wants.
    # See https://bitbucket.org/pypa/distlib/issue/35/
    maker.variants = set(('', ))

    # This is required because otherwise distlib creates scripts that are not
    # executable.
    # See https://bitbucket.org/pypa/distlib/issue/32/
    maker.set_mode = True

    # Simplify the script and fix the fact that the default script swallows
    # every single stack trace.
    # See https://bitbucket.org/pypa/distlib/issue/34/
    # See https://bitbucket.org/pypa/distlib/issue/33/
    def _get_script_text(entry):
        if entry.suffix is None:
            raise InstallationError(
                "Invalid script entry point: %s for req: %s - A callable "
                "suffix is required. Cf https://packaging.python.org/en/"
                "latest/distributing.html#console-scripts for more "
                "information." % (entry, req)
            )
        return maker.script_template % {
            "module": entry.prefix,
            "import_name": entry.suffix.split(".")[0],
            "func": entry.suffix,
        }

    maker._get_script_text = _get_script_text
    maker.script_template = """# -*- coding: utf-8 -*-
import re
import sys

from %(module)s import %(import_name)s

if __name__ == '__main__':
    sys.argv[0] = re.sub(r'(-script\.pyw?|\.exe)?$', '', sys.argv[0])
    sys.exit(%(func)s())
"""

    # Special case pip and setuptools to generate versioned wrappers
    #
    # The issue is that some projects (specifically, pip and setuptools) use
    # code in setup.py to create "versioned" entry points - pip2.7 on Python
    # 2.7, pip3.3 on Python 3.3, etc. But these entry points are baked into
    # the wheel metadata at build time, and so if the wheel is installed with
    # a *different* version of Python the entry points will be wrong. The
    # correct fix for this is to enhance the metadata to be able to describe
    # such versioned entry points, but that won't happen till Metadata 2.0 is
    # available.
    # In the meantime, projects using versioned entry points will either have
    # incorrect versioned entry points, or they will not be able to distribute
    # "universal" wheels (i.e., they will need a wheel per Python version).
    #
    # Because setuptools and pip are bundled with _ensurepip and virtualenv,
    # we need to use universal wheels. So, as a stopgap until Metadata 2.0, we
    # override the versioned entry points in the wheel and generate the
    # correct ones. This code is purely a short-term measure until Metadata 2.0
    # is available.
    #
    # Adding to the level of hack in this section: in order to support
    # ensurepip, this code will look for an ``ENSUREPIP_OPTIONS`` environment
    # variable which controls which versioned scripts get installed.
    #
    # ENSUREPIP_OPTIONS=altinstall
    #   - Only pipX.Y and easy_install-X.Y will be generated and installed
    # ENSUREPIP_OPTIONS=install
    #   - pipX.Y, pipX, easy_install-X.Y will be generated and installed. Note
    #     that this case applies whenever ENSUREPIP_OPTIONS is set and is not
    #     altinstall
    # DEFAULT
    #   - The default behavior is to install pip, pipX, pipX.Y, easy_install
    #     and easy_install-X.Y.
    pip_script = console.pop('pip', None)
    if pip_script:
        if "ENSUREPIP_OPTIONS" not in os.environ:
            spec = 'pip = ' + pip_script
            generated.extend(maker.make(spec))

        if os.environ.get("ENSUREPIP_OPTIONS", "") != "altinstall":
            spec = 'pip%s = %s' % (sys.version[:1], pip_script)
            generated.extend(maker.make(spec))

        spec = 'pip%s = %s' % (sys.version[:3], pip_script)
        generated.extend(maker.make(spec))
        # Delete any other versioned pip entry points
        pip_ep = [k for k in console if re.match(r'pip(\d(\.\d)?)?$', k)]
        for k in pip_ep:
            del console[k]
    easy_install_script = console.pop('easy_install', None)
    if easy_install_script:
        if "ENSUREPIP_OPTIONS" not in os.environ:
            spec = 'easy_install = ' + easy_install_script
            generated.extend(maker.make(spec))

        spec = 'easy_install-%s = %s' % (sys.version[:3], easy_install_script)
        generated.extend(maker.make(spec))
        # Delete any other versioned easy_install entry points
        easy_install_ep = [
            k for k in console if re.match(r'easy_install(-\d\.\d)?$', k)
        ]
        for k in easy_install_ep:
            del console[k]

    # Generate the console and GUI entry points specified in the wheel
    if len(console) > 0:
        generated.extend(
            maker.make_multiple(['%s = %s' % kv for kv in console.items()])
        )
    if len(gui) > 0:
        generated.extend(
            maker.make_multiple(
                ['%s = %s' % kv for kv in gui.items()],
                {'gui': True}
            )
        )

    # Record pip as the installer
    installer = os.path.join(info_dir[0], 'INSTALLER')
    temp_installer = os.path.join(info_dir[0], 'INSTALLER.pip')
    with open(temp_installer, 'wb') as installer_file:
        installer_file.write(b'pip\n')
    shutil.move(temp_installer, installer)
    generated.append(installer)

    # Record details of all files installed
    record = os.path.join(info_dir[0], 'RECORD')
    temp_record = os.path.join(info_dir[0], 'RECORD.pip')
    with open_for_csv(record, 'r') as record_in:
        with open_for_csv(temp_record, 'w+') as record_out:
            reader = csv.reader(record_in)
            writer = csv.writer(record_out)
            for row in reader:
                row[0] = installed.pop(row[0], row[0])
                if row[0] in changed:
                    row[1], row[2] = rehash(row[0])
                writer.writerow(row)
            for f in generated:
                h, l = rehash(f)
                final_path = normpath(f, lib_dir)
                if (strip_file_prefix and
                        final_path.startswith(strip_file_prefix)):
                    final_path = os.path.join(
                        os.sep,
                        os.path.relpath(final_path, strip_file_prefix))
                writer.writerow((final_path, h, l))
            for f in installed:
                writer.writerow((installed[f], '', ''))
    shutil.move(temp_record, record)


def _unique(fn):
    @functools.wraps(fn)
    def unique(*args, **kw):
        seen = set()
        for item in fn(*args, **kw):
            if item not in seen:
                seen.add(item)
                yield item
    return unique


# TODO: this goes somewhere besides the wheel module
@_unique
def uninstallation_paths(dist):
    """
    Yield all the uninstallation paths for dist based on RECORD-without-.pyc

    Yield paths to all the files in RECORD. For each .py file in RECORD, add
    the .pyc in the same directory.

    UninstallPathSet.add() takes care of the __pycache__ .pyc.
    """
    from pip.utils import FakeFile  # circular import
    r = csv.reader(FakeFile(dist.get_metadata_lines('RECORD')))
    for row in r:
        path = os.path.join(dist.location, row[0])
        yield path
        if path.endswith('.py'):
            dn, fn = os.path.split(path)
            base = fn[:-3]
            path = os.path.join(dn, base + '.pyc')
            yield path


def wheel_version(source_dir):
    """
    Return the Wheel-Version of an extracted wheel, if possible.

    Otherwise, return False if we couldn't parse / extract it.
    """
    try:
        dist = [d for d in pkg_resources.find_on_path(None, source_dir)][0]

        wheel_data = dist.get_metadata('WHEEL')
        wheel_data = Parser().parsestr(wheel_data)

        version = wheel_data['Wheel-Version'].strip()
        version = tuple(map(int, version.split('.')))
        return version
    except Exception:
        return False


def check_compatibility(version, name):
    """
    Raises errors or warns if called with an incompatible Wheel-Version.

    Pip should refuse to install a Wheel-Version that's a major series
    ahead of what it's compatible with (e.g. 2.0 > 1.1); and warn when
    installing a version that is only a minor version ahead (e.g. 1.2 > 1.1).

    version: a 2-tuple representing a Wheel-Version (Major, Minor)
    name: name of wheel or package to raise exception about

    :raises UnsupportedWheel: when an incompatible Wheel-Version is given
    """
    if not version:
        raise UnsupportedWheel(
            "%s is in an unsupported or invalid wheel" % name
        )
    if version[0] > VERSION_COMPATIBLE[0]:
        raise UnsupportedWheel(
            "%s's Wheel-Version (%s) is not compatible with this version "
            "of pip" % (name, '.'.join(map(str, version)))
        )
    elif version > VERSION_COMPATIBLE:
        logger.warning(
            'Installing from a newer Wheel-Version (%s)',
            '.'.join(map(str, version)),
        )


class Wheel(object):
    """A wheel file"""

    # TODO: maybe move the install code into this class

    wheel_file_re = re.compile(
        r"""^(?P(?P.+?)-(?P\d.*?))
        ((-(?P\d.*?))?-(?P.+?)-(?P.+?)-(?P.+?)
        \.whl|\.dist-info)$""",
        re.VERBOSE
    )

    def __init__(self, filename):
        """
        :raises InvalidWheelFilename: when the filename is invalid for a wheel
        """
        wheel_info = self.wheel_file_re.match(filename)
        if not wheel_info:
            raise InvalidWheelFilename(
                "%s is not a valid wheel filename." % filename
            )
        self.filename = filename
        self.name = wheel_info.group('name').replace('_', '-')
        # we'll assume "_" means "-" due to wheel naming scheme
        # (https://github.com/pypa/pip/issues/1150)
        self.version = wheel_info.group('ver').replace('_', '-')
        self.pyversions = wheel_info.group('pyver').split('.')
        self.abis = wheel_info.group('abi').split('.')
        self.plats = wheel_info.group('plat').split('.')

        # All the tag combinations from this file
        self.file_tags = set(
            (x, y, z) for x in self.pyversions
            for y in self.abis for z in self.plats
        )

    def support_index_min(self, tags=None):
        """
        Return the lowest index that one of the wheel's file_tag combinations
        achieves in the supported_tags list, e.g. if there are 8 supported tags
        and one of the file tags is first in the list, then return 0.  Returns
        None if the wheel is not supported.
        """
        if tags is None:  # for mock
            tags = pep425tags.supported_tags
        indexes = [tags.index(c) for c in self.file_tags if c in tags]
        return min(indexes) if indexes else None

    def supported(self, tags=None):
        """Is this wheel supported on this system?"""
        if tags is None:  # for mock
            tags = pep425tags.supported_tags
        return bool(set(tags).intersection(self.file_tags))


class WheelBuilder(object):
    """Build wheels from a RequirementSet."""

    def __init__(self, requirement_set, finder, build_options=None,
                 global_options=None):
        self.requirement_set = requirement_set
        self.finder = finder
        self._cache_root = requirement_set._wheel_cache._cache_dir
        self._wheel_dir = requirement_set.wheel_download_dir
        self.build_options = build_options or []
        self.global_options = global_options or []

    def _build_one(self, req, output_dir, python_tag=None):
        """Build one wheel.

        :return: The filename of the built wheel, or None if the build failed.
        """
        tempd = tempfile.mkdtemp('pip-wheel-')
        try:
            if self.__build_one(req, tempd, python_tag=python_tag):
                try:
                    wheel_name = os.listdir(tempd)[0]
                    wheel_path = os.path.join(output_dir, wheel_name)
                    shutil.move(os.path.join(tempd, wheel_name), wheel_path)
                    logger.info('Stored in directory: %s', output_dir)
                    return wheel_path
                except Exception:
                    pass
            # Ignore return, we can't do anything else useful.
            self._clean_one(req)
            return None
        finally:
            rmtree(tempd)

    def _base_setup_args(self, req):
        return [
            sys.executable, "-u", '-c',
            SETUPTOOLS_SHIM % req.setup_py
        ] + list(self.global_options)

    def __build_one(self, req, tempd, python_tag=None):
        base_args = self._base_setup_args(req)

        spin_message = 'Running setup.py bdist_wheel for %s' % (req.name,)
        with open_spinner(spin_message) as spinner:
            logger.debug('Destination directory: %s', tempd)
            wheel_args = base_args + ['bdist_wheel', '-d', tempd] \
                + self.build_options

            if python_tag is not None:
                wheel_args += ["--python-tag", python_tag]

            try:
                call_subprocess(wheel_args, cwd=req.setup_py_dir,
                                show_stdout=False, spinner=spinner)
                return True
            except Exception:
                spinner.finish("error")
                logger.error('Failed building wheel for %s', req.name)
                return False

    def _clean_one(self, req):
        base_args = self._base_setup_args(req)

        logger.info('Running setup.py clean for %s', req.name)
        clean_args = base_args + ['clean', '--all']
        try:
            call_subprocess(clean_args, cwd=req.source_dir, show_stdout=False)
            return True
        except Exception:
            logger.error('Failed cleaning build dir for %s', req.name)
            return False

    def build(self, autobuilding=False):
        """Build wheels.

        :param autobuilding: If True, build wheels into the wheel cache and
            replace each sdist we built from with its newly built wheel, in
            preparation for installation.
        :return: True if all the wheels built correctly.
        """
        assert self._wheel_dir or (autobuilding and self._cache_root)
        # unpack sdists and constructs req set
        self.requirement_set.prepare_files(self.finder)

        reqset = self.requirement_set.requirements.values()

        buildset = []
        for req in reqset:
            if req.constraint:
                continue
            if req.is_wheel:
                if not autobuilding:
                    logger.info(
                        'Skipping %s, due to already being wheel.', req.name)
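            # When autobuilding, editable requirements, non-artifact (e.g.
            # VCS) links and requirements without a source dir can't be
            # cached as wheels, so they are skipped silently.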
            elif autobuilding and req.editable:
                pass
            elif autobuilding and req.link and not req.link.is_artifact:
                pass
            elif autobuilding and not req.source_dir:
                pass
            else:
                if autobuilding:
                    link = req.link
                    base, ext = link.splitext()
                    if pip.index.egg_info_matches(base, None, link) is None:
                        # Doesn't look like a package - don't autobuild a wheel
                        # because we'll have no way to lookup the result sanely
                        continue
                    if "binary" not in pip.index.fmt_ctl_formats(
                            self.finder.format_control,
                            canonicalize_name(req.name)):
                        logger.info(
                            "Skipping bdist_wheel for %s, due to binaries "
                            "being disabled for it.", req.name)
                        continue
                buildset.append(req)

        if not buildset:
            return True

        # Build the wheels.
        logger.info(
            'Building wheels for collected packages: %s',
            ', '.join([req.name for req in buildset]),
        )
        with indent_log():
            build_success, build_failure = [], []
            for req in buildset:
                python_tag = None
                if autobuilding:
                    python_tag = pep425tags.implementation_tag
                    output_dir = _cache_for_link(self._cache_root, req.link)
                    try:
                        ensure_dir(output_dir)
                    except OSError as e:
                        logger.warning("Building wheel for %s failed: %s",
                                       req.name, e)
                        build_failure.append(req)
                        continue
                else:
                    output_dir = self._wheel_dir
                wheel_file = self._build_one(
                    req, output_dir,
                    python_tag=python_tag,
                )
                if wheel_file:
                    build_success.append(req)
                    if autobuilding:
                        # XXX: This is mildly duplicative with prepare_files,
                        # but not close enough to pull out to a single common
                        # method.
                        # The code below assumes temporary source dirs -
                        # prevent it doing bad things.
                        if req.source_dir and not os.path.exists(os.path.join(
                                req.source_dir, PIP_DELETE_MARKER_FILENAME)):
                            raise AssertionError(
                                "bad source dir - missing marker")
                        # Delete the source we built the wheel from
                        req.remove_temporary_source()
                        # set the build directory again - name is known from
                        # the work prepare_files did.
                        req.source_dir = req.build_location(
                            self.requirement_set.build_dir)
                        # Update the link for this.
                        req.link = pip.index.Link(
                            path_to_url(wheel_file))
                        assert req.link.is_wheel
                        # extract the wheel into the dir
                        unpack_url(
                            req.link, req.source_dir, None, False,
                            session=self.requirement_set.session)
                else:
                    build_failure.append(req)

        # notify success/failure
        if build_success:
            logger.info(
                'Successfully built %s',
                ' '.join([req.name for req in build_success]),
            )
        if build_failure:
            logger.info(
                'Failed to build %s',
                ' '.join([req.name for req in build_failure]),
            )
        # Return True if all builds were successful
        return len(build_failure) == 0
PKZUg%g%site-packages/pip/__init__.pycnu[
abc@@siddlmZddlZddlZddlZddlZddlZddlZddlZddl	m
Z
ejdde
yddlZWne
k
rncXejdkr
eedddkr
ydd	lmZWne
efk
rq
Xejndd
lmZmZmZddlmZmZddlmZmZdd
lmZmZm Z m!Z!ddl"m#Z#m$Z$ddl%m&Z&m'Z'ddl%m(Z(ddl	m)Z)ddl*Z+e+j,Z,dZ-ej.e/Z0ejdde)dZ1dZ2dZ3dZ4ddZ6de7fdYZ8e/dkreej9e6ndS(i(tabsolute_importN(tDependencyWarningtignoretcategorytdarwintOPENSSL_VERSION_NUMBERi(tsecuretransport(tInstallationErrortCommandErrortPipError(tget_installed_distributionstget_prog(tdeprecationtdist_is_editable(tgitt	mercurialt
subversiontbazaar(tConfigOptionParsertUpdatingDefaultsHelpFormatter(t
get_summariestget_similar_commands(t
commands_dict(tInsecureRequestWarnings9.0.3cC@sdtjkrdStjdjd}ttjd}y||d}Wntk
rhd}nXgtD]\}}|^qs}g}y-g|D]}||kr|^qd}Wntk
rd}nXt}	|r|dkrtj	dn|d	kr|j
d
rg}
|j}xOtdt
D]>}|jj
|rA|j|dkrA|
j|jqAqAW|
rx|
D]}|GHqWtj	dqnt|}
|g|
jjD]-}|jtjkr|j|jf^q7}g|d|d!D]}|jdd^q}g|D]$\}}||kr;||f^q;}g|D]'\}}|j
|rl||f^ql}x|D],}|d}|dr|d7}n|GHqWn|j
d
s|j
d
rhg|	jD]}|j^q}|j|	jd|D}|g|D]$}|jtjkr:|j^q:7}ndjg|D]}|j
|ru|^quGHtj	ddS(sCommand and option completion for the main option parser (and options)
    and its subcommands (and options).

    Enable by sourcing one of the completion shell scripts (bash, zsh or fish).
    tPIP_AUTO_COMPLETENt
COMP_WORDSit
COMP_CWORDtithelpt	uninstallt-t
local_onlyt=s--cs@s"|]}|D]}|Vq
qdS(N((t.0titto((s0/usr/lib/python2.7/site-packages/pip/__init__.pys	st (tostenvirontsplittintt
IndexErrorRtNonetcreate_main_parsertsystexitt
startswithtlowerR
tTruetkeytappendRtparsertoption_list_allRtoptparset
SUPPRESS_HELPtget_opt_stringtnargst
option_groupstoption_listtjoin(tcwordstcwordtcurrenttcmdtsummarytsubcommandstoptionstwtsubcommand_nameR3t	installedtlctdistt
subcommandtopttxt	prev_optstvtktoptiont	opt_labeltitopts((s0/usr/lib/python2.7/site-packages/pip/__init__.pytautocompleteEs`

"-

	%
	
.114



(0cC@sidd6td6td6dd6td6}t|}|jtjjtjjtjjt	}dt
|tjd	 f|_t
jt
j|}|j|t|_t}d
gg|D]\}}d||f^q}dj||_|S(
Ns
%prog  [options]tusagetadd_help_optiont	formattertglobaltnametprogspip %s from %s (python %s)iRs%-27s %ss
(tFalseRRRtdisable_interspersed_argsR%tpathtdirnametabspatht__file__t__version__R,tversiont
cmdoptionstmake_option_groupt
# site-packages/pip/download.py
from __future__ import absolute_import

import cgi
import email.utils
import getpass
import json
import logging
import mimetypes
import os
import platform
import re
import shutil
import sys
import tempfile

try:
    import ssl  # noqa
    HAS_TLS = True
except ImportError:
    HAS_TLS = False

from pip._vendor.six.moves.urllib import parse as urllib_parse
from pip._vendor.six.moves.urllib import request as urllib_request

import pip

from pip.exceptions import InstallationError, HashMismatch
from pip.models import PyPI
from pip.utils import (splitext, rmtree, format_size, display_path,
                       backup_dir, ask_path_exists, unpack_file,
                       ARCHIVE_EXTENSIONS, consume, call_subprocess)
from pip.utils.encoding import auto_decode
from pip.utils.filesystem import check_path_owner
from pip.utils.logging import indent_log
from pip.utils.setuptools_build import SETUPTOOLS_SHIM
from pip.utils.glibc import libc_ver
from pip.utils.ui import DownloadProgressBar, DownloadProgressSpinner
from pip.locations import write_delete_marker_file
from pip.vcs import vcs
from pip._vendor import requests, six
from pip._vendor.requests.adapters import BaseAdapter, HTTPAdapter
from pip._vendor.requests.auth import AuthBase, HTTPBasicAuth
from pip._vendor.requests.models import CONTENT_CHUNK_SIZE, Response
from pip._vendor.requests.utils import get_netrc_auth
from pip._vendor.requests.structures import CaseInsensitiveDict
from pip._vendor import urllib3
from pip._vendor.cachecontrol import CacheControlAdapter
from pip._vendor.cachecontrol.caches import FileCache
from pip._vendor.lockfile import LockError
from pip._vendor.six.moves import xmlrpc_client


__all__ = ['get_file_content',
           'is_url', 'url_to_path', 'path_to_url',
           'is_archive_file', 'unpack_vcs_link',
           'unpack_file_url', 'is_vcs_url', 'is_file_url',
           'unpack_http_url', 'unpack_url',
           'parse_content_disposition', 'sanitize_content_filename']


logger = logging.getLogger(__name__)


def user_agent():
    """
    Return a string representing the user agent.
    """
    data = {
        "installer": {"name": "pip", "version": pip.__version__},
        "python": platform.python_version(),
        "implementation": {
            "name": platform.python_implementation(),
        },
    }

    if data["implementation"]["name"] == 'CPython':
        data["implementation"]["version"] = platform.python_version()
    elif data["implementation"]["name"] == 'PyPy':
        if sys.pypy_version_info.releaselevel == 'final':
            pypy_version_info = sys.pypy_version_info[:3]
        else:
            pypy_version_info = sys.pypy_version_info
        data["implementation"]["version"] = ".".join(
            [str(x) for x in pypy_version_info]
        )
    elif data["implementation"]["name"] == 'Jython':
        # Complete Guess
        data["implementation"]["version"] = platform.python_version()
    elif data["implementation"]["name"] == 'IronPython':
        # Complete Guess
        data["implementation"]["version"] = platform.python_version()

    if sys.platform.startswith("linux"):
        from pip._vendor import distro
        distro_infos = dict(filter(
            lambda x: x[1],
            zip(["name", "version", "id"], distro.linux_distribution()),
        ))
        libc = dict(filter(
            lambda x: x[1],
            zip(["lib", "version"], libc_ver()),
        ))
        if libc:
            distro_infos["libc"] = libc
        if distro_infos:
            data["distro"] = distro_infos

    if sys.platform.startswith("darwin") and platform.mac_ver()[0]:
        data["distro"] = {"name": "macOS", "version": platform.mac_ver()[0]}

    if platform.system():
        data.setdefault("system", {})["name"] = platform.system()

    if platform.release():
        data.setdefault("system", {})["release"] = platform.release()

    if platform.machine():
        data["cpu"] = platform.machine()

    # Python 2.6 doesn't have ssl.OPENSSL_VERSION.
    if HAS_TLS and sys.version_info[:2] > (2, 6):
        data["openssl_version"] = ssl.OPENSSL_VERSION

    return "{data[installer][name]}/{data[installer][version]} {json}".format(
        data=data,
        json=json.dumps(data, separators=(",", ":"), sort_keys=True),
    )
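
# Illustrative sketch only (version numbers and platform data are made up):
# the string assembled above looks roughly like
#
#   pip/9.0.1 {"cpu":"x86_64","implementation":{"name":"CPython", ...},
#              "python":"2.7.13","system":{"name":"Linux", ...}}
#
# i.e. the installer name/version followed by the JSON-encoded metadata dict.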


class MultiDomainBasicAuth(AuthBase):

    def __init__(self, prompting=True):
        self.prompting = prompting
        self.passwords = {}

    def __call__(self, req):
        parsed = urllib_parse.urlparse(req.url)

        # Get the netloc without any embedded credentials
        netloc = parsed.netloc.rsplit("@", 1)[-1]

        # Set the url of the request to the url without any credentials
        req.url = urllib_parse.urlunparse(parsed[:1] + (netloc,) + parsed[2:])

        # Use any stored credentials that we have for this netloc
        username, password = self.passwords.get(netloc, (None, None))

        # Extract credentials embedded in the url if we have none stored
        if username is None:
            username, password = self.parse_credentials(parsed.netloc)

        # Get creds from netrc if we still don't have them
        if username is None and password is None:
            netrc_auth = get_netrc_auth(req.url)
            username, password = netrc_auth if netrc_auth else (None, None)

        if username or password:
            # Store the username and password
            self.passwords[netloc] = (username, password)

            # Send the basic auth with this request
            req = HTTPBasicAuth(username or "", password or "")(req)

        # Attach a hook to handle 401 responses
        req.register_hook("response", self.handle_401)

        return req

    def handle_401(self, resp, **kwargs):
        # We only care about 401 responses, anything else we want to just
        #   pass through the actual response
        if resp.status_code != 401:
            return resp

        # We are not able to prompt the user so simply return the response
        if not self.prompting:
            return resp

        parsed = urllib_parse.urlparse(resp.url)

        # Prompt the user for a new username and password
        username = six.moves.input("User for %s: " % parsed.netloc)
        password = getpass.getpass("Password: ")

        # Store the new username and password to use for future requests
        if username or password:
            self.passwords[parsed.netloc] = (username, password)

        # Consume content and release the original connection to allow our new
        #   request to reuse the same one.
        resp.content
        resp.raw.release_conn()

        # Add our new username and password to the request
        req = HTTPBasicAuth(username or "", password or "")(resp.request)

        # Send our new request
        new_resp = resp.connection.send(req, **kwargs)
        new_resp.history.append(resp)

        return new_resp

    def parse_credentials(self, netloc):
        if "@" in netloc:
            userinfo = netloc.rsplit("@", 1)[0]
            if ":" in userinfo:
                return userinfo.split(":", 1)
            return userinfo, None
        return None, None


class LocalFSAdapter(BaseAdapter):

    def send(self, request, stream=None, timeout=None, verify=None, cert=None,
             proxies=None):
        pathname = url_to_path(request.url)

        resp = Response()
        resp.status_code = 200
        resp.url = request.url

        try:
            stats = os.stat(pathname)
        except OSError as exc:
            resp.status_code = 404
            resp.raw = exc
        else:
            modified = email.utils.formatdate(stats.st_mtime, usegmt=True)
            content_type = mimetypes.guess_type(pathname)[0] or "text/plain"
            resp.headers = CaseInsensitiveDict({
                "Content-Type": content_type,
                "Content-Length": stats.st_size,
                "Last-Modified": modified,
            })

            resp.raw = open(pathname, "rb")
            resp.close = resp.raw.close

        return resp

    def close(self):
        pass


class SafeFileCache(FileCache):
    """
    A file based cache which is safe to use even when the target directory may
    not be accessible or writable.
    """

    def __init__(self, *args, **kwargs):
        super(SafeFileCache, self).__init__(*args, **kwargs)

        # Check to ensure that the directory containing our cache directory
        # is owned by the user currently executing pip. If it does not exist
        # we will check the parent directory until we find one that does exist.
        # If it is not owned by the user executing pip then we will disable
        # the cache and log a warning.
        if not check_path_owner(self.directory):
            logger.warning(
                "The directory '%s' or its parent directory is not owned by "
                "the current user and the cache has been disabled. Please "
                "check the permissions and owner of that directory. If "
                "executing pip with sudo, you may want sudo's -H flag.",
                self.directory,
            )

            # Set our directory to None to disable the Cache
            self.directory = None

    def get(self, *args, **kwargs):
        # If we don't have a directory, then the cache should be a no-op.
        if self.directory is None:
            return

        try:
            return super(SafeFileCache, self).get(*args, **kwargs)
        except (LockError, OSError, IOError):
            # We intentionally silence this error, if we can't access the cache
            # then we can just skip caching and process the request as if
            # caching wasn't enabled.
            pass

    def set(self, *args, **kwargs):
        # If we don't have a directory, then the cache should be a no-op.
        if self.directory is None:
            return

        try:
            return super(SafeFileCache, self).set(*args, **kwargs)
        except (LockError, OSError, IOError):
            # We intentionally silence this error, if we can't access the cache
            # then we can just skip caching and process the request as if
            # caching wasn't enabled.
            pass

    def delete(self, *args, **kwargs):
        # If we don't have a directory, then the cache should be a no-op.
        if self.directory is None:
            return

        try:
            return super(SafeFileCache, self).delete(*args, **kwargs)
        except (LockError, OSError, IOError):
            # We intentionally silence this error, if we can't access the cache
            # then we can just skip caching and process the request as if
            # caching wasn't enabled.
            pass


class InsecureHTTPAdapter(HTTPAdapter):

    def cert_verify(self, conn, url, verify, cert):
        conn.cert_reqs = 'CERT_NONE'
        conn.ca_certs = None


class PipSession(requests.Session):

    timeout = None

    def __init__(self, *args, **kwargs):
        retries = kwargs.pop("retries", 0)
        cache = kwargs.pop("cache", None)
        insecure_hosts = kwargs.pop("insecure_hosts", [])

        super(PipSession, self).__init__(*args, **kwargs)

        # Attach our User Agent to the request
        self.headers["User-Agent"] = user_agent()

        # Attach our Authentication handler to the session
        self.auth = MultiDomainBasicAuth()

        # Create our urllib3.Retry instance which will allow us to customize
        # how we handle retries.
        retries = urllib3.Retry(
            # Set the total number of retries that a particular request can
            # have.
            total=retries,

            # A 503 error from PyPI typically means that the Fastly -> Origin
            # connection got interrupted in some way. A 503 error in general
            # is typically considered a transient error so we'll go ahead and
            # retry it.
            status_forcelist=[503],

            # Add a small amount of back off between failed requests in
            # order to prevent hammering the service.
            backoff_factor=0.25,
        )

        # We want to _only_ cache responses on securely fetched origins. We do
        # this because we can't validate the response of an insecurely fetched
        # origin, and we don't want someone to be able to poison the cache and
        # require manual eviction from the cache to fix it.
        if cache:
            secure_adapter = CacheControlAdapter(
                cache=SafeFileCache(cache, use_dir_lock=True),
                max_retries=retries,
            )
        else:
            secure_adapter = HTTPAdapter(max_retries=retries)

        # Our Insecure HTTPAdapter disables HTTPS validation. It does not
        # support caching (see above) so we'll use it for all http:// URLs as
        # well as any https:// host that we've marked as ignoring TLS errors
        # for.
        insecure_adapter = InsecureHTTPAdapter(max_retries=retries)

        self.mount("https://", secure_adapter)
        self.mount("http://", insecure_adapter)

        # Enable file:// urls
        self.mount("file://", LocalFSAdapter())

        # We want to use a non-validating adapter for any requests which are
        # deemed insecure.
        for host in insecure_hosts:
            self.mount("https://{0}/".format(host), insecure_adapter)

    def request(self, method, url, *args, **kwargs):
        # Allow setting a default timeout on a session
        kwargs.setdefault("timeout", self.timeout)

        # Dispatch the actual request
        return super(PipSession, self).request(method, url, *args, **kwargs)
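
# Hedged construction sketch (argument values are made up, not pip defaults):
#
#   session = PipSession(
#       retries=3,                                 # total urllib3 retries
#       cache='/home/user/.cache/pip/http',        # enables CacheControlAdapter
#       insecure_hosts=['internal.mirror.local'],  # skip TLS verification here
#   )
#   resp = session.get('https://pypi.org/simple/pip/')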


def get_file_content(url, comes_from=None, session=None):
    """Gets the content of a file; it may be a filename, file: URL, or
    http: URL.  Returns (location, content).  Content is unicode."""
    if session is None:
        raise TypeError(
            "get_file_content() missing 1 required keyword argument: 'session'"
        )

    match = _scheme_re.search(url)
    if match:
        scheme = match.group(1).lower()
        if (scheme == 'file' and comes_from and
                comes_from.startswith('http')):
            raise InstallationError(
                'Requirements file %s references URL %s, which is local'
                % (comes_from, url))
        if scheme == 'file':
            path = url.split(':', 1)[1]
            path = path.replace('\\', '/')
            match = _url_slash_drive_re.match(path)
            if match:
                path = match.group(1) + ':' + path.split('|', 1)[1]
            path = urllib_parse.unquote(path)
            if path.startswith('/'):
                path = '/' + path.lstrip('/')
            url = path
        else:
            # FIXME: catch some errors
            resp = session.get(url)
            resp.raise_for_status()
            return resp.url, resp.text
    try:
        with open(url, 'rb') as f:
            content = auto_decode(f.read())
    except IOError as exc:
        raise InstallationError(
            'Could not open requirements file: %s' % str(exc)
        )
    return url, content
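
# Minimal usage sketch (assumed file name; the session keyword is mandatory
# and PipSession above is the type normally passed in):
#
#   url, text = get_file_content('requirements.txt', session=PipSession())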


_scheme_re = re.compile(r'^(http|https|file):', re.I)
_url_slash_drive_re = re.compile(r'/*([a-z])\|', re.I)


def is_url(name):
    """Returns true if the name looks like a URL"""
    if ':' not in name:
        return False
    scheme = name.split(':', 1)[0].lower()
    return scheme in ['http', 'https', 'file', 'ftp'] + vcs.all_schemes
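
# Behaviour sketch for is_url() (illustrative values):
#
#   is_url('https://pypi.org/simple')    # -> True
#   is_url('ftp://example.com/pkg.zip')  # -> True
#   is_url('./downloads/pkg.tar.gz')     # -> False (no scheme present)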


def url_to_path(url):
    """
    Convert a file: URL to a path.
    """
    assert url.startswith('file:'), (
        "You can only turn file: urls into filenames (not %r)" % url)

    _, netloc, path, _, _ = urllib_parse.urlsplit(url)

    # if we have a UNC path, prepend UNC share notation
    if netloc:
        netloc = '\\\\' + netloc

    path = urllib_request.url2pathname(netloc + path)
    return path


def path_to_url(path):
    """
    Convert a path to a file: URL.  The path will be made absolute and have
    quoted path parts.
    """
    path = os.path.normpath(os.path.abspath(path))
    url = urllib_parse.urljoin('file:', urllib_request.pathname2url(path))
    return url
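
# Round-trip sketch for the two helpers above (POSIX-style paths assumed;
# Windows additionally handles drive letters and UNC shares):
#
#   path_to_url('/tmp/pip-build/foo.tar.gz')
#   # -> 'file:///tmp/pip-build/foo.tar.gz'
#   url_to_path('file:///tmp/pip-build/foo.tar.gz')
#   # -> '/tmp/pip-build/foo.tar.gz'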


def is_archive_file(name):
    """Return True if `name` is a considered as an archive file."""
    ext = splitext(name)[1].lower()
    if ext in ARCHIVE_EXTENSIONS:
        return True
    return False


def unpack_vcs_link(link, location):
    vcs_backend = _get_used_vcs_backend(link)
    vcs_backend.unpack(location)


def _get_used_vcs_backend(link):
    for backend in vcs.backends:
        if link.scheme in backend.schemes:
            vcs_backend = backend(link.url)
            return vcs_backend


def is_vcs_url(link):
    return bool(_get_used_vcs_backend(link))


def is_file_url(link):
    return link.url.lower().startswith('file:')


def is_dir_url(link):
    """Return whether a file:// Link points to a directory.

    ``link`` must not have any other scheme but file://. Call is_file_url()
    first.

    """
    link_path = url_to_path(link.url_without_fragment)
    return os.path.isdir(link_path)


def _progress_indicator(iterable, *args, **kwargs):
    return iterable


def _download_url(resp, link, content_file, hashes):
    try:
        total_length = int(resp.headers['content-length'])
    except (ValueError, KeyError, TypeError):
        total_length = 0

    cached_resp = getattr(resp, "from_cache", False)

    if logger.getEffectiveLevel() > logging.INFO:
        show_progress = False
    elif cached_resp:
        show_progress = False
    elif total_length > (40 * 1000):
        show_progress = True
    elif not total_length:
        show_progress = True
    else:
        show_progress = False

    show_url = link.show_url

    def resp_read(chunk_size):
        try:
            # Special case for urllib3.
            for chunk in resp.raw.stream(
                    chunk_size,
                    # We use decode_content=False here because we don't
                    # want urllib3 to mess with the raw bytes we get
                    # from the server. If we decompress inside of
                    # urllib3 then we cannot verify the checksum
                    # because the checksum will be of the compressed
                    # file. This breakage will only occur if the
                    # server adds a Content-Encoding header, which
                    # depends on how the server was configured:
                    # - Some servers will notice that the file isn't a
                    #   compressible file and will leave the file alone
                    #   and with an empty Content-Encoding
                    # - Some servers will notice that the file is
                    #   already compressed and will leave the file
                    #   alone and will add a Content-Encoding: gzip
                    #   header
                    # - Some servers won't notice anything at all and
                    #   will take a file that's already been compressed
                    #   and compress it again and set the
                    #   Content-Encoding: gzip header
                    #
                    # By setting this not to decode automatically we
                    # hope to eliminate problems with the second case.
                    decode_content=False):
                yield chunk
        except AttributeError:
            # Standard file-like object.
            while True:
                chunk = resp.raw.read(chunk_size)
                if not chunk:
                    break
                yield chunk

    def written_chunks(chunks):
        for chunk in chunks:
            content_file.write(chunk)
            yield chunk

    progress_indicator = _progress_indicator

    if link.netloc == PyPI.netloc:
        url = show_url
    else:
        url = link.url_without_fragment

    if show_progress:  # We don't show progress on cached responses
        if total_length:
            logger.info("Downloading %s (%s)", url, format_size(total_length))
            progress_indicator = DownloadProgressBar(max=total_length).iter
        else:
            logger.info("Downloading %s", url)
            progress_indicator = DownloadProgressSpinner().iter
    elif cached_resp:
        logger.info("Using cached %s", url)
    else:
        logger.info("Downloading %s", url)

    logger.debug('Downloading from URL %s', link)

    downloaded_chunks = written_chunks(
        progress_indicator(
            resp_read(CONTENT_CHUNK_SIZE),
            CONTENT_CHUNK_SIZE
        )
    )
    if hashes:
        hashes.check_against_chunks(downloaded_chunks)
    else:
        consume(downloaded_chunks)


def _copy_file(filename, location, link):
    copy = True
    download_location = os.path.join(location, link.filename)
    if os.path.exists(download_location):
        response = ask_path_exists(
            'The file %s exists. (i)gnore, (w)ipe, (b)ackup, (a)bort' %
            display_path(download_location), ('i', 'w', 'b', 'a'))
        if response == 'i':
            copy = False
        elif response == 'w':
            logger.warning('Deleting %s', display_path(download_location))
            os.remove(download_location)
        elif response == 'b':
            dest_file = backup_dir(download_location)
            logger.warning(
                'Backing up %s to %s',
                display_path(download_location),
                display_path(dest_file),
            )
            shutil.move(download_location, dest_file)
        elif response == 'a':
            sys.exit(-1)
    if copy:
        shutil.copy(filename, download_location)
        logger.info('Saved %s', display_path(download_location))


def unpack_http_url(link, location, download_dir=None,
                    session=None, hashes=None):
    if session is None:
        raise TypeError(
            "unpack_http_url() missing 1 required keyword argument: 'session'"
        )

    temp_dir = tempfile.mkdtemp('-unpack', 'pip-')

    # If a download dir is specified, is the file already downloaded there?
    already_downloaded_path = None
    if download_dir:
        already_downloaded_path = _check_download_dir(link,
                                                      download_dir,
                                                      hashes)

    if already_downloaded_path:
        from_path = already_downloaded_path
        content_type = mimetypes.guess_type(from_path)[0]
    else:
        # let's download to a tmp dir
        from_path, content_type = _download_http_url(link,
                                                     session,
                                                     temp_dir,
                                                     hashes)

    # unpack the archive to the build dir location. even when only downloading
    # archives, they have to be unpacked to parse dependencies
    unpack_file(from_path, location, content_type, link)

    # a download dir is specified; let's copy the archive there
    if download_dir and not already_downloaded_path:
        _copy_file(from_path, download_dir, link)

    if not already_downloaded_path:
        os.unlink(from_path)
    rmtree(temp_dir)


def unpack_file_url(link, location, download_dir=None, hashes=None):
    """Unpack link into location.

    If download_dir is provided and link points to a file, make a copy
    of the link file inside download_dir.
    """
    link_path = url_to_path(link.url_without_fragment)

    # If it's a url to a local directory
    if is_dir_url(link):
        if os.path.isdir(location):
            rmtree(location)
        shutil.copytree(link_path, location, symlinks=True)
        if download_dir:
            logger.info('Link is a directory, ignoring download_dir')
        return

    # If --require-hashes is off, `hashes` is either empty, the
    # link's embedded hash, or MissingHashes; it is required to
    # match. If --require-hashes is on, we are satisfied by any
    # hash in `hashes` matching: a URL-based or an option-based
    # one; no internet-sourced hash will be in `hashes`.
    if hashes:
        hashes.check_against_path(link_path)

    # If a download dir is specified, is the file already there and valid?
    already_downloaded_path = None
    if download_dir:
        already_downloaded_path = _check_download_dir(link,
                                                      download_dir,
                                                      hashes)

    if already_downloaded_path:
        from_path = already_downloaded_path
    else:
        from_path = link_path

    content_type = mimetypes.guess_type(from_path)[0]

    # unpack the archive to the build dir location. even when only downloading
    # archives, they have to be unpacked to parse dependencies
    unpack_file(from_path, location, content_type, link)

    # a download dir is specified and not already downloaded
    if download_dir and not already_downloaded_path:
        _copy_file(from_path, download_dir, link)


def _copy_dist_from_dir(link_path, location):
    """Copy distribution files in `link_path` to `location`.

    Invoked when user requests to install a local directory. E.g.:

        pip install .
        pip install ~/dev/git-repos/python-prompt-toolkit

    """

    # Note: This is currently VERY SLOW if you have a lot of data in the
    # directory, because it copies everything with `shutil.copytree`.
    # What it should really do is build an sdist and install that.
    # See https://github.com/pypa/pip/issues/2195

    if os.path.isdir(location):
        rmtree(location)

    # build an sdist
    setup_py = 'setup.py'
    sdist_args = [sys.executable]
    sdist_args.append('-c')
    sdist_args.append(SETUPTOOLS_SHIM % setup_py)
    sdist_args.append('sdist')
    sdist_args += ['--dist-dir', location]
    logger.info('Running setup.py sdist for %s', link_path)

    with indent_log():
        call_subprocess(sdist_args, cwd=link_path, show_stdout=False)

    # unpack sdist into `location`
    sdist = os.path.join(location, os.listdir(location)[0])
    logger.info('Unpacking sdist %s into %s', sdist, location)
    unpack_file(sdist, location, content_type=None, link=None)


class PipXmlrpcTransport(xmlrpc_client.Transport):
    """Provide a `xmlrpclib.Transport` implementation via a `PipSession`
    object.
    """

    def __init__(self, index_url, session, use_datetime=False):
        xmlrpc_client.Transport.__init__(self, use_datetime)
        index_parts = urllib_parse.urlparse(index_url)
        self._scheme = index_parts.scheme
        self._session = session

    def request(self, host, handler, request_body, verbose=False):
        parts = (self._scheme, host, handler, None, None, None)
        url = urllib_parse.urlunparse(parts)
        try:
            headers = {'Content-Type': 'text/xml'}
            response = self._session.post(url, data=request_body,
                                          headers=headers, stream=True)
            response.raise_for_status()
            self.verbose = verbose
            return self.parse_response(response.raw)
        except requests.HTTPError as exc:
            logger.critical(
                "HTTP error %s while getting %s",
                exc.response.status_code, url,
            )
            raise


def unpack_url(link, location, download_dir=None,
               only_download=False, session=None, hashes=None):
    """Unpack link.
       If link is a VCS link:
         if only_download, export into download_dir and ignore location
          else unpack into location
       for other types of link:
         - unpack into location
         - if download_dir, copy the file into download_dir
         - if only_download, mark location for deletion

    :param hashes: A Hashes object, one of whose embedded hashes must match,
        or HashMismatch will be raised. If the Hashes is empty, no matches are
        required, and unhashable types of requirements (like VCS ones, which
        would ordinarily raise HashUnsupported) are allowed.
    """
    # non-editable vcs urls
    if is_vcs_url(link):
        unpack_vcs_link(link, location)

    # file urls
    elif is_file_url(link):
        unpack_file_url(link, location, download_dir, hashes=hashes)

    # http urls
    else:
        if session is None:
            session = PipSession()

        unpack_http_url(
            link,
            location,
            download_dir,
            session,
            hashes=hashes
        )
    if only_download:
        write_delete_marker_file(location)


def sanitize_content_filename(filename):
    # type: (str) -> str
    """
    Sanitize the "filename" value from a Content-Disposition header.
    """
    return os.path.basename(filename)
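
# Illustrative only: since the helper just applies os.path.basename(), a
# hostile header value cannot climb out of the download directory, e.g.
#
#   sanitize_content_filename('../../../etc/passwd')  # -> 'passwd'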


def parse_content_disposition(content_disposition, default_filename):
    # type: (str, str) -> str
    """
    Parse the "filename" value from a Content-Disposition header, and
    return the default filename if the result is empty.
    """
    _type, params = cgi.parse_header(content_disposition)
    filename = params.get('filename')
    if filename:
        # We need to sanitize the filename to prevent directory traversal
        # in case the filename contains ".." path parts.
        filename = sanitize_content_filename(filename)
    return filename or default_filename
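
# Rough examples of the parsing above (header values are invented):
#
#   parse_content_disposition(
#       'attachment; filename="pkg-1.0.tar.gz"', 'fallback.tar.gz')
#   # -> 'pkg-1.0.tar.gz'
#
#   parse_content_disposition('attachment', 'fallback.tar.gz')
#   # -> 'fallback.tar.gz'   (no filename parameter, so the default is used)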


def _download_http_url(link, session, temp_dir, hashes):
    """Download link url into temp_dir using provided session"""
    target_url = link.url.split('#', 1)[0]
    try:
        resp = session.get(
            target_url,
            # We use Accept-Encoding: identity here because requests
            # defaults to accepting compressed responses. This breaks in
            # a variety of ways depending on how the server is configured.
            # - Some servers will notice that the file isn't a compressible
            #   file and will leave the file alone and with an empty
            #   Content-Encoding
            # - Some servers will notice that the file is already
            #   compressed and will leave the file alone and will add a
            #   Content-Encoding: gzip header
            # - Some servers won't notice anything at all and will take
            #   a file that's already been compressed and compress it again
            #   and set the Content-Encoding: gzip header
            # By setting this to request only the identity encoding, we're
            # hoping to eliminate the third case. Hopefully there does not
            # exist a server which when given a file will notice it is
            # already compressed and that you're not asking for a
            # compressed file and will then decompress it before sending
            # because if that's the case I don't think it'll ever be
            # possible to make this work.
            headers={"Accept-Encoding": "identity"},
            stream=True,
        )
        resp.raise_for_status()
    except requests.HTTPError as exc:
        logger.critical(
            "HTTP error %s while getting %s", exc.response.status_code, link,
        )
        raise

    content_type = resp.headers.get('content-type', '')
    filename = link.filename  # fallback
    # Have a look at the Content-Disposition header for a better guess
    content_disposition = resp.headers.get('content-disposition')
    if content_disposition:
        filename = parse_content_disposition(content_disposition, filename)
    ext = splitext(filename)[1]
    if not ext:
        ext = mimetypes.guess_extension(content_type)
        if ext:
            filename += ext
    if not ext and link.url != resp.url:
        ext = os.path.splitext(resp.url)[1]
        if ext:
            filename += ext
    file_path = os.path.join(temp_dir, filename)
    with open(file_path, 'wb') as content_file:
        _download_url(resp, link, content_file, hashes)
    return file_path, content_type


def _check_download_dir(link, download_dir, hashes):
    """ Check download_dir for previously downloaded file with correct hash
        If a correct file is found return its path else None
    """
    download_path = os.path.join(download_dir, link.filename)
    if os.path.exists(download_path):
        # If already downloaded, does its hash match?
        logger.info('File was already downloaded %s', download_path)
        if hashes:
            try:
                hashes.check_against_path(download_path)
            except HashMismatch:
                logger.warning(
                    'Previously-downloaded file %s has bad hash. '
                    'Re-downloading.',
                    download_path
                )
                os.unlink(download_path)
                return None
        return download_path
    return None

# site-packages/pip/index.py
"""Routines related to PyPI, indexes"""
from __future__ import absolute_import

import logging
import cgi
from collections import namedtuple
import itertools
import sys
import os
import re
import mimetypes
import posixpath
import warnings

from pip._vendor.six.moves.urllib import parse as urllib_parse
from pip._vendor.six.moves.urllib import request as urllib_request

from pip.compat import ipaddress
from pip.utils import (
    cached_property, splitext, normalize_path,
    ARCHIVE_EXTENSIONS, SUPPORTED_EXTENSIONS,
)
from pip.utils.deprecation import RemovedInPip10Warning
from pip.utils.logging import indent_log
from pip.utils.packaging import check_requires_python
from pip.exceptions import (
    DistributionNotFound, BestVersionAlreadyInstalled, InvalidWheelFilename,
    UnsupportedWheel,
)
from pip.download import HAS_TLS, is_url, path_to_url, url_to_path
from pip.wheel import Wheel, wheel_ext
from pip.pep425tags import get_supported
from pip._vendor import html5lib, requests, six
from pip._vendor.packaging.version import parse as parse_version
from pip._vendor.packaging.utils import canonicalize_name
from pip._vendor.packaging import specifiers
from pip._vendor.requests.exceptions import SSLError
from pip._vendor.distlib.compat import unescape


__all__ = ['FormatControl', 'fmt_ctl_handle_mutual_exclude', 'PackageFinder']


SECURE_ORIGINS = [
    # protocol, hostname, port
    # Taken from Chrome's list of secure origins (See: http://bit.ly/1qrySKC)
    ("https", "*", "*"),
    ("*", "localhost", "*"),
    ("*", "127.0.0.0/8", "*"),
    ("*", "::1/128", "*"),
    ("file", "*", None),
    # ssh is always secure.
    ("ssh", "*", "*"),
]


logger = logging.getLogger(__name__)


class InstallationCandidate(object):

    def __init__(self, project, version, location):
        self.project = project
        self.version = parse_version(version)
        self.location = location
        self._key = (self.project, self.version, self.location)

    def __repr__(self):
        return "".format(
            self.project, self.version, self.location,
        )

    def __hash__(self):
        return hash(self._key)

    def __lt__(self, other):
        return self._compare(other, lambda s, o: s < o)

    def __le__(self, other):
        return self._compare(other, lambda s, o: s <= o)

    def __eq__(self, other):
        return self._compare(other, lambda s, o: s == o)

    def __ge__(self, other):
        return self._compare(other, lambda s, o: s >= o)

    def __gt__(self, other):
        return self._compare(other, lambda s, o: s > o)

    def __ne__(self, other):
        return self._compare(other, lambda s, o: s != o)

    def _compare(self, other, method):
        if not isinstance(other, InstallationCandidate):
            return NotImplemented

        return method(self._key, other._key)
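
# Note (illustrative): because the version is normalised through
# parse_version(), candidates for the same project compare by PEP 440
# ordering rather than lexically, e.g. a candidate for "1.10" sorts above
# one for "1.9".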


class PackageFinder(object):
    """This finds packages.

    This is meant to match easy_install's technique for looking for
    packages, by reading pages and looking for appropriate links.
    """

    def __init__(self, find_links, index_urls, allow_all_prereleases=False,
                 trusted_hosts=None, process_dependency_links=False,
                 session=None, format_control=None, platform=None,
                 versions=None, abi=None, implementation=None):
        """Create a PackageFinder.

        :param format_control: A FormatControl object or None. Used to control
            the selection of source packages / binary packages when consulting
            the index and links.
        :param platform: A string or None. If None, searches for packages
            that are supported by the current system. Otherwise, will find
            packages that can be built on the platform passed in. These
            packages will only be downloaded for distribution: they will
            not be built locally.
        :param versions: A list of strings or None. This is passed directly
            to pep425tags.py in the get_supported() method.
        :param abi: A string or None. This is passed directly
            to pep425tags.py in the get_supported() method.
        :param implementation: A string or None. This is passed directly
            to pep425tags.py in the get_supported() method.
        """
        if session is None:
            raise TypeError(
                "PackageFinder() missing 1 required keyword argument: "
                "'session'"
            )

        # Build find_links. If an argument starts with ~, it may be
        # a local file relative to a home directory. So try normalizing
        # it and if it exists, use the normalized version.
        # This is deliberately conservative - it might be fine just to
        # blindly normalize anything starting with a ~...
        self.find_links = []
        for link in find_links:
            if link.startswith('~'):
                new_link = normalize_path(link)
                if os.path.exists(new_link):
                    link = new_link
            self.find_links.append(link)

        self.index_urls = index_urls
        self.dependency_links = []

        # These are boring links that have already been logged somehow:
        self.logged_links = set()

        self.format_control = format_control or FormatControl(set(), set())

        # Domains that we won't emit warnings for when not using HTTPS
        self.secure_origins = [
            ("*", host, "*")
            for host in (trusted_hosts if trusted_hosts else [])
        ]

        # Do we want to allow _all_ pre-releases?
        self.allow_all_prereleases = allow_all_prereleases

        # Do we process dependency links?
        self.process_dependency_links = process_dependency_links

        # The Session we'll use to make requests
        self.session = session

        # The valid tags to check potential found wheel candidates against
        self.valid_tags = get_supported(
            versions=versions,
            platform=platform,
            abi=abi,
            impl=implementation,
        )

        # If we don't have TLS enabled, then WARN if anyplace we're looking
        # relies on TLS.
        if not HAS_TLS:
            for link in itertools.chain(self.index_urls, self.find_links):
                parsed = urllib_parse.urlparse(link)
                if parsed.scheme == "https":
                    logger.warning(
                        "pip is configured with locations that require "
                        "TLS/SSL, however the ssl module in Python is not "
                        "available."
                    )
                    break

    def add_dependency_links(self, links):
        # # FIXME: this shouldn't be a global list; it should only
        # # apply to requirements of the package that specifies the
        # # dependency_links value
        # # FIXME: also, we should track comes_from (i.e., use Link)
        if self.process_dependency_links:
            warnings.warn(
                "Dependency Links processing has been deprecated and will be "
                "removed in a future release.",
                RemovedInPip10Warning,
            )
            self.dependency_links.extend(links)

    @staticmethod
    def _sort_locations(locations, expand_dir=False):
        """
        Sort locations into "files" (archives) and "urls", and return
        a pair of lists (files, urls)
        """
        files = []
        urls = []

        # puts the url for the given file path into the appropriate list
        def sort_path(path):
            url = path_to_url(path)
            if mimetypes.guess_type(url, strict=False)[0] == 'text/html':
                urls.append(url)
            else:
                files.append(url)

        for url in locations:

            is_local_path = os.path.exists(url)
            is_file_url = url.startswith('file:')

            if is_local_path or is_file_url:
                if is_local_path:
                    path = url
                else:
                    path = url_to_path(url)
                if os.path.isdir(path):
                    if expand_dir:
                        path = os.path.realpath(path)
                        for item in os.listdir(path):
                            sort_path(os.path.join(path, item))
                    elif is_file_url:
                        urls.append(url)
                elif os.path.isfile(path):
                    sort_path(path)
                else:
                    logger.warning(
                        "Url '%s' is ignored: it is neither a file "
                        "nor a directory.", url)
            elif is_url(url):
                # Only add url with clear scheme
                urls.append(url)
            else:
                logger.warning(
                    "Url '%s' is ignored. It is either a non-existing "
                    "path or lacks a specific scheme.", url)

        return files, urls
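
    # Illustrative outcome of the split above (paths are made up): a local
    # archive such as /tmp/pkgs/foo-1.0.tar.gz comes back in `files` (as a
    # file:// URL), while an index URL such as https://pypi.org/simple/foo/
    # comes back in `urls`.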

    def _candidate_sort_key(self, candidate):
        """
        Function used to generate link sort key for link tuples.
        The greater the return value, the more preferred it is.
        If not finding wheels, then sorted by version only.
        If finding wheels, then the sort order is by version, then:
          1. existing installs
          2. wheels ordered via Wheel.support_index_min(self.valid_tags)
          3. source archives
        Note: it was considered to embed this logic into the Link
              comparison operators, but then different sdist links
              with the same version would have to be considered equal
        """
        support_num = len(self.valid_tags)
        if candidate.location.is_wheel:
            # can raise InvalidWheelFilename
            wheel = Wheel(candidate.location.filename)
            if not wheel.supported(self.valid_tags):
                raise UnsupportedWheel(
                    "%s is not a supported wheel for this platform. It "
                    "can't be sorted." % wheel.filename
                )
            pri = -(wheel.support_index_min(self.valid_tags))
        else:  # sdist
            pri = -(support_num)
        return (candidate.version, pri)
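
    # Worked example (illustrative): with len(self.valid_tags) == 3, an sdist
    # gets the key (version, -3) while a wheel whose best tag is the most
    # preferred supported tag gets (version, 0), so at equal versions the
    # wheel is preferred.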

    def _validate_secure_origin(self, logger, location):
        # Determine if this url used a secure transport mechanism
        parsed = urllib_parse.urlparse(str(location))
        origin = (parsed.scheme, parsed.hostname, parsed.port)

        # The protocol to use to see if the protocol matches.
        # Don't count the repository type as part of the protocol: in
        # cases such as "git+ssh", only use "ssh". (I.e., Only verify against
        # the last scheme.)
        protocol = origin[0].rsplit('+', 1)[-1]

        # Determine if our origin is a secure origin by looking through our
        # hardcoded list of secure origins, as well as any additional ones
        # configured on this PackageFinder instance.
        for secure_origin in (SECURE_ORIGINS + self.secure_origins):
            if protocol != secure_origin[0] and secure_origin[0] != "*":
                continue

            try:
                # We need to do this decode dance to ensure that we have a
                # unicode object, even on Python 2.x.
                addr = ipaddress.ip_address(
                    origin[1]
                    if (
                        isinstance(origin[1], six.text_type) or
                        origin[1] is None
                    )
                    else origin[1].decode("utf8")
                )
                network = ipaddress.ip_network(
                    secure_origin[1]
                    if isinstance(secure_origin[1], six.text_type)
                    else secure_origin[1].decode("utf8")
                )
            except ValueError:
                # We don't have both a valid address or a valid network, so
                # we'll check this origin against hostnames.
                if (origin[1] and
                        origin[1].lower() != secure_origin[1].lower() and
                        secure_origin[1] != "*"):
                    continue
            else:
                # We have a valid address and network, so see if the address
                # is contained within the network.
                if addr not in network:
                    continue

            # Check to see if the port matches
            if (origin[2] != secure_origin[2] and
                    secure_origin[2] != "*" and
                    secure_origin[2] is not None):
                continue

            # If we've gotten here, then this origin matches the current
            # secure origin and we should return True
            return True

        # If we've gotten to this point, then the origin isn't secure and we
        # will not accept it as a valid location to search. We will however
        # log a warning that we are ignoring it.
        logger.warning(
            "The repository located at %s is not a trusted or secure host and "
            "is being ignored. If this repository is available via HTTPS it "
            "is recommended to use HTTPS instead, otherwise you may silence "
            "this warning and allow it anyways with '--trusted-host %s'.",
            parsed.hostname,
            parsed.hostname,
        )

        return False
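
    # Behaviour sketch (host names are examples only): "https://pypi.org/simple"
    # matches the ("https", "*", "*") rule above and is accepted, whereas
    # "http://mirror.internal/simple" matches nothing and only produces the
    # warning unless "--trusted-host mirror.internal" adds a
    # ("*", "mirror.internal", "*") entry to self.secure_origins.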

    def _get_index_urls_locations(self, project_name):
        """Returns the locations found via self.index_urls

        Checks the url_name on the main (first in the list) index and
        uses this url_name to produce all locations
        """

        def mkurl_pypi_url(url):
            loc = posixpath.join(
                url,
                urllib_parse.quote(canonicalize_name(project_name)))
            # For maximum compatibility with easy_install, ensure the path
            # ends in a trailing slash.  Although this isn't in the spec
            # (and PyPI can handle it without the slash) some other index
            # implementations might break if they relied on easy_install's
            # behavior.
            if not loc.endswith('/'):
                loc = loc + '/'
            return loc

        return [mkurl_pypi_url(url) for url in self.index_urls]

    def find_all_candidates(self, project_name):
        """Find all available InstallationCandidate for project_name

        This checks index_urls, find_links and dependency_links.
        All versions found are returned as an InstallationCandidate list.

        See _link_package_versions for details on which files are accepted
        """
        index_locations = self._get_index_urls_locations(project_name)
        index_file_loc, index_url_loc = self._sort_locations(index_locations)
        fl_file_loc, fl_url_loc = self._sort_locations(
            self.find_links, expand_dir=True)
        dep_file_loc, dep_url_loc = self._sort_locations(self.dependency_links)

        file_locations = (
            Link(url) for url in itertools.chain(
                index_file_loc, fl_file_loc, dep_file_loc)
        )

        # We trust every URL that the user has given us, whether it was given
        #   via --index-url or --find-links.
        # We explicitly do not trust links that came from dependency_links.
        # We want to filter out anything which does not have a secure origin.
        url_locations = [
            link for link in itertools.chain(
                (Link(url) for url in index_url_loc),
                (Link(url) for url in fl_url_loc),
                (Link(url) for url in dep_url_loc),
            )
            if self._validate_secure_origin(logger, link)
        ]

        logger.debug('%d location(s) to search for versions of %s:',
                     len(url_locations), project_name)

        for location in url_locations:
            logger.debug('* %s', location)

        canonical_name = canonicalize_name(project_name)
        formats = fmt_ctl_formats(self.format_control, canonical_name)
        search = Search(project_name, canonical_name, formats)
        find_links_versions = self._package_versions(
            # We trust every directly linked archive in find_links
            (Link(url, '-f') for url in self.find_links),
            search
        )

        page_versions = []
        for page in self._get_pages(url_locations, project_name):
            logger.debug('Analyzing links from page %s', page.url)
            with indent_log():
                page_versions.extend(
                    self._package_versions(page.links, search)
                )

        dependency_versions = self._package_versions(
            (Link(url) for url in self.dependency_links), search
        )
        if dependency_versions:
            logger.debug(
                'dependency_links found: %s',
                ', '.join([
                    version.location.url for version in dependency_versions
                ])
            )

        file_versions = self._package_versions(file_locations, search)
        if file_versions:
            file_versions.sort(reverse=True)
            logger.debug(
                'Local files found: %s',
                ', '.join([
                    url_to_path(candidate.location.url)
                    for candidate in file_versions
                ])
            )

        # This is an intentional priority ordering
        return (
            file_versions + find_links_versions + page_versions +
            dependency_versions
        )

    def find_requirement(self, req, upgrade):
        """Try to find a Link matching req

        Expects req, an InstallRequirement and upgrade, a boolean
        Returns a Link if found,
        Raises DistributionNotFound or BestVersionAlreadyInstalled otherwise
        """
        all_candidates = self.find_all_candidates(req.name)

        # Filter out anything which doesn't match our specifier
        compatible_versions = set(
            req.specifier.filter(
                # We turn the version object into a str here because otherwise
                # when we're debundled but setuptools isn't, Python will see
                # packaging.version.Version and
                # pkg_resources._vendor.packaging.version.Version as different
                # types. This way we'll use a str as a common data interchange
                # format. If we stop using the pkg_resources provided specifier
                # and start using our own, we can drop the cast to str().
                [str(c.version) for c in all_candidates],
                prereleases=(
                    self.allow_all_prereleases
                    if self.allow_all_prereleases else None
                ),
            )
        )
        applicable_candidates = [
            # Again, converting to str to deal with debundling.
            c for c in all_candidates if str(c.version) in compatible_versions
        ]

        if applicable_candidates:
            best_candidate = max(applicable_candidates,
                                 key=self._candidate_sort_key)
        else:
            best_candidate = None

        if req.satisfied_by is not None:
            installed_version = parse_version(req.satisfied_by.version)
        else:
            installed_version = None

        if installed_version is None and best_candidate is None:
            logger.critical(
                'Could not find a version that satisfies the requirement %s '
                '(from versions: %s)',
                req,
                ', '.join(
                    sorted(
                        set(str(c.version) for c in all_candidates),
                        key=parse_version,
                    )
                )
            )

            raise DistributionNotFound(
                'No matching distribution found for %s' % req
            )

        best_installed = False
        if installed_version and (
                best_candidate is None or
                best_candidate.version <= installed_version):
            best_installed = True

        if not upgrade and installed_version is not None:
            if best_installed:
                logger.debug(
                    'Existing installed version (%s) is most up-to-date and '
                    'satisfies requirement',
                    installed_version,
                )
            else:
                logger.debug(
                    'Existing installed version (%s) satisfies requirement '
                    '(most up-to-date version is %s)',
                    installed_version,
                    best_candidate.version,
                )
            return None

        if best_installed:
            # We have an existing version, and it's the best version
            logger.debug(
                'Installed version (%s) is most up-to-date (past versions: '
                '%s)',
                installed_version,
                ', '.join(sorted(compatible_versions, key=parse_version)) or
                "none",
            )
            raise BestVersionAlreadyInstalled

        logger.debug(
            'Using version %s (newest of versions: %s)',
            best_candidate.version,
            ', '.join(sorted(compatible_versions, key=parse_version))
        )
        return best_candidate.location
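
    # Editor's sketch of the decision above, with made-up versions: if 1.4 is
    # installed and 1.4 is also the best candidate, find_requirement() returns
    # None when upgrade is False and raises BestVersionAlreadyInstalled when
    # upgrade is True; if a newer candidate (say 1.5) exists and upgrade is
    # True, the newer candidate's link is returned instead.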

    def _get_pages(self, locations, project_name):
        """
        Yields (page, page_url) from the given locations, skipping
        locations that have errors.
        """
        seen = set()
        for location in locations:
            if location in seen:
                continue
            seen.add(location)

            page = self._get_page(location)
            if page is None:
                continue

            yield page

    _py_version_re = re.compile(r'-py([123]\.?[0-9]?)$')

    def _sort_links(self, links):
        """
        Returns elements of links in order, non-egg links first, egg links
        second, while eliminating duplicates
        """
        eggs, no_eggs = [], []
        seen = set()
        for link in links:
            if link not in seen:
                seen.add(link)
                if link.egg_fragment:
                    eggs.append(link)
                else:
                    no_eggs.append(link)
        return no_eggs + eggs

    def _package_versions(self, links, search):
        result = []
        for link in self._sort_links(links):
            v = self._link_package_versions(link, search)
            if v is not None:
                result.append(v)
        return result

    def _log_skipped_link(self, link, reason):
        if link not in self.logged_links:
            logger.debug('Skipping link %s; %s', link, reason)
            self.logged_links.add(link)

    def _link_package_versions(self, link, search):
        """Return an InstallationCandidate or None"""
        version = None
        if link.egg_fragment:
            egg_info = link.egg_fragment
            ext = link.ext
        else:
            egg_info, ext = link.splitext()
            if not ext:
                self._log_skipped_link(link, 'not a file')
                return
            if ext not in SUPPORTED_EXTENSIONS:
                self._log_skipped_link(
                    link, 'unsupported archive format: %s' % ext)
                return
            if "binary" not in search.formats and ext == wheel_ext:
                self._log_skipped_link(
                    link, 'No binaries permitted for %s' % search.supplied)
                return
            if "macosx10" in link.path and ext == '.zip':
                self._log_skipped_link(link, 'macosx10 one')
                return
            if ext == wheel_ext:
                try:
                    wheel = Wheel(link.filename)
                except InvalidWheelFilename:
                    self._log_skipped_link(link, 'invalid wheel filename')
                    return
                if canonicalize_name(wheel.name) != search.canonical:
                    self._log_skipped_link(
                        link, 'wrong project name (not %s)' % search.supplied)
                    return

                if not wheel.supported(self.valid_tags):
                    self._log_skipped_link(
                        link, 'it is not compatible with this Python')
                    return

                version = wheel.version

        # This should be up by the search.ok_binary check, but see issue 2700.
        if "source" not in search.formats and ext != wheel_ext:
            self._log_skipped_link(
                link, 'No sources permitted for %s' % search.supplied)
            return

        if not version:
            version = egg_info_matches(egg_info, search.supplied, link)
        if version is None:
            self._log_skipped_link(
                link, 'wrong project name (not %s)' % search.supplied)
            return

        match = self._py_version_re.search(version)
        if match:
            version = version[:match.start()]
            py_version = match.group(1)
            if py_version != sys.version[:3]:
                self._log_skipped_link(
                    link, 'Python version is incorrect')
                return
        try:
            support_this_python = check_requires_python(link.requires_python)
        except specifiers.InvalidSpecifier:
            logger.debug("Package %s has an invalid Requires-Python entry: %s",
                         link.filename, link.requires_python)
            support_this_python = True

        if not support_this_python:
            logger.debug("The package %s is incompatible with the python"
                         "version in use. Acceptable python versions are:%s",
                         link, link.requires_python)
            return
        logger.debug('Found link %s, version: %s', link, version)

        return InstallationCandidate(search.supplied, version, link)
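
    # Editor's sketch (hypothetical filename): for a link named
    # "somepkg-1.0-py2.7.tar.gz" searched as "somepkg", egg_info_matches()
    # first yields "1.0-py2.7"; _py_version_re then strips the suffix, leaving
    # version "1.0", and the link is skipped unless "2.7" matches the
    # interpreter's sys.version[:3].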

    def _get_page(self, link):
        return HTMLPage.get_page(link, session=self.session)


def egg_info_matches(
        egg_info, search_name, link,
        _egg_info_re=re.compile(r'([a-z0-9_.]+)-([a-z0-9_.!+-]+)', re.I)):
    """Pull the version part out of a string.

    :param egg_info: The string to parse. E.g. foo-2.1
    :param search_name: The name of the package this belongs to. None to
        infer the name. Note that this cannot unambiguously parse strings
        like foo-2-2 which might be foo, 2-2 or foo-2, 2.
    :param link: The link the string came from, for logging on failure.
    """
    match = _egg_info_re.search(egg_info)
    if not match:
        logger.debug('Could not parse version from link: %s', link)
        return None
    if search_name is None:
        full_match = match.group(0)
        return full_match[full_match.index('-'):]
    name = match.group(0).lower()
    # To match the "safe" name that pkg_resources creates:
    name = name.replace('_', '-')
    # project name and version must be separated by a dash
    look_for = search_name.lower() + "-"
    if name.startswith(look_for):
        return match.group(0)[len(look_for):]
    else:
        return None
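
# Editor's sketch (not part of pip): a minimal illustration of how
# egg_info_matches() splits version strings.  The names below are made up.
def _example_egg_info_matches():
    # With a known project name, only the version part is returned.
    assert egg_info_matches('foo-2.1', 'foo', link=None) == '2.1'
    # Without a name to anchor on, everything after the first dash is kept,
    # which is why strings like "foo-2-2" are ambiguous.
    assert egg_info_matches('foo-2.1', None, link=None) == '-2.1'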


class HTMLPage(object):
    """Represents one page, along with its URL"""

    def __init__(self, content, url, headers=None):
        # Determine if we have any encoding information in our headers
        encoding = None
        if headers and "Content-Type" in headers:
            content_type, params = cgi.parse_header(headers["Content-Type"])

            if "charset" in params:
                encoding = params['charset']

        self.content = content
        self.parsed = html5lib.parse(
            self.content,
            transport_encoding=encoding,
            namespaceHTMLElements=False,
        )
        self.url = url
        self.headers = headers

    def __str__(self):
        return self.url

    @classmethod
    def get_page(cls, link, skip_archives=True, session=None):
        if session is None:
            raise TypeError(
                "get_page() missing 1 required keyword argument: 'session'"
            )

        url = link.url
        url = url.split('#', 1)[0]

        # Check for VCS schemes that do not support lookup as web pages.
        from pip.vcs import VcsSupport
        for scheme in VcsSupport.schemes:
            if url.lower().startswith(scheme) and url[len(scheme)] in '+:':
                logger.debug('Cannot look at %s URL %s', scheme, link)
                return None

        try:
            if skip_archives:
                filename = link.filename
                for bad_ext in ARCHIVE_EXTENSIONS:
                    if filename.endswith(bad_ext):
                        content_type = cls._get_content_type(
                            url, session=session,
                        )
                        if content_type.lower().startswith('text/html'):
                            break
                        else:
                            logger.debug(
                                'Skipping page %s because of Content-Type: %s',
                                link,
                                content_type,
                            )
                            return

            logger.debug('Getting page %s', url)

            # Tack index.html onto file:// URLs that point to directories
            (scheme, netloc, path, params, query, fragment) = \
                urllib_parse.urlparse(url)
            if (scheme == 'file' and
                    os.path.isdir(urllib_request.url2pathname(path))):
                # add trailing slash if not present so urljoin doesn't trim
                # final segment
                if not url.endswith('/'):
                    url += '/'
                url = urllib_parse.urljoin(url, 'index.html')
                logger.debug(' file: URL is directory, getting %s', url)

            resp = session.get(
                url,
                headers={
                    "Accept": "text/html",
                    "Cache-Control": "max-age=600",
                },
            )
            resp.raise_for_status()

            # The check for archives above only works if the URL ends with
            # something that looks like an archive. However, that is not a
            # requirement of a URL. Unless we issue a HEAD request on every
            # URL we cannot know ahead of time for sure if something is HTML
            # or not. However, we can check after we've downloaded it.
            content_type = resp.headers.get('Content-Type', 'unknown')
            if not content_type.lower().startswith("text/html"):
                logger.debug(
                    'Skipping page %s because of Content-Type: %s',
                    link,
                    content_type,
                )
                return

            inst = cls(resp.content, resp.url, resp.headers)
        except requests.HTTPError as exc:
            cls._handle_fail(link, exc, url)
        except SSLError as exc:
            reason = ("There was a problem confirming the ssl certificate: "
                      "%s" % exc)
            cls._handle_fail(link, reason, url, meth=logger.info)
        except requests.ConnectionError as exc:
            cls._handle_fail(link, "connection error: %s" % exc, url)
        except requests.Timeout:
            cls._handle_fail(link, "timed out", url)
        else:
            return inst

    @staticmethod
    def _handle_fail(link, reason, url, meth=None):
        if meth is None:
            meth = logger.debug

        meth("Could not fetch URL %s: %s - skipping", link, reason)

    @staticmethod
    def _get_content_type(url, session):
        """Get the Content-Type of the given url, using a HEAD request"""
        scheme, netloc, path, query, fragment = urllib_parse.urlsplit(url)
        if scheme not in ('http', 'https'):
            # FIXME: some warning or something?
            # assertion error?
            return ''

        resp = session.head(url, allow_redirects=True)
        resp.raise_for_status()

        return resp.headers.get("Content-Type", "")

    @cached_property
    def base_url(self):
        bases = [
            x for x in self.parsed.findall(".//base")
            if x.get("href") is not None
        ]
        if bases and bases[0].get("href"):
            return bases[0].get("href")
        else:
            return self.url

    @property
    def links(self):
        """Yields all links in the page"""
        for anchor in self.parsed.findall(".//a"):
            if anchor.get("href"):
                href = anchor.get("href")
                url = self.clean_link(
                    urllib_parse.urljoin(self.base_url, href)
                )
                pyrequire = anchor.get('data-requires-python')
                pyrequire = unescape(pyrequire) if pyrequire else None
                yield Link(url, self, requires_python=pyrequire)

    _clean_re = re.compile(r'[^a-z0-9$&+,/:;=?@.#%_\\|-]', re.I)

    def clean_link(self, url):
        """Makes sure a link is fully encoded.  That is, if a ' ' shows up in
        the link, it will be rewritten to %20 (while not over-quoting
        % or other characters)."""
        return self._clean_re.sub(
            lambda match: '%%%2x' % ord(match.group(0)), url)
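
    # Editor's sketch (hypothetical href): clean_link() percent-encodes only
    # the characters outside its whitelist, e.g.
    #
    #     clean_link('https://example.com/my file.tar.gz')
    #     == 'https://example.com/my%20file.tar.gz'
    #
    # while '%' and the other listed characters are left untouched.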


class Link(object):

    def __init__(self, url, comes_from=None, requires_python=None):
        """
        Object representing a parsed link from https://pypi.python.org/simple/*

        url:
            url of the resource pointed to (href of the link)
        comes_from:
            instance of HTMLPage where the link was found, or string.
        requires_python:
            String containing the `Requires-Python` metadata field, specified
            in PEP 345. This may be specified by a data-requires-python
            attribute in the HTML link tag, as described in PEP 503.
        """

        # url can be a UNC windows share
        if url.startswith('\\\\'):
            url = path_to_url(url)

        self.url = url
        self.comes_from = comes_from
        self.requires_python = requires_python if requires_python else None

    def __str__(self):
        if self.requires_python:
            rp = ' (requires-python:%s)' % self.requires_python
        else:
            rp = ''
        if self.comes_from:
            return '%s (from %s)%s' % (self.url, self.comes_from, rp)
        else:
            return str(self.url)

    def __repr__(self):
        return '<Link %s>' % self

    def __eq__(self, other):
        if not isinstance(other, Link):
            return NotImplemented
        return self.url == other.url

    def __ne__(self, other):
        if not isinstance(other, Link):
            return NotImplemented
        return self.url != other.url

    def __lt__(self, other):
        if not isinstance(other, Link):
            return NotImplemented
        return self.url < other.url

    def __le__(self, other):
        if not isinstance(other, Link):
            return NotImplemented
        return self.url <= other.url

    def __gt__(self, other):
        if not isinstance(other, Link):
            return NotImplemented
        return self.url > other.url

    def __ge__(self, other):
        if not isinstance(other, Link):
            return NotImplemented
        return self.url >= other.url

    def __hash__(self):
        return hash(self.url)

    @property
    def filename(self):
        _, netloc, path, _, _ = urllib_parse.urlsplit(self.url)
        name = posixpath.basename(path.rstrip('/')) or netloc
        name = urllib_parse.unquote(name)
        assert name, ('URL %r produced no filename' % self.url)
        return name

    @property
    def scheme(self):
        return urllib_parse.urlsplit(self.url)[0]

    @property
    def netloc(self):
        return urllib_parse.urlsplit(self.url)[1]

    @property
    def path(self):
        return urllib_parse.unquote(urllib_parse.urlsplit(self.url)[2])

    def splitext(self):
        return splitext(posixpath.basename(self.path.rstrip('/')))

    @property
    def ext(self):
        return self.splitext()[1]

    @property
    def url_without_fragment(self):
        scheme, netloc, path, query, fragment = urllib_parse.urlsplit(self.url)
        return urllib_parse.urlunsplit((scheme, netloc, path, query, None))

    _egg_fragment_re = re.compile(r'[#&]egg=([^&]*)')

    @property
    def egg_fragment(self):
        match = self._egg_fragment_re.search(self.url)
        if not match:
            return None
        return match.group(1)

    _subdirectory_fragment_re = re.compile(r'[#&]subdirectory=([^&]*)')

    @property
    def subdirectory_fragment(self):
        match = self._subdirectory_fragment_re.search(self.url)
        if not match:
            return None
        return match.group(1)

    _hash_re = re.compile(
        r'(sha1|sha224|sha384|sha256|sha512|md5)=([a-f0-9]+)'
    )

    @property
    def hash(self):
        match = self._hash_re.search(self.url)
        if match:
            return match.group(2)
        return None

    @property
    def hash_name(self):
        match = self._hash_re.search(self.url)
        if match:
            return match.group(1)
        return None

    @property
    def show_url(self):
        return posixpath.basename(self.url.split('#', 1)[0].split('?', 1)[0])

    @property
    def is_wheel(self):
        return self.ext == wheel_ext

    @property
    def is_artifact(self):
        """
        Determines if this points to an actual artifact (e.g. a tarball) or if
        it points to an "abstract" thing like a path or a VCS location.
        """
        from pip.vcs import vcs

        if self.scheme in vcs.all_schemes:
            return False

        return True
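
# Editor's sketch (not part of pip): how the fragment properties above pick a
# URL apart.  The URL is made up for illustration.
def _example_link_fragments():
    link = Link('https://example.com/pkg-1.0.tar.gz'
                '#egg=pkg&subdirectory=src&md5=0123456789abcdef')
    assert link.egg_fragment == 'pkg'
    assert link.subdirectory_fragment == 'src'
    assert link.hash_name == 'md5'
    assert link.hash == '0123456789abcdef'
    assert link.url_without_fragment == 'https://example.com/pkg-1.0.tar.gz'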


FormatControl = namedtuple('FormatControl', 'no_binary only_binary')
"""This object has two fields, no_binary and only_binary.

If a field is falsy, it isn't set. If it is {':all:'}, it should match all
packages except those listed in the other field. Only one field can be set
to {':all:'} at a time. The rest of the time exact package name matches
are listed, with any given package only showing up in one field at a time.
"""


def fmt_ctl_handle_mutual_exclude(value, target, other):
    new = value.split(',')
    while ':all:' in new:
        other.clear()
        target.clear()
        target.add(':all:')
        del new[:new.index(':all:') + 1]
        if ':none:' not in new:
            # Without a none, we want to discard everything as :all: covers it
            return
    for name in new:
        if name == ':none:':
            target.clear()
            continue
        name = canonicalize_name(name)
        other.discard(name)
        target.add(name)
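
# Editor's sketch (made-up names): given the target/other sets behind
# --no-binary / --only-binary, a value of ":all:" clears the other set and
# replaces the target with {':all:'}; a value like "foo,:none:,bar" first adds
# "foo", then ":none:" clears the target again, so only "bar" remains.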


def fmt_ctl_formats(fmt_ctl, canonical_name):
    result = set(["binary", "source"])
    if canonical_name in fmt_ctl.only_binary:
        result.discard('source')
    elif canonical_name in fmt_ctl.no_binary:
        result.discard('binary')
    elif ':all:' in fmt_ctl.only_binary:
        result.discard('source')
    elif ':all:' in fmt_ctl.no_binary:
        result.discard('binary')
    return frozenset(result)
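

# Editor's sketch (not part of pip): exercising fmt_ctl_formats() with
# hypothetical package names to show that per-package entries win over :all:.
def _example_fmt_ctl_formats():
    fc = FormatControl(no_binary={':all:'}, only_binary={'pyyaml'})
    assert fmt_ctl_formats(fc, 'pyyaml') == frozenset(['binary'])
    assert fmt_ctl_formats(fc, 'anything-else') == frozenset(['source'])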


def fmt_ctl_no_binary(fmt_ctl):
    fmt_ctl_handle_mutual_exclude(
        ':all:', fmt_ctl.no_binary, fmt_ctl.only_binary)


def fmt_ctl_no_use_wheel(fmt_ctl):
    fmt_ctl_no_binary(fmt_ctl)
    warnings.warn(
        '--no-use-wheel is deprecated and will be removed in the future. '
        ' Please use --no-binary :all: instead.', RemovedInPip10Warning,
        stacklevel=2)


Search = namedtuple('Search', 'supplied canonical formats')
"""Capture key aspects of a search.

:attribute supplied: The user supplied package.
:attribute canonical: The canonical package name.
:attribute formats: The formats allowed for this package. Should be a set
    with 'binary' or 'source' or both in it.
"""


# site-packages/pip/status_codes.py
from __future__ import absolute_import

SUCCESS = 0
ERROR = 1
UNKNOWN_ERROR = 2
VIRTUALENV_NOT_FOUND = 3
PREVIOUS_BUILD_DIR_ERROR = 4
NO_MATCHES_FOUND = 23


# site-packages/pip/exceptions.py
"""Exceptions used throughout package"""
from __future__ import absolute_import

from itertools import chain, groupby, repeat

from pip._vendor.six import iteritems


class PipError(Exception):
    """Base pip exception"""


class InstallationError(PipError):
    """General exception during installation"""


class UninstallationError(PipError):
    """General exception during uninstallation"""


class DistributionNotFound(InstallationError):
    """Raised when a distribution cannot be found to satisfy a requirement"""


class RequirementsFileParseError(InstallationError):
    """Raised when a general error occurs parsing a requirements file line."""


class BestVersionAlreadyInstalled(PipError):
    """Raised when the most up-to-date version of a package is already
    installed."""


class BadCommand(PipError):
    """Raised when virtualenv or a command is not found"""


class CommandError(PipError):
    """Raised when there is an error in command-line arguments"""


class PreviousBuildDirError(PipError):
    """Raised when there's a previous conflicting build directory"""


class InvalidWheelFilename(InstallationError):
    """Invalid wheel filename."""


class UnsupportedWheel(InstallationError):
    """Unsupported wheel."""


class HashErrors(InstallationError):
    """Multiple HashError instances rolled into one for reporting"""

    def __init__(self):
        self.errors = []

    def append(self, error):
        self.errors.append(error)

    def __str__(self):
        lines = []
        self.errors.sort(key=lambda e: e.order)
        for cls, errors_of_cls in groupby(self.errors, lambda e: e.__class__):
            lines.append(cls.head)
            lines.extend(e.body() for e in errors_of_cls)
        if lines:
            return '\n'.join(lines)

    def __nonzero__(self):
        return bool(self.errors)

    def __bool__(self):
        return self.__nonzero__()


class HashError(InstallationError):
    """
    A failure to verify a package against known-good hashes

    :cvar order: An int sorting hash exception classes by difficulty of
        recovery (lower being harder), so the user doesn't bother fretting
        about unpinned packages when he has deeper issues, like VCS
        dependencies, to deal with. Also keeps error reports in a
        deterministic order.
    :cvar head: A section heading for display above potentially many
        exceptions of this kind
    :ivar req: The InstallRequirement that triggered this error. This is
        pasted on after the exception is instantiated, because it's not
        typically available earlier.

    """
    req = None
    head = ''

    def body(self):
        """Return a summary of me for display under the heading.

        This default implementation simply prints a description of the
        triggering requirement.

        :param req: The InstallRequirement that provoked this error, with
            populate_link() having already been called

        """
        return '    %s' % self._requirement_name()

    def __str__(self):
        return '%s\n%s' % (self.head, self.body())

    def _requirement_name(self):
        """Return a description of the requirement that triggered me.

        This default implementation returns long description of the req, with
        line numbers

        """
        return str(self.req) if self.req else 'unknown package'


class VcsHashUnsupported(HashError):
    """A hash was provided for a version-control-system-based requirement, but
    we don't have a method for hashing those."""

    order = 0
    head = ("Can't verify hashes for these requirements because we don't "
            "have a way to hash version control repositories:")


class DirectoryUrlHashUnsupported(HashError):
    """A hash was provided for a version-control-system-based requirement, but
    we don't have a method for hashing those."""

    order = 1
    head = ("Can't verify hashes for these file:// requirements because they "
            "point to directories:")


class HashMissing(HashError):
    """A hash was needed for a requirement but is absent."""

    order = 2
    head = ('Hashes are required in --require-hashes mode, but they are '
            'missing from some requirements. Here is a list of those '
            'requirements along with the hashes their downloaded archives '
            'actually had. Add lines like these to your requirements files to '
            'prevent tampering. (If you did not enable --require-hashes '
            'manually, note that it turns on automatically when any package '
            'has a hash.)')

    def __init__(self, gotten_hash):
        """
        :param gotten_hash: The hash of the (possibly malicious) archive we
            just downloaded
        """
        self.gotten_hash = gotten_hash

    def body(self):
        from pip.utils.hashes import FAVORITE_HASH  # Dodge circular import.

        package = None
        if self.req:
            # In the case of URL-based requirements, display the original URL
            # seen in the requirements file rather than the package name,
            # so the output can be directly copied into the requirements file.
            package = (self.req.original_link if self.req.original_link
                       # In case someone feeds something downright stupid
                       # to InstallRequirement's constructor.
                       else getattr(self.req, 'req', None))
        return '    %s --hash=%s:%s' % (package or 'unknown package',
                                        FAVORITE_HASH,
                                        self.gotten_hash)
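
    # Editor's sketch: with pip's favorite hash algorithm (sha256) this renders
    # lines such as
    #     somepackage==1.2.3 --hash=sha256:<64 hex characters>
    # ready to be pasted into a requirements file; the package name is made up.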


class HashUnpinned(HashError):
    """A requirement had a hash specified but was not pinned to a specific
    version."""

    order = 3
    head = ('In --require-hashes mode, all requirements must have their '
            'versions pinned with ==. These do not:')


class HashMismatch(HashError):
    """
    Distribution file hash values don't match.

    :ivar package_name: The name of the package that triggered the hash
        mismatch. Feel free to write to this after the exception is raised to
        improve its error message.

    """
    order = 4
    head = ('THESE PACKAGES DO NOT MATCH THE HASHES FROM THE REQUIREMENTS '
            'FILE. If you have updated the package versions, please update '
            'the hashes. Otherwise, examine the package contents carefully; '
            'someone may have tampered with them.')

    def __init__(self, allowed, gots):
        """
        :param allowed: A dict of algorithm names pointing to lists of allowed
            hex digests
        :param gots: A dict of algorithm names pointing to hashes we
            actually got from the files under suspicion
        """
        self.allowed = allowed
        self.gots = gots

    def body(self):
        return '    %s:\n%s' % (self._requirement_name(),
                                self._hash_comparison())

    def _hash_comparison(self):
        """
        Return a comparison of actual and expected hash values.

        Example::

               Expected sha256 abcdeabcdeabcdeabcdeabcdeabcdeabcdeabcdeabcde
                            or 123451234512345123451234512345123451234512345
                    Got        bcdefbcdefbcdefbcdefbcdefbcdefbcdefbcdefbcdef

        """
        def hash_then_or(hash_name):
            # For now, all the decent hashes have 6-char names, so we can get
            # away with hard-coding space literals.
            return chain([hash_name], repeat('    or'))

        lines = []
        for hash_name, expecteds in iteritems(self.allowed):
            prefix = hash_then_or(hash_name)
            lines.extend(('        Expected %s %s' % (next(prefix), e))
                         for e in expecteds)
            lines.append('             Got        %s\n' %
                         self.gots[hash_name].hexdigest())
            prefix = '    or'
        return '\n'.join(lines)


class UnsupportedPythonVersion(InstallationError):
    """Unsupported python version according to Requires-Python package
    metadata."""


# site-packages/pip/__main__.py
from __future__ import absolute_import

import os
import sys

# If we are running from a wheel, add the wheel to sys.path
# This allows the usage python pip-*.whl/pip install pip-*.whl
if __package__ == '':
    # __file__ is pip-*.whl/pip/__main__.py
    # first dirname call strips off '/__main__.py', second strips off '/pip'
    # Resulting path is the name of the wheel itself
    # Add that to sys.path so we can import pip
    path = os.path.dirname(os.path.dirname(__file__))
    sys.path.insert(0, path)

import pip  # noqa

if __name__ == '__main__':
    sys.exit(pip.main())


# site-packages/pip/models/index.py
from pip._vendor.six.moves.urllib import parse as urllib_parse


class Index(object):
    def __init__(self, url):
        self.url = url
        self.netloc = urllib_parse.urlsplit(url).netloc
        self.simple_url = self.url_to_path('simple')
        self.pypi_url = self.url_to_path('pypi')
        self.pip_json_url = self.url_to_path('pypi/pip/json')

    def url_to_path(self, path):
        return urllib_parse.urljoin(self.url, path)


PyPI = Index('https://pypi.python.org/')
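
# Editor's note: with the URL above, PyPI.simple_url resolves to
# 'https://pypi.python.org/simple' and PyPI.pypi_url to
# 'https://pypi.python.org/pypi' via urljoin() in url_to_path().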


# site-packages/pip/models/__init__.py
from pip.models.index import Index, PyPI


__all__ = ["Index", "PyPI"]


# site-packages/pip/cmdoptions.py
"""
shared options and groups

The principle here is to define options once, but *not* instantiate them
globally. One reason being that options with action='append' can carry state
between parses. pip parses general options twice internally, and shouldn't
pass on state. To be consistent, all options will follow this design.

"""
from __future__ import absolute_import

from functools import partial
from optparse import OptionGroup, SUPPRESS_HELP, Option
import warnings

from pip.index import (
    FormatControl, fmt_ctl_handle_mutual_exclude, fmt_ctl_no_binary,
    fmt_ctl_no_use_wheel)
from pip.models import PyPI
from pip.locations import USER_CACHE_DIR, src_prefix
from pip.utils.hashes import STRONG_HASHES


def make_option_group(group, parser):
    """
    Return an OptionGroup object
    group  -- assumed to be dict with 'name' and 'options' keys
    parser -- an optparse Parser
    """
    option_group = OptionGroup(parser, group['name'])
    for option in group['options']:
        option_group.add_option(option())
    return option_group
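
# Editor's sketch (hypothetical group): the expected shape of `group` is a
# display name plus a list of option factories, e.g.
#
#     example_group = {
#         'name': 'Example Options',
#         'options': [verbose, quiet],
#     }
#     # make_option_group(example_group, parser) then instantiates each one.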


def resolve_wheel_no_use_binary(options):
    if not options.use_wheel:
        control = options.format_control
        fmt_ctl_no_use_wheel(control)


def check_install_build_global(options, check_options=None):
    """Disable wheels if per-setup.py call options are set.

    :param options: The OptionParser options to update.
    :param check_options: The options to check, if not supplied defaults to
        options.
    """
    if check_options is None:
        check_options = options

    def getname(n):
        return getattr(check_options, n, None)
    names = ["build_options", "global_options", "install_options"]
    if any(map(getname, names)):
        control = options.format_control
        fmt_ctl_no_binary(control)
        warnings.warn(
            'Disabling all use of wheels due to the use of --build-options '
            '/ --global-options / --install-options.', stacklevel=2)


###########
# options #
###########

help_ = partial(
    Option,
    '-h', '--help',
    dest='help',
    action='help',
    help='Show help.')

isolated_mode = partial(
    Option,
    "--isolated",
    dest="isolated_mode",
    action="store_true",
    default=False,
    help=(
        "Run pip in an isolated mode, ignoring environment variables and user "
        "configuration."
    ),
)

require_virtualenv = partial(
    Option,
    # Run only if inside a virtualenv, bail if not.
    '--require-virtualenv', '--require-venv',
    dest='require_venv',
    action='store_true',
    default=False,
    help=SUPPRESS_HELP)

verbose = partial(
    Option,
    '-v', '--verbose',
    dest='verbose',
    action='count',
    default=0,
    help='Give more output. Option is additive, and can be used up to 3 times.'
)

version = partial(
    Option,
    '-V', '--version',
    dest='version',
    action='store_true',
    help='Show version and exit.')

quiet = partial(
    Option,
    '-q', '--quiet',
    dest='quiet',
    action='count',
    default=0,
    help=('Give less output. Option is additive, and can be used up to 3'
          ' times (corresponding to WARNING, ERROR, and CRITICAL logging'
          ' levels).')
)

log = partial(
    Option,
    "--log", "--log-file", "--local-log",
    dest="log",
    metavar="path",
    help="Path to a verbose appending log."
)

no_input = partial(
    Option,
    # Don't ask for input
    '--no-input',
    dest='no_input',
    action='store_true',
    default=False,
    help=SUPPRESS_HELP)

proxy = partial(
    Option,
    '--proxy',
    dest='proxy',
    type='str',
    default='',
    help="Specify a proxy in the form [user:passwd@]proxy.server:port.")

retries = partial(
    Option,
    '--retries',
    dest='retries',
    type='int',
    default=5,
    help="Maximum number of retries each connection should attempt "
         "(default %default times).")

timeout = partial(
    Option,
    '--timeout', '--default-timeout',
    metavar='sec',
    dest='timeout',
    type='float',
    default=15,
    help='Set the socket timeout (default %default seconds).')

default_vcs = partial(
    Option,
    # The default version control system for editables, e.g. 'svn'
    '--default-vcs',
    dest='default_vcs',
    type='str',
    default='',
    help=SUPPRESS_HELP)

skip_requirements_regex = partial(
    Option,
    # A regex to be used to skip requirements
    '--skip-requirements-regex',
    dest='skip_requirements_regex',
    type='str',
    default='',
    help=SUPPRESS_HELP)


def exists_action():
    return Option(
        # Option when path already exist
        '--exists-action',
        dest='exists_action',
        type='choice',
        choices=['s', 'i', 'w', 'b', 'a'],
        default=[],
        action='append',
        metavar='action',
        help="Default action when a path already exists: "
        "(s)witch, (i)gnore, (w)ipe, (b)ackup, (a)bort.")


cert = partial(
    Option,
    '--cert',
    dest='cert',
    type='str',
    metavar='path',
    help="Path to alternate CA bundle.")

client_cert = partial(
    Option,
    '--client-cert',
    dest='client_cert',
    type='str',
    default=None,
    metavar='path',
    help="Path to SSL client certificate, a single file containing the "
         "private key and the certificate in PEM format.")

index_url = partial(
    Option,
    '-i', '--index-url', '--pypi-url',
    dest='index_url',
    metavar='URL',
    default=PyPI.simple_url,
    help="Base URL of Python Package Index (default %default). "
         "This should point to a repository compliant with PEP 503 "
         "(the simple repository API) or a local directory laid out "
         "in the same format.")


def extra_index_url():
    return Option(
        '--extra-index-url',
        dest='extra_index_urls',
        metavar='URL',
        action='append',
        default=[],
        help="Extra URLs of package indexes to use in addition to "
             "--index-url. Should follow the same rules as "
             "--index-url."
    )


no_index = partial(
    Option,
    '--no-index',
    dest='no_index',
    action='store_true',
    default=False,
    help='Ignore package index (only looking at --find-links URLs instead).')


def find_links():
    return Option(
        '-f', '--find-links',
        dest='find_links',
        action='append',
        default=[],
        metavar='url',
        help="If a url or path to an html file, then parse for links to "
             "archives. If a local path or file:// url that's a directory, "
             "then look for archives in the directory listing.")


def allow_external():
    return Option(
        "--allow-external",
        dest="allow_external",
        action="append",
        default=[],
        metavar="PACKAGE",
        help=SUPPRESS_HELP,
    )


allow_all_external = partial(
    Option,
    "--allow-all-external",
    dest="allow_all_external",
    action="store_true",
    default=False,
    help=SUPPRESS_HELP,
)


def trusted_host():
    return Option(
        "--trusted-host",
        dest="trusted_hosts",
        action="append",
        metavar="HOSTNAME",
        default=[],
        help="Mark this host as trusted, even though it does not have valid "
             "or any HTTPS.",
    )


# Remove after 7.0
no_allow_external = partial(
    Option,
    "--no-allow-external",
    dest="allow_all_external",
    action="store_false",
    default=False,
    help=SUPPRESS_HELP,
)


# Remove --allow-insecure after 7.0
def allow_unsafe():
    return Option(
        "--allow-unverified", "--allow-insecure",
        dest="allow_unverified",
        action="append",
        default=[],
        metavar="PACKAGE",
        help=SUPPRESS_HELP,
    )

# Remove after 7.0
no_allow_unsafe = partial(
    Option,
    "--no-allow-insecure",
    dest="allow_all_insecure",
    action="store_false",
    default=False,
    help=SUPPRESS_HELP
)

# Remove after 1.5
process_dependency_links = partial(
    Option,
    "--process-dependency-links",
    dest="process_dependency_links",
    action="store_true",
    default=False,
    help="Enable the processing of dependency links.",
)


def constraints():
    return Option(
        '-c', '--constraint',
        dest='constraints',
        action='append',
        default=[],
        metavar='file',
        help='Constrain versions using the given constraints file. '
        'This option can be used multiple times.')


def requirements():
    return Option(
        '-r', '--requirement',
        dest='requirements',
        action='append',
        default=[],
        metavar='file',
        help='Install from the given requirements file. '
        'This option can be used multiple times.')


def editable():
    return Option(
        '-e', '--editable',
        dest='editables',
        action='append',
        default=[],
        metavar='path/url',
        help=('Install a project in editable mode (i.e. setuptools '
              '"develop mode") from a local project path or a VCS url.'),
    )

src = partial(
    Option,
    '--src', '--source', '--source-dir', '--source-directory',
    dest='src_dir',
    metavar='dir',
    default=src_prefix,
    help='Directory to check out editable projects into. '
    'The default in a virtualenv is "<venv path>/src". '
    'The default for global installs is "<current dir>/src".'
)

# XXX: deprecated, remove in 9.0
use_wheel = partial(
    Option,
    '--use-wheel',
    dest='use_wheel',
    action='store_true',
    default=True,
    help=SUPPRESS_HELP,
)

# XXX: deprecated, remove in 9.0
no_use_wheel = partial(
    Option,
    '--no-use-wheel',
    dest='use_wheel',
    action='store_false',
    default=True,
    help=('Do not find and prefer wheel archives when searching indexes and
          'find-links locations. DEPRECATED in favour of --no-binary.'),
)


def _get_format_control(values, option):
    """Get a format_control object."""
    return getattr(values, option.dest)


def _handle_no_binary(option, opt_str, value, parser):
    existing = getattr(parser.values, option.dest)
    fmt_ctl_handle_mutual_exclude(
        value, existing.no_binary, existing.only_binary)


def _handle_only_binary(option, opt_str, value, parser):
    existing = getattr(parser.values, option.dest)
    fmt_ctl_handle_mutual_exclude(
        value, existing.only_binary, existing.no_binary)


def no_binary():
    return Option(
        "--no-binary", dest="format_control", action="callback",
        callback=_handle_no_binary, type="str",
        default=FormatControl(set(), set()),
        help="Do not use binary packages. Can be supplied multiple times, and "
             "each time adds to the existing value. Accepts either :all: to "
             "disable all binary packages, :none: to empty the set, or one or "
             "more package names with commas between them. Note that some "
             "packages are tricky to compile and may fail to install when "
             "this option is used on them.")


def only_binary():
    return Option(
        "--only-binary", dest="format_control", action="callback",
        callback=_handle_only_binary, type="str",
        default=FormatControl(set(), set()),
        help="Do not use source packages. Can be supplied multiple times, and "
             "each time adds to the existing value. Accepts either :all: to "
             "disable all source packages, :none: to empty the set, or one or "
             "more package names with commas between them. Packages without "
             "binary distributions will fail to install when this option is "
             "used on them.")


cache_dir = partial(
    Option,
    "--cache-dir",
    dest="cache_dir",
    default=USER_CACHE_DIR,
    metavar="dir",
    help="Store the cache data in ."
)

no_cache = partial(
    Option,
    "--no-cache-dir",
    dest="cache_dir",
    action="store_false",
    help="Disable the cache.",
)

no_deps = partial(
    Option,
    '--no-deps', '--no-dependencies',
    dest='ignore_dependencies',
    action='store_true',
    default=False,
    help="Don't install package dependencies.")

build_dir = partial(
    Option,
    '-b', '--build', '--build-dir', '--build-directory',
    dest='build_dir',
    metavar='dir',
    help='Directory to unpack packages into and build in.'
)

ignore_requires_python = partial(
    Option,
    '--ignore-requires-python',
    dest='ignore_requires_python',
    action='store_true',
    help='Ignore the Requires-Python information.')

install_options = partial(
    Option,
    '--install-option',
    dest='install_options',
    action='append',
    metavar='options',
    help="Extra arguments to be supplied to the setup.py install "
         "command (use like --install-option=\"--install-scripts=/usr/local/"
         "bin\"). Use multiple --install-option options to pass multiple "
         "options to setup.py install. If you are using an option with a "
         "directory path, be sure to use absolute path.")

global_options = partial(
    Option,
    '--global-option',
    dest='global_options',
    action='append',
    metavar='options',
    help="Extra global options to be supplied to the setup.py "
         "call before the install command.")

no_clean = partial(
    Option,
    '--no-clean',
    action='store_true',
    default=False,
    help="Don't clean up build directories.")

pre = partial(
    Option,
    '--pre',
    action='store_true',
    default=False,
    help="Include pre-release and development versions. By default, "
         "pip only finds stable versions.")

disable_pip_version_check = partial(
    Option,
    "--disable-pip-version-check",
    dest="disable_pip_version_check",
    action="store_true",
    default=False,
    help="Don't periodically check PyPI to determine whether a new version "
         "of pip is available for download. Implied with --no-index.")

# Deprecated, Remove later
always_unzip = partial(
    Option,
    '-Z', '--always-unzip',
    dest='always_unzip',
    action='store_true',
    help=SUPPRESS_HELP,
)


def _merge_hash(option, opt_str, value, parser):
    """Given a value spelled "algo:digest", append the digest to a list
    pointed to in a dict by the algo name."""
    if not parser.values.hashes:
        parser.values.hashes = {}
    try:
        algo, digest = value.split(':', 1)
    except ValueError:
        parser.error('Arguments to %s must be a hash name '
                     'followed by a value, like --hash=sha256:abcde...' %
                     opt_str)
    if algo not in STRONG_HASHES:
        parser.error('Allowed hash algorithms for %s are %s.' %
                     (opt_str, ', '.join(STRONG_HASHES)))
    parser.values.hashes.setdefault(algo, []).append(digest)
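# Illustrative example (hypothetical digests): repeated flags such as
#   --hash=sha256:abc123 --hash=sha256:def456
# accumulate into parser.values.hashes == {'sha256': ['abc123', 'def456']};
# an algorithm outside STRONG_HASHES triggers parser.error() instead.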


hash = partial(
    Option,
    '--hash',
    # Hash values eventually end up in InstallRequirement.hashes due to
    # __dict__ copying in process_line().
    dest='hashes',
    action='callback',
    callback=_merge_hash,
    type='string',
    help="Verify that the package's archive matches this "
         'hash before installing. Example: --hash=sha256:abcdef...')


require_hashes = partial(
    Option,
    '--require-hashes',
    dest='require_hashes',
    action='store_true',
    default=False,
    help='Require a hash to check each requirement against, for '
         'repeatable installs. This option is implied when any package in a '
         'requirements file has a --hash option.')
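# Illustrative requirements-file line that activates hash-checking mode
# (hypothetical digest):
#   SomeProject==1.3 --hash=sha256:9f86d08...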


##########
# groups #
##########

general_group = {
    'name': 'General Options',
    'options': [
        help_,
        isolated_mode,
        require_virtualenv,
        verbose,
        version,
        quiet,
        log,
        no_input,
        proxy,
        retries,
        timeout,
        default_vcs,
        skip_requirements_regex,
        exists_action,
        trusted_host,
        cert,
        client_cert,
        cache_dir,
        no_cache,
        disable_pip_version_check,
    ]
}

non_deprecated_index_group = {
    'name': 'Package Index Options',
    'options': [
        index_url,
        extra_index_url,
        no_index,
        find_links,
        process_dependency_links,
    ]
}

index_group = {
    'name': 'Package Index Options (including deprecated options)',
    'options': non_deprecated_index_group['options'] + [
        allow_external,
        allow_all_external,
        no_allow_external,
        allow_unsafe,
        no_allow_unsafe,
    ]
}
PKZ
=
= site-packages/pip/cmdoptions.pycnu[
abc@@sdZddlmZddlmZddlmZmZmZddl	Z	ddl
mZmZm
Z
mZddlmZddlmZmZdd	lmZd
ZdZddZeed
dddddddZeeddddddeddZeedddddddedeZeeddddddddddZeed d!dd"dddd#Zeed$d%dd&dddddd'Z eed(d)d*dd+d,d-dd.Z!eed/dd0dddedeZ"eed1dd2d3d4dd5dd6Z#eed7dd8d3d9dd:dd;Z$eed<d=d,d>dd?d3d@ddAddBZ%eedCddDd3d4dd5deZ&eedEddFd3d4dd5deZ'dGZ(eedHddId3d4d,d-ddJZ)eedKddLd3d4ddd,d-ddMZ*eedNdOdPddQd,dRdej+ddSZ,dTZ-eedUddVdddeddWZ.dXZ/dYZ0eedZdd[dddedeZ1d\Z2eed]dd[dd^dedeZ3d_Z4eed`ddadd^dedeZ5eedbddcdddedddZ6deZ7dfZ8dgZ9eedhdidjdkddld,dmdeddnZ:eedoddpddde;deZ<eedqddpdd^de;ddrZ=dsZ>dtZ?duZ@dvZAdwZBeedxddyded,dmddzZCeed{ddydd^dd|ZDeed}d~dddddeddZEeeddddddd,dmddZFeedddddddZGeedddddd,dddZHeedddddd,dddZIeeddddeddZJeeddddeddZKeeddddddeddZLeedddddddeZMdZNeeddddddeNd3dddZOeeddddddeddZPidd6eeeeee e!e"e#e$e%e&e'e(e2e)e*eCeDeLgd6ZQidd6e,e-e.e/e6gd6ZRidd6eRde0e1e3e4e5gd6ZSdS(sD
shared options and groups

The principle here is to define options once, but *not* instantiate them
globally. One reason being that options with action='append' can carry state
between parses. pip parses general options twice internally, and shouldn't
pass on state. To be consistent, all options will follow this design.

i(tabsolute_import(tpartial(tOptionGroupt
SUPPRESS_HELPtOptionN(t
FormatControltfmt_ctl_handle_mutual_excludetfmt_ctl_no_binarytfmt_ctl_no_use_wheel(tPyPI(tUSER_CACHE_DIRt
src_prefix(t
STRONG_HASHEScC@s<t||d}x"|dD]}|j|qW|S(s
    Return an OptionGroup object
    group  -- assumed to be dict with 'name' and 'options' keys
    parser -- an optparse Parser
    tnametoptions(Rt
add_option(tgrouptparsertoption_grouptoption((s2/usr/lib/python2.7/site-packages/pip/cmdoptions.pytmake_option_groupscC@s#|js|j}t|ndS(N(t	use_wheeltformat_controlR(Rtcontrol((s2/usr/lib/python2.7/site-packages/pip/cmdoptions.pytresolve_wheel_no_use_binary$s		c@sudkr|nfd}dddg}tt||rq|j}t|tjdddndS(	sDisable wheels if per-setup.py call options are set.

    :param options: The OptionParser options to update.
    :param check_options: The options to check, if not supplied defaults to
        options.
    c@st|dS(N(tgetattrtNone(tn(t
check_options(s2/usr/lib/python2.7/site-packages/pip/cmdoptions.pytgetname4st
build_optionstglobal_optionstinstall_optionsseDisabling all use of wheels due to the use of --build-options / --global-options / --install-options.t
stackleveliN(RtanytmapRRtwarningstwarn(RRRtnamesR((Rs2/usr/lib/python2.7/site-packages/pip/cmdoptions.pytcheck_install_build_global*s		
s-hs--helptdestthelptactions
Show help.s
--isolatedt
isolated_modet
store_truetdefaultsSRun pip in an isolated mode, ignoring environment variables and user configuration.s--require-virtualenvs--require-venvtrequire_venvs-vs	--verbosetverbosetcountsDGive more output. Option is additive, and can be used up to 3 times.s-Vs	--versiontversionsShow version and exit.s-qs--quiettquietsGive less output. Option is additive, and can be used up to 3 times (corresponding to WARNING, ERROR, and CRITICAL logging levels).s--logs
--log-files--local-logtlogtmetavartpaths Path to a verbose appending log.s
--no-inputtno_inputs--proxytproxyttypetstrts<Specify a proxy in the form [user:passwd@]proxy.server:port.s	--retriestretriestintisRMaximum number of retries each connection should attempt (default %default times).s	--timeouts--default-timeouttsecttimeouttfloatis2Set the socket timeout (default %default seconds).s
--default-vcstdefault_vcss--skip-requirements-regextskip_requirements_regexcC@sCtddddddddd	d
dgdgd
ddd
ddS(Ns--exists-actionR(t
exists_actionR8tchoicetchoiceststitwtbtaR-R*tappendR4R)sYDefault action when a path already exists: (s)witch, (i)gnore, (w)ipe, (b)ackup, (a)bort.(R(((s2/usr/lib/python2.7/site-packages/pip/cmdoptions.pyRBss--certtcertsPath to alternate CA bundle.s
--client-certtclient_certskPath to SSL client certificate, a single file containing the private key and the certificate in PEM format.s-is--index-urls
--pypi-urlt	index_urltURLsBase URL of Python Package Index (default %default). This should point to a repository compliant with PEP 503 (the simple repository API) or a local directory laid out in the same format.cC@s(tddddddddgd	d
S(Ns--extra-index-urlR(textra_index_urlsR4RNR*RJR-R)smExtra URLs of package indexes to use in addition to --index-url. Should follow the same rules as --index-url.(R(((s2/usr/lib/python2.7/site-packages/pip/cmdoptions.pytextra_index_urlss
--no-indextno_indexsAIgnore package index (only looking at --find-links URLs instead).c
C@s+tdddddddgdd	d
dS(Ns-fs--find-linksR(t
find_linksR*RJR-R4turlR)sIf a url or path to an html file, then parse for links to archives. If a local path or file:// url that's a directory, then look for archives in the directory listing.(R(((s2/usr/lib/python2.7/site-packages/pip/cmdoptions.pyRRs	cC@s(tddddddgddd	tS(
Ns--allow-externalR(tallow_externalR*RJR-R4tPACKAGER)(RR(((s2/usr/lib/python2.7/site-packages/pip/cmdoptions.pyRTss--allow-all-externaltallow_all_externalcC@s(tddddddddgd	d
S(Ns--trusted-hostR(t
trusted_hostsR*RJR4tHOSTNAMER-R)sKMark this host as trusted, even though it does not have valid or any HTTPS.(R(((s2/usr/lib/python2.7/site-packages/pip/cmdoptions.pyttrusted_hostss--no-allow-externaltstore_falsec
C@s+tdddddddgdd	d
tS(Ns--allow-unverifieds--allow-insecureR(tallow_unverifiedR*RJR-R4RUR)(RR(((s2/usr/lib/python2.7/site-packages/pip/cmdoptions.pytallow_unsafe3s	s--no-allow-insecuretallow_all_insecures--process-dependency-linkstprocess_dependency_linkss*Enable the processing of dependency links.c
C@s+tdddddddgdd	d
dS(Ns-cs--constraintR(tconstraintsR*RJR-R4tfileR)s\Constrain versions using the given constraints file. This option can be used multiple times.(R(((s2/usr/lib/python2.7/site-packages/pip/cmdoptions.pyR_Rs	c
C@s+tdddddddgdd	d
dS(Ns-rs
--requirementR(trequirementsR*RJR-R4R`R)sQInstall from the given requirements file. This option can be used multiple times.(R(((s2/usr/lib/python2.7/site-packages/pip/cmdoptions.pyRa]s	c
C@s+tdddddddgdd	d
dS(Ns-es
--editableR(t	editablesR*RJR-R4spath/urlR)skInstall a project in editable mode (i.e. setuptools "develop mode") from a local project path or a VCS url.(R(((s2/usr/lib/python2.7/site-packages/pip/cmdoptions.pyteditablehs	s--srcs--sources--source-dirs--source-directorytsrc_dirtdirsDirectory to check out editable projects into. The default in a virtualenv is "/src". The default for global installs is "/src".s--use-wheelRs--no-use-wheels{Do not Find and prefer wheel archives when searching indexes and find-links locations. DEPRECATED in favour of --no-binary.cC@st||jS(sGet a format_control object.(RR((tvaluesR((s2/usr/lib/python2.7/site-packages/pip/cmdoptions.pyt_get_format_controlscC@s/t|j|j}t||j|jdS(N(RRfR(Rt	no_binarytonly_binary(Rtopt_strtvalueRtexisting((s2/usr/lib/python2.7/site-packages/pip/cmdoptions.pyt_handle_no_binaryscC@s/t|j|j}t||j|jdS(N(RRfR(RRiRh(RRjRkRRl((s2/usr/lib/python2.7/site-packages/pip/cmdoptions.pyt_handle_only_binaryscC@s=tddddddtdddtttd	d
S(Ns--no-binaryR(RR*tcallbackR8R9R-R)sRDo not use binary packages. Can be supplied multiple times, and each time adds to the existing value. Accepts either :all: to disable all binary packages, :none: to empty the set, or one or more package names with commas between them. Note that some packages are tricky to compile and may fail to install when this option is used on them.(RRmRtset(((s2/usr/lib/python2.7/site-packages/pip/cmdoptions.pyRhs
cC@s=tddddddtdddtttd	d
S(Ns
--only-binaryR(RR*RoR8R9R-R)sGDo not use source packages. Can be supplied multiple times, and each time adds to the existing value. Accepts either :all: to disable all source packages, :none: to empty the set, or one or more package names with commas between them. Packages without binary distributions will fail to install when this option is used on them.(RRnRRp(((s2/usr/lib/python2.7/site-packages/pip/cmdoptions.pyRis
s--cache-dirt	cache_dirsStore the cache data in .s--no-cache-dirsDisable the cache.s	--no-depss--no-dependenciestignore_dependenciess#Don't install package dependencies.s-bs--builds--build-dirs--build-directoryt	build_dirs/Directory to unpack packages into and build in.s--ignore-requires-pythontignore_requires_pythons'Ignore the Requires-Python information.s--install-optionR RJRs"Extra arguments to be supplied to the setup.py install command (use like --install-option="--install-scripts=/usr/local/bin"). Use multiple --install-option options to pass multiple options to setup.py install. If you are using an option with a directory path, be sure to use absolute path.s--global-optionRsTExtra global options to be supplied to the setup.py call before the install command.s
--no-cleans!Don't clean up build directories.s--presYInclude pre-release and development versions. By default, pip only finds stable versions.s--disable-pip-version-checktdisable_pip_version_checks{Don't periodically check PyPI to determine whether a new version of pip is available for download. Implied with --no-index.s-Zs--always-unziptalways_unzipcC@s|jjsi|j_ny|jdd\}}Wn"tk
r[|jd|nX|tkr|jd|djtfn|jjj|gj|dS(skGiven a value spelled "algo:digest", append the digest to a list
    pointed to in a dict by the algo name.t:isTArguments to %s must be a hash name followed by a value, like --hash=sha256:abcde...s&Allowed hash algorithms for %s are %s.s, N(	Rfthashestsplitt
ValueErrorterrorRtjoint
setdefaultRJ(RRjRkRtalgotdigest((s2/usr/lib/python2.7/site-packages/pip/cmdoptions.pyt_merge_hashs
		s--hashRxRotstringsgVerify that the package's archive matches this hash before installing. Example: --hash=sha256:abcdef...s--require-hashestrequire_hashessRequire a hash to check each requirement against, for repeatable installs. This option is implied when any package in a requirements file has a --hash option.sGeneral OptionsR
sPackage Index Optionss4Package Index Options (including deprecated options)(Tt__doc__t
__future__Rt	functoolsRtoptparseRRRR$t	pip.indexRRRRt
pip.modelsR	t
pip.locationsR
Rtpip.utils.hashesRRRRR'thelp_tFalseR+trequire_virtualenvR/R1R2R3R6R7R;R>R@RARBRKRLt
simple_urlRMRPRQRRRTRVRYtno_allow_externalR\tno_allow_unsafeR^R_RaRctsrctTrueRtno_use_wheelRgRmRnRhRiRqtno_cachetno_depsRsRtR Rtno_cleantpreRuRvRthashRt
general_grouptnon_deprecated_index_grouptindex_group(((s2/usr/lib/python2.7/site-packages/pip/cmdoptions.pyt	sx"																											
					
														
	
																	

PKZ
..!site-packages/pip/req/req_file.pynu["""
Requirements file parsing
"""

from __future__ import absolute_import

import os
import re
import shlex
import sys
import optparse
import warnings

from pip._vendor.six.moves.urllib import parse as urllib_parse
from pip._vendor.six.moves import filterfalse

import pip
from pip.download import get_file_content
from pip.req.req_install import InstallRequirement
from pip.exceptions import (RequirementsFileParseError)
from pip.utils.deprecation import RemovedInPip10Warning
from pip import cmdoptions

__all__ = ['parse_requirements']

SCHEME_RE = re.compile(r'^(http|https|file):', re.I)
COMMENT_RE = re.compile(r'(^|\s)+#.*$')

SUPPORTED_OPTIONS = [
    cmdoptions.constraints,
    cmdoptions.editable,
    cmdoptions.requirements,
    cmdoptions.no_index,
    cmdoptions.index_url,
    cmdoptions.find_links,
    cmdoptions.extra_index_url,
    cmdoptions.allow_external,
    cmdoptions.allow_all_external,
    cmdoptions.no_allow_external,
    cmdoptions.allow_unsafe,
    cmdoptions.no_allow_unsafe,
    cmdoptions.use_wheel,
    cmdoptions.no_use_wheel,
    cmdoptions.always_unzip,
    cmdoptions.no_binary,
    cmdoptions.only_binary,
    cmdoptions.pre,
    cmdoptions.process_dependency_links,
    cmdoptions.trusted_host,
    cmdoptions.require_hashes,
]

# options to be passed to requirements
SUPPORTED_OPTIONS_REQ = [
    cmdoptions.install_options,
    cmdoptions.global_options,
    cmdoptions.hash,
]

# the 'dest' string values
SUPPORTED_OPTIONS_REQ_DEST = [o().dest for o in SUPPORTED_OPTIONS_REQ]


def parse_requirements(filename, finder=None, comes_from=None, options=None,
                       session=None, constraint=False, wheel_cache=None):
    """Parse a requirements file and yield InstallRequirement instances.

    :param filename:    Path or url of requirements file.
    :param finder:      Instance of pip.index.PackageFinder.
    :param comes_from:  Origin description of requirements.
    :param options:     cli options.
    :param session:     Instance of pip.download.PipSession.
    :param constraint:  If true, parsing a constraint file rather than
        requirements file.
    :param wheel_cache: Instance of pip.wheel.WheelCache
    """
    if session is None:
        raise TypeError(
            "parse_requirements() missing 1 required keyword argument: "
            "'session'"
        )

    _, content = get_file_content(
        filename, comes_from=comes_from, session=session
    )

    lines_enum = preprocess(content, options)

    for line_number, line in lines_enum:
        req_iter = process_line(line, filename, line_number, finder,
                                comes_from, options, session, wheel_cache,
                                constraint=constraint)
        for req in req_iter:
            yield req
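# Illustrative usage (assumed session setup; parse_requirements is a
# generator, so it must be iterated):
#   from pip.download import PipSession
#   for req in parse_requirements('requirements.txt', session=PipSession()):
#       print(req.name)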


def preprocess(content, options):
    """Split, filter, and join lines, and return a line iterator

    :param content: the content of the requirements file
    :param options: cli options
    """
    lines_enum = enumerate(content.splitlines(), start=1)
    lines_enum = join_lines(lines_enum)
    lines_enum = ignore_comments(lines_enum)
    lines_enum = skip_regex(lines_enum, options)
    return lines_enum


def process_line(line, filename, line_number, finder=None, comes_from=None,
                 options=None, session=None, wheel_cache=None,
                 constraint=False):
    """Process a single requirements line; This can result in creating/yielding
    requirements, or updating the finder.

    For lines that contain requirements, the only options that have an effect
    are from SUPPORTED_OPTIONS_REQ, and they are scoped to the
    requirement. Other options from SUPPORTED_OPTIONS may be present, but are
    ignored.

    For lines that do not contain requirements, the only options that have an
    effect are from SUPPORTED_OPTIONS. Options from SUPPORTED_OPTIONS_REQ may
    be present, but are ignored. These lines may contain multiple options
    (although our docs imply only one is supported), and all are parsed and
    affect the finder.

    :param constraint: If True, parsing a constraints file.
    :param options: OptionParser options that we may update
    """
    parser = build_parser()
    defaults = parser.get_default_values()
    defaults.index_url = None
    if finder:
        # `finder.format_control` will be updated during parsing
        defaults.format_control = finder.format_control
    args_str, options_str = break_args_options(line)
    if sys.version_info < (2, 7, 3):
        # Prior to 2.7.3, shlex cannot deal with unicode entries
        options_str = options_str.encode('utf8')
    opts, _ = parser.parse_args(shlex.split(options_str), defaults)

    # preserve for the nested code path
    line_comes_from = '%s %s (line %s)' % (
        '-c' if constraint else '-r', filename, line_number)

    # yield a line requirement
    if args_str:
        isolated = options.isolated_mode if options else False
        if options:
            cmdoptions.check_install_build_global(options, opts)
        # get the options that apply to requirements
        req_options = {}
        for dest in SUPPORTED_OPTIONS_REQ_DEST:
            if dest in opts.__dict__ and opts.__dict__[dest]:
                req_options[dest] = opts.__dict__[dest]
        yield InstallRequirement.from_line(
            args_str, line_comes_from, constraint=constraint,
            isolated=isolated, options=req_options, wheel_cache=wheel_cache
        )

    # yield an editable requirement
    elif opts.editables:
        isolated = options.isolated_mode if options else False
        default_vcs = options.default_vcs if options else None
        yield InstallRequirement.from_editable(
            opts.editables[0], comes_from=line_comes_from,
            constraint=constraint, default_vcs=default_vcs, isolated=isolated,
            wheel_cache=wheel_cache
        )

    # parse a nested requirements file
    elif opts.requirements or opts.constraints:
        if opts.requirements:
            req_path = opts.requirements[0]
            nested_constraint = False
        else:
            req_path = opts.constraints[0]
            nested_constraint = True
        # original file is over http
        if SCHEME_RE.search(filename):
            # do a url join so relative paths work
            req_path = urllib_parse.urljoin(filename, req_path)
        # original file and nested file are paths
        elif not SCHEME_RE.search(req_path):
            # do a join so relative paths work
            req_path = os.path.join(os.path.dirname(filename), req_path)
        # TODO: Why not use `comes_from='-r {} (line {})'` here as well?
        parser = parse_requirements(
            req_path, finder, comes_from, options, session,
            constraint=nested_constraint, wheel_cache=wheel_cache
        )
        for req in parser:
            yield req

    # percolate hash-checking option upward
    elif opts.require_hashes:
        options.require_hashes = opts.require_hashes

    # set finder options
    elif finder:
        if opts.allow_external:
            warnings.warn(
                "--allow-external has been deprecated and will be removed in "
                "the future. Due to changes in the repository protocol, it no "
                "longer has any effect.",
                RemovedInPip10Warning,
            )

        if opts.allow_all_external:
            warnings.warn(
                "--allow-all-external has been deprecated and will be removed "
                "in the future. Due to changes in the repository protocol, it "
                "no longer has any effect.",
                RemovedInPip10Warning,
            )

        if opts.allow_unverified:
            warnings.warn(
                "--allow-unverified has been deprecated and will be removed "
                "in the future. Due to changes in the repository protocol, it "
                "no longer has any effect.",
                RemovedInPip10Warning,
            )

        if opts.index_url:
            finder.index_urls = [opts.index_url]
        if opts.use_wheel is False:
            finder.use_wheel = False
            pip.index.fmt_ctl_no_use_wheel(finder.format_control)
        if opts.no_index is True:
            finder.index_urls = []
        if opts.extra_index_urls:
            finder.index_urls.extend(opts.extra_index_urls)
        if opts.find_links:
            # FIXME: it would be nice to keep track of the source
            # of the find_links: support a find-links local path
            # relative to a requirements file.
            value = opts.find_links[0]
            req_dir = os.path.dirname(os.path.abspath(filename))
            relative_to_reqs_file = os.path.join(req_dir, value)
            if os.path.exists(relative_to_reqs_file):
                value = relative_to_reqs_file
            finder.find_links.append(value)
        if opts.pre:
            finder.allow_all_prereleases = True
        if opts.process_dependency_links:
            finder.process_dependency_links = True
        if opts.trusted_hosts:
            finder.secure_origins.extend(
                ("*", host, "*") for host in opts.trusted_hosts)


def break_args_options(line):
    """Break up the line into an args and options string.  We only want to shlex
    (and then optparse) the options, not the args.  args can contain markers
    which are corrupted by shlex.
    """
    tokens = line.split(' ')
    args = []
    options = tokens[:]
    for token in tokens:
        if token.startswith('-') or token.startswith('--'):
            break
        else:
            args.append(token)
            options.pop(0)
    return ' '.join(args), ' '.join(options)
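# Illustrative example: a line such as
#   "SomeProject==1.0 --hash=sha256:abc123"
# is returned as ("SomeProject==1.0", "--hash=sha256:abc123"); everything
# from the first "-"/"--" token onwards is treated as options.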


def build_parser():
    """
    Return a parser for parsing requirement lines
    """
    parser = optparse.OptionParser(add_help_option=False)

    option_factories = SUPPORTED_OPTIONS + SUPPORTED_OPTIONS_REQ
    for option_factory in option_factories:
        option = option_factory()
        parser.add_option(option)

    # By default optparse sys.exits on parsing errors. We want to wrap
    # that in our own exception.
    def parser_exit(self, msg):
        raise RequirementsFileParseError(msg)
    parser.exit = parser_exit

    return parser


def join_lines(lines_enum):
    """Joins a line ending in '\' with the previous line (except when following
    comments).  The joined line takes on the index of the first line.
    """
    primary_line_number = None
    new_line = []
    for line_number, line in lines_enum:
        if not line.endswith('\\') or COMMENT_RE.match(line):
            if COMMENT_RE.match(line):
                # this ensures comments are always matched later
                line = ' ' + line
            if new_line:
                new_line.append(line)
                yield primary_line_number, ''.join(new_line)
                new_line = []
            else:
                yield line_number, line
        else:
            if not new_line:
                primary_line_number = line_number
            new_line.append(line.strip('\\'))

    # last line contains \
    if new_line:
        yield primary_line_number, ''.join(new_line)

    # TODO: handle space after '\'.
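    # Illustrative example: the physical lines
    #     requests \
    #     >=2.0
    # are yielded as the single logical line "requests >=2.0", keyed by the
    # first line's number.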


def ignore_comments(lines_enum):
    """
    Strips comments and filters empty lines.
    """
    for line_number, line in lines_enum:
        line = COMMENT_RE.sub('', line)
        line = line.strip()
        if line:
            yield line_number, line


def skip_regex(lines_enum, options):
    """
    Skip lines that match '--skip-requirements-regex' pattern

    Note: the regex pattern is only built once
    """
    skip_regex = options.skip_requirements_regex if options else None
    if skip_regex:
        pattern = re.compile(skip_regex)
        lines_enum = filterfalse(
            lambda e: pattern.search(e[1]),
            lines_enum)
    return lines_enum
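# Illustrative example: with --skip-requirements-regex=^internal-, any line
# whose text matches the pattern is dropped before requirement parsing.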
PKZo"site-packages/pip/req/__init__.pycnu[
abc@@s\ddlmZddlmZddlmZmZddlmZdddd	gZ	d
S(i(tabsolute_importi(tInstallRequirement(tRequirementSettRequirements(tparse_requirementsRRRRN(
t
__future__Rtreq_installRtreq_setRRtreq_fileRt__all__(((s4/usr/lib/python2.7/site-packages/pip/req/__init__.pyts
	PKZo"site-packages/pip/req/__init__.pyonu[
abc@@s\ddlmZddlmZddlmZmZddlmZdddd	gZ	d
S(i(tabsolute_importi(tInstallRequirement(tRequirementSettRequirements(tparse_requirementsRRRRN(
t
__future__Rtreq_installRtreq_setRRtreq_fileRt__all__(((s4/usr/lib/python2.7/site-packages/pip/req/__init__.pyts
	PKZlq%site-packages/pip/req/req_install.pycnu[
abc@@sddlmZddlZddlZddlZddlZddlZddlZddlZddl	Z	ddl
Z
ddlmZddl
mZddlmZddlmZmZddlmZddlmZdd	lmZmZdd
lmZddlmZmZ ddl!m"Z"ddl#Z$dd
l%m&Z&m'Z'm(Z(ddl)m*Z*m+Z+m,Z,m-Z-ddl.m/Z/m0Z0ddl1m2Z2m3Z3m4Z4m5Z5ddl6m7Z7m8Z8m9Z9m:Z:m;Z;m<Z<m=Z=m>Z>m?Z?m@Z@mAZAmBZBmCZCmDZDmEZEmFZFddlGmHZHddlImJZJddlKmLZLddlMmNZNddlOmPZPddlQmRZRddlSmTZTddl#mUZUmVZVejWeXZYejZj[j\Z]dZ^dZ_de`fdYZadZbecdZddS( i(tabsolute_importN(t	sysconfig(tchange_root(t
FeedParser(t
pkg_resourcestsix(t
specifiers(tMarker(tInvalidRequirementtRequirement(tcanonicalize_name(tVersiontparse(tconfigparser(t
native_strt
get_stdlibtWINDOWS(tis_urlturl_to_pathtpath_to_urltis_archive_file(tInstallationErrortUninstallationError(tbin_pytrunning_under_virtualenvtPIP_DELETE_MARKER_FILENAMEtbin_user(tdisplay_pathtrmtreetask_path_existst
backup_dirtis_installable_dirtdist_in_usersitetdist_in_site_packagest
egg_link_pathtcall_subprocesstread_text_filetFakeFilet_make_build_dirt
ensure_dirtget_installed_versiontnormalize_patht
dist_is_local(tHashes(tRemovedInPip10Warning(t
indent_log(tSETUPTOOLS_SHIM(topen_spinner(tUninstallPathSet(tvcs(tmove_wheel_filestWheelcC@sOtjd|}d}|r?|jd}|jd}n|}||fS(Ns^(.+)(\[[^\]]+\])$ii(tretmatchtNonetgroup(tpathtmtextrastpath_no_extras((s7/usr/lib/python2.7/site-packages/pip/req/req_install.pyt
_strip_extras9scC@std|DS(Ncs@s|]}tj|VqdS(N(Rt
safe_extra(t.0textra((s7/usr/lib/python2.7/site-packages/pip/req/req_install.pys	Fs(tset(R:((s7/usr/lib/python2.7/site-packages/pip/req/req_install.pyt_safe_extrasEstInstallRequirementcB@seZd'ed'eeed'ed'd'edZed'd'ed'd'edZed'ed'd'edZdZ	dZ
dZedZ
edZdZd	Zd
ZedZedZed
ZdZdZdZdZejdZedZdZedZedZdZ dZ!dZ"dZ#d'dZ$gd'd'd'dZ%dZ&dZ'dZ(d(d'd Z)d!Z*ed"Z+d'd'd'd#Z,d$Z-ed%Z.ed&Z/RS()c@sd|_ttjrytWntk
rtjjkrVd}n>dkrt	fdt
Drd}ntj}t
d|fnXtj|_n|_||_|
|_||_||_||_||_|_||_|	dk	r*|	|_no6j|_d|_d|_d|_d|_d|_||_ d|_!d|_"t#|_$t#|_%d|_&|r|ni|_'||_(t#|_)|
|_*dS(Ns%It looks like a path. Does it exist ?t=c3@s|]}|kVqdS(N((R>top(treq(s7/usr/lib/python2.7/site-packages/pip/req/req_install.pys	Vss,= is not a valid operator. Did you mean == ?sInvalid requirement: '%s'
%s((+R:t
isinstanceRtstring_typesR	RtosR8tseptanyt	operatorst	tracebackt
format_excRRAREt
comes_fromt
constraintt
source_dirteditablet_wheel_cachetlinkt
original_linktas_eggR6tmarkerstmarkert_egg_info_pathtsatisfied_bytconflicts_witht_temp_build_dirt_ideal_build_dirtupdatetinstall_succeededtuninstalledtFalsetnothing_to_uninstallt
use_user_sitet
target_dirtoptionst	pycompiletpreparedtisolated(tselfRERNRPRQRSRUR]ReRVRgRdtwheel_cacheROtadd_msg((REs7/usr/lib/python2.7/site-packages/pip/req/req_install.pyt__init__KsN	
	)																					cC@sddlm}t||\}	}
}|
jdrFt|
}nd}||	|d|dtd||
d|d|d	|r|nid
|}
|dk	rt||
_n|
S(Ni(tLinksfile:RPRQRSRORgRdRi(	t	pip.indexRltparse_editablet
startswithRR6tTrueRAR:(tclsteditable_reqRNtdefault_vcsRgRdRiRORltnameturltextras_overrideRPtres((s7/usr/lib/python2.7/site-packages/pip/req/req_install.pyt
from_editables 	cC@sddlm}t|r%d}nd}||kry|j|d\}}	|	j}	|	sjd}	qt|	}	nd}	|j}d}
tjj	tjj
|}d}d}
t|r||}nt|\}}
tjj|rOtjj
|ks|jdrOt|s:td|n|t|}nFt|rtjj|stjd|n|t|}n|r,|jd	krtjd
|jr|ttjj	tjj
|j}n|jr t|j}d|j|jf}
q2|j}
n|}
|r>|ni}||
|d|d
|	d|d|d|d|}|
rt t!d|
j"|_"n|S(sCreates an InstallRequirement from a name, which might be a
        requirement, directory containing 'setup.py', filename, or URL.
        i(Rls; t;it.s;Directory %r is not installable. File 'setup.py' not found.sARequirement %r looks like a filename, but the file does not existtfiles\.\./s%s==%sRSRVRgRdRiROtplaceholderN(#RmRlRtsplittstripR6RRHR8tnormpathtabspathR<tisdirRIRoRRRRtisfiletloggertwarningtschemeR4tsearchRutis_wheelR3tfilenameRttversiontegg_fragmentRAR	R:(RqRtRNRgRdRiRORlt
marker_sepRVRER8RSR:tptwheelRw((s7/usr/lib/python2.7/site-packages/pip/req/req_install.pyt	from_linesb		!

$-	cC@s|jr;t|j}|jrV|d|jj7}qVn|jrP|jjnd}|jdk	r|dt|jj7}n|jrt	|jt
jr|j}n|jj}|r|d|7}qn|S(Ns from %ss in %ss
 (from %s)(
REtstrRSRuR6RYRtlocationRNRFRRGt	from_path(RhtsRN((s7/usr/lib/python2.7/site-packages/pip/req/req_install.pyt__str__s			cC@s d|jjt||jfS(Ns<%s object: %s editable=%r>(t	__class__t__name__RRQ(Rh((s7/usr/lib/python2.7/site-packages/pip/req/req_install.pyt__repr__scC@s|jdkr'|j|||_n|jdk	r|r|j}|jj|j|j|_||jkrtjd|jqndS(sEnsure that if a link can be found for this, that it is found.

        Note that self.link may still be None - if Upgrade is False and the
        requirement is already installed.

        If require_hashes is True, don't use the wheel cache, because cached
        wheels, always built locally, have different hashes than the files
        downloaded from the index server and thus throw false hash mismatches.
        Furthermore, cached wheels at present have undeterministic contents due
        to file modification times.
        sUsing cached wheel link: %sN(RSR6tfind_requirementRRtcached_wheelRtRtdebug(Rhtfindertupgradetrequire_hashestold_link((s7/usr/lib/python2.7/site-packages/pip/req/req_install.pyt
populate_link	s	cC@s
|jjS(N(REt	specifier(Rh((s7/usr/lib/python2.7/site-packages/pip/req/req_install.pyRscC@s4|j}t|dko3tt|jdkS(sReturn whether I am pinned to an exact version.

        For example, some-package==1.2 is pinned; some-package>1.2 is not.
        is==s===(s==s===(Rtlentnexttitertoperator(RhR((s7/usr/lib/python2.7/site-packages/pip/req/req_install.pyt	is_pinned!s	cC@sy|jdkrdSt|j}|jrut|jtjrL|j}n|jj}|ru|d|7}qun|S(Ns->(RER6RRNRFRRGR(RhRRN((s7/usr/lib/python2.7/site-packages/pip/req/req_install.pyR+s	cC@s|jdk	r|jS|jdkrVtjjtjdd|_||_|jS|j	rq|j
j}n	|j
}tjj|st
jd|t|ntjj||S(Ns-buildspip-sCreating directory %s(R[R6RERHR8trealpathttempfiletmkdtempR\RQRttlowertexistsRRR&tjoin(Rht	build_dirRt((s7/usr/lib/python2.7/site-packages/pip/req/req_install.pytbuild_location8s				
cC@s|jdk	rdS|jdk	s(t|js7t|jsFt|j}d|_|j|j}tjj	|rt
dt|ntj
d|t|t|tj||||_d|_||_d|_dS(sMove self._temp_build_dir to self._ideal_build_dir/self.req.name

        For some requirements (e.g. a path to a directory), the name of the
        package is not available until we run egg_info, so the build_location
        will return a temporary directory and store the _ideal_build_dir.

        This is only called by self.egg_info_path to fix the temporary build
        directory.
        Ns<A package already exists in %s; please remove it to continues,Moving package %s from %s to new location %s(RPR6REtAssertionErrorR[R\RRHR8RRRRRtshutiltmoveRX(Rhtold_locationtnew_location((s7/usr/lib/python2.7/site-packages/pip/req/req_install.pyt_correct_build_locationSs(
					cC@s,|jdkrdSttj|jjS(N(RER6RRt	safe_nameRt(Rh((s7/usr/lib/python2.7/site-packages/pip/req/req_install.pyRtsscC@s+tjj|j|jr$|jjp'dS(Nt(RHR8RRPRStsubdirectory_fragment(Rh((s7/usr/lib/python2.7/site-packages/pip/req/req_install.pytsetup_py_dirys	cC@s|jstd|yddl}WnHtk
rstddkrTd}ntj}td|nXt	j
j|jd}t
jrt|t
jr|jtj}n|S(NsNo source dir for %sit
setuptoolssPlease install setuptools.sWCould not import setuptools which is required to install from a source distribution.
%sssetup.py(RPRRtImportErrorR(R6RLRMRRHR8RRRtPY2RFt	text_typetencodetsystgetfilesystemencoding(RhRRjtsetup_py((s7/usr/lib/python2.7/site-packages/pip/req/req_install.pyRs
	cC@s|jst|jr4tjd|j|jntjd|j|jtt|j}t	j
d|g}|jr|dg7}n|dg}|jrg}n.t
jj|jd}t|ddg}t||d|jd	td
dWdQX|js~tt|jdtr6d
}nd}tdj|jd||jdg|_|jnbt|jd}t|jj|krtjd|j|j||jt||_ndS(Ns2Running setup.py (path:%s) egg_info for package %ss7Running setup.py (path:%s) egg_info for package from %ss-cs
--no-user-cfgtegg_infospip-egg-infos
--egg-basetcwdtshow_stdouttcommand_descspython setup.py egg_infoRs==s===RtNamesuRunning setup.py (path:%s) egg_info for package %s produced metadata for project name %s. Fix your #egg=%s fragments.(RPRRtRRRRSR-R.Rt
executableRgRQRHR8RRR'R#R`RERFt
parse_versiontpkg_infoRR	RR
R(Rhtscripttbase_cmdtegg_info_cmdtegg_base_optiontegg_info_dirRDt
metadata_name((s7/usr/lib/python2.7/site-packages/pip/req/req_install.pytrun_egg_infosP	

	
		

	
		

cC@sy|jdk	r5|jj|s%dS|jj|S|jsDt|j|}tjj	|sidSt
|}|S(N(RYR6thas_metadatatget_metadataRPRt
egg_info_pathRHR8RR$(RhRtdata((s7/usr/lib/python2.7/site-packages/pip/req/req_install.pyt
egg_info_datasc	C@s<|jdkr&|jr$|j}ntjj|jd}tj|}|jrg}xtj	|D]\}}}x-t
jD]"}||kr|j|qqWxt
|D]}tjjtjj||dds
tjjtjj||ddr|j|q|dks2|dkr|j|qqW|jg|D]}tjj||^qSqjWg|D]}|jdr|^q}n|std	||fn|std	||ft|d
kr|jddntjj||d
|_ntjj|j|S(Nspip-egg-infotbintpythontScriptss
Python.exettestttestss	.egg-infos$No files/directories in %s (from %s)itkeycS@s8|jtjjtjjr3|jtjjp6dS(Ni(tcountRHR8RItaltsep(tx((s7/usr/lib/python2.7/site-packages/pip/req/req_install.pyt
si(RXR6RQRPRHR8RRtlistdirtwalkR1tdirnamestremovetlisttlexistsRtextendtendswithRRRtsort(	RhRtbaset	filenamestroottdirstfilestdirtf((s7/usr/lib/python2.7/site-packages/pip/req/req_install.pyRsB						*+		
cC@s]t}|jd}|s@tjdt|jdn|j|pOd|jS(NsPKG-INFOsNo PKG-INFO file found in %sR(RRRRRRtfeedtclose(RhRR((s7/usr/lib/python2.7/site-packages/pip/req/req_install.pyRs	s	\[(.*?)\]cC@s
t|jS(N(R(Rt(Rh((s7/usr/lib/python2.7/site-packages/pip/req/req_install.pytinstalled_version scC@sy|jst|jd}|jjrV||jjkrVtjd||jntjdt	|j||dS(NRs'Requested %s, but installing version %ss;Source in %s has version %s, which satisfies requirement %s(
RPRRRERRRRRR(RhR((s7/usr/lib/python2.7/site-packages/pip/req/req_install.pytassert_source_matches_version$s
cC@s|js tjd|jdS|js/t|js>t|jjdkrTdSd|jjks|td|jj|jsdS|jjj	dd\}}t
j|}|r||jj}|r|j|jq|j
|jndstd|j|fdS(Ns>Cannot update repository at %s; repository location is unknownR{t+sbad url: %riis+Unexpected version control type (in %s): %s(RSRRRPRQRRRuR]R}R1tget_backendtobtaintexport(RhRtvc_typeRutbackendtvcs_backend((s7/usr/lib/python2.7/site-packages/pip/req/req_install.pytupdate_editable5s,	
(		cC@s|js%td|jfn|jp4|j}t|j}t|s{tj	d|j
|tjt
|_dS|tkrtj	d|j
|t
|_dSt|}t|}djtj|j}|jotjj|j}t|jdd}|r|jjdr|jj|r|j|j|jdrx'|j dj!D]7}	tjj"tjj#|j|	}
|j|
qsWq;|jd	r;|jd
r|j d
}ng}xg|j d	j!D]}|r||kr|^qD]^}
tjj#|j|
}
|j|
|j|
d|j|
d|j|
d
q"Wq;n|rt$j%dj|jt&|j|n|jjdr2|j|jtjj'|jd}tjj#tjj(|jd}|j)|d|n	|rw|jjdrwxt*j+j,|D]}
|j|
q]Wn|r%t-|d%}tjj.|j/j0}WdQX||jkst1d||j|jf|j|tjj#tjj(|d}|j)||jntj2d||j|jdr|j3drxz|j4dD]f}t5|rt6}nt7}|jtjj#||t8ri|jtjj#||dqiqiWn|jdrt9j:ri}n
id d6}t;j<|}|j=t>|j?d|j@drx|jAdD]\}}t5|rot6}nt7}|jtjj#||t8rN|jtjj#||d|jtjj#||d|jtjj#||dqNqNWqn|jB|||_CdS(!s
        Uninstall the distribution currently satisfying this requirement.

        Prompts before removing or modifying files unless
        ``auto_confirm`` is True.

        Refuses to delete or modify files outside of ``sys.prefix`` -
        thus uninstallation within a virtual environment can only
        modify that virtual environment, even if the virtualenv is
        linked to global site-packages.

        s.Cannot uninstall requirement %s, not installeds1Not uninstalling %s at %s, outside environment %sNs<Not uninstalling %s at %s, as it is in the standard library.s{0}.egg-infoR8s	.egg-infosinstalled-files.txts
top_level.txtsnamespace_packages.txts.pys.pycs.pyosUninstalling a distutils installed project ({0}) has been deprecated and will be removed in a future version. This is due to the fact that uninstalling a distutils project will only partially uninstall the project.s.eggiseasy-install.pths./s
.dist-infotrs;Egg-link %s does not match installed location of %s (at %s)s)Not sure how to uninstall: %s - Check: %stscriptss.batsentry_points.txtRCt
delimiterstconsole_scriptss.exes
.exe.manifests
-script.py(RC(Dtcheck_if_existsRRtRYRZR)RR*RtinfoRRtprefixRpRaRR0R"tformatRtto_filenametproject_nameRRHR8Rtgetattrt	_providerR6RtaddRRt
splitlinesRRtwarningstwarnR,R}tdirnametadd_pthtpipRtuninstallation_pathstopentnormcasetreadlineR~RRtmetadata_isdirtmetadata_listdirR RRRRRR
tSafeConfigParsertreadfpR%tget_metadata_linesthas_sectiontitemsRR_(Rhtauto_confirmtdistt	dist_pathtpaths_to_removetdevelop_egg_linktdevelop_egg_link_egg_infotegg_info_existstdistutils_egg_infotinstalled_fileR8t
namespacesRt
top_level_pkgteasy_install_eggteasy_install_pthtfhtlink_pointerRtbin_dirRdtconfigRttvalue((s7/usr/lib/python2.7/site-packages/pip/req/req_install.pyt	uninstallRs

				"
		$
	
	*		
	'
cC@s0|jr|jjntjd|jdS(Ns'Can't rollback %s, nothing uninstalled.(R_trollbackRterrorRt(Rh((s7/usr/lib/python2.7/site-packages/pip/req/req_install.pytrollback_uninstalls	cC@s<|jr|jjn|js8tjd|jndS(Ns%Can't commit %s, nothing uninstalled.(R_tcommitRaRR+Rt(Rh((s7/usr/lib/python2.7/site-packages/pip/req/req_install.pytcommit_uninstalls
		cC@s|jstt}d|j|jdf}tjj||}tjj|r"t	dt
|d}|dkrt}q"|dkrtj
dt
|tj|q"|dkrt|}tj
d	t
|t
|tj||q"|dkr"tjd
q"n|rtj|dtjdt}tjjtjj|j}xtj|D]\}	}
}d|
kr|
jdnxl|
D]d}tjj|	|}|j||}
tj|jd
|
d
}d|_|j|dqWxb|D]Z}|tkr0qntjj|	|}|j||}
|j ||jd
|
qWqwW|j!tj"dt
|ndS(Ns	%s-%s.zipRs8The file %s exists. (i)gnore, (w)ipe, (b)ackup, (a)bort titwtbtasDeleting %ssBacking up %s to %sit
allowZip64spip-egg-infot/iiRsSaved %s(R/R0R1R2i(#RPRRpRtRRHR8RRRRR`RRRRRRRtexittzipfiletZipFiletZIP_DEFLATEDRRRRt_clean_zip_nametZipInfot
external_attrtwritestrRtwriteRR(RhRtcreate_archivetarchive_nametarchive_pathtresponset	dest_filetzipRtdirpathRRR	RttzipdirR((s7/usr/lib/python2.7/site-packages/pip/req/req_install.pytarchivesX		
	!
	
#
cC@s_|j|tjjs/td||f|t|d}|jtjjd}|S(Ns$name %r doesn't start with prefix %riR4(RoRHR8RIRRtreplace(RhRtR((s7/usr/lib/python2.7/site-packages/pip/req/req_install.pyR95s
c@s@|sd}njdk	r8tfd|DStSdS(NRc3@s(|]}jji|d6VqdS(R?N(RVtevaluate(R>R?(Rh(s7/usr/lib/python2.7/site-packages/pip/req/req_install.pys	Ds(R(RVR6RJRp(Rhtextras_requested((Rhs7/usr/lib/python2.7/site-packages/pip/req/req_install.pyt
match_markers=s	c@s"|jr#|j||d|dS|jrtjj|j}tjj||j|j	|jdd|d|t
|_dS||jj
dg7}||jj
dg7}|jrt|dg}ntjdd}tjj|d	}z|j|||}	d
|jf}
t|
:}t(t|	|d|jdtd
|WdQXWdQXtjj|stjd|dSt
|_|jrdSfd}t|]}
xS|
D]7}tjj |}|j!dr||}PqqWtj"d|dSWdQXg}t|k}
xa|
D]Y}|j#}tjj$|rz|tjj%7}n|j&tjj'|||qCWWdQXtjj|d}t|d!}
|
j(dj|dWdQXWdtjj|rtj)|nt*|XdS(NRRtstrip_file_prefixtglobal_optionstinstall_optionss
--no-user-cfgs-recordspip-sinstall-record.txtsRunning setup.py install for %sRRtspinnersRecord file %s not foundc@s4dkstjj|r#|St|SdS(N(R6RHR8tisabsR(R8(R(s7/usr/lib/python2.7/site-packages/pip/req/req_install.pytprepend_root~ss	.egg-infos;Could not find .egg-info directory in install record for %ssinstalled-files.txtR0s
(+RQtinstall_editableRRRt
wheel_versionRPtcheck_compatibilityRtR2RpR^RdtgetRgRRRRHR8Rtget_install_argsR/R-R#RR`RRRRUR
R	RRR~RRItappendtrelpathR=RR(RhRMRLRRRKRt
temp_locationtrecord_filenametinstall_argstmsgRNRPRtlinet	directoryRt	new_linesRtinst_files_path((Rs7/usr/lib/python2.7/site-packages/pip/req/req_install.pytinstallIs~					

			


	$cC@s+|jdkr$|j||_n|jS(sAEnsure that a source_dir is set.

        This will create a temporary build dir if the name of the requirement
        isn't known yet.

        :param parent_dir: The ideal pip parent_dir for the source_dir.
            Generally src_dir for editables and build_dir for sdists.
        :return: self.source_dir
        N(RPR6R(Rht
parent_dir((s7/usr/lib/python2.7/site-packages/pip/req/req_install.pytensure_has_source_dirs
cC@stjdg}|jd|jt|j|t|dd|g7}|jsf|dg7}n|dk	r|d|g7}n|dk	r|d|g7}n|jr|dg7}n
|d	g7}t	rd
t
j}|dtj
jtjdd
||jg7}n|S(Ns-us-cR`s--records#--single-version-externally-manageds--roots--prefixs	--compiles--no-compileRs--install-headerstincludetsite(RRRVR.RRRUR6ReRRtget_python_versionRHR8RRRt(RhRLRYRRRZt
py_ver_str((s7/usr/lib/python2.7/site-packages/pip/req/req_install.pyRUs(
		
	cC@s|jrPtjjtjj|jtrPtjd|jt|jnd|_|j
rtjj|j
rt|j
nd|_
dS(sVRemove the source files from this requirement, if they are marked
        for deletionsRemoving source in %sN(RPRHR8RRRRRRR6R[(Rh((s7/usr/lib/python2.7/site-packages/pip/req/req_install.pytremove_temporary_sources	c
C@stjd|j|jr2t|dg}n|r]dj|g}t||}ntOttj	dt
|jgt|ddgt|d|jdt
WdQXt|_dS(	NsRunning setup.py develop for %ss
--no-user-cfgs--prefix={0}s-ctdevelops	--no-depsRR(RRRtRgRRR-R#RRR.RRR`RpR^(RhRMRLRtprefix_param((s7/usr/lib/python2.7/site-packages/pip/req/req_install.pyRQs	
*
	
cC@s|jdkrtSyett|j}d|_tjt||_|j	rw|jrw|j|_
d|_tSWntjk
rtStj
k
rtj|jj}|jrt|r||_
qtrt|rtd|j|jfqq||_
nXtS(sFind an installed distribution that satisfies or conflicts
        with this requirement, and set self.satisfied_by or
        self.conflicts_with appropriately.
        sVWill not install to the user site because it will lack sys.path precedence to %s in %sN(RER6R`R	RRWRtget_distributionRYRQRZRptDistributionNotFoundtVersionConflictRtRbR RR!RRR(Rht	no_markert
existing_dist((s7/usr/lib/python2.7/site-packages/pip/req/req_install.pyRs2				
cC@s|jo|jjS(N(RSR(Rh((s7/usr/lib/python2.7/site-packages/pip/req/req_install.pyR scC@sPt|j|j|d|jd|jd|d|d|jd|jd|dS(NtuserthomeRRReRgRK(R2RtRERbRcReRg(RhtwheeldirRRRK((s7/usr/lib/python2.7/site-packages/pip/req/req_install.pyR2$s				cC@s|jdjd}tjj|}tj||}tjjtjj|d}tj	tjj|d|d|S(sAReturn a pkg_resources.Distribution built from self.egg_info_pathRR4iRtmetadata(
RtrstripRHR8R	RtPathMetadatatsplitexttbasenametDistribution(RhRtbase_dirRrt	dist_name((s7/usr/lib/python2.7/site-packages/pip/req/req_install.pytget_dist0s"cC@st|jjdiS(sReturn whether any known-good hashes are specified as options.

        These activate --require-hashes mode; hashes specified as part of a
        URL do not.

        thashes(tboolRdRT(Rh((s7/usr/lib/python2.7/site-packages/pip/req/req_install.pythas_hash_options;scC@sn|jjdij}|r*|jn|j}|rd|jrd|j|jgj|jnt	|S(sReturn a hash-comparer that considers my option- and URL-based
        hashes to be known-good.

        Hashes in URLs--ones embedded in the requirements file, not ones
        downloaded from an index server--are almost peers with ones from
        flags. They satisfy --require-hashes (whether it was implicitly or
        explicitly activated) but do not activate it. md5 and sha224 are not
        allowed in flags, which should nudge people toward good algos. We
        always OR all hashes together, even ones from URLs.

        :param trust_internet: Whether to trust URL-based (#md5=...) hashes
            downloaded from the internet, as by populate_link()

        R{(
RdRTtcopyRSRTthasht
setdefaultt	hash_nameRVR+(Rhttrust_internettgood_hashesRS((s7/usr/lib/python2.7/site-packages/pip/req/req_install.pyR{Es
"N((0Rt
__module__R6R`RpRktclassmethodRxRRRRtpropertyRRRRRRtRRRRRRR4tcompilet_requirements_section_reRRRR)R,R.RFR9RJR`RbRURgRQRRR2RzR}R{(((s7/usr/lib/python2.7/site-packages/pip/req/req_install.pyRBIs^	;		M			
	
		 	:		6					0	\			
	)	
cC@s.tjd|}|r*|jd}n|S(s2
        Strip req postfix ( -dev, 0.2, etc )
    s^(.*?)(?:-dev|-\d.*)$i(R4RR7(RER5((s7/usr/lib/python2.7/site-packages/pip/req/req_install.pyt_strip_postfix[scC@sPddlm}|}d}tjd|}|rU|jd}|jd}n|}tjj|rtjj	tjj
|dstd|nt|}n|j
jdr
||j}|r||td	|j
jfS||dfSnx;tD]3}|j
jd
|rd||f}PqqWd|kr|r{tjd
t|d|}qtd|n|jdddj
}	tj|	sd|dj
gtjD]}
|
jd^qd}t|n||j}|s$tdn|s=td|nt||dfS(sParses an editable requirement into:
        - a requirement name
        - an URL
        - extras
        - editable options
    Accepted requirements:
        svn+http://blahblah@rev#egg=Foobar[baz]&subdirectory=version_subdir
        .[some_extra]
    i(Rls^(.+)(\[[^\]]+\])$iissetup.pys;Directory %r is not installable. File 'setup.py' not found.sfile:R|s%s:s%s+%sRsD--default-vcs has been deprecated and will be removed in the future.sb%s should either be a path to a local project or a VCS url beginning with svn+, git+, hg+, or bzr+sFor --editable=%s only s, s+URLs is currently supporteds@Could not detect requirement name, please specify one with #egg=s@--editable=%s is not the right format; it must have #egg=PackageN(RmRlR6R4R5R7RHR8RRRRRRRoRR	R:R1RRR,R}RtbackendsRtR(RrRsRlRuR:R9t
url_no_extrastpackage_nametversion_controlRRt
error_message((s7/usr/lib/python2.7/site-packages/pip/req/req_install.pyRngs`!


1
(et
__future__RtloggingRHR4RRRRLRR6t	distutilsRtdistutils.utilRtemail.parserRtpip._vendorRRtpip._vendor.packagingRtpip._vendor.packaging.markersRt"pip._vendor.packaging.requirementsRR	tpip._vendor.packaging.utilsR
tpip._vendor.packaging.versionRRRtpip._vendor.six.movesR
t	pip.wheelRt
pip.compatRRRtpip.downloadRRRRtpip.exceptionsRRt
pip.locationsRRRRt	pip.utilsRRRRRR R!R"R#R$R%R&R'R(R)R*tpip.utils.hashesR+tpip.utils.deprecationR,tpip.utils.loggingR-tpip.utils.setuptools_buildR.tpip.utils.uiR/tpip.req.req_uninstallR0tpip.vcsR1R2R3t	getLoggerRRt	Specifiert
_operatorstkeysRKR<RAtobjectRBRR6Rn(((s7/usr/lib/python2.7/site-packages/pip/req/req_install.pytsX""j			PKZ~]]!site-packages/pip/req/req_set.pycnu[
|jjS(N(RhRi(Rj((s3/usr/lib/python2.7/site-packages/pip/req/req_set.pyRkRls, s"<%s object; %d requirement(s): %s>(	RUR&RoR.RpRjt	__class__R1tlen(R"RjRqtreqs_str((s3/usr/lib/python2.7/site-packages/pip/req/req_set.pyR0s
"+c	C@s|j}|j|s5tjd|j|jgS|jr|jjrt|jj}|j	st
d|jqn|j|_|j|_|j
|_
|j|_|dk|_|s|jj||gSy|j|}Wntk
r	d}nX|dkrm|rm|jrm|j|jkrm|jj|jjkrmt
d|||fn|s||j|<|j|kr||j|j>s(R@RUR&RW(R"((s3/usr/lib/python2.7/site-packages/pip/req/req_set.pythas_requirements<scC@sa|jr]t|j|_tjj|jr4tStjdtdt	|jnt
S(Ns!Could not find download directorys0Could not find or access download directory '%s'(RPRtosRtexistsRRwtcriticalRRR(R"((s3/usr/lib/python2.7/site-packages/pip/req/req_set.pytis_downloadAs	
cC@slxU||jfD]A}||jkr3|j|S||jkr|j|j|SqWtd|dS(NsNo project with the name %r(RiRURVR~(R"RRh((s3/usr/lib/python2.7/site-packages/pip/req/req_set.pyR}NscC@sGx@|jjD]/}|jr%qn|jd||jqWdS(Ntauto_confirm(RUR&Rt	uninstalltcommit_uninstall(R"RRj((s3/usr/lib/python2.7/site-packages/pip/req/req_set.pyRVs
	c	C@s|jrt|jn|j|jj}|jpKtd|D}|rl|jrltdng}t	}xot
||D]^}y,|j|j||d|d|j
Wqtk
r}||_|j|qXqW|r|ndS(sY
        Prepare process. Create temp directories, download and/or unpack files.
        cs@s|]}|jVqdS(N(thas_hash_options(RRj((s3/usr/lib/python2.7/site-packages/pip/req/req_set.pys	iss--egg is not allowed with --require-hashes mode, since it delegates dependency resolution to setuptools and could thus result in installation of unhashed packages.ReRXN(RcRRWRUR&RetanyR]RRRtextendt
_prepare_fileRXRRjR)(R"R7t	root_reqsRetdiscovered_reqsthash_errorsRjtexc((s3/usr/lib/python2.7/site-packages/pip/req/req_set.pyt
prepare_files]s,				cC@s.|jo-|jdkp-|jdko-|jS(Nteagersonly-if-needed(RQRRR|(R"Rj((s3/usr/lib/python2.7/site-packages/pip/req/req_set.pyt_is_upgrade_alloweds	cC@s|j|jr|j|}t}|r|jp=|jsy|j||Wqtk
rmt}qt	k
r}qXn|s|j
ot|js|j|_nd|_qn|rd}n|jdkrd}nd}|SdSdS(sCheck if req_to_install should be skipped.

        This will check if the req is installed, and whether we should upgrade
        or reinstall it, taking into account all the relevant user options.

        After calling this req_to_install will only have satisfied_by set to
        None if the req_to_install is to be upgraded/reinstalled etc. Any
        other value will be a dist recording the current thing installed that
        satisfies the requirement.

        Note that for vcs urls and the like we can't assess skipping in this
        routine - we simply identify that we need to pull the thing down,
        then later on it is pulled down and introspected to assess upgrade/
        reinstalls etc.

        :return: A text reason for why it was skipped, or None.
        salready up-to-datesonly-if-neededs%not upgraded as not directly requiredsalready satisfiedN(tcheck_if_existsRJRRRTR<tfind_requirementRRR
R^Rtconflicts_withRLRR(R"R4R7tupgrade_allowedtbest_installedtskip_reason((s3/usr/lib/python2.7/site-packages/pip/req/req_set.pyt_check_skip_installeds2
	
	
			c
@sjsjrgSt_jr;tjdnjdksPtj	snj
|}njr|dk	stdjftjd|nVjrjjdkrt
jj}tjdt|ntjdtjr|r1tdnjjjjt}|jjrjjnjnjr|rtjdnt}njjtjj tjj!j"d	rt#d
j"fnj$|j%|js:tj}|rt&|rat'n$t(|rt)|rt*nj+rj,rt-qnj.d|}	|r|	rt/}	nyj}
t}jj0rj1rj1}
njj0r+|
r"t}q+t2}nt3jj"|
|dj4d
|	WnEt5j6k
r}tj7d|td|jfnXt}|jjrjjt8j9krjjqnj	sjnjr^j:sj	rKj;o-t<js?j_=nd_q^tjdn|j>|}
yt?|
Wn@t@k
r}jArtjB|jCdqjDnXgfd}jEjFsjGdn|sjHr,tjddj!jHntItJjHtJ|
jH}x!|D]}tjBd|
|qUWtItJ|
jHtJjH@}x*|
jK|D]}||d|qWnjLjMjrjrjNjMnWdQXS(sxPrepare a single requirements file.

        :return: A list of additional InstallRequirements to also install.
        sObtaining %ssP_check_skip_installed returned None but req_to_install.satisfied_by is set to %rsRequirement %s: %stfiles
Processing %ss
Collecting %ssoThe editable requirement %s cannot be installed when requiring hashes, because there is no single file to hash.sSince it is already installed, we are trusting this package without checking its hash. To ensure a completely repeatable environment, install into an empty virtualenv.ssetup.pyspip can't proceed with requirements '%s' due to a pre-existing build directory (%s). This is likely due to a previous installation that failed. pip is being responsible and not assuming it can delete this. Please delete it and try again.ttrust_internetR`thashess4Could not install requirement %s because of error %ssDCould not install requirement %s because of HTTP error %s for URL %ss<Requirement already satisfied (use --upgrade to upgrade): %sic@sMtt|djdj}jj|jd|dS(NRbRgR(RRpRbRdRRRh(tsubreqRtsub_install_req(t	more_reqsR4R"(s3/usr/lib/python2.7/site-packages/pip/req/req_set.pytadd_reqs		s!Installing extra requirements: %rt,s"%s does not provide the extra '%s'RN(ORtpreparedRR:RwtinfoRJRLtAssertionErrorRSRR<tschemeR	turlRRRtensure_has_source_dirROtupdate_editableRR?R8tarchiveRPRRRIRNRRRR.RBRt
populate_linkRRRRRRt
original_linkt	is_pinnedRRRR=RcRR
R`Rt	HTTPErrorRRtall_schemesRQR^RRR6RRRYRxtargstremove_temporary_sourceRRhRRRRtrequiresR\R)RZ(R"R7R4ReRXRRt
abstract_distR<RRPtautodelete_unpackedRR6teRtmissing_requestedtmissingtavailable_requestedR((RR4R"s3/usr/lib/python2.7/site-packages/pip/req/req_set.pyRs
				
	
	

	
	
			

	

					
		
		

	

	
cC@s?tjdt#x|jD]}|jq!WWdQXdS(sClean up files, remove builds.sCleaning up...N(RwRRR\R(R"Rj((s3/usr/lib/python2.7/site-packages/pip/req/req_set.pyt
cleanup_filess

c@sOgtfdx!jjD]}|q7WS(sCreate the installation order.

        The installation order is topological - requirements are installed
        before the requiring thing. We break cycles at an arbitrary point,
        and make no other guarantees.
        c@sf|js|krdS|jr&dSj|xj|D]}|qAWj|dS(N(RJRtaddRfR)(Rjtdep(tordertordered_reqstscheduleR"(s3/usr/lib/python2.7/site-packages/pip/req/req_set.pyRs	
(RRUR&(R"R((RRRR"s3/usr/lib/python2.7/site-packages/pip/req/req_set.pyt_to_installs
		cO@s!|j}|rDtjddjg|D]}|j^q(ntx|D]}|jrtjd|jt|jdtWdQXny|j	||||Wn*|jr|j
r|jnn X|jr|j
r|jn|j
qUWWdQX||_dS(sl
        Install everything in this set (after having downloaded and unpacked
        the packages)
        s!Installing collected packages: %ss, sFound existing installation: %sRN(RRwRR.RhRRRRtinstalltinstall_succeededtrollback_uninstallRRR[(R"tinstall_optionstglobal_optionsRtkwargst
to_installRjtrequirement((s3/usr/lib/python2.7/site-packages/pip/req/req_set.pyRs:)

	

	

		
(R1R2RRLRR#RrR0RRtpropertyRRR}RRRRRRRR(((s3/usr/lib/python2.7/site-packages/pip/req/req_set.pyRKs2			4		[		
		'		C		(:t
__future__RtcollectionsRt	itertoolsRtloggingRtpip._vendorRRt
pip.compatRtpip.downloadRRRR	R
tpip.exceptionsRRR
RRRRRRRtpip.req.req_installRt	pip.utilsRRRRtpip.utils.hashesRtpip.utils.loggingRtpip.utils.packagingRtpip.vcsRt	pip.wheelRt	getLoggerR1RwtobjectRR3R?R>R;RIRK(((s3/usr/lib/python2.7/site-packages/pip/req/req_set.pyts0(F"		PKZ'site-packages/pip/req/req_uninstall.pyonu[
[site-packages/pip/req/req_uninstall.pyo - compiled (CPython 2.7) bytecode for pip/req/req_uninstall.py. Binary content omitted.]
[site-packages/pip/req/req_uninstall.pyc - compiled bytecode for pip/req/req_uninstall.py, a duplicate of the .pyo entry above as extracted here. Binary content omitted.]

[site-packages/pip/req/__init__.py]
from __future__ import absolute_import

from .req_install import InstallRequirement
from .req_set import RequirementSet, Requirements
from .req_file import parse_requirements

__all__ = [
    "RequirementSet", "Requirements", "InstallRequirement",
    "parse_requirements",
]
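
A minimal sketch of how the names re-exported above are typically used together (the requirements file name and pinned version are illustrative; PipSession is assumed from pip.download as in the pip 9.x sources that follow):

    from pip.download import PipSession
    from pip.req import InstallRequirement, parse_requirements

    session = PipSession()
    # One requirement built directly from a PEP 508-style line ...
    reqs = [InstallRequirement.from_line('requests==2.18.4')]
    # ... plus every line of a requirements file; parse_requirements yields
    # InstallRequirement objects and needs an explicit session in pip 9.x.
    reqs.extend(parse_requirements('requirements.txt', session=session))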
PKZݒv site-packages/pip/req/req_set.pynu[from __future__ import absolute_import

from collections import defaultdict
from itertools import chain
import logging
import os

from pip._vendor import pkg_resources
from pip._vendor import requests

from pip.compat import expanduser
from pip.download import (is_file_url, is_dir_url, is_vcs_url, url_to_path,
                          unpack_url)
from pip.exceptions import (InstallationError, BestVersionAlreadyInstalled,
                            DistributionNotFound, PreviousBuildDirError,
                            HashError, HashErrors, HashUnpinned,
                            DirectoryUrlHashUnsupported, VcsHashUnsupported,
                            UnsupportedPythonVersion)
from pip.req.req_install import InstallRequirement
from pip.utils import (
    display_path, dist_in_usersite, ensure_dir, normalize_path)
from pip.utils.hashes import MissingHashes
from pip.utils.logging import indent_log
from pip.utils.packaging import check_dist_requires_python
from pip.vcs import vcs
from pip.wheel import Wheel

logger = logging.getLogger(__name__)


class Requirements(object):

    def __init__(self):
        self._keys = []
        self._dict = {}

    def keys(self):
        return self._keys

    def values(self):
        return [self._dict[key] for key in self._keys]

    def __contains__(self, item):
        return item in self._keys

    def __setitem__(self, key, value):
        if key not in self._keys:
            self._keys.append(key)
        self._dict[key] = value

    def __getitem__(self, key):
        return self._dict[key]

    def __repr__(self):
        values = ['%s: %s' % (repr(k), repr(self[k])) for k in self.keys()]
        return 'Requirements({%s})' % ', '.join(values)


class DistAbstraction(object):
    """Abstracts out the wheel vs non-wheel prepare_files logic.

    The requirements for anything installable are as follows:
     - we must be able to determine the requirement name
       (or we can't correctly handle the non-upgrade case).
     - we must be able to generate a list of run-time dependencies
       without installing any additional packages (or we would
       have to either burn time by doing temporary isolated installs
       or alternatively violate pip's 'don't start installing unless
       all requirements are available' rule - neither of which are
       desirable).
     - for packages with setup requirements, we must also be able
       to determine their requirements without installing additional
       packages (for the same reason as run-time dependencies)
     - we must be able to create a Distribution object exposing the
       above metadata.
    """

    def __init__(self, req_to_install):
        self.req_to_install = req_to_install

    def dist(self, finder):
        """Return a setuptools Dist object."""
        raise NotImplementedError(self.dist)

    def prep_for_dist(self):
        """Ensure that we can get a Dist for this requirement."""
        raise NotImplementedError(self.dist)


def make_abstract_dist(req_to_install):
    """Factory to make an abstract dist object.

    Preconditions: Either an editable req with a source_dir, or satisfied_by or
    a wheel link, or a non-editable req with a source_dir.

    :return: A concrete DistAbstraction.
    """
    if req_to_install.editable:
        return IsSDist(req_to_install)
    elif req_to_install.link and req_to_install.link.is_wheel:
        return IsWheel(req_to_install)
    else:
        return IsSDist(req_to_install)


class IsWheel(DistAbstraction):

    def dist(self, finder):
        return list(pkg_resources.find_distributions(
            self.req_to_install.source_dir))[0]

    def prep_for_dist(self):
        # FIXME:https://github.com/pypa/pip/issues/1112
        pass


class IsSDist(DistAbstraction):

    def dist(self, finder):
        dist = self.req_to_install.get_dist()
        # FIXME: shouldn't be globally added:
        if dist.has_metadata('dependency_links.txt'):
            finder.add_dependency_links(
                dist.get_metadata_lines('dependency_links.txt')
            )
        return dist

    def prep_for_dist(self):
        self.req_to_install.run_egg_info()
        self.req_to_install.assert_source_matches_version()


class Installed(DistAbstraction):

    def dist(self, finder):
        return self.req_to_install.satisfied_by

    def prep_for_dist(self):
        pass


class RequirementSet(object):

    def __init__(self, build_dir, src_dir, download_dir, upgrade=False,
                 upgrade_strategy=None, ignore_installed=False, as_egg=False,
                 target_dir=None, ignore_dependencies=False,
                 force_reinstall=False, use_user_site=False, session=None,
                 pycompile=True, isolated=False, wheel_download_dir=None,
                 wheel_cache=None, require_hashes=False,
                 ignore_requires_python=False):
        """Create a RequirementSet.

        :param wheel_download_dir: Where still-packed .whl files should be
            written to. If None they are written to the download_dir parameter.
            Separate to download_dir to permit only keeping wheel archives for
            pip wheel.
        :param download_dir: Where still packed archives should be written to.
            If None they are not saved, and are deleted immediately after
            unpacking.
        :param wheel_cache: The pip wheel cache, for passing to
            InstallRequirement.
        """
        if session is None:
            raise TypeError(
                "RequirementSet() missing 1 required keyword argument: "
                "'session'"
            )

        self.build_dir = build_dir
        self.src_dir = src_dir
        # XXX: download_dir and wheel_download_dir overlap semantically and may
        # be combined if we're willing to have non-wheel archives present in
        # the wheelhouse output by 'pip wheel'.
        self.download_dir = download_dir
        self.upgrade = upgrade
        self.upgrade_strategy = upgrade_strategy
        self.ignore_installed = ignore_installed
        self.force_reinstall = force_reinstall
        self.requirements = Requirements()
        # Mapping of alias: real_name
        self.requirement_aliases = {}
        self.unnamed_requirements = []
        self.ignore_dependencies = ignore_dependencies
        self.ignore_requires_python = ignore_requires_python
        self.successfully_downloaded = []
        self.successfully_installed = []
        self.reqs_to_cleanup = []
        self.as_egg = as_egg
        self.use_user_site = use_user_site
        self.target_dir = target_dir  # set from --target option
        self.session = session
        self.pycompile = pycompile
        self.isolated = isolated
        if wheel_download_dir:
            wheel_download_dir = normalize_path(wheel_download_dir)
        self.wheel_download_dir = wheel_download_dir
        self._wheel_cache = wheel_cache
        self.require_hashes = require_hashes
        # Maps from install_req -> dependencies_of_install_req
        self._dependencies = defaultdict(list)

    def __str__(self):
        reqs = [req for req in self.requirements.values()
                if not req.comes_from]
        reqs.sort(key=lambda req: req.name.lower())
        return ' '.join([str(req.req) for req in reqs])

    def __repr__(self):
        reqs = [req for req in self.requirements.values()]
        reqs.sort(key=lambda req: req.name.lower())
        reqs_str = ', '.join([str(req.req) for req in reqs])
        return ('<%s object; %d requirement(s): %s>'
                % (self.__class__.__name__, len(reqs), reqs_str))

    def add_requirement(self, install_req, parent_req_name=None,
                        extras_requested=None):
        """Add install_req as a requirement to install.

        :param parent_req_name: The name of the requirement that needed this
            added. The name is used because when multiple unnamed requirements
            resolve to the same name, we could otherwise end up with dependency
            links that point outside the Requirements set. parent_req must
            already be added. Note that None implies that this is a user
            supplied requirement, vs an inferred one.
        :param extras_requested: an iterable of extras used to evaluate the
            environment markers.
        :return: Additional requirements to scan. That is either [] if
            the requirement is not applicable, or [install_req] if the
            requirement is applicable and has just been added.
        """
        name = install_req.name
        if not install_req.match_markers(extras_requested):
            logger.warning("Ignoring %s: markers '%s' don't match your "
                           "environment", install_req.name,
                           install_req.markers)
            return []

        # This check has to come after we filter requirements with the
        # environment markers.
        if install_req.link and install_req.link.is_wheel:
            wheel = Wheel(install_req.link.filename)
            if not wheel.supported():
                raise InstallationError(
                    "%s is not a supported wheel on this platform." %
                    wheel.filename
                )

        install_req.as_egg = self.as_egg
        install_req.use_user_site = self.use_user_site
        install_req.target_dir = self.target_dir
        install_req.pycompile = self.pycompile
        install_req.is_direct = (parent_req_name is None)

        if not name:
            # url or path requirement w/o an egg fragment
            self.unnamed_requirements.append(install_req)
            return [install_req]
        else:
            try:
                existing_req = self.get_requirement(name)
            except KeyError:
                existing_req = None
            if (parent_req_name is None and existing_req and not
                    existing_req.constraint and
                    existing_req.extras == install_req.extras and not
                    existing_req.req.specifier == install_req.req.specifier):
                raise InstallationError(
                    'Double requirement given: %s (already in %s, name=%r)'
                    % (install_req, existing_req, name))
            if not existing_req:
                # Add requirement
                self.requirements[name] = install_req
                # FIXME: what about other normalizations?  E.g., _ vs. -?
                if name.lower() != name:
                    self.requirement_aliases[name.lower()] = name
                result = [install_req]
            else:
                # Assume there's no need to scan, and that we've already
                # encountered this for scanning.
                result = []
                if not install_req.constraint and existing_req.constraint:
                    if (install_req.link and not (existing_req.link and
                       install_req.link.path == existing_req.link.path)):
                        self.reqs_to_cleanup.append(install_req)
                        raise InstallationError(
                            "Could not satisfy constraints for '%s': "
                            "installation from path or url cannot be "
                            "constrained to a version" % name)
                    # If we're now installing a constraint, mark the existing
                    # object for real installation.
                    existing_req.constraint = False
                    existing_req.extras = tuple(
                        sorted(set(existing_req.extras).union(
                               set(install_req.extras))))
                    logger.debug("Setting %s extras to: %s",
                                 existing_req, existing_req.extras)
                    # And now we need to scan this.
                    result = [existing_req]
                # Canonicalise to the already-added object for the backref
                # check below.
                install_req = existing_req
            if parent_req_name:
                parent_req = self.get_requirement(parent_req_name)
                self._dependencies[parent_req].append(install_req)
            return result

    def has_requirement(self, project_name):
        name = project_name.lower()
        if (name in self.requirements and
           not self.requirements[name].constraint or
           name in self.requirement_aliases and
           not self.requirements[self.requirement_aliases[name]].constraint):
            return True
        return False

    @property
    def has_requirements(self):
        return list(req for req in self.requirements.values() if not
                    req.constraint) or self.unnamed_requirements

    @property
    def is_download(self):
        if self.download_dir:
            self.download_dir = expanduser(self.download_dir)
            if os.path.exists(self.download_dir):
                return True
            else:
                logger.critical('Could not find download directory')
                raise InstallationError(
                    "Could not find or access download directory '%s'"
                    % display_path(self.download_dir))
        return False

    def get_requirement(self, project_name):
        for name in project_name, project_name.lower():
            if name in self.requirements:
                return self.requirements[name]
            if name in self.requirement_aliases:
                return self.requirements[self.requirement_aliases[name]]
        raise KeyError("No project with the name %r" % project_name)

    def uninstall(self, auto_confirm=False):
        for req in self.requirements.values():
            if req.constraint:
                continue
            req.uninstall(auto_confirm=auto_confirm)
            req.commit_uninstall()

    def prepare_files(self, finder):
        """
        Prepare process. Create temp directories, download and/or unpack files.
        """
        # make the wheelhouse
        if self.wheel_download_dir:
            ensure_dir(self.wheel_download_dir)

        # If any top-level requirement has a hash specified, enter
        # hash-checking mode, which requires hashes from all.
        root_reqs = self.unnamed_requirements + self.requirements.values()
        require_hashes = (self.require_hashes or
                          any(req.has_hash_options for req in root_reqs))
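        # (A requirement "has hash options" when it came from a requirements
        # file line such as:  SomePackage==1.0 --hash=sha256:<digest> )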
        if require_hashes and self.as_egg:
            raise InstallationError(
                '--egg is not allowed with --require-hashes mode, since it '
                'delegates dependency resolution to setuptools and could thus '
                'result in installation of unhashed packages.')

        # Actually prepare the files, and collect any exceptions. Most hash
        # exceptions cannot be checked ahead of time, because
        # req.populate_link() needs to be called before we can make decisions
        # based on link type.
        discovered_reqs = []
        hash_errors = HashErrors()
        for req in chain(root_reqs, discovered_reqs):
            try:
                discovered_reqs.extend(self._prepare_file(
                    finder,
                    req,
                    require_hashes=require_hashes,
                    ignore_dependencies=self.ignore_dependencies))
            except HashError as exc:
                exc.req = req
                hash_errors.append(exc)

        if hash_errors:
            raise hash_errors

    def _is_upgrade_allowed(self, req):
        return self.upgrade and (
            self.upgrade_strategy == "eager" or (
                self.upgrade_strategy == "only-if-needed" and req.is_direct
            )
        )

    def _check_skip_installed(self, req_to_install, finder):
        """Check if req_to_install should be skipped.

        This will check if the req is installed, and whether we should upgrade
        or reinstall it, taking into account all the relevant user options.

        After calling this req_to_install will only have satisfied_by set to
        None if the req_to_install is to be upgraded/reinstalled etc. Any
        other value will be a dist recording the current thing installed that
        satisfies the requirement.

        Note that for vcs urls and the like we can't assess skipping in this
        routine - we simply identify that we need to pull the thing down,
        then later on it is pulled down and introspected to assess upgrade/
        reinstalls etc.

        :return: A text reason for why it was skipped, or None.
        """
        # Check whether to upgrade/reinstall this req or not.
        req_to_install.check_if_exists()
        if req_to_install.satisfied_by:
            upgrade_allowed = self._is_upgrade_allowed(req_to_install)

            # Check whether the best version is already installed.
            best_installed = False

            if upgrade_allowed:
                # For link based requirements we have to pull the
                # tree down and inspect to assess the version #, so
                # its handled way down.
                if not (self.force_reinstall or req_to_install.link):
                    try:
                        finder.find_requirement(
                            req_to_install, upgrade_allowed)
                    except BestVersionAlreadyInstalled:
                        best_installed = True
                    except DistributionNotFound:
                        # No distribution found, so we squash the
                        # error - it will be raised later when we
                        # re-try later to do the install.
                        # Why don't we just raise here?
                        pass

                if not best_installed:
                    # don't uninstall conflict if user install and
                    # conflict is not user install
                    if not (self.use_user_site and not
                            dist_in_usersite(req_to_install.satisfied_by)):
                        req_to_install.conflicts_with = \
                            req_to_install.satisfied_by
                    req_to_install.satisfied_by = None

            # Figure out a nice message to say why we're skipping this.
            if best_installed:
                skip_reason = 'already up-to-date'
            elif self.upgrade_strategy == "only-if-needed":
                skip_reason = 'not upgraded as not directly required'
            else:
                skip_reason = 'already satisfied'

            return skip_reason
        else:
            return None

    def _prepare_file(self,
                      finder,
                      req_to_install,
                      require_hashes=False,
                      ignore_dependencies=False):
        """Prepare a single requirements file.

        :return: A list of additional InstallRequirements to also install.
        """
        # Tell user what we are doing for this requirement:
        # obtain (editable), skipping, processing (local url), collecting
        # (remote url or package name)
        if req_to_install.constraint or req_to_install.prepared:
            return []

        req_to_install.prepared = True

        # ###################### #
        # # print log messages # #
        # ###################### #
        if req_to_install.editable:
            logger.info('Obtaining %s', req_to_install)
        else:
            # satisfied_by is only evaluated by calling _check_skip_installed,
            # so it must be None here.
            assert req_to_install.satisfied_by is None
            if not self.ignore_installed:
                skip_reason = self._check_skip_installed(
                    req_to_install, finder)

            if req_to_install.satisfied_by:
                assert skip_reason is not None, (
                    '_check_skip_installed returned None but '
                    'req_to_install.satisfied_by is set to %r'
                    % (req_to_install.satisfied_by,))
                logger.info(
                    'Requirement %s: %s', skip_reason,
                    req_to_install)
            else:
                if (req_to_install.link and
                        req_to_install.link.scheme == 'file'):
                    path = url_to_path(req_to_install.link.url)
                    logger.info('Processing %s', display_path(path))
                else:
                    logger.info('Collecting %s', req_to_install)

        with indent_log():
            # ################################ #
            # # vcs update or unpack archive # #
            # ################################ #
            if req_to_install.editable:
                if require_hashes:
                    raise InstallationError(
                        'The editable requirement %s cannot be installed when '
                        'requiring hashes, because there is no single file to '
                        'hash.' % req_to_install)
                req_to_install.ensure_has_source_dir(self.src_dir)
                req_to_install.update_editable(not self.is_download)
                abstract_dist = make_abstract_dist(req_to_install)
                abstract_dist.prep_for_dist()
                if self.is_download:
                    req_to_install.archive(self.download_dir)
                req_to_install.check_if_exists()
            elif req_to_install.satisfied_by:
                if require_hashes:
                    logger.debug(
                        'Since it is already installed, we are trusting this '
                        'package without checking its hash. To ensure a '
                        'completely repeatable environment, install into an '
                        'empty virtualenv.')
                abstract_dist = Installed(req_to_install)
            else:
                # @@ if filesystem packages are not marked
                # editable in a req, a non deterministic error
                # occurs when the script attempts to unpack the
                # build directory
                req_to_install.ensure_has_source_dir(self.build_dir)
                # If a checkout exists, it's unwise to keep going.  version
                # inconsistencies are logged later, but do not fail the
                # installation.
                # FIXME: this won't upgrade when there's an existing
                # package unpacked in `req_to_install.source_dir`
                if os.path.exists(
                        os.path.join(req_to_install.source_dir, 'setup.py')):
                    raise PreviousBuildDirError(
                        "pip can't proceed with requirements '%s' due to a"
                        " pre-existing build directory (%s). This is "
                        "likely due to a previous installation that failed"
                        ". pip is being responsible and not assuming it "
                        "can delete this. Please delete it and try again."
                        % (req_to_install, req_to_install.source_dir)
                    )
                req_to_install.populate_link(
                    finder,
                    self._is_upgrade_allowed(req_to_install),
                    require_hashes
                )
                # We can't hit this spot and have populate_link return None.
                # req_to_install.satisfied_by is None here (because we're
                # guarded) and upgrade has no impact except when satisfied_by
                # is not None.
                # Then inside find_requirement existing_applicable -> False
                # If no new versions are found, DistributionNotFound is raised,
                # otherwise a result is guaranteed.
                assert req_to_install.link
                link = req_to_install.link

                # Now that we have the real link, we can tell what kind of
                # requirements we have and raise some more informative errors
                # than otherwise. (For example, we can raise VcsHashUnsupported
                # for a VCS URL rather than HashMissing.)
                if require_hashes:
                    # We could check these first 2 conditions inside
                    # unpack_url and save repetition of conditions, but then
                    # we would report less-useful error messages for
                    # unhashable requirements, complaining that there's no
                    # hash provided.
                    if is_vcs_url(link):
                        raise VcsHashUnsupported()
                    elif is_file_url(link) and is_dir_url(link):
                        raise DirectoryUrlHashUnsupported()
                    if (not req_to_install.original_link and
                            not req_to_install.is_pinned):
                        # Unpinned packages are asking for trouble when a new
                        # version is uploaded. This isn't a security check, but
                        # it saves users a surprising hash mismatch in the
                        # future.
                        #
                        # file:/// URLs aren't pinnable, so don't complain
                        # about them not being pinned.
                        raise HashUnpinned()
                hashes = req_to_install.hashes(
                    trust_internet=not require_hashes)
                if require_hashes and not hashes:
                    # Known-good hashes are missing for this requirement, so
                    # shim it with a facade object that will provoke hash
                    # computation and then raise a HashMissing exception
                    # showing the user what the hash should be.
                    hashes = MissingHashes()

                try:
                    download_dir = self.download_dir
                    # We always delete unpacked sdists after pip ran.
                    autodelete_unpacked = True
                    if req_to_install.link.is_wheel \
                            and self.wheel_download_dir:
                        # when doing 'pip wheel` we download wheels to a
                        # dedicated dir.
                        download_dir = self.wheel_download_dir
                    if req_to_install.link.is_wheel:
                        if download_dir:
                            # When downloading, we only unpack wheels to get
                            # metadata.
                            autodelete_unpacked = True
                        else:
                            # When installing a wheel, we use the unpacked
                            # wheel.
                            autodelete_unpacked = False
                    unpack_url(
                        req_to_install.link, req_to_install.source_dir,
                        download_dir, autodelete_unpacked,
                        session=self.session, hashes=hashes)
                except requests.HTTPError as exc:
                    logger.critical(
                        'Could not install requirement %s because '
                        'of error %s',
                        req_to_install,
                        exc,
                    )
                    raise InstallationError(
                        'Could not install requirement %s because '
                        'of HTTP error %s for URL %s' %
                        (req_to_install, exc, req_to_install.link)
                    )
                abstract_dist = make_abstract_dist(req_to_install)
                abstract_dist.prep_for_dist()
                if self.is_download:
                    # Make a .zip of the source_dir we already created.
                    if req_to_install.link.scheme in vcs.all_schemes:
                        req_to_install.archive(self.download_dir)
                # req_to_install.req is only avail after unpack for URL
                # pkgs repeat check_if_exists to uninstall-on-upgrade
                # (#14)
                if not self.ignore_installed:
                    req_to_install.check_if_exists()
                if req_to_install.satisfied_by:
                    if self.upgrade or self.ignore_installed:
                        # don't uninstall conflict if user install and
                        # conflict is not user install
                        if not (self.use_user_site and not
                                dist_in_usersite(
                                    req_to_install.satisfied_by)):
                            req_to_install.conflicts_with = \
                                req_to_install.satisfied_by
                        req_to_install.satisfied_by = None
                    else:
                        logger.info(
                            'Requirement already satisfied (use '
                            '--upgrade to upgrade): %s',
                            req_to_install,
                        )

            # ###################### #
            # # parse dependencies # #
            # ###################### #
            dist = abstract_dist.dist(finder)
            try:
                check_dist_requires_python(dist)
            except UnsupportedPythonVersion as e:
                if self.ignore_requires_python:
                    logger.warning(e.args[0])
                else:
                    req_to_install.remove_temporary_source()
                    raise
            more_reqs = []

            def add_req(subreq, extras_requested):
                sub_install_req = InstallRequirement(
                    str(subreq),
                    req_to_install,
                    isolated=self.isolated,
                    wheel_cache=self._wheel_cache,
                )
                more_reqs.extend(self.add_requirement(
                    sub_install_req, req_to_install.name,
                    extras_requested=extras_requested))

            # We add req_to_install before its dependencies, so that we
            # can refer to it when adding dependencies.
            if not self.has_requirement(req_to_install.name):
                # 'unnamed' requirements will get added here
                self.add_requirement(req_to_install, None)

            if not ignore_dependencies:
                if (req_to_install.extras):
                    logger.debug(
                        "Installing extra requirements: %r",
                        ','.join(req_to_install.extras),
                    )
                missing_requested = sorted(
                    set(req_to_install.extras) - set(dist.extras)
                )
                for missing in missing_requested:
                    logger.warning(
                        '%s does not provide the extra \'%s\'',
                        dist, missing
                    )

                available_requested = sorted(
                    set(dist.extras) & set(req_to_install.extras)
                )
                for subreq in dist.requires(available_requested):
                    add_req(subreq, extras_requested=available_requested)

            # cleanup tmp src
            self.reqs_to_cleanup.append(req_to_install)

            if not req_to_install.editable and not req_to_install.satisfied_by:
                # XXX: --no-install leads this to report 'Successfully
                # downloaded' for only non-editable reqs, even though we took
                # action on them.
                self.successfully_downloaded.append(req_to_install)

        return more_reqs

    def cleanup_files(self):
        """Clean up files, remove builds."""
        logger.debug('Cleaning up...')
        with indent_log():
            for req in self.reqs_to_cleanup:
                req.remove_temporary_source()

    def _to_install(self):
        """Create the installation order.

        The installation order is topological - requirements are installed
        before the requiring thing. We break cycles at an arbitrary point,
        and make no other guarantees.
        """
        # The current implementation, which we may change at any point
        # installs the user specified things in the order given, except when
        # dependencies must come earlier to achieve topological order.
        order = []
        ordered_reqs = set()

        def schedule(req):
            if req.satisfied_by or req in ordered_reqs:
                return
            if req.constraint:
                return
            ordered_reqs.add(req)
            for dep in self._dependencies[req]:
                schedule(dep)
            order.append(req)
        for install_req in self.requirements.values():
            schedule(install_req)
        return order

    def install(self, install_options, global_options=(), *args, **kwargs):
        """
        Install everything in this set (after having downloaded and unpacked
        the packages)
        """
        to_install = self._to_install()

        if to_install:
            logger.info(
                'Installing collected packages: %s',
                ', '.join([req.name for req in to_install]),
            )

        with indent_log():
            for requirement in to_install:
                if requirement.conflicts_with:
                    logger.info(
                        'Found existing installation: %s',
                        requirement.conflicts_with,
                    )
                    with indent_log():
                        requirement.uninstall(auto_confirm=True)
                try:
                    requirement.install(
                        install_options,
                        global_options,
                        *args,
                        **kwargs
                    )
                except:
                    # if install did not succeed, rollback previous uninstall
                    if (requirement.conflicts_with and not
                            requirement.install_succeeded):
                        requirement.rollback_uninstall()
                    raise
                else:
                    if (requirement.conflicts_with and
                            requirement.install_succeeded):
                        requirement.commit_uninstall()
                requirement.remove_temporary_source()

        self.successfully_installed = to_install
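
A minimal end-to-end sketch of driving the RequirementSet above (the paths, index URL, and pinned package are illustrative; PipSession and PackageFinder are assumed from pip.download and pip.index as in the rest of the pip 9.x tree):

    from pip.download import PipSession
    from pip.index import PackageFinder
    from pip.req import InstallRequirement, RequirementSet

    session = PipSession()
    finder = PackageFinder(find_links=[],
                           index_urls=['https://pypi.org/simple'],
                           session=session)
    req_set = RequirementSet(build_dir='/tmp/pip-build',
                             src_dir='/tmp/pip-src',
                             download_dir=None,
                             session=session)
    req_set.add_requirement(InstallRequirement.from_line('requests==2.18.4'))
    req_set.prepare_files(finder)   # resolve, download and unpack
    req_set.install(install_options=[], global_options=())
    req_set.cleanup_files()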
PKZT#HH%site-packages/pip/req/req_install.pyonu[
abc@@sddlmZddlZddlZddlZddlZddlZddlZddlZddl	Z	ddl
Z
ddlmZddl
mZddlmZddlmZmZddlmZddlmZdd	lmZmZdd
lmZddlmZmZ ddl!m"Z"ddl#Z$dd
l%m&Z&m'Z'm(Z(ddl)m*Z*m+Z+m,Z,m-Z-ddl.m/Z/m0Z0ddl1m2Z2m3Z3m4Z4m5Z5ddl6m7Z7m8Z8m9Z9m:Z:m;Z;m<Z<m=Z=m>Z>m?Z?m@Z@mAZAmBZBmCZCmDZDmEZEmFZFddlGmHZHddlImJZJddlKmLZLddlMmNZNddlOmPZPddlQmRZRddlSmTZTddl#mUZUmVZVejWeXZYejZj[j\Z]dZ^dZ_de`fdYZadZbecdZddS( i(tabsolute_importN(t	sysconfig(tchange_root(t
FeedParser(t
pkg_resourcestsix(t
specifiers(tMarker(tInvalidRequirementtRequirement(tcanonicalize_name(tVersiontparse(tconfigparser(t
native_strt
get_stdlibtWINDOWS(tis_urlturl_to_pathtpath_to_urltis_archive_file(tInstallationErrortUninstallationError(tbin_pytrunning_under_virtualenvtPIP_DELETE_MARKER_FILENAMEtbin_user(tdisplay_pathtrmtreetask_path_existst
backup_dirtis_installable_dirtdist_in_usersitetdist_in_site_packagest
egg_link_pathtcall_subprocesstread_text_filetFakeFilet_make_build_dirt
ensure_dirtget_installed_versiontnormalize_patht
dist_is_local(tHashes(tRemovedInPip10Warning(t
indent_log(tSETUPTOOLS_SHIM(topen_spinner(tUninstallPathSet(tvcs(tmove_wheel_filestWheelcC@sOtjd|}d}|r?|jd}|jd}n|}||fS(Ns^(.+)(\[[^\]]+\])$ii(tretmatchtNonetgroup(tpathtmtextrastpath_no_extras((s7/usr/lib/python2.7/site-packages/pip/req/req_install.pyt
_strip_extras9scC@std|DS(Ncs@s|]}tj|VqdS(N(Rt
safe_extra(t.0textra((s7/usr/lib/python2.7/site-packages/pip/req/req_install.pys	Fs(tset(R:((s7/usr/lib/python2.7/site-packages/pip/req/req_install.pyt_safe_extrasEstInstallRequirementcB@seZd'ed'eeed'ed'd'edZed'd'ed'd'edZed'ed'd'edZdZ	dZ
dZedZ
edZdZd	Zd
ZedZedZed
ZdZdZdZdZejdZedZdZedZedZdZ dZ!dZ"dZ#d'dZ$gd'd'd'dZ%dZ&dZ'dZ(d(d'd Z)d!Z*ed"Z+d'd'd'd#Z,d$Z-ed%Z.ed&Z/RS()c@sd|_ttjrytWntk
rtjjkrVd}n>dkrt	fdt
Drd}ntj}t
d|fnXtj|_n|_||_|
|_||_||_||_||_|_||_|	dk	r*|	|_no6j|_d|_d|_d|_d|_d|_||_ d|_!d|_"t#|_$t#|_%d|_&|r|ni|_'||_(t#|_)|
|_*dS(Ns%It looks like a path. Does it exist ?t=c3@s|]}|kVqdS(N((R>top(treq(s7/usr/lib/python2.7/site-packages/pip/req/req_install.pys	Vss,= is not a valid operator. Did you mean == ?sInvalid requirement: '%s'
%s((+R:t
isinstanceRtstring_typesR	RtosR8tseptanyt	operatorst	tracebackt
format_excRRAREt
comes_fromt
constraintt
source_dirteditablet_wheel_cachetlinkt
original_linktas_eggR6tmarkerstmarkert_egg_info_pathtsatisfied_bytconflicts_witht_temp_build_dirt_ideal_build_dirtupdatetinstall_succeededtuninstalledtFalsetnothing_to_uninstallt
use_user_sitet
target_dirtoptionst	pycompiletpreparedtisolated(tselfRERNRPRQRSRUR]ReRVRgRdtwheel_cacheROtadd_msg((REs7/usr/lib/python2.7/site-packages/pip/req/req_install.pyt__init__KsN	
	)																					cC@sddlm}t||\}	}
}|
jdrFt|
}nd}||	|d|dtd||
d|d|d	|r|nid
|}
|dk	rt||
_n|
S(Ni(tLinksfile:RPRQRSRORgRdRi(	t	pip.indexRltparse_editablet
startswithRR6tTrueRAR:(tclsteditable_reqRNtdefault_vcsRgRdRiRORltnameturltextras_overrideRPtres((s7/usr/lib/python2.7/site-packages/pip/req/req_install.pyt
from_editables 	cC@sddlm}t|r%d}nd}||kry|j|d\}}	|	j}	|	sjd}	qt|	}	nd}	|j}d}
tjj	tjj
|}d}d}
t|r||}nt|\}}
tjj|rOtjj
|ks|jdrOt|s:td|n|t|}nFt|rtjj|stjd|n|t|}n|r,|jd	krtjd
|jr|ttjj	tjj
|j}n|jr t|j}d|j|jf}
q2|j}
n|}
|r>|ni}||
|d|d
|	d|d|d|d|}|
rt t!d|
j"|_"n|S(sCreates an InstallRequirement from a name, which might be a
        requirement, directory containing 'setup.py', filename, or URL.
        i(Rls; t;it.s;Directory %r is not installable. File 'setup.py' not found.sARequirement %r looks like a filename, but the file does not existtfiles\.\./s%s==%sRSRVRgRdRiROtplaceholderN(#RmRlRtsplittstripR6RRHR8tnormpathtabspathR<tisdirRIRoRRRRtisfiletloggertwarningtschemeR4tsearchRutis_wheelR3tfilenameRttversiontegg_fragmentRAR	R:(RqRtRNRgRdRiRORlt
marker_sepRVRER8RSR:tptwheelRw((s7/usr/lib/python2.7/site-packages/pip/req/req_install.pyt	from_linesb		!

$-	cC@s|jr;t|j}|jrV|d|jj7}qVn|jrP|jjnd}|jdk	r|dt|jj7}n|jrt	|jt
jr|j}n|jj}|r|d|7}qn|S(Ns from %ss in %ss
 (from %s)(
REtstrRSRuR6RYRtlocationRNRFRRGt	from_path(RhtsRN((s7/usr/lib/python2.7/site-packages/pip/req/req_install.pyt__str__s			cC@s d|jjt||jfS(Ns<%s object: %s editable=%r>(t	__class__t__name__RRQ(Rh((s7/usr/lib/python2.7/site-packages/pip/req/req_install.pyt__repr__scC@s|jdkr'|j|||_n|jdk	r|r|j}|jj|j|j|_||jkrtjd|jqndS(sEnsure that if a link can be found for this, that it is found.

        Note that self.link may still be None - if Upgrade is False and the
        requirement is already installed.

        If require_hashes is True, don't use the wheel cache, because cached
        wheels, always built locally, have different hashes than the files
        downloaded from the index server and thus throw false hash mismatches.
        Furthermore, cached wheels at present have undeterministic contents due
        to file modification times.
        sUsing cached wheel link: %sN(RSR6tfind_requirementRRtcached_wheelRtRtdebug(Rhtfindertupgradetrequire_hashestold_link((s7/usr/lib/python2.7/site-packages/pip/req/req_install.pyt
populate_link	s	cC@s
|jjS(N(REt	specifier(Rh((s7/usr/lib/python2.7/site-packages/pip/req/req_install.pyRscC@s4|j}t|dko3tt|jdkS(sReturn whether I am pinned to an exact version.

        For example, some-package==1.2 is pinned; some-package>1.2 is not.
        is==s===(s==s===(Rtlentnexttitertoperator(RhR((s7/usr/lib/python2.7/site-packages/pip/req/req_install.pyt	is_pinned!s	cC@sy|jdkrdSt|j}|jrut|jtjrL|j}n|jj}|ru|d|7}qun|S(Ns->(RER6RRNRFRRGR(RhRRN((s7/usr/lib/python2.7/site-packages/pip/req/req_install.pyR+s	cC@s|jdk	r|jS|jdkrVtjjtjdd|_||_|jS|j	rq|j
j}n	|j
}tjj|st
jd|t|ntjj||S(Ns-buildspip-sCreating directory %s(R[R6RERHR8trealpathttempfiletmkdtempR\RQRttlowertexistsRRR&tjoin(Rht	build_dirRt((s7/usr/lib/python2.7/site-packages/pip/req/req_install.pytbuild_location8s				
cC@s|jdk	rdS|j}d|_|j|j}tjj|rbtdt	|nt
jd|t	|t	|tj
||||_d|_||_d|_dS(sMove self._temp_build_dir to self._ideal_build_dir/self.req.name

        For some requirements (e.g. a path to a directory), the name of the
        package is not available until we run egg_info, so the build_location
        will return a temporary directory and store the _ideal_build_dir.

        This is only called by self.egg_info_path to fix the temporary build
        directory.
        Ns<A package already exists in %s; please remove it to continues,Moving package %s from %s to new location %s(RPR6R[RR\RHR8RRRRRtshutiltmoveRX(Rhtold_locationtnew_location((s7/usr/lib/python2.7/site-packages/pip/req/req_install.pyt_correct_build_locationSs"
					cC@s,|jdkrdSttj|jjS(N(RER6RRt	safe_nameRt(Rh((s7/usr/lib/python2.7/site-packages/pip/req/req_install.pyRtsscC@s+tjj|j|jr$|jjp'dS(Nt(RHR8RRPRStsubdirectory_fragment(Rh((s7/usr/lib/python2.7/site-packages/pip/req/req_install.pytsetup_py_dirys	cC@syddl}WnHtk
rZtddkr;d}ntj}td|nXtjj	|j
d}tjrt
|tjr|jtj}n|S(Nit
setuptoolssPlease install setuptools.sWCould not import setuptools which is required to install from a source distribution.
%sssetup.py(RtImportErrorR(R6RLRMRRHR8RRRtPY2RFt	text_typetencodetsystgetfilesystemencoding(RhRRjtsetup_py((s7/usr/lib/python2.7/site-packages/pip/req/req_install.pyRs
	cC@s|jr%tjd|j|jntjd|j|jtt|j}tjd|g}|j	r|dg7}n|dg}|j
rg}n.tjj
|jd}t|ddg}t||d|jd	td
dWdQX|jsott|jdtr'd
}nd}tdj
|jd||jdg|_|jnbt|jd}t|jj|krtjd|j|j||jt||_ndS(Ns2Running setup.py (path:%s) egg_info for package %ss7Running setup.py (path:%s) egg_info for package from %ss-cs
--no-user-cfgtegg_infospip-egg-infos
--egg-basetcwdtshow_stdouttcommand_descspython setup.py egg_infoRs==s===RtNamesuRunning setup.py (path:%s) egg_info for package %s produced metadata for project name %s. Fix your #egg=%s fragments.(RtRRRRSR-R.Rt
executableRgRQRHR8RRR'R#R`RERFt
parse_versiontpkg_infoRR	RR
R(Rhtscripttbase_cmdtegg_info_cmdtegg_base_optiontegg_info_dirRDt
metadata_name((s7/usr/lib/python2.7/site-packages/pip/req/req_install.pytrun_egg_infosN	

	
		

	
		

cC@sj|jdk	r5|jj|s%dS|jj|S|j|}tjj|sZdSt|}|S(N(	RYR6thas_metadatatget_metadatat
egg_info_pathRHR8RR$(RhRtdata((s7/usr/lib/python2.7/site-packages/pip/req/req_install.pyt
egg_info_datasc	C@s |jdkr
|jr$|j}ntjj|jd}tj|}|jrg}xtj	|D]\}}}x-t
jD]"}||kr|j|qqWxt
|D]}tjjtjj||dds
tjjtjj||ddr|j|q|dks2|dkr|j|qqW|jg|D]}tjj||^qSqjWg|D]}|jdr|^q}n|std	||fnt|d
kr|jddntjj||d
|_ntjj|j|S(Nspip-egg-infotbintpythontScriptss
Python.exettestttestss	.egg-infos$No files/directories in %s (from %s)itkeycS@s8|jtjjtjjr3|jtjjp6dS(Ni(tcountRHR8RItaltsep(tx((s7/usr/lib/python2.7/site-packages/pip/req/req_install.pyt
si(RXR6RQRPRHR8RRtlistdirtwalkR1tdirnamestremovetlisttlexistsRtextendtendswithRRtsort(	RhRtbaset	filenamestroottdirstfilestdirtf((s7/usr/lib/python2.7/site-packages/pip/req/req_install.pyRs>						*+	
cC@s]t}|jd}|s@tjdt|jdn|j|pOd|jS(NsPKG-INFOsNo PKG-INFO file found in %sR(RRRRRRtfeedtclose(RhRR((s7/usr/lib/python2.7/site-packages/pip/req/req_install.pyRs	s	\[(.*?)\]cC@s
t|jS(N(R(Rt(Rh((s7/usr/lib/python2.7/site-packages/pip/req/req_install.pytinstalled_version scC@sj|jd}|jjrG||jjkrGtjd||jntjdt|j||dS(NRs'Requested %s, but installing version %ss;Source in %s has version %s, which satisfies requirement %s(	RRERRRRRRRP(RhR((s7/usr/lib/python2.7/site-packages/pip/req/req_install.pytassert_source_matches_version$s
cC@s|js tjd|jdS|jjdkr6dS|jsCdS|jjjdd\}}tj	|}|r||jj}|r|j
|jq|j|jndS(Ns>Cannot update repository at %s; repository location is unknownR{t+i(RSRRRPRR]RuR}R1tget_backendtobtaintexport(RhRtvc_typeRutbackendtvcs_backend((s7/usr/lib/python2.7/site-packages/pip/req/req_install.pytupdate_editable5s"	
	cC@s|js%td|jfn|jp4|j}t|j}t|s{tj	d|j
|tjt
|_dS|tkrtj	d|j
|t
|_dSt|}t|}djtj|j}|jotjj|j}t|jdd}|r|jjdr|jj|r|j|j|jdrx'|j dj!D]7}	tjj"tjj#|j|	}
|j|
qsWq
|jd	r
|jd
r|j d
}ng}xg|j d	j!D]}|r||kr|^qD]^}
tjj#|j|
}
|j|
|j|
d|j|
d|j|
d
q"Wq
n|rt$j%dj|jt&|j|nQ|jjdr2|j|jtjj'|jd}tjj#tjj(|jd}|j)|d|n|rw|jjdrwxt*j+j,|D]}
|j|
q]Wn|rt-|d%}tjj.|j/j0}WdQX|j|tjj#tjj(|d}|j)||jntj1d||j|jdr|j2drxz|j3dD]f}t4|rVt5}nt6}|jtjj#||t7r;|jtjj#||dq;q;Wn|jdrt8j9ri}n
idd6}t:j;|}|j<t=|j>d|j?drx|j@dD]\}}t4|rAt5}nt6}|jtjj#||t7r |jtjj#||d|jtjj#||d|jtjj#||dq q Wqn|jA|||_BdS( s
        Uninstall the distribution currently satisfying this requirement.

        Prompts before removing or modifying files unless
        ``auto_confirm`` is True.

        Refuses to delete or modify files outside of ``sys.prefix`` -
        thus uninstallation within a virtual environment can only
        modify that virtual environment, even if the virtualenv is
        linked to global site-packages.

        s.Cannot uninstall requirement %s, not installeds1Not uninstalling %s at %s, outside environment %sNs<Not uninstalling %s at %s, as it is in the standard library.s{0}.egg-infoR8s	.egg-infosinstalled-files.txts
top_level.txtsnamespace_packages.txts.pys.pycs.pyosUninstalling a distutils installed project ({0}) has been deprecated and will be removed in a future version. This is due to the fact that uninstalling a distutils project will only partially uninstall the project.s.eggiseasy-install.pths./s
.dist-infotrs)Not sure how to uninstall: %s - Check: %stscriptss.batsentry_points.txtRCt
delimiterstconsole_scriptss.exes
.exe.manifests
-script.py(RC(Ctcheck_if_existsRRtRYRZR)RR*RtinfoRRtprefixRpRaRR0R"tformatRtto_filenametproject_nameRRHR8Rtgetattrt	_providerR6RtaddRRt
splitlinesRRtwarningstwarnR,R}tdirnametadd_pthtpipRtuninstallation_pathstopentnormcasetreadlineR~Rtmetadata_isdirtmetadata_listdirR RRRRRR
tSafeConfigParsertreadfpR%tget_metadata_linesthas_sectiontitemsRR_(Rhtauto_confirmtdistt	dist_pathtpaths_to_removetdevelop_egg_linktdevelop_egg_link_egg_infotegg_info_existstdistutils_egg_infotinstalled_fileR8t
namespacesRt
top_level_pkgteasy_install_eggteasy_install_pthtfhtlink_pointerRtbin_dirRdtconfigRttvalue((s7/usr/lib/python2.7/site-packages/pip/req/req_install.pyt	uninstallRs

				"
		$
	
	*		
	'
cC@s0|jr|jjntjd|jdS(Ns'Can't rollback %s, nothing uninstalled.(R_trollbackRterrorRt(Rh((s7/usr/lib/python2.7/site-packages/pip/req/req_install.pytrollback_uninstalls	cC@s<|jr|jjn|js8tjd|jndS(Ns%Can't commit %s, nothing uninstalled.(R_tcommitRaRR*Rt(Rh((s7/usr/lib/python2.7/site-packages/pip/req/req_install.pytcommit_uninstalls
		cC@st}d|j|jdf}tjj||}tjj|rtdt|d}|dkrxt	}q|dkrt
jdt|tj|q|dkrt
|}t
jd	t|t|tj||q|dkrtjd
qn|rtj|dtjdt}tjjtjj|j}xtj|D]\}	}
}d|
kr|
jdnxl|
D]d}tjj|	|}|j||}
tj|jd
|
d
}d|_|j|dqWxb|D]Z}|tkr!q	ntjj|	|}|j||}
|j||jd
|
q	WqhW|jt
j dt|ndS(Ns	%s-%s.zipRs8The file %s exists. (i)gnore, (w)ipe, (b)ackup, (a)bort titwtbtasDeleting %ssBacking up %s to %sit
allowZip64spip-egg-infot/iiRsSaved %s(R.R/R0R1i(!RpRtRRHR8RRRRR`RRRRRRRtexittzipfiletZipFiletZIP_DEFLATEDR
RRRt_clean_zip_nametZipInfot
external_attrtwritestrRtwriteRR(RhRtcreate_archivetarchive_nametarchive_pathtresponset	dest_filetzipRtdirpathRRRRttzipdirR((s7/usr/lib/python2.7/site-packages/pip/req/req_install.pytarchivesV		
	!
	
#
cC@s0|t|d}|jtjjd}|S(NiR3(RtreplaceRHR8RI(RhRtR((s7/usr/lib/python2.7/site-packages/pip/req/req_install.pyR85sc@s@|sd}njdk	r8tfd|DStSdS(NRc3@s(|]}jji|d6VqdS(R?N(RVtevaluate(R>R?(Rh(s7/usr/lib/python2.7/site-packages/pip/req/req_install.pys	Ds(R(RVR6RJRp(Rhtextras_requested((Rhs7/usr/lib/python2.7/site-packages/pip/req/req_install.pyt
match_markers=s	c@s"|jr#|j||d|dS|jrtjj|j}tjj||j|j	|jdd|d|t
|_dS||jj
dg7}||jj
dg7}|jrt|dg}ntjdd}tjj|d	}z|j|||}	d
|jf}
t|
:}t(t|	|d|jdtd
|WdQXWdQXtjj|stjd|dSt
|_|jrdSfd}t|]}
xS|
D]7}tjj |}|j!dr||}PqqWtj"d|dSWdQXg}t|k}
xa|
D]Y}|j#}tjj$|rz|tjj%7}n|j&tjj'|||qCWWdQXtjj|d}t|d!}
|
j(dj|dWdQXWdtjj|rtj)|nt*|XdS(NRRtstrip_file_prefixtglobal_optionstinstall_optionss
--no-user-cfgs-recordspip-sinstall-record.txtsRunning setup.py install for %sRRtspinnersRecord file %s not foundc@s4dkstjj|r#|St|SdS(N(R6RHR8tisabsR(R8(R(s7/usr/lib/python2.7/site-packages/pip/req/req_install.pytprepend_root~ss	.egg-infos;Could not find .egg-info directory in install record for %ssinstalled-files.txtR/s
(+RQtinstall_editableRR
Rt
wheel_versionRPtcheck_compatibilityRtR2RpR^RdtgetRgRRRRHR8Rtget_install_argsR/R-R#RR`RRRRURRRRR~RRItappendtrelpathR<RR(RhRLRKRRRJRt
temp_locationtrecord_filenametinstall_argstmsgRMRORtlinet	directoryRt	new_linesRtinst_files_path((Rs7/usr/lib/python2.7/site-packages/pip/req/req_install.pytinstallIs~					

			


	$cC@s+|jdkr$|j||_n|jS(sAEnsure that a source_dir is set.

        This will create a temporary build dir if the name of the requirement
        isn't known yet.

        :param parent_dir: The ideal pip parent_dir for the source_dir.
            Generally src_dir for editables and build_dir for sdists.
        :return: self.source_dir
        N(RPR6R(Rht
parent_dir((s7/usr/lib/python2.7/site-packages/pip/req/req_install.pytensure_has_source_dirs
cC@stjdg}|jd|jt|j|t|dd|g7}|jsf|dg7}n|dk	r|d|g7}n|dk	r|d|g7}n|jr|dg7}n
|d	g7}t	rd
t
j}|dtj
jtjdd
||jg7}n|S(Ns-us-cR_s--records#--single-version-externally-manageds--roots--prefixs	--compiles--no-compileRs--install-headerstincludetsite(RRRUR.RRRUR6ReRRtget_python_versionRHR8RRRt(RhRKRXRRRYt
py_ver_str((s7/usr/lib/python2.7/site-packages/pip/req/req_install.pyRTs(
		
	cC@s|jrPtjjtjj|jtrPtjd|jt|jnd|_|j
rtjj|j
rt|j
nd|_
dS(sVRemove the source files from this requirement, if they are marked
        for deletionsRemoving source in %sN(RPRHR8RRRRRRR6R[(Rh((s7/usr/lib/python2.7/site-packages/pip/req/req_install.pytremove_temporary_sources	c
C@stjd|j|jr2t|dg}n|r]dj|g}t||}ntOttj	dt
|jgt|ddgt|d|jdt
WdQXt|_dS(	NsRunning setup.py develop for %ss
--no-user-cfgs--prefix={0}s-ctdevelops	--no-depsRR(RRRtRgRRR-R#RRR.RRR`RpR^(RhRLRKRtprefix_param((s7/usr/lib/python2.7/site-packages/pip/req/req_install.pyRPs	
*
	
cC@s|jdkrtSyett|j}d|_tjt||_|j	rw|jrw|j|_
d|_tSWntjk
rtStj
k
rtj|jj}|jrt|r||_
qtrt|rtd|j|jfqq||_
nXtS(sFind an installed distribution that satisfies or conflicts
        with this requirement, and set self.satisfied_by or
        self.conflicts_with appropriately.
        sVWill not install to the user site because it will lack sys.path precedence to %s in %sN(RER6R`R	RRWRtget_distributionRYRQRZRptDistributionNotFoundtVersionConflictRtRbR RR!RRR(Rht	no_markert
existing_dist((s7/usr/lib/python2.7/site-packages/pip/req/req_install.pyRs2				
cC@s|jo|jjS(N(RSR(Rh((s7/usr/lib/python2.7/site-packages/pip/req/req_install.pyR scC@sPt|j|j|d|jd|jd|d|d|jd|jd|dS(NtuserthomeRRReRgRJ(R2RtRERbRcReRg(RhtwheeldirRRRJ((s7/usr/lib/python2.7/site-packages/pip/req/req_install.pyR2$s				cC@s|jdjd}tjj|}tj||}tjjtjj|d}tj	tjj|d|d|S(sAReturn a pkg_resources.Distribution built from self.egg_info_pathRR3iRtmetadata(
RtrstripRHR8RRtPathMetadatatsplitexttbasenametDistribution(RhRtbase_dirRqt	dist_name((s7/usr/lib/python2.7/site-packages/pip/req/req_install.pytget_dist0s"cC@st|jjdiS(sReturn whether any known-good hashes are specified as options.

        These activate --require-hashes mode; hashes specified as part of a
        URL do not.

        thashes(tboolRdRS(Rh((s7/usr/lib/python2.7/site-packages/pip/req/req_install.pythas_hash_options;scC@sn|jjdij}|r*|jn|j}|rd|jrd|j|jgj|jnt	|S(sReturn a hash-comparer that considers my option- and URL-based
        hashes to be known-good.

        Hashes in URLs--ones embedded in the requirements file, not ones
        downloaded from an index server--are almost peers with ones from
        flags. They satisfy --require-hashes (whether it was implicitly or
        explicitly activated) but do not activate it. md5 and sha224 are not
        allowed in flags, which should nudge people toward good algos. We
        always OR all hashes together, even ones from URLs.

        :param trust_internet: Whether to trust URL-based (#md5=...) hashes
            downloaded from the internet, as by populate_link()

        Rz(
RdRStcopyRSRTthasht
setdefaultt	hash_nameRUR+(Rhttrust_internettgood_hashesRS((s7/usr/lib/python2.7/site-packages/pip/req/req_install.pyRzEs
"N((0Rt
__module__R6R`RpRktclassmethodRxRRRRtpropertyRRRRRRtRRRRRRR4tcompilet_requirements_section_reRRRR(R+R-RER8RIR_RaRTRfRPRRR2RyR|Rz(((s7/usr/lib/python2.7/site-packages/pip/req/req_install.pyRBIs^	;		M			
	
		 	:		6					0	\			
	)	
cC@s.tjd|}|r*|jd}n|S(s2
        Strip req postfix ( -dev, 0.2, etc )
    s^(.*?)(?:-dev|-\d.*)$i(R4RR7(RER5((s7/usr/lib/python2.7/site-packages/pip/req/req_install.pyt_strip_postfix[scC@sPddlm}|}d}tjd|}|rU|jd}|jd}n|}tjj|rtjj	tjj
|dstd|nt|}n|j
jdr
||j}|r||td	|j
jfS||dfSnx;tD]3}|j
jd
|rd||f}PqqWd|kr|r{tjd
t|d|}qtd|n|jdddj
}	tj|	sd|dj
gtjD]}
|
jd^qd}t|n||j}|s$tdn|s=td|nt||dfS(sParses an editable requirement into:
        - a requirement name
        - an URL
        - extras
        - editable options
    Accepted requirements:
        svn+http://blahblah@rev#egg=Foobar[baz]&subdirectory=version_subdir
        .[some_extra]
    i(Rls^(.+)(\[[^\]]+\])$iissetup.pys;Directory %r is not installable. File 'setup.py' not found.sfile:R|s%s:s%s+%sRsD--default-vcs has been deprecated and will be removed in the future.sb%s should either be a path to a local project or a VCS url beginning with svn+, git+, hg+, or bzr+sFor --editable=%s only s, s+URLs is currently supporteds@Could not detect requirement name, please specify one with #egg=s@--editable=%s is not the right format; it must have #egg=PackageN(RmRlR6R4R5R7RHR8RRRRRRRoRR	R:R1RRR,R}RtbackendsRtR(RrRsRlRuR:R9t
url_no_extrastpackage_nametversion_controlRRt
error_message((s7/usr/lib/python2.7/site-packages/pip/req/req_install.pyRngs`!


1
(et
__future__RtloggingRHR4RRRRLRR5t	distutilsRtdistutils.utilRtemail.parserRtpip._vendorRRtpip._vendor.packagingRtpip._vendor.packaging.markersRt"pip._vendor.packaging.requirementsRR	tpip._vendor.packaging.utilsR
tpip._vendor.packaging.versionRRRtpip._vendor.six.movesR
t	pip.wheelR
t
pip.compatRRRtpip.downloadRRRRtpip.exceptionsRRt
pip.locationsRRRRt	pip.utilsRRRRRR R!R"R#R$R%R&R'R(R)R*tpip.utils.hashesR+tpip.utils.deprecationR,tpip.utils.loggingR-tpip.utils.setuptools_buildR.tpip.utils.uiR/tpip.req.req_uninstallR0tpip.vcsR1R2R3t	getLoggerRRt	Specifiert
_operatorstkeysRKR<RAtobjectRBRR6Rn(((s7/usr/lib/python2.7/site-packages/pip/req/req_install.pytsX""j			PK
Z5=z''"site-packages/pip/req/req_file.pyonu[
abc@@s2dZddlmZddlZddlZddlZddlZddlZddlZddl	m
Zddlm
Z
ddlZddlmZddlmZddlmZdd	lmZdd
lmZdgZejdejZejd
Zejejejej ej!ej"ej#ej$ej%ej&ej'ej(ej)ej*ej+ej,ej-ej.ej/ej0ej1gZ2ej3ej4ej5gZ6ge6D]Z7e7j8^qZ9dddde;ddZ<dZ=ddddde;dZ>dZ?dZ@dZAdZBdZCdS(s
Requirements file parsing
i(tabsolute_importN(tparse(tfilterfalse(tget_file_content(tInstallRequirement(tRequirementsFileParseError(tRemovedInPip10Warning(t
cmdoptionstparse_requirementss^(http|https|file):s(^|\s)+#.*$cc@s|dkrtdnt|d|d|\}}t||}	xQ|	D]I\}
}t|||
|||||d|}x|D]}
|
VqWqOWdS(sParse a requirements file and yield InstallRequirement instances.

    :param filename:    Path or url of requirements file.
    :param finder:      Instance of pip.index.PackageFinder.
    :param comes_from:  Origin description of requirements.
    :param options:     cli options.
    :param session:     Instance of pip.download.PipSession.
    :param constraint:  If true, parsing a constraint file rather than
        requirements file.
    :param wheel_cache: Instance of pip.wheel.WheelCache
    sCparse_requirements() missing 1 required keyword argument: 'session't
comes_fromtsessiont
constraintN(tNonet	TypeErrorRt
preprocesstprocess_line(tfilenametfinderR	toptionsR
Rtwheel_cachet_tcontentt
lines_enumtline_numbertlinetreq_itertreq((s4/usr/lib/python2.7/site-packages/pip/req/req_file.pyR@s
	
cC@sCt|jdd}t|}t|}t||}|S(sSplit, filter, and join lines, and return a line iterator

    :param content: the content of the requirements file
    :param options: cli options
    tstarti(t	enumeratet
splitlinest
join_linestignore_commentst
skip_regex(RRR((s4/usr/lib/python2.7/site-packages/pip/req/req_file.pyRas
c	c@sTt}	|	j}
d|
_|r3|j|
_nt|\}}tjdkrf|jd}n|	j	t
j||
\}
}d|rdnd||f}|rM|r|jnt
}|rtj||
ni}x>tD]6}||
jkr|
j|r|
j|||sN(iii(;tbuild_parsertget_default_valuesRt	index_urltformat_controltbreak_args_optionstsystversion_infotencodet
parse_argstshlextsplitt
isolated_modetFalseRtcheck_install_build_globaltSUPPORTED_OPTIONS_REQ_DESTt__dict__Rt	from_linet	editablesR#t
from_editabletrequirementstconstraintstTruet	SCHEME_REtsearchturllib_parseturljointostpathtjointdirnameRtrequire_hashestallow_externaltwarningstwarnRtallow_all_externaltallow_unverifiedt
index_urlst	use_wheeltpiptindextfmt_ctl_no_use_wheeltno_indextextra_index_urlstextendt
find_linkstabspathtexiststappendtpretallow_all_prereleasestprocess_dependency_linkst
trusted_hoststsecure_origins(RRRRR	RR
RRtparsertdefaultstargs_strtoptions_strtoptsRtline_comes_fromR"treq_optionstdestR#treq_pathtnested_constraintRtvaluetreq_dirtrelative_to_reqs_file((s4/usr/lib/python2.7/site-packages/pip/req/req_file.pyRns		!
		
	
$
		
	
	
				
					cC@s|jd}g}|}xJ|D]B}|jdsG|jdrKPq#|j||jdq#Wdj|dj|fS(sBreak up the line into an args and options string.  We only want to shlex
    (and then optparse) the options, not the args.  args can contain markers
    which are corrupted by shlex.
    t t-s--i(R1t
startswithRVtpopRC(RttokenstargsRttoken((s4/usr/lib/python2.7/site-packages/pip/req/req_file.pyR+s

cC@sYtjdt}tt}x$|D]}|}|j|q#Wd}||_|S(s7
    Return a parser for parsing requirement lines
    tadd_help_optioncS@st|dS(N(R(tselftmsg((s4/usr/lib/python2.7/site-packages/pip/req/req_file.pytparser_exits(toptparsetOptionParserR3tSUPPORTED_OPTIONStSUPPORTED_OPTIONS_REQt
add_optiontexit(R\toption_factoriestoption_factorytoptionRs((s4/usr/lib/python2.7/site-packages/pip/req/req_file.pyR's

			cc@sd}g}x|D]\}}|jds>tj|rtj|rZd|}n|r|j||dj|fVg}q||fVq|s|}n|j|jdqW|r|dj|fVndS(sJoins a line ending in '' with the previous line (except when following
    comments).  The joined line takes on the index of the first line.
    s\RitN(Rtendswitht
COMMENT_REtmatchRVRCtstrip(Rtprimary_line_numbertnew_lineRR((s4/usr/lib/python2.7/site-packages/pip/req/req_file.pyR"s 

		cc@sMxF|D]>\}}tjd|}|j}|r||fVqqWdS(s1
    Strips comments and filter empty lines.
    R}N(RtsubR(RRR((s4/usr/lib/python2.7/site-packages/pip/req/req_file.pyR?s
c@sI|r|jnd}|rEtj|tfd|}n|S(ss
    Skip lines that match '--skip-requirements-regex' pattern

    Note: the regex pattern is only built once
    c@sj|dS(Ni(R>(te(tpattern(s4/usr/lib/python2.7/site-packages/pip/req/req_file.pytTR}N(tskip_requirements_regexRtretcompileR(RRR ((Rs4/usr/lib/python2.7/site-packages/pip/req/req_file.pyR Js(Dt__doc__t
__future__RRARR0R,RtRGtpip._vendor.six.moves.urllibRR?tpip._vendor.six.movesRRMtpip.downloadRtpip.req.req_installRtpip.exceptionsRtpip.utils.deprecationRRt__all__RtIR=RR;teditableR:RPR)RStextra_index_urlRFRItno_allow_externaltallow_unsafetno_allow_unsafeRLtno_use_wheeltalways_unzipt	no_binarytonly_binaryRWRYttrusted_hostRERvtinstall_optionstglobal_optionsthashRwtoRcR5RR3RRRR+R'RRR (((s4/usr/lib/python2.7/site-packages/pip/req/req_file.pytsl		 	
					PK
# site-packages/pip/req/req_uninstall.py
from __future__ import absolute_import

import logging
import os
import tempfile

from pip.compat import uses_pycache, WINDOWS, cache_from_source
from pip.exceptions import UninstallationError
from pip.utils import rmtree, ask, is_local, renames, normalize_path
from pip.utils.logging import indent_log


logger = logging.getLogger(__name__)


class UninstallPathSet(object):
    """A set of file paths to be removed in the uninstallation of a
    requirement."""
    def __init__(self, dist):
        self.paths = set()
        self._refuse = set()
        self.pth = {}
        self.dist = dist
        self.save_dir = None
        self._moved_paths = []

    def _permitted(self, path):
        """
        Return True if the given path is one we are permitted to
        remove/modify, False otherwise.

        """
        return is_local(path)

    def add(self, path):
        head, tail = os.path.split(path)

        # we normalize the head to resolve parent directory symlinks, but not
        # the tail, since we only want to uninstall symlinks, not their targets
        path = os.path.join(normalize_path(head), os.path.normcase(tail))

        if not os.path.exists(path):
            return
        if self._permitted(path):
            self.paths.add(path)
        else:
            self._refuse.add(path)

        # __pycache__ files can show up after 'installed-files.txt' is created,
        # due to imports
        if os.path.splitext(path)[1] == '.py' and uses_pycache:
            self.add(cache_from_source(path))

    def add_pth(self, pth_file, entry):
        pth_file = normalize_path(pth_file)
        if self._permitted(pth_file):
            if pth_file not in self.pth:
                self.pth[pth_file] = UninstallPthEntries(pth_file)
            self.pth[pth_file].add(entry)
        else:
            self._refuse.add(pth_file)

    def compact(self, paths):
        """Compact a path set to contain the minimal number of paths
        necessary to contain all paths in the set. If /a/path/ and
        /a/path/to/a/file.txt are both in the set, leave only the
        shorter path."""
        short_paths = set()
        for path in sorted(paths, key=len):
            if not any([
                    (path.startswith(shortpath) and
                     path[len(shortpath.rstrip(os.path.sep))] == os.path.sep)
                    for shortpath in short_paths]):
                short_paths.add(path)
        return short_paths

    def _stash(self, path):
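        # Mirrors the original path under save_dir; e.g. with a hypothetical
        # save_dir='/tmp/pip-XYZ-uninstall', a path '/usr/lib/pkg/mod.py' is
        # stashed at '/tmp/pip-XYZ-uninstall/usr/lib/pkg/mod.py'. The drive
        # (on Windows) and the leading separator are stripped first.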
        return os.path.join(
            self.save_dir, os.path.splitdrive(path)[1].lstrip(os.path.sep))

    def remove(self, auto_confirm=False):
        """Remove paths in ``self.paths`` with confirmation (unless
        ``auto_confirm`` is True)."""
        if not self.paths:
            logger.info(
                "Can't uninstall '%s'. No files were found to uninstall.",
                self.dist.project_name,
            )
            return
        logger.info(
            'Uninstalling %s-%s:',
            self.dist.project_name, self.dist.version
        )

        with indent_log():
            paths = sorted(self.compact(self.paths))

            if auto_confirm:
                response = 'y'
            else:
                for path in paths:
                    logger.info(path)
                response = ask('Proceed (y/n)? ', ('y', 'n'))
            if self._refuse:
                logger.info('Not removing or modifying (outside of prefix):')
                for path in self.compact(self._refuse):
                    logger.info(path)
            if response == 'y':
                self.save_dir = tempfile.mkdtemp(suffix='-uninstall',
                                                 prefix='pip-')
                for path in paths:
                    new_path = self._stash(path)
                    logger.debug('Removing file or directory %s', path)
                    self._moved_paths.append(path)
                    renames(path, new_path)
                for pth in self.pth.values():
                    pth.remove()
                logger.info(
                    'Successfully uninstalled %s-%s',
                    self.dist.project_name, self.dist.version
                )

    def rollback(self):
        """Rollback the changes previously made by remove()."""
        if self.save_dir is None:
            logger.error(
                "Can't roll back %s; was not uninstalled",
                self.dist.project_name,
            )
            return False
        logger.info('Rolling back uninstall of %s', self.dist.project_name)
        for path in self._moved_paths:
            tmp_path = self._stash(path)
            logger.debug('Replacing %s', path)
            renames(tmp_path, path)
        for pth in self.pth.values():
            pth.rollback()

    def commit(self):
        """Remove temporary save dir: rollback will no longer be possible."""
        if self.save_dir is not None:
            rmtree(self.save_dir)
            self.save_dir = None
            self._moved_paths = []


class UninstallPthEntries(object):
    def __init__(self, pth_file):
        if not os.path.isfile(pth_file):
            raise UninstallationError(
                "Cannot remove entries from nonexistent file %s" % pth_file
            )
        self.file = pth_file
        self.entries = set()
        self._saved_lines = None

    def add(self, entry):
        entry = os.path.normcase(entry)
        # On Windows, os.path.normcase converts the entry to use
        # backslashes.  This is correct for entries that describe absolute
        # paths outside of site-packages, but all the others use forward
        # slashes.
        if WINDOWS and not os.path.splitdrive(entry)[0]:
            entry = entry.replace('\\', '/')
        self.entries.add(entry)

    def remove(self):
        logger.debug('Removing pth entries from %s:', self.file)
        with open(self.file, 'rb') as fh:
            # windows uses '\r\n' with py3k, but uses '\n' with py2.x
            lines = fh.readlines()
            self._saved_lines = lines
        if any(b'\r\n' in line for line in lines):
            endline = '\r\n'
        else:
            endline = '\n'
        for entry in self.entries:
            try:
                logger.debug('Removing entry: %s', entry)
                lines.remove((entry + endline).encode("utf-8"))
            except ValueError:
                pass
        with open(self.file, 'wb') as fh:
            fh.writelines(lines)

    def rollback(self):
        if self._saved_lines is None:
            logger.error(
                'Cannot roll back changes to %s, none were made', self.file
            )
            return False
        logger.debug('Rolling %s back to previous state', self.file)
        with open(self.file, 'wb') as fh:
            fh.writelines(self._saved_lines)
        return True
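

# --- Illustrative sketch added in editing; not part of upstream pip. ---
# Demonstrates how UninstallPathSet.compact() collapses nested paths
# (POSIX-style paths, chosen only for the example).
def _compact_example():  # pragma: no cover
    path_set = UninstallPathSet(dist=None)
    paths = {
        '/srv/app/lib/pkg/',
        '/srv/app/lib/pkg/module.py',  # nested under the directory above
        '/srv/app/bin/tool',
    }
    # Only the shortest covering paths survive:
    # {'/srv/app/lib/pkg/', '/srv/app/bin/tool'}
    return path_set.compact(paths)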
[Unreadable byte-compiled content omitted: site-packages/pip/req/req_file.pyc (a second, near-identical compiled copy of "Requirements file parsing") and site-packages/pip/req/req_set.pyo (compiled RequirementSet/Requirements classes). The corresponding .py sources are not stored in readable form in this part of the dump.]
# site-packages/pip/req/req_install.py
from __future__ import absolute_import

import logging
import os
import re
import shutil
import sys
import tempfile
import traceback
import warnings
import zipfile

from distutils import sysconfig
from distutils.util import change_root
from email.parser import FeedParser

from pip._vendor import pkg_resources, six
from pip._vendor.packaging import specifiers
from pip._vendor.packaging.markers import Marker
from pip._vendor.packaging.requirements import InvalidRequirement, Requirement
from pip._vendor.packaging.utils import canonicalize_name
from pip._vendor.packaging.version import Version, parse as parse_version
from pip._vendor.six.moves import configparser

import pip.wheel

from pip.compat import native_str, get_stdlib, WINDOWS
from pip.download import is_url, url_to_path, path_to_url, is_archive_file
from pip.exceptions import (
    InstallationError, UninstallationError,
)
from pip.locations import (
    bin_py, running_under_virtualenv, PIP_DELETE_MARKER_FILENAME, bin_user,
)
from pip.utils import (
    display_path, rmtree, ask_path_exists, backup_dir, is_installable_dir,
    dist_in_usersite, dist_in_site_packages, egg_link_path,
    call_subprocess, read_text_file, FakeFile, _make_build_dir, ensure_dir,
    get_installed_version, normalize_path, dist_is_local,
)

from pip.utils.hashes import Hashes
from pip.utils.deprecation import RemovedInPip10Warning
from pip.utils.logging import indent_log
from pip.utils.setuptools_build import SETUPTOOLS_SHIM
from pip.utils.ui import open_spinner
from pip.req.req_uninstall import UninstallPathSet
from pip.vcs import vcs
from pip.wheel import move_wheel_files, Wheel


logger = logging.getLogger(__name__)

operators = specifiers.Specifier._operators.keys()


def _strip_extras(path):
    m = re.match(r'^(.+)(\[[^\]]+\])$', path)
    extras = None
    if m:
        path_no_extras = m.group(1)
        extras = m.group(2)
    else:
        path_no_extras = path

    return path_no_extras, extras


def _safe_extras(extras):
    return set(pkg_resources.safe_extra(extra) for extra in extras)
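

# --- Illustrative sketch added in editing; not part of upstream pip. ---
# Shows how the two helpers above behave; the inputs are made up.
def _extras_helpers_example():  # pragma: no cover
    # A trailing [extras] group is split off the path-like string:
    assert _strip_extras('./pkg_dir[extra1,extra2]') == \
        ('./pkg_dir', '[extra1,extra2]')
    assert _strip_extras('./pkg_dir') == ('./pkg_dir', None)
    # Extra names are normalized (non-alphanumeric runs -> '_', lowercased):
    return _safe_extras(['Extra One'])  # -> set(['extra_one'])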


class InstallRequirement(object):

    def __init__(self, req, comes_from, source_dir=None, editable=False,
                 link=None, as_egg=False, update=True,
                 pycompile=True, markers=None, isolated=False, options=None,
                 wheel_cache=None, constraint=False):
        self.extras = ()
        if isinstance(req, six.string_types):
            try:
                req = Requirement(req)
            except InvalidRequirement:
                if os.path.sep in req:
                    add_msg = "It looks like a path. Does it exist ?"
                elif '=' in req and not any(op in req for op in operators):
                    add_msg = "= is not a valid operator. Did you mean == ?"
                else:
                    add_msg = traceback.format_exc()
                raise InstallationError(
                    "Invalid requirement: '%s'\n%s" % (req, add_msg))
            self.extras = _safe_extras(req.extras)

        self.req = req
        self.comes_from = comes_from
        self.constraint = constraint
        self.source_dir = source_dir
        self.editable = editable

        self._wheel_cache = wheel_cache
        self.link = self.original_link = link
        self.as_egg = as_egg
        if markers is not None:
            self.markers = markers
        else:
            self.markers = req and req.marker
        self._egg_info_path = None
        # This holds the pkg_resources.Distribution object if this requirement
        # is already available:
        self.satisfied_by = None
        # This hold the pkg_resources.Distribution object if this requirement
        # conflicts with another installed distribution:
        self.conflicts_with = None
        # Temporary build location
        self._temp_build_dir = None
        # Used to store the global directory where the _temp_build_dir should
        # have been created. Cf _correct_build_location method.
        self._ideal_build_dir = None
        # True if the editable should be updated:
        self.update = update
        # Set to True after successful installation
        self.install_succeeded = None
        # UninstallPathSet of uninstalled distribution (for possible rollback)
        self.uninstalled = None
        # Set True if a legitimate do-nothing-on-uninstall has happened - e.g.
        # system site packages, stdlib packages.
        self.nothing_to_uninstall = False
        self.use_user_site = False
        self.target_dir = None
        self.options = options if options else {}
        self.pycompile = pycompile
        # Set to True after successful preparation of this requirement
        self.prepared = False

        self.isolated = isolated

    @classmethod
    def from_editable(cls, editable_req, comes_from=None, default_vcs=None,
                      isolated=False, options=None, wheel_cache=None,
                      constraint=False):
        from pip.index import Link

        name, url, extras_override = parse_editable(
            editable_req, default_vcs)
        if url.startswith('file:'):
            source_dir = url_to_path(url)
        else:
            source_dir = None

        res = cls(name, comes_from, source_dir=source_dir,
                  editable=True,
                  link=Link(url),
                  constraint=constraint,
                  isolated=isolated,
                  options=options if options else {},
                  wheel_cache=wheel_cache)

        if extras_override is not None:
            res.extras = _safe_extras(extras_override)

        return res
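
    # Editable requirement strings accepted here include, for example
    # (formats taken from parse_editable(), defined later in this module):
    #   svn+http://blahblah@rev#egg=Foobar[baz]&subdirectory=version_subdir
    #   .[some_extra]
    # i.e. a VCS URL with an #egg= name, or a local directory (optionally
    # with extras); parse_editable() turns them into (name, url, extras).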

    @classmethod
    def from_line(
            cls, name, comes_from=None, isolated=False, options=None,
            wheel_cache=None, constraint=False):
        """Creates an InstallRequirement from a name, which might be a
        requirement, directory containing 'setup.py', filename, or URL.
        """
        from pip.index import Link

        if is_url(name):
            marker_sep = '; '
        else:
            marker_sep = ';'
        if marker_sep in name:
            name, markers = name.split(marker_sep, 1)
            markers = markers.strip()
            if not markers:
                markers = None
            else:
                markers = Marker(markers)
        else:
            markers = None
        name = name.strip()
        req = None
        path = os.path.normpath(os.path.abspath(name))
        link = None
        extras = None

        if is_url(name):
            link = Link(name)
        else:
            p, extras = _strip_extras(path)
            if (os.path.isdir(p) and
                    (os.path.sep in name or name.startswith('.'))):

                if not is_installable_dir(p):
                    raise InstallationError(
                        "Directory %r is not installable. File 'setup.py' "
                        "not found." % name
                    )
                link = Link(path_to_url(p))
            elif is_archive_file(p):
                if not os.path.isfile(p):
                    logger.warning(
                        'Requirement %r looks like a filename, but the '
                        'file does not exist',
                        name
                    )
                link = Link(path_to_url(p))

        # it's a local file, dir, or url
        if link:
            # Handle relative file URLs
            if link.scheme == 'file' and re.search(r'\.\./', link.url):
                link = Link(
                    path_to_url(os.path.normpath(os.path.abspath(link.path))))
            # wheel file
            if link.is_wheel:
                wheel = Wheel(link.filename)  # can raise InvalidWheelFilename
                req = "%s==%s" % (wheel.name, wheel.version)
            else:
                # set the req to the egg fragment.  when it's not there, this
                # will become an 'unnamed' requirement
                req = link.egg_fragment

        # a requirement specifier
        else:
            req = name

        options = options if options else {}
        res = cls(req, comes_from, link=link, markers=markers,
                  isolated=isolated, options=options,
                  wheel_cache=wheel_cache, constraint=constraint)

        if extras:
            res.extras = _safe_extras(
                Requirement('placeholder' + extras).extras)

        return res
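
    # Examples of strings from_line() accepts (illustrative, not exhaustive):
    #   "requests==2.12.0"           -> plain requirement specifier
    #   "./downloads/pkg-1.0.tar.gz" -> local archive file
    #   "./local_dir"                -> local directory containing setup.py
    #   "https://host/pkg-1.0-py2.py3-none-any.whl"
    #                                -> URL; name/version read from the wheel
    #   "pkg>=1.0; python_version<'3'"
    #                                -> specifier plus an environment marker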

    def __str__(self):
        if self.req:
            s = str(self.req)
            if self.link:
                s += ' from %s' % self.link.url
        else:
            s = self.link.url if self.link else None
        if self.satisfied_by is not None:
            s += ' in %s' % display_path(self.satisfied_by.location)
        if self.comes_from:
            if isinstance(self.comes_from, six.string_types):
                comes_from = self.comes_from
            else:
                comes_from = self.comes_from.from_path()
            if comes_from:
                s += ' (from %s)' % comes_from
        return s

    def __repr__(self):
        return '<%s object: %s editable=%r>' % (
            self.__class__.__name__, str(self), self.editable)

    def populate_link(self, finder, upgrade, require_hashes):
        """Ensure that if a link can be found for this, that it is found.

        Note that self.link may still be None - if Upgrade is False and the
        requirement is already installed.

        If require_hashes is True, don't use the wheel cache, because cached
        wheels, always built locally, have different hashes than the files
        downloaded from the index server and thus throw false hash mismatches.
        Furthermore, cached wheels at present have undeterministic contents due
        to file modification times.
        """
        if self.link is None:
            self.link = finder.find_requirement(self, upgrade)
        if self._wheel_cache is not None and not require_hashes:
            old_link = self.link
            self.link = self._wheel_cache.cached_wheel(self.link, self.name)
            if old_link != self.link:
                logger.debug('Using cached wheel link: %s', self.link)

    @property
    def specifier(self):
        return self.req.specifier

    @property
    def is_pinned(self):
        """Return whether I am pinned to an exact version.

        For example, some-package==1.2 is pinned; some-package>1.2 is not.
        """
        specifiers = self.specifier
        return (len(specifiers) == 1 and
                next(iter(specifiers)).operator in ('==', '==='))

    def from_path(self):
        if self.req is None:
            return None
        s = str(self.req)
        if self.comes_from:
            if isinstance(self.comes_from, six.string_types):
                comes_from = self.comes_from
            else:
                comes_from = self.comes_from.from_path()
            if comes_from:
                s += '->' + comes_from
        return s

    def build_location(self, build_dir):
        if self._temp_build_dir is not None:
            return self._temp_build_dir
        if self.req is None:
            # for requirement via a path to a directory: the name of the
            # package is not available yet so we create a temp directory
            # Once run_egg_info will have run, we'll be able
            # to fix it via _correct_build_location
            # Some systems have /tmp as a symlink which confuses custom
            # builds (such as numpy). Thus, we ensure that the real path
            # is returned.
            self._temp_build_dir = os.path.realpath(
                tempfile.mkdtemp('-build', 'pip-')
            )
            self._ideal_build_dir = build_dir
            return self._temp_build_dir
        if self.editable:
            name = self.name.lower()
        else:
            name = self.name
        # FIXME: Is there a better place to create the build_dir? (hg and bzr
        # need this)
        if not os.path.exists(build_dir):
            logger.debug('Creating directory %s', build_dir)
            _make_build_dir(build_dir)
        return os.path.join(build_dir, name)

    def _correct_build_location(self):
        """Move self._temp_build_dir to self._ideal_build_dir/self.req.name

        For some requirements (e.g. a path to a directory), the name of the
        package is not available until we run egg_info, so the build_location
        will return a temporary directory and store the _ideal_build_dir.

        This is only called by self.egg_info_path to fix the temporary build
        directory.
        """
        if self.source_dir is not None:
            return
        assert self.req is not None
        assert self._temp_build_dir
        assert self._ideal_build_dir
        old_location = self._temp_build_dir
        self._temp_build_dir = None
        new_location = self.build_location(self._ideal_build_dir)
        if os.path.exists(new_location):
            raise InstallationError(
                'A package already exists in %s; please remove it to continue'
                % display_path(new_location))
        logger.debug(
            'Moving package %s from %s to new location %s',
            self, display_path(old_location), display_path(new_location),
        )
        shutil.move(old_location, new_location)
        self._temp_build_dir = new_location
        self._ideal_build_dir = None
        self.source_dir = new_location
        self._egg_info_path = None

    @property
    def name(self):
        if self.req is None:
            return None
        return native_str(pkg_resources.safe_name(self.req.name))

    @property
    def setup_py_dir(self):
        return os.path.join(
            self.source_dir,
            self.link and self.link.subdirectory_fragment or '')

    @property
    def setup_py(self):
        assert self.source_dir, "No source dir for %s" % self
        try:
            import setuptools  # noqa
        except ImportError:
            if get_installed_version('setuptools') is None:
                add_msg = "Please install setuptools."
            else:
                add_msg = traceback.format_exc()
            # Setuptools is not available
            raise InstallationError(
                "Could not import setuptools which is required to "
                "install from a source distribution.\n%s" % add_msg
            )

        setup_py = os.path.join(self.setup_py_dir, 'setup.py')

        # Python2 __file__ should not be unicode
        if six.PY2 and isinstance(setup_py, six.text_type):
            setup_py = setup_py.encode(sys.getfilesystemencoding())

        return setup_py

    def run_egg_info(self):
        assert self.source_dir
        if self.name:
            logger.debug(
                'Running setup.py (path:%s) egg_info for package %s',
                self.setup_py, self.name,
            )
        else:
            logger.debug(
                'Running setup.py (path:%s) egg_info for package from %s',
                self.setup_py, self.link,
            )

        with indent_log():
            script = SETUPTOOLS_SHIM % self.setup_py
            base_cmd = [sys.executable, '-c', script]
            if self.isolated:
                base_cmd += ["--no-user-cfg"]
            egg_info_cmd = base_cmd + ['egg_info']
            # We can't put the .egg-info files at the root, because then the
            # source code will be mistaken for an installed egg, causing
            # problems
            if self.editable:
                egg_base_option = []
            else:
                egg_info_dir = os.path.join(self.setup_py_dir, 'pip-egg-info')
                ensure_dir(egg_info_dir)
                egg_base_option = ['--egg-base', 'pip-egg-info']
            call_subprocess(
                egg_info_cmd + egg_base_option,
                cwd=self.setup_py_dir,
                show_stdout=False,
                command_desc='python setup.py egg_info')

        if not self.req:
            if isinstance(parse_version(self.pkg_info()["Version"]), Version):
                op = "=="
            else:
                op = "==="
            self.req = Requirement(
                "".join([
                    self.pkg_info()["Name"],
                    op,
                    self.pkg_info()["Version"],
                ])
            )
            self._correct_build_location()
        else:
            metadata_name = canonicalize_name(self.pkg_info()["Name"])
            if canonicalize_name(self.req.name) != metadata_name:
                logger.warning(
                    'Running setup.py (path:%s) egg_info for package %s '
                    'produced metadata for project name %s. Fix your '
                    '#egg=%s fragments.',
                    self.setup_py, self.name, metadata_name, self.name
                )
                self.req = Requirement(metadata_name)

    def egg_info_data(self, filename):
        if self.satisfied_by is not None:
            if not self.satisfied_by.has_metadata(filename):
                return None
            return self.satisfied_by.get_metadata(filename)
        assert self.source_dir
        filename = self.egg_info_path(filename)
        if not os.path.exists(filename):
            return None
        data = read_text_file(filename)
        return data

    def egg_info_path(self, filename):
        if self._egg_info_path is None:
            if self.editable:
                base = self.source_dir
            else:
                base = os.path.join(self.setup_py_dir, 'pip-egg-info')
            filenames = os.listdir(base)
            if self.editable:
                filenames = []
                for root, dirs, files in os.walk(base):
                    for dir in vcs.dirnames:
                        if dir in dirs:
                            dirs.remove(dir)
                    # Iterate over a copy of ``dirs``, since mutating
                    # a list while iterating over it can cause trouble.
                    # (See https://github.com/pypa/pip/pull/462.)
                    for dir in list(dirs):
                        # Don't search in anything that looks like a virtualenv
                        # environment
                        if (
                                os.path.lexists(
                                    os.path.join(root, dir, 'bin', 'python')
                                ) or
                                os.path.exists(
                                    os.path.join(
                                        root, dir, 'Scripts', 'Python.exe'
                                    )
                                )):
                            dirs.remove(dir)
                        # Also don't search through tests
                        elif dir == 'test' or dir == 'tests':
                            dirs.remove(dir)
                    filenames.extend([os.path.join(root, dir)
                                     for dir in dirs])
                filenames = [f for f in filenames if f.endswith('.egg-info')]

            if not filenames:
                raise InstallationError(
                    'No files/directories in %s (from %s)' % (base, filename)
                )

            # if we have more than one match, we pick the toplevel one.  This
            # can easily be the case if there is a dist folder which contains
            # an extracted tarball for testing purposes.
            if len(filenames) > 1:
                filenames.sort(
                    key=lambda x: x.count(os.path.sep) +
                    (os.path.altsep and x.count(os.path.altsep) or 0)
                )
            self._egg_info_path = os.path.join(base, filenames[0])
        return os.path.join(self._egg_info_path, filename)

    def pkg_info(self):
        p = FeedParser()
        data = self.egg_info_data('PKG-INFO')
        if not data:
            logger.warning(
                'No PKG-INFO file found in %s',
                display_path(self.egg_info_path('PKG-INFO')),
            )
        p.feed(data or '')
        return p.close()

    _requirements_section_re = re.compile(r'\[(.*?)\]')

    @property
    def installed_version(self):
        return get_installed_version(self.name)

    def assert_source_matches_version(self):
        assert self.source_dir
        version = self.pkg_info()['version']
        if self.req.specifier and version not in self.req.specifier:
            logger.warning(
                'Requested %s, but installing version %s',
                self,
                version,
            )
        else:
            logger.debug(
                'Source in %s has version %s, which satisfies requirement %s',
                display_path(self.source_dir),
                version,
                self,
            )

    def update_editable(self, obtain=True):
        if not self.link:
            logger.debug(
                "Cannot update repository at %s; repository location is "
                "unknown",
                self.source_dir,
            )
            return
        assert self.editable
        assert self.source_dir
        if self.link.scheme == 'file':
            # Static paths don't get updated
            return
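        # Editable VCS links use the 'vcs+url' form, e.g.
        # 'git+https://example.com/repo.git@branch#egg=name'.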
        assert '+' in self.link.url, "bad url: %r" % self.link.url
        if not self.update:
            return
        vc_type, url = self.link.url.split('+', 1)
        backend = vcs.get_backend(vc_type)
        if backend:
            vcs_backend = backend(self.link.url)
            if obtain:
                vcs_backend.obtain(self.source_dir)
            else:
                vcs_backend.export(self.source_dir)
        else:
            assert 0, (
                'Unexpected version control type (in %s): %s'
                % (self.link, vc_type))

    def uninstall(self, auto_confirm=False):
        """
        Uninstall the distribution currently satisfying this requirement.

        Prompts before removing or modifying files unless
        ``auto_confirm`` is True.

        Refuses to delete or modify files outside of ``sys.prefix`` -
        thus uninstallation within a virtual environment can only
        modify that virtual environment, even if the virtualenv is
        linked to global site-packages.

        """
        if not self.check_if_exists():
            raise UninstallationError(
                "Cannot uninstall requirement %s, not installed" % (self.name,)
            )
        dist = self.satisfied_by or self.conflicts_with

        dist_path = normalize_path(dist.location)
        if not dist_is_local(dist):
            logger.info(
                "Not uninstalling %s at %s, outside environment %s",
                dist.key,
                dist_path,
                sys.prefix,
            )
            self.nothing_to_uninstall = True
            return

        if dist_path in get_stdlib():
            logger.info(
                "Not uninstalling %s at %s, as it is in the standard library.",
                dist.key,
                dist_path,
            )
            self.nothing_to_uninstall = True
            return

        paths_to_remove = UninstallPathSet(dist)
        develop_egg_link = egg_link_path(dist)
        develop_egg_link_egg_info = '{0}.egg-info'.format(
            pkg_resources.to_filename(dist.project_name))
        egg_info_exists = dist.egg_info and os.path.exists(dist.egg_info)
        # Special case for distutils installed package
        distutils_egg_info = getattr(dist._provider, 'path', None)

        # The order of these uninstall cases matters: when two installs of the
        # same package exist, pip needs to uninstall the currently detected
        # version.
        if (egg_info_exists and dist.egg_info.endswith('.egg-info') and
                not dist.egg_info.endswith(develop_egg_link_egg_info)):
            # if dist.egg_info.endswith(develop_egg_link_egg_info), we
            # are in fact in the develop_egg_link case
            paths_to_remove.add(dist.egg_info)
            if dist.has_metadata('installed-files.txt'):
                for installed_file in dist.get_metadata(
                        'installed-files.txt').splitlines():
                    path = os.path.normpath(
                        os.path.join(dist.egg_info, installed_file)
                    )
                    paths_to_remove.add(path)
            # FIXME: need a test for this elif block
            # occurs with --single-version-externally-managed/--record outside
            # of pip
            elif dist.has_metadata('top_level.txt'):
                if dist.has_metadata('namespace_packages.txt'):
                    namespaces = dist.get_metadata('namespace_packages.txt')
                else:
                    namespaces = []
                for top_level_pkg in [
                        p for p
                        in dist.get_metadata('top_level.txt').splitlines()
                        if p and p not in namespaces]:
                    path = os.path.join(dist.location, top_level_pkg)
                    paths_to_remove.add(path)
                    paths_to_remove.add(path + '.py')
                    paths_to_remove.add(path + '.pyc')
                    paths_to_remove.add(path + '.pyo')

        elif distutils_egg_info:
            warnings.warn(
                "Uninstalling a distutils installed project ({0}) has been "
                "deprecated and will be removed in a future version. This is "
                "due to the fact that uninstalling a distutils project will "
                "only partially uninstall the project.".format(self.name),
                RemovedInPip10Warning,
            )
            paths_to_remove.add(distutils_egg_info)

        elif dist.location.endswith('.egg'):
            # package installed by easy_install
            # We cannot match on dist.egg_name because it can slightly vary
            # i.e. setuptools-0.6c11-py2.6.egg vs setuptools-0.6rc11-py2.6.egg
            paths_to_remove.add(dist.location)
            easy_install_egg = os.path.split(dist.location)[1]
            easy_install_pth = os.path.join(os.path.dirname(dist.location),
                                            'easy-install.pth')
            paths_to_remove.add_pth(easy_install_pth, './' + easy_install_egg)

        elif egg_info_exists and dist.egg_info.endswith('.dist-info'):
            for path in pip.wheel.uninstallation_paths(dist):
                paths_to_remove.add(path)

        elif develop_egg_link:
            # develop egg
            with open(develop_egg_link, 'r') as fh:
                link_pointer = os.path.normcase(fh.readline().strip())
            assert (link_pointer == dist.location), (
                'Egg-link %s does not match installed location of %s '
                '(at %s)' % (link_pointer, self.name, dist.location)
            )
            paths_to_remove.add(develop_egg_link)
            easy_install_pth = os.path.join(os.path.dirname(develop_egg_link),
                                            'easy-install.pth')
            paths_to_remove.add_pth(easy_install_pth, dist.location)

        else:
            logger.debug(
                'Not sure how to uninstall: %s - Check: %s',
                dist, dist.location)

        # find distutils scripts= scripts
        if dist.has_metadata('scripts') and dist.metadata_isdir('scripts'):
            for script in dist.metadata_listdir('scripts'):
                if dist_in_usersite(dist):
                    bin_dir = bin_user
                else:
                    bin_dir = bin_py
                paths_to_remove.add(os.path.join(bin_dir, script))
                if WINDOWS:
                    paths_to_remove.add(os.path.join(bin_dir, script) + '.bat')

        # find console_scripts
        if dist.has_metadata('entry_points.txt'):
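            # Python 3's configparser treats ':' as a delimiter by default;
            # restricting delimiters to '=' keeps entry point values such as
            # 'pkg.module:func' intact.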
            if six.PY2:
                options = {}
            else:
                options = {"delimiters": ('=', )}
            config = configparser.SafeConfigParser(**options)
            config.readfp(
                FakeFile(dist.get_metadata_lines('entry_points.txt'))
            )
            if config.has_section('console_scripts'):
                for name, value in config.items('console_scripts'):
                    if dist_in_usersite(dist):
                        bin_dir = bin_user
                    else:
                        bin_dir = bin_py
                    paths_to_remove.add(os.path.join(bin_dir, name))
                    if WINDOWS:
                        paths_to_remove.add(
                            os.path.join(bin_dir, name) + '.exe'
                        )
                        paths_to_remove.add(
                            os.path.join(bin_dir, name) + '.exe.manifest'
                        )
                        paths_to_remove.add(
                            os.path.join(bin_dir, name) + '-script.py'
                        )

        paths_to_remove.remove(auto_confirm)
        self.uninstalled = paths_to_remove

    def rollback_uninstall(self):
        if self.uninstalled:
            self.uninstalled.rollback()
        else:
            logger.error(
                "Can't rollback %s, nothing uninstalled.", self.name,
            )

    def commit_uninstall(self):
        if self.uninstalled:
            self.uninstalled.commit()
        elif not self.nothing_to_uninstall:
            logger.error(
                "Can't commit %s, nothing uninstalled.", self.name,
            )

    def archive(self, build_dir):
        assert self.source_dir
        create_archive = True
        archive_name = '%s-%s.zip' % (self.name, self.pkg_info()["version"])
        archive_path = os.path.join(build_dir, archive_name)
        if os.path.exists(archive_path):
            response = ask_path_exists(
                'The file %s exists. (i)gnore, (w)ipe, (b)ackup, (a)bort ' %
                display_path(archive_path), ('i', 'w', 'b', 'a'))
            if response == 'i':
                create_archive = False
            elif response == 'w':
                logger.warning('Deleting %s', display_path(archive_path))
                os.remove(archive_path)
            elif response == 'b':
                dest_file = backup_dir(archive_path)
                logger.warning(
                    'Backing up %s to %s',
                    display_path(archive_path),
                    display_path(dest_file),
                )
                shutil.move(archive_path, dest_file)
            elif response == 'a':
                sys.exit(-1)
        if create_archive:
            zip = zipfile.ZipFile(
                archive_path, 'w', zipfile.ZIP_DEFLATED,
                allowZip64=True
            )
            dir = os.path.normcase(os.path.abspath(self.setup_py_dir))
            for dirpath, dirnames, filenames in os.walk(dir):
                if 'pip-egg-info' in dirnames:
                    dirnames.remove('pip-egg-info')
                for dirname in dirnames:
                    dirname = os.path.join(dirpath, dirname)
                    name = self._clean_zip_name(dirname, dir)
                    zipdir = zipfile.ZipInfo(self.name + '/' + name + '/')
                    zipdir.external_attr = 0x1ED << 16  # 0o755
                    zip.writestr(zipdir, '')
                for filename in filenames:
                    if filename == PIP_DELETE_MARKER_FILENAME:
                        continue
                    filename = os.path.join(dirpath, filename)
                    name = self._clean_zip_name(filename, dir)
                    zip.write(filename, self.name + '/' + name)
            zip.close()
            logger.info('Saved %s', display_path(archive_path))

    def _clean_zip_name(self, name, prefix):
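        # e.g. name='/tmp/build/pkg/sub/mod.py', prefix='/tmp/build/pkg'
        # -> 'sub/mod.py' (archive members always use forward slashes)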
        assert name.startswith(prefix + os.path.sep), (
            "name %r doesn't start with prefix %r" % (name, prefix)
        )
        name = name[len(prefix) + 1:]
        name = name.replace(os.path.sep, '/')
        return name

    def match_markers(self, extras_requested=None):
        if not extras_requested:
            # Provide an extra to safely evaluate the markers
            # without matching any extra
            extras_requested = ('',)
        if self.markers is not None:
            return any(
                self.markers.evaluate({'extra': extra})
                for extra in extras_requested)
        else:
            return True

    def install(self, install_options, global_options=(), root=None,
                prefix=None, strip_file_prefix=None):
        if self.editable:
            self.install_editable(
                install_options, global_options, prefix=prefix)
            return
        if self.is_wheel:
            version = pip.wheel.wheel_version(self.source_dir)
            pip.wheel.check_compatibility(version, self.name)

            self.move_wheel_files(
                self.source_dir,
                root=root,
                prefix=prefix,
                strip_file_prefix=strip_file_prefix
            )
            self.install_succeeded = True
            return

        # Extend the list of global and install options passed on to
        # the setup.py call with the ones from the requirements file.
        # Options specified in requirements file override those
        # specified on the command line, since the last option given
        # to setup.py is the one that is used.
        # Build new lists so we don't mutate the caller's (or the default)
        # option lists in place.
        global_options = list(global_options) + \
            self.options.get('global_options', [])
        install_options = list(install_options) + \
            self.options.get('install_options', [])

        if self.isolated:
            global_options = list(global_options) + ["--no-user-cfg"]

        temp_location = tempfile.mkdtemp('-record', 'pip-')
        record_filename = os.path.join(temp_location, 'install-record.txt')
        try:
            install_args = self.get_install_args(
                global_options, record_filename, root, prefix)
            msg = 'Running setup.py install for %s' % (self.name,)
            with open_spinner(msg) as spinner:
                with indent_log():
                    call_subprocess(
                        install_args + install_options,
                        cwd=self.setup_py_dir,
                        show_stdout=False,
                        spinner=spinner,
                    )

            if not os.path.exists(record_filename):
                logger.debug('Record file %s not found', record_filename)
                return
            self.install_succeeded = True
            if self.as_egg:
                # there's no --always-unzip option we can pass to the install
                # command, so we are unable to save installed-files.txt
                return

            def prepend_root(path):
                if root is None or not os.path.isabs(path):
                    return path
                else:
                    return change_root(root, path)

            with open(record_filename) as f:
                for line in f:
                    directory = os.path.dirname(line)
                    if directory.endswith('.egg-info'):
                        egg_info_dir = prepend_root(directory)
                        break
                else:
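                    # for/else: we only get here when the loop above found no
                    # .egg-info entry (i.e. it never hit ``break``).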
                    logger.warning(
                        'Could not find .egg-info directory in install record'
                        ' for %s',
                        self,
                    )
                    # FIXME: put the record somewhere
                    # FIXME: should this be an error?
                    return
            new_lines = []
            with open(record_filename) as f:
                for line in f:
                    filename = line.strip()
                    if os.path.isdir(filename):
                        filename += os.path.sep
                    new_lines.append(
                        os.path.relpath(
                            prepend_root(filename), egg_info_dir)
                    )
            inst_files_path = os.path.join(egg_info_dir, 'installed-files.txt')
            with open(inst_files_path, 'w') as f:
                f.write('\n'.join(new_lines) + '\n')
        finally:
            if os.path.exists(record_filename):
                os.remove(record_filename)
            rmtree(temp_location)

    def ensure_has_source_dir(self, parent_dir):
        """Ensure that a source_dir is set.

        This will create a temporary build dir if the name of the requirement
        isn't known yet.

        :param parent_dir: The ideal pip parent_dir for the source_dir.
            Generally src_dir for editables and build_dir for sdists.
        :return: self.source_dir
        """
        if self.source_dir is None:
            self.source_dir = self.build_location(parent_dir)
        return self.source_dir

    def get_install_args(self, global_options, record_filename, root, prefix):
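        # Builds roughly:
        #   [sys.executable, '-u', '-c', SETUPTOOLS_SHIM % self.setup_py,
        #    <global_options...>, 'install', '--record', record_filename, ...]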
        install_args = [sys.executable, "-u"]
        install_args.append('-c')
        install_args.append(SETUPTOOLS_SHIM % self.setup_py)
        install_args += list(global_options) + \
            ['install', '--record', record_filename]

        if not self.as_egg:
            install_args += ['--single-version-externally-managed']

        if root is not None:
            install_args += ['--root', root]
        if prefix is not None:
            install_args += ['--prefix', prefix]

        if self.pycompile:
            install_args += ["--compile"]
        else:
            install_args += ["--no-compile"]

        if running_under_virtualenv():
            py_ver_str = 'python' + sysconfig.get_python_version()
            install_args += ['--install-headers',
                             os.path.join(sys.prefix, 'include', 'site',
                                          py_ver_str, self.name)]

        return install_args

    def remove_temporary_source(self):
        """Remove the source files from this requirement, if they are marked
        for deletion"""
        if self.source_dir and os.path.exists(
                os.path.join(self.source_dir, PIP_DELETE_MARKER_FILENAME)):
            logger.debug('Removing source in %s', self.source_dir)
            rmtree(self.source_dir)
        self.source_dir = None
        if self._temp_build_dir and os.path.exists(self._temp_build_dir):
            rmtree(self._temp_build_dir)
        self._temp_build_dir = None

    def install_editable(self, install_options,
                         global_options=(), prefix=None):
        logger.info('Running setup.py develop for %s', self.name)

        if self.isolated:
            global_options = list(global_options) + ["--no-user-cfg"]

        if prefix:
            prefix_param = ['--prefix={0}'.format(prefix)]
            install_options = list(install_options) + prefix_param

        with indent_log():
            # FIXME: should we do --install-headers here too?
            call_subprocess(
                [
                    sys.executable,
                    '-c',
                    SETUPTOOLS_SHIM % self.setup_py
                ] +
                list(global_options) +
                ['develop', '--no-deps'] +
                list(install_options),

                cwd=self.setup_py_dir,
                show_stdout=False)

        self.install_succeeded = True

    def check_if_exists(self):
        """Find an installed distribution that satisfies or conflicts
        with this requirement, and set self.satisfied_by or
        self.conflicts_with appropriately.
        """
        if self.req is None:
            return False
        try:
            # get_distribution() will resolve the entire list of requirements
            # anyway, and we've already determined that we need the requirement
            # in question, so strip the marker so that we don't try to
            # evaluate it.
            no_marker = Requirement(str(self.req))
            no_marker.marker = None
            self.satisfied_by = pkg_resources.get_distribution(str(no_marker))
            if self.editable and self.satisfied_by:
                self.conflicts_with = self.satisfied_by
                # when installing editables, nothing pre-existing should ever
                # satisfy
                self.satisfied_by = None
                return True
        except pkg_resources.DistributionNotFound:
            return False
        except pkg_resources.VersionConflict:
            existing_dist = pkg_resources.get_distribution(
                self.req.name
            )
            if self.use_user_site:
                if dist_in_usersite(existing_dist):
                    self.conflicts_with = existing_dist
                elif (running_under_virtualenv() and
                        dist_in_site_packages(existing_dist)):
                    raise InstallationError(
                        "Will not install to the user site because it will "
                        "lack sys.path precedence to %s in %s" %
                        (existing_dist.project_name, existing_dist.location)
                    )
            else:
                self.conflicts_with = existing_dist
        return True

    @property
    def is_wheel(self):
        return self.link and self.link.is_wheel

    def move_wheel_files(self, wheeldir, root=None, prefix=None,
                         strip_file_prefix=None):
        move_wheel_files(
            self.name, self.req, wheeldir,
            user=self.use_user_site,
            home=self.target_dir,
            root=root,
            prefix=prefix,
            pycompile=self.pycompile,
            isolated=self.isolated,
            strip_file_prefix=strip_file_prefix,
        )

    def get_dist(self):
        """Return a pkg_resources.Distribution built from self.egg_info_path"""
        egg_info = self.egg_info_path('').rstrip('/')
        base_dir = os.path.dirname(egg_info)
        metadata = pkg_resources.PathMetadata(base_dir, egg_info)
        dist_name = os.path.splitext(os.path.basename(egg_info))[0]
        return pkg_resources.Distribution(
            os.path.dirname(egg_info),
            project_name=dist_name,
            metadata=metadata)

    @property
    def has_hash_options(self):
        """Return whether any known-good hashes are specified as options.

        These activate --require-hashes mode; hashes specified as part of a
        URL do not.

        """
        return bool(self.options.get('hashes', {}))

    def hashes(self, trust_internet=True):
        """Return a hash-comparer that considers my option- and URL-based
        hashes to be known-good.

        Hashes in URLs--ones embedded in the requirements file, not ones
        downloaded from an index server--are almost peers with ones from
        flags. They satisfy --require-hashes (whether it was implicitly or
        explicitly activated) but do not activate it. md5 and sha224 are not
        allowed in flags, which should nudge people toward good algos. We
        always OR all hashes together, even ones from URLs.

        :param trust_internet: Whether to trust URL-based (#md5=...) hashes
            downloaded from the internet, as by populate_link()

        """
        good_hashes = self.options.get('hashes', {}).copy()
        link = self.link if trust_internet else self.original_link
        if link and link.hash:
            good_hashes.setdefault(link.hash_name, []).append(link.hash)
        return Hashes(good_hashes)


def _strip_postfix(req):
    """
        Strip the requirement's postfix (-dev, -0.2, etc.)
    """
    # FIXME: use package_to_requirement?
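    # e.g. 'django-haystack-dev' -> 'django-haystack',
    #      'MyProject-0.2' -> 'MyProject'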
    match = re.search(r'^(.*?)(?:-dev|-\d.*)$', req)
    if match:
        # Strip off -dev, -0.2, etc.
        req = match.group(1)
    return req


def parse_editable(editable_req, default_vcs=None):
    """Parses an editable requirement into:
        - a requirement name
        - a URL
        - extras
        - editable options
    Accepted requirements:
        svn+http://blahblah@rev#egg=Foobar[baz]&subdirectory=version_subdir
        .[some_extra]
    """

    from pip.index import Link

    url = editable_req
    extras = None

    # If a file path is specified with extras, strip off the extras.
    m = re.match(r'^(.+)(\[[^\]]+\])$', url)
    if m:
        url_no_extras = m.group(1)
        extras = m.group(2)
    else:
        url_no_extras = url

    if os.path.isdir(url_no_extras):
        if not os.path.exists(os.path.join(url_no_extras, 'setup.py')):
            raise InstallationError(
                "Directory %r is not installable. File 'setup.py' not found." %
                url_no_extras
            )
        # Treating it as code that has already been checked out
        url_no_extras = path_to_url(url_no_extras)

    if url_no_extras.lower().startswith('file:'):
        package_name = Link(url_no_extras).egg_fragment
        if extras:
            return (
                package_name,
                url_no_extras,
                Requirement("placeholder" + extras.lower()).extras,
            )
        else:
            return package_name, url_no_extras, None

    for version_control in vcs:
        if url.lower().startswith('%s:' % version_control):
            url = '%s+%s' % (version_control, url)
            break

    if '+' not in url:
        if default_vcs:
            warnings.warn(
                "--default-vcs has been deprecated and will be removed in "
                "the future.",
                RemovedInPip10Warning,
            )
            url = default_vcs + '+' + url
        else:
            raise InstallationError(
                '%s should either be a path to a local project or a VCS url '
                'beginning with svn+, git+, hg+, or bzr+' %
                editable_req
            )

    vc_type = url.split('+', 1)[0].lower()

    if not vcs.get_backend(vc_type):
        error_message = 'For --editable=%s only ' % editable_req + \
            ', '.join([backend.name + '+URL' for backend in vcs.backends]) + \
            ' is currently supported'
        raise InstallationError(error_message)

    package_name = Link(url).egg_fragment
    if not package_name:
        raise InstallationError(
            "Could not detect requirement name, please specify one with #egg="
        )
    return _strip_postfix(package_name), url, None


# site-packages/pip/vcs/bazaar.py
from __future__ import absolute_import

import logging
import os
import tempfile

# TODO: Get this into six.moves.urllib.parse
try:
    from urllib import parse as urllib_parse
except ImportError:
    import urlparse as urllib_parse

from pip.utils import rmtree, display_path
from pip.vcs import vcs, VersionControl
from pip.download import path_to_url


logger = logging.getLogger(__name__)


class Bazaar(VersionControl):
    name = 'bzr'
    dirname = '.bzr'
    repo_name = 'branch'
    schemes = (
        'bzr', 'bzr+http', 'bzr+https', 'bzr+ssh', 'bzr+sftp', 'bzr+ftp',
        'bzr+lp',
    )

    def __init__(self, url=None, *args, **kwargs):
        super(Bazaar, self).__init__(url, *args, **kwargs)
        # Python >= 2.7.4 and 3.3 don't have uses_fragment or non_hierarchical
        # (hence the getattr guard below). Register lp, but do not expose it
        # as a scheme, to support bzr+lp.
        if getattr(urllib_parse, 'uses_fragment', None):
            urllib_parse.uses_fragment.extend(['lp'])
            urllib_parse.non_hierarchical.extend(['lp'])

    def export(self, location):
        """
        Export the Bazaar repository at the url to the destination location
        """
        temp_dir = tempfile.mkdtemp('-export', 'pip-')
        self.unpack(temp_dir)
        if os.path.exists(location):
            # Remove the location to make sure Bazaar can export it correctly
            rmtree(location)
        try:
            self.run_command(['export', location], cwd=temp_dir,
                             show_stdout=False)
        finally:
            rmtree(temp_dir)

    def switch(self, dest, url, rev_options):
        self.run_command(['switch', url], cwd=dest)

    def update(self, dest, rev_options):
        self.run_command(['pull', '-q'] + rev_options, cwd=dest)

    def obtain(self, dest):
        url, rev = self.get_url_rev()
        if rev:
            rev_options = ['-r', rev]
            rev_display = ' (to revision %s)' % rev
        else:
            rev_options = []
            rev_display = ''
        if self.check_destination(dest, url, rev_options, rev_display):
            logger.info(
                'Checking out %s%s to %s',
                url,
                rev_display,
                display_path(dest),
            )
            self.run_command(['branch', '-q'] + rev_options + [url, dest])

    def get_url_rev(self):
        # hotfix the URL scheme after removing bzr+ from bzr+ssh://: re-add it
        url, rev = super(Bazaar, self).get_url_rev()
        if url.startswith('ssh://'):
            url = 'bzr+' + url
        return url, rev

    def get_url(self, location):
        urls = self.run_command(['info'], show_stdout=False, cwd=location)
        for line in urls.splitlines():
            line = line.strip()
            for x in ('checkout of branch: ',
                      'parent branch: '):
                if line.startswith(x):
                    repo = line.split(x)[1]
                    if self._is_local_repository(repo):
                        return path_to_url(repo)
                    return repo
        return None

    def get_revision(self, location):
        revision = self.run_command(
            ['revno'], show_stdout=False, cwd=location)
        return revision.splitlines()[-1]

    def get_src_requirement(self, dist, location):
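        # Produces e.g. 'bzr+https://example.com/branch@123#egg=ProjectName'.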
        repo = self.get_url(location)
        if not repo:
            return None
        if not repo.lower().startswith('bzr:'):
            repo = 'bzr+' + repo
        egg_project_name = dist.egg_name().split('-', 1)[0]
        current_rev = self.get_revision(location)
        return '%s@%s#egg=%s' % (repo, current_rev, egg_project_name)

    def check_version(self, dest, rev_options):
        """Always assume the versions don't match"""
        return False


vcs.register(Bazaar)
s-q(R"R#R$tcheck_destinationRRRR(RR+RR(R)trev_display((s6/usr/lib/python2.7/site-packages/pip/vcs/subversion.pytobtainPs

cC@sx|D]{}t|j}|s(qnd|krYdj|jdd j}n|}||jkr|jdddSqWdS(Nt-it#ii(Rtegg_fragmenttjointsplittlowertkeyR(Rtdisttdependency_linksRR3R7((s6/usr/lib/python2.7/site-packages/pip/vcs/subversion.pytget_locationas
%c
C@sd}xtj|D]\}}}|j|krAg|(qn|j|jtjj||jd}tjj|sqn|j|\}}||kr|d}	n$|s|j|	rg|(qnt	||}qW|S(sR
        Return the maximum revision for all files under a given location
        itentriest/(
R%twalktdirnametremoveR&R4R't_get_svn_url_revt
startswithtmax(
RRtrevisiontbasetdirstfilest
entries_fntdirurltlocalrevtbase_url((s6/usr/lib/python2.7/site-packages/pip/vcs/subversion.pytget_revisionos"
cC@sAtt|j\}}|jdr7d|}n||fS(Nsssh://ssvn+(tsuperRR"RA(RRR(((s6/usr/lib/python2.7/site-packages/pip/vcs/subversion.pyR"s
cC@sw|}x]tjjtjj|dse|}tjj|}||kr	tjd|dSq	W|j|dS(Nssetup.pysGCould not find setup.py for directory %s (tried all parent directories)i(	R%R&R'R4R>RRRR@(RRt
orig_locationt
last_location((s6/usr/lib/python2.7/site-packages/pip/vcs/subversion.pytget_urls$c
C@sIddlm}tjj||jd}tjj|rat|}|j}WdQXnd}|j	ds|j	ds|j	drt
ttj
|jd}|dd=|dd	}g|D]2}t|d
kr|d
rt|d
^qdg}n
|j	drtj|}	|	sNtd|n|	jd
}gtj|D]}
t|
jd
^qmdg}nyk|jdd|gdt}tj|jd
}gtj|D]}
t|
jd
^q}Wn|k
r#dg}}nX|r9t|}nd}||fS(Ni(tInstallationErrorR;R-t8t9t10s

ii	sR'topentreadRAtlisttmaptstrt
splitlinesR5tlentintt_svn_xml_url_reRt
ValueErrorRt_svn_rev_retfinditerRRt_svn_info_xml_url_ret_svn_info_xml_rev_reRRB(
RRRPtentries_pathtftdataRtdtrevsRtmtxmlR(((s6/usr/lib/python2.7/site-packages/pip/vcs/subversion.pyR@s>!I;	5
cC@s[|j|}|dkrdS|jjddd}|j|}d|||fS(NR1iissvn+%s@%s#egg=%s(RORtegg_nameR5RK(RR8Rtrepotegg_project_nameR(((s6/usr/lib/python2.7/site-packages/pip/vcs/subversion.pytget_src_requirementscC@stS(s&Always assume the versions don't match(R(RR+R)((s6/usr/lib/python2.7/site-packages/pip/vcs/subversion.pyt
check_versionscC@sYtj|}|jjdd}|j||j|j|jf}tj|}|S(Nt@i(	turllib_parseturlsplittnetlocR5tschemeR&tquerytfragmentt
urlunsplit(Rtpurltstripped_netloct
url_piecestsurl((s6/usr/lib/python2.7/site-packages/pip/vcs/subversion.pyR$s
!(R	ssvn+sshssvn+https	svn+httpsssvn+svn(t__name__t
__module__tnameR>t	repo_nametschemesR R!R*R,R0R:RKR"ROR@RmRntstaticmethodR$(((s6/usr/lib/python2.7/site-packages/pip/vcs/subversion.pyRs"										,			cC@s|rd|g}ng}tj|}t|drO|j|j}}nl|d}d|kr|jdd}d|kr|jdd\}}q|d}}nd	\}}|r|d|g7}n|r|d|g7}n|S(
Ns-rtusernameiRoit:s
--usernames
--password(NN(RpRqthasattrRtpasswordR5R(RR(R)trRRRrtauth((s6/usr/lib/python2.7/site-packages/pip/vcs/subversion.pyR#s$
(t
__future__RtloggingR%tretpip._vendor.six.moves.urllibRRpt	pip.indexRt	pip.utilsRRtpip.utils.loggingRtpip.vcsRRtcompileR]R_RRRbRat	getLoggerR{RRR#tregister(((s6/usr/lib/python2.7/site-packages/pip/vcs/subversion.pyts$	PK
ZH`!!$site-packages/pip/vcs/subversion.pycnu[
abc@@s)ddlmZddlZddlZddlZddlmZddlm	Z	ddl
mZmZddl
mZddlmZmZejdZejd	Zejd
ZejdZejdZejd
ZejeZdefdYZdZejedS(i(tabsolute_importN(tparse(tLink(trmtreetdisplay_path(t
indent_log(tvcstVersionControls
url="([^"]+)"scommitted-rev="(\d+)"s	URL: (.+)sRevision: (.+)s\s*revision="(\d+)"s(.*)t
SubversioncB@seZdZdZdZdZdZdZd	Zd
Z	dZ
dZd
ZdZ
dZdZdZdZedZRS(tsvns.svntcheckoutssvn+sshssvn+https	svn+httpsssvn+svncC@s|jdj|js,td||jd|gdtdidd6}tj|}|stj	dt
|tjd	|d
S|j
d
j}tj|}|stj	dt
|tjd	||dfS||j
d
fS(s/Returns (url, revision), where both are stringst/sBad directory: %stinfotshow_stdoutt
extra_environtCtLANGs'Cannot determine URL of svn checkout %ss!Output that cannot be parsed: 
%sis,Cannot determine revision of svn checkout %sN(NN(trstriptendswithtdirnametAssertionErrortrun_commandtFalset_svn_url_retsearchtloggertwarningRtdebugtNonetgrouptstript_svn_revision_re(tselftlocationtoutputtmatchturl((s6/usr/lib/python2.7/site-packages/pip/vcs/subversion.pytget_infos,



cC@s|j\}}t||}|j|}tjd||tHtjj|rlt	|n|j
dg|||gdtWdQXdS(s@Export the svn repository at the url to the destination locations!Exporting svn repository %s to %stexportR
N(tget_url_revtget_rev_optionstremove_auth_from_urlRRRtostpathtexistsRRR(R R!R$trevtrev_options((s6/usr/lib/python2.7/site-packages/pip/vcs/subversion.pyR&;s

cC@s"|jdg|||gdS(Ntswitch(R(R tdestR$R.((s6/usr/lib/python2.7/site-packages/pip/vcs/subversion.pyR/JscC@s|jdg||gdS(Ntupdate(R(R R0R.((s6/usr/lib/python2.7/site-packages/pip/vcs/subversion.pyR1MscC@s|j\}}t||}|j|}|rCd|}nd}|j||||rtjd||t||jddg|||gndS(Ns (to revision %s)tsChecking out %s%s to %sR
s-q(R'R(R)tcheck_destinationRRRR(R R0R$R-R.trev_display((s6/usr/lib/python2.7/site-packages/pip/vcs/subversion.pytobtainPs

cC@sx|D]{}t|j}|s(qnd|krYdj|jdd j}n|}||jkr|jdddSqWdS(Nt-it#ii(Rtegg_fragmenttjointsplittlowertkeyR(R tdisttdependency_linksR$R8R<((s6/usr/lib/python2.7/site-packages/pip/vcs/subversion.pytget_locationas
%c
C@sd}xtj|D]\}}}|j|krAg|(qn|j|jtjj||jd}tjj|sqn|j|\}}||kr|d}	n$|s|j|	rg|(qnt	||}qW|S(sR
        Return the maximum revision for all files under a given location
        itentriesR(
R*twalkRtremoveR+R9R,t_get_svn_url_revt
startswithtmax(
R R!trevisiontbasetdirstfilest
entries_fntdirurltlocalrevtbase_url((s6/usr/lib/python2.7/site-packages/pip/vcs/subversion.pytget_revisionos"
cC@sAtt|j\}}|jdr7d|}n||fS(Nsssh://ssvn+(tsuperRR'RD(R R$R-((s6/usr/lib/python2.7/site-packages/pip/vcs/subversion.pyR's
cC@sw|}x]tjjtjj|dse|}tjj|}||kr	tjd|dSq	W|j|dS(Nssetup.pysGCould not find setup.py for directory %s (tried all parent directories)i(	R*R+R,R9RRRRRC(R R!t
orig_locationt
last_location((s6/usr/lib/python2.7/site-packages/pip/vcs/subversion.pytget_urls$c
C@sIddlm}tjj||jd}tjj|rat|}|j}WdQXnd}|j	ds|j	ds|j	drt
ttj
|jd}|dd=|dd	}g|D]2}t|d
kr|d
rt|d
^qdg}n
|j	drtj|}	|	sNtd|n|	jd
}gtj|D]}
t|
jd
^qmdg}nyk|jdd|gdt}tj|jd
}gtj|D]}
t|
jd
^q}Wn|k
r#dg}}nX|r9t|}nd}||fS(Ni(tInstallationErrorR@R2t8t9t10s

ii	s!I;	5
cC@s[|j|}|dkrdS|jjddd}|j|}d|||fS(NR6iissvn+%s@%s#egg=%s(RRRtegg_nameR:RN(R R=R!trepotegg_project_nameR-((s6/usr/lib/python2.7/site-packages/pip/vcs/subversion.pytget_src_requirementscC@stS(s&Always assume the versions don't match(R(R R0R.((s6/usr/lib/python2.7/site-packages/pip/vcs/subversion.pyt
check_versionscC@sYtj|}|jjdd}|j||j|j|jf}tj|}|S(Nt@i(	turllib_parseturlsplittnetlocR:tschemeR+tquerytfragmentt
urlunsplit(R$tpurltstripped_netloct
url_piecestsurl((s6/usr/lib/python2.7/site-packages/pip/vcs/subversion.pyR)s
!(R	ssvn+sshssvn+https	svn+httpsssvn+svn(t__name__t
__module__tnameRt	repo_nametschemesR%R&R/R1R5R?RNR'RRRCRpRqtstaticmethodR)(((s6/usr/lib/python2.7/site-packages/pip/vcs/subversion.pyRs"										,			cC@s|rd|g}ng}tj|}t|drO|j|j}}nl|d}d|kr|jdd}d|kr|jdd\}}q|d}}nd	\}}|r|d|g7}n|r|d|g7}n|S(
Ns-rtusernameiRrit:s
--usernames
--password(NN(RsRtthasattrRtpasswordR:R(R$R-R.trRRRutauth((s6/usr/lib/python2.7/site-packages/pip/vcs/subversion.pyR(s$
(t
__future__RtloggingR*tretpip._vendor.six.moves.urllibRRst	pip.indexRt	pip.utilsRRtpip.utils.loggingRtpip.vcsRRtcompileR`RbRRReRdt	getLoggerR~RRR(tregister(((s6/usr/lib/python2.7/site-packages/pip/vcs/subversion.pyts$	PK
Zybj

"site-packages/pip/vcs/mercurial.pynu[from __future__ import absolute_import

import logging
import os
import tempfile

from pip.utils import display_path, rmtree
from pip.vcs import vcs, VersionControl
from pip.download import path_to_url
from pip._vendor.six.moves import configparser


logger = logging.getLogger(__name__)


class Mercurial(VersionControl):
    name = 'hg'
    dirname = '.hg'
    repo_name = 'clone'
    schemes = ('hg', 'hg+http', 'hg+https', 'hg+ssh', 'hg+static-http')

    def export(self, location):
        """Export the Hg repository at the url to the destination location"""
        temp_dir = tempfile.mkdtemp('-export', 'pip-')
        self.unpack(temp_dir)
        try:
            self.run_command(
                ['archive', location], show_stdout=False, cwd=temp_dir)
        finally:
            rmtree(temp_dir)

    def switch(self, dest, url, rev_options):
        repo_config = os.path.join(dest, self.dirname, 'hgrc')
        config = configparser.SafeConfigParser()
        try:
            config.read(repo_config)
            config.set('paths', 'default', url)
            with open(repo_config, 'w') as config_file:
                config.write(config_file)
        except (OSError, configparser.NoSectionError) as exc:
            logger.warning(
                'Could not switch Mercurial repository to %s: %s', url, exc,
            )
        else:
            self.run_command(['update', '-q'] + rev_options, cwd=dest)
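
    # Illustrative sketch (not part of pip itself): a successful switch()
    # leaves the checkout's .hg/hgrc with its default path pointing at the
    # new URL, roughly:
    #
    #   [paths]
    #   default = https://hg.example.com/new-repo
    #
    # The URL above is a made-up placeholder.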

    def update(self, dest, rev_options):
        self.run_command(['pull', '-q'], cwd=dest)
        self.run_command(['update', '-q'] + rev_options, cwd=dest)

    def obtain(self, dest):
        url, rev = self.get_url_rev()
        if rev:
            rev_options = [rev]
            rev_display = ' (to revision %s)' % rev
        else:
            rev_options = []
            rev_display = ''
        if self.check_destination(dest, url, rev_options, rev_display):
            logger.info(
                'Cloning hg %s%s to %s',
                url,
                rev_display,
                display_path(dest),
            )
            self.run_command(['clone', '--noupdate', '-q', url, dest])
            self.run_command(['update', '-q'] + rev_options, cwd=dest)

    def get_url(self, location):
        url = self.run_command(
            ['showconfig', 'paths.default'],
            show_stdout=False, cwd=location).strip()
        if self._is_local_repository(url):
            url = path_to_url(url)
        return url.strip()

    def get_revision(self, location):
        current_revision = self.run_command(
            ['parents', '--template={rev}'],
            show_stdout=False, cwd=location).strip()
        return current_revision

    def get_revision_hash(self, location):
        current_rev_hash = self.run_command(
            ['parents', '--template={node}'],
            show_stdout=False, cwd=location).strip()
        return current_rev_hash

    def get_src_requirement(self, dist, location):
        repo = self.get_url(location)
        if not repo:
            return None
        if not repo.lower().startswith('hg:'):
            repo = 'hg+' + repo
        egg_project_name = dist.egg_name().split('-', 1)[0]
        current_rev_hash = self.get_revision_hash(location)
        return '%s@%s#egg=%s' % (repo, current_rev_hash, egg_project_name)

    def check_version(self, dest, rev_options):
        """Always assume the versions don't match"""
        return False

vcs.register(Mercurial)
PK
ZVKV0V0!site-packages/pip/vcs/__init__.pynu["""Handles all VCS (version control) support"""
from __future__ import absolute_import

import errno
import logging
import os
import shutil
import sys

from pip._vendor.six.moves.urllib import parse as urllib_parse

from pip.exceptions import BadCommand
from pip.utils import (display_path, backup_dir, call_subprocess,
                       rmtree, ask_path_exists)


__all__ = ['vcs', 'get_src_requirement']


logger = logging.getLogger(__name__)


class VcsSupport(object):
    _registry = {}
    schemes = ['ssh', 'git', 'hg', 'bzr', 'sftp', 'svn']

    def __init__(self):
        # Register more schemes with urlparse for various version control
        # systems
        urllib_parse.uses_netloc.extend(self.schemes)
        # Python >= 2.7.4, 3.3 doesn't have uses_fragment
        if getattr(urllib_parse, 'uses_fragment', None):
            urllib_parse.uses_fragment.extend(self.schemes)
        super(VcsSupport, self).__init__()

    def __iter__(self):
        return self._registry.__iter__()

    @property
    def backends(self):
        return list(self._registry.values())

    @property
    def dirnames(self):
        return [backend.dirname for backend in self.backends]

    @property
    def all_schemes(self):
        schemes = []
        for backend in self.backends:
            schemes.extend(backend.schemes)
        return schemes

    def register(self, cls):
        if not hasattr(cls, 'name'):
            logger.warning('Cannot register VCS %s', cls.__name__)
            return
        if cls.name not in self._registry:
            self._registry[cls.name] = cls
            logger.debug('Registered VCS backend: %s', cls.name)

    def unregister(self, cls=None, name=None):
        if name in self._registry:
            del self._registry[name]
        elif cls in self._registry.values():
            del self._registry[cls.name]
        else:
            logger.warning('Cannot unregister because no class or name given')

    def get_backend_name(self, location):
        """
        Return the name of the version control backend if found at given
        location, e.g. vcs.get_backend_name('/path/to/vcs/checkout')
        """
        for vc_type in self._registry.values():
            if vc_type.controls_location(location):
                logger.debug('Determine that %s uses VCS: %s',
                             location, vc_type.name)
                return vc_type.name
        return None

    def get_backend(self, name):
        name = name.lower()
        if name in self._registry:
            return self._registry[name]

    def get_backend_from_location(self, location):
        vc_type = self.get_backend_name(location)
        if vc_type:
            return self.get_backend(vc_type)
        return None


vcs = VcsSupport()


class VersionControl(object):
    name = ''
    dirname = ''
    # List of supported schemes for this Version Control
    schemes = ()

    def __init__(self, url=None, *args, **kwargs):
        self.url = url
        super(VersionControl, self).__init__(*args, **kwargs)

    def _is_local_repository(self, repo):
        """
           posix absolute paths start with os.path.sep,
           win32 ones start with drive (like c:\\folder)
        """
        drive, tail = os.path.splitdrive(repo)
        return repo.startswith(os.path.sep) or drive
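
    # Illustrative sketch (not part of pip itself): the check above treats
    # POSIX absolute paths and Windows drive paths as local, e.g.
    #
    #   vc._is_local_repository('/srv/repos/app')    # truthy on POSIX (leading os.sep)
    #   vc._is_local_repository('C:\\repos\\app')    # truthy on Windows (drive letter)
    #   vc._is_local_repository('https://example.com/repo')  # falsy
    #
    # where ``vc`` is any VersionControl instance; the paths are made up.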

    # See issue #1083 for why this method was introduced:
    # https://github.com/pypa/pip/issues/1083
    def translate_egg_surname(self, surname):
        # For example, Django has branches of the form "stable/1.7.x".
        return surname.replace('/', '_')

    def export(self, location):
        """
        Export the repository at the url to the destination location
        i.e. only download the files, without vcs information
        """
        raise NotImplementedError

    def get_url_rev(self):
        """
        Returns the correct repository URL and revision by parsing the given
        repository URL
        """
        error_message = (
            "Sorry, '%s' is a malformed VCS url. "
            "The format is <vcs>+<protocol>://<url>/<path>, "
            "e.g. svn+http://myrepo/svn/MyApp#egg=MyApp"
        )
        assert '+' in self.url, error_message % self.url
        url = self.url.split('+', 1)[1]
        scheme, netloc, path, query, frag = urllib_parse.urlsplit(url)
        rev = None
        if '@' in path:
            path, rev = path.rsplit('@', 1)
        url = urllib_parse.urlunsplit((scheme, netloc, path, query, ''))
        return url, rev
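
    # Illustrative sketch (not part of pip itself), based on the example URL
    # from the error message above with a made-up '@42' revision:
    #
    #   vc = VersionControl('svn+http://myrepo/svn/MyApp@42#egg=MyApp')
    #   vc.get_url_rev()  # -> ('http://myrepo/svn/MyApp', '42')
    #
    # The '<vcs>+' prefix and the '#egg=...' fragment are stripped, and the
    # trailing '@42' is returned separately as the revision.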

    def get_info(self, location):
        """
        Returns (url, revision), where both are strings
        """
        assert not location.rstrip('/').endswith(self.dirname), \
            'Bad directory: %s' % location
        return self.get_url(location), self.get_revision(location)

    def normalize_url(self, url):
        """
        Normalize a URL for comparison by unquoting it and removing any
        trailing slash.
        """
        return urllib_parse.unquote(url).rstrip('/')

    def compare_urls(self, url1, url2):
        """
        Compare two repo URLs for identity, ignoring incidental differences.
        """
        return (self.normalize_url(url1) == self.normalize_url(url2))

    def obtain(self, dest):
        """
        Called when installing or updating an editable package, takes the
        source path of the checkout.
        """
        raise NotImplementedError

    def switch(self, dest, url, rev_options):
        """
        Switch the repo at ``dest`` to point to ``URL``.
        """
        raise NotImplementedError

    def update(self, dest, rev_options):
        """
        Update an already-existing repo to the given ``rev_options``.
        """
        raise NotImplementedError

    def check_version(self, dest, rev_options):
        """
        Return True if the version is identical to what exists and
        doesn't need to be updated.
        """
        raise NotImplementedError

    def check_destination(self, dest, url, rev_options, rev_display):
        """
        Prepare a location to receive a checkout/clone.

        Return True if the location is ready for (and requires) a
        checkout/clone, False otherwise.
        """
        checkout = True
        prompt = False
        if os.path.exists(dest):
            checkout = False
            if os.path.exists(os.path.join(dest, self.dirname)):
                existing_url = self.get_url(dest)
                if self.compare_urls(existing_url, url):
                    logger.debug(
                        '%s in %s exists, and has correct URL (%s)',
                        self.repo_name.title(),
                        display_path(dest),
                        url,
                    )
                    if not self.check_version(dest, rev_options):
                        logger.info(
                            'Updating %s %s%s',
                            display_path(dest),
                            self.repo_name,
                            rev_display,
                        )
                        self.update(dest, rev_options)
                    else:
                        logger.info(
                            'Skipping because already up-to-date.')
                else:
                    logger.warning(
                        '%s %s in %s exists with URL %s',
                        self.name,
                        self.repo_name,
                        display_path(dest),
                        existing_url,
                    )
                    prompt = ('(s)witch, (i)gnore, (w)ipe, (b)ackup ',
                              ('s', 'i', 'w', 'b'))
            else:
                logger.warning(
                    'Directory %s already exists, and is not a %s %s.',
                    dest,
                    self.name,
                    self.repo_name,
                )
                prompt = ('(i)gnore, (w)ipe, (b)ackup ', ('i', 'w', 'b'))
        if prompt:
            logger.warning(
                'The plan is to install the %s repository %s',
                self.name,
                url,
            )
            response = ask_path_exists('What to do?  %s' % prompt[0],
                                       prompt[1])

            if response == 's':
                logger.info(
                    'Switching %s %s to %s%s',
                    self.repo_name,
                    display_path(dest),
                    url,
                    rev_display,
                )
                self.switch(dest, url, rev_options)
            elif response == 'i':
                # do nothing
                pass
            elif response == 'w':
                logger.warning('Deleting %s', display_path(dest))
                rmtree(dest)
                checkout = True
            elif response == 'b':
                dest_dir = backup_dir(dest)
                logger.warning(
                    'Backing up %s to %s', display_path(dest), dest_dir,
                )
                shutil.move(dest, dest_dir)
                checkout = True
            elif response == 'a':
                sys.exit(-1)
        return checkout

    def unpack(self, location):
        """
        Clean up current location and download the url repository
        (and vcs info) into location
        """
        if os.path.exists(location):
            rmtree(location)
        self.obtain(location)

    def get_src_requirement(self, dist, location):
        """
        Return a string representing the requirement needed to
        redownload the files currently present in location, something
        like:
          {repository_url}@{revision}#egg={project_name}-{version_identifier}
        """
        raise NotImplementedError

    def get_url(self, location):
        """
        Return the url used at location.
        Used in get_info or check_destination.
        """
        raise NotImplementedError

    def get_revision(self, location):
        """
        Return the current revision of the files at location.
        Used in get_info.
        """
        raise NotImplementedError

    def run_command(self, cmd, show_stdout=True, cwd=None,
                    on_returncode='raise',
                    command_desc=None,
                    extra_environ=None, spinner=None):
        """
        Run a VCS subcommand
        This is simply a wrapper around call_subprocess that adds the VCS
        command name, and checks that the VCS is available
        """
        cmd = [self.name] + cmd
        try:
            return call_subprocess(cmd, show_stdout, cwd,
                                   on_returncode,
                                   command_desc, extra_environ,
                                   spinner)
        except OSError as e:
            # errno.ENOENT = no such file or directory
            # In other words, the VCS executable isn't available
            if e.errno == errno.ENOENT:
                raise BadCommand('Cannot find command %r' % self.name)
            else:
                raise  # re-raise exception if a different error occurred

    @classmethod
    def controls_location(cls, location):
        """
        Check if a location is controlled by the vcs.
        It is meant to be overridden to implement smarter detection
        mechanisms for specific vcs.
        """
        logger.debug('Checking in %s for %s (%s)...',
                     location, cls.dirname, cls.name)
        path = os.path.join(location, cls.dirname)
        return os.path.exists(path)


def get_src_requirement(dist, location):
    version_control = vcs.get_backend_from_location(location)
    if version_control:
        try:
            return version_control().get_src_requirement(dist,
                                                         location)
        except BadCommand:
            logger.warning(
                'cannot determine version of editable source in %s '
                '(%s command not found in path)',
                location,
                version_control.name,
            )
            return dist.as_requirement()
    logger.warning(
        'cannot determine version of editable source in %s (is not SVN '
        'checkout, Git clone, Mercurial clone or Bazaar branch)',
        location,
    )
    return dist.as_requirement()
PK
Z(0++site-packages/pip/vcs/git.pynu[from __future__ import absolute_import

import logging
import tempfile
import os.path

from pip.compat import samefile
from pip.exceptions import BadCommand
from pip._vendor.six.moves.urllib import parse as urllib_parse
from pip._vendor.six.moves.urllib import request as urllib_request
from pip._vendor.packaging.version import parse as parse_version

from pip.utils import display_path, rmtree
from pip.vcs import vcs, VersionControl


urlsplit = urllib_parse.urlsplit
urlunsplit = urllib_parse.urlunsplit


logger = logging.getLogger(__name__)


class Git(VersionControl):
    name = 'git'
    dirname = '.git'
    repo_name = 'clone'
    schemes = (
        'git', 'git+http', 'git+https', 'git+ssh', 'git+git', 'git+file',
    )

    def __init__(self, url=None, *args, **kwargs):

        # Works around an apparent Git bug
        # (see http://article.gmane.org/gmane.comp.version-control.git/146500)
        if url:
            scheme, netloc, path, query, fragment = urlsplit(url)
            if scheme.endswith('file'):
                initial_slashes = path[:-len(path.lstrip('/'))]
                newpath = (
                    initial_slashes +
                    urllib_request.url2pathname(path)
                    .replace('\\', '/').lstrip('/')
                )
                url = urlunsplit((scheme, netloc, newpath, query, fragment))
                after_plus = scheme.find('+') + 1
                url = scheme[:after_plus] + urlunsplit(
                    (scheme[after_plus:], netloc, newpath, query, fragment),
                )

        super(Git, self).__init__(url, *args, **kwargs)

    def get_git_version(self):
        VERSION_PFX = 'git version '
        version = self.run_command(['version'], show_stdout=False)
        if version.startswith(VERSION_PFX):
            version = version[len(VERSION_PFX):]
        else:
            version = ''
        # Keep only the first 3 components of the git version because
        # on Windows it is x.y.z.windows.t, and this parses as a
        # LegacyVersion, which is always smaller than a Version.
        version = '.'.join(version.split('.')[:3])
        return parse_version(version)
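
    # Illustrative sketch (not part of pip itself): a made-up output such as
    # 'git version 2.17.1.windows.2' is reduced to '2.17.1' before being
    # handed to parse_version, so the result compares correctly against
    # thresholds such as parse_version('1.9.0') in update() below.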

    def export(self, location):
        """Export the Git repository at the url to the destination location"""
        temp_dir = tempfile.mkdtemp('-export', 'pip-')
        self.unpack(temp_dir)
        try:
            if not location.endswith('/'):
                location = location + '/'
            self.run_command(
                ['checkout-index', '-a', '-f', '--prefix', location],
                show_stdout=False, cwd=temp_dir)
        finally:
            rmtree(temp_dir)

    def check_rev_options(self, rev, dest, rev_options):
        """Check the revision options before checkout to compensate for the
        fact that tags and branches may need origin/ as a prefix.
        Returns the SHA1 of the branch or tag if found.
        """
        revisions = self.get_short_refs(dest)

        origin_rev = 'origin/%s' % rev
        if origin_rev in revisions:
            # remote branch
            return [revisions[origin_rev]]
        elif rev in revisions:
            # a local tag or branch name
            return [revisions[rev]]
        else:
            logger.warning(
                "Could not find a tag or branch '%s', assuming commit.", rev,
            )
            return rev_options

    def check_version(self, dest, rev_options):
        """
        Compare the current sha to the ref. ref may be a branch or tag name,
        but current rev will always point to a sha. This means that a branch
        or tag will never compare as True. So this ultimately only matches
        against exact shas.
        """
        return self.get_revision(dest).startswith(rev_options[0])

    def switch(self, dest, url, rev_options):
        self.run_command(['config', 'remote.origin.url', url], cwd=dest)
        self.run_command(['checkout', '-q'] + rev_options, cwd=dest)

        self.update_submodules(dest)

    def update(self, dest, rev_options):
        # First fetch changes from the default remote
        if self.get_git_version() >= parse_version('1.9.0'):
            # fetch tags in addition to everything else
            self.run_command(['fetch', '-q', '--tags'], cwd=dest)
        else:
            self.run_command(['fetch', '-q'], cwd=dest)
        # Then reset to wanted revision (maybe even origin/master)
        if rev_options:
            rev_options = self.check_rev_options(
                rev_options[0], dest, rev_options,
            )
        self.run_command(['reset', '--hard', '-q'] + rev_options, cwd=dest)
        #: update submodules
        self.update_submodules(dest)

    def obtain(self, dest):
        url, rev = self.get_url_rev()
        if rev:
            rev_options = [rev]
            rev_display = ' (to %s)' % rev
        else:
            rev_options = ['origin/master']
            rev_display = ''
        if self.check_destination(dest, url, rev_options, rev_display):
            logger.info(
                'Cloning %s%s to %s', url, rev_display, display_path(dest),
            )
            self.run_command(['clone', '-q', url, dest])

            if rev:
                rev_options = self.check_rev_options(rev, dest, rev_options)
                # Only do a checkout if rev_options differs from HEAD
                if not self.check_version(dest, rev_options):
                    self.run_command(
                        ['checkout', '-q'] + rev_options,
                        cwd=dest,
                    )
            #: repo may contain submodules
            self.update_submodules(dest)

    def get_url(self, location):
        """Return URL of the first remote encountered."""
        remotes = self.run_command(
            ['config', '--get-regexp', 'remote\..*\.url'],
            show_stdout=False, cwd=location)
        remotes = remotes.splitlines()
        found_remote = remotes[0]
        for remote in remotes:
            if remote.startswith('remote.origin.url '):
                found_remote = remote
                break
        url = found_remote.split(' ')[1]
        return url.strip()

    def get_revision(self, location):
        current_rev = self.run_command(
            ['rev-parse', 'HEAD'], show_stdout=False, cwd=location)
        return current_rev.strip()

    def get_full_refs(self, location):
        """Yields tuples of (commit, ref) for branches and tags"""
        output = self.run_command(['show-ref'],
                                  show_stdout=False, cwd=location)
        for line in output.strip().splitlines():
            commit, ref = line.split(' ', 1)
            yield commit.strip(), ref.strip()

    def is_ref_remote(self, ref):
        return ref.startswith('refs/remotes/')

    def is_ref_branch(self, ref):
        return ref.startswith('refs/heads/')

    def is_ref_tag(self, ref):
        return ref.startswith('refs/tags/')

    def is_ref_commit(self, ref):
        """A ref is a commit sha if it is not anything else"""
        return not any((
            self.is_ref_remote(ref),
            self.is_ref_branch(ref),
            self.is_ref_tag(ref),
        ))

    # Should deprecate `get_refs` since it's ambiguous
    def get_refs(self, location):
        return self.get_short_refs(location)

    def get_short_refs(self, location):
        """Return map of named refs (branches or tags) to commit hashes."""
        rv = {}
        for commit, ref in self.get_full_refs(location):
            ref_name = None
            if self.is_ref_remote(ref):
                ref_name = ref[len('refs/remotes/'):]
            elif self.is_ref_branch(ref):
                ref_name = ref[len('refs/heads/'):]
            elif self.is_ref_tag(ref):
                ref_name = ref[len('refs/tags/'):]
            if ref_name is not None:
                rv[ref_name] = commit
        return rv

    def _get_subdirectory(self, location):
        """Return the relative path of setup.py to the git repo root."""
        # find the repo root
        git_dir = self.run_command(['rev-parse', '--git-dir'],
                                   show_stdout=False, cwd=location).strip()
        if not os.path.isabs(git_dir):
            git_dir = os.path.join(location, git_dir)
        root_dir = os.path.join(git_dir, '..')
        # find setup.py
        orig_location = location
        while not os.path.exists(os.path.join(location, 'setup.py')):
            last_location = location
            location = os.path.dirname(location)
            if location == last_location:
                # We've traversed up to the root of the filesystem without
                # finding setup.py
                logger.warning(
                    "Could not find setup.py for directory %s (tried all "
                    "parent directories)",
                    orig_location,
                )
                return None
        # relative path of setup.py to repo root
        if samefile(root_dir, location):
            return None
        return os.path.relpath(location, root_dir)

    def get_src_requirement(self, dist, location):
        repo = self.get_url(location)
        if not repo:
            return None
        if not repo.lower().startswith('git:'):
            repo = 'git+' + repo
        egg_project_name = dist.egg_name().split('-', 1)[0]
        current_rev = self.get_revision(location)
        req = '%s@%s#egg=%s' % (repo, current_rev, egg_project_name)
        subdirectory = self._get_subdirectory(location)
        if subdirectory:
            req += '&subdirectory=' + subdirectory
        return req

    def get_url_rev(self):
        """
        Prefixes stub URLs like 'user@hostname:user/repo.git' with 'ssh://'.
        That's required because although they use SSH they sometimes don't
        work with an ssh:// scheme (e.g. GitHub). But we need a scheme for
        parsing. Hence we remove it again afterwards and return it as a stub.
        """
        if '://' not in self.url:
            assert 'file:' not in self.url
            self.url = self.url.replace('git+', 'git+ssh://')
            url, rev = super(Git, self).get_url_rev()
            url = url.replace('ssh://', '')
        else:
            url, rev = super(Git, self).get_url_rev()

        return url, rev
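
    # Illustrative sketch (not part of pip itself), using a made-up stub URL:
    #
    #   Git('git+git@github.com:owner/project.git@v1.0').get_url_rev()
    #   # -> ('git@github.com:owner/project.git', 'v1.0')
    #
    # The temporary 'ssh://' scheme added above is stripped again before the
    # stub URL is returned.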

    def update_submodules(self, location):
        if not os.path.exists(os.path.join(location, '.gitmodules')):
            return
        self.run_command(
            ['submodule', 'update', '--init', '--recursive', '-q'],
            cwd=location,
        )

    @classmethod
    def controls_location(cls, location):
        if super(Git, cls).controls_location(location):
            return True
        try:
            r = cls().run_command(['rev-parse'],
                                  cwd=location,
                                  show_stdout=False,
                                  on_returncode='ignore')
            return not r
        except BadCommand:
            logger.debug("could not determine if %s is under git control "
                         "because git is not available", location)
            return False


vcs.register(Git)
PK
Zn4A$$#site-packages/pip/vcs/subversion.pynu[from __future__ import absolute_import

import logging
import os
import re

from pip._vendor.six.moves.urllib import parse as urllib_parse

from pip.index import Link
from pip.utils import rmtree, display_path
from pip.utils.logging import indent_log
from pip.vcs import vcs, VersionControl

_svn_xml_url_re = re.compile('url="([^"]+)"')
_svn_rev_re = re.compile('committed-rev="(\d+)"')
_svn_url_re = re.compile(r'URL: (.+)')
_svn_revision_re = re.compile(r'Revision: (.+)')
_svn_info_xml_rev_re = re.compile(r'\s*revision="(\d+)"')
_svn_info_xml_url_re = re.compile(r'<url>(.*)</url>')


logger = logging.getLogger(__name__)


class Subversion(VersionControl):
    name = 'svn'
    dirname = '.svn'
    repo_name = 'checkout'
    schemes = ('svn', 'svn+ssh', 'svn+http', 'svn+https', 'svn+svn')

    def get_info(self, location):
        """Returns (url, revision), where both are strings"""
        assert not location.rstrip('/').endswith(self.dirname), \
            'Bad directory: %s' % location
        output = self.run_command(
            ['info', location],
            show_stdout=False,
            extra_environ={'LANG': 'C'},
        )
        match = _svn_url_re.search(output)
        if not match:
            logger.warning(
                'Cannot determine URL of svn checkout %s',
                display_path(location),
            )
            logger.debug('Output that cannot be parsed: \n%s', output)
            return None, None
        url = match.group(1).strip()
        match = _svn_revision_re.search(output)
        if not match:
            logger.warning(
                'Cannot determine revision of svn checkout %s',
                display_path(location),
            )
            logger.debug('Output that cannot be parsed: \n%s', output)
            return url, None
        return url, match.group(1)

    def export(self, location):
        """Export the svn repository at the url to the destination location"""
        url, rev = self.get_url_rev()
        rev_options = get_rev_options(url, rev)
        url = self.remove_auth_from_url(url)
        logger.info('Exporting svn repository %s to %s', url, location)
        with indent_log():
            if os.path.exists(location):
                # Subversion doesn't like to check out over an existing
                # directory --force fixes this, but was only added in svn 1.5
                rmtree(location)
            self.run_command(
                ['export'] + rev_options + [url, location],
                show_stdout=False)

    def switch(self, dest, url, rev_options):
        self.run_command(['switch'] + rev_options + [url, dest])

    def update(self, dest, rev_options):
        self.run_command(['update'] + rev_options + [dest])

    def obtain(self, dest):
        url, rev = self.get_url_rev()
        rev_options = get_rev_options(url, rev)
        url = self.remove_auth_from_url(url)
        if rev:
            rev_display = ' (to revision %s)' % rev
        else:
            rev_display = ''
        if self.check_destination(dest, url, rev_options, rev_display):
            logger.info(
                'Checking out %s%s to %s',
                url,
                rev_display,
                display_path(dest),
            )
            self.run_command(['checkout', '-q'] + rev_options + [url, dest])

    def get_location(self, dist, dependency_links):
        for url in dependency_links:
            egg_fragment = Link(url).egg_fragment
            if not egg_fragment:
                continue
            if '-' in egg_fragment:
                # FIXME: will this work when a package has - in the name?
                key = '-'.join(egg_fragment.split('-')[:-1]).lower()
            else:
                key = egg_fragment
            if key == dist.key:
                return url.split('#', 1)[0]
        return None

    def get_revision(self, location):
        """
        Return the maximum revision for all files under a given location
        """
        # Note: taken from setuptools.command.egg_info
        revision = 0

        for base, dirs, files in os.walk(location):
            if self.dirname not in dirs:
                dirs[:] = []
                continue    # no sense walking uncontrolled subdirs
            dirs.remove(self.dirname)
            entries_fn = os.path.join(base, self.dirname, 'entries')
            if not os.path.exists(entries_fn):
                # FIXME: should we warn?
                continue

            dirurl, localrev = self._get_svn_url_rev(base)

            if base == location:
                base_url = dirurl + '/'   # save the root url
            elif not dirurl or not dirurl.startswith(base_url):
                dirs[:] = []
                continue    # not part of the same svn tree, skip it
            revision = max(revision, localrev)
        return revision

    def get_url_rev(self):
        # hotfix the URL scheme after removing svn+ from svn+ssh://; re-add it
        url, rev = super(Subversion, self).get_url_rev()
        if url.startswith('ssh://'):
            url = 'svn+' + url
        return url, rev

    def get_url(self, location):
        # In cases where the source is in a subdirectory, not alongside
        # setup.py we have to look up in the location until we find a real
        # setup.py
        orig_location = location
        while not os.path.exists(os.path.join(location, 'setup.py')):
            last_location = location
            location = os.path.dirname(location)
            if location == last_location:
                # We've traversed up to the root of the filesystem without
                # finding setup.py
                logger.warning(
                    "Could not find setup.py for directory %s (tried all "
                    "parent directories)",
                    orig_location,
                )
                return None

        return self._get_svn_url_rev(location)[0]

    def _get_svn_url_rev(self, location):
        from pip.exceptions import InstallationError

        entries_path = os.path.join(location, self.dirname, 'entries')
        if os.path.exists(entries_path):
            with open(entries_path) as f:
                data = f.read()
        else:  # subversion >= 1.7 does not have the 'entries' file
            data = ''

        if (data.startswith('8') or
                data.startswith('9') or
                data.startswith('10')):
            data = list(map(str.splitlines, data.split('\n\x0c\n')))
            del data[0][0]  # get rid of the '8'
            url = data[0][3]
            revs = [int(d[9]) for d in data if len(d) > 9 and d[9]] + [0]
        elif data.startswith('<?xml'):
            match = _svn_xml_url_re.search(data)
            if not match:
                raise ValueError('Badly formatted data: %r' % data)
            url = match.group(1)    # get repository URL
            revs = [int(m.group(1)) for m in _svn_rev_re.finditer(data)] + [0]
        else:
            try:
                # subversion >= 1.7
                xml = self.run_command(
                    ['info', '--xml', location],
                    show_stdout=False,
                )
                url = _svn_info_xml_url_re.search(xml).group(1)
                revs = [
                    int(m.group(1)) for m in _svn_info_xml_rev_re.finditer(xml)
                ]
            except InstallationError:
                url, revs = None, []

        if revs:
            rev = max(revs)
        else:
            rev = 0

        return url, rev

    def get_src_requirement(self, dist, location):
        repo = self.get_url(location)
        if repo is None:
            return None
        # FIXME: why not project name?
        egg_project_name = dist.egg_name().split('-', 1)[0]
        rev = self.get_revision(location)
        return 'svn+%s@%s#egg=%s' % (repo, rev, egg_project_name)

    def check_version(self, dest, rev_options):
        """Always assume the versions don't match"""
        return False

    @staticmethod
    def remove_auth_from_url(url):
        # Return a copy of url with 'username:password@' removed.
        # username/pass params are passed to subversion through flags
        # and are not recognized in the url.

        # parsed url
        purl = urllib_parse.urlsplit(url)
        stripped_netloc = \
            purl.netloc.split('@')[-1]

        # stripped url
        url_pieces = (
            purl.scheme, stripped_netloc, purl.path, purl.query, purl.fragment
        )
        surl = urllib_parse.urlunsplit(url_pieces)
        return surl
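
    # Illustrative sketch (not part of pip itself), with made-up credentials:
    #
    #   Subversion.remove_auth_from_url('https://user:secret@svn.example.com/repo')
    #   # -> 'https://svn.example.com/repo'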


def get_rev_options(url, rev):
    if rev:
        rev_options = ['-r', rev]
    else:
        rev_options = []

    r = urllib_parse.urlsplit(url)
    if hasattr(r, 'username'):
        # >= Python-2.5
        username, password = r.username, r.password
    else:
        netloc = r[1]
        if '@' in netloc:
            auth = netloc.split('@')[0]
            if ':' in auth:
                username, password = auth.split(':', 1)
            else:
                username, password = auth, None
        else:
            username, password = None, None

    if username:
        rev_options += ['--username', username]
    if password:
        rev_options += ['--password', password]
    return rev_options
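
# Illustrative sketch (not part of pip itself), with made-up credentials:
#
#   get_rev_options('https://user:secret@svn.example.com/repo', '42')
#   # -> ['-r', '42', '--username', 'user', '--password', 'secret']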


vcs.register(Subversion)
PK
ZkM**site-packages/pip/vcs/git.pyonu[
abc@@sddlmZddlZddlZddlZddlmZddlm	Z	ddl
mZddl
m
ZddlmZddlmZmZddlmZmZejZejZejeZd	efd
YZejedS(i(tabsolute_importN(tsamefile(t
BadCommand(tparse(trequest(tdisplay_pathtrmtree(tvcstVersionControltGitcB@seZdZdZdZdZddZd	Zd
Z	dZ
dZd
ZdZ
dZdZdZdZdZdZdZdZdZdZdZdZdZdZedZRS( tgits.gittclonesgit+https	git+httpssgit+sshsgit+gitsgit+filecO@s|rt|\}}}}}|jdr|t|jd }	|	tj|jddjd}
t|||
||f}|jdd}|| t||||
||f}qnt	t
|j|||dS(Ntfilet/s\t+i(turlsplittendswithtlentlstripturllib_requestturl2pathnametreplacet
urlunsplittfindtsuperR	t__init__(tselfturltargstkwargstschemetnetloctpathtquerytfragmenttinitial_slashestnewpatht
after_plus((s//usr/lib/python2.7/site-packages/pip/vcs/git.pyR s
#cC@sld}|jdgdt}|j|r@|t|}nd}dj|jdd }t|S(Nsgit version tversiontshow_stdouttt.i(trun_commandtFalset
startswithRtjointsplitt
parse_version(RtVERSION_PFXR&((s//usr/lib/python2.7/site-packages/pip/vcs/git.pytget_git_version5scC@sytjdd}|j|zH|jds>|d}n|jdddd|gdtd	|Wd
t|Xd
S(s@Export the Git repository at the url to the destination locations-exportspip-R
scheckout-indexs-as-fs--prefixR'tcwdN(ttempfiletmkdtemptunpackRR*R+R(Rtlocationttemp_dir((s//usr/lib/python2.7/site-packages/pip/vcs/git.pytexportBs

cC@s_|j|}d|}||kr0||gS||krG||gStjd||SdS(sCheck the revision options before checkout to compensate that tags
        and branches may need origin/ as a prefix.
        Returns the SHA1 of the branch or tag if found.
        s	origin/%ss5Could not find a tag or branch '%s', assuming commit.N(tget_short_refstloggertwarning(Rtrevtdesttrev_optionst	revisionst
origin_rev((s//usr/lib/python2.7/site-packages/pip/vcs/git.pytcheck_rev_optionsOs

cC@s|j|j|dS(s

        Compare the current sha to the ref. ref may be a branch or tag name,
        but current rev will always point to a sha. This means that a branch
        or tag will never compare as True. So this ultimately only matches
        against exact shas.
        i(tget_revisionR,(RR=R>((s//usr/lib/python2.7/site-packages/pip/vcs/git.pyt
check_versioncscC@sJ|jdd|gd||jddg|d||j|dS(Ntconfigsremote.origin.urlR2tcheckouts-q(R*tupdate_submodules(RR=RR>((s//usr/lib/python2.7/site-packages/pip/vcs/git.pytswitchlscC@s|jtdkr7|jdddgd|n|jddgd||rr|j|d||}n|jdddg|d||j|dS(	Ns1.9.0tfetchs-qs--tagsR2itresets--hard(R1R/R*RARF(RR=R>((s//usr/lib/python2.7/site-packages/pip/vcs/git.pytupdaters cC@s|j\}}|r.|g}d|}ndg}d}|j||||rtjd||t||jdd||g|r|j|||}|j||s|jddg|d|qn|j|ndS(	Ns (to %s)s
origin/masterR(sCloning %s%s to %sRs-qRER2(	tget_url_revtcheck_destinationR:tinfoRR*RARCRF(RR=RR<R>trev_display((s//usr/lib/python2.7/site-packages/pip/vcs/git.pytobtains"	
	
cC@s|jdddgdtd|}|j}|d}x'|D]}|jdrA|}PqAqAW|jdd	}|jS(
s+Return URL of the first remote encountered.RDs--get-regexpsremote\..*\.urlR'R2isremote.origin.url t i(R*R+t
splitlinesR,R.tstrip(RR6tremotestfound_remotetremoteR((s//usr/lib/python2.7/site-packages/pip/vcs/git.pytget_urls

cC@s+|jddgdtd|}|jS(Ns	rev-parsetHEADR'R2(R*R+RR(RR6tcurrent_rev((s//usr/lib/python2.7/site-packages/pip/vcs/git.pyRBscc@sn|jdgdtd|}xI|jjD]5}|jdd\}}|j|jfVq1WdS(s4Yields tuples of (commit, ref) for branches and tagssshow-refR'R2RPiN(R*R+RRRQR.(RR6toutputtlinetcommittref((s//usr/lib/python2.7/site-packages/pip/vcs/git.pyt
get_full_refss
cC@s
|jdS(Ns
refs/remotes/(R,(RR\((s//usr/lib/python2.7/site-packages/pip/vcs/git.pyt
is_ref_remotescC@s
|jdS(Nsrefs/heads/(R,(RR\((s//usr/lib/python2.7/site-packages/pip/vcs/git.pyt
is_ref_branchscC@s
|jdS(Ns
refs/tags/(R,(RR\((s//usr/lib/python2.7/site-packages/pip/vcs/git.pyt
is_ref_tagscC@s/t|j||j||j|fS(s0A ref is a commit sha if it is not anything else(tanyR^R_R`(RR\((s//usr/lib/python2.7/site-packages/pip/vcs/git.pyt
is_ref_commitscC@s
|j|S(N(R9(RR6((s//usr/lib/python2.7/site-packages/pip/vcs/git.pytget_refsscC@si}x|j|D]\}}d}|j|rJ|td}nD|j|rl|td}n"|j|r|td}n|dk	r|||s 		PK
Z	@#site-packages/pip/vcs/mercurial.pyonu[
abc@@sddlmZddlZddlZddlZddlmZmZddlm	Z	m
Z
ddlmZddl
mZejeZde
fdYZe	jedS(	i(tabsolute_importN(tdisplay_pathtrmtree(tvcstVersionControl(tpath_to_url(tconfigparsert	MercurialcB@sqeZdZdZdZdZdZdZd	Zd
Z	dZ
dZd
ZdZ
dZRS(thgs.hgtcloneshg+httpshg+httpsshg+sshshg+static-httpcC@sTtjdd}|j|z#|jd|gdtd|Wdt|XdS(s?Export the Hg repository at the url to the destination locations-exportspip-tarchivetshow_stdouttcwdN(ttempfiletmkdtemptunpacktrun_commandtFalseR(tselftlocationttemp_dir((s5/usr/lib/python2.7/site-packages/pip/vcs/mercurial.pytexports
cC@stjj||jd}tj}yI|j||jdd|t|d}|j	|WdQXWn/t
tjfk
r}tj
d||nX|jddg|d|dS(	Nthgrctpathstdefaulttws/Could not switch Mercurial repository to %s: %stupdates-qR(tostpathtjointdirnameRtSafeConfigParsertreadtsettopentwritetOSErrortNoSectionErrortloggertwarningR(Rtdestturltrev_optionstrepo_configtconfigtconfig_filetexc((s5/usr/lib/python2.7/site-packages/pip/vcs/mercurial.pytswitch s
cC@s:|jddgd||jddg|d|dS(Ntpulls-qRR(R(RR(R*((s5/usr/lib/python2.7/site-packages/pip/vcs/mercurial.pyR/scC@s|j\}}|r.|g}d|}ng}d}|j||||rtjd||t||jddd||g|jddg|d|ndS(	Ns (to revision %s)tsCloning hg %s%s to %sR	s
--noupdates-qRR(tget_url_revtcheck_destinationR&tinfoRR(RR(R)trevR*trev_display((s5/usr/lib/python2.7/site-packages/pip/vcs/mercurial.pytobtain3s	

cC@sO|jddgdtd|j}|j|rEt|}n|jS(Nt
showconfigs
paths.defaultRR(RRtstript_is_local_repositoryR(RRR)((s5/usr/lib/python2.7/site-packages/pip/vcs/mercurial.pytget_urlEscC@s+|jddgdtd|j}|S(Ntparentss--template={rev}RR(RRR9(RRtcurrent_revision((s5/usr/lib/python2.7/site-packages/pip/vcs/mercurial.pytget_revisionMscC@s+|jddgdtd|j}|S(NR<s--template={node}RR(RRR9(RRtcurrent_rev_hash((s5/usr/lib/python2.7/site-packages/pip/vcs/mercurial.pytget_revision_hashSscC@sw|j|}|jjds1d|}n|jjddd}|sWdS|j|}d|||fS(Nshg:shg+t-iis%s@%s#egg=%s(R;tlowert
startswithtegg_nametsplittNoneR@(RtdistRtrepotegg_project_nameR?((s5/usr/lib/python2.7/site-packages/pip/vcs/mercurial.pytget_src_requirementYs
cC@stS(s&Always assume the versions don't match(R(RR(R*((s5/usr/lib/python2.7/site-packages/pip/vcs/mercurial.pyt
check_versioncs(Rshg+httpshg+httpsshg+sshshg+static-http(t__name__t
__module__tnameRt	repo_nametschemesRR/RR7R;R>R@RJRK(((s5/usr/lib/python2.7/site-packages/pip/vcs/mercurial.pyRs	
							
(t
__future__RtloggingRR
t	pip.utilsRRtpip.vcsRRtpip.downloadRtpip._vendor.six.movesRt	getLoggerRLR&Rtregister(((s5/usr/lib/python2.7/site-packages/pip/vcs/mercurial.pytsWPK
Z$F~WW%site-packages/pip/compat/__init__.pycnu[
abc
@`sdZddlmZmZddlZddlZddlmZyddlm	Z
Wn!ek
r{ddlm	Z
nXyddl
mZWn!ek
rddlmZnXyddlZWn]ek
r#yddlmZWq$ek
rddlZeje_eje_q$XnXyddlZdZWn*ek
ridd	lmZd
ZnXddd
dddddddg
Zejd)kreZddlmZn3ddl Z e!e dZere jZndZejd*krdZ#e$dZ%ndZ#e$dZ%dZ&dZ'dZ(d+Z)ejd,krbe)d-7Z)nej*j+d%pej*d&koej,d'kZ-d(Z.dS(.sKStuff that differs in different Python versions and platform
distributions.i(tabsolute_importtdivisionN(t	text_type(t
dictConfig(tOrderedDict(t	ipaddresscC`s1tjdtjdg}ttt|S(Ntstdlibt
platstdlib(t	sysconfigtget_pathtsettfiltertbool(tpaths((s7/usr/lib/python2.7/site-packages/pip/compat/__init__.pyt
get_stdlib"s(RcC`s=tjdttjdtdtg}ttt|S(Ntstandard_libt
plat_specific(Rtget_python_libtTrueR
RR(R
((s7/usr/lib/python2.7/site-packages/pip/compat/__init__.pyR+stlogging_dictConfigRtuses_pycachetconsole_to_strt
native_strtget_path_uidtstdlib_pkgstWINDOWStsamefileRii(tcache_from_sourceRcC`s9y|jtjjSWntk
r4|jdSXdS(Ntutf_8(tdecodetsyst
__stdout__tencodingtUnicodeDecodeError(ts((s7/usr/lib/python2.7/site-packages/pip/compat/__init__.pyRGs
cC`s/t|tr+|jd|r$dndS|S(Nsutf-8treplacetstrict(t
isinstancetbytesR(R"R#((s7/usr/lib/python2.7/site-packages/pip/compat/__init__.pyRMscC`s|S(N((R"((s7/usr/lib/python2.7/site-packages/pip/compat/__init__.pyRSscC`s t|tr|jdS|S(Nsutf-8(R%Rtencode(R"R#((s7/usr/lib/python2.7/site-packages/pip/compat/__init__.pyRVs
cC`sHt|dr|jS|j|j|jddd}|dSdS(Nt
total_secondsiii
ii@Bi@B(thasattrR(tmicrosecondstsecondstdays(ttdtval((s7/usr/lib/python2.7/site-packages/pip/compat/__init__.pyR(]s
#cC`sttdrMtj|tjtjB}tj|j}tj|n7tjj	|sttj
|j}ntd||S(s)
    Return path's uid.

    Does not follow symlinks:
        https://github.com/pypa/pip/pull/935#discussion_r5307003

    Placed this function in compat due to differences on AIX and
    Jython, that should eventually go away.

    :raises OSError: When path is a symlink or can't be read.
    t
O_NOFOLLOWs1%s is a symlink; Will not return uid for symlinks(R)tostopentO_RDONLYR/tfstattst_uidtclosetpathtislinktstattOSError(R6tfdtfile_uid((s7/usr/lib/python2.7/site-packages/pip/compat/__init__.pyRes
cC`sAtjj|}|jdr=|jdr=|d}n|S(sl
    Expand ~ and ~user constructions.

    Includes a workaround for http://bugs.python.org/issue14768
    s~/s//i(R0R6t
expandusert
startswith(R6texpanded((s7/usr/lib/python2.7/site-packages/pip/compat/__init__.pyR<s
tpythontwsgirefiitargparsetwintclitntcC`sottjdr%tjj||Stjjtjj|}tjjtjj|}||kSdS(s>Provide an alternative for os.path.samefile on Windows/Python2RN(R)R0R6Rtnormcasetabspath(tfile1tfile2tpath1tpath2((s7/usr/lib/python2.7/site-packages/pip/compat/__init__.pyRs
(ii(i(R?R@(ii(RA(/t__doc__t
__future__RRR0Rtpip._vendor.sixRtlogging.configRRtImportErrortpip.compat.dictconfigtcollectionsRtpip._vendor.ordereddictRtpip._vendortipaddrt	IPAddresst
ip_addresst	IPNetworkt
ip_networkRRt	distutilst__all__tversion_infoRRtimportlib.utilRtimpR)tNoneRtFalseRR(RR<RtplatformR=tnameRR(((s7/usr/lib/python2.7/site-packages/pip/compat/__init__.pytsh






							
PK
Z8Z8Z&site-packages/pip/compat/dictconfig.pynu[# This is a copy of the Python logging.config.dictconfig module,
# reproduced with permission. It is provided here for backwards
# compatibility for Python versions prior to 2.7.
#
# Copyright 2009-2010 by Vinay Sajip. All Rights Reserved.
#
# Permission to use, copy, modify, and distribute this software and its
# documentation for any purpose and without fee is hereby granted,
# provided that the above copyright notice appear in all copies and that
# both that copyright notice and this permission notice appear in
# supporting documentation, and that the name of Vinay Sajip
# not be used in advertising or publicity pertaining to distribution
# of the software without specific, written prior permission.
# VINAY SAJIP DISCLAIMS ALL WARRANTIES WITH REGARD TO THIS SOFTWARE, INCLUDING
# ALL IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL
# VINAY SAJIP BE LIABLE FOR ANY SPECIAL, INDIRECT OR CONSEQUENTIAL DAMAGES OR
# ANY DAMAGES WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER
# IN AN ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT
# OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
from __future__ import absolute_import

import logging.handlers
import re
import sys
import types

from pip._vendor import six

# flake8: noqa

IDENTIFIER = re.compile('^[a-z_][a-z0-9_]*$', re.I)


def valid_ident(s):
    m = IDENTIFIER.match(s)
    if not m:
        raise ValueError('Not a valid Python identifier: %r' % s)
    return True

#
# This function is defined in logging only in recent versions of Python
#
try:
    from logging import _checkLevel
except ImportError:
    def _checkLevel(level):
        if isinstance(level, int):
            rv = level
        elif str(level) == level:
            if level not in logging._levelNames:
                raise ValueError('Unknown level: %r' % level)
            rv = logging._levelNames[level]
        else:
            raise TypeError('Level not an integer or a '
                            'valid string: %r' % level)
        return rv

# The ConvertingXXX classes are wrappers around standard Python containers,
# and they serve to convert any suitable values in the container. The
# conversion converts base dicts, lists and tuples to their wrapped
# equivalents, whereas strings which match a conversion format are converted
# appropriately.
#
# Each wrapper should have a configurator attribute holding the actual
# configurator to use for conversion.
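#
# For illustration (values are hypothetical): a handler option such as
# 'stream': 'ext://sys.stderr' is resolved to the sys.stderr object via
# ext_convert(), while a value like 'cfg://handlers.console' is looked up
# inside the configuration dictionary itself via cfg_convert() (see
# BaseConfigurator.value_converters below).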


class ConvertingDict(dict):
    """A converting dictionary wrapper."""

    def __getitem__(self, key):
        value = dict.__getitem__(self, key)
        result = self.configurator.convert(value)
        # If the converted value is different, save for next time
        if value is not result:
            self[key] = result
            if type(result) in (ConvertingDict, ConvertingList,
                                ConvertingTuple):
                result.parent = self
                result.key = key
        return result

    def get(self, key, default=None):
        value = dict.get(self, key, default)
        result = self.configurator.convert(value)
        # If the converted value is different, save for next time
        if value is not result:
            self[key] = result
            if type(result) in (ConvertingDict, ConvertingList,
                                ConvertingTuple):
                result.parent = self
                result.key = key
        return result

    def pop(self, key, default=None):
        value = dict.pop(self, key, default)
        result = self.configurator.convert(value)
        if value is not result:
            if type(result) in (ConvertingDict, ConvertingList,
                                ConvertingTuple):
                result.parent = self
                result.key = key
        return result


class ConvertingList(list):
    """A converting list wrapper."""
    def __getitem__(self, key):
        value = list.__getitem__(self, key)
        result = self.configurator.convert(value)
        # If the converted value is different, save for next time
        if value is not result:
            self[key] = result
            if type(result) in (ConvertingDict, ConvertingList,
                                ConvertingTuple):
                result.parent = self
                result.key = key
        return result

    def pop(self, idx=-1):
        value = list.pop(self, idx)
        result = self.configurator.convert(value)
        if value is not result:
            if type(result) in (ConvertingDict, ConvertingList,
                                ConvertingTuple):
                result.parent = self
        return result


class ConvertingTuple(tuple):
    """A converting tuple wrapper."""
    def __getitem__(self, key):
        value = tuple.__getitem__(self, key)
        result = self.configurator.convert(value)
        if value is not result:
            if type(result) in (ConvertingDict, ConvertingList,
                                ConvertingTuple):
                result.parent = self
                result.key = key
        return result


class BaseConfigurator(object):
    """
    The configurator base class which defines some useful defaults.
    """

    CONVERT_PATTERN = re.compile(r'^(?P<prefix>[a-z]+)://(?P<suffix>.*)$')

    WORD_PATTERN = re.compile(r'^\s*(\w+)\s*')
    DOT_PATTERN = re.compile(r'^\.\s*(\w+)\s*')
    INDEX_PATTERN = re.compile(r'^\[\s*(\w+)\s*\]\s*')
    DIGIT_PATTERN = re.compile(r'^\d+$')

    value_converters = {
        'ext' : 'ext_convert',
        'cfg' : 'cfg_convert',
    }

    # We might want to use a different one, e.g. importlib
    importer = __import__

    def __init__(self, config):
        self.config = ConvertingDict(config)
        self.config.configurator = self

    def resolve(self, s):
        """
        Resolve strings to objects using standard import and attribute
        syntax.
        """
        name = s.split('.')
        used = name.pop(0)
        try:
            found = self.importer(used)
            for frag in name:
                used += '.' + frag
                try:
                    found = getattr(found, frag)
                except AttributeError:
                    self.importer(used)
                    found = getattr(found, frag)
            return found
        except ImportError:
            e, tb = sys.exc_info()[1:]
            v = ValueError('Cannot resolve %r: %s' % (s, e))
            v.__cause__, v.__traceback__ = e, tb
            raise v

    def ext_convert(self, value):
        """Default converter for the ext:// protocol."""
        return self.resolve(value)

    def cfg_convert(self, value):
        """Default converter for the cfg:// protocol."""
        rest = value
        m = self.WORD_PATTERN.match(rest)
        if m is None:
            raise ValueError("Unable to convert %r" % value)
        else:
            rest = rest[m.end():]
            d = self.config[m.groups()[0]]
            # print d, rest
            while rest:
                m = self.DOT_PATTERN.match(rest)
                if m:
                    d = d[m.groups()[0]]
                else:
                    m = self.INDEX_PATTERN.match(rest)
                    if m:
                        idx = m.groups()[0]
                        if not self.DIGIT_PATTERN.match(idx):
                            d = d[idx]
                        else:
                            try:
                                n = int(idx)  # try as number first (most likely)
                                d = d[n]
                            except TypeError:
                                d = d[idx]
                if m:
                    rest = rest[m.end():]
                else:
                    raise ValueError('Unable to convert '
                                     '%r at %r' % (value, rest))
        # rest should be empty
        return d

    def convert(self, value):
        """
        Convert values to an appropriate type. dicts, lists and tuples are
        replaced by their converting alternatives. Strings are checked to
        see if they have a conversion format and are converted if they do.
        """
        if not isinstance(value, ConvertingDict) and isinstance(value, dict):
            value = ConvertingDict(value)
            value.configurator = self
        elif not isinstance(value, ConvertingList) and isinstance(value, list):
            value = ConvertingList(value)
            value.configurator = self
        elif not isinstance(value, ConvertingTuple) and\
                 isinstance(value, tuple):
            value = ConvertingTuple(value)
            value.configurator = self
        elif isinstance(value, six.string_types):  # str for py3k
            m = self.CONVERT_PATTERN.match(value)
            if m:
                d = m.groupdict()
                prefix = d['prefix']
                converter = self.value_converters.get(prefix, None)
                if converter:
                    suffix = d['suffix']
                    converter = getattr(self, converter)
                    value = converter(suffix)
        return value

    def configure_custom(self, config):
        """Configure an object with a user-supplied factory."""
        c = config.pop('()')
        if not hasattr(c, '__call__') and hasattr(types, 'ClassType') and type(c) != types.ClassType:
            c = self.resolve(c)
        props = config.pop('.', None)
        # Check for valid identifiers
        kwargs = dict((k, config[k]) for k in config if valid_ident(k))
        result = c(**kwargs)
        if props:
            for name, value in props.items():
                setattr(result, name, value)
        return result

    def as_tuple(self, value):
        """Utility function which converts lists to tuples."""
        if isinstance(value, list):
            value = tuple(value)
        return value


class DictConfigurator(BaseConfigurator):
    """
    Configure logging using a dictionary-like object to describe the
    configuration.
    """

    def configure(self):
        """Do the configuration."""

        config = self.config
        if 'version' not in config:
            raise ValueError("dictionary doesn't specify a version")
        if config['version'] != 1:
            raise ValueError("Unsupported version: %s" % config['version'])
        incremental = config.pop('incremental', False)
        EMPTY_DICT = {}
        logging._acquireLock()
        try:
            if incremental:
                handlers = config.get('handlers', EMPTY_DICT)
                # incremental handler config only if handler name
                # ties in to logging._handlers (Python 2.7)
                if sys.version_info[:2] == (2, 7):
                    for name in handlers:
                        if name not in logging._handlers:
                            raise ValueError('No handler found with '
                                             'name %r'  % name)
                        else:
                            try:
                                handler = logging._handlers[name]
                                handler_config = handlers[name]
                                level = handler_config.get('level', None)
                                if level:
                                    handler.setLevel(_checkLevel(level))
                            except StandardError as e:
                                raise ValueError('Unable to configure handler '
                                                 '%r: %s' % (name, e))
                loggers = config.get('loggers', EMPTY_DICT)
                for name in loggers:
                    try:
                        self.configure_logger(name, loggers[name], True)
                    except StandardError as e:
                        raise ValueError('Unable to configure logger '
                                         '%r: %s' % (name, e))
                root = config.get('root', None)
                if root:
                    try:
                        self.configure_root(root, True)
                    except StandardError as e:
                        raise ValueError('Unable to configure root '
                                         'logger: %s' % e)
            else:
                disable_existing = config.pop('disable_existing_loggers', True)

                logging._handlers.clear()
                del logging._handlerList[:]

                # Do formatters first - they don't refer to anything else
                formatters = config.get('formatters', EMPTY_DICT)
                for name in formatters:
                    try:
                        formatters[name] = self.configure_formatter(
                                                            formatters[name])
                    except StandardError as e:
                        raise ValueError('Unable to configure '
                                         'formatter %r: %s' % (name, e))
                # Next, do filters - they don't refer to anything else, either
                filters = config.get('filters', EMPTY_DICT)
                for name in filters:
                    try:
                        filters[name] = self.configure_filter(filters[name])
                    except StandardError as e:
                        raise ValueError('Unable to configure '
                                         'filter %r: %s' % (name, e))

                # Next, do handlers - they refer to formatters and filters
                # As handlers can refer to other handlers, sort the keys
                # to allow a deterministic order of configuration
                handlers = config.get('handlers', EMPTY_DICT)
                for name in sorted(handlers):
                    try:
                        handler = self.configure_handler(handlers[name])
                        handler.name = name
                        handlers[name] = handler
                    except StandardError as e:
                        raise ValueError('Unable to configure handler '
                                         '%r: %s' % (name, e))
                # Next, do loggers - they refer to handlers and filters

                # we don't want to lose the existing loggers,
                # since other threads may have pointers to them.
                # existing is set to contain all existing loggers,
                # and as we go through the new configuration we
                # remove any which are configured. At the end,
                # what's left in existing is the set of loggers
                # which were in the previous configuration but
                # which are not in the new configuration.
                root = logging.root
                existing = list(root.manager.loggerDict)
                # The list needs to be sorted so that we can
                # avoid disabling child loggers of explicitly
                # named loggers. With a sorted list it is easier
                # to find the child loggers.
                existing.sort()
                # We'll keep the list of existing loggers
                # which are children of named loggers here...
                child_loggers = []
                # now set up the new ones...
                loggers = config.get('loggers', EMPTY_DICT)
                for name in loggers:
                    if name in existing:
                        i = existing.index(name)
                        prefixed = name + "."
                        pflen = len(prefixed)
                        num_existing = len(existing)
                        i = i + 1  # look at the entry after name
                        while (i < num_existing) and\
                              (existing[i][:pflen] == prefixed):
                            child_loggers.append(existing[i])
                            i = i + 1
                        existing.remove(name)
                    try:
                        self.configure_logger(name, loggers[name])
                    except StandardError as e:
                        raise ValueError('Unable to configure logger '
                                         '%r: %s' % (name, e))

                # Disable any old loggers. There's no point deleting
                # them as other threads may continue to hold references
                # and by disabling them, you stop them doing any logging.
                # However, don't disable children of named loggers, as that's
                # probably not what was intended by the user.
                for log in existing:
                    logger = root.manager.loggerDict[log]
                    if log in child_loggers:
                        logger.level = logging.NOTSET
                        logger.handlers = []
                        logger.propagate = True
                    elif disable_existing:
                        logger.disabled = True

                # And finally, do the root logger
                root = config.get('root', None)
                if root:
                    try:
                        self.configure_root(root)
                    except StandardError as e:
                        raise ValueError('Unable to configure root '
                                         'logger: %s' % e)
        finally:
            logging._releaseLock()

    def configure_formatter(self, config):
        """Configure a formatter from a dictionary."""
        if '()' in config:
            factory = config['()']  # for use in exception handler
            try:
                result = self.configure_custom(config)
            except TypeError as te:
                if "'format'" not in str(te):
                    raise
                # Name of parameter changed from fmt to format.
                # Retry with old name.
                # This is so that code can be used with older Python versions
                #(e.g. by Django)
                config['fmt'] = config.pop('format')
                config['()'] = factory
                result = self.configure_custom(config)
        else:
            fmt = config.get('format', None)
            dfmt = config.get('datefmt', None)
            result = logging.Formatter(fmt, dfmt)
        return result

    def configure_filter(self, config):
        """Configure a filter from a dictionary."""
        if '()' in config:
            result = self.configure_custom(config)
        else:
            name = config.get('name', '')
            result = logging.Filter(name)
        return result

    def add_filters(self, filterer, filters):
        """Add filters to a filterer from a list of names."""
        for f in filters:
            try:
                filterer.addFilter(self.config['filters'][f])
            except StandardError as e:
                raise ValueError('Unable to add filter %r: %s' % (f, e))

    def configure_handler(self, config):
        """Configure a handler from a dictionary."""
        formatter = config.pop('formatter', None)
        if formatter:
            try:
                formatter = self.config['formatters'][formatter]
            except StandardError as e:
                raise ValueError('Unable to set formatter '
                                 '%r: %s' % (formatter, e))
        level = config.pop('level', None)
        filters = config.pop('filters', None)
        if '()' in config:
            c = config.pop('()')
            if not hasattr(c, '__call__') and hasattr(types, 'ClassType') and type(c) != types.ClassType:
                c = self.resolve(c)
            factory = c
        else:
            klass = self.resolve(config.pop('class'))
            # Special case for handler which refers to another handler
            if issubclass(klass, logging.handlers.MemoryHandler) and\
                'target' in config:
                try:
                    config['target'] = self.config['handlers'][config['target']]
                except StandardError as e:
                    raise ValueError('Unable to set target handler '
                                     '%r: %s' % (config['target'], e))
            elif issubclass(klass, logging.handlers.SMTPHandler) and\
                'mailhost' in config:
                config['mailhost'] = self.as_tuple(config['mailhost'])
            elif issubclass(klass, logging.handlers.SysLogHandler) and\
                'address' in config:
                config['address'] = self.as_tuple(config['address'])
            factory = klass
        kwargs = dict((k, config[k]) for k in config if valid_ident(k))
        try:
            result = factory(**kwargs)
        except TypeError as te:
            if "'stream'" not in str(te):
                raise
            # The argument name changed from strm to stream
            # Retry with old name.
            # This is so that code can be used with older Python versions
            #(e.g. by Django)
            kwargs['strm'] = kwargs.pop('stream')
            result = factory(**kwargs)
        if formatter:
            result.setFormatter(formatter)
        if level is not None:
            result.setLevel(_checkLevel(level))
        if filters:
            self.add_filters(result, filters)
        return result

    def add_handlers(self, logger, handlers):
        """Add handlers to a logger from a list of names."""
        for h in handlers:
            try:
                logger.addHandler(self.config['handlers'][h])
            except StandardError as e:
                raise ValueError('Unable to add handler %r: %s' % (h, e))

    def common_logger_config(self, logger, config, incremental=False):
        """
        Perform configuration which is common to root and non-root loggers.
        """
        level = config.get('level', None)
        if level is not None:
            logger.setLevel(_checkLevel(level))
        if not incremental:
            # Remove any existing handlers
            for h in logger.handlers[:]:
                logger.removeHandler(h)
            handlers = config.get('handlers', None)
            if handlers:
                self.add_handlers(logger, handlers)
            filters = config.get('filters', None)
            if filters:
                self.add_filters(logger, filters)

    def configure_logger(self, name, config, incremental=False):
        """Configure a non-root logger from a dictionary."""
        logger = logging.getLogger(name)
        self.common_logger_config(logger, config, incremental)
        propagate = config.get('propagate', None)
        if propagate is not None:
            logger.propagate = propagate

    def configure_root(self, config, incremental=False):
        """Configure a root logger from a dictionary."""
        root = logging.getLogger()
        self.common_logger_config(root, config, incremental)

dictConfigClass = DictConfigurator


def dictConfig(config):
    """Configure logging using a dictionary."""
    dictConfigClass(config).configure()
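
# Minimal usage sketch with a hypothetical configuration (not taken from pip
# itself): wires a StreamHandler to the root logger and exercises the ext://
# protocol resolved by BaseConfigurator.ext_convert().
if __name__ == "__main__":
    example_config = {
        'version': 1,
        'formatters': {
            'brief': {'format': '%(levelname)s:%(name)s:%(message)s'},
        },
        'handlers': {
            'console': {
                'class': 'logging.StreamHandler',
                'formatter': 'brief',
                'stream': 'ext://sys.stderr',
            },
        },
        'root': {'level': 'DEBUG', 'handlers': ['console']},
    }
    dictConfig(example_config)
    logging.getLogger("dictconfig-demo").debug("logging configured from a dict")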
PK
Z$F~WW%site-packages/pip/compat/__init__.pyonu[
[unreadable compiled bytecode omitted: binary .pyo data for pip/compat/__init__.py (same module as the source below)]
PK
Zc~AA'site-packages/pip/compat/dictconfig.pyonu[
[unreadable compiled bytecode omitted: binary .pyo data for pip/compat/dictconfig.py (source appears above)]
PK
Z'q@@$site-packages/pip/compat/__init__.pynu["""Stuff that differs in different Python versions and platform
distributions."""
from __future__ import absolute_import, division

import os
import sys

from pip._vendor.six import text_type

try:
    from logging.config import dictConfig as logging_dictConfig
except ImportError:
    from pip.compat.dictconfig import dictConfig as logging_dictConfig

try:
    from collections import OrderedDict
except ImportError:
    from pip._vendor.ordereddict import OrderedDict

try:
    import ipaddress
except ImportError:
    try:
        from pip._vendor import ipaddress
    except ImportError:
        import ipaddr as ipaddress
        ipaddress.ip_address = ipaddress.IPAddress
        ipaddress.ip_network = ipaddress.IPNetwork


try:
    import sysconfig

    def get_stdlib():
        paths = [
            sysconfig.get_path("stdlib"),
            sysconfig.get_path("platstdlib"),
        ]
        return set(filter(bool, paths))
except ImportError:
    from distutils import sysconfig

    def get_stdlib():
        paths = [
            sysconfig.get_python_lib(standard_lib=True),
            sysconfig.get_python_lib(standard_lib=True, plat_specific=True),
        ]
        return set(filter(bool, paths))


__all__ = [
    "logging_dictConfig", "ipaddress", "uses_pycache", "console_to_str",
    "native_str", "get_path_uid", "stdlib_pkgs", "WINDOWS", "samefile",
    "OrderedDict",
]


if sys.version_info >= (3, 4):
    uses_pycache = True
    from importlib.util import cache_from_source
else:
    import imp
    uses_pycache = hasattr(imp, 'cache_from_source')
    if uses_pycache:
        cache_from_source = imp.cache_from_source
    else:
        cache_from_source = None


if sys.version_info >= (3,):
    def console_to_str(s):
        try:
            return s.decode(sys.__stdout__.encoding)
        except UnicodeDecodeError:
            return s.decode('utf_8')

    def native_str(s, replace=False):
        if isinstance(s, bytes):
            return s.decode('utf-8', 'replace' if replace else 'strict')
        return s

else:
    def console_to_str(s):
        return s

    def native_str(s, replace=False):
        # Replace is ignored -- unicode to UTF-8 can't fail
        if isinstance(s, text_type):
            return s.encode('utf-8')
        return s


def total_seconds(td):
    if hasattr(td, "total_seconds"):
        return td.total_seconds()
    else:
        val = td.microseconds + (td.seconds + td.days * 24 * 3600) * 10 ** 6
        return val / 10 ** 6


def get_path_uid(path):
    """
    Return path's uid.

    Does not follow symlinks:
        https://github.com/pypa/pip/pull/935#discussion_r5307003

    Placed this function in compat due to differences on AIX and
    Jython, that should eventually go away.

    :raises OSError: When path is a symlink or can't be read.
    """
    if hasattr(os, 'O_NOFOLLOW'):
        fd = os.open(path, os.O_RDONLY | os.O_NOFOLLOW)
        file_uid = os.fstat(fd).st_uid
        os.close(fd)
    else:  # AIX and Jython
        # WARNING: time of check vulnerability, but best we can do w/o NOFOLLOW
        if not os.path.islink(path):
            # older versions of Jython don't have `os.fstat`
            file_uid = os.stat(path).st_uid
        else:
            # raise OSError for parity with os.O_NOFOLLOW above
            raise OSError(
                "%s is a symlink; Will not return uid for symlinks" % path
            )
    return file_uid


def expanduser(path):
    """
    Expand ~ and ~user constructions.

    Includes a workaround for http://bugs.python.org/issue14768
    """
    expanded = os.path.expanduser(path)
    if path.startswith('~/') and expanded.startswith('//'):
        expanded = expanded[1:]
    return expanded


# packages in the stdlib that may have installation metadata, but should not be
# considered 'installed'.  this theoretically could be determined based on
# dist.location (py27:`sysconfig.get_paths()['stdlib']`,
# py26:sysconfig.get_config_vars('LIBDEST')), but fear platform variation may
# make this ineffective, so hard-coding
stdlib_pkgs = ('python', 'wsgiref')
if sys.version_info >= (2, 7):
    stdlib_pkgs += ('argparse',)


# windows detection, covers cpython and ironpython
WINDOWS = (sys.platform.startswith("win") or
           (sys.platform == 'cli' and os.name == 'nt'))


def samefile(file1, file2):
    """Provide an alternative for os.path.samefile on Windows/Python2"""
    if hasattr(os.path, 'samefile'):
        return os.path.samefile(file1, file2)
    else:
        path1 = os.path.normcase(os.path.abspath(file1))
        path2 = os.path.normcase(os.path.abspath(file2))
        return path1 == path2
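
# Minimal usage sketch (hypothetical values): what the compat helpers above do
# on either Python 2 or Python 3.
if __name__ == "__main__":
    import datetime

    print(native_str(b"pip"))                            # 'pip' on both majors
    print(console_to_str(b"hello"))                      # decoded on 3, passed through on 2
    print(total_seconds(datetime.timedelta(minutes=2)))  # 120.0
    print(sorted(get_stdlib()))                          # standard-library paths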
PK
Zc~AA'site-packages/pip/compat/dictconfig.pycnu[
[unreadable compiled bytecode omitted: binary .pyc data for pip/compat/dictconfig.py (source appears above)]
PK
Zwsite-packages/pip/locations.pynu["""Locations where we look for configs, install stuff, etc"""
from __future__ import absolute_import

import os
import os.path
import site
import sys

from distutils import sysconfig
from distutils.command.install import install, SCHEME_KEYS  # noqa

from pip.compat import WINDOWS, expanduser
from pip.utils import appdirs


# Application Directories
USER_CACHE_DIR = appdirs.user_cache_dir("pip")


DELETE_MARKER_MESSAGE = '''\
This file is placed here by pip to indicate the source was put
here by pip.

Once this package is successfully installed this source code will be
deleted (unless you remove this file).
'''
PIP_DELETE_MARKER_FILENAME = 'pip-delete-this-directory.txt'


def write_delete_marker_file(directory):
    """
    Write the pip delete marker file into this directory.
    """
    filepath = os.path.join(directory, PIP_DELETE_MARKER_FILENAME)
    with open(filepath, 'w') as marker_fp:
        marker_fp.write(DELETE_MARKER_MESSAGE)


def running_under_virtualenv():
    """
    Return True if we're running inside a virtualenv, False otherwise.

    """
    if hasattr(sys, 'real_prefix'):
        return True
    elif sys.prefix != getattr(sys, "base_prefix", sys.prefix):
        return True

    return False


def virtualenv_no_global():
    """
    Return True if in a venv and no system site packages.
    """
    # this mirrors the logic in virtualenv.py for locating the
    # no-global-site-packages.txt file
    site_mod_dir = os.path.dirname(os.path.abspath(site.__file__))
    no_global_file = os.path.join(site_mod_dir, 'no-global-site-packages.txt')
    if running_under_virtualenv() and os.path.isfile(no_global_file):
        return True


if running_under_virtualenv():
    src_prefix = os.path.join(sys.prefix, 'src')
else:
    # FIXME: keep src in cwd for now (it is not a temporary folder)
    try:
        src_prefix = os.path.join(os.getcwd(), 'src')
    except OSError:
        # In case the current working directory has been renamed or deleted
        sys.exit(
            "The folder you are executing pip from can no longer be found."
        )

# under macOS + virtualenv sys.prefix is not properly resolved
# it is something like /path/to/python/bin/..
# Note: using realpath due to tmp dirs on OSX being symlinks
src_prefix = os.path.abspath(src_prefix)

# FIXME doesn't account for venv linked to global site-packages

site_packages = sysconfig.get_python_lib()
user_site = site.USER_SITE
user_dir = expanduser('~')
if WINDOWS:
    bin_py = os.path.join(sys.prefix, 'Scripts')
    bin_user = os.path.join(user_site, 'Scripts')
    # buildout uses 'bin' on Windows too?
    if not os.path.exists(bin_py):
        bin_py = os.path.join(sys.prefix, 'bin')
        bin_user = os.path.join(user_site, 'bin')

    config_basename = 'pip.ini'

    legacy_storage_dir = os.path.join(user_dir, 'pip')
    legacy_config_file = os.path.join(
        legacy_storage_dir,
        config_basename,
    )
else:
    bin_py = os.path.join(sys.prefix, 'bin')
    bin_user = os.path.join(user_site, 'bin')

    config_basename = 'pip.conf'

    legacy_storage_dir = os.path.join(user_dir, '.pip')
    legacy_config_file = os.path.join(
        legacy_storage_dir,
        config_basename,
    )

    # Forcing to use /usr/local/bin for standard macOS framework installs
    # Also log to ~/Library/Logs/ for use with the Console.app log viewer
    if sys.platform[:6] == 'darwin' and sys.prefix[:16] == '/System/Library/':
        bin_py = '/usr/local/bin'

site_config_files = [
    os.path.join(path, config_basename)
    for path in appdirs.site_config_dirs('pip')
]


def distutils_scheme(dist_name, user=False, home=None, root=None,
                     isolated=False, prefix=None):
    """
    Return a distutils install scheme
    """
    from distutils.dist import Distribution

    scheme = {}

    if isolated:
        extra_dist_args = {"script_args": ["--no-user-cfg"]}
    else:
        extra_dist_args = {}
    dist_args = {'name': dist_name}
    dist_args.update(extra_dist_args)

    d = Distribution(dist_args)
    d.parse_config_files()
    i = d.get_command_obj('install', create=True)
    # NOTE: setting user or home has the side-effect of creating the home dir
    # or user base for installations during finalize_options()
    # ideally, we'd prefer a scheme class that has no side-effects.
    assert not (user and prefix), "user={0} prefix={1}".format(user, prefix)
    i.user = user or i.user
    if user:
        i.prefix = ""
    i.prefix = prefix or i.prefix
    i.home = home or i.home
    i.root = root or i.root
    i.finalize_options()
    for key in SCHEME_KEYS:
        scheme[key] = getattr(i, 'install_' + key)

    # install_lib specified in setup.cfg should install *everything*
    # into there (i.e. it takes precedence over both purelib and
    # platlib).  Note, i.install_lib is *always* set after
    # finalize_options(); we only want to override here if the user
    # has explicitly requested it hence going back to the config
    if 'install_lib' in d.get_option_dict('install'):
        scheme.update(dict(purelib=i.install_lib, platlib=i.install_lib))

    if running_under_virtualenv():
        scheme['headers'] = os.path.join(
            sys.prefix,
            'include',
            'site',
            'python' + sys.version[:3],
            dist_name,
        )

        if root is not None:
            path_no_drive = os.path.splitdrive(
                os.path.abspath(scheme["headers"]))[1]
            scheme["headers"] = os.path.join(
                root,
                path_no_drive[1:],
            )

    return scheme
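
# Illustrative usage sketch (hypothetical project name): dump the computed
# install locations for the current interpreter / virtualenv.
if __name__ == "__main__":
    for key, value in sorted(distutils_scheme("example-project").items()):
        print("%s -> %s" % (key, value))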
PK
Z(**site-packages/pip/pep425tags.pynu["""Generate and work with PEP 425 Compatibility Tags."""
from __future__ import absolute_import

import re
import sys
import warnings
import platform
import logging

try:
    import sysconfig
except ImportError:  # pragma nocover
    # Python < 2.7
    import distutils.sysconfig as sysconfig
import distutils.util

from pip.compat import OrderedDict
import pip.utils.glibc

logger = logging.getLogger(__name__)

_osx_arch_pat = re.compile(r'(.+)_(\d+)_(\d+)_(.+)')


def get_config_var(var):
    try:
        return sysconfig.get_config_var(var)
    except IOError as e:  # Issue #1074
        warnings.warn("{0}".format(e), RuntimeWarning)
        return None


def get_abbr_impl():
    """Return abbreviated implementation name."""
    if hasattr(sys, 'pypy_version_info'):
        pyimpl = 'pp'
    elif sys.platform.startswith('java'):
        pyimpl = 'jy'
    elif sys.platform == 'cli':
        pyimpl = 'ip'
    else:
        pyimpl = 'cp'
    return pyimpl


def get_impl_ver():
    """Return implementation version."""
    impl_ver = get_config_var("py_version_nodot")
    if not impl_ver or get_abbr_impl() == 'pp':
        impl_ver = ''.join(map(str, get_impl_version_info()))
    return impl_ver


def get_impl_version_info():
    """Return sys.version_info-like tuple for use in decrementing the minor
    version."""
    if get_abbr_impl() == 'pp':
        # as per https://github.com/pypa/pip/issues/2882
        return (sys.version_info[0], sys.pypy_version_info.major,
                sys.pypy_version_info.minor)
    else:
        return sys.version_info[0], sys.version_info[1]


def get_impl_tag():
    """
    Returns the Tag for this specific implementation.
    """
    return "{0}{1}".format(get_abbr_impl(), get_impl_ver())


def get_flag(var, fallback, expected=True, warn=True):
    """Use a fallback method for determining SOABI flags if the needed config
    var is unset or unavailable."""
    val = get_config_var(var)
    if val is None:
        if warn:
            logger.debug("Config variable '%s' is unset, Python ABI tag may "
                         "be incorrect", var)
        return fallback()
    return val == expected


def get_abi_tag():
    """Return the ABI tag based on SOABI (if available) or emulate SOABI
    (CPython 2, PyPy)."""
    soabi = get_config_var('SOABI')
    impl = get_abbr_impl()
    if not soabi and impl in ('cp', 'pp') and hasattr(sys, 'maxunicode'):
        d = ''
        m = ''
        u = ''
        if get_flag('Py_DEBUG',
                    lambda: hasattr(sys, 'gettotalrefcount'),
                    warn=(impl == 'cp')):
            d = 'd'
        if get_flag('WITH_PYMALLOC',
                    lambda: impl == 'cp',
                    warn=(impl == 'cp')):
            m = 'm'
        if get_flag('Py_UNICODE_SIZE',
                    lambda: sys.maxunicode == 0x10ffff,
                    expected=4,
                    warn=(impl == 'cp' and
                          sys.version_info < (3, 3))) \
                and sys.version_info < (3, 3):
            u = 'u'
        abi = '%s%s%s%s%s' % (impl, get_impl_ver(), d, m, u)
    elif soabi and soabi.startswith('cpython-'):
        abi = 'cp' + soabi.split('-')[1]
    elif soabi:
        abi = soabi.replace('.', '_').replace('-', '_')
    else:
        abi = None
    return abi
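# Illustrative note: on a typical wide-unicode CPython 2.7 build the emulated
# tag comes out as 'cp27mu'; newer CPython builds usually report SOABI
# directly (e.g. 'cpython-35m-...' becomes 'cp35m').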


def _is_running_32bit():
    return sys.maxsize == 2147483647


def get_platform():
    """Return our platform name 'win32', 'linux_x86_64'"""
    if sys.platform == 'darwin':
        # distutils.util.get_platform() returns the release based on the value
        # of MACOSX_DEPLOYMENT_TARGET on which Python was built, which may
        # be significantly older than the user's current machine.
        release, _, machine = platform.mac_ver()
        split_ver = release.split('.')

        if machine == "x86_64" and _is_running_32bit():
            machine = "i386"
        elif machine == "ppc64" and _is_running_32bit():
            machine = "ppc"

        return 'macosx_{0}_{1}_{2}'.format(split_ver[0], split_ver[1], machine)

    # XXX remove distutils dependency
    result = distutils.util.get_platform().replace('.', '_').replace('-', '_')
    if result == "linux_x86_64" and _is_running_32bit():
        # 32 bit Python program (running on a 64 bit Linux): pip should only
        # install and run 32 bit compiled extensions in that case.
        result = "linux_i686"

    return result


def is_manylinux1_compatible():
    # Only Linux, and only x86-64 / i686
    if get_platform() not in ("linux_x86_64", "linux_i686"):
        return False

    # Check for presence of _manylinux module
    try:
        import _manylinux
        return bool(_manylinux.manylinux1_compatible)
    except (ImportError, AttributeError):
        # Fall through to heuristic check below
        pass

    # Check glibc version. CentOS 5 uses glibc 2.5.
    return pip.utils.glibc.have_compatible_glibc(2, 5)


def get_darwin_arches(major, minor, machine):
    """Return a list of supported arches (including group arches) for
    the given major, minor and machine architecture of an macOS machine.
    """
    arches = []

    def _supports_arch(major, minor, arch):
        # Looking at the application support for macOS versions in the chart
        # provided by https://en.wikipedia.org/wiki/OS_X#Versions it appears
        # our timeline looks roughly like:
        #
        # 10.0 - Introduces ppc support.
        # 10.4 - Introduces ppc64, i386, and x86_64 support, however the ppc64
        #        and x86_64 support is CLI only, and cannot be used for GUI
        #        applications.
        # 10.5 - Extends ppc64 and x86_64 support to cover GUI applications.
        # 10.6 - Drops support for ppc64
        # 10.7 - Drops support for ppc
        #
        # Given that we do not know if we're installing a CLI or a GUI
        # application, we must be conservative and assume it might be a GUI
        # application and behave as if ppc64 and x86_64 support did not occur
        # until 10.5.
        #
        # Note: The above information is taken from the "Application support"
        #       column in the chart not the "Processor support" since I believe
        #       that we care about what instruction sets an application can use
        #       not which processors the OS supports.
        if arch == 'ppc':
            return (major, minor) <= (10, 5)
        if arch == 'ppc64':
            return (major, minor) == (10, 5)
        if arch == 'i386':
            return (major, minor) >= (10, 4)
        if arch == 'x86_64':
            return (major, minor) >= (10, 5)
        if arch in groups:
            for garch in groups[arch]:
                if _supports_arch(major, minor, garch):
                    return True
        return False

    groups = OrderedDict([
        ("fat", ("i386", "ppc")),
        ("intel", ("x86_64", "i386")),
        ("fat64", ("x86_64", "ppc64")),
        ("fat32", ("x86_64", "i386", "ppc")),
    ])

    if _supports_arch(major, minor, machine):
        arches.append(machine)

    for garch in groups:
        if machine in groups[garch] and _supports_arch(major, minor, garch):
            arches.append(garch)

    arches.append('universal')

    return arches
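# Illustrative note: for a hypothetical 64-bit Python on macOS 10.9,
# get_darwin_arches(10, 9, 'x86_64') would return
# ['x86_64', 'intel', 'fat64', 'fat32', 'universal'].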


def get_supported(versions=None, noarch=False, platform=None,
                  impl=None, abi=None):
    """Return a list of supported tags for each version specified in
    `versions`.

    :param versions: a list of string versions, of the form ["33", "32"],
        or None. The first version will be assumed to support our ABI.
    :param platform: specify the exact platform you want valid
        tags for, or None. If None, use the local system platform.
    :param impl: specify the exact implementation you want valid
        tags for, or None. If None, use the local interpreter impl.
    :param abi: specify the exact abi you want valid
        tags for, or None. If None, use the local interpreter abi.
    """
    supported = []

    # Versions must be given with respect to the preference
    if versions is None:
        versions = []
        version_info = get_impl_version_info()
        major = version_info[:-1]
        # Support all previous minor Python versions.
        for minor in range(version_info[-1], -1, -1):
            versions.append(''.join(map(str, major + (minor,))))

    impl = impl or get_abbr_impl()

    abis = []

    abi = abi or get_abi_tag()
    if abi:
        abis[0:0] = [abi]

    abi3s = set()
    import imp
    for suffix in imp.get_suffixes():
        if suffix[0].startswith('.abi'):
            abi3s.add(suffix[0].split('.', 2)[1])

    abis.extend(sorted(list(abi3s)))

    abis.append('none')

    if not noarch:
        arch = platform or get_platform()
        if arch.startswith('macosx'):
            # support macosx-10.6-intel on macosx-10.9-x86_64
            match = _osx_arch_pat.match(arch)
            if match:
                name, major, minor, actual_arch = match.groups()
                tpl = '{0}_{1}_%i_%s'.format(name, major)
                arches = []
                for m in reversed(range(int(minor) + 1)):
                    for a in get_darwin_arches(int(major), m, actual_arch):
                        arches.append(tpl % (m, a))
            else:
                # arch pattern didn't match (?!)
                arches = [arch]
        elif platform is None and is_manylinux1_compatible():
            arches = [arch.replace('linux', 'manylinux1'), arch]
        else:
            arches = [arch]

        # Current version, current API (built specifically for our Python):
        for abi in abis:
            for arch in arches:
                supported.append(('%s%s' % (impl, versions[0]), abi, arch))

        # abi3 modules compatible with older version of Python
        for version in versions[1:]:
            # abi3 was introduced in Python 3.2
            if version in ('31', '30'):
                break
            for abi in abi3s:   # empty set if not Python 3
                for arch in arches:
                    supported.append(("%s%s" % (impl, version), abi, arch))

        # Has binaries, does not use the Python API:
        for arch in arches:
            supported.append(('py%s' % (versions[0][0]), 'none', arch))

    # No abi / arch, but requires our implementation:
    supported.append(('%s%s' % (impl, versions[0]), 'none', 'any'))
    # Tagged specifically as being cross-version compatible
    # (with just the major version specified)
    supported.append(('%s%s' % (impl, versions[0][0]), 'none', 'any'))

    # No abi / arch, generic Python
    for i, version in enumerate(versions):
        supported.append(('py%s' % (version,), 'none', 'any'))
        if i == 0:
            supported.append(('py%s' % (version[0]), 'none', 'any'))

    return supported

supported_tags = get_supported()
supported_tags_noarch = get_supported(noarch=True)

implementation_tag = get_impl_tag()
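
# Illustrative only (not part of the original module): on a wide-unicode
# CPython 2.7 running on a manylinux1-compatible x86_64 system, supported_tags
# is expected to begin with ('cp27', 'cp27mu', 'manylinux1_x86_64') and to
# include the fully generic ('py2', 'none', 'any') entry near the end.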

# site-packages/pip/operations/check.py

def check_requirements(installed_dists):
    missing_reqs_dict = {}
    incompatible_reqs_dict = {}

    for dist in installed_dists:
        key = '%s==%s' % (dist.project_name, dist.version)

        missing_reqs = list(get_missing_reqs(dist, installed_dists))
        if missing_reqs:
            missing_reqs_dict[key] = missing_reqs

        incompatible_reqs = list(get_incompatible_reqs(
            dist, installed_dists))
        if incompatible_reqs:
            incompatible_reqs_dict[key] = incompatible_reqs

    return (missing_reqs_dict, incompatible_reqs_dict)


def get_missing_reqs(dist, installed_dists):
    """Return all of the requirements of `dist` that aren't present in
    `installed_dists`.

    """
    installed_names = set(d.project_name.lower() for d in installed_dists)
    missing_requirements = set()

    for requirement in dist.requires():
        if requirement.project_name.lower() not in installed_names:
            missing_requirements.add(requirement)
            yield requirement


def get_incompatible_reqs(dist, installed_dists):
    """Return all of the requirements of `dist` that are present in
    `installed_dists`, but have incompatible versions.

    """
    installed_dists_by_name = {}
    for installed_dist in installed_dists:
        installed_dists_by_name[installed_dist.project_name] = installed_dist

    for requirement in dist.requires():
        present_dist = installed_dists_by_name.get(requirement.project_name)

        if present_dist and present_dist not in requirement:
            yield (requirement, present_dist)
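
# Illustrative only (not part of the original module): a minimal sketch of how
# these helpers are meant to be driven, assuming pip's own helper for listing
# installed distributions:
#
#     from pip.utils import get_installed_distributions
#
#     missing, incompatible = check_requirements(get_installed_distributions())
#     for key, reqs in missing.items():
#         print('%s requires the missing %s' % (key, ', '.join(map(str, reqs))))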

# site-packages/pip/operations/freeze.py
from __future__ import absolute_import

import logging
import re

import pip
from pip.req import InstallRequirement
from pip.req.req_file import COMMENT_RE
from pip.utils import get_installed_distributions
from pip._vendor import pkg_resources
from pip._vendor.packaging.utils import canonicalize_name
from pip._vendor.pkg_resources import RequirementParseError


logger = logging.getLogger(__name__)


def freeze(
        requirement=None,
        find_links=None, local_only=None, user_only=None, skip_regex=None,
        default_vcs=None,
        isolated=False,
        wheel_cache=None,
        skip=()):
    find_links = find_links or []
    skip_match = None

    if skip_regex:
        skip_match = re.compile(skip_regex).search

    dependency_links = []

    for dist in pkg_resources.working_set:
        if dist.has_metadata('dependency_links.txt'):
            dependency_links.extend(
                dist.get_metadata_lines('dependency_links.txt')
            )
    for link in find_links:
        if '#egg=' in link:
            dependency_links.append(link)
    for link in find_links:
        yield '-f %s' % link
    installations = {}
    for dist in get_installed_distributions(local_only=local_only,
                                            skip=(),
                                            user_only=user_only):
        try:
            req = pip.FrozenRequirement.from_dist(
                dist,
                dependency_links
            )
        except RequirementParseError:
            logger.warning(
                "Could not parse requirement: %s",
                dist.project_name
            )
            continue
        installations[req.name] = req

    if requirement:
        # options that don't get turned into an InstallRequirement should only
        # be emitted once, even if the same option appears in multiple
        # requirements files, so keep track of what has already been emitted
        emitted_options = set()
        for req_file_path in requirement:
            with open(req_file_path) as req_file:
                for line in req_file:
                    if (not line.strip() or
                            line.strip().startswith('#') or
                            (skip_match and skip_match(line)) or
                            line.startswith((
                                '-r', '--requirement',
                                '-Z', '--always-unzip',
                                '-f', '--find-links',
                                '-i', '--index-url',
                                '--pre',
                                '--trusted-host',
                                '--process-dependency-links',
                                '--extra-index-url'))):
                        line = line.rstrip()
                        if line not in emitted_options:
                            emitted_options.add(line)
                            yield line
                        continue

                    if line.startswith('-e') or line.startswith('--editable'):
                        if line.startswith('-e'):
                            line = line[2:].strip()
                        else:
                            line = line[len('--editable'):].strip().lstrip('=')
                        line_req = InstallRequirement.from_editable(
                            line,
                            default_vcs=default_vcs,
                            isolated=isolated,
                            wheel_cache=wheel_cache,
                        )
                    else:
                        line_req = InstallRequirement.from_line(
                            COMMENT_RE.sub('', line).strip(),
                            isolated=isolated,
                            wheel_cache=wheel_cache,
                        )

                    if not line_req.name:
                        logger.info(
                            "Skipping line in requirement file [%s] because "
                            "it's not clear what it would install: %s",
                            req_file_path, line.strip(),
                        )
                        logger.info(
                            "  (add #egg=PackageName to the URL to avoid"
                            " this warning)"
                        )
                    elif line_req.name not in installations:
                        logger.warning(
                            "Requirement file [%s] contains %s, but that "
                            "package is not installed",
                            req_file_path, COMMENT_RE.sub('', line).strip(),
                        )
                    else:
                        yield str(installations[line_req.name]).rstrip()
                        del installations[line_req.name]

        yield(
            '## The following requirements were added by '
            'pip freeze:'
        )
    for installation in sorted(
            installations.values(), key=lambda x: x.name.lower()):
        if canonicalize_name(installation.name) not in skip:
            yield str(installation).rstrip()
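
# Illustrative only (not part of the original module): freeze() is a generator
# of requirement lines, so the core of "pip freeze" amounts to:
#
#     for line in freeze():
#         print(line)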

# site-packages/pip/__init__.py
from __future__ import absolute_import

import locale
import logging
import os
import optparse
import warnings

import sys
import re

# 2016-06-17 barry@debian.org: urllib3 1.14 added optional support for socks,
# but if invoked (i.e. imported), it will issue a warning to stderr if socks
# isn't available.  requests unconditionally imports urllib3's socks contrib
# module, triggering this warning.  The warning breaks DEP-8 tests (because of
# the stderr output) and is just plain annoying in normal usage.  I don't want
# to add socks as yet another dependency for pip, nor do I want to allow-stderr
# in the DEP-8 tests, so just suppress the warning.  pdb tells me this has to
# be done before the import of pip.vcs.
from pip._vendor.urllib3.exceptions import DependencyWarning
warnings.filterwarnings("ignore", category=DependencyWarning)  # noqa

# We want to inject the use of SecureTransport as early as possible so that any
# references or sessions or what have you are ensured to have it, however we
# only want to do this in the case that we're running on macOS and the linked
# OpenSSL is too old to handle TLSv1.2
try:
    import ssl
except ImportError:
    pass
else:
    if (sys.platform == "darwin" and
            getattr(ssl, "OPENSSL_VERSION_NUMBER", 0) < 0x1000100f):  # OpenSSL 1.0.1
        try:
            from pip._vendor.urllib3.contrib import securetransport
        except (ImportError, OSError):
            pass
        else:
            securetransport.inject_into_urllib3()

from pip.exceptions import InstallationError, CommandError, PipError
from pip.utils import get_installed_distributions, get_prog
from pip.utils import deprecation, dist_is_editable
from pip.vcs import git, mercurial, subversion, bazaar  # noqa
from pip.baseparser import ConfigOptionParser, UpdatingDefaultsHelpFormatter
from pip.commands import get_summaries, get_similar_commands
from pip.commands import commands_dict
from pip._vendor.urllib3.exceptions import InsecureRequestWarning


# assignment for flake8 to be happy

# This fixes a peculiarity when importing via __import__ - as we are
# initialising the pip module, "from pip import cmdoptions" is recursive
# and appears not to work properly in that situation.
import pip.cmdoptions
cmdoptions = pip.cmdoptions

# The version as used in the setup.py and the docs conf.py
__version__ = "9.0.3"


logger = logging.getLogger(__name__)

# Hide the InsecureRequestWarning from urllib3
warnings.filterwarnings("ignore", category=InsecureRequestWarning)


def autocomplete():
    """Command and option completion for the main option parser (and options)
    and its subcommands (and options).

    Enable by sourcing one of the completion shell scripts (bash, zsh or fish).
    """
    # Don't complete if user hasn't sourced bash_completion file.
    if 'PIP_AUTO_COMPLETE' not in os.environ:
        return
    cwords = os.environ['COMP_WORDS'].split()[1:]
    cword = int(os.environ['COMP_CWORD'])
    try:
        current = cwords[cword - 1]
    except IndexError:
        current = ''

    subcommands = [cmd for cmd, summary in get_summaries()]
    options = []
    # subcommand
    try:
        subcommand_name = [w for w in cwords if w in subcommands][0]
    except IndexError:
        subcommand_name = None

    parser = create_main_parser()
    # subcommand options
    if subcommand_name:
        # special case: 'help' subcommand has no options
        if subcommand_name == 'help':
            sys.exit(1)
        # special case: list locally installed dists for uninstall command
        if subcommand_name == 'uninstall' and not current.startswith('-'):
            installed = []
            lc = current.lower()
            for dist in get_installed_distributions(local_only=True):
                if dist.key.startswith(lc) and dist.key not in cwords[1:]:
                    installed.append(dist.key)
            # if there are no dists installed, fall back to option completion
            if installed:
                for dist in installed:
                    print(dist)
                sys.exit(1)

        subcommand = commands_dict[subcommand_name]()
        options += [(opt.get_opt_string(), opt.nargs)
                    for opt in subcommand.parser.option_list_all
                    if opt.help != optparse.SUPPRESS_HELP]

        # filter out previously specified options from available options
        prev_opts = [x.split('=')[0] for x in cwords[1:cword - 1]]
        options = [(x, v) for (x, v) in options if x not in prev_opts]
        # filter options by current input
        options = [(k, v) for k, v in options if k.startswith(current)]
        for option in options:
            opt_label = option[0]
            # append '=' to options which require args
            if option[1]:
                opt_label += '='
            print(opt_label)
    else:
        # show main parser options only when necessary
        if current.startswith('-') or current.startswith('--'):
            opts = [i.option_list for i in parser.option_groups]
            opts.append(parser.option_list)
            opts = (o for it in opts for o in it)

            subcommands += [i.get_opt_string() for i in opts
                            if i.help != optparse.SUPPRESS_HELP]

        print(' '.join([x for x in subcommands if x.startswith(current)]))
    sys.exit(1)
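
# Illustrative only (not part of the original module): the completion hook above
# is driven purely by environment variables, so a shell integration effectively
# runs the equivalent of
#
#     COMP_WORDS="pip ins" COMP_CWORD=1 PIP_AUTO_COMPLETE=1 pip
#
# and reads the matching candidates ("install" here) from standard output.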


def create_main_parser():
    parser_kw = {
        'usage': '\n%prog  [options]',
        'add_help_option': False,
        'formatter': UpdatingDefaultsHelpFormatter(),
        'name': 'global',
        'prog': get_prog(),
    }

    parser = ConfigOptionParser(**parser_kw)
    parser.disable_interspersed_args()

    pip_pkg_dir = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
    parser.version = 'pip %s from %s (python %s)' % (
        __version__, pip_pkg_dir, sys.version[:3])

    # add the general options
    gen_opts = cmdoptions.make_option_group(cmdoptions.general_group, parser)
    parser.add_option_group(gen_opts)

    parser.main = True  # so the help formatter knows

    # create command listing for description
    command_summaries = get_summaries()
    description = [''] + ['%-27s %s' % (i, j) for i, j in command_summaries]
    parser.description = '\n'.join(description)

    return parser


def parseopts(args):
    parser = create_main_parser()

    # Note: parser calls disable_interspersed_args(), so the result of this
    # call is to split the initial args into the general options before the
    # subcommand and everything else.
    # For example:
    #  args: ['--timeout=5', 'install', '--user', 'INITools']
    #  general_options: ['--timeout==5']
    #  args_else: ['install', '--user', 'INITools']
    general_options, args_else = parser.parse_args(args)

    # --version
    if general_options.version:
        sys.stdout.write(parser.version)
        sys.stdout.write(os.linesep)
        sys.exit()

    # pip || pip help -> print_help()
    if not args_else or (args_else[0] == 'help' and len(args_else) == 1):
        parser.print_help()
        sys.exit()

    # the subcommand name
    cmd_name = args_else[0]

    if cmd_name not in commands_dict:
        guess = get_similar_commands(cmd_name)

        msg = ['unknown command "%s"' % cmd_name]
        if guess:
            msg.append('maybe you meant "%s"' % guess)

        raise CommandError(' - '.join(msg))

    # all the args without the subcommand
    cmd_args = args[:]
    cmd_args.remove(cmd_name)

    return cmd_name, cmd_args
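
# Illustrative only (not part of the original module): with the example from the
# comment above, parseopts(['--timeout=5', 'install', '--user', 'INITools'])
# returns ('install', ['--timeout=5', '--user', 'INITools']) -- the subcommand
# name plus all remaining arguments with the subcommand itself removed.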


def check_isolated(args):
    isolated = False

    if "--isolated" in args:
        isolated = True

    return isolated


def main(args=None):
    if args is None:
        args = sys.argv[1:]

    # Configure our deprecation warnings to be sent through loggers
    deprecation.install_warning_logger()

    autocomplete()

    try:
        cmd_name, cmd_args = parseopts(args)
    except PipError as exc:
        sys.stderr.write("ERROR: %s" % exc)
        sys.stderr.write(os.linesep)
        sys.exit(1)

    # Needed for locale.getpreferredencoding(False) to work
    # in pip.utils.encoding.auto_decode
    try:
        locale.setlocale(locale.LC_ALL, '')
    except locale.Error as e:
        # setlocale can apparently crash if the locale is uninitialized
        logger.debug("Ignoring error %s when setting locale", e)
    command = commands_dict[cmd_name](isolated=check_isolated(cmd_args))
    return command.main(cmd_args)


# ###########################################################
# # Writing freeze files

class FrozenRequirement(object):

    def __init__(self, name, req, editable, comments=()):
        self.name = name
        self.req = req
        self.editable = editable
        self.comments = comments

    _rev_re = re.compile(r'-r(\d+)$')
    _date_re = re.compile(r'-(20\d\d\d\d\d\d)$')

    @classmethod
    def from_dist(cls, dist, dependency_links):
        location = os.path.normcase(os.path.abspath(dist.location))
        comments = []
        from pip.vcs import vcs, get_src_requirement
        if dist_is_editable(dist) and vcs.get_backend_name(location):
            editable = True
            try:
                req = get_src_requirement(dist, location)
            except InstallationError as exc:
                logger.warning(
                    "Error when trying to get requirement for VCS system %s, "
                    "falling back to uneditable format", exc
                )
                req = None
            if req is None:
                logger.warning(
                    'Could not determine repository location of %s', location
                )
                comments.append(
                    '## !! Could not determine repository location'
                )
                req = dist.as_requirement()
                editable = False
        else:
            editable = False
            req = dist.as_requirement()
            specs = req.specs
            assert len(specs) == 1 and specs[0][0] in ["==", "==="], \
                'Expected 1 spec with == or ===; specs = %r; dist = %r' % \
                (specs, dist)
            version = specs[0][1]
            ver_match = cls._rev_re.search(version)
            date_match = cls._date_re.search(version)
            if ver_match or date_match:
                svn_backend = vcs.get_backend('svn')
                if svn_backend:
                    svn_location = svn_backend().get_location(
                        dist,
                        dependency_links,
                    )
                if not svn_location:
                    logger.warning(
                        'Warning: cannot find svn location for %s', req)
                    comments.append(
                        '## FIXME: could not find svn URL in dependency_links '
                        'for this package:'
                    )
                else:
                    comments.append(
                        '# Installing as editable to satisfy requirement %s:' %
                        req
                    )
                    if ver_match:
                        rev = ver_match.group(1)
                    else:
                        rev = '{%s}' % date_match.group(1)
                    editable = True
                    req = '%s@%s#egg=%s' % (
                        svn_location,
                        rev,
                        cls.egg_name(dist)
                    )
        return cls(dist.project_name, req, editable, comments)

    @staticmethod
    def egg_name(dist):
        name = dist.egg_name()
        match = re.search(r'-py\d\.\d$', name)
        if match:
            name = name[:match.start()]
        return name

    def __str__(self):
        req = self.req
        if self.editable:
            req = '-e %s' % req
        return '\n'.join(list(self.comments) + [str(req)]) + '\n'
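
# Illustrative only (not part of the original module): str() on a frozen
# requirement yields the familiar "pip freeze" line, e.g. 'INITools==0.2\n' for
# a plain install, or '-e <vcs-url>#egg=<name>\n' (hypothetical URL) when the
# distribution was detected as an editable VCS checkout.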


if __name__ == '__main__':
    sys.exit(main())
^\d+(\.\d+)*$cCs
||kS(N((tvtctp((s?/usr/lib/python2.7/site-packages/pip/_vendor/distlib/version.pytWttcCs||kp||kS(N((R(R)R*((s?/usr/lib/python2.7/site-packages/pip/_vendor/distlib/version.pyR+YR,s<=cCs||kp||kS(N((R(R)R*((s?/usr/lib/python2.7/site-packages/pip/_vendor/distlib/version.pyR+ZR,s>=cCs
||kS(N((R(R)R*((s?/usr/lib/python2.7/site-packages/pip/_vendor/distlib/version.pyR+[R,s==cCs
||kS(N((R(R)R*((s?/usr/lib/python2.7/site-packages/pip/_vendor/distlib/version.pyR+\R,s===cCs||kp||kS(N((R(R)R*((s?/usr/lib/python2.7/site-packages/pip/_vendor/distlib/version.pyR+^R,s~=cCs
||kS(N((R(R)R*((s?/usr/lib/python2.7/site-packages/pip/_vendor/distlib/version.pyR+_R,s!=c
Cs|jdkrtdn|j|_}|jj|}|s\td|n|jd}|dj|_|jj	|_
g}|drg|djdD]}|j^q}x|D]}|jj|}|s
td||fn|j}|dp#d}|d	}|j
d
r|dkr^td
|n|d t}}	|jj|s|j|qn|j|t}}	|j|||	fqWnt||_dS(NsPlease specify a version classs
Not valid: %rR,iit,sInvalid %r in %rs~=is.*s==s!=s#'.*' not allowed for %r constraintsi(s==s!=(t
version_classtNonet
ValueErrorR
Rtdist_retmatchtgroupstnametlowertkeytsplittcomp_retendswithtTruetnum_retFalsetappendttupleR(
RRtmR5tclistR)tconstraintstoptvntprefix((s?/usr/lib/python2.7/site-packages/pip/_vendor/distlib/version.pyRbs:
,


cCst|tr!|j|}nx|jD]\}}}|jj|}t|trmt||}n|sd||jjf}t	|n||||s+t
Sq+WtS(s
        Check if the provided version matches the constraints.

        :param version: The version to match against this instance.
        :type version: String or :class:`Version` instance.
        s%r not implemented for %s(t
isinstanceRR0Rt
_operatorstgettgetattrR"R	RR>R<(Rtversiontoperatort
constraintRFtftmsg((s?/usr/lib/python2.7/site-packages/pip/_vendor/distlib/version.pyR4scCsJd}t|jdkrF|jdddkrF|jdd}n|S(Niis==s===(s==s===(R1tlenR(Rtresult((s?/usr/lib/python2.7/site-packages/pip/_vendor/distlib/version.pyt
exact_versions,cCsGt|t|ks*|j|jkrCtd||fndS(Nscannot compare %s and %s(RR6R(RR((s?/usr/lib/python2.7/site-packages/pip/_vendor/distlib/version.pyRs*cCs/|j||j|jko.|j|jkS(N(RR8R(RR((s?/usr/lib/python2.7/site-packages/pip/_vendor/distlib/version.pyRs
cCs|j|S(N(R(RR((s?/usr/lib/python2.7/site-packages/pip/_vendor/distlib/version.pyRscCst|jt|jS(N(R R8R(R((s?/usr/lib/python2.7/site-packages/pip/_vendor/distlib/version.pyR!scCsd|jj|jfS(Ns%s(%r)(R"R	R(R((s?/usr/lib/python2.7/site-packages/pip/_vendor/distlib/version.pyR#scCs|jS(N(R(R((s?/usr/lib/python2.7/site-packages/pip/_vendor/distlib/version.pyR$sN(R	R
R1R0tretcompileR3R:R=RHRR4R&RRRRRR!R#R$(((s?/usr/lib/python2.7/site-packages/pip/_vendor/distlib/version.pyR'Ns,







	%						sk^v?(\d+!)?(\d+(\.\d+)*)((a|b|c|rc)(\d+))?(\.(post)(\d+))?(\.(dev)(\d+))?(\+([a-zA-Z\d]+(\.[a-zA-Z\d]+)?))?$cCsK|j}tj|}|s4td|n|j}td|djdD}x0t|dkr|ddkr|d }qfW|dsd}nt|d}|dd!}|d	d
!}|dd!}|d
}|dkrd}n|dt|df}|dkr.d}n|dt|df}|dkr]d}n|dt|df}|dkrd}nfg}	xQ|jdD]@}
|
j
rdt|
f}
nd|
f}
|	j|
qWt|	}|s|r|rd}qd}n|s&d}n|s5d}n||||||fS(NsNot a valid version: %scss|]}t|VqdS(N(tint(t.0R(((s?/usr/lib/python2.7/site-packages/pip/_vendor/distlib/version.pys	sit.iiiiii	i
ii
tatzt_tfinal(NN((NN((NN(((RXi(RY(RZ(R[(R
tPEP440_VERSION_RER4RR5R@R9RPRUR1tisdigitR?(RRAR5tnumstepochtpretposttdevtlocalRtpart((s?/usr/lib/python2.7/site-packages/pip/_vendor/distlib/version.pyt_pep_440_keysT#%
	



				
				cBsAeZdZdZedddddgZedZRS(sIA rational version.

    Good:
        1.2         # equivalent to "1.2.0"
        1.2.0
        1.2a1
        1.2.3a2
        1.2.3b1
        1.2.3c1
        1.2.3.4
        TODO: fill this out

    Bad:
        1           # minimum two numbers
        1.2a        # release level must have a release serial
        1.2.3b
    cCsQt|}tj|}|j}td|djdD|_|S(Ncss|]}t|VqdS(N(RU(RVR(((s?/usr/lib/python2.7/site-packages/pip/_vendor/distlib/version.pys	siRW(t_normalized_keyR\R4R5R@R9t_release_clause(RRRQRAR5((s?/usr/lib/python2.7/site-packages/pip/_vendor/distlib/version.pyRs
&RXtbR)trcRbcstfdjDS(Nc3s(|]}|r|djkVqdS(iN(tPREREL_TAGS(RVtt(R(s?/usr/lib/python2.7/site-packages/pip/_vendor/distlib/version.pys	s(tanyR(R((Rs?/usr/lib/python2.7/site-packages/pip/_vendor/distlib/version.pyR%s(R	R
RRtsetRjR&R%(((s?/usr/lib/python2.7/site-packages/pip/_vendor/distlib/version.pyRs	cCsUt|}t|}||kr(tS|j|s;tSt|}||dkS(NRW(tstrR<t
startswithR>RP(txtytn((s?/usr/lib/python2.7/site-packages/pip/_vendor/distlib/version.pyt
_match_prefix"scBseZeZidd6dd6dd6dd6dd	6d
d6dd
6dd6ZdZdZdZdZdZ	dZ
dZdZdZ
RS(t_match_compatibles~=t	_match_ltR-t	_match_gtR.t	_match_les<=t	_match_ges>=t	_match_eqs==t_match_arbitrarys===t	_match_nes!=cCsx|r"d|ko|jd}n|jdo:|jd}|rn|jjddd}|j|}n||fS(Nt+iii(RRR9R0(RRKRMRFtstrip_localR((s?/usr/lib/python2.7/site-packages/pip/_vendor/distlib/version.pyt
_adjust_local<scCsj|j|||\}}||kr+tS|j}djg|D]}t|^qA}t||S(NRW(R~R>RgtjoinRnRs(RRKRMRFtrelease_clausetitpfx((s?/usr/lib/python2.7/site-packages/pip/_vendor/distlib/version.pyRuJs	(cCsj|j|||\}}||kr+tS|j}djg|D]}t|^qA}t||S(NRW(R~R>RgRRnRs(RRKRMRFRRR((s?/usr/lib/python2.7/site-packages/pip/_vendor/distlib/version.pyRvRs	(cCs%|j|||\}}||kS(N(R~(RRKRMRF((s?/usr/lib/python2.7/site-packages/pip/_vendor/distlib/version.pyRwZscCs%|j|||\}}||kS(N(R~(RRKRMRF((s?/usr/lib/python2.7/site-packages/pip/_vendor/distlib/version.pyRx^scCsC|j|||\}}|s0||k}nt||}|S(N(R~Rs(RRKRMRFRQ((s?/usr/lib/python2.7/site-packages/pip/_vendor/distlib/version.pyRybs
cCst|t|kS(N(Rn(RRKRMRF((s?/usr/lib/python2.7/site-packages/pip/_vendor/distlib/version.pyRzjscCsD|j|||\}}|s0||k}nt||}|S(N(R~Rs(RRKRMRFRQ((s?/usr/lib/python2.7/site-packages/pip/_vendor/distlib/version.pyR{ms
cCs|j|||\}}||kr+tS||kr;tS|j}t|dkrc|d }ndjg|D]}t|^qp}t||S(NiiRW(R~R<R>RgRPRRnRs(RRKRMRFRRR((s?/usr/lib/python2.7/site-packages/pip/_vendor/distlib/version.pyRtus	
((R	R
RR0RHR~RuRvRwRxRyRzR{Rt(((s?/usr/lib/python2.7/site-packages/pip/_vendor/distlib/version.pyR-s&
								s[.+-]$R,s^[.](\d)s0.\1s^[.-]s
^\((.*)\)$s\1s^v(ersion)?\s*(\d+)s\2s^r(ev)?\s*(\d+)s[.]{2,}RWs\b(alfa|apha)\btalphas\b(pre-alpha|prealpha)\bs	pre.alphas	\(beta\)$tbetas
^[:~._+-]+s
[,*")([\]]s[~:+_ -]s\.$s
(\d+(\.\d+)*)c	Cs|jj}x&tD]\}}|j||}qW|sJd}ntj|}|snd}|}n|jdjd}g|D]}t|^q}x#t	|dkr|j
dqWt	|dkr||j}nDdjg|dD]}t
|^q||j}|d }djg|D]}t
|^qB}|j}|rx)tD]\}}|j||}qvWn|s|}n&d|krdnd}|||}t|sd}n|S(	s
    Try to suggest a semantic form for a version for which
    _suggest_normalized_version couldn't come up with anything.
    s0.0.0iRWiRbt-R|N(R
R7t
_REPLACEMENTStsubt_NUMERIC_PREFIXR4R5R9RURPR?tendRRnt_SUFFIX_REPLACEMENTSt	is_semverR1(	RRQtpattreplRARFtsuffixRtsep((s?/usr/lib/python2.7/site-packages/pip/_vendor/distlib/version.pyt_suggest_semantic_versions:		:
(		cCs yt||SWntk
r%nX|j}xSd2d3d4d5d6d7d8d9d:d;d<d=d>d?d@fD]\}}|j||}qfWtjdd|}tjdd|}tjdd|}tjdd|}tjdd|}|jdr
|d }ntjd!d|}tjd"d#|}tjd$d%|}tjd&d|}tjd'd(|}tjd)d(|}tjd*d
|}tjd+d,|}tjd-d%|}tjd.d/|}tjd0d1|}yt|Wntk
rdA}nX|S(BsSuggest a normalized version close to the given version string.

    If you have a version string that isn't rational (i.e. NormalizedVersion
    doesn't like it) then you might be able to get an equivalent (or close)
    rational version from this function.

    This does a number of simple normalizations to the given string, based
    on observation of versions currently in use on PyPI. Given a dump of
    those version during PyCon 2009, 4287 of them:
    - 2312 (53.93%) match NormalizedVersion without change
      with the automatic suggestion
    - 3474 (81.04%) match when using this suggestion method

    @param s {str} An irrational version string.
    @returns A rational version string, or None, if couldn't determine one.
    s-alphaRXs-betaRhRRRiR)s-finalR,s-pres-releases.releases-stableR|RWRZt s.finalR[spre$tpre0sdev$tdev0s([abc]|rc)[\-\.](\d+)$s\1\2s[\-\.](dev)[\-\.]?r?(\d+)$s.\1\2s[.~]?([abc])\.?s\1R(is\b0+(\d+)(?!\d)s(\d+[abc])$s\g<1>0s\.?(dev-r|dev\.r)\.?(\d+)$s.dev\2s-(a|b|c)(\d+)$s[\.\-](dev|devel)$s.dev0s(?![\.\-])dev$s(final|stable)$s\.?(r|-|-r)\.?(\d+)$s.post\2s\.?(dev|git|bzr)\.?(\d+)$s\.?(pre|preview|-c)(\d+)$sc\g<2>sp(\d+)$s.post\1(s-alphaRX(s-betaRh(RRX(RRh(RiR)(s-finalR,(s-preR)(s-releaseR,(s.releaseR,(s-stableR,(R|RW(RZRW(RR,(s.finalR,(R[R,N(RfRR7treplaceRSRRoR1(RtrstorigR((s?/usr/lib/python2.7/site-packages/pip/_vendor/distlib/version.pyt_suggest_normalized_versionsH

		
	

s([a-z]+|\d+|[\.-])R)R`tpreviewsfinal-RRit@RbcCsd}g}x||D]}|jdr|dkrgx'|rc|ddkrc|jq@Wnx'|r|ddkr|jqjWn|j|qWt|S(NcSsg}xtj|jD]j}tj||}|rd|d koUdknrl|jd}n
d|}|j|qqW|jd|S(Nt0it9it*s*final(t
_VERSION_PARTR9R7t_VERSION_REPLACERItzfillR?(RRQR*((s?/usr/lib/python2.7/site-packages/pip/_vendor/distlib/version.pyt	get_partsIs 

Rs*finalis*final-t00000000(RotpopR?R@(RRRQR*((s?/usr/lib/python2.7/site-packages/pip/_vendor/distlib/version.pyt_legacy_keyHs	
cBs eZdZedZRS(cCs
t|S(N(R(RR((s?/usr/lib/python2.7/site-packages/pip/_vendor/distlib/version.pyRcscCsRt}xE|jD]:}t|tr|jdr|dkrt}PqqW|S(NRs*final(R>RRGRRoR<(RRQRp((s?/usr/lib/python2.7/site-packages/pip/_vendor/distlib/version.pyR%fs(R	R
RR&R%(((s?/usr/lib/python2.7/site-packages/pip/_vendor/distlib/version.pyRbs	cBs?eZeZeejZdedt
numeric_reR4RntloggertwarningR<R5trsplitRs(RRKRMRFRAR((s?/usr/lib/python2.7/site-packages/pip/_vendor/distlib/version.pyRtys	
(R	R
RR0tdictR'RHRSRTRRt(((s?/usr/lib/python2.7/site-packages/pip/_vendor/distlib/version.pyRqs

sN^(\d+)\.(\d+)\.(\d+)(-[a-z0-9]+(\.[a-z0-9-]+)*)?(\+[a-z0-9]+(\.[a-z0-9-]+)*)?$cCs
tj|S(N(t
_SEMVER_RER4(R((s?/usr/lib/python2.7/site-packages/pip/_vendor/distlib/version.pyRsc
Csd}t|}|s*t|n|j}g|d D]}t|^qA\}}}||dd||dd}}	|||f||	fS(NcSsi|dkr|f}nM|djd}tg|D]'}|jrV|jdn|^q5}|S(NiRWi(R1R9R@R]R(RtabsentRQRR*((s?/usr/lib/python2.7/site-packages/pip/_vendor/distlib/version.pyt
make_tuples
:it|iR(RRR5RU(
RRRAR5RtmajortminortpatchR`tbuild((s?/usr/lib/python2.7/site-packages/pip/_vendor/distlib/version.pyt
_semantic_keys	
,'cBs eZdZedZRS(cCs
t|S(N(R(RR((s?/usr/lib/python2.7/site-packages/pip/_vendor/distlib/version.pyRscCs|jdddkS(NiiR(R(R((s?/usr/lib/python2.7/site-packages/pip/_vendor/distlib/version.pyR%s(R	R
RR&R%(((s?/usr/lib/python2.7/site-packages/pip/_vendor/distlib/version.pyRs	cBseZeZRS((R	R
RR0(((s?/usr/lib/python2.7/site-packages/pip/_vendor/distlib/version.pyRst
VersionSchemecBs8eZddZdZdZdZdZRS(cCs||_||_||_dS(N(R8tmatchert	suggester(RR8RR((s?/usr/lib/python2.7/site-packages/pip/_vendor/distlib/version.pyRs		cCs8y|jj|t}Wntk
r3t}nX|S(N(RR0R<RR>(RRRQ((s?/usr/lib/python2.7/site-packages/pip/_vendor/distlib/version.pytis_valid_versions


cCs5y|j|t}Wntk
r0t}nX|S(N(RR<RR>(RRRQ((s?/usr/lib/python2.7/site-packages/pip/_vendor/distlib/version.pytis_valid_matchers



cCs|jd|S(s:
        Used for processing some metadata fields
        sdummy_name (%s)(R(RR((s?/usr/lib/python2.7/site-packages/pip/_vendor/distlib/version.pytis_valid_constraint_listscCs+|jdkrd}n|j|}|S(N(RR1(RRRQ((s?/usr/lib/python2.7/site-packages/pip/_vendor/distlib/version.pytsuggests	N(R	R
R1RRRRR(((s?/usr/lib/python2.7/site-packages/pip/_vendor/distlib/version.pyRs
			t
normalizedcCs|S(N((RR((s?/usr/lib/python2.7/site-packages/pip/_vendor/distlib/version.pyR+R,tlegacytsemantictdefaultcCs'|tkrtd|nt|S(Nsunknown scheme name: %r(t_SCHEMESR2(R6((s?/usr/lib/python2.7/site-packages/pip/_vendor/distlib/version.pyRs()RtloggingRStcompatRt__all__t	getLoggerR	RR2RtobjectRR'RTR\ReRfRRsRRRRRRtIRR1RRRRRRRRRRRR(((s?/usr/lib/python2.7/site-packages/pip/_vendor/distlib/version.pyt	s~1k	=$	W	.	r
					#	
	
PK
Z"gg-site-packages/pip/_vendor/distlib/version.pycnu[
abc@srdZddlZddlZddlmZddddd	d
ddgZejeZde	fd
YZ
defdYZdefdYZ
ejdZdZeZdefdYZdZde
fdYZejddfejddfejddfejddfejddfejd dfejd!d"fejd#d$fejd%d&fejd'd(ff
Zejd)dfejd*dfejd+d"fejd!d"fejd,dffZejd-Zd.Zd/Zejd0ejZid1d26d1d36d4d56d1d66d7d86dd6dd"6Zd9Zdefd:YZde
fd;YZ ejd<ejZ!d=Z"d>Z#d	efd?YZ$d
e
fd@YZ%dAefdBYZ&ie&eeedC6e&ee dDdE6e&e#e%edF6Z'e'dCe'dGtt|dksVtdS(Ni(tstript_stringtparset_partst
isinstancettupletAssertionErrortlen(tselftstparts((s?/usr/lib/python2.7/site-packages/pip/_vendor/distlib/version.pyt__init__scCstddS(Nsplease implement in a subclass(tNotImplementedError(RR((s?/usr/lib/python2.7/site-packages/pip/_vendor/distlib/version.pyR$scCs5t|t|kr1td||fndS(Nscannot compare %r and %r(ttypet	TypeError(Rtother((s?/usr/lib/python2.7/site-packages/pip/_vendor/distlib/version.pyt_check_compatible'scCs|j||j|jkS(N(RR(RR((s?/usr/lib/python2.7/site-packages/pip/_vendor/distlib/version.pyt__eq__+s
cCs|j|S(N(R(RR((s?/usr/lib/python2.7/site-packages/pip/_vendor/distlib/version.pyt__ne__/scCs|j||j|jkS(N(RR(RR((s?/usr/lib/python2.7/site-packages/pip/_vendor/distlib/version.pyt__lt__2s
cCs|j|p|j|S(N(R R(RR((s?/usr/lib/python2.7/site-packages/pip/_vendor/distlib/version.pyt__gt__6scCs|j|p|j|S(N(R R(RR((s?/usr/lib/python2.7/site-packages/pip/_vendor/distlib/version.pyt__le__9scCs|j|p|j|S(N(R!R(RR((s?/usr/lib/python2.7/site-packages/pip/_vendor/distlib/version.pyt__ge__<scCs
t|jS(N(thashR(R((s?/usr/lib/python2.7/site-packages/pip/_vendor/distlib/version.pyt__hash__@scCsd|jj|jfS(Ns%s('%s')(t	__class__R	R(R((s?/usr/lib/python2.7/site-packages/pip/_vendor/distlib/version.pyt__repr__CscCs|jS(N(R(R((s?/usr/lib/python2.7/site-packages/pip/_vendor/distlib/version.pyt__str__FscCstddS(NsPlease implement in subclasses.(R(R((s?/usr/lib/python2.7/site-packages/pip/_vendor/distlib/version.pyt
is_prereleaseIs(R	R
RRRRRR R!R"R#R%R'R(tpropertyR)(((s?/usr/lib/python2.7/site-packages/pip/_vendor/distlib/version.pyRs												tMatchercBseZdZejdZejdZejdZidd6dd6dd6d	d
6dd6d
d6dd6dd6Z	dZ
dZedZ
dZdZdZdZdZdZRS(s^(\w[\s\w'.-]*)(\((.*)\))?s'^(<=|>=|<|>|!=|={2,3}|~=)?\s*([^\s,]+)$s
^\d+(\.\d+)*$cCs
||kS(N((tvtctp((s?/usr/lib/python2.7/site-packages/pip/_vendor/distlib/version.pytWttcCs||kp||kS(N((R,R-R.((s?/usr/lib/python2.7/site-packages/pip/_vendor/distlib/version.pyR/YR0s<=cCs||kp||kS(N((R,R-R.((s?/usr/lib/python2.7/site-packages/pip/_vendor/distlib/version.pyR/ZR0s>=cCs
||kS(N((R,R-R.((s?/usr/lib/python2.7/site-packages/pip/_vendor/distlib/version.pyR/[R0s==cCs
||kS(N((R,R-R.((s?/usr/lib/python2.7/site-packages/pip/_vendor/distlib/version.pyR/\R0s===cCs||kp||kS(N((R,R-R.((s?/usr/lib/python2.7/site-packages/pip/_vendor/distlib/version.pyR/^R0s~=cCs
||kS(N((R,R-R.((s?/usr/lib/python2.7/site-packages/pip/_vendor/distlib/version.pyR/_R0s!=c
Cs|jdkrtdn|j|_}|jj|}|s\td|n|jd}|dj|_|jj	|_
g}|drg|djdD]}|j^q}x|D]}|jj|}|s
td||fn|j}|dp#d}|d	}|j
d
r|dkr^td
|n|d t}}	|jj|s|j|qn|j|t}}	|j|||	fqWnt||_dS(NsPlease specify a version classs
Not valid: %rR0iit,sInvalid %r in %rs~=is.*s==s!=s#'.*' not allowed for %r constraintsi(s==s!=(t
version_classtNonet
ValueErrorR
Rtdist_retmatchtgroupstnametlowertkeytsplittcomp_retendswithtTruetnum_retFalsetappendRR(
RRtmR9tclistR-tconstraintstoptvntprefix((s?/usr/lib/python2.7/site-packages/pip/_vendor/distlib/version.pyRbs:
,


cCst|tr!|j|}nx|jD]\}}}|jj|}t|trmt||}n|sd||jjf}t	|n||||s+t
Sq+WtS(s
        Check if the provided version matches the constraints.

        :param version: The version to match against this instance.
        :type version: String or :class:`Version` instance.
        s%r not implemented for %s(RRR4Rt
_operatorstgettgetattrR&R	RRBR@(Rtversiontoperatort
constraintRItftmsg((s?/usr/lib/python2.7/site-packages/pip/_vendor/distlib/version.pyR8scCsJd}t|jdkrF|jdddkrF|jdd}n|S(Niis==s===(s==s===(R5RR(Rtresult((s?/usr/lib/python2.7/site-packages/pip/_vendor/distlib/version.pyt
exact_versions,cCsGt|t|ks*|j|jkrCtd||fndS(Nscannot compare %s and %s(RR:R(RR((s?/usr/lib/python2.7/site-packages/pip/_vendor/distlib/version.pyRs*cCs/|j||j|jko.|j|jkS(N(RR<R(RR((s?/usr/lib/python2.7/site-packages/pip/_vendor/distlib/version.pyRs
cCs|j|S(N(R(RR((s?/usr/lib/python2.7/site-packages/pip/_vendor/distlib/version.pyRscCst|jt|jS(N(R$R<R(R((s?/usr/lib/python2.7/site-packages/pip/_vendor/distlib/version.pyR%scCsd|jj|jfS(Ns%s(%r)(R&R	R(R((s?/usr/lib/python2.7/site-packages/pip/_vendor/distlib/version.pyR'scCs|jS(N(R(R((s?/usr/lib/python2.7/site-packages/pip/_vendor/distlib/version.pyR(sN(R	R
R5R4tretcompileR7R>RARJRR8R*RSRRRR%R'R((((s?/usr/lib/python2.7/site-packages/pip/_vendor/distlib/version.pyR+Ns,







	%						sk^v?(\d+!)?(\d+(\.\d+)*)((a|b|c|rc)(\d+))?(\.(post)(\d+))?(\.(dev)(\d+))?(\+([a-zA-Z\d]+(\.[a-zA-Z\d]+)?))?$cCsK|j}tj|}|s4td|n|j}td|djdD}x0t|dkr|ddkr|d }qfW|dsd}nt|d}|dd!}|d	d
!}|dd!}|d
}|dkrd}n|dt|df}|dkr.d}n|dt|df}|dkr]d}n|dt|df}|dkrd}nfg}	xQ|jdD]@}
|
j
rdt|
f}
nd|
f}
|	j|
qWt|	}|s|r|rd}qd}n|s&d}n|s5d}n||||||fS(NsNot a valid version: %scss|]}t|VqdS(N(tint(t.0R,((s?/usr/lib/python2.7/site-packages/pip/_vendor/distlib/version.pys	sit.iiiiii	i
ii
tatzt_tfinal(NN((NN((NN(((RYi(RZ(R[(R\(R
tPEP440_VERSION_RER8RR9RR=RRVR5tisdigitRC(RRDR9tnumstepochtpretposttdevtlocalRtpart((s?/usr/lib/python2.7/site-packages/pip/_vendor/distlib/version.pyt_pep_440_keysT#%
	



				
				cBsAeZdZdZedddddgZedZRS(sIA rational version.

    Good:
        1.2         # equivalent to "1.2.0"
        1.2.0
        1.2a1
        1.2.3a2
        1.2.3b1
        1.2.3c1
        1.2.3.4
        TODO: fill this out

    Bad:
        1           # minimum two numbers
        1.2a        # release level must have a release serial
        1.2.3b
    cCsQt|}tj|}|j}td|djdD|_|S(Ncss|]}t|VqdS(N(RV(RWR,((s?/usr/lib/python2.7/site-packages/pip/_vendor/distlib/version.pys	siRX(t_normalized_keyR]R8R9RR=t_release_clause(RRRRRDR9((s?/usr/lib/python2.7/site-packages/pip/_vendor/distlib/version.pyRs
&RYtbR-trcRccstfdjDS(Nc3s(|]}|r|djkVqdS(iN(tPREREL_TAGS(RWtt(R(s?/usr/lib/python2.7/site-packages/pip/_vendor/distlib/version.pys	s(tanyR(R((Rs?/usr/lib/python2.7/site-packages/pip/_vendor/distlib/version.pyR)s(R	R
RRtsetRkR*R)(((s?/usr/lib/python2.7/site-packages/pip/_vendor/distlib/version.pyRs	cCsUt|}t|}||kr(tS|j|s;tSt|}||dkS(NRX(tstrR@t
startswithRBR(txtytn((s?/usr/lib/python2.7/site-packages/pip/_vendor/distlib/version.pyt
_match_prefix"scBseZeZidd6dd6dd6dd6dd	6d
d6dd
6dd6ZdZdZdZdZdZ	dZ
dZdZdZ
RS(t_match_compatibles~=t	_match_ltR1t	_match_gtR2t	_match_les<=t	_match_ges>=t	_match_eqs==t_match_arbitrarys===t	_match_nes!=cCsx|r"d|ko|jd}n|jdo:|jd}|rn|jjddd}|j|}n||fS(Nt+iii(RRR=R4(RRMRORItstrip_localR((s?/usr/lib/python2.7/site-packages/pip/_vendor/distlib/version.pyt
_adjust_local<scCsj|j|||\}}||kr+tS|j}djg|D]}t|^qA}t||S(NRX(RRBRhtjoinRoRt(RRMRORItrelease_clausetitpfx((s?/usr/lib/python2.7/site-packages/pip/_vendor/distlib/version.pyRvJs	(cCsj|j|||\}}||kr+tS|j}djg|D]}t|^qA}t||S(NRX(RRBRhRRoRt(RRMRORIRRR((s?/usr/lib/python2.7/site-packages/pip/_vendor/distlib/version.pyRwRs	(cCs%|j|||\}}||kS(N(R(RRMRORI((s?/usr/lib/python2.7/site-packages/pip/_vendor/distlib/version.pyRxZscCs%|j|||\}}||kS(N(R(RRMRORI((s?/usr/lib/python2.7/site-packages/pip/_vendor/distlib/version.pyRy^scCsC|j|||\}}|s0||k}nt||}|S(N(RRt(RRMRORIRR((s?/usr/lib/python2.7/site-packages/pip/_vendor/distlib/version.pyRzbs
cCst|t|kS(N(Ro(RRMRORI((s?/usr/lib/python2.7/site-packages/pip/_vendor/distlib/version.pyR{jscCsD|j|||\}}|s0||k}nt||}|S(N(RRt(RRMRORIRR((s?/usr/lib/python2.7/site-packages/pip/_vendor/distlib/version.pyR|ms
cCs|j|||\}}||kr+tS||kr;tS|j}t|dkrc|d }ndjg|D]}t|^qp}t||S(NiiRX(RR@RBRhRRRoRt(RRMRORIRRR((s?/usr/lib/python2.7/site-packages/pip/_vendor/distlib/version.pyRuus	
((R	R
RR4RJRRvRwRxRyRzR{R|Ru(((s?/usr/lib/python2.7/site-packages/pip/_vendor/distlib/version.pyR-s&
								s[.+-]$R0s^[.](\d)s0.\1s^[.-]s
^\((.*)\)$s\1s^v(ersion)?\s*(\d+)s\2s^r(ev)?\s*(\d+)s[.]{2,}RXs\b(alfa|apha)\btalphas\b(pre-alpha|prealpha)\bs	pre.alphas	\(beta\)$tbetas
^[:~._+-]+s
[,*")([\]]s[~:+_ -]s\.$s
(\d+(\.\d+)*)c	Cs|jj}x&tD]\}}|j||}qW|sJd}ntj|}|snd}|}n|jdjd}g|D]}t|^q}x#t	|dkr|j
dqWt	|dkr||j}nDdjg|dD]}t
|^q||j}|d }djg|D]}t
|^qB}|j}|rx)tD]\}}|j||}qvWn|s|}n&d|krdnd}|||}t|sd}n|S(	s
    Try to suggest a semantic form for a version for which
    _suggest_normalized_version couldn't come up with anything.
    s0.0.0iRXiRct-R}N(R
R;t
_REPLACEMENTStsubt_NUMERIC_PREFIXR8R9R=RVRRCtendRRot_SUFFIX_REPLACEMENTSt	is_semverR5(	RRRtpattreplRDRItsuffixRtsep((s?/usr/lib/python2.7/site-packages/pip/_vendor/distlib/version.pyt_suggest_semantic_versions:		:
(		cCs yt||SWntk
r%nX|j}xSd2d3d4d5d6d7d8d9d:d;d<d=d>d?d@fD]\}}|j||}qfWtjdd|}tjdd|}tjdd|}tjdd|}tjdd|}|jdr
|d }ntjd!d|}tjd"d#|}tjd$d%|}tjd&d|}tjd'd(|}tjd)d(|}tjd*d
|}tjd+d,|}tjd-d%|}tjd.d/|}tjd0d1|}yt|Wntk
rdA}nX|S(BsSuggest a normalized version close to the given version string.

    If you have a version string that isn't rational (i.e. NormalizedVersion
    doesn't like it) then you might be able to get an equivalent (or close)
    rational version from this function.

    This does a number of simple normalizations to the given string, based
    on observation of versions currently in use on PyPI. Given a dump of
    those version during PyCon 2009, 4287 of them:
    - 2312 (53.93%) match NormalizedVersion without change
      with the automatic suggestion
    - 3474 (81.04%) match when using this suggestion method

    @param s {str} An irrational version string.
    @returns A rational version string, or None, if couldn't determine one.
    s-alphaRYs-betaRiRRRjR-s-finalR0s-pres-releases.releases-stableR}RXR[t s.finalR\spre$tpre0sdev$tdev0s([abc]|rc)[\-\.](\d+)$s\1\2s[\-\.](dev)[\-\.]?r?(\d+)$s.\1\2s[.~]?([abc])\.?s\1R,is\b0+(\d+)(?!\d)s(\d+[abc])$s\g<1>0s\.?(dev-r|dev\.r)\.?(\d+)$s.dev\2s-(a|b|c)(\d+)$s[\.\-](dev|devel)$s.dev0s(?![\.\-])dev$s(final|stable)$s\.?(r|-|-r)\.?(\d+)$s.post\2s\.?(dev|git|bzr)\.?(\d+)$s\.?(pre|preview|-c)(\d+)$sc\g<2>sp(\d+)$s.post\1(s-alphaRY(s-betaRi(RRY(RRi(RjR-(s-finalR0(s-preR-(s-releaseR0(s.releaseR0(s-stableR0(R}RX(R[RX(RR0(s.finalR0(R\R0N(RgRR;treplaceRTRRpR5(RtrstorigR((s?/usr/lib/python2.7/site-packages/pip/_vendor/distlib/version.pyt_suggest_normalized_versionsH

		
	

s([a-z]+|\d+|[\.-])R-Ratpreviewsfinal-RRjt@RccCsd}g}x||D]}|jdr|dkrgx'|rc|ddkrc|jq@Wnx'|r|ddkr|jqjWn|j|qWt|S(NcSsg}xtj|jD]j}tj||}|rd|d koUdknrl|jd}n
d|}|j|qqW|jd|S(Nt0it9it*s*final(t
_VERSION_PARTR=R;t_VERSION_REPLACERKtzfillRC(RRRR.((s?/usr/lib/python2.7/site-packages/pip/_vendor/distlib/version.pyt	get_partsIs 

Rs*finalis*final-t00000000(RptpopRCR(RRRRR.((s?/usr/lib/python2.7/site-packages/pip/_vendor/distlib/version.pyt_legacy_keyHs	
cBs eZdZedZRS(cCs
t|S(N(R(RR((s?/usr/lib/python2.7/site-packages/pip/_vendor/distlib/version.pyRcscCsRt}xE|jD]:}t|tr|jdr|dkrt}PqqW|S(NRs*final(RBRRRRpR@(RRRRq((s?/usr/lib/python2.7/site-packages/pip/_vendor/distlib/version.pyR)fs(R	R
RR*R)(((s?/usr/lib/python2.7/site-packages/pip/_vendor/distlib/version.pyRbs	cBs?eZeZeejZded	s~1k	=$	W	.	r
					#	
	
PK
Zx;x;,site-packages/pip/_vendor/distlib/scripts.pynu[# -*- coding: utf-8 -*-
#
# Copyright (C) 2013-2015 Vinay Sajip.
# Licensed to the Python Software Foundation under a contributor agreement.
# See LICENSE.txt and CONTRIBUTORS.txt.
#
from io import BytesIO
import logging
import os
import re
import struct
import sys

from .compat import sysconfig, detect_encoding, ZipFile
from .resources import finder
from .util import (FileOperator, get_export_entry, convert_path,
                   get_executable, in_venv)

logger = logging.getLogger(__name__)

_DEFAULT_MANIFEST = '''
<?xml version="1.0" encoding="UTF-8" standalone="yes"?>
<assembly xmlns="urn:schemas-microsoft-com:asm.v1"
 manifestVersion="1.0">
 <assemblyIdentity version="1.0.0.0"
 processorArchitecture="X86"
 name="%s"
 type="win32"/>

 <!-- Identify the application security requirements. -->
 <trustInfo xmlns="urn:schemas-microsoft-com:asm.v3">
  <security>
   <requestedPrivileges>
    <requestedExecutionLevel level="asInvoker" uiAccess="false"/>
    </requestedPrivileges>
  </security>
 </trustInfo>
</assembly>
'''.strip()

# check if Python is called on the first line with this expression
FIRST_LINE_RE = re.compile(b'^#!.*pythonw?[0-9.]*([ \t].*)?$')
SCRIPT_TEMPLATE = '''# -*- coding: utf-8 -*-
if __name__ == '__main__':
    import sys, re

    def _resolve(module, func):
        __import__(module)
        mod = sys.modules[module]
        parts = func.split('.')
        result = getattr(mod, parts.pop(0))
        for p in parts:
            result = getattr(result, p)
        return result

    try:
        sys.argv[0] = re.sub(r'(-script\.pyw?|\.exe)?$', '', sys.argv[0])

        func = _resolve('%(module)s', '%(func)s')
        rc = func() # None interpreted as 0
    except Exception as e:  # only supporting Python >= 2.6
        sys.stderr.write('%%s\\n' %% e)
        rc = 1
    sys.exit(rc)
'''


def _enquote_executable(executable):
    if ' ' in executable:
        # make sure we quote only the executable in case of env
        # for example /usr/bin/env "/dir with spaces/bin/jython"
        # instead of "/usr/bin/env /dir with spaces/bin/jython"
        # otherwise the whole '/usr/bin/env ...' string would get quoted
        if executable.startswith('/usr/bin/env '):
            env, _executable = executable.split(' ', 1)
            if ' ' in _executable and not _executable.startswith('"'):
                executable = '%s "%s"' % (env, _executable)
        else:
            if not executable.startswith('"'):
                executable = '"%s"' % executable
    return executable
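
# Illustrative behaviour of the quoting above (hypothetical paths):
#   _enquote_executable('/usr/bin/env /opt/my python/bin/python')
#       -> '/usr/bin/env "/opt/my python/bin/python"'
#   _enquote_executable('/opt/my python/bin/python')
#       -> '"/opt/my python/bin/python"'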


class ScriptMaker(object):
    """
    A class to copy or create scripts from source scripts or callable
    specifications.
    """
    script_template = SCRIPT_TEMPLATE

    executable = None  # for shebangs

    def __init__(self, source_dir, target_dir, add_launchers=True,
                 dry_run=False, fileop=None):
        self.source_dir = source_dir
        self.target_dir = target_dir
        self.add_launchers = add_launchers
        self.force = False
        self.clobber = False
        # It only makes sense to set mode bits on POSIX.
        self.set_mode = (os.name == 'posix') or (os.name == 'java' and
                                                 os._name == 'posix')
        self.variants = set(('', 'X.Y'))
        self._fileop = fileop or FileOperator(dry_run)

        self._is_nt = os.name == 'nt' or (
            os.name == 'java' and os._name == 'nt')

    def _get_alternate_executable(self, executable, options):
        if options.get('gui', False) and self._is_nt:  # pragma: no cover
            dn, fn = os.path.split(executable)
            fn = fn.replace('python', 'pythonw')
            executable = os.path.join(dn, fn)
        return executable

    if sys.platform.startswith('java'):  # pragma: no cover
        def _is_shell(self, executable):
            """
            Determine if the specified executable is a script
            (contains a #! line)
            """
            try:
                with open(executable) as fp:
                    return fp.read(2) == '#!'
            except (OSError, IOError):
                logger.warning('Failed to open %s', executable)
                return False

        def _fix_jython_executable(self, executable):
            if self._is_shell(executable):
                # Workaround for Jython is not needed on Linux systems.
                import java

                if java.lang.System.getProperty('os.name') == 'Linux':
                    return executable
            elif executable.lower().endswith('jython.exe'):
                # Use wrapper exe for Jython on Windows
                return executable
            return '/usr/bin/env %s' % executable

    def _get_shebang(self, encoding, post_interp=b'', options=None):
        enquote = True
        if self.executable:
            executable = self.executable
            enquote = False     # assume this will be taken care of
        elif not sysconfig.is_python_build():
            executable = get_executable()
        elif in_venv():  # pragma: no cover
            executable = os.path.join(sysconfig.get_path('scripts'),
                            'python%s' % sysconfig.get_config_var('EXE'))
        else:  # pragma: no cover
            executable = os.path.join(
                sysconfig.get_config_var('BINDIR'),
               'python%s%s' % (sysconfig.get_config_var('VERSION'),
                               sysconfig.get_config_var('EXE')))
        if options:
            executable = self._get_alternate_executable(executable, options)

        if sys.platform.startswith('java'):  # pragma: no cover
            executable = self._fix_jython_executable(executable)
        # Normalise case for Windows
        executable = os.path.normcase(executable)
        # If the user didn't specify an executable, it may be necessary to
        # cater for executable paths with spaces (not uncommon on Windows)
        if enquote:
            executable = _enquote_executable(executable)
        # Issue #51: don't use fsencode, since we later try to
        # check that the shebang is decodable using utf-8.
        executable = executable.encode('utf-8')
        # in case of IronPython, play safe and enable frames support
        if (sys.platform == 'cli' and '-X:Frames' not in post_interp
            and '-X:FullFrames' not in post_interp):  # pragma: no cover
            post_interp += b' -X:Frames'
        shebang = b'#!' + executable + post_interp + b'\n'
        # Python parser starts to read a script using UTF-8 until
        # it gets a #coding:xxx cookie. The shebang has to be the
        # first line of a file, the #coding:xxx cookie cannot be
        # written before. So the shebang has to be decodable from
        # UTF-8.
        try:
            shebang.decode('utf-8')
        except UnicodeDecodeError:  # pragma: no cover
            raise ValueError(
                'The shebang (%r) is not decodable from utf-8' % shebang)
        # If the script is encoded to a custom encoding (use a
        # #coding:xxx cookie), the shebang has to be decodable from
        # the script encoding too.
        if encoding != 'utf-8':
            try:
                shebang.decode(encoding)
            except UnicodeDecodeError:  # pragma: no cover
                raise ValueError(
                    'The shebang (%r) is not decodable '
                    'from the script encoding (%r)' % (shebang, encoding))
        return shebang

    def _get_script_text(self, entry):
        return self.script_template % dict(module=entry.prefix,
                                           func=entry.suffix)

    manifest = _DEFAULT_MANIFEST

    def get_manifest(self, exename):
        base = os.path.basename(exename)
        return self.manifest % base

    def _write_script(self, names, shebang, script_bytes, filenames, ext):
        use_launcher = self.add_launchers and self._is_nt
        linesep = os.linesep.encode('utf-8')
        if not use_launcher:
            script_bytes = shebang + linesep + script_bytes
        else:  # pragma: no cover
            if ext == 'py':
                launcher = self._get_launcher('t')
            else:
                launcher = self._get_launcher('w')
            stream = BytesIO()
            with ZipFile(stream, 'w') as zf:
                zf.writestr('__main__.py', script_bytes)
            zip_data = stream.getvalue()
            script_bytes = launcher + shebang + linesep + zip_data
        for name in names:
            outname = os.path.join(self.target_dir, name)
            if use_launcher:  # pragma: no cover
                n, e = os.path.splitext(outname)
                if e.startswith('.py'):
                    outname = n
                outname = '%s.exe' % outname
                try:
                    self._fileop.write_binary_file(outname, script_bytes)
                except Exception:
                    # Failed writing an executable - it might be in use.
                    logger.warning('Failed to write executable - trying to '
                                   'use .deleteme logic')
                    dfname = '%s.deleteme' % outname
                    if os.path.exists(dfname):
                        os.remove(dfname)       # Not allowed to fail here
                    os.rename(outname, dfname)  # nor here
                    self._fileop.write_binary_file(outname, script_bytes)
                    logger.debug('Able to replace executable using '
                                 '.deleteme logic')
                    try:
                        os.remove(dfname)
                    except Exception:
                        pass    # still in use - ignore error
            else:
                if self._is_nt and not outname.endswith('.' + ext):  # pragma: no cover
                    outname = '%s.%s' % (outname, ext)
                if os.path.exists(outname) and not self.clobber:
                    logger.warning('Skipping existing file %s', outname)
                    continue
                self._fileop.write_binary_file(outname, script_bytes)
                if self.set_mode:
                    self._fileop.set_executable_mode([outname])
            filenames.append(outname)

    def _make_script(self, entry, filenames, options=None):
        post_interp = b''
        if options:
            args = options.get('interpreter_args', [])
            if args:
                args = ' %s' % ' '.join(args)
                post_interp = args.encode('utf-8')
        shebang = self._get_shebang('utf-8', post_interp, options=options)
        script = self._get_script_text(entry).encode('utf-8')
        name = entry.name
        scriptnames = set()
        if '' in self.variants:
            scriptnames.add(name)
        if 'X' in self.variants:
            scriptnames.add('%s%s' % (name, sys.version[0]))
        if 'X.Y' in self.variants:
            scriptnames.add('%s-%s' % (name, sys.version[:3]))
        if options and options.get('gui', False):
            ext = 'pyw'
        else:
            ext = 'py'
        self._write_script(scriptnames, shebang, script, filenames, ext)

    def _copy_script(self, script, filenames):
        adjust = False
        script = os.path.join(self.source_dir, convert_path(script))
        outname = os.path.join(self.target_dir, os.path.basename(script))
        if not self.force and not self._fileop.newer(script, outname):
            logger.debug('not copying %s (up-to-date)', script)
            return

        # Always open the file, but ignore failures in dry-run mode --
        # that way, we'll get accurate feedback if we can read the
        # script.
        try:
            f = open(script, 'rb')
        except IOError:  # pragma: no cover
            if not self.dry_run:
                raise
            f = None
        else:
            first_line = f.readline()
            if not first_line:  # pragma: no cover
                logger.warning('%s: %s is an empty file (skipping)',
                               self.get_command_name(),  script)
                return

            match = FIRST_LINE_RE.match(first_line.replace(b'\r\n', b'\n'))
            if match:
                adjust = True
                post_interp = match.group(1) or b''

        if not adjust:
            if f:
                f.close()
            self._fileop.copy_file(script, outname)
            if self.set_mode:
                self._fileop.set_executable_mode([outname])
            filenames.append(outname)
        else:
            logger.info('copying and adjusting %s -> %s', script,
                        self.target_dir)
            if not self._fileop.dry_run:
                encoding, lines = detect_encoding(f.readline)
                f.seek(0)
                shebang = self._get_shebang(encoding, post_interp)
                if b'pythonw' in first_line:  # pragma: no cover
                    ext = 'pyw'
                else:
                    ext = 'py'
                n = os.path.basename(outname)
                self._write_script([n], shebang, f.read(), filenames, ext)
            if f:
                f.close()

    @property
    def dry_run(self):
        return self._fileop.dry_run

    @dry_run.setter
    def dry_run(self, value):
        self._fileop.dry_run = value

    if os.name == 'nt' or (os.name == 'java' and os._name == 'nt'):  # pragma: no cover
        # Executable launcher support.
        # Launchers are from https://bitbucket.org/vinay.sajip/simple_launcher/

        def _get_launcher(self, kind):
            if struct.calcsize('P') == 8:   # 64-bit
                bits = '64'
            else:
                bits = '32'
            name = '%s%s.exe' % (kind, bits)
            # Issue 31: don't hardcode an absolute package name, but
            # determine it relative to the current package
            distlib_package = __name__.rsplit('.', 1)[0]
            result = finder(distlib_package).find(name).bytes
            return result

    # Public API follows

    def make(self, specification, options=None):
        """
        Make a script.

        :param specification: The specification, which is either a valid export
                              entry specification (to make a script from a
                              callable) or a filename (to make a script by
                              copying from a source location).
        :param options: A dictionary of options controlling script generation.
        :return: A list of all absolute pathnames written to.
        """
        filenames = []
        entry = get_export_entry(specification)
        if entry is None:
            self._copy_script(specification, filenames)
        else:
            self._make_script(entry, filenames, options=options)
        return filenames

    def make_multiple(self, specifications, options=None):
        """
        Take a list of specifications and make scripts from them.
        :param specifications: A list of specifications.
        :return: A list of all absolute pathnames written to.
        """
        filenames = []
        for specification in specifications:
            filenames.extend(self.make(specification, options))
        return filenames
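

# A minimal usage sketch of the ScriptMaker API documented above. The entry
# point 'mycmd = mypkg.cli:main' is a hypothetical example; ScriptMaker only
# writes the wrapper script, it does not import the target module.
if __name__ == '__main__':  # pragma: no cover
    import tempfile
    maker = ScriptMaker(source_dir=None, target_dir=tempfile.mkdtemp())
    maker.variants = set([''])   # generate only 'mycmd', not 'mycmd-X.Y'
    written = maker.make('mycmd = mypkg.cli:main')
    print(written)               # absolute paths of the generated script(s)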
PK
Z:}yy+site-packages/pip/_vendor/distlib/wheel.pycnu[
abc@sddlmZddlZddlZddlZddlZddlmZddl	Z	ddl
Z
ddlZddlZddl
Z
ddlZddlZddlZddlZddlZddlZddlmZmZddlmZmZmZmZmZddlmZddlm Z m!Z!dd	l"m#Z#m$Z$m%Z%m&Z&m'Z'm(Z(m)Z)m*Z*m+Z+dd
l,m-Z-m.Z.ej/e0Z1e2a3e4edrdZ5n9ej6j7d
rdZ5nej6dkrdZ5ndZ5ej8dZ9e9rdej:d Z9nde9Z;e5e9Z<ej"j=j>ddj>ddZ?ej8dZ@e@oze@j7dre@j>ddZ@ndZAeAZ@[AejBdejCejDBZEejBdejCejDBZFejBdZGejBdZHd ZId!ZJe
jKd"kr$d#ZLn	d$ZLd%eMfd&YZNeNZOd'eMfd(YZPd)ZQeQZR[Qe2d*ZSdS(+i(tunicode_literalsN(tmessage_from_filei(t__version__tDistlibException(t	sysconfigtZipFiletfsdecodet	text_typetfilter(tInstalledDistribution(tMetadatatMETADATA_FILENAME(	tFileOperatortconvert_patht	CSVReadert	CSVWritertCachetcached_propertytget_cache_basetread_exportsttempdir(tNormalizedVersiontUnsupportedVersionErrorupypy_version_infouppujavaujyucliuipucpupy_version_nodotu%s%siupyu-u_u.uSOABIucpython-cCs|dtg}tjdr+|jdntjdrJ|jdntjddkro|jdnd	j|S(
NucpuPy_DEBUGudu
WITH_PYMALLOCumuPy_UNICODE_SIZEiuuu(t
VER_SUFFIXRtget_config_vartappendtjoin(tparts((s=/usr/lib/python2.7/site-packages/pip/_vendor/distlib/wheel.pyt_derive_abi;suz
(?P[^-]+)
-(?P\d+[^-]*)
(-(?P\d+[^-]*))?
-(?P\w+\d+(\.\w+\d+)*)
-(?P\w+)
-(?P\w+(\.\w+)*)
\.whl$
u7
(?P[^-]+)
-(?P\d+[^-]*)
(-(?P\d+[^-]*))?$
s
\s*#![^\r\n]*s^(\s*#!("[^"]+"|\S+))\s+(.*)$s#!pythons	#!pythonwu/cCs|S(N((to((s=/usr/lib/python2.7/site-packages/pip/_vendor/distlib/wheel.pyt]tcCs|jtjdS(Nu/(treplacetostsep(R((s=/usr/lib/python2.7/site-packages/pip/_vendor/distlib/wheel.pyR_RtMountercBs8eZdZdZdZddZdZRS(cCsi|_i|_dS(N(t
impure_wheelstlibs(tself((s=/usr/lib/python2.7/site-packages/pip/_vendor/distlib/wheel.pyt__init__cs	cCs!||j|<|jj|dS(N(R$R%tupdate(R&tpathnamet
extensions((s=/usr/lib/python2.7/site-packages/pip/_vendor/distlib/wheel.pytaddgs
cCsI|jj|}x0|D](\}}||jkr|j|=qqWdS(N(R$tpopR%(R&R)R*tktv((s=/usr/lib/python2.7/site-packages/pip/_vendor/distlib/wheel.pytremovekscCs"||jkr|}nd}|S(N(R%tNone(R&tfullnametpathtresult((s=/usr/lib/python2.7/site-packages/pip/_vendor/distlib/wheel.pytfind_moduleqs	cCs|tjkrtj|}nx||jkrAtd|ntj||j|}||_|jdd}t|dkr|d|_	n|S(Nuunable to find extension for %su.ii(
tsystmodulesR%tImportErrortimptload_dynamict
__loader__trsplittlent__package__(R&R1R3R((s=/usr/lib/python2.7/site-packages/pip/_vendor/distlib/wheel.pytload_modulexs	N(t__name__t
__module__R'R+R/R0R4R>(((s=/usr/lib/python2.7/site-packages/pip/_vendor/distlib/wheel.pyR#bs
			tWheelcBseZdZdZdZdeedZedZ	edZ
edZedZ
dZed	Zd
ZddZdZd
ZdZdddZdZdZdZdZdZedZdZdZddZRS(u@
    Class to build and install from Wheel files (PEP 427).
    iusha256cCs||_||_d|_tg|_dg|_dg|_tj|_	|dkr{d|_d|_|j
|_nEtj|}|r|jd}|d|_|djdd	|_|d
|_|j
|_ntjj|\}}tj|}|s!td|n|r?tjj||_	n||_|jd}|d|_|d|_|d
|_|djd
|_|djd
|_|djd
|_dS(uB
        Initialise an instance using a (valid) filename.
        uunoneuanyudummyu0.1unmuvnu_u-ubnuInvalid name or filename: %rupyu.ubiuarN(tsignt
should_verifytbuildvertPYVERtpyvertabitarchR!tgetcwdtdirnameR0tnametversiontfilenamet	_filenametNAME_VERSION_REtmatcht	groupdictR R2tsplittFILENAME_RERtabspath(R&RMRBtverifytmtinfoRJ((s=/usr/lib/python2.7/site-packages/pip/_vendor/distlib/wheel.pyR'sB					


	


cCs|jrd|j}nd}dj|j}dj|j}dj|j}|jjdd}d|j|||||fS(uJ
        Build and return a filename from the various components.
        u-uu.u_u%s-%s%s-%s-%s-%s.whl(RDRRFRGRHRLR RK(R&RDRFRGRHRL((s=/usr/lib/python2.7/site-packages/pip/_vendor/distlib/wheel.pyRMs	cCs+tjj|j|j}tjj|S(N(R!R2RRJRMtisfile(R&R2((s=/usr/lib/python2.7/site-packages/pip/_vendor/distlib/wheel.pytexistssccsNxG|jD]<}x3|jD](}x|jD]}|||fVq*WqWq
WdS(N(RFRGRH(R&RFRGRH((s=/usr/lib/python2.7/site-packages/pip/_vendor/distlib/wheel.pyttagsscCs8tjj|j|j}d|j|jf}d|}tjd}t	|d}|j
|}|djdd}tg|D]}t
|^q}	|	dkrd}
nt}
yItj||
}|j|"}||}
td	|
}WdQXWn!tk
r-td
|
nXWdQX|S(Nu%s-%su%s.dist-infouutf-8uru
Wheel-Versionu.iuMETADATAtfileobju$Invalid wheel, because %s is missing(ii(R!R2RRJRMRKRLtcodecst	getreaderRtget_wheel_metadataRRttupletintRt	posixpathtopenR
tKeyErrort
ValueError(R&R)tname_vertinfo_dirtwrappertzftwheel_metadatatwvtitfile_versiontfntmetadata_filenametbftwfR3((s=/usr/lib/python2.7/site-packages/pip/_vendor/distlib/wheel.pytmetadatas(
%	
cCsud|j|jf}d|}tj|d}|j|(}tjd|}t|}WdQXt|S(Nu%s-%su%s.dist-infouWHEELuutf-8(	RKRLRaRRbR\R]Rtdict(R&RhReRfRnRoRptmessage((s=/usr/lib/python2.7/site-packages/pip/_vendor/distlib/wheel.pyR^s
cCsFtjj|j|j}t|d}|j|}WdQX|S(Nur(R!R2RRJRMRR^(R&R)RhR3((s=/usr/lib/python2.7/site-packages/pip/_vendor/distlib/wheel.pyRWscCstj|}|r|j}|| ||}}d|jkrQt}nt}tj|}|rd|jd}nd}||}||}ns|jd}|jd}	|dks||	krd}
n&|||d!d	krd	}
nd}
t|
|}|S(
Ntpythonwt iRs
s
iis
(	t
SHEBANG_RERPtendtlowertSHEBANG_PYTHONWtSHEBANG_PYTHONtSHEBANG_DETAIL_REtgroupstfind(R&tdataRVRwtshebangtdata_after_shebangtshebang_pythontargstcrtlftterm((s=/usr/lib/python2.7/site-packages/pip/_vendor/distlib/wheel.pytprocess_shebangs,	

		cCs|dkr|j}nytt|}Wn!tk
rNtd|nX||j}tj|j	dj
d}||fS(NuUnsupported hash algorithm: %rt=uascii(R0t	hash_kindtgetattrthashlibtAttributeErrorRtdigesttbase64turlsafe_b64encodetrstriptdecode(R&R~RthasherR3((s=/usr/lib/python2.7/site-packages/pip/_vendor/distlib/wheel.pytget_hashs
!cCs~t|}ttjj||}|j|ddf|jt|%}x|D]}|j|q]WWdQXdS(Nu(	tlisttto_posixR!R2trelpathRtsortRtwriterow(R&trecordstrecord_pathtbasetptwritertrow((s=/usr/lib/python2.7/site-packages/pip/_vendor/distlib/wheel.pytwrite_record's

cCsg}|\}}tt|j}xs|D]k\}}	t|	d}
|
j}WdQXd|j|}tjj|	}
|j	|||
fq+Wtjj
|d}	|j||	|ttjj
|d}|j	||	fdS(Nurbu%s=%suRECORD(
RRRRbtreadRR!R2tgetsizeRRRR(R&RWtlibdirt
archive_pathsRtdistinfoRfRtapRtfR~Rtsize((s=/usr/lib/python2.7/site-packages/pip/_vendor/distlib/wheel.pyt
write_records0sc	Cs\t|dtjA}x7|D]/\}}tjd|||j||qWWdQXdS(NuwuWrote %s to %s in wheel(RtzipfiletZIP_DEFLATEDtloggertdebugtwrite(R&R)RRhRR((s=/usr/lib/python2.7/site-packages/pip/_vendor/distlib/wheel.pyt	build_zip@sc!
s|dkri}nttfdd$d}|dkrgd}tg}tg}tg}n!d}tg}dg}dg}|jd	||_|jd
||_	|jd||_
|}	d|j|jf}
d
|
}d|
}g}
xKd%D]C}|kr qn|}t
jj|rxt
j|D]\}}}x|D]}tt
jj||}t
jj||}tt
jj|||}|
j||f|dkrb|jdrbt|d}|j}WdQX|j|}t|d}|j|WdQXqbqbWqLWqqW|	}d}xt
j|D]\}}}||krxUt|D]G\}}t|}|jdrt
jj||}||=PqqW|stdnxl|D]d}t|jd&rqnt
jj||}tt
jj||}|
j||fqWqkWt
j|}xf|D]^}|d'kr|tt
jj||}tt
jj||}|
j||fq|q|Wd|p|jdtd |g}x4|j D])\}}}|jd!|||fqWt
jj|d}t|d"}|jd#j|WdQXtt
jj|d}|
j||f|j!||f|	|
t
jj|j"|j#} |j$| |
| S((u
        Build a wheel from files in specified paths, and use any specified tags
        when determining the name of the wheel.
        cs
|kS(N((R(tpaths(s=/usr/lib/python2.7/site-packages/pip/_vendor/distlib/wheel.pyRNRupurelibuplatlibiufalseutrueunoneuanyupyveruabiuarchu%s-%su%s.datau%s.dist-infoudatauheadersuscriptsu.exeurbNuwbu
.dist-infou(.dist-info directory expected, not foundu.pycu.pyouRECORDu	INSTALLERuSHAREDuWHEELuWheel-Version: %d.%duGenerator: distlib %suRoot-Is-Purelib: %su
Tag: %s-%s-%suwu
(upurelibuplatlib(udatauheadersuscripts(u.pycu.pyo(uRECORDu	INSTALLERuSHAREDuWHEEL(%R0RRtIMPVERtABItARCHREtgetRFRGRHRKRLR!R2tisdirtwalkRRRRRtendswithRbRRRt	enumeratetAssertionErrortlistdirt
wheel_versionRRZRRJRMR(!R&RRZRtlibkeytis_puret
default_pyvertdefault_abitdefault_archRRetdata_dirRfRtkeyR2troottdirstfilesRmRtrpRRR~RRktdnRiRFRGRHR)((Rs=/usr/lib/python2.7/site-packages/pip/_vendor/distlib/wheel.pytbuildFs	"					





%


cCKs	|j}|jd}|jdt}tjj|j|j}d|j|j	f}d|}	d|}
t
j|
t}t
j|
d}t
j|
d}
tj
d}t|d	}|j|}||}t|}Wd
QX|djdd
}tg|D]}t|^q}||jkrY|rY||j|n|ddkrv|d}n
|d}i}|j|
D}td|,}x"|D]}|d}||||jd.}6|6r|6jd/}6nWd
QXWnt1k
rt+j2d0nX|6r|6jd1i}>|6jd2i}?|>s|?r|jdd}@tjj?|@st@d3n|@|_xF|>jAD]8\}:}<d4|:|<f}A|j4|A}4|j5|4q(W|?ritd(6}BxL|?jAD];\}:}<d4|:|<f}A|j4|A|B}4|j5|4qWqqntjj||
}tB|}5tC|}|d=|d=||d5<|5jD||}|r9	|!j/|n|5jE|!|d6||5SWn+t1k
r	t+jFd7|jGnXWd
tHjI|"XWd
QXd
S(9u
        Install a wheel to the specified paths. If kwarg ``warner`` is
        specified, it should be a callable, which will be called with two
        tuples indicating the wheel version of this software and the wheel
        version in the file, if there is a discrepancy in the versions.
        This can be used to issue any warnings to raise any exceptions.
        If kwarg ``lib_only`` is True, only the purelib/platlib files are
        installed, and the headers, scripts, data and dist-info metadata are
        not written.

        The return value is a :class:`InstalledDistribution` instance unless
        ``options.lib_only`` is True, in which case the return value is ``None``.
        uwarnerulib_onlyu%s-%su%s.datau%s.dist-infouWHEELuRECORDuutf-8urNu
Wheel-Versionu.iuRoot-Is-Purelibutrueupurelibuplatlibtstreamiuuscriptstdry_runu/RECORD.jwsiusize mismatch for %su=udigest mismatch for %sulib_only: skipping %su.exeu/urbudigest mismatch on write for %su.pyuByte-compilation failedtexc_infoulib_only: returning Noneu1.0uentry_points.txtuconsoleuguiu
%s_scriptsuwrap_%su%s:%su %suAUnable to read legacy script metadata, so cannot generate scriptsu
extensionsupython.commandsu8Unable to read JSON metadata, so cannot generate scriptsuwrap_consoleuwrap_guiuValid script path not specifiedu%s = %sulibuprefixuinstallation failed.(uconsoleugui(JRRtFalseR!R2RRJRMRKRLRaRR\R]RRbRRRR_R`RRRtTruetrecordR5tdont_write_bytecodettempfiletmkdtempt
source_dirR0t
target_dirtinfolistt
isinstanceRRRtstrt	file_sizeRRRt
startswithRRR
tcopy_streamRtbyte_compilet	Exceptiontwarningtbasenametmaketset_executable_modetextendRWRtvaluestprefixtsuffixtflagstjsontloadRRdtitemsR	Rrtwrite_shared_locationstwrite_installed_filest	exceptiontrollbacktshutiltrmtree(CR&RtmakertkwargsRtwarnertlib_onlyR)ReRRft
metadata_nametwheel_metadata_nametrecord_nameRgRhtbwfRpRsRjRkRlRRRotreaderRRtdata_pfxtinfo_pfxt
script_pfxtfileoptbctoutfilestworkdirtzinfotarcnamet	u_arcnametkindtvalueR~t_Rt	is_scripttwhereRtoutfilet	newdigesttpycRmtworknameRt	filenamestdisttcommandsteptepdataRR-tdR.tstconsole_scriptstgui_scriptst
script_dirtscripttoptions((s=/usr/lib/python2.7/site-packages/pip/_vendor/distlib/wheel.pytinstallsD	

%



	
				
#

"

	

	


	

	




cCsGtdkrCtjjttdtjd }t	|antS(Nudylib-cachei(
tcacheR0R!R2RRRR5RLR(R&R((s=/usr/lib/python2.7/site-packages/pip/_vendor/distlib/wheel.pyt_get_dylib_caches
c
Cstjj|j|j}d|j|jf}d|}tj|d}tj	d}g}t
|dw}y\|j|G}||}	tj
|	}
|j}|j|}tjj|j|}
tjj|
stj|
nx|
jD]\}}tjj|
t|}tjj|sHt}nQtj|j}tjj|}|j|}tj|j}||k}|r|j||
n|j||fqWWdQXWntk
rnXWdQX|S(Nu%s-%su%s.dist-infou
EXTENSIONSuutf-8ur( R!R2RRJRMRKRLRaR\R]RRbRRRt
prefix_to_dirRRtmakedirsRR
RYRtstattst_mtimetdatetimet
fromtimestamptgetinfot	date_timetextractRRc(R&R)ReRfRRgR3RhRoRpR*RRt
cache_baseRKRtdestRt	file_timeRWt
wheel_time((s=/usr/lib/python2.7/site-packages/pip/_vendor/distlib/wheel.pyt_get_extensionss>
	!

cCs
t|S(uM
        Determine if a wheel is compatible with the running system.
        (t
is_compatible(R&((s=/usr/lib/python2.7/site-packages/pip/_vendor/distlib/wheel.pyR%scCstS(uP
        Determine if a wheel is asserted as mountable by its metadata.
        (R(R&((s=/usr/lib/python2.7/site-packages/pip/_vendor/distlib/wheel.pytis_mountablescCs
tjjtjj|j|j}|jsLd|}t|n|jsqd|}t|n|t	jkrt
jd|ns|rt	jj|nt	jj
d||j}|rtt	jkrt	jjtntj||ndS(Nu)Wheel %s not compatible with this Python.u$Wheel %s is marked as not mountable.u%s already in pathi(R!R2RTRRJRMR%RR&R5RRRtinsertR$t_hookt	meta_pathR+(R&RR)tmsgR*((s=/usr/lib/python2.7/site-packages/pip/_vendor/distlib/wheel.pytmounts"'

cCstjjtjj|j|j}|tjkrItjd|n]tjj	||t
jkrxt
j	|nt
jst
tjkrtjj	t
qndS(Nu%s not in path(
R!R2RTRRJRMR5RRR/R(R$R)(R&R)((s=/usr/lib/python2.7/site-packages/pip/_vendor/distlib/wheel.pytunmounts'	cCstjj|j|j}d|j|jf}d|}d|}tj|t}tj|d}tj|d}t	j
d}t|d}	|	j|}
||
}t
|}WdQX|djd	d
}
tg|
D]}t|^q}i}|	j|D}td|,}x"|D]}|d}|||Fsu0Cannot update non-compliant (PEP-440) version %rR2tlegacyuVersion updated from %r to %r(R0RR}RRR`RRRRR
RLRRR(	RLR2tupdatedR.RkRRtmdR0((s=/usr/lib/python2.7/site-packages/pip/_vendor/distlib/wheel.pytupdate_version;s(
0
!
		u%s-%su%s.dist-infouRECORDuruutf-8u..uinvalid entry in wheel: %rNRu.whlRu
wheel-update-tdiruNot a directory: %r(R!R2RRJRMRKRLRaRRRRRRRRR
R0RtmkstemptcloseRRRRRRtcopyfile(R&tmodifiertdest_dirRR.R3R)ReRfRRRhR-RRRR2toriginal_versionRtmodifiedtcurrent_versiontfdtnewpathRRRW((s=/usr/lib/python2.7/site-packages/pip/_vendor/distlib/wheel.pyR( sX	
	
		
(iiN(R?R@t__doc__RRR0RR'tpropertyRMRYRZRRqR^RWRRRRRRRRR$R%R&R+R,RUR((((s=/usr/lib/python2.7/site-packages/pip/_vendor/distlib/wheel.pyRAs2)							h				"				6cCstg}td}xGttjddddD](}|jdj|t|gq1Wg}xLtjD]>\}}}|j	drp|j|j
dddqpqpW|jtdkr|j
dtn|jdg}tg}tjd	kr=tjd
t}|r=|j\}	}}}
t|}|
g}|
dkrg|jd
n|
dkr|jdn|
dkr|jdn|
dkr|jdn|
dkr|jdnx`|dkr6x@|D]8}d|	|||f}
|
tkr|j|
qqW|d8}qWq=nxH|D]@}x7|D]/}
|jdjt|df||
fqQWqDWxwt|D]i\}}|jdjt|fddf|dkr|jdjt|dfddfqqWxwt|D]i\}}|jdjd|fddf|dkr|jdjd|dfddfqqWt|S(uG
    Return (pyver, abi, arch) tuples compatible with this Python.
    iiiuu.abiu.iunoneudarwinu(\w+)_(\d+)_(\d+)_(\w+)$ui386uppcufatux86_64ufat3uppc64ufat64uintelu	universalu%s_%s_%s_%suanyupy(ui386uppc(ui386uppcux86_64(uppc64ux86_64(ui386ux86_64(ui386ux86_64uinteluppcuppc64(RtrangeR5tversion_infoRRRR8tget_suffixesRRRRRR'RtplatformtreRPR|R`t
IMP_PREFIXRtset(tversionstmajortminortabisRRR3tarchesRVRKRHtmatchesRPRRGRkRL((s=/usr/lib/python2.7/site-packages/pip/_vendor/distlib/wheel.pytcompatible_tagss`	
$&$

		


1%0%0cCst|tst|}nt}|dkr9t}nxN|D]F\}}}||jkr@||jkr@||jkr@t}Pq@q@W|S(N(	RRARR0tCOMPATIBLE_TAGSRFRGRHR(twheelRZR3tverRGRH((s=/usr/lib/python2.7/site-packages/pip/_vendor/distlib/wheel.pyR%s	-(Tt
__future__RRR\Rtdistutils.utilt	distutilstemailRRR8RtloggingR!RaRERR5RRRRRtcompatRRRRRtdatabaseR	RqR
RtutilRR
RRRRRRRRLRRt	getLoggerR?RR0RthasattrRFRDRRRRBRERtget_platformR RRRtcompilet
IGNORECASEtVERBOSERSRORvR{RzRyR"RtobjectR#R(RARNROR%(((s=/usr/lib/python2.7/site-packages/pip/_vendor/distlib/wheel.pyts(@			

'				#		>	PK
Z=,site-packages/pip/_vendor/distlib/markers.pynu[# -*- coding: utf-8 -*-
#
# Copyright (C) 2012-2013 Vinay Sajip.
# Licensed to the Python Software Foundation under a contributor agreement.
# See LICENSE.txt and CONTRIBUTORS.txt.
#
"""Parser for the environment markers micro-language defined in PEP 345."""

import ast
import os
import sys
import platform

from .compat import python_implementation, string_types
from .util import in_venv

__all__ = ['interpret']


class Evaluator(object):
    """
    A limited evaluator for Python expressions.
    """

    operators = {
        'eq': lambda x, y: x == y,
        'gt': lambda x, y: x > y,
        'gte': lambda x, y: x >= y,
        'in': lambda x, y: x in y,
        'lt': lambda x, y: x < y,
        'lte': lambda x, y: x <= y,
        'not': lambda x: not x,
        'noteq': lambda x, y: x != y,
        'notin': lambda x, y: x not in y,
    }

    allowed_values = {
        'sys_platform': sys.platform,
        'python_version': '%s.%s' % sys.version_info[:2],
        # parsing sys.platform is not reliable, but there is no other
        # way to get e.g. 2.7.2+, and the PEP is defined with sys.version
        'python_full_version': sys.version.split(' ', 1)[0],
        'os_name': os.name,
        'platform_in_venv': str(in_venv()),
        'platform_release': platform.release(),
        'platform_version': platform.version(),
        'platform_machine': platform.machine(),
        'platform_python_implementation': python_implementation(),
    }

    def __init__(self, context=None):
        """
        Initialise an instance.

        :param context: If specified, names are looked up in this mapping.
        """
        self.context = context or {}
        self.source = None

    def get_fragment(self, offset):
        """
        Get the part of the source which is causing a problem.
        """
        fragment_len = 10
        s = '%r' % (self.source[offset:offset + fragment_len])
        if offset + fragment_len < len(self.source):
            s += '...'
        return s

    def get_handler(self, node_type):
        """
        Get a handler for the specified AST node type.
        """
        return getattr(self, 'do_%s' % node_type, None)

    def evaluate(self, node, filename=None):
        """
        Evaluate a source string or node, using ``filename`` when
        displaying errors.
        """
        if isinstance(node, string_types):
            self.source = node
            kwargs = {'mode': 'eval'}
            if filename:
                kwargs['filename'] = filename
            try:
                node = ast.parse(node, **kwargs)
            except SyntaxError as e:
                s = self.get_fragment(e.offset)
                raise SyntaxError('syntax error %s' % s)
        node_type = node.__class__.__name__.lower()
        handler = self.get_handler(node_type)
        if handler is None:
            if self.source is None:
                s = '(source not available)'
            else:
                s = self.get_fragment(node.col_offset)
            raise SyntaxError("don't know how to evaluate %r %s" % (
                node_type, s))
        return handler(node)

    def get_attr_key(self, node):
        assert isinstance(node, ast.Attribute), 'attribute node expected'
        return '%s.%s' % (node.value.id, node.attr)

    def do_attribute(self, node):
        if not isinstance(node.value, ast.Name):
            # Use a dump of the offending node so the error below can be
            # raised without referencing an unbound 'key'.
            key = ast.dump(node.value)
            valid = False
        else:
            key = self.get_attr_key(node)
            valid = key in self.context or key in self.allowed_values
        if not valid:
            raise SyntaxError('invalid expression: %s' % key)
        if key in self.context:
            result = self.context[key]
        else:
            result = self.allowed_values[key]
        return result

    def do_boolop(self, node):
        result = self.evaluate(node.values[0])
        is_or = node.op.__class__ is ast.Or
        is_and = node.op.__class__ is ast.And
        assert is_or or is_and
        if (is_and and result) or (is_or and not result):
            for n in node.values[1:]:
                result = self.evaluate(n)
                if (is_or and result) or (is_and and not result):
                    break
        return result

    def do_compare(self, node):
        def sanity_check(lhsnode, rhsnode):
            valid = True
            if isinstance(lhsnode, ast.Str) and isinstance(rhsnode, ast.Str):
                valid = False
            #elif (isinstance(lhsnode, ast.Attribute)
            #      and isinstance(rhsnode, ast.Attribute)):
            #    klhs = self.get_attr_key(lhsnode)
            #    krhs = self.get_attr_key(rhsnode)
            #    valid = klhs != krhs
            if not valid:
                s = self.get_fragment(node.col_offset)
                raise SyntaxError('Invalid comparison: %s' % s)

        lhsnode = node.left
        lhs = self.evaluate(lhsnode)
        result = True
        for op, rhsnode in zip(node.ops, node.comparators):
            sanity_check(lhsnode, rhsnode)
            op = op.__class__.__name__.lower()
            if op not in self.operators:
                raise SyntaxError('unsupported operation: %r' % op)
            rhs = self.evaluate(rhsnode)
            result = self.operators[op](lhs, rhs)
            if not result:
                break
            lhs = rhs
            lhsnode = rhsnode
        return result

    def do_expression(self, node):
        return self.evaluate(node.body)

    def do_name(self, node):
        valid = False
        if node.id in self.context:
            valid = True
            result = self.context[node.id]
        elif node.id in self.allowed_values:
            valid = True
            result = self.allowed_values[node.id]
        if not valid:
            raise SyntaxError('invalid expression: %s' % node.id)
        return result

    def do_str(self, node):
        return node.s


def interpret(marker, execution_context=None):
    """
    Interpret a marker and return a result depending on environment.

    :param marker: The marker to interpret.
    :type marker: str
    :param execution_context: The context used for name lookup.
    :type execution_context: mapping
    """
    return Evaluator(execution_context).evaluate(marker.strip())
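
# Illustrative usage (not part of the original module): a minimal sketch of
# evaluating PEP 345 environment markers against the running interpreter.
if __name__ == '__main__':  # pragma: no cover
    print(interpret("python_version >= '2.6'"))
    print(interpret("os_name == 'posix' and sys_platform != 'win32'"))
    # Names can also be supplied through an explicit execution context:
    print(interpret("extra == 'test'", {'extra': 'test'}))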
site-packages/pip/_vendor/distlib/index.pyc
[compiled bytecode: binary data, not recoverable as source. The readable strings indicate the module defines a PackageIndex class ("This class represents a package index compatible with PyPI, the Python Package Index") with methods for reading and saving the distutils .pypirc configuration, checking credentials, registering a distribution, signing files with GPG, uploading distributions and documentation, verifying signatures, downloading files with digest checking, sending requests through urllib handlers, encoding multipart form data, and searching the index via XML-RPC.]

site-packages/pip/_vendor/distlib/wheel.py
# -*- coding: utf-8 -*-
#
# Copyright (C) 2013-2016 Vinay Sajip.
# Licensed to the Python Software Foundation under a contributor agreement.
# See LICENSE.txt and CONTRIBUTORS.txt.
#
from __future__ import unicode_literals

import base64
import codecs
import datetime
import distutils.util
from email import message_from_file
import hashlib
import imp
import json
import logging
import os
import posixpath
import re
import shutil
import sys
import tempfile
import zipfile

from . import __version__, DistlibException
from .compat import sysconfig, ZipFile, fsdecode, text_type, filter
from .database import InstalledDistribution
from .metadata import Metadata, METADATA_FILENAME
from .util import (FileOperator, convert_path, CSVReader, CSVWriter, Cache,
                   cached_property, get_cache_base, read_exports, tempdir)
from .version import NormalizedVersion, UnsupportedVersionError

logger = logging.getLogger(__name__)

cache = None    # created when needed

if hasattr(sys, 'pypy_version_info'):
    IMP_PREFIX = 'pp'
elif sys.platform.startswith('java'):
    IMP_PREFIX = 'jy'
elif sys.platform == 'cli':
    IMP_PREFIX = 'ip'
else:
    IMP_PREFIX = 'cp'

VER_SUFFIX = sysconfig.get_config_var('py_version_nodot')
if not VER_SUFFIX:   # pragma: no cover
    VER_SUFFIX = '%s%s' % sys.version_info[:2]
PYVER = 'py' + VER_SUFFIX
IMPVER = IMP_PREFIX + VER_SUFFIX

ARCH = distutils.util.get_platform().replace('-', '_').replace('.', '_')

ABI = sysconfig.get_config_var('SOABI')
if ABI and ABI.startswith('cpython-'):
    ABI = ABI.replace('cpython-', 'cp')
else:
    def _derive_abi():
        parts = ['cp', VER_SUFFIX]
        if sysconfig.get_config_var('Py_DEBUG'):
            parts.append('d')
        if sysconfig.get_config_var('WITH_PYMALLOC'):
            parts.append('m')
        if sysconfig.get_config_var('Py_UNICODE_SIZE') == 4:
            parts.append('u')
        return ''.join(parts)
    ABI = _derive_abi()
    del _derive_abi

FILENAME_RE = re.compile(r'''
(?P<nm>[^-]+)
-(?P<vn>\d+[^-]*)
(-(?P<bn>\d+[^-]*))?
-(?P<py>\w+\d+(\.\w+\d+)*)
-(?P<bi>\w+)
-(?P<ar>\w+(\.\w+)*)
\.whl$
''', re.IGNORECASE | re.VERBOSE)

NAME_VERSION_RE = re.compile(r'''
(?P<nm>[^-]+)
-(?P<vn>\d+[^-]*)
(-(?P<bn>\d+[^-]*))?$
''', re.IGNORECASE | re.VERBOSE)

SHEBANG_RE = re.compile(br'\s*#![^\r\n]*')
SHEBANG_DETAIL_RE = re.compile(br'^(\s*#!("[^"]+"|\S+))\s+(.*)$')
SHEBANG_PYTHON = b'#!python'
SHEBANG_PYTHONW = b'#!pythonw'

if os.sep == '/':
    to_posix = lambda o: o
else:
    to_posix = lambda o: o.replace(os.sep, '/')


class Mounter(object):
    def __init__(self):
        self.impure_wheels = {}
        self.libs = {}

    def add(self, pathname, extensions):
        self.impure_wheels[pathname] = extensions
        self.libs.update(extensions)

    def remove(self, pathname):
        extensions = self.impure_wheels.pop(pathname)
        for k, v in extensions:
            if k in self.libs:
                del self.libs[k]

    def find_module(self, fullname, path=None):
        if fullname in self.libs:
            result = self
        else:
            result = None
        return result

    def load_module(self, fullname):
        if fullname in sys.modules:
            result = sys.modules[fullname]
        else:
            if fullname not in self.libs:
                raise ImportError('unable to find extension for %s' % fullname)
            result = imp.load_dynamic(fullname, self.libs[fullname])
            result.__loader__ = self
            parts = fullname.rsplit('.', 1)
            if len(parts) > 1:
                result.__package__ = parts[0]
        return result

_hook = Mounter()


class Wheel(object):
    """
    Class to build and install from Wheel files (PEP 427).
    """

    wheel_version = (1, 1)
    hash_kind = 'sha256'

    def __init__(self, filename=None, sign=False, verify=False):
        """
        Initialise an instance using a (valid) filename.
        """
        self.sign = sign
        self.should_verify = verify
        self.buildver = ''
        self.pyver = [PYVER]
        self.abi = ['none']
        self.arch = ['any']
        self.dirname = os.getcwd()
        if filename is None:
            self.name = 'dummy'
            self.version = '0.1'
            self._filename = self.filename
        else:
            m = NAME_VERSION_RE.match(filename)
            if m:
                info = m.groupdict('')
                self.name = info['nm']
                # Reinstate the local version separator
                self.version = info['vn'].replace('_', '-')
                self.buildver = info['bn']
                self._filename = self.filename
            else:
                dirname, filename = os.path.split(filename)
                m = FILENAME_RE.match(filename)
                if not m:
                    raise DistlibException('Invalid name or '
                                           'filename: %r' % filename)
                if dirname:
                    self.dirname = os.path.abspath(dirname)
                self._filename = filename
                info = m.groupdict('')
                self.name = info['nm']
                self.version = info['vn']
                self.buildver = info['bn']
                self.pyver = info['py'].split('.')
                self.abi = info['bi'].split('.')
                self.arch = info['ar'].split('.')

    @property
    def filename(self):
        """
        Build and return a filename from the various components.
        """
        if self.buildver:
            buildver = '-' + self.buildver
        else:
            buildver = ''
        pyver = '.'.join(self.pyver)
        abi = '.'.join(self.abi)
        arch = '.'.join(self.arch)
        # replace - with _ as a local version separator
        version = self.version.replace('-', '_')
        return '%s-%s%s-%s-%s-%s.whl' % (self.name, version, buildver,
                                         pyver, abi, arch)
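        # Illustrative note (not part of the original source): with the
        # constructor defaults (name 'dummy', version '0.1', pyver [PYVER],
        # abi ['none'], arch ['any']) this yields e.g.
        # 'dummy-0.1-py27-none-any.whl' on a CPython 2.7 build; the exact
        # py tag depends on VER_SUFFIX.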

    @property
    def exists(self):
        path = os.path.join(self.dirname, self.filename)
        return os.path.isfile(path)

    @property
    def tags(self):
        for pyver in self.pyver:
            for abi in self.abi:
                for arch in self.arch:
                    yield pyver, abi, arch

    @cached_property
    def metadata(self):
        pathname = os.path.join(self.dirname, self.filename)
        name_ver = '%s-%s' % (self.name, self.version)
        info_dir = '%s.dist-info' % name_ver
        wrapper = codecs.getreader('utf-8')
        with ZipFile(pathname, 'r') as zf:
            wheel_metadata = self.get_wheel_metadata(zf)
            wv = wheel_metadata['Wheel-Version'].split('.', 1)
            file_version = tuple([int(i) for i in wv])
            if file_version < (1, 1):
                fn = 'METADATA'
            else:
                fn = METADATA_FILENAME
            try:
                metadata_filename = posixpath.join(info_dir, fn)
                with zf.open(metadata_filename) as bf:
                    wf = wrapper(bf)
                    result = Metadata(fileobj=wf)
            except KeyError:
                raise ValueError('Invalid wheel, because %s is '
                                 'missing' % fn)
        return result

    def get_wheel_metadata(self, zf):
        name_ver = '%s-%s' % (self.name, self.version)
        info_dir = '%s.dist-info' % name_ver
        metadata_filename = posixpath.join(info_dir, 'WHEEL')
        with zf.open(metadata_filename) as bf:
            wf = codecs.getreader('utf-8')(bf)
            message = message_from_file(wf)
        return dict(message)

    @cached_property
    def info(self):
        pathname = os.path.join(self.dirname, self.filename)
        with ZipFile(pathname, 'r') as zf:
            result = self.get_wheel_metadata(zf)
        return result

    def process_shebang(self, data):
        m = SHEBANG_RE.match(data)
        if m:
            end = m.end()
            shebang, data_after_shebang = data[:end], data[end:]
            # Preserve any arguments after the interpreter
            if b'pythonw' in shebang.lower():
                shebang_python = SHEBANG_PYTHONW
            else:
                shebang_python = SHEBANG_PYTHON
            m = SHEBANG_DETAIL_RE.match(shebang)
            if m:
                args = b' ' + m.groups()[-1]
            else:
                args = b''
            shebang = shebang_python + args
            data = shebang + data_after_shebang
        else:
            cr = data.find(b'\r')
            lf = data.find(b'\n')
            if cr < 0 or cr > lf:
                term = b'\n'
            else:
                if data[cr:cr + 2] == b'\r\n':
                    term = b'\r\n'
                else:
                    term = b'\r'
            data = SHEBANG_PYTHON + term + data
        return data
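        # Illustrative examples (not part of the original source) of the
        # rewriting performed above, assuming simple POSIX shebangs:
        #   b'#!/usr/bin/python\nprint("hi")\n'    -> b'#!python\nprint("hi")\n'
        #   b'#!/usr/bin/python -O\nprint("hi")\n' -> b'#!python -O\nprint("hi")\n'
        # Data with no shebang at all gets b'#!python' prepended, followed by
        # a line terminator matching the rest of the data.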

    def get_hash(self, data, hash_kind=None):
        if hash_kind is None:
            hash_kind = self.hash_kind
        try:
            hasher = getattr(hashlib, hash_kind)
        except AttributeError:
            raise DistlibException('Unsupported hash algorithm: %r' % hash_kind)
        result = hasher(data).digest()
        result = base64.urlsafe_b64encode(result).rstrip(b'=').decode('ascii')
        return hash_kind, result

    def write_record(self, records, record_path, base):
        records = list(records) # make a copy for sorting
        p = to_posix(os.path.relpath(record_path, base))
        records.append((p, '', ''))
        records.sort()
        with CSVWriter(record_path) as writer:
            for row in records:
                writer.writerow(row)

    def write_records(self, info, libdir, archive_paths):
        records = []
        distinfo, info_dir = info
        hasher = getattr(hashlib, self.hash_kind)
        for ap, p in archive_paths:
            with open(p, 'rb') as f:
                data = f.read()
            digest = '%s=%s' % self.get_hash(data)
            size = os.path.getsize(p)
            records.append((ap, digest, size))

        p = os.path.join(distinfo, 'RECORD')
        self.write_record(records, p, libdir)
        ap = to_posix(os.path.join(info_dir, 'RECORD'))
        archive_paths.append((ap, p))

    def build_zip(self, pathname, archive_paths):
        with ZipFile(pathname, 'w', zipfile.ZIP_DEFLATED) as zf:
            for ap, p in archive_paths:
                logger.debug('Wrote %s to %s in wheel', p, ap)
                zf.write(p, ap)

    def build(self, paths, tags=None, wheel_version=None):
        """
        Build a wheel from files in specified paths, and use any specified tags
        when determining the name of the wheel.
        """
        if tags is None:
            tags = {}

        libkey = list(filter(lambda o: o in paths, ('purelib', 'platlib')))[0]
        if libkey == 'platlib':
            is_pure = 'false'
            default_pyver = [IMPVER]
            default_abi = [ABI]
            default_arch = [ARCH]
        else:
            is_pure = 'true'
            default_pyver = [PYVER]
            default_abi = ['none']
            default_arch = ['any']

        self.pyver = tags.get('pyver', default_pyver)
        self.abi = tags.get('abi', default_abi)
        self.arch = tags.get('arch', default_arch)

        libdir = paths[libkey]

        name_ver = '%s-%s' % (self.name, self.version)
        data_dir = '%s.data' % name_ver
        info_dir = '%s.dist-info' % name_ver

        archive_paths = []

        # First, stuff which is not in site-packages
        for key in ('data', 'headers', 'scripts'):
            if key not in paths:
                continue
            path = paths[key]
            if os.path.isdir(path):
                for root, dirs, files in os.walk(path):
                    for fn in files:
                        p = fsdecode(os.path.join(root, fn))
                        rp = os.path.relpath(p, path)
                        ap = to_posix(os.path.join(data_dir, key, rp))
                        archive_paths.append((ap, p))
                        if key == 'scripts' and not p.endswith('.exe'):
                            with open(p, 'rb') as f:
                                data = f.read()
                            data = self.process_shebang(data)
                            with open(p, 'wb') as f:
                                f.write(data)

        # Now, stuff which is in site-packages, other than the
        # distinfo stuff.
        path = libdir
        distinfo = None
        for root, dirs, files in os.walk(path):
            if root == path:
                # At the top level only, save distinfo for later
                # and skip it for now
                for i, dn in enumerate(dirs):
                    dn = fsdecode(dn)
                    if dn.endswith('.dist-info'):
                        distinfo = os.path.join(root, dn)
                        del dirs[i]
                        break
                assert distinfo, '.dist-info directory expected, not found'

            for fn in files:
                # comment out next suite to leave .pyc files in
                if fsdecode(fn).endswith(('.pyc', '.pyo')):
                    continue
                p = os.path.join(root, fn)
                rp = to_posix(os.path.relpath(p, path))
                archive_paths.append((rp, p))

        # Now distinfo. Assumed to be flat, i.e. os.listdir is enough.
        files = os.listdir(distinfo)
        for fn in files:
            if fn not in ('RECORD', 'INSTALLER', 'SHARED', 'WHEEL'):
                p = fsdecode(os.path.join(distinfo, fn))
                ap = to_posix(os.path.join(info_dir, fn))
                archive_paths.append((ap, p))

        wheel_metadata = [
            'Wheel-Version: %d.%d' % (wheel_version or self.wheel_version),
            'Generator: distlib %s' % __version__,
            'Root-Is-Purelib: %s' % is_pure,
        ]
        for pyver, abi, arch in self.tags:
            wheel_metadata.append('Tag: %s-%s-%s' % (pyver, abi, arch))
        p = os.path.join(distinfo, 'WHEEL')
        with open(p, 'w') as f:
            f.write('\n'.join(wheel_metadata))
        ap = to_posix(os.path.join(info_dir, 'WHEEL'))
        archive_paths.append((ap, p))

        # Now, at last, RECORD.
        # Paths in here are archive paths - nothing else makes sense.
        self.write_records((distinfo, info_dir), libdir, archive_paths)
        # Now, ready to build the zip file
        pathname = os.path.join(self.dirname, self.filename)
        self.build_zip(pathname, archive_paths)
        return pathname
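        # Illustrative sketch (not part of the original source): building a
        # wheel from a staged layout. The project name and paths are
        # hypothetical; the 'purelib' directory is expected to contain a
        # matching mypkg-1.0.dist-info subdirectory.
        #
        #   w = Wheel('mypkg-1.0')
        #   w.dirname = '/tmp/dist'
        #   path = w.build({'purelib': '/tmp/build/lib',
        #                   'scripts': '/tmp/build/scripts'})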

    def install(self, paths, maker, **kwargs):
        """
        Install a wheel to the specified paths. If kwarg ``warner`` is
        specified, it should be a callable, which will be called with two
        tuples indicating the wheel version of this software and the wheel
        version in the file, if there is a discrepancy in the versions.
        This can be used to issue any warnings or raise any exceptions.
        If kwarg ``lib_only`` is True, only the purelib/platlib files are
        installed, and the headers, scripts, data and dist-info metadata are
        not written.

        The return value is a :class:`InstalledDistribution` instance unless
        ``lib_only`` is True, in which case the return value is ``None``.
        """

        dry_run = maker.dry_run
        warner = kwargs.get('warner')
        lib_only = kwargs.get('lib_only', False)

        pathname = os.path.join(self.dirname, self.filename)
        name_ver = '%s-%s' % (self.name, self.version)
        data_dir = '%s.data' % name_ver
        info_dir = '%s.dist-info' % name_ver

        metadata_name = posixpath.join(info_dir, METADATA_FILENAME)
        wheel_metadata_name = posixpath.join(info_dir, 'WHEEL')
        record_name = posixpath.join(info_dir, 'RECORD')

        wrapper = codecs.getreader('utf-8')

        with ZipFile(pathname, 'r') as zf:
            with zf.open(wheel_metadata_name) as bwf:
                wf = wrapper(bwf)
                message = message_from_file(wf)
            wv = message['Wheel-Version'].split('.', 1)
            file_version = tuple([int(i) for i in wv])
            if (file_version != self.wheel_version) and warner:
                warner(self.wheel_version, file_version)

            if message['Root-Is-Purelib'] == 'true':
                libdir = paths['purelib']
            else:
                libdir = paths['platlib']

            records = {}
            with zf.open(record_name) as bf:
                with CSVReader(stream=bf) as reader:
                    for row in reader:
                        p = row[0]
                        records[p] = row

            data_pfx = posixpath.join(data_dir, '')
            info_pfx = posixpath.join(info_dir, '')
            script_pfx = posixpath.join(data_dir, 'scripts', '')

            # make a new instance rather than a copy of maker's,
            # as we mutate it
            fileop = FileOperator(dry_run=dry_run)
            fileop.record = True    # so we can rollback if needed

            bc = not sys.dont_write_bytecode    # Double negatives. Lovely!

            outfiles = []   # for RECORD writing

            # for script copying/shebang processing
            workdir = tempfile.mkdtemp()
            # set target dir later
            # we default add_launchers to False, as the
            # Python Launcher should be used instead
            maker.source_dir = workdir
            maker.target_dir = None
            try:
                for zinfo in zf.infolist():
                    arcname = zinfo.filename
                    if isinstance(arcname, text_type):
                        u_arcname = arcname
                    else:
                        u_arcname = arcname.decode('utf-8')
                    # The signature file won't be in RECORD,
                    # and we don't currently do anything with it
                    if u_arcname.endswith('/RECORD.jws'):
                        continue
                    row = records[u_arcname]
                    if row[2] and str(zinfo.file_size) != row[2]:
                        raise DistlibException('size mismatch for '
                                               '%s' % u_arcname)
                    if row[1]:
                        kind, value = row[1].split('=', 1)
                        with zf.open(arcname) as bf:
                            data = bf.read()
                        _, digest = self.get_hash(data, kind)
                        if digest != value:
                            raise DistlibException('digest mismatch for '
                                                   '%s' % arcname)

                    if lib_only and u_arcname.startswith((info_pfx, data_pfx)):
                        logger.debug('lib_only: skipping %s', u_arcname)
                        continue
                    is_script = (u_arcname.startswith(script_pfx)
                                 and not u_arcname.endswith('.exe'))

                    if u_arcname.startswith(data_pfx):
                        _, where, rp = u_arcname.split('/', 2)
                        outfile = os.path.join(paths[where], convert_path(rp))
                    else:
                        # meant for site-packages.
                        if u_arcname in (wheel_metadata_name, record_name):
                            continue
                        outfile = os.path.join(libdir, convert_path(u_arcname))
                    if not is_script:
                        with zf.open(arcname) as bf:
                            fileop.copy_stream(bf, outfile)
                        outfiles.append(outfile)
                        # Double check the digest of the written file
                        if not dry_run and row[1]:
                            with open(outfile, 'rb') as bf:
                                data = bf.read()
                                _, newdigest = self.get_hash(data, kind)
                                if newdigest != digest:
                                    raise DistlibException('digest mismatch '
                                                           'on write for '
                                                           '%s' % outfile)
                        if bc and outfile.endswith('.py'):
                            try:
                                pyc = fileop.byte_compile(outfile)
                                outfiles.append(pyc)
                            except Exception:
                                # Don't give up if byte-compilation fails,
                                # but log it and perhaps warn the user
                                logger.warning('Byte-compilation failed',
                                               exc_info=True)
                    else:
                        fn = os.path.basename(convert_path(arcname))
                        workname = os.path.join(workdir, fn)
                        with zf.open(arcname) as bf:
                            fileop.copy_stream(bf, workname)

                        dn, fn = os.path.split(outfile)
                        maker.target_dir = dn
                        filenames = maker.make(fn)
                        fileop.set_executable_mode(filenames)
                        outfiles.extend(filenames)

                if lib_only:
                    logger.debug('lib_only: returning None')
                    dist = None
                else:
                    # Generate scripts

                    # Try to get pydist.json so we can see if there are
                    # any commands to generate. If this fails (e.g. because
                    # of a legacy wheel), log a warning but don't give up.
                    commands = None
                    file_version = self.info['Wheel-Version']
                    if file_version == '1.0':
                        # Use legacy info
                        ep = posixpath.join(info_dir, 'entry_points.txt')
                        try:
                            with zf.open(ep) as bwf:
                                epdata = read_exports(bwf)
                            commands = {}
                            for key in ('console', 'gui'):
                                k = '%s_scripts' % key
                                if k in epdata:
                                    commands['wrap_%s' % key] = d = {}
                                    for v in epdata[k].values():
                                        s = '%s:%s' % (v.prefix, v.suffix)
                                        if v.flags:
                                            s += ' %s' % v.flags
                                        d[v.name] = s
                        except Exception:
                            logger.warning('Unable to read legacy script '
                                           'metadata, so cannot generate '
                                           'scripts')
                    else:
                        try:
                            with zf.open(metadata_name) as bwf:
                                wf = wrapper(bwf)
                                commands = json.load(wf).get('extensions')
                                if commands:
                                    commands = commands.get('python.commands')
                        except Exception:
                            logger.warning('Unable to read JSON metadata, so '
                                           'cannot generate scripts')
                    if commands:
                        console_scripts = commands.get('wrap_console', {})
                        gui_scripts = commands.get('wrap_gui', {})
                        if console_scripts or gui_scripts:
                            script_dir = paths.get('scripts', '')
                            if not os.path.isdir(script_dir):
                                raise ValueError('Valid script path not '
                                                 'specified')
                            maker.target_dir = script_dir
                            for k, v in console_scripts.items():
                                script = '%s = %s' % (k, v)
                                filenames = maker.make(script)
                                fileop.set_executable_mode(filenames)

                            if gui_scripts:
                                options = {'gui': True }
                                for k, v in gui_scripts.items():
                                    script = '%s = %s' % (k, v)
                                    filenames = maker.make(script, options)
                                    fileop.set_executable_mode(filenames)

                    p = os.path.join(libdir, info_dir)
                    dist = InstalledDistribution(p)

                    # Write SHARED
                    paths = dict(paths)     # don't change passed in dict
                    del paths['purelib']
                    del paths['platlib']
                    paths['lib'] = libdir
                    p = dist.write_shared_locations(paths, dry_run)
                    if p:
                        outfiles.append(p)

                    # Write RECORD
                    dist.write_installed_files(outfiles, paths['prefix'],
                                               dry_run)
                return dist
            except Exception:  # pragma: no cover
                logger.exception('installation failed.')
                fileop.rollback()
                raise
            finally:
                shutil.rmtree(workdir)
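        # Illustrative sketch (not part of the original source): installing a
        # wheel with a script maker. The import, paths and wheel name below
        # are assumptions for illustration; distlib's ScriptMaker (or any
        # object with the same interface) is expected here.
        #
        #   from distlib.scripts import ScriptMaker
        #   maker = ScriptMaker(None, None, add_launchers=False)
        #   paths = {'purelib': site_packages, 'platlib': site_packages,
        #            'scripts': bin_dir, 'headers': hdr_dir, 'data': data_dir,
        #            'prefix': prefix}
        #   dist = Wheel('mypkg-1.0-py2.py3-none-any.whl').install(paths, maker)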

    def _get_dylib_cache(self):
        global cache
        if cache is None:
            # Use native string to avoid issues on 2.x: see Python #20140.
            base = os.path.join(get_cache_base(), str('dylib-cache'),
                                sys.version[:3])
            cache = Cache(base)
        return cache

    def _get_extensions(self):
        pathname = os.path.join(self.dirname, self.filename)
        name_ver = '%s-%s' % (self.name, self.version)
        info_dir = '%s.dist-info' % name_ver
        arcname = posixpath.join(info_dir, 'EXTENSIONS')
        wrapper = codecs.getreader('utf-8')
        result = []
        with ZipFile(pathname, 'r') as zf:
            try:
                with zf.open(arcname) as bf:
                    wf = wrapper(bf)
                    extensions = json.load(wf)
                    cache = self._get_dylib_cache()
                    prefix = cache.prefix_to_dir(pathname)
                    cache_base = os.path.join(cache.base, prefix)
                    if not os.path.isdir(cache_base):
                        os.makedirs(cache_base)
                    for name, relpath in extensions.items():
                        dest = os.path.join(cache_base, convert_path(relpath))
                        if not os.path.exists(dest):
                            extract = True
                        else:
                            file_time = os.stat(dest).st_mtime
                            file_time = datetime.datetime.fromtimestamp(file_time)
                            info = zf.getinfo(relpath)
                            wheel_time = datetime.datetime(*info.date_time)
                            extract = wheel_time > file_time
                        if extract:
                            zf.extract(relpath, cache_base)
                        result.append((name, dest))
            except KeyError:
                pass
        return result

    def is_compatible(self):
        """
        Determine if a wheel is compatible with the running system.
        """
        return is_compatible(self)

    def is_mountable(self):
        """
        Determine if a wheel is asserted as mountable by its metadata.
        """
        return True # for now - metadata details TBD

    def mount(self, append=False):
        pathname = os.path.abspath(os.path.join(self.dirname, self.filename))
        if not self.is_compatible():
            msg = 'Wheel %s not compatible with this Python.' % pathname
            raise DistlibException(msg)
        if not self.is_mountable():
            msg = 'Wheel %s is marked as not mountable.' % pathname
            raise DistlibException(msg)
        if pathname in sys.path:
            logger.debug('%s already in path', pathname)
        else:
            if append:
                sys.path.append(pathname)
            else:
                sys.path.insert(0, pathname)
            extensions = self._get_extensions()
            if extensions:
                if _hook not in sys.meta_path:
                    sys.meta_path.append(_hook)
                _hook.add(pathname, extensions)

    def unmount(self):
        pathname = os.path.abspath(os.path.join(self.dirname, self.filename))
        if pathname not in sys.path:
            logger.debug('%s not in path', pathname)
        else:
            sys.path.remove(pathname)
            if pathname in _hook.impure_wheels:
                _hook.remove(pathname)
            if not _hook.impure_wheels:
                if _hook in sys.meta_path:
                    sys.meta_path.remove(_hook)

    def verify(self):
        pathname = os.path.join(self.dirname, self.filename)
        name_ver = '%s-%s' % (self.name, self.version)
        data_dir = '%s.data' % name_ver
        info_dir = '%s.dist-info' % name_ver

        metadata_name = posixpath.join(info_dir, METADATA_FILENAME)
        wheel_metadata_name = posixpath.join(info_dir, 'WHEEL')
        record_name = posixpath.join(info_dir, 'RECORD')

        wrapper = codecs.getreader('utf-8')

        with ZipFile(pathname, 'r') as zf:
            with zf.open(wheel_metadata_name) as bwf:
                wf = wrapper(bwf)
                message = message_from_file(wf)
            wv = message['Wheel-Version'].split('.', 1)
            file_version = tuple([int(i) for i in wv])
            # TODO version verification

            records = {}
            with zf.open(record_name) as bf:
                with CSVReader(stream=bf) as reader:
                    for row in reader:
                        p = row[0]
                        records[p] = row

            for zinfo in zf.infolist():
                arcname = zinfo.filename
                if isinstance(arcname, text_type):
                    u_arcname = arcname
                else:
                    u_arcname = arcname.decode('utf-8')
                if '..' in u_arcname:
                    raise DistlibException('invalid entry in '
                                           'wheel: %r' % u_arcname)

                # The signature file won't be in RECORD,
                # and we don't currently do anything with it
                if u_arcname.endswith('/RECORD.jws'):
                    continue
                row = records[u_arcname]
                if row[2] and str(zinfo.file_size) != row[2]:
                    raise DistlibException('size mismatch for '
                                           '%s' % u_arcname)
                if row[1]:
                    kind, value = row[1].split('=', 1)
                    with zf.open(arcname) as bf:
                        data = bf.read()
                    _, digest = self.get_hash(data, kind)
                    if digest != value:
                        raise DistlibException('digest mismatch for '
                                               '%s' % arcname)

    def update(self, modifier, dest_dir=None, **kwargs):
        """
        Update the contents of a wheel in a generic way. The modifier should
        be a callable which expects a dictionary argument: its keys are
        archive-entry paths, and its values are absolute filesystem paths
        where the contents of the corresponding archive entries can be found. The
        modifier is free to change the contents of the files pointed to, add
        new entries and remove entries, before returning. This method will
        extract the entire contents of the wheel to a temporary location, call
        the modifier, and then use the passed (and possibly updated)
        dictionary to write a new wheel. If ``dest_dir`` is specified, the new
        wheel is written there -- otherwise, the original wheel is overwritten.

        The modifier should return True if it updated the wheel, else False.
        This method returns the same value the modifier returns.
        """

        def get_version(path_map, info_dir):
            version = path = None
            key = '%s/%s' % (info_dir, METADATA_FILENAME)
            if key not in path_map:
                key = '%s/PKG-INFO' % info_dir
            if key in path_map:
                path = path_map[key]
                version = Metadata(path=path).version
            return version, path

        def update_version(version, path):
            updated = None
            try:
                v = NormalizedVersion(version)  # validity check; raises UnsupportedVersionError for non-PEP-440 versions
                i = version.find('-')
                if i < 0:
                    updated = '%s+1' % version
                else:
                    parts = [int(s) for s in version[i + 1:].split('.')]
                    parts[-1] += 1
                    updated = '%s+%s' % (version[:i],
                                         '.'.join(str(i) for i in parts))
            except UnsupportedVersionError:
                logger.debug('Cannot update non-compliant (PEP-440) '
                             'version %r', version)
            if updated:
                md = Metadata(path=path)
                md.version = updated
                legacy = not path.endswith(METADATA_FILENAME)
                md.write(path=path, legacy=legacy)
                logger.debug('Version updated from %r to %r', version,
                             updated)

        pathname = os.path.join(self.dirname, self.filename)
        name_ver = '%s-%s' % (self.name, self.version)
        info_dir = '%s.dist-info' % name_ver
        record_name = posixpath.join(info_dir, 'RECORD')
        with tempdir() as workdir:
            with ZipFile(pathname, 'r') as zf:
                path_map = {}
                for zinfo in zf.infolist():
                    arcname = zinfo.filename
                    if isinstance(arcname, text_type):
                        u_arcname = arcname
                    else:
                        u_arcname = arcname.decode('utf-8')
                    if u_arcname == record_name:
                        continue
                    if '..' in u_arcname:
                        raise DistlibException('invalid entry in '
                                               'wheel: %r' % u_arcname)
                    zf.extract(zinfo, workdir)
                    path = os.path.join(workdir, convert_path(u_arcname))
                    path_map[u_arcname] = path

            # Remember the version.
            original_version, _ = get_version(path_map, info_dir)
            # Files extracted. Call the modifier.
            modified = modifier(path_map, **kwargs)
            if modified:
                # Something changed - need to build a new wheel.
                current_version, path = get_version(path_map, info_dir)
                if current_version and (current_version == original_version):
                    # Add or update local version to signify changes.
                    update_version(current_version, path)
                # Decide where the new wheel goes.
                if dest_dir is None:
                    fd, newpath = tempfile.mkstemp(suffix='.whl',
                                                   prefix='wheel-update-',
                                                   dir=workdir)
                    os.close(fd)
                else:
                    if not os.path.isdir(dest_dir):
                        raise DistlibException('Not a directory: %r' % dest_dir)
                    newpath = os.path.join(dest_dir, self.filename)
                archive_paths = list(path_map.items())
                distinfo = os.path.join(workdir, info_dir)
                info = distinfo, info_dir
                self.write_records(info, workdir, archive_paths)
                self.build_zip(newpath, archive_paths)
                if dest_dir is None:
                    shutil.copyfile(newpath, pathname)
        return modified
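    # Illustrative sketch (not part of the original source): patching a file
    # inside an existing wheel via update(). The wheel name, archive path and
    # modifier below are hypothetical.
    #
    #   def add_note(path_map, **kwargs):
    #       with open(path_map['mypkg/__init__.py'], 'a') as f:
    #           f.write('\n# patched\n')
    #       return True      # tell update() that a rebuild is needed
    #
    #   Wheel('mypkg-1.0-py2.py3-none-any.whl').update(add_note)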

def compatible_tags():
    """
    Return (pyver, abi, arch) tuples compatible with this Python.
    """
    versions = [VER_SUFFIX]
    major = VER_SUFFIX[0]
    for minor in range(sys.version_info[1] - 1, - 1, -1):
        versions.append(''.join([major, str(minor)]))

    abis = []
    for suffix, _, _ in imp.get_suffixes():
        if suffix.startswith('.abi'):
            abis.append(suffix.split('.', 2)[1])
    abis.sort()
    if ABI != 'none':
        abis.insert(0, ABI)
    abis.append('none')
    result = []

    arches = [ARCH]
    if sys.platform == 'darwin':
        m = re.match(r'(\w+)_(\d+)_(\d+)_(\w+)$', ARCH)
        if m:
            name, major, minor, arch = m.groups()
            minor = int(minor)
            matches = [arch]
            if arch in ('i386', 'ppc'):
                matches.append('fat')
            if arch in ('i386', 'ppc', 'x86_64'):
                matches.append('fat3')
            if arch in ('ppc64', 'x86_64'):
                matches.append('fat64')
            if arch in ('i386', 'x86_64'):
                matches.append('intel')
            if arch in ('i386', 'x86_64', 'intel', 'ppc', 'ppc64'):
                matches.append('universal')
            while minor >= 0:
                for match in matches:
                    s = '%s_%s_%s_%s' % (name, major, minor, match)
                    if s != ARCH:   # already there
                        arches.append(s)
                minor -= 1

    # Most specific - our Python version, ABI and arch
    for abi in abis:
        for arch in arches:
            result.append((''.join((IMP_PREFIX, versions[0])), abi, arch))

    # where no ABI / arch dependency, but IMP_PREFIX dependency
    for i, version in enumerate(versions):
        result.append((''.join((IMP_PREFIX, version)), 'none', 'any'))
        if i == 0:
            result.append((''.join((IMP_PREFIX, version[0])), 'none', 'any'))

    # no IMP_PREFIX, ABI or arch dependency
    for i, version in enumerate(versions):
        result.append((''.join(('py', version)), 'none', 'any'))
        if i == 0:
            result.append((''.join(('py', version[0])), 'none', 'any'))
    return set(result)
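# Illustrative note (not part of the original source): on a CPython 2.7
# interpreter the set returned above includes, among others,
# ('cp27', ABI, ARCH), ('cp27', 'none', 'any'), ('cp2', 'none', 'any'),
# ('py27', 'none', 'any') and ('py2', 'none', 'any').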


COMPATIBLE_TAGS = compatible_tags()

del compatible_tags


def is_compatible(wheel, tags=None):
    if not isinstance(wheel, Wheel):
        wheel = Wheel(wheel)    # assume it's a filename
    result = False
    if tags is None:
        tags = COMPATIBLE_TAGS
    for ver, abi, arch in tags:
        if ver in wheel.pyver and abi in wheel.abi and arch in wheel.arch:
            result = True
            break
    return result
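
# Illustrative usage (not part of the original module): parsing a wheel
# filename and checking compatibility. The filename is hypothetical and does
# not need to exist on disk for these operations.
if __name__ == '__main__':  # pragma: no cover
    w = Wheel('requests-2.0.0-py2.py3-none-any.whl')
    print(w.name)
    print(w.version)
    print(list(w.tags))
    print(is_compatible(w))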
site-packages/pip/_vendor/distlib/__init__.pyc
[compiled bytecode: binary data, not recoverable as source. The readable strings indicate the package __init__ sets __version__ = '0.2.4', defines DistlibException, and attaches a logging NullHandler to the package logger.]

site-packages/pip/_vendor/distlib/compat.pyo
[compiled bytecode: binary data, not recoverable as source (and truncated here). The readable strings indicate the module provides Python 2/3 compatibility shims: urllib/urlparse and httplib re-exports, splituser, a match_hostname/CertificateError backport, shutil.which, a context-manager wrapper around zipfile.ZipFile, platform.python_implementation, fsencode/fsdecode, and a tokenize.detect_encoding fallback, among others.]
rQddBZ{nXyddClam|Z|Wnek
ryddDl}m~ZWn!ek
rddDlm~ZnXy ddElmZmZmZWnek
rnXdFefdGYZ|nXyddHlmZmZWnek
rejndIejZdJZdKefdLYZddMZdNefdOYZdPefdQYZdReRfdSYZnXdS(Ui(tabsolute_importNi(tStringIO(tFileTypei(tshutil(turlparset
urlunparseturljointurlsplitt
urlunsplit(turlretrievetquotetunquoteturl2pathnametpathname2urltContentTooShortErrort	splittypecC@s+t|tr!|jd}nt|S(Nsutf-8(t
isinstancetunicodetencodet_quote(ts((s>/usr/lib/python2.7/site-packages/pip/_vendor/distlib/compat.pyR
s(	tRequestturlopentURLErrort	HTTPErrortHTTPBasicAuthHandlertHTTPPasswordMgrtHTTPHandlertHTTPRedirectHandlertbuild_opener(tHTTPSHandler(t
HTMLParser(tifilter(tifilterfalsecC@sYtdkr*ddl}|jdantj|}|rO|jddSd|fS(sJsplituser('user[:passwd]@host[:port]') --> 'user[:passwd]', 'host[:port]'.iNs^(.*)@(.*)$ii(t	_userprogtNonetretcompiletmatchtgroup(thostR$R&((s>/usr/lib/python2.7/site-packages/pip/_vendor/distlib/compat.pyt	splituser4s(t
TextIOWrapper(	RRRR)R
RRRR(
RR	RRR
RRRRR(RRR(tfilterfalse(tmatch_hostnametCertificateErrorR-cB@seZRS((t__name__t
__module__(((s>/usr/lib/python2.7/site-packages/pip/_vendor/distlib/compat.pyR-^sc
C@sSg}|stS|jd}|d|d}}|jd}||krhtdt|n|s|j|jkS|dkr|jdnY|jds|jdr|jtj	|n"|jtj	|j
dd	x$|D]}|jtj	|qWtjd
dj|dtj
}	|	j|S(
spMatching according to RFC 6125, section 6.4.3

        http://tools.ietf.org/html/rfc6125#section-6.4.3
        t.iit*s,too many wildcards in certificate DNS name: s[^.]+sxn--s\*s[^.]*s\As\.s\Z(tFalsetsplittcountR-treprtlowertappendt
startswithR$tescapetreplaceR%tjoint
IGNORECASER&(
tdnthostnamet
max_wildcardstpatstpartstleftmostt	remaindert	wildcardstfragtpat((s>/usr/lib/python2.7/site-packages/pip/_vendor/distlib/compat.pyt_dnsname_matchbs("
&cC@s[|stdng}|jdd
}xC|D];\}}|dkr4t||r_dS|j|q4q4W|sxc|jddD]L}xC|D];\}}|dkrt||rdS|j|qqWqWnt|dkrtd|d	jtt|fn;t|dkrKtd
||dfntddS(s=Verify that *cert* (in decoded format as returned by
        SSLSocket.getpeercert()) matches the *hostname*.  RFC 2818 and RFC 6125
        rules are followed, but IP addresses are not accepted for *hostname*.

        CertificateError is raised on failure. On success, the function
        returns nothing.
        stempty or no certificate, match_hostname needs a SSL socket or SSL context with either CERT_OPTIONAL or CERT_REQUIREDtsubjectAltNametDNSNtsubjectt
commonNameis&hostname %r doesn't match either of %ss, shostname %r doesn't match %ris=no appropriate commonName or subjectAltName fields were found(((	t
ValueErrortgetRGR7tlenR-R;tmapR5(tcertR>tdnsnamestsantkeytvaluetsub((s>/usr/lib/python2.7/site-packages/pip/_vendor/distlib/compat.pyR,s.%(tSimpleNamespacet	ContainercB@seZdZdZRS(sR
        A generic container for when multiple values need to be returned
        cK@s|jj|dS(N(t__dict__tupdate(tselftkwargs((s>/usr/lib/python2.7/site-packages/pip/_vendor/distlib/compat.pyt__init__s(R.R/t__doc__R\(((s>/usr/lib/python2.7/site-packages/pip/_vendor/distlib/compat.pyRWs(twhichc@sd}tjjr2||r.SdS|dkrYtjjdtj}n|scdS|jtj}t	j
dkrtj|kr|jdtjntjjddjtj}t
fd|Drg}qg|D]}|^q}n	g}t}xu|D]m}tjj|}	|	|kr+|j|	x9|D].}
tjj||
}|||rc|SqcWq+q+WdS(	sKGiven a command, mode, and a PATH string, return the path which
        conforms to the given mode on the PATH, or None if there is no such
        file.

        `mode` defaults to os.F_OK | os.X_OK. `path` defaults to the result
        of os.environ.get("PATH"), or can be overridden with a custom search
        path.

        cS@s5tjj|o4tj||o4tjj|S(N(tostpathtexiststaccesstisdir(tfntmode((s>/usr/lib/python2.7/site-packages/pip/_vendor/distlib/compat.pyt
_access_checks$tPATHtwin32itPATHEXTtc3@s*|] }jj|jVqdS(N(R6tendswith(t.0text(tcmd(s>/usr/lib/python2.7/site-packages/pip/_vendor/distlib/compat.pys	sN(R_R`tdirnameR#tenvironRMtdefpathR3tpathseptsystplatformtcurdirtinserttanytsettnormcasetaddR;(RnReR`RftpathexttfilesRmtseentdirtnormdirtthefiletname((Rns>/usr/lib/python2.7/site-packages/pip/_vendor/distlib/compat.pyR^s8
	! 		


(tZipFilet	__enter__(t
ZipExtFileRcB@s#eZdZdZdZRS(cC@s|jj|jdS(N(RXRY(RZtbase((s>/usr/lib/python2.7/site-packages/pip/_vendor/distlib/compat.pyR\scC@s|S(N((RZ((s>/usr/lib/python2.7/site-packages/pip/_vendor/distlib/compat.pyRscG@s|jdS(N(tclose(RZtexc_info((s>/usr/lib/python2.7/site-packages/pip/_vendor/distlib/compat.pyt__exit__s(R.R/R\RR(((s>/usr/lib/python2.7/site-packages/pip/_vendor/distlib/compat.pyRs		RcB@s#eZdZdZdZRS(cC@s|S(N((RZ((s>/usr/lib/python2.7/site-packages/pip/_vendor/distlib/compat.pyR"scG@s|jdS(N(R(RZR((s>/usr/lib/python2.7/site-packages/pip/_vendor/distlib/compat.pyR%scO@stj|||}t|S(N(tBaseZipFiletopenR(RZtargsR[R((s>/usr/lib/python2.7/site-packages/pip/_vendor/distlib/compat.pyR)s(R.R/RRR(((s>/usr/lib/python2.7/site-packages/pip/_vendor/distlib/compat.pyR!s		(tpython_implementationcC@s@dtjkrdStjdkr&dStjjdr<dSdS(s6Return a string identifying the Python implementation.tPyPytjavatJythont
IronPythontCPython(RstversionR_RR8(((s>/usr/lib/python2.7/site-packages/pip/_vendor/distlib/compat.pyR0s(t	sysconfig(tCallablecC@s
t|tS(N(RR(tobj((s>/usr/lib/python2.7/site-packages/pip/_vendor/distlib/compat.pytcallableDstmbcststricttsurrogateescapecC@sOt|tr|St|tr2|jttStdt|jdS(Nsexpect bytes or str, not %s(	Rtbytest	text_typeRt_fsencodingt	_fserrorst	TypeErrorttypeR.(tfilename((s>/usr/lib/python2.7/site-packages/pip/_vendor/distlib/compat.pytfsencodeRscC@sOt|tr|St|tr2|jttStdt|jdS(Nsexpect bytes or str, not %s(	RRRtdecodeRRRRR.(R((s>/usr/lib/python2.7/site-packages/pip/_vendor/distlib/compat.pytfsdecode[s(tdetect_encoding(tBOM_UTF8tlookupscoding[:=]\s*([-\w.]+)cC@s^|d jjdd}|dks7|jdr;dS|dksV|jd
rZdS|S(s(Imitates get_normal_name in tokenizer.c.it_t-sutf-8sutf-8-slatin-1s
iso-8859-1siso-latin-1slatin-1-siso-8859-1-siso-latin-1-(slatin-1s
iso-8859-1siso-latin-1(slatin-1-siso-8859-1-siso-latin-1-(R6R:R8(torig_enctenc((s>/usr/lib/python2.7/site-packages/pip/_vendor/distlib/compat.pyt_get_normal_namelsc@syjjWntk
r)dnXtd}d}fd}fd}|}|jtrt|d}d}n|s|gfS||}|r||gfS|}|s||gfS||}|r|||gfS|||gfS(s?
        The detect_encoding() function is used to detect the encoding that should
        be used to decode a Python source file.  It requires one argument, readline,
        in the same way as the tokenize() generator.

        It will call readline a maximum of twice, and return the encoding used
        (as a string) and a list of any lines (left as bytes) it has read in.

        It detects the encoding from the presence of a utf-8 bom or an encoding
        cookie as specified in pep-0263.  If both a bom and a cookie are present,
        but disagree, a SyntaxError will be raised.  If the encoding cookie is an
        invalid charset, raise a SyntaxError.  Note that if a utf-8 bom is found,
        'utf-8-sig' is returned.

        If no encoding is specified, then the default of 'utf-8' will be returned.
        sutf-8c@s$ySWntk
rdSXdS(NRj(t
StopIteration((treadline(s>/usr/lib/python2.7/site-packages/pip/_vendor/distlib/compat.pytread_or_stops
c@s7y|jd}WnDtk
rYd}dk	rJdj|}nt|nXtj|}|ssdSt|d}yt|}WnHt	k
rdkrd|}ndj|}t|nXr3|j
dkr&dkrd}ndj}t|n|d	7}n|S(
Nsutf-8s'invalid or missing encoding declarations{} for {!r}isunknown encoding: sunknown encoding for {!r}: {}sencoding problem: utf-8s encoding problem for {!r}: utf-8s-sig(RtUnicodeDecodeErrorR#tformattSyntaxErrort	cookie_retfindallRRtLookupErrorR(tlinetline_stringtmsgtmatchestencodingtcodec(t	bom_foundR(s>/usr/lib/python2.7/site-packages/pip/_vendor/distlib/compat.pytfind_cookies6


			
is	utf-8-sigN(t__self__RtAttributeErrorR#R2R8RtTrue(RRtdefaultRRtfirsttsecond((RRRs>/usr/lib/python2.7/site-packages/pip/_vendor/distlib/compat.pyRws4

&	
	

	
(R9ii(tunescape(tChainMap(tMutableMapping(trecursive_reprs...c@sfd}|S(sm
            Decorator to make a repr function return fillvalue for a recursive
            call
            c@smtfd}td|_td|_td|_tdi|_|S(Nc@sWt|tf}|kr%Sj|z|}Wdj|X|S(N(tidt	get_identRztdiscard(RZRStresult(t	fillvaluetrepr_runningt
user_function(s>/usr/lib/python2.7/site-packages/pip/_vendor/distlib/compat.pytwrappers
R/R]R.t__annotations__(RxtgetattrR/R]R.R(RR(R(RRs>/usr/lib/python2.7/site-packages/pip/_vendor/distlib/compat.pytdecorating_functions	((RR((Rs>/usr/lib/python2.7/site-packages/pip/_vendor/distlib/compat.pyt_recursive_reprsRcB@seZdZdZdZdZddZdZdZ	dZ
dZed	Z
ed
ZdZeZdZed
ZdZdZdZdZdZRS(s A ChainMap groups multiple dicts (or other mappings) together
        to create a single, updateable view.

        The underlying mappings are stored in a list.  That list is public and can
        accessed or updated using the *maps* attribute.  There is no other state.

        Lookups search the underlying mappings successively until a key is found.
        In contrast, writes, updates, and deletions only operate on the first
        mapping.

        cG@st|pig|_dS(sInitialize a ChainMap by setting *maps* to the given mappings.
            If no mappings are provided, a single empty dictionary is used.

            N(tlisttmaps(RZR((s>/usr/lib/python2.7/site-packages/pip/_vendor/distlib/compat.pyR\
scC@st|dS(N(tKeyError(RZRS((s>/usr/lib/python2.7/site-packages/pip/_vendor/distlib/compat.pyt__missing__scC@sAx1|jD]&}y||SWq
tk
r/q
Xq
W|j|S(N(RRR(RZRStmapping((s>/usr/lib/python2.7/site-packages/pip/_vendor/distlib/compat.pyt__getitem__s
cC@s||kr||S|S(N((RZRSR((s>/usr/lib/python2.7/site-packages/pip/_vendor/distlib/compat.pyRMscC@sttj|jS(N(RNRxtunionR(RZ((s>/usr/lib/python2.7/site-packages/pip/_vendor/distlib/compat.pyt__len__"scC@sttj|jS(N(titerRxRR(RZ((s>/usr/lib/python2.7/site-packages/pip/_vendor/distlib/compat.pyt__iter__%sc@stfd|jDS(Nc3@s|]}|kVqdS(N((Rltm(RS(s>/usr/lib/python2.7/site-packages/pip/_vendor/distlib/compat.pys	)s(RwR(RZRS((RSs>/usr/lib/python2.7/site-packages/pip/_vendor/distlib/compat.pyt__contains__(scC@s
t|jS(N(RwR(RZ((s>/usr/lib/python2.7/site-packages/pip/_vendor/distlib/compat.pyt__bool__+scC@s%dj|djtt|jS(Ns{0.__class__.__name__}({1})s, (RR;ROR5R(RZ((s>/usr/lib/python2.7/site-packages/pip/_vendor/distlib/compat.pyt__repr__.scG@s|tj||S(s?Create a ChainMap with a single dict created from the iterable.(tdicttfromkeys(tclstiterableR((s>/usr/lib/python2.7/site-packages/pip/_vendor/distlib/compat.pyR3scC@s$|j|jdj|jdS(sHNew ChainMap or subclass with a new copy of maps[0] and refs to maps[1:]ii(t	__class__Rtcopy(RZ((s>/usr/lib/python2.7/site-packages/pip/_vendor/distlib/compat.pyR8scC@s|ji|jS(s;New ChainMap with a new dict followed by all previous maps.(RR(RZ((s>/usr/lib/python2.7/site-packages/pip/_vendor/distlib/compat.pyt	new_child>scC@s|j|jdS(sNew ChainMap from maps[1:].i(RR(RZ((s>/usr/lib/python2.7/site-packages/pip/_vendor/distlib/compat.pytparentsBscC@s||jd|/usr/lib/python2.7/site-packages/pip/_vendor/distlib/compat.pyt__setitem__GscC@s?y|jd|=Wn&tk
r:tdj|nXdS(Nis(Key not found in the first mapping: {!r}(RRR(RZRS((s>/usr/lib/python2.7/site-packages/pip/_vendor/distlib/compat.pyt__delitem__Js
cC@s9y|jdjSWntk
r4tdnXdS(sPRemove and return an item pair from maps[0]. Raise KeyError is maps[0] is empty.is#No keys found in the first mapping.N(RtpopitemR(RZ((s>/usr/lib/python2.7/site-packages/pip/_vendor/distlib/compat.pyRPs
cG@sHy|jdj||SWn&tk
rCtdj|nXdS(sWRemove *key* from maps[0] and return its value. Raise KeyError if *key* not in maps[0].is(Key not found in the first mapping: {!r}N(RtpopRR(RZRSR((s>/usr/lib/python2.7/site-packages/pip/_vendor/distlib/compat.pyRWs
cC@s|jdjdS(s'Clear maps[0], leaving maps[1:] intact.iN(Rtclear(RZ((s>/usr/lib/python2.7/site-packages/pip/_vendor/distlib/compat.pyR^sN(R.R/R]R\RRR#RMRRRRRRtclassmethodRRt__copy__RtpropertyRRRRRR(((s>/usr/lib/python2.7/site-packages/pip/_vendor/distlib/compat.pyRs(													(tcache_from_sourcecC@s2|dkrt}n|r$d}nd}||S(Ntcto(R#t	__debug__(R`tdebug_overridetsuffix((s>/usr/lib/python2.7/site-packages/pip/_vendor/distlib/compat.pyRes		(tOrderedDict(R(tKeysViewt
ValuesViewt	ItemsViewRcB@seZdZdZejdZejdZdZdZdZ	e
dZdZd	Z
d
ZdZdZd
ZdZeZeZedZddZddZdZdZeddZdZdZdZ dZ!dZ"RS(s)Dictionary that remembers insertion ordercO@st|dkr+tdt|ny|jWn7tk
rog|_}||dg|(i|_nX|j||dS(sInitialize an ordered dictionary.  Signature is the same as for
            regular dictionaries, but keyword arguments are not recommended
            because their insertion order is arbitrary.

            is$expected at most 1 arguments, got %dN(RNRt_OrderedDict__rootRR#t_OrderedDict__mapt_OrderedDict__update(RZRtkwdstroot((s>/usr/lib/python2.7/site-packages/pip/_vendor/distlib/compat.pyR\s


cC@s\||krH|j}|d}|||g|d<|d<|j| od[i]=yiiN(RR(RZRSRTtdict_setitemRtlast((s>/usr/lib/python2.7/site-packages/pip/_vendor/distlib/compat.pyRs
	
)cC@s@||||jj|\}}}||d<||d del od[y]iiN(RR(RZRStdict_delitemt	link_prevt	link_next((s>/usr/lib/python2.7/site-packages/pip/_vendor/distlib/compat.pyRs

cc@s=|j}|d}x#||k	r8|dV|d}qWdS(sod.__iter__() <==> iter(od)iiN(R(RZRtcurr((s>/usr/lib/python2.7/site-packages/pip/_vendor/distlib/compat.pyRs
	
	cc@s=|j}|d}x#||k	r8|dV|d}qWdS(s#od.__reversed__() <==> reversed(od)iiN(R(RZRR((s>/usr/lib/python2.7/site-packages/pip/_vendor/distlib/compat.pyt__reversed__s
	
	cC@smyHx|jjD]
}|2qW|j}||dg|(|jjWntk
r[nXtj|dS(s.od.clear() -> None.  Remove all items from od.N(Rt
itervaluesRR#RRR(RZtnodeR((s>/usr/lib/python2.7/site-packages/pip/_vendor/distlib/compat.pyRs	
cC@s|stdn|j}|rO|d}|d}||d<||d (k, v), return and remove a (key, value) pair.
            Pairs are returned in LIFO order if last is true or FIFO order if false.

            sdictionary is emptyiii(RRRRR(RZRRtlinkRRRSRT((s>/usr/lib/python2.7/site-packages/pip/_vendor/distlib/compat.pyRs 	









cC@s
t|S(sod.keys() -> list of keys in od(R(RZ((s>/usr/lib/python2.7/site-packages/pip/_vendor/distlib/compat.pytkeysscC@sg|D]}||^qS(s#od.values() -> list of values in od((RZRS((s>/usr/lib/python2.7/site-packages/pip/_vendor/distlib/compat.pytvaluesscC@s!g|D]}|||f^qS(s.od.items() -> list of (key, value) pairs in od((RZRS((s>/usr/lib/python2.7/site-packages/pip/_vendor/distlib/compat.pytitemsscC@s
t|S(s0od.iterkeys() -> an iterator over the keys in od(R(RZ((s>/usr/lib/python2.7/site-packages/pip/_vendor/distlib/compat.pytiterkeysscc@sx|D]}||VqWdS(s2od.itervalues -> an iterator over the values in odN((RZtk((s>/usr/lib/python2.7/site-packages/pip/_vendor/distlib/compat.pyRs
cc@s$x|D]}|||fVqWdS(s=od.iteritems -> an iterator over the (key, value) items in odN((RZR((s>/usr/lib/python2.7/site-packages/pip/_vendor/distlib/compat.pyt	iteritemss
cO@s&t|dkr.tdt|fn|sCtdn|d}d}t|dkrr|d}nt|trxw|D]}|||| None.  Update od from dict/iterable E and F.

            If E is a dict instance, does:           for k in E: od[k] = E[k]
            If E has a .keys() method, does:         for k in E.keys(): od[k] = E[k]
            Or if E is an iterable of items, does:   for k, v in E: od[k] = v
            In either case, this is followed by:     for k, v in F.items(): od[k] = v

            is8update() takes at most 2 positional arguments (%d given)s,update() takes at least 1 argument (0 given)iiR
N((RNRRRthasattrR
R(RRRZtotherRSRT((s>/usr/lib/python2.7/site-packages/pip/_vendor/distlib/compat.pyRYs&	


cC@sC||kr!||}||=|S||jkr?t|n|S(sod.pop(k[,d]) -> v, remove specified key and return the corresponding value.
            If key is not found, d is returned if given, otherwise KeyError is raised.

            (t_OrderedDict__markerR(RZRSRR((s>/usr/lib/python2.7/site-packages/pip/_vendor/distlib/compat.pyR!s
cC@s"||kr||S|||<|S(sDod.setdefault(k[,d]) -> od.get(k,d), also set od[k]=d if k not in od((RZRSR((s>/usr/lib/python2.7/site-packages/pip/_vendor/distlib/compat.pyt
setdefault.s
cC@s|si}nt|tf}||kr4dSd|| repr(od)s...is%s()s%s(%r)N(Rt
_get_identRR.R(RZt
_repr_runningtcall_key((s>/usr/lib/python2.7/site-packages/pip/_vendor/distlib/compat.pyR5s	
cC@sg|D]}|||g^q}t|j}x'ttD]}|j|dqEW|rx|j|f|fS|j|ffS(s%Return state information for picklingN(tvarsRRRR#R(RZRRt	inst_dict((s>/usr/lib/python2.7/site-packages/pip/_vendor/distlib/compat.pyt
__reduce__Cs#cC@s
|j|S(s!od.copy() -> a shallow copy of od(R(RZ((s>/usr/lib/python2.7/site-packages/pip/_vendor/distlib/compat.pyRMscC@s(|}x|D]}||| New ordered dictionary with keys from S
            and values equal to v (which defaults to None).

            ((RRRTtdRS((s>/usr/lib/python2.7/site-packages/pip/_vendor/distlib/compat.pyRQs	
cC@sMt|tr=t|t|ko<|j|jkStj||S(sod.__eq__(y) <==> od==y.  Comparison to another OD is order-sensitive
            while comparison to a regular mapping is order-insensitive.

            (RRRNRRt__eq__(RZR((s>/usr/lib/python2.7/site-packages/pip/_vendor/distlib/compat.pyR\s.cC@s||kS(N((RZR((s>/usr/lib/python2.7/site-packages/pip/_vendor/distlib/compat.pyt__ne__escC@s
t|S(s@od.viewkeys() -> a set-like object providing a view on od's keys(R(RZ((s>/usr/lib/python2.7/site-packages/pip/_vendor/distlib/compat.pytviewkeysjscC@s
t|S(s<od.viewvalues() -> an object providing a view on od's values(R(RZ((s>/usr/lib/python2.7/site-packages/pip/_vendor/distlib/compat.pyt
viewvaluesnscC@s
t|S(sBod.viewitems() -> a set-like object providing a view on od's items(R(RZ((s>/usr/lib/python2.7/site-packages/pip/_vendor/distlib/compat.pyt	viewitemsrsN(#R.R/R]R\RRRRRRRRR
RRR
RRRYRtobjectRRR#RRRRRRRRRRR(((s>/usr/lib/python2.7/site-packages/pip/_vendor/distlib/compat.pyRs:	
												
	
	
					(tBaseConfiguratortvalid_idents^[a-z_][a-z0-9_]*$cC@s,tj|}|s(td|ntS(Ns!Not a valid Python identifier: %r(t
IDENTIFIERR&RLR(RR((s>/usr/lib/python2.7/site-packages/pip/_vendor/distlib/compat.pyR"|stConvertingDictcB@s#eZdZdZddZRS(s A converting dictionary wrapper.cC@sqtj||}|jj|}||k	rm|||/usr/lib/python2.7/site-packages/pip/_vendor/distlib/compat.pyRs
	cC@sttj|||}|jj|}||k	rp|||/usr/lib/python2.7/site-packages/pip/_vendor/distlib/compat.pyRMs
	N(R.R/R]RR#RM(((s>/usr/lib/python2.7/site-packages/pip/_vendor/distlib/compat.pyR$s	cC@sjtj|||}|jj|}||k	rft|tttfkrf||_||_	qfn|S(N(
RRR%R&RR$R'R(R)RS(RZRSRRTR((s>/usr/lib/python2.7/site-packages/pip/_vendor/distlib/compat.pyRs	R'cB@s#eZdZdZddZRS(sA converting list wrapper.cC@sqtj||}|jj|}||k	rm|||/usr/lib/python2.7/site-packages/pip/_vendor/distlib/compat.pyRs
	icC@s^tj||}|jj|}||k	rZt|tttfkrZ||_qZn|S(N(	RRR%R&RR$R'R(R)(RZtidxRTR((s>/usr/lib/python2.7/site-packages/pip/_vendor/distlib/compat.pyRs(R.R/R]RR(((s>/usr/lib/python2.7/site-packages/pip/_vendor/distlib/compat.pyR's	R(cB@seZdZdZRS(sA converting tuple wrapper.cC@sgtj||}|jj|}||k	rct|tttfkrc||_||_	qcn|S(N(
ttupleRR%R&RR$R'R(R)RS(RZRSRTR((s>/usr/lib/python2.7/site-packages/pip/_vendor/distlib/compat.pyRs	(R.R/R]R(((s>/usr/lib/python2.7/site-packages/pip/_vendor/distlib/compat.pyR(sR!cB@seZdZejdZejdZejdZejdZejdZ	idd6dd	6Z
eeZ
d
ZdZdZd
ZdZdZdZRS(sQ
        The configurator base class which defines some useful defaults.
        s%^(?P[a-z]+)://(?P.*)$s^\s*(\w+)\s*s^\.\s*(\w+)\s*s^\[\s*(\w+)\s*\]\s*s^\d+$text_convertRmtcfg_converttcfgcC@st||_||j_dS(N(R$tconfigR%(RZR/((s>/usr/lib/python2.7/site-packages/pip/_vendor/distlib/compat.pyR\sc	C@s|jd}|jd}yy|j|}x_|D]W}|d|7}yt||}Wq7tk
r|j|t||}q7Xq7W|SWnVtk
rtjd\}}td||f}|||_	|_
|nXdS(sl
            Resolve strings to objects using standard import and attribute
            syntax.
            R0iisCannot resolve %r: %sN(R3RtimporterRRtImportErrorRsRRLt	__cause__t
__traceback__(	RZRRtusedtfoundREtettbtv((s>/usr/lib/python2.7/site-packages/pip/_vendor/distlib/compat.pytresolves"



cC@s
|j|S(s*Default converter for the ext:// protocol.(R9(RZRT((s>/usr/lib/python2.7/site-packages/pip/_vendor/distlib/compat.pyR,scC@sO|}|jj|}|dkr7td|n||j}|j|jd}x|rJ|jj|}|r||jd}n|jj|}|r|jd}|j	j|s||}qyt
|}||}Wqtk
r||}qXn|r1||j}qatd||fqaW|S(s*Default converter for the cfg:// protocol.sUnable to convert %risUnable to convert %r at %rN(tWORD_PATTERNR&R#RLtendR/tgroupstDOT_PATTERNt
INDEX_PATTERNt
DIGIT_PATTERNtintR(RZRTtrestRRR*tn((s>/usr/lib/python2.7/site-packages/pip/_vendor/distlib/compat.pyR-s2	

cC@s/t|tr7t|tr7t|}||_nt|trnt|trnt|}||_nt|trt|trt|}||_nt|tr+|j	j
|}|r+|j}|d}|jj
|d}|r(|d}t||}||}q(q+n|S(s
            Convert values to an appropriate type. dicts, lists and tuples are
            replaced by their converting alternatives. Strings are checked to
            see if they have a conversion format and are converted if they do.
            tprefixRN(RR$RR%R'RR(R+tstring_typestCONVERT_PATTERNR&t	groupdicttvalue_convertersRMR#R(RZRTRRRCt	converterR((s>/usr/lib/python2.7/site-packages/pip/_vendor/distlib/compat.pyR&)s*

c	C@s|jd}t|s-|j|}n|jdd}tg|D]"}t|rI|||f^qI}||}|rx-|jD]\}}t|||qWn|S(s1Configure an object with a user-supplied factory.s()R0N(RRR9R#RR"Rtsetattr(	RZR/RtpropsRR[RRRT((s>/usr/lib/python2.7/site-packages/pip/_vendor/distlib/compat.pytconfigure_customEs5cC@s"t|trt|}n|S(s0Utility function which converts lists to tuples.(RRR+(RZRT((s>/usr/lib/python2.7/site-packages/pip/_vendor/distlib/compat.pytas_tupleSs(R.R/R]R$R%RER:R=R>R?RGtstaticmethodt
__import__R0R\R9R,R-R&RKRL(((s>/usr/lib/python2.7/site-packages/pip/_vendor/distlib/compat.pyR!s"
				"		(ii(t
__future__RR_R$RstsslR1R#tversion_infoRt
basestringRDRRttypesRt	file_typet__builtin__tbuiltinstConfigParsertconfigparsert	_backportRRRRRRturllibR	R
RRRR
RRturllib2RRRRRRRRRRthttplibt	xmlrpclibtQueuetqueueRthtmlentitydefst	raw_inputt	itertoolsR tfilterR!R+R"R)tiotstrR*turllib.parseturllib.requestturllib.errorthttp.clienttclienttrequestt
xmlrpc.clientthtml.parsert
html.entitiestentitiestinputR,R-RLRGRVRWR R^tF_OKtX_OKtzipfileRRRRtBaseZipExtFileRtRRRt	NameErrortcollectionsRRRRtgetfilesystemencodingRRttokenizeRtcodecsRRR%RRthtmlR9tcgiRRRtreprlibRRtimpRRtthreadRRtdummy_threadt_abcollRRRRtlogging.configR!R"tIR#R$RRR'R+R((((s>/usr/lib/python2.7/site-packages/pip/_vendor/distlib/compat.pyts$

	(4	@	@F
2
+

A	





	

			
	
	
[


b



 

	
[binary data omitted: archive entry site-packages/pip/_vendor/distlib/locators.pyo, compiled CPython 2.7 bytecode of distlib's distribution locators (Locator base class, PyPIRPCLocator, PyPIJSONLocator, SimpleScrapingLocator, DirectoryLocator, JSONLocator, DistPathLocator, AggregatingLocator, DependencyFinder)]
[binary data omitted: archive entry site-packages/pip/_vendor/distlib/util.pyo, compiled CPython 2.7 bytecode of distlib's utility helpers (parse_requirement, ExportEntry, get_cache_base, Cache, EventMixin, unarchive, zip_dir, Progress, HTTPS verification handlers, SubprocessMixin, normalize_name)]
site-packages/pip/_vendor/distlib/index.py:
# -*- coding: utf-8 -*-
#
# Copyright (C) 2013 Vinay Sajip.
# Licensed to the Python Software Foundation under a contributor agreement.
# See LICENSE.txt and CONTRIBUTORS.txt.
#
import hashlib
import logging
import os
import shutil
import subprocess
import tempfile
try:
    from threading import Thread
except ImportError:
    from dummy_threading import Thread

from . import DistlibException
from .compat import (HTTPBasicAuthHandler, Request, HTTPPasswordMgr,
                     urlparse, build_opener, string_types)
from .util import cached_property, zip_dir, ServerProxy

logger = logging.getLogger(__name__)

DEFAULT_INDEX = 'https://pypi.python.org/pypi'
DEFAULT_REALM = 'pypi'

class PackageIndex(object):
    """
    This class represents a package index compatible with PyPI, the Python
    Package Index.
    """

    boundary = b'----------ThIs_Is_tHe_distlib_index_bouNdaRY_$'

    def __init__(self, url=None):
        """
        Initialise an instance.

        :param url: The URL of the index. If not specified, the URL for PyPI is
                    used.
        """
        self.url = url or DEFAULT_INDEX
        self.read_configuration()
        scheme, netloc, path, params, query, frag = urlparse(self.url)
        if params or query or frag or scheme not in ('http', 'https'):
            raise DistlibException('invalid repository: %s' % self.url)
        self.password_handler = None
        self.ssl_verifier = None
        self.gpg = None
        self.gpg_home = None
        self.rpc_proxy = None
        with open(os.devnull, 'w') as sink:
            # Use gpg by default rather than gpg2, as gpg2 insists on
            # prompting for passwords
            for s in ('gpg', 'gpg2'):
                try:
                    rc = subprocess.check_call([s, '--version'], stdout=sink,
                                               stderr=sink)
                    if rc == 0:
                        self.gpg = s
                        break
                except OSError:
                    pass

    def _get_pypirc_command(self):
        """
        Get the distutils command for interacting with PyPI configurations.
        :return: the command.
        """
        from distutils.core import Distribution
        from distutils.config import PyPIRCCommand
        d = Distribution()
        return PyPIRCCommand(d)

    def read_configuration(self):
        """
        Read the PyPI access configuration as supported by distutils, getting
        distutils to do the actual work. This populates ``username``, ``password``,
        ``realm`` and ``url`` attributes from the configuration.
        """
        # get distutils to do the work
        c = self._get_pypirc_command()
        c.repository = self.url
        cfg = c._read_pypirc()
        self.username = cfg.get('username')
        self.password = cfg.get('password')
        self.realm = cfg.get('realm', 'pypi')
        self.url = cfg.get('repository', self.url)

    def save_configuration(self):
        """
        Save the PyPI access configuration. You must have set ``username`` and
        ``password`` attributes before calling this method.

        Again, distutils is used to do the actual work.
        """
        self.check_credentials()
        # get distutils to do the work
        c = self._get_pypirc_command()
        c._store_pypirc(self.username, self.password)
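
    # Usage sketch for save_configuration() (credentials are illustrative):
    # set the attributes that check_credentials() requires, then persist
    # them via distutils' .pypirc handling.
    #
    #   index = PackageIndex()
    #   index.username = 'user'
    #   index.password = 'secret'
    #   index.save_configuration()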

    def check_credentials(self):
        """
        Check that ``username`` and ``password`` have been set, and raise an
        exception if not.
        """
        if self.username is None or self.password is None:
            raise DistlibException('username and password must be set')
        pm = HTTPPasswordMgr()
        _, netloc, _, _, _, _ = urlparse(self.url)
        pm.add_password(self.realm, netloc, self.username, self.password)
        self.password_handler = HTTPBasicAuthHandler(pm)

    def register(self, metadata):
        """
        Register a distribution on PyPI, using the provided metadata.

        :param metadata: A :class:`Metadata` instance defining at least a name
                         and version number for the distribution to be
                         registered.
        :return: The HTTP response received from PyPI upon submission of the
                request.
        """
        self.check_credentials()
        metadata.validate()
        d = metadata.todict()
        d[':action'] = 'verify'
        request = self.encode_request(d.items(), [])
        response = self.send_request(request)
        d[':action'] = 'submit'
        request = self.encode_request(d.items(), [])
        return self.send_request(request)
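
    # Usage sketch for register(): `md` is assumed to be a Metadata instance
    # carrying at least a name and version; constructing Metadata is outside
    # the scope of this module.
    #
    #   index = PackageIndex()
    #   response = index.register(md)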

    def _reader(self, name, stream, outbuf):
        """
        Thread runner for reading lines from a subprocess into a buffer.

        :param name: The logical name of the stream (used for logging only).
        :param stream: The stream to read from. This will typically be a pipe
                       connected to the output stream of a subprocess.
        :param outbuf: The list to append the read lines to.
        """
        while True:
            s = stream.readline()
            if not s:
                break
            s = s.decode('utf-8').rstrip()
            outbuf.append(s)
            logger.debug('%s: %s' % (name, s))
        stream.close()

    def get_sign_command(self, filename, signer, sign_password,
                         keystore=None):
        """
        Return a suitable command for signing a file.

        :param filename: The pathname to the file to be signed.
        :param signer: The identifier of the signer of the file.
        :param sign_password: The passphrase for the signer's
                              private key used for signing.
        :param keystore: The path to a directory which contains the keys
                         used in verification. If not specified, the
                         instance's ``gpg_home`` attribute is used instead.
        :return: The signing command as a list suitable to be
                 passed to :class:`subprocess.Popen`.
        """
        cmd = [self.gpg, '--status-fd', '2', '--no-tty']
        if keystore is None:
            keystore = self.gpg_home
        if keystore:
            cmd.extend(['--homedir', keystore])
        if sign_password is not None:
            cmd.extend(['--batch', '--passphrase-fd', '0'])
        td = tempfile.mkdtemp()
        sf = os.path.join(td, os.path.basename(filename) + '.asc')
        cmd.extend(['--detach-sign', '--armor', '--local-user',
                    signer, '--output', sf, filename])
        logger.debug('invoking: %s', ' '.join(cmd))
        return cmd, sf

    def run_command(self, cmd, input_data=None):
        """
        Run a command in a child process, passing it any input data specified.

        :param cmd: The command to run.
        :param input_data: If specified, this must be a byte string containing
                           data to be sent to the child process.
        :return: A tuple consisting of the subprocess' exit code, a list of
                 lines read from the subprocess' ``stdout``, and a list of
                 lines read from the subprocess' ``stderr``.
        """
        kwargs = {
            'stdout': subprocess.PIPE,
            'stderr': subprocess.PIPE,
        }
        if input_data is not None:
            kwargs['stdin'] = subprocess.PIPE
        stdout = []
        stderr = []
        p = subprocess.Popen(cmd, **kwargs)
        # We don't use communicate() here because we may need to
        # get clever with interacting with the command
        t1 = Thread(target=self._reader, args=('stdout', p.stdout, stdout))
        t1.start()
        t2 = Thread(target=self._reader, args=('stderr', p.stderr, stderr))
        t2.start()
        if input_data is not None:
            p.stdin.write(input_data)
            p.stdin.close()

        p.wait()
        t1.join()
        t2.join()
        return p.returncode, stdout, stderr
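
    # Usage sketch for run_command(): run an external command and collect its
    # output streams (the command shown is illustrative).
    #
    #   index = PackageIndex()
    #   rc, out, err = index.run_command(['gpg', '--version'])
    #   if rc != 0:
    #       raise DistlibException('command failed with code %s' % rc)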

    def sign_file(self, filename, signer, sign_password, keystore=None):
        """
        Sign a file.

        :param filename: The pathname to the file to be signed.
        :param signer: The identifier of the signer of the file.
        :param sign_password: The passphrase for the signer's
                              private key used for signing.
        :param keystore: The path to a directory which contains the keys
                         used in signing. If not specified, the instance's
                         ``gpg_home`` attribute is used instead.
        :return: The absolute pathname of the file where the signature is
                 stored.
        """
        cmd, sig_file = self.get_sign_command(filename, signer, sign_password,
                                              keystore)
        rc, stdout, stderr = self.run_command(cmd,
                                              sign_password.encode('utf-8'))
        if rc != 0:
            raise DistlibException('sign command failed with error '
                                   'code %s' % rc)
        return sig_file
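
    # Usage sketch for sign_file() (paths, key id and passphrase are
    # illustrative; assumes a gpg executable was found during __init__):
    #
    #   index = PackageIndex()
    #   sig = index.sign_file('/tmp/example-1.0.tar.gz', 'ABCD1234',
    #                         'passphrase', keystore='/path/to/keyring')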

    def upload_file(self, metadata, filename, signer=None, sign_password=None,
                    filetype='sdist', pyversion='source', keystore=None):
        """
        Upload a release file to the index.

        :param metadata: A :class:`Metadata` instance defining at least a name
                         and version number for the file to be uploaded.
        :param filename: The pathname of the file to be uploaded.
        :param signer: The identifier of the signer of the file.
        :param sign_password: The passphrase for the signer's
                              private key used for signing.
        :param filetype: The type of the file being uploaded. This is the
                        distutils command which produced that file, e.g.
                        ``sdist`` or ``bdist_wheel``.
        :param pyversion: The version of Python which the release relates
                          to. For code compatible with any Python, this would
                          be ``source``, otherwise it would be e.g. ``3.2``.
        :param keystore: The path to a directory which contains the keys
                         used in signing. If not specified, the instance's
                         ``gpg_home`` attribute is used instead.
        :return: The HTTP response received from PyPI upon submission of the
                request.
        """
        self.check_credentials()
        if not os.path.exists(filename):
            raise DistlibException('not found: %s' % filename)
        metadata.validate()
        d = metadata.todict()
        sig_file = None
        if signer:
            if not self.gpg:
                logger.warning('no signing program available - not signed')
            else:
                sig_file = self.sign_file(filename, signer, sign_password,
                                          keystore)
        with open(filename, 'rb') as f:
            file_data = f.read()
        md5_digest = hashlib.md5(file_data).hexdigest()
        sha256_digest = hashlib.sha256(file_data).hexdigest()
        d.update({
            ':action': 'file_upload',
            'protocol_version': '1',
            'filetype': filetype,
            'pyversion': pyversion,
            'md5_digest': md5_digest,
            'sha256_digest': sha256_digest,
        })
        files = [('content', os.path.basename(filename), file_data)]
        if sig_file:
            with open(sig_file, 'rb') as f:
                sig_data = f.read()
            files.append(('gpg_signature', os.path.basename(sig_file),
                         sig_data))
            shutil.rmtree(os.path.dirname(sig_file))
        request = self.encode_request(d.items(), files)
        return self.send_request(request)
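
    # Usage sketch for upload_file() (filenames and signer are illustrative;
    # `md` is assumed to be a Metadata instance for the release):
    #
    #   index = PackageIndex()
    #   response = index.upload_file(md, '/tmp/example-1.0.tar.gz',
    #                                signer='ABCD1234',
    #                                sign_password='passphrase',
    #                                filetype='sdist', pyversion='source')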

    def upload_documentation(self, metadata, doc_dir):
        """
        Upload documentation to the index.

        :param metadata: A :class:`Metadata` instance defining at least a name
                         and version number for the documentation to be
                         uploaded.
        :param doc_dir: The pathname of the directory which contains the
                        documentation. This should be the directory that
                        contains the ``index.html`` for the documentation.
        :return: The HTTP response received from PyPI upon submission of the
                request.
        """
        self.check_credentials()
        if not os.path.isdir(doc_dir):
            raise DistlibException('not a directory: %r' % doc_dir)
        fn = os.path.join(doc_dir, 'index.html')
        if not os.path.exists(fn):
            raise DistlibException('not found: %r' % fn)
        metadata.validate()
        name, version = metadata.name, metadata.version
        zip_data = zip_dir(doc_dir).getvalue()
        fields = [(':action', 'doc_upload'),
                  ('name', name), ('version', version)]
        files = [('content', name, zip_data)]
        request = self.encode_request(fields, files)
        return self.send_request(request)
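
    # Usage sketch for upload_documentation() (`md` is assumed to be a
    # Metadata instance; the directory shown is illustrative and must
    # contain an index.html):
    #
    #   index = PackageIndex()
    #   response = index.upload_documentation(md, '/tmp/example-docs/html')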

    def get_verify_command(self, signature_filename, data_filename,
                           keystore=None):
        """
        Return a suitable command for verifying a file.

        :param signature_filename: The pathname to the file containing the
                                   signature.
        :param data_filename: The pathname to the file containing the
                              signed data.
        :param keystore: The path to a directory which contains the keys
                         used in verification. If not specified, the
                         instance's ``gpg_home`` attribute is used instead.
        :return: The verifying command as a list suitable to be
                 passed to :class:`subprocess.Popen`.
        """
        cmd = [self.gpg, '--status-fd', '2', '--no-tty']
        if keystore is None:
            keystore = self.gpg_home
        if keystore:
            cmd.extend(['--homedir', keystore])
        cmd.extend(['--verify', signature_filename, data_filename])
        logger.debug('invoking: %s', ' '.join(cmd))
        return cmd

    def verify_signature(self, signature_filename, data_filename,
                         keystore=None):
        """
        Verify a signature for a file.

        :param signature_filename: The pathname to the file containing the
                                   signature.
        :param data_filename: The pathname to the file containing the
                              signed data.
        :param keystore: The path to a directory which contains the keys
                         used in verification. If not specified, the
                         instance's ``gpg_home`` attribute is used instead.
        :return: True if the signature was verified, else False.
        """
        if not self.gpg:
            raise DistlibException('verification unavailable because gpg '
                                   'unavailable')
        cmd = self.get_verify_command(signature_filename, data_filename,
                                      keystore)
        rc, stdout, stderr = self.run_command(cmd)
        if rc not in (0, 1):
            raise DistlibException('verify command failed with error '
                                   'code %s' % rc)
        return rc == 0
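
    # Usage sketch for verify_signature() (paths are illustrative; requires a
    # usable gpg and, if needed, a keystore holding the signer's public key):
    #
    #   index = PackageIndex()
    #   ok = index.verify_signature('/tmp/example-1.0.tar.gz.asc',
    #                               '/tmp/example-1.0.tar.gz',
    #                               keystore='/path/to/keyring')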

    def download_file(self, url, destfile, digest=None, reporthook=None):
        """
        This is a convenience method for downloading a file from a URL.
        Normally, this will be a file from the index, though currently
        no check is made for this (i.e. a file can be downloaded from
        anywhere).

        The method is just like the :func:`urlretrieve` function in the
        standard library, except that it allows digest computation to be
        done during download, and checks that the downloaded data
        matches any expected value.

        :param url: The URL of the file to be downloaded (assumed to be
                    available via an HTTP GET request).
        :param destfile: The pathname where the downloaded file is to be
                         saved.
        :param digest: If specified, this must be a (hasher, value)
                       tuple, where hasher is the algorithm used (e.g.
                       ``'md5'``) and ``value`` is the expected value.
        :param reporthook: The same as for :func:`urlretrieve` in the
                           standard library.
        """
        if digest is None:
            digester = None
            logger.debug('No digest specified')
        else:
            if isinstance(digest, (list, tuple)):
                hasher, digest = digest
            else:
                hasher = 'md5'
            digester = getattr(hashlib, hasher)()
            logger.debug('Digest specified: %s' % digest)
        # The following code is equivalent to urlretrieve.
        # We need to do it this way so that we can compute the
        # digest of the file as we go.
        with open(destfile, 'wb') as dfp:
            # addinfourl is not a context manager on 2.x
            # so we have to use try/finally
            sfp = self.send_request(Request(url))
            try:
                headers = sfp.info()
                blocksize = 8192
                size = -1
                read = 0
                blocknum = 0
                if "content-length" in headers:
                    size = int(headers["Content-Length"])
                if reporthook:
                    reporthook(blocknum, blocksize, size)
                while True:
                    block = sfp.read(blocksize)
                    if not block:
                        break
                    read += len(block)
                    dfp.write(block)
                    if digester:
                        digester.update(block)
                    blocknum += 1
                    if reporthook:
                        reporthook(blocknum, blocksize, size)
            finally:
                sfp.close()

        # check that we got the whole file, if we can
        if size >= 0 and read < size:
            raise DistlibException(
                'retrieval incomplete: got only %d out of %d bytes'
                % (read, size))
        # if we have a digest, it must match.
        if digester:
            actual = digester.hexdigest()
            if digest != actual:
                raise DistlibException('%s digest mismatch for %s: expected '
                                       '%s, got %s' % (hasher, destfile,
                                                       digest, actual))
            logger.debug('Digest verified: %s', digest)
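
    # Usage sketch for download_file() (URL, destination path and digest
    # value are illustrative):
    #
    #   index = PackageIndex()
    #   index.download_file('https://example.com/example-1.0.tar.gz',
    #                       '/tmp/example-1.0.tar.gz',
    #                       digest=('sha256', 'expected-hex-digest'))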

    def send_request(self, req):
        """
        Send a standard library :class:`Request` to PyPI and return its
        response.

        :param req: The request to send.
        :return: The HTTP response from PyPI (a standard library HTTPResponse).
        """
        handlers = []
        if self.password_handler:
            handlers.append(self.password_handler)
        if self.ssl_verifier:
            handlers.append(self.ssl_verifier)
        opener = build_opener(*handlers)
        return opener.open(req)

    def encode_request(self, fields, files):
        """
        Encode fields and files for posting to an HTTP server.

        :param fields: The fields to send as a list of (fieldname, value)
                       tuples.
        :param files: The files to send as a list of (fieldname, filename,
                      file_bytes) tuples.
        """
        # Adapted from packaging, which in turn was adapted from
        # http://code.activestate.com/recipes/146306

        parts = []
        boundary = self.boundary
        for k, values in fields:
            if not isinstance(values, (list, tuple)):
                values = [values]

            for v in values:
                parts.extend((
                    b'--' + boundary,
                    ('Content-Disposition: form-data; name="%s"' %
                     k).encode('utf-8'),
                    b'',
                    v.encode('utf-8')))
        for key, filename, value in files:
            parts.extend((
                b'--' + boundary,
                ('Content-Disposition: form-data; name="%s"; filename="%s"' %
                 (key, filename)).encode('utf-8'),
                b'',
                value))

        parts.extend((b'--' + boundary + b'--', b''))

        body = b'\r\n'.join(parts)
        ct = b'multipart/form-data; boundary=' + boundary
        headers = {
            'Content-type': ct,
            'Content-length': str(len(body))
        }
        return Request(self.url, body, headers)
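
    # Sketch of the argument shapes encode_request() expects (values are
    # illustrative):
    #
    #   fields = [(':action', 'file_upload'), ('name', 'example')]
    #   files = [('content', 'example-1.0.tar.gz', b'...archive bytes...')]
    #   request = index.encode_request(fields, files)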

    def search(self, terms, operator=None):
        if isinstance(terms, string_types):
            terms = {'name': terms}
        if self.rpc_proxy is None:
            self.rpc_proxy = ServerProxy(self.url, timeout=3.0)
        return self.rpc_proxy.search(terms, operator or 'and')
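
    # Usage sketch for search(): query the index via XML-RPC (the terms are
    # illustrative). A plain string is treated as a name search.
    #
    #   index = PackageIndex()
    #   hits = index.search({'name': 'example'}, operator='or')
    #   hits = index.search('example')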

site-packages/pip/_vendor/distlib/locators.pyc: [compiled Python bytecode entry; binary content not reproducible as text]

site-packages/pip/_vendor/distlib/__init__.pyo: [compiled Python bytecode entry; binary content not reproducible as text]

site-packages/pip/_vendor/distlib/scripts.pyc: [compiled Python bytecode entry; binary content not reproducible as text]
ZV,site-packages/pip/_vendor/distlib/compat.pycnu[
abc@@sddlmZddlZddlZddlZyddlZWnek
r]dZnXejddkr
ddl	m	Z	e
fZeZ
ddlmZddlZddlZddlmZddlmZmZmZmZmZdd	lmZmZm Z m!Z!m"Z"m#Z#m$Z$d
Zddl%Z%ddl%m&Z&m'Z'm(Z(m)Z)m*Z*m+Z+m,Z,m-Z-m.Z.erddl%m/Z/nddl0Z0ddl1Z1ddl2Z3dd
l4m4Z4ddl5Z5e6Z6ddl7m8Z9ddl7m:Z;da<dZ=nddl>m	Z	e?fZe?Z
ddl>m@ZddlZddlZddlZddlAmZmZmZm=Z=mZm Z mZmZm$Z$ddlBm'Z'mZm&Z&m!Z!m"Z"m*Z*m+Z+m,Z,m-Z-m.Z.erddlBm/Z/nddlCm)Z)m(Z(m#Z#ddlDjEZ0ddlBjFZ%ddlGjEZ1ddl3Z3dd
lHm4Z4ddlIjJZ5eKZ6ddl7m;Z;e9Z9yddlmLZLmMZMWn<ek
rdeNfdYZMddZOdZLnXyddlmPZQWn'ek
r"deRfdYZQnXyddlmSZSWn*ek
rcejTejUBddZSnXdd lVmWZXeYeXd!reXZWn<dd"lVmZZ[d#e[fd$YZZd%eXfd&YZWydd'l\m]Z]Wnek
rd(Z]nXyddl^Z^Wn!ek
r,dd)lm^Z^nXy
e_Z_Wn*e`k
rcdd*lambZbd+Z_nXyejcZcejdZdWnJeek
rejfZgegd,krd-Zhnd.Zhd/Zcd0ZdnXydd1limjZjWnTek
r1dd2lkmlZlmmZmddlZejnd3Zod4Zpd5ZjnXydd6lqmrZrWn!ek
ridd6lsmrZrnXejd7 dTkre4jtZtndd9lqmtZtydd:lamuZuWnkek
rdd;lamvZvydd<lwmxZyWnek
rd=d>ZynXd?evfd@YZunXyddAlzm{Z{Wnek
rQddBZ{nXyddClam|Z|Wnek
ryddDl}m~ZWn!ek
rddDlm~ZnXy ddElmZmZmZWnek
rnXdFefdGYZ|nXyddHlmZmZWnek
rejndIejZdJZdKefdLYZddMZdNefdOYZdPefdQYZdReRfdSYZnXdS(Ui(tabsolute_importNi(tStringIO(tFileTypei(tshutil(turlparset
urlunparseturljointurlsplitt
urlunsplit(turlretrievetquotetunquoteturl2pathnametpathname2urltContentTooShortErrort	splittypecC@s+t|tr!|jd}nt|S(Nsutf-8(t
isinstancetunicodetencodet_quote(ts((s>/usr/lib/python2.7/site-packages/pip/_vendor/distlib/compat.pyR
s(	tRequestturlopentURLErrort	HTTPErrortHTTPBasicAuthHandlertHTTPPasswordMgrtHTTPHandlertHTTPRedirectHandlertbuild_opener(tHTTPSHandler(t
HTMLParser(tifilter(tifilterfalsecC@sYtdkr*ddl}|jdantj|}|rO|jddSd|fS(sJsplituser('user[:passwd]@host[:port]') --> 'user[:passwd]', 'host[:port]'.iNs^(.*)@(.*)$ii(t	_userprogtNonetretcompiletmatchtgroup(thostR$R&((s>/usr/lib/python2.7/site-packages/pip/_vendor/distlib/compat.pyt	splituser4s(t
TextIOWrapper(	RRRR)R
RRRR(
RR	RRR
RRRRR(RRR(tfilterfalse(tmatch_hostnametCertificateErrorR-cB@seZRS((t__name__t
__module__(((s>/usr/lib/python2.7/site-packages/pip/_vendor/distlib/compat.pyR-^sc
C@sSg}|stS|jd}|d|d}}|jd}||krhtdt|n|s|j|jkS|dkr|jdnY|jds|jdr|jtj	|n"|jtj	|j
dd	x$|D]}|jtj	|qWtjd
dj|dtj
}	|	j|S(
[binary data: remainder of compiled bytecode for /usr/lib/python2.7/site-packages/pip/_vendor/distlib/compat.py -- distlib's Python 2/3 compatibility layer (RFC 6125 _dnsname_match and match_hostname, a Container namespace class, shutil.which, ZipFile/ZipExtFile context-manager wrappers, python_implementation, fsencode/fsdecode, tokenize.detect_encoding, recursive_repr, ChainMap, cache_from_source, OrderedDict, valid_ident, ConvertingDict/ConvertingList/ConvertingTuple and BaseConfigurator backports); not recoverable as source text]
[binary data: site-packages/pip/_vendor/distlib/_backport/__init__.pyc -- compiled Python 2.7 bytecode; only the module docstring below is recoverable]
Modules copied from Python 3 standard libraries, for internal use only.

Individual classes and functions are found in d2._backport.misc.  Intended
usage is to always import things missing from 3.1 from that module: the
built-in/stdlib objects will be used if found.
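A minimal sketch of the import pattern this docstring describes: prefer the standard-library object and fall back to the bundled backport only when the interpreter lacks it. The choice of sysconfig and the exact import path are illustrative, taken from the backport files listed below, not a quote of distlib's own code.

```python
# Illustrative pattern only: prefer the standard-library module, fall back to
# the vendored backport when the interpreter (e.g. Python 3.1) does not ship it.
try:
    import sysconfig                                      # stdlib on 2.7 / 3.2+
except ImportError:
    from pip._vendor.distlib._backport import sysconfig  # bundled copy below
```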
[file: site-packages/pip/_vendor/distlib/_backport/sysconfig.cfg -- plain-text install-scheme configuration, reproduced below]
[posix_prefix]
# Configuration directories.  Some of these come straight out of the
# configure script.  They are for implementing the other variables, not to
# be used directly in [resource_locations].
confdir = /etc
datadir = /usr/share
libdir = /usr/lib
statedir = /var
# User resource directory
local = ~/.local/{distribution.name}

stdlib = {base}/lib/python{py_version_short}
platstdlib = {platbase}/lib/python{py_version_short}
purelib = {base}/lib/python{py_version_short}/site-packages
platlib = {platbase}/lib/python{py_version_short}/site-packages
include = {base}/include/python{py_version_short}{abiflags}
platinclude = {platbase}/include/python{py_version_short}{abiflags}
data = {base}

[posix_home]
stdlib = {base}/lib/python
platstdlib = {base}/lib/python
purelib = {base}/lib/python
platlib = {base}/lib/python
include = {base}/include/python
platinclude = {base}/include/python
scripts = {base}/bin
data = {base}

[nt]
stdlib = {base}/Lib
platstdlib = {base}/Lib
purelib = {base}/Lib/site-packages
platlib = {base}/Lib/site-packages
include = {base}/Include
platinclude = {base}/Include
scripts = {base}/Scripts
data = {base}

[os2]
stdlib = {base}/Lib
platstdlib = {base}/Lib
purelib = {base}/Lib/site-packages
platlib = {base}/Lib/site-packages
include = {base}/Include
platinclude = {base}/Include
scripts = {base}/Scripts
data = {base}

[os2_home]
stdlib = {userbase}/lib/python{py_version_short}
platstdlib = {userbase}/lib/python{py_version_short}
purelib = {userbase}/lib/python{py_version_short}/site-packages
platlib = {userbase}/lib/python{py_version_short}/site-packages
include = {userbase}/include/python{py_version_short}
scripts = {userbase}/bin
data = {userbase}

[nt_user]
stdlib = {userbase}/Python{py_version_nodot}
platstdlib = {userbase}/Python{py_version_nodot}
purelib = {userbase}/Python{py_version_nodot}/site-packages
platlib = {userbase}/Python{py_version_nodot}/site-packages
include = {userbase}/Python{py_version_nodot}/Include
scripts = {userbase}/Scripts
data = {userbase}

[posix_user]
stdlib = {userbase}/lib/python{py_version_short}
platstdlib = {userbase}/lib/python{py_version_short}
purelib = {userbase}/lib/python{py_version_short}/site-packages
platlib = {userbase}/lib/python{py_version_short}/site-packages
include = {userbase}/include/python{py_version_short}
scripts = {userbase}/bin
data = {userbase}

[osx_framework_user]
stdlib = {userbase}/lib/python
platstdlib = {userbase}/lib/python
purelib = {userbase}/lib/python/site-packages
platlib = {userbase}/lib/python/site-packages
include = {userbase}/include
scripts = {userbase}/bin
data = {userbase}
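The entries above are templates, not literal paths: names in braces ({base}, {userbase}, {py_version_short}, ...) are substituted at lookup time, and the `_subst_vars` helper in the accompanying sysconfig backport also consults os.environ for names missing from the local variable map, leaving unknown tokens unchanged. A minimal, stand-alone sketch of that substitution follows; the `expand` helper and the example variable values are assumptions for illustration, not distlib's actual code.

```python
import os
import re
import sys

_TOKEN = re.compile(r"\{([^{}]+)\}")

def expand(template, variables):
    """Replace {name} tokens with values from `variables` (or os.environ);
    tokens with no corresponding value are left unchanged."""
    def repl(match):
        name = match.group(1)
        if name in variables:
            return variables[name]
        return os.environ.get(name, match.group(0))
    return _TOKEN.sub(repl, template)

# Expanding the posix_prefix 'stdlib' entry shown above:
local_vars = {
    "base": sys.prefix,
    "py_version_short": "%d.%d" % sys.version_info[:2],
}
print(expand("{base}/lib/python{py_version_short}", local_vars))
# -> e.g. /usr/lib/python2.7
```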
[binary data: site-packages/pip/_vendor/distlib/_backport/sysconfig.pyo -- compiled Python 2.7 bytecode of the sysconfig backport (get_config_h_filename, get_config_var(s), get_makefile_filename, get_path(s), get_path_names, get_platform, get_python_version, get_scheme_names, parse_config_h); reads the sysconfig.cfg shown above; not recoverable as source text]
[binary data: site-packages/pip/_vendor/distlib/_backport/shutil.pyc -- compiled Python 2.7 bytecode of the shutil backport (copyfileobj, copyfile, copymode, copystat, copy, copy2, copytree, move, rmtree, make_archive, get/register/unregister_archive_format, get/register/unregister_unpack_format, unpack_archive, ignore_patterns and the Error/SpecialFileError/ExecError/ReadError/RegistryError exceptions); not recoverable as source text]
[binary data: site-packages/pip/_vendor/distlib/_backport/tarfile.pyo -- compiled Python 2.7 bytecode of the tarfile backport (TarFile, TarInfo, is_tarfile, TarError and related exceptions, plus gzip/bzip2 stream support); not recoverable as source text]
           If tarinfo is given, it is used as the starting point.
        N(RRItindexRuRtnormpathtreversedR~(R}R~Rt	normalizeRnRtmember_name((sI/usr/lib/python2.7/site-packages/pip/_vendor/distlib/_backport/tarfile.pyR	s	cCs6x&tr(|j}|dkrPqqWt|_dS(sWRead through the entire archive file and look for readable
           members.
        N(RJR0RIRo(R}R((sI/usr/lib/python2.7/site-packages/pip/_vendor/distlib/_backport/tarfile.pyR	s
	cCsW|jr"td|jjn|dk	rS|j|krStd|jndS(snCheck if TarFile is still open, and if the operation's mode
           corresponds to TarFile's mode.
        s%s is closedsbad operation for mode %rN(RRNRRgRIRa(R}Ra((sI/usr/lib/python2.7/site-packages/pip/_vendor/distlib/_backport/tarfile.pyR	s	cCs|jr5tjj|jd|j}d}n|j}|}|j|d|dt}|dkr~t	d|n|S(sZFind the target member of a symlink or hardlink member in the
           archive.
        RRRslinkname %r not foundN(
RQRuRRR~RRIRRJR(R}RRtlimitR((sI/usr/lib/python2.7/site-packages/pip/_vendor/distlib/_backport/tarfile.pyR	s 		cCs$|jrt|jSt|SdS(s$Provide an iterator object.
        N(RotiterRntTarIter(R}((sI/usr/lib/python2.7/site-packages/pip/_vendor/distlib/_backport/tarfile.pyR	s	
cCs)||jkr%t|dtjndS(s.Write debugging output to sys.stderr.
        tfileN(RlRRtstderr(R}tleveltmsg((sI/usr/lib/python2.7/site-packages/pip/_vendor/distlib/_backport/tarfile.pyR	scCs|j|S(N(R(R}((sI/usr/lib/python2.7/site-packages/pip/_vendor/distlib/_backport/tarfile.pyt	__enter__	s
cCs?|dkr|jn"|js2|jjnt|_dS(N(RIRRRRJR(R}RRt	traceback((sI/usr/lib/python2.7/site-packages/pip/_vendor/distlib/_backport/tarfile.pyt__exit__	s

	N(9RgRhRiRlRRjRkRmR^R>R_R$RIR%RRRRRR`RR{RwRRRvRRRRRRJRARRRRRRRRRRRRRRRR0RRRRRRRR(((sI/usr/lib/python2.7/site-packages/pip/_vendor/distlib/_backport/tarfile.pyR,sniK
			
	b>&#	&0											1	
					RcBs/eZdZdZdZdZeZRS(sMIterator Class.

       for tarinfo in TarFile(...):
           suite...
    cCs||_d|_dS(s$Construct a TarIter object.
        iN(RR(R}R((sI/usr/lib/python2.7/site-packages/pip/_vendor/distlib/_backport/tarfile.pyR
s	cCs|S(s Return iterator object.
        ((R}((sI/usr/lib/python2.7/site-packages/pip/_vendor/distlib/_backport/tarfile.pyR
scCs}|jjs9|jj}|sjt|j_tqjn1y|jj|j}Wntk
ritnX|jd7_|S(sReturn the next item using TarFile's next() method.
           When all members have been read, set TarFile as _loaded.
        i(RRoR0RJt
StopIterationRnRt
IndexError(R}R((sI/usr/lib/python2.7/site-packages/pip/_vendor/distlib/_backport/tarfile.pyt__next__

s

(RgRhRiRRRR0(((sI/usr/lib/python2.7/site-packages/pip/_vendor/distlib/_backport/tarfile.pyR	s
			cCs7yt|}|jtSWntk
r2tSXdS(sfReturn True if name points to a tar archive that we
       are able to handle, else return False.
    N(R{RRJRR(R~R[((sI/usr/lib/python2.7/site-packages/pip/_vendor/distlib/_backport/tarfile.pyR#
s

(xt
__future__Rt__version__tversiont
__author__t__date__t	__cvsid__t__credits__RRuRRRR8RR3RRRRIR~tNotImplementedErrorRtWindowsErrort	NameErrort__all__tversion_infot__builtin__tbuiltinsR{t_openR!RRRRRRRRRRRRPRTRVRRXtCONTTYPERRRRRR)RR7RR^R-RNRRItsetR;RR.RJtS_IFLNKtS_IFREGRtS_IFDIRRtS_IFIFOtTSUIDtTSGIDtTSVTXtTUREADtTUWRITEtTUEXECtTGREADtTGWRITEtTGEXECtTOREADtTOWRITEtTOEXECR~R_tgetfilesystemencodingR&R*R5R?RHRUR^Rft	ExceptionRRjRkRlRmRnRoRpRqR0RrtobjectRsRRRRRRRRRRh(((sI/usr/lib/python2.7/site-packages/pip/_vendor/distlib/_backport/tarfile.pyts.

	
					
					
					
	
	
		?K*	PK
Zt(8site-packages/pip/_vendor/distlib/_backport/__init__.pyonu[
abc@s
dZdS(sModules copied from Python 3 standard libraries, for internal use only.

Individual classes and functions are found in d2._backport.misc.  Intended
usage is to always import things missing from 3.1 from that module: the
built-in/stdlib objects will be used if found.
N(t__doc__(((sJ/usr/lib/python2.7/site-packages/pip/_vendor/distlib/_backport/__init__.pyttPK
Z/e3site-packages/pip/_vendor/distlib/_backport/misc.pynu[# -*- coding: utf-8 -*-
#
# Copyright (C) 2012 The Python Software Foundation.
# See LICENSE.txt and CONTRIBUTORS.txt.
#
"""Backports for individual classes and functions."""

import os
import sys

__all__ = ['cache_from_source', 'callable', 'fsencode']


try:
    from imp import cache_from_source
except ImportError:
    def cache_from_source(py_file, debug=__debug__):
        ext = debug and 'c' or 'o'
        return py_file + ext


try:
    callable = callable
except NameError:
    from collections import Callable

    def callable(obj):
        return isinstance(obj, Callable)


try:
    fsencode = os.fsencode
except AttributeError:
    def fsencode(filename):
        if isinstance(filename, bytes):
            return filename
        elif isinstance(filename, str):
            return filename.encode(sys.getfilesystemencoding())
        else:
            raise TypeError("expect bytes or str, not %s" %
                            type(filename).__name__)
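
# Usage sketch (illustrative only; _backports_example is a hypothetical helper,
# not part of the distlib API). It exercises the three backports above: the
# expected bytecode path for a source file, a callability check, and a
# filesystem-encoded path.
def _backports_example(py_path):
    cached = cache_from_source(py_path)        # e.g. 'mod.py' -> 'mod.pyc' (or a __pycache__ path)
    is_callable = callable(cache_from_source)  # True
    encoded = fsencode(py_path)                # bytes in the filesystem encoding
    return cached, is_callable, encoded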
[compiled bytecode entries, not reproducible as text: site-packages/pip/_vendor/distlib/_backport/misc.pyc (bytecode for the misc backport above) and site-packages/pip/_vendor/distlib/_backport/tarfile.pyc (bytecode for the backported tarfile module: the TarError exception hierarchy, TarInfo, TarFile, TarIter and is_tarfile())]
site-packages/pip/_vendor/distlib/_backport/shutil.py:
# -*- coding: utf-8 -*-
#
# Copyright (C) 2012 The Python Software Foundation.
# See LICENSE.txt and CONTRIBUTORS.txt.
#
"""Utility functions for copying and archiving files and directory trees.

XXX The functions here don't copy the resource fork or other metadata on Mac.

"""

import os
import sys
import stat
from os.path import abspath
import fnmatch
import collections
import errno
from . import tarfile

try:
    import bz2
    _BZ2_SUPPORTED = True
except ImportError:
    _BZ2_SUPPORTED = False

try:
    from pwd import getpwnam
except ImportError:
    getpwnam = None

try:
    from grp import getgrnam
except ImportError:
    getgrnam = None

__all__ = ["copyfileobj", "copyfile", "copymode", "copystat", "copy", "copy2",
           "copytree", "move", "rmtree", "Error", "SpecialFileError",
           "ExecError", "make_archive", "get_archive_formats",
           "register_archive_format", "unregister_archive_format",
           "get_unpack_formats", "register_unpack_format",
           "unregister_unpack_format", "unpack_archive", "ignore_patterns"]

class Error(EnvironmentError):
    pass

class SpecialFileError(EnvironmentError):
    """Raised when trying to do a kind of operation (e.g. copying) which is
    not supported on a special file (e.g. a named pipe)"""

class ExecError(EnvironmentError):
    """Raised when a command could not be executed"""

class ReadError(EnvironmentError):
    """Raised when an archive cannot be read"""

class RegistryError(Exception):
    """Raised when a registry operation with the archiving
    and unpacking registries fails"""


try:
    WindowsError
except NameError:
    WindowsError = None

def copyfileobj(fsrc, fdst, length=16*1024):
    """copy data from file-like object fsrc to file-like object fdst"""
    while 1:
        buf = fsrc.read(length)
        if not buf:
            break
        fdst.write(buf)

def _samefile(src, dst):
    # Macintosh, Unix.
    if hasattr(os.path, 'samefile'):
        try:
            return os.path.samefile(src, dst)
        except OSError:
            return False

    # All other platforms: check for same pathname.
    return (os.path.normcase(os.path.abspath(src)) ==
            os.path.normcase(os.path.abspath(dst)))

def copyfile(src, dst):
    """Copy data from src to dst"""
    if _samefile(src, dst):
        raise Error("`%s` and `%s` are the same file" % (src, dst))

    for fn in [src, dst]:
        try:
            st = os.stat(fn)
        except OSError:
            # File most likely does not exist
            pass
        else:
            # XXX What about other special files? (sockets, devices...)
            if stat.S_ISFIFO(st.st_mode):
                raise SpecialFileError("`%s` is a named pipe" % fn)

    with open(src, 'rb') as fsrc:
        with open(dst, 'wb') as fdst:
            copyfileobj(fsrc, fdst)

def copymode(src, dst):
    """Copy mode bits from src to dst"""
    if hasattr(os, 'chmod'):
        st = os.stat(src)
        mode = stat.S_IMODE(st.st_mode)
        os.chmod(dst, mode)

def copystat(src, dst):
    """Copy all stat info (mode bits, atime, mtime, flags) from src to dst"""
    st = os.stat(src)
    mode = stat.S_IMODE(st.st_mode)
    if hasattr(os, 'utime'):
        os.utime(dst, (st.st_atime, st.st_mtime))
    if hasattr(os, 'chmod'):
        os.chmod(dst, mode)
    if hasattr(os, 'chflags') and hasattr(st, 'st_flags'):
        try:
            os.chflags(dst, st.st_flags)
        except OSError as why:
            if (not hasattr(errno, 'EOPNOTSUPP') or
                why.errno != errno.EOPNOTSUPP):
                raise

def copy(src, dst):
    """Copy data and mode bits ("cp src dst").

    The destination may be a directory.

    """
    if os.path.isdir(dst):
        dst = os.path.join(dst, os.path.basename(src))
    copyfile(src, dst)
    copymode(src, dst)

def copy2(src, dst):
    """Copy data and all stat info ("cp -p src dst").

    The destination may be a directory.

    """
    if os.path.isdir(dst):
        dst = os.path.join(dst, os.path.basename(src))
    copyfile(src, dst)
    copystat(src, dst)

def ignore_patterns(*patterns):
    """Function that can be used as copytree() ignore parameter.

    Patterns is a sequence of glob-style patterns
    that are used to exclude files"""
    def _ignore_patterns(path, names):
        ignored_names = []
        for pattern in patterns:
            ignored_names.extend(fnmatch.filter(names, pattern))
        return set(ignored_names)
    return _ignore_patterns
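
# Usage sketch (illustrative; _example_ignore and the pattern names are
# hypothetical). The callable built by ignore_patterns() has the (src, names)
# signature that copytree() expects and returns the set of names to skip.
def _example_ignore(path, names):
    skip = ignore_patterns('*.pyc', '*~', '.git')
    return skip(path, names)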

def copytree(src, dst, symlinks=False, ignore=None, copy_function=copy2,
             ignore_dangling_symlinks=False):
    """Recursively copy a directory tree.

    The destination directory must not already exist.
    If exception(s) occur, an Error is raised with a list of reasons.

    If the optional symlinks flag is true, symbolic links in the
    source tree result in symbolic links in the destination tree; if
    it is false, the contents of the files pointed to by symbolic
    links are copied. If the file pointed to by the symlink doesn't
    exist, an exception will be added to the list of errors raised in
    an Error exception at the end of the copy process.

    You can set the optional ignore_dangling_symlinks flag to true if you
    want to silence this exception. Notice that this has no effect on
    platforms that don't support os.symlink.

    The optional ignore argument is a callable. If given, it
    is called with the `src` parameter, which is the directory
    being visited by copytree(), and `names` which is the list of
    `src` contents, as returned by os.listdir():

        callable(src, names) -> ignored_names

    Since copytree() is called recursively, the callable will be
    called once for each directory that is copied. It returns a
    list of names relative to the `src` directory that should
    not be copied.

    The optional copy_function argument is a callable that will be used
    to copy each file. It will be called with the source path and the
    destination path as arguments. By default, copy2() is used, but any
    function that supports the same signature (like copy()) can be used.

    """
    names = os.listdir(src)
    if ignore is not None:
        ignored_names = ignore(src, names)
    else:
        ignored_names = set()

    os.makedirs(dst)
    errors = []
    for name in names:
        if name in ignored_names:
            continue
        srcname = os.path.join(src, name)
        dstname = os.path.join(dst, name)
        try:
            if os.path.islink(srcname):
                linkto = os.readlink(srcname)
                if symlinks:
                    os.symlink(linkto, dstname)
                else:
                    # ignore dangling symlink if the flag is on
                    if not os.path.exists(linkto) and ignore_dangling_symlinks:
                        continue
                    # otherwise let the copy occur; copy2 will raise an error
                    copy_function(srcname, dstname)
            elif os.path.isdir(srcname):
                copytree(srcname, dstname, symlinks, ignore, copy_function)
            else:
                # Will raise a SpecialFileError for unsupported file types
                copy_function(srcname, dstname)
        # catch the Error from the recursive copytree so that we can
        # continue with other files
        except Error as err:
            errors.extend(err.args[0])
        except EnvironmentError as why:
            errors.append((srcname, dstname, str(why)))
    try:
        copystat(src, dst)
    except OSError as why:
        if WindowsError is not None and isinstance(why, WindowsError):
            # Copying file access times may fail on Windows
            pass
        else:
            # record the failure as a single (src, dst, reason) tuple
            errors.append((src, dst, str(why)))
    if errors:
        raise Error(errors)
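
# Usage sketch (illustrative; _example_copytree and its pattern names are
# hypothetical). copy() is passed as copy_function so only data and mode bits
# are copied, and ignore_patterns() supplies the ignore callable described in
# the docstring above.
def _example_copytree(src, dst):
    copytree(src, dst,
             ignore=ignore_patterns('*.tmp', 'build'),
             copy_function=copy)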

def rmtree(path, ignore_errors=False, onerror=None):
    """Recursively delete a directory tree.

    If ignore_errors is set, errors are ignored; otherwise, if onerror
    is set, it is called to handle the error with arguments (func,
    path, exc_info) where func is os.listdir, os.remove, or os.rmdir;
    path is the argument to that function that caused it to fail; and
    exc_info is a tuple returned by sys.exc_info().  If ignore_errors
    is false and onerror is None, an exception is raised.

    """
    if ignore_errors:
        def onerror(*args):
            pass
    elif onerror is None:
        def onerror(*args):
            raise
    try:
        if os.path.islink(path):
            # symlinks to directories are forbidden, see bug #1669
            raise OSError("Cannot call rmtree on a symbolic link")
    except OSError:
        onerror(os.path.islink, path, sys.exc_info())
        # can't continue even if onerror hook returns
        return
    names = []
    try:
        names = os.listdir(path)
    except os.error:
        onerror(os.listdir, path, sys.exc_info())
    for name in names:
        fullname = os.path.join(path, name)
        try:
            mode = os.lstat(fullname).st_mode
        except os.error:
            mode = 0
        if stat.S_ISDIR(mode):
            rmtree(fullname, ignore_errors, onerror)
        else:
            try:
                os.remove(fullname)
            except os.error:
                onerror(os.remove, fullname, sys.exc_info())
    try:
        os.rmdir(path)
    except os.error:
        onerror(os.rmdir, path, sys.exc_info())
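
# Usage sketch (illustrative; _example_rmtree_onerror is a hypothetical
# handler). An onerror callback receives the failing os function, the path it
# was applied to and sys.exc_info(); this one clears the read-only bit and
# retries once, e.g. rmtree(some_dir, onerror=_example_rmtree_onerror).
def _example_rmtree_onerror(func, path, exc_info):
    os.chmod(path, stat.S_IWRITE)
    func(path)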


def _basename(path):
    # A basename() variant which first strips the trailing slash, if present.
    # Thus we always get the last component of the path, even for directories.
    return os.path.basename(path.rstrip(os.path.sep))

def move(src, dst):
    """Recursively move a file or directory to another location. This is
    similar to the Unix "mv" command.

    If the destination is a directory or a symlink to a directory, the source
    is moved inside the directory. The destination path must not already
    exist.

    If the destination already exists but is not a directory, it may be
    overwritten depending on os.rename() semantics.

    If the destination is on our current filesystem, then rename() is used.
    Otherwise, src is copied to the destination and then removed.
    A lot more could be done here...  A look at a mv.c shows a lot of
    the issues this implementation glosses over.

    """
    real_dst = dst
    if os.path.isdir(dst):
        if _samefile(src, dst):
            # We might be on a case insensitive filesystem,
            # perform the rename anyway.
            os.rename(src, dst)
            return

        real_dst = os.path.join(dst, _basename(src))
        if os.path.exists(real_dst):
            raise Error("Destination path '%s' already exists" % real_dst)
    try:
        os.rename(src, real_dst)
    except OSError:
        if os.path.isdir(src):
            if _destinsrc(src, dst):
                raise Error("Cannot move a directory '%s' into itself '%s'." % (src, dst))
            copytree(src, real_dst, symlinks=True)
            rmtree(src)
        else:
            copy2(src, real_dst)
            os.unlink(src)
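
# Usage sketch (illustrative; the helper and its arguments are hypothetical).
# When the destination is an existing directory the source is moved inside it,
# mirroring the Unix "mv" behaviour described above.
def _example_move_into_dir(src_file, dest_dir):
    move(src_file, dest_dir)
    return os.path.join(dest_dir, _basename(src_file))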

def _destinsrc(src, dst):
    src = abspath(src)
    dst = abspath(dst)
    if not src.endswith(os.path.sep):
        src += os.path.sep
    if not dst.endswith(os.path.sep):
        dst += os.path.sep
    return dst.startswith(src)

def _get_gid(name):
    """Returns a gid, given a group name."""
    if getgrnam is None or name is None:
        return None
    try:
        result = getgrnam(name)
    except KeyError:
        result = None
    if result is not None:
        return result[2]
    return None

def _get_uid(name):
    """Returns an uid, given a user name."""
    if getpwnam is None or name is None:
        return None
    try:
        result = getpwnam(name)
    except KeyError:
        result = None
    if result is not None:
        return result[2]
    return None

def _make_tarball(base_name, base_dir, compress="gzip", verbose=0, dry_run=0,
                  owner=None, group=None, logger=None):
    """Create a (possibly compressed) tar file from all the files under
    'base_dir'.

    'compress' must be "gzip" (the default), "bzip2", or None.

    'owner' and 'group' can be used to define an owner and a group for the
    archive that is being built. If not provided, the current owner and group
    will be used.

    The output tar file will be named 'base_name' +  ".tar", possibly plus
    the appropriate compression extension (".gz", or ".bz2").

    Returns the output filename.
    """
    tar_compression = {'gzip': 'gz', None: ''}
    compress_ext = {'gzip': '.gz'}

    if _BZ2_SUPPORTED:
        tar_compression['bzip2'] = 'bz2'
        compress_ext['bzip2'] = '.bz2'

    # flags for compression program, each element of list will be an argument
    if compress is not None and compress not in compress_ext:
        raise ValueError("bad value for 'compress', or compression format not "
                         "supported : {0}".format(compress))

    archive_name = base_name + '.tar' + compress_ext.get(compress, '')
    archive_dir = os.path.dirname(archive_name)

    if not os.path.exists(archive_dir):
        if logger is not None:
            logger.info("creating %s", archive_dir)
        if not dry_run:
            os.makedirs(archive_dir)

    # creating the tarball
    if logger is not None:
        logger.info('Creating tar archive')

    uid = _get_uid(owner)
    gid = _get_gid(group)

    def _set_uid_gid(tarinfo):
        if gid is not None:
            tarinfo.gid = gid
            tarinfo.gname = group
        if uid is not None:
            tarinfo.uid = uid
            tarinfo.uname = owner
        return tarinfo

    if not dry_run:
        tar = tarfile.open(archive_name, 'w|%s' % tar_compression[compress])
        try:
            tar.add(base_dir, filter=_set_uid_gid)
        finally:
            tar.close()

    return archive_name

def _call_external_zip(base_dir, zip_filename, verbose=False, dry_run=False):
    # XXX see if we want to keep an external call here
    if verbose:
        zipoptions = "-r"
    else:
        zipoptions = "-rq"
    from distutils.errors import DistutilsExecError
    from distutils.spawn import spawn
    try:
        spawn(["zip", zipoptions, zip_filename, base_dir], dry_run=dry_run)
    except DistutilsExecError:
        # XXX really should distinguish between "couldn't find
        # external 'zip' command" and "zip failed".
        raise ExecError("unable to create zip file '%s': "
            "could neither import the 'zipfile' module nor "
            "find a standalone zip utility" % zip_filename)

def _make_zipfile(base_name, base_dir, verbose=0, dry_run=0, logger=None):
    """Create a zip file from all the files under 'base_dir'.

    The output zip file will be named 'base_name' + ".zip".  Uses either the
    "zipfile" Python module (if available) or the InfoZIP "zip" utility
    (if installed and found on the default search path).  If neither tool is
    available, raises ExecError.  Returns the name of the output zip
    file.
    """
    zip_filename = base_name + ".zip"
    archive_dir = os.path.dirname(base_name)

    if not os.path.exists(archive_dir):
        if logger is not None:
            logger.info("creating %s", archive_dir)
        if not dry_run:
            os.makedirs(archive_dir)

    # If zipfile module is not available, try spawning an external 'zip'
    # command.
    try:
        import zipfile
    except ImportError:
        zipfile = None

    if zipfile is None:
        _call_external_zip(base_dir, zip_filename, verbose, dry_run)
    else:
        if logger is not None:
            logger.info("creating '%s' and adding '%s' to it",
                        zip_filename, base_dir)

        if not dry_run:
            zip = zipfile.ZipFile(zip_filename, "w",
                                  compression=zipfile.ZIP_DEFLATED)

            for dirpath, dirnames, filenames in os.walk(base_dir):
                for name in filenames:
                    path = os.path.normpath(os.path.join(dirpath, name))
                    if os.path.isfile(path):
                        zip.write(path, path)
                        if logger is not None:
                            logger.info("adding '%s'", path)
            zip.close()

    return zip_filename

_ARCHIVE_FORMATS = {
    'gztar': (_make_tarball, [('compress', 'gzip')], "gzip'ed tar-file"),
    'bztar': (_make_tarball, [('compress', 'bzip2')], "bzip2'ed tar-file"),
    'tar':   (_make_tarball, [('compress', None)], "uncompressed tar file"),
    'zip':   (_make_zipfile, [], "ZIP file"),
    }

if _BZ2_SUPPORTED:
    _ARCHIVE_FORMATS['bztar'] = (_make_tarball, [('compress', 'bzip2')],
                                "bzip2'ed tar-file")

def get_archive_formats():
    """Returns a list of supported formats for archiving and unarchiving.

    Each element of the returned sequence is a tuple (name, description)
    """
    formats = [(name, registry[2]) for name, registry in
               _ARCHIVE_FORMATS.items()]
    formats.sort()
    return formats

def register_archive_format(name, function, extra_args=None, description=''):
    """Registers an archive format.

    name is the name of the format. function is the callable that will be
    used to create archives. If provided, extra_args is a sequence of
    (name, value) tuples that will be passed as arguments to the callable.
    description can be provided to describe the format, and will be returned
    by the get_archive_formats() function.
    """
    if extra_args is None:
        extra_args = []
    if not isinstance(function, collections.Callable):
        raise TypeError('The %s object is not callable' % function)
    if not isinstance(extra_args, (tuple, list)):
        raise TypeError('extra_args needs to be a sequence')
    for element in extra_args:
        if not isinstance(element, (tuple, list)) or len(element) !=2:
            raise TypeError('extra_args elements are : (arg_name, value)')

    _ARCHIVE_FORMATS[name] = (function, extra_args, description)
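
# Usage sketch (not part of the original module; names are hypothetical):
# registering a custom archiver.  The callable receives (base_name, base_dir)
# plus keyword arguments such as dry_run, logger, owner and group, and should
# return the name of the archive it created.
#
#     def _make_txt_listing(base_name, base_dir, **kwargs):
#         out = base_name + ".txt"
#         with open(out, "w") as f:
#             for root, dirs, files in os.walk(base_dir):
#                 for fname in files:
#                     f.write(os.path.join(root, fname) + "\n")
#         return out
#
#     register_archive_format('listing', _make_txt_listing,
#                             description="plain-text file listing")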

def unregister_archive_format(name):
    del _ARCHIVE_FORMATS[name]

def make_archive(base_name, format, root_dir=None, base_dir=None, verbose=0,
                 dry_run=0, owner=None, group=None, logger=None):
    """Create an archive file (eg. zip or tar).

    'base_name' is the name of the file to create, minus any format-specific
    extension; 'format' is the archive format: one of "zip", "tar", "bztar"
    or "gztar".

    'root_dir' is a directory that will be the root directory of the
    archive; i.e. we typically chdir into 'root_dir' before creating the
    archive.  'base_dir' is the directory where we start archiving from;
    i.e. 'base_dir' will be the common prefix of all files and
    directories in the archive.  'root_dir' and 'base_dir' both default
    to the current directory.  Returns the name of the archive file.

    'owner' and 'group' are used when creating a tar archive. By default,
    uses the current owner and group.
    """
    save_cwd = os.getcwd()
    if root_dir is not None:
        if logger is not None:
            logger.debug("changing into '%s'", root_dir)
        base_name = os.path.abspath(base_name)
        if not dry_run:
            os.chdir(root_dir)

    if base_dir is None:
        base_dir = os.curdir

    kwargs = {'dry_run': dry_run, 'logger': logger}

    try:
        format_info = _ARCHIVE_FORMATS[format]
    except KeyError:
        raise ValueError("unknown archive format '%s'" % format)

    func = format_info[0]
    for arg, val in format_info[1]:
        kwargs[arg] = val

    if format != 'zip':
        kwargs['owner'] = owner
        kwargs['group'] = group

    try:
        filename = func(base_name, base_dir, **kwargs)
    finally:
        if root_dir is not None:
            if logger is not None:
                logger.debug("changing back to '%s'", save_cwd)
            os.chdir(save_cwd)

    return filename
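
# Usage sketch (not part of the original module; paths are hypothetical):
#
#     # Pack the tree /srv/project into /tmp/project.tar.gz, with archive
#     # members rooted at 'project/'.
#     archive = make_archive('/tmp/project', 'gztar',
#                            root_dir='/srv', base_dir='project')
#     # archive == '/tmp/project.tar.gz'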


def get_unpack_formats():
    """Returns a list of supported formats for unpacking.

    Each element of the returned sequence is a tuple
    (name, extensions, description)
    """
    formats = [(name, info[0], info[3]) for name, info in
               _UNPACK_FORMATS.items()]
    formats.sort()
    return formats

def _check_unpack_options(extensions, function, extra_args):
    """Checks what gets registered as an unpacker."""
    # first make sure no other unpacker is registered for this extension
    existing_extensions = {}
    for name, info in _UNPACK_FORMATS.items():
        for ext in info[0]:
            existing_extensions[ext] = name

    for extension in extensions:
        if extension in existing_extensions:
            msg = '%s is already registered for "%s"'
            raise RegistryError(msg % (extension,
                                       existing_extensions[extension]))

    if not isinstance(function, collections.Callable):
        raise TypeError('The registered function must be a callable')


def register_unpack_format(name, extensions, function, extra_args=None,
                           description=''):
    """Registers an unpack format.

    `name` is the name of the format. `extensions` is a list of extensions
    corresponding to the format.

    `function` is the callable that will be
    used to unpack archives. The callable will receive archives to unpack.
    If it's unable to handle an archive, it needs to raise a ReadError
    exception.

    If provided, `extra_args` is a sequence of
    (name, value) tuples that will be passed as arguments to the callable.
    description can be provided to describe the format, and will be returned
    by the get_unpack_formats() function.
    """
    if extra_args is None:
        extra_args = []
    _check_unpack_options(extensions, function, extra_args)
    _UNPACK_FORMATS[name] = extensions, function, extra_args, description
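
# Usage sketch (not part of the original module; names are hypothetical):
# registering an unpacker for a '.listing' extension.  The callable receives
# the archive filename and the extraction directory and must raise ReadError
# when it cannot handle the file.
#
#     def _unpack_listing(filename, extract_dir):
#         if not filename.endswith('.listing'):
#             raise ReadError("%s is not a listing file" % filename)
#         # ... create files under extract_dir ...
#
#     register_unpack_format('listing', ['.listing'], _unpack_listing,
#                            description="plain-text file listing")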

def unregister_unpack_format(name):
    """Removes the pack format from the registry."""
    del _UNPACK_FORMATS[name]

def _ensure_directory(path):
    """Ensure that the parent directory of `path` exists"""
    dirname = os.path.dirname(path)
    if not os.path.isdir(dirname):
        os.makedirs(dirname)

def _unpack_zipfile(filename, extract_dir):
    """Unpack zip `filename` to `extract_dir`
    """
    try:
        import zipfile
    except ImportError:
        raise ReadError('zlib not supported, cannot unpack this archive.')

    if not zipfile.is_zipfile(filename):
        raise ReadError("%s is not a zip file" % filename)

    zip = zipfile.ZipFile(filename)
    try:
        for info in zip.infolist():
            name = info.filename

            # don't extract absolute paths or ones with .. in them
            if name.startswith('/') or '..' in name:
                continue

            target = os.path.join(extract_dir, *name.split('/'))
            if not target:
                continue

            _ensure_directory(target)
            if not name.endswith('/'):
                # file
                data = zip.read(info.filename)
                f = open(target, 'wb')
                try:
                    f.write(data)
                finally:
                    f.close()
                    del data
    finally:
        zip.close()

def _unpack_tarfile(filename, extract_dir):
    """Unpack tar/tar.gz/tar.bz2 `filename` to `extract_dir`
    """
    try:
        tarobj = tarfile.open(filename)
    except tarfile.TarError:
        raise ReadError(
            "%s is not a compressed or uncompressed tar file" % filename)
    try:
        tarobj.extractall(extract_dir)
    finally:
        tarobj.close()

_UNPACK_FORMATS = {
    'gztar': (['.tar.gz', '.tgz'], _unpack_tarfile, [], "gzip'ed tar-file"),
    'tar':   (['.tar'], _unpack_tarfile, [], "uncompressed tar file"),
    'zip':   (['.zip'], _unpack_zipfile, [], "ZIP file")
    }

if _BZ2_SUPPORTED:
    _UNPACK_FORMATS['bztar'] = (['.bz2'], _unpack_tarfile, [],
                                "bzip2'ed tar-file")

def _find_unpack_format(filename):
    for name, info in _UNPACK_FORMATS.items():
        for extension in info[0]:
            if filename.endswith(extension):
                return name
    return None

def unpack_archive(filename, extract_dir=None, format=None):
    """Unpack an archive.

    `filename` is the name of the archive.

    `extract_dir` is the name of the target directory, where the archive
    is unpacked. If not provided, the current working directory is used.

    `format` is the archive format: one of "zip", "tar", or "gztar". Or any
    other registered format. If not provided, unpack_archive will use the
    filename extension and see if an unpacker was registered for that
    extension.

    If no unpacker is registered for the filename's extension, a ReadError
    is raised; an unknown explicit `format` raises ValueError.
    """
    if extract_dir is None:
        extract_dir = os.getcwd()

    if format is not None:
        try:
            format_info = _UNPACK_FORMATS[format]
        except KeyError:
            raise ValueError("Unknown unpack format '{0}'".format(format))

        func = format_info[1]
        func(filename, extract_dir, **dict(format_info[2]))
    else:
        # we need to look at the registered unpackers supported extensions
        format = _find_unpack_format(filename)
        if format is None:
            raise ReadError("Unknown archive format '{0}'".format(filename))

        func = _UNPACK_FORMATS[format][1]
        kwargs = dict(_UNPACK_FORMATS[format][2])
        func(filename, extract_dir, **kwargs)
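
# Usage sketch (not part of the original module; paths are hypothetical):
#
#     # Extension-based dispatch: '.tar.gz' resolves to the 'gztar' unpacker.
#     unpack_archive('/tmp/project.tar.gz', '/tmp/unpacked')
#
#     # Explicit format, ignoring the filename extension:
#     unpack_archive('/tmp/blob.dat', '/tmp/unpacked', format='zip')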

site-packages/pip/_vendor/distlib/_backport/__init__.py:

"""Modules copied from Python 3 standard libraries, for internal use only.

Individual classes and functions are found in d2._backport.misc.  Intended
usage is to always import things missing from 3.1 from that module: the
built-in/stdlib objects will be used if found.
"""

site-packages/pip/_vendor/distlib/_backport/tarfile.py:

#-------------------------------------------------------------------
# tarfile.py
#-------------------------------------------------------------------
# Copyright (C) 2002 Lars Gustaebel <lars@gustaebel.de>
# All rights reserved.
#
# Permission  is  hereby granted,  free  of charge,  to  any person
# obtaining a  copy of  this software  and associated documentation
# files  (the  "Software"),  to   deal  in  the  Software   without
# restriction,  including  without limitation  the  rights to  use,
# copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies  of  the  Software,  and to  permit  persons  to  whom the
# Software  is  furnished  to  do  so,  subject  to  the  following
# conditions:
#
# The above copyright  notice and this  permission notice shall  be
# included in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS  IS", WITHOUT WARRANTY OF ANY  KIND,
# EXPRESS OR IMPLIED, INCLUDING  BUT NOT LIMITED TO  THE WARRANTIES
# OF  MERCHANTABILITY,  FITNESS   FOR  A  PARTICULAR   PURPOSE  AND
# NONINFRINGEMENT.  IN  NO  EVENT SHALL  THE  AUTHORS  OR COPYRIGHT
# HOLDERS  BE LIABLE  FOR ANY  CLAIM, DAMAGES  OR OTHER  LIABILITY,
# WHETHER  IN AN  ACTION OF  CONTRACT, TORT  OR OTHERWISE,  ARISING
# FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
# OTHER DEALINGS IN THE SOFTWARE.
#
from __future__ import print_function

"""Read from and write to tar format archives.
"""

__version__ = "$Revision$"

version     = "0.9.0"
__author__  = "Lars Gust\u00e4bel (lars@gustaebel.de)"
__date__    = "$Date: 2011-02-25 17:42:01 +0200 (Fri, 25 Feb 2011) $"
__cvsid__   = "$Id: tarfile.py 88586 2011-02-25 15:42:01Z marc-andre.lemburg $"
__credits__ = "Gustavo Niemeyer, Niels Gust\u00e4bel, Richard Townsend."

#---------
# Imports
#---------
import sys
import os
import stat
import errno
import time
import struct
import copy
import re

try:
    import grp, pwd
except ImportError:
    grp = pwd = None

# os.symlink on Windows prior to 6.0 raises NotImplementedError
symlink_exception = (AttributeError, NotImplementedError)
try:
    # WindowsError (1314) will be raised if the caller does not hold the
    # SeCreateSymbolicLinkPrivilege privilege
    symlink_exception += (WindowsError,)
except NameError:
    pass

# from tarfile import *
__all__ = ["TarFile", "TarInfo", "is_tarfile", "TarError"]

if sys.version_info[0] < 3:
    import __builtin__ as builtins
else:
    import builtins

_open = builtins.open   # Since 'open' is TarFile.open

#---------------------------------------------------------
# tar constants
#---------------------------------------------------------
NUL = b"\0"                     # the null character
BLOCKSIZE = 512                 # length of processing blocks
RECORDSIZE = BLOCKSIZE * 20     # length of records
GNU_MAGIC = b"ustar  \0"        # magic gnu tar string
POSIX_MAGIC = b"ustar\x0000"    # magic posix tar string

LENGTH_NAME = 100               # maximum length of a filename
LENGTH_LINK = 100               # maximum length of a linkname
LENGTH_PREFIX = 155             # maximum length of the prefix field

REGTYPE = b"0"                  # regular file
AREGTYPE = b"\0"                # regular file
LNKTYPE = b"1"                  # link (inside tarfile)
SYMTYPE = b"2"                  # symbolic link
CHRTYPE = b"3"                  # character special device
BLKTYPE = b"4"                  # block special device
DIRTYPE = b"5"                  # directory
FIFOTYPE = b"6"                 # fifo special device
CONTTYPE = b"7"                 # contiguous file

GNUTYPE_LONGNAME = b"L"         # GNU tar longname
GNUTYPE_LONGLINK = b"K"         # GNU tar longlink
GNUTYPE_SPARSE = b"S"           # GNU tar sparse file

XHDTYPE = b"x"                  # POSIX.1-2001 extended header
XGLTYPE = b"g"                  # POSIX.1-2001 global header
SOLARIS_XHDTYPE = b"X"          # Solaris extended header

USTAR_FORMAT = 0                # POSIX.1-1988 (ustar) format
GNU_FORMAT = 1                  # GNU tar format
PAX_FORMAT = 2                  # POSIX.1-2001 (pax) format
DEFAULT_FORMAT = GNU_FORMAT

#---------------------------------------------------------
# tarfile constants
#---------------------------------------------------------
# File types that tarfile supports:
SUPPORTED_TYPES = (REGTYPE, AREGTYPE, LNKTYPE,
                   SYMTYPE, DIRTYPE, FIFOTYPE,
                   CONTTYPE, CHRTYPE, BLKTYPE,
                   GNUTYPE_LONGNAME, GNUTYPE_LONGLINK,
                   GNUTYPE_SPARSE)

# File types that will be treated as a regular file.
REGULAR_TYPES = (REGTYPE, AREGTYPE,
                 CONTTYPE, GNUTYPE_SPARSE)

# File types that are part of the GNU tar format.
GNU_TYPES = (GNUTYPE_LONGNAME, GNUTYPE_LONGLINK,
             GNUTYPE_SPARSE)

# Fields from a pax header that override a TarInfo attribute.
PAX_FIELDS = ("path", "linkpath", "size", "mtime",
              "uid", "gid", "uname", "gname")

# Fields from a pax header that are affected by hdrcharset.
PAX_NAME_FIELDS = set(("path", "linkpath", "uname", "gname"))

# Fields in a pax header that are numbers, all other fields
# are treated as strings.
PAX_NUMBER_FIELDS = {
    "atime": float,
    "ctime": float,
    "mtime": float,
    "uid": int,
    "gid": int,
    "size": int
}

#---------------------------------------------------------
# Bits used in the mode field, values in octal.
#---------------------------------------------------------
S_IFLNK = 0o120000        # symbolic link
S_IFREG = 0o100000        # regular file
S_IFBLK = 0o060000        # block device
S_IFDIR = 0o040000        # directory
S_IFCHR = 0o020000        # character device
S_IFIFO = 0o010000        # fifo

TSUID   = 0o4000          # set UID on execution
TSGID   = 0o2000          # set GID on execution
TSVTX   = 0o1000          # reserved

TUREAD  = 0o400           # read by owner
TUWRITE = 0o200           # write by owner
TUEXEC  = 0o100           # execute/search by owner
TGREAD  = 0o040           # read by group
TGWRITE = 0o020           # write by group
TGEXEC  = 0o010           # execute/search by group
TOREAD  = 0o004           # read by other
TOWRITE = 0o002           # write by other
TOEXEC  = 0o001           # execute/search by other

#---------------------------------------------------------
# initialization
#---------------------------------------------------------
if os.name in ("nt", "ce"):
    ENCODING = "utf-8"
else:
    ENCODING = sys.getfilesystemencoding()

#---------------------------------------------------------
# Some useful functions
#---------------------------------------------------------

def stn(s, length, encoding, errors):
    """Convert a string to a null-terminated bytes object.
    """
    s = s.encode(encoding, errors)
    return s[:length] + (length - len(s)) * NUL

def nts(s, encoding, errors):
    """Convert a null-terminated bytes object to a string.
    """
    p = s.find(b"\0")
    if p != -1:
        s = s[:p]
    return s.decode(encoding, errors)
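
# Example (not part of the original module): stn() pads or truncates to the
# requested field width and nts() stops at the first NUL, so short names
# round-trip:
#
#     stn("foo", 8, "utf-8", "strict")                    # -> b'foo\x00\x00\x00\x00\x00'
#     nts(b'foo\x00\x00\x00\x00\x00', "utf-8", "strict")  # -> 'foo'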

def nti(s):
    """Convert a number field to a python number.
    """
    # There are two possible encodings for a number field, see
    # itn() below.
    if s[0] != chr(0o200):
        try:
            n = int(nts(s, "ascii", "strict") or "0", 8)
        except ValueError:
            raise InvalidHeaderError("invalid header")
    else:
        n = 0
        for i in range(len(s) - 1):
            n <<= 8
            n += ord(s[i + 1])
    return n

def itn(n, digits=8, format=DEFAULT_FORMAT):
    """Convert a python number to a number field.
    """
    # POSIX 1003.1-1988 requires numbers to be encoded as a string of
    # octal digits followed by a null-byte, this allows values up to
    # (8**(digits-1))-1. GNU tar allows storing numbers greater than
    # that if necessary. A leading 0o200 byte indicates this particular
    # encoding, the following digits-1 bytes are a big-endian
    # representation. This allows values up to (256**(digits-1))-1.
    if 0 <= n < 8 ** (digits - 1):
        s = ("%0*o" % (digits - 1, n)).encode("ascii") + NUL
    else:
        if format != GNU_FORMAT or n >= 256 ** (digits - 1):
            raise ValueError("overflow in number field")

        if n < 0:
            # XXX We mimic GNU tar's behaviour with negative numbers,
            # this could raise OverflowError.
            n = struct.unpack("L", struct.pack("l", n))[0]

        s = bytearray()
        for i in range(digits - 1):
            s.insert(0, n & 0o377)
            n >>= 8
        s.insert(0, 0o200)
    return s
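
# Example (not part of the original module): small values are stored as a
# NUL-terminated octal string and round-trip through nti():
#
#     itn(511)             # -> b'0000777\x00'
#     nti(b'0000777\x00')  # -> 511
#
# Values that do not fit in digits-1 octal characters fall back to the GNU
# base-256 encoding marked by a leading 0o200 byte; nti() decodes both forms.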

def calc_chksums(buf):
    """Calculate the checksum for a member's header by summing up all
       characters except for the chksum field which is treated as if
       it was filled with spaces. According to the GNU tar sources,
       some tars (Sun and NeXT) calculate chksum with signed char,
       which will be different if there are chars in the buffer with
       the high bit set. So we calculate two checksums, unsigned and
       signed.
    """
    unsigned_chksum = 256 + sum(struct.unpack("148B", buf[:148]) + struct.unpack("356B", buf[156:512]))
    signed_chksum = 256 + sum(struct.unpack("148b", buf[:148]) + struct.unpack("356b", buf[156:512]))
    return unsigned_chksum, signed_chksum

def copyfileobj(src, dst, length=None):
    """Copy length bytes from fileobj src to fileobj dst.
       If length is None, copy the entire content.
    """
    if length == 0:
        return
    if length is None:
        while True:
            buf = src.read(16*1024)
            if not buf:
                break
            dst.write(buf)
        return

    BUFSIZE = 16 * 1024
    blocks, remainder = divmod(length, BUFSIZE)
    for b in range(blocks):
        buf = src.read(BUFSIZE)
        if len(buf) < BUFSIZE:
            raise IOError("end of file reached")
        dst.write(buf)

    if remainder != 0:
        buf = src.read(remainder)
        if len(buf) < remainder:
            raise IOError("end of file reached")
        dst.write(buf)
    return

filemode_table = (
    ((S_IFLNK,      "l"),
     (S_IFREG,      "-"),
     (S_IFBLK,      "b"),
     (S_IFDIR,      "d"),
     (S_IFCHR,      "c"),
     (S_IFIFO,      "p")),

    ((TUREAD,       "r"),),
    ((TUWRITE,      "w"),),
    ((TUEXEC|TSUID, "s"),
     (TSUID,        "S"),
     (TUEXEC,       "x")),

    ((TGREAD,       "r"),),
    ((TGWRITE,      "w"),),
    ((TGEXEC|TSGID, "s"),
     (TSGID,        "S"),
     (TGEXEC,       "x")),

    ((TOREAD,       "r"),),
    ((TOWRITE,      "w"),),
    ((TOEXEC|TSVTX, "t"),
     (TSVTX,        "T"),
     (TOEXEC,       "x"))
)

def filemode(mode):
    """Convert a file's mode to a string of the form
       -rwxrwxrwx.
       Used by TarFile.list()
    """
    perm = []
    for table in filemode_table:
        for bit, char in table:
            if mode & bit == bit:
                perm.append(char)
                break
        else:
            perm.append("-")
    return "".join(perm)

class TarError(Exception):
    """Base exception."""
    pass
class ExtractError(TarError):
    """General exception for extract errors."""
    pass
class ReadError(TarError):
    """Exception for unreadable tar archives."""
    pass
class CompressionError(TarError):
    """Exception for unavailable compression methods."""
    pass
class StreamError(TarError):
    """Exception for unsupported operations on stream-like TarFiles."""
    pass
class HeaderError(TarError):
    """Base exception for header errors."""
    pass
class EmptyHeaderError(HeaderError):
    """Exception for empty headers."""
    pass
class TruncatedHeaderError(HeaderError):
    """Exception for truncated headers."""
    pass
class EOFHeaderError(HeaderError):
    """Exception for end of file headers."""
    pass
class InvalidHeaderError(HeaderError):
    """Exception for invalid headers."""
    pass
class SubsequentHeaderError(HeaderError):
    """Exception for missing and invalid extended headers."""
    pass

#---------------------------
# internal stream interface
#---------------------------
class _LowLevelFile(object):
    """Low-level file object. Supports reading and writing.
       It is used instead of a regular file object for streaming
       access.
    """

    def __init__(self, name, mode):
        mode = {
            "r": os.O_RDONLY,
            "w": os.O_WRONLY | os.O_CREAT | os.O_TRUNC,
        }[mode]
        if hasattr(os, "O_BINARY"):
            mode |= os.O_BINARY
        self.fd = os.open(name, mode, 0o666)

    def close(self):
        os.close(self.fd)

    def read(self, size):
        return os.read(self.fd, size)

    def write(self, s):
        os.write(self.fd, s)

class _Stream(object):
    """Class that serves as an adapter between TarFile and
       a stream-like object.  The stream-like object only
       needs to have a read() or write() method and is accessed
       blockwise.  Use of gzip or bzip2 compression is possible.
       A stream-like object could be for example: sys.stdin,
       sys.stdout, a socket, a tape device etc.

       _Stream is intended to be used only internally.
    """

    def __init__(self, name, mode, comptype, fileobj, bufsize):
        """Construct a _Stream object.
        """
        self._extfileobj = True
        if fileobj is None:
            fileobj = _LowLevelFile(name, mode)
            self._extfileobj = False

        if comptype == '*':
            # Enable transparent compression detection for the
            # stream interface
            fileobj = _StreamProxy(fileobj)
            comptype = fileobj.getcomptype()

        self.name     = name or ""
        self.mode     = mode
        self.comptype = comptype
        self.fileobj  = fileobj
        self.bufsize  = bufsize
        self.buf      = b""
        self.pos      = 0
        self.closed   = False

        try:
            if comptype == "gz":
                try:
                    import zlib
                except ImportError:
                    raise CompressionError("zlib module is not available")
                self.zlib = zlib
                self.crc = zlib.crc32(b"")
                if mode == "r":
                    self._init_read_gz()
                else:
                    self._init_write_gz()

            if comptype == "bz2":
                try:
                    import bz2
                except ImportError:
                    raise CompressionError("bz2 module is not available")
                if mode == "r":
                    self.dbuf = b""
                    self.cmp = bz2.BZ2Decompressor()
                else:
                    self.cmp = bz2.BZ2Compressor()
        except:
            if not self._extfileobj:
                self.fileobj.close()
            self.closed = True
            raise

    def __del__(self):
        if hasattr(self, "closed") and not self.closed:
            self.close()

    def _init_write_gz(self):
        """Initialize for writing with gzip compression.
        """
        self.cmp = self.zlib.compressobj(9, self.zlib.DEFLATED,
                                            -self.zlib.MAX_WBITS,
                                            self.zlib.DEF_MEM_LEVEL,
                                            0)
        timestamp = struct.pack(" self.bufsize:
            self.fileobj.write(self.buf[:self.bufsize])
            self.buf = self.buf[self.bufsize:]

    def close(self):
        """Close the _Stream object. No operation should be
           done on it afterwards.
        """
        if self.closed:
            return

        if self.mode == "w" and self.comptype != "tar":
            self.buf += self.cmp.flush()

        if self.mode == "w" and self.buf:
            self.fileobj.write(self.buf)
            self.buf = b""
            if self.comptype == "gz":
                # The native zlib crc is an unsigned 32-bit integer, but
                # the Python wrapper implicitly casts that to a signed C
                # long.  So, on a 32-bit box self.crc may "look negative",
                # while the same crc on a 64-bit box may "look positive".
                # To avoid irksome warnings from the `struct` module, force
                # it to look positive on all boxes.
                self.fileobj.write(struct.pack("= 0:
            blocks, remainder = divmod(pos - self.pos, self.bufsize)
            for i in range(blocks):
                self.read(self.bufsize)
            self.read(remainder)
        else:
            raise StreamError("seeking backwards is not allowed")
        return self.pos

    def read(self, size=None):
        """Return the next size number of bytes from the stream.
           If size is not defined, return all bytes of the stream
           up to EOF.
        """
        if size is None:
            t = []
            while True:
                buf = self._read(self.bufsize)
                if not buf:
                    break
                t.append(buf)
            buf = "".join(t)
        else:
            buf = self._read(size)
        self.pos += len(buf)
        return buf

    def _read(self, size):
        """Return size bytes from the stream.
        """
        if self.comptype == "tar":
            return self.__read(size)

        c = len(self.dbuf)
        while c < size:
            buf = self.__read(self.bufsize)
            if not buf:
                break
            try:
                buf = self.cmp.decompress(buf)
            except IOError:
                raise ReadError("invalid compressed data")
            self.dbuf += buf
            c += len(buf)
        buf = self.dbuf[:size]
        self.dbuf = self.dbuf[size:]
        return buf

    def __read(self, size):
        """Return size bytes from stream. If internal buffer is empty,
           read another block from the stream.
        """
        c = len(self.buf)
        while c < size:
            buf = self.fileobj.read(self.bufsize)
            if not buf:
                break
            self.buf += buf
            c += len(buf)
        buf = self.buf[:size]
        self.buf = self.buf[size:]
        return buf
# class _Stream

class _StreamProxy(object):
    """Small proxy class that enables transparent compression
       detection for the Stream interface (mode 'r|*').
    """

    def __init__(self, fileobj):
        self.fileobj = fileobj
        self.buf = self.fileobj.read(BLOCKSIZE)

    def read(self, size):
        self.read = self.fileobj.read
        return self.buf

    def getcomptype(self):
        if self.buf.startswith(b"\037\213\010"):
            return "gz"
        if self.buf.startswith(b"BZh91"):
            return "bz2"
        return "tar"

    def close(self):
        self.fileobj.close()
# class StreamProxy

class _BZ2Proxy(object):
    """Small proxy class that enables external file object
       support for "r:bz2" and "w:bz2" modes. This is actually
       a workaround for a limitation in bz2 module's BZ2File
       class which (unlike gzip.GzipFile) has no support for
       a file object argument.
    """

    blocksize = 16 * 1024

    def __init__(self, fileobj, mode):
        self.fileobj = fileobj
        self.mode = mode
        self.name = getattr(self.fileobj, "name", None)
        self.init()

    def init(self):
        import bz2
        self.pos = 0
        if self.mode == "r":
            self.bz2obj = bz2.BZ2Decompressor()
            self.fileobj.seek(0)
            self.buf = b""
        else:
            self.bz2obj = bz2.BZ2Compressor()

    def read(self, size):
        x = len(self.buf)
        while x < size:
            raw = self.fileobj.read(self.blocksize)
            if not raw:
                break
            data = self.bz2obj.decompress(raw)
            self.buf += data
            x += len(data)

        buf = self.buf[:size]
        self.buf = self.buf[size:]
        self.pos += len(buf)
        return buf

    def seek(self, pos):
        if pos < self.pos:
            self.init()
        self.read(pos - self.pos)

    def tell(self):
        return self.pos

    def write(self, data):
        self.pos += len(data)
        raw = self.bz2obj.compress(data)
        self.fileobj.write(raw)

    def close(self):
        if self.mode == "w":
            raw = self.bz2obj.flush()
            self.fileobj.write(raw)
# class _BZ2Proxy

#------------------------
# Extraction file object
#------------------------
class _FileInFile(object):
    """A thin wrapper around an existing file object that
       provides a part of its data as an individual file
       object.
    """

    def __init__(self, fileobj, offset, size, blockinfo=None):
        self.fileobj = fileobj
        self.offset = offset
        self.size = size
        self.position = 0

        if blockinfo is None:
            blockinfo = [(0, size)]

        # Construct a map with data and zero blocks.
        self.map_index = 0
        self.map = []
        lastpos = 0
        realpos = self.offset
        for offset, size in blockinfo:
            if offset > lastpos:
                self.map.append((False, lastpos, offset, None))
            self.map.append((True, offset, offset + size, realpos))
            realpos += size
            lastpos = offset + size
        if lastpos < self.size:
            self.map.append((False, lastpos, self.size, None))

    def seekable(self):
        if not hasattr(self.fileobj, "seekable"):
            # XXX gzip.GzipFile and bz2.BZ2File
            return True
        return self.fileobj.seekable()

    def tell(self):
        """Return the current file position.
        """
        return self.position

    def seek(self, position):
        """Seek to a position in the file.
        """
        self.position = position

    def read(self, size=None):
        """Read data from the file.
        """
        if size is None:
            size = self.size - self.position
        else:
            size = min(size, self.size - self.position)

        buf = b""
        while size > 0:
            while True:
                data, start, stop, offset = self.map[self.map_index]
                if start <= self.position < stop:
                    break
                else:
                    self.map_index += 1
                    if self.map_index == len(self.map):
                        self.map_index = 0
            length = min(size, stop - self.position)
            if data:
                self.fileobj.seek(offset + (self.position - start))
                buf += self.fileobj.read(length)
            else:
                buf += NUL * length
            size -= length
            self.position += length
        return buf
#class _FileInFile


class ExFileObject(object):
    """File-like object for reading an archive member.
       Is returned by TarFile.extractfile().
    """
    blocksize = 1024

    def __init__(self, tarfile, tarinfo):
        self.fileobj = _FileInFile(tarfile.fileobj,
                                   tarinfo.offset_data,
                                   tarinfo.size,
                                   tarinfo.sparse)
        self.name = tarinfo.name
        self.mode = "r"
        self.closed = False
        self.size = tarinfo.size

        self.position = 0
        self.buffer = b""

    def readable(self):
        return True

    def writable(self):
        return False

    def seekable(self):
        return self.fileobj.seekable()

    def read(self, size=None):
        """Read at most size bytes from the file. If size is not
           present or None, read all data until EOF is reached.
        """
        if self.closed:
            raise ValueError("I/O operation on closed file")

        buf = b""
        if self.buffer:
            if size is None:
                buf = self.buffer
                self.buffer = b""
            else:
                buf = self.buffer[:size]
                self.buffer = self.buffer[size:]

        if size is None:
            buf += self.fileobj.read()
        else:
            buf += self.fileobj.read(size - len(buf))

        self.position += len(buf)
        return buf

    # XXX TextIOWrapper uses the read1() method.
    read1 = read

    def readline(self, size=-1):
        """Read one entire line from the file. If size is present
           and non-negative, return a string with at most that
           size, which may be an incomplete line.
        """
        if self.closed:
            raise ValueError("I/O operation on closed file")

        pos = self.buffer.find(b"\n") + 1
        if pos == 0:
            # no newline found.
            while True:
                buf = self.fileobj.read(self.blocksize)
                self.buffer += buf
                if not buf or b"\n" in buf:
                    pos = self.buffer.find(b"\n") + 1
                    if pos == 0:
                        # no newline found.
                        pos = len(self.buffer)
                    break

        if size != -1:
            pos = min(size, pos)

        buf = self.buffer[:pos]
        self.buffer = self.buffer[pos:]
        self.position += len(buf)
        return buf

    def readlines(self):
        """Return a list with all remaining lines.
        """
        result = []
        while True:
            line = self.readline()
            if not line: break
            result.append(line)
        return result

    def tell(self):
        """Return the current file position.
        """
        if self.closed:
            raise ValueError("I/O operation on closed file")

        return self.position

    def seek(self, pos, whence=os.SEEK_SET):
        """Seek to a position in the file.
        """
        if self.closed:
            raise ValueError("I/O operation on closed file")

        if whence == os.SEEK_SET:
            self.position = min(max(pos, 0), self.size)
        elif whence == os.SEEK_CUR:
            if pos < 0:
                self.position = max(self.position + pos, 0)
            else:
                self.position = min(self.position + pos, self.size)
        elif whence == os.SEEK_END:
            self.position = max(min(self.size + pos, self.size), 0)
        else:
            raise ValueError("Invalid argument")

        self.buffer = b""
        self.fileobj.seek(self.position)

    def close(self):
        """Close the file object.
        """
        self.closed = True

    def __iter__(self):
        """Get an iterator over the file's lines.
        """
        while True:
            line = self.readline()
            if not line:
                break
            yield line
#class ExFileObject

#------------------
# Exported Classes
#------------------
class TarInfo(object):
    """Informational class which holds the details about an
       archive member given by a tar header block.
       TarInfo objects are returned by TarFile.getmember(),
       TarFile.getmembers() and TarFile.gettarinfo() and are
       usually created internally.
    """

    __slots__ = ("name", "mode", "uid", "gid", "size", "mtime",
                 "chksum", "type", "linkname", "uname", "gname",
                 "devmajor", "devminor",
                 "offset", "offset_data", "pax_headers", "sparse",
                 "tarfile", "_sparse_structs", "_link_target")

    def __init__(self, name=""):
        """Construct a TarInfo object. name is the optional name
           of the member.
        """
        self.name = name        # member name
        self.mode = 0o644       # file permissions
        self.uid = 0            # user id
        self.gid = 0            # group id
        self.size = 0           # file size
        self.mtime = 0          # modification time
        self.chksum = 0         # header checksum
        self.type = REGTYPE     # member type
        self.linkname = ""      # link name
        self.uname = ""         # user name
        self.gname = ""         # group name
        self.devmajor = 0       # device major number
        self.devminor = 0       # device minor number

        self.offset = 0         # the tar header starts here
        self.offset_data = 0    # the file's data starts here

        self.sparse = None      # sparse member information
        self.pax_headers = {}   # pax header information

    # In pax headers the "name" and "linkname" field are called
    # "path" and "linkpath".
    def _getpath(self):
        return self.name
    def _setpath(self, name):
        self.name = name
    path = property(_getpath, _setpath)

    def _getlinkpath(self):
        return self.linkname
    def _setlinkpath(self, linkname):
        self.linkname = linkname
    linkpath = property(_getlinkpath, _setlinkpath)

    def __repr__(self):
        return "<%s %r at %#x>" % (self.__class__.__name__,self.name,id(self))

    def get_info(self):
        """Return the TarInfo's attributes as a dictionary.
        """
        info = {
            "name":     self.name,
            "mode":     self.mode & 0o7777,
            "uid":      self.uid,
            "gid":      self.gid,
            "size":     self.size,
            "mtime":    self.mtime,
            "chksum":   self.chksum,
            "type":     self.type,
            "linkname": self.linkname,
            "uname":    self.uname,
            "gname":    self.gname,
            "devmajor": self.devmajor,
            "devminor": self.devminor
        }

        if info["type"] == DIRTYPE and not info["name"].endswith("/"):
            info["name"] += "/"

        return info

    def tobuf(self, format=DEFAULT_FORMAT, encoding=ENCODING, errors="surrogateescape"):
        """Return a tar header as a string of 512 byte blocks.
        """
        info = self.get_info()

        if format == USTAR_FORMAT:
            return self.create_ustar_header(info, encoding, errors)
        elif format == GNU_FORMAT:
            return self.create_gnu_header(info, encoding, errors)
        elif format == PAX_FORMAT:
            return self.create_pax_header(info, encoding)
        else:
            raise ValueError("invalid format")

    def create_ustar_header(self, info, encoding, errors):
        """Return the object as a ustar header block.
        """
        info["magic"] = POSIX_MAGIC

        if len(info["linkname"]) > LENGTH_LINK:
            raise ValueError("linkname is too long")

        if len(info["name"]) > LENGTH_NAME:
            info["prefix"], info["name"] = self._posix_split_name(info["name"])

        return self._create_header(info, USTAR_FORMAT, encoding, errors)

    def create_gnu_header(self, info, encoding, errors):
        """Return the object as a GNU header block sequence.
        """
        info["magic"] = GNU_MAGIC

        buf = b""
        if len(info["linkname"]) > LENGTH_LINK:
            buf += self._create_gnu_long_header(info["linkname"], GNUTYPE_LONGLINK, encoding, errors)

        if len(info["name"]) > LENGTH_NAME:
            buf += self._create_gnu_long_header(info["name"], GNUTYPE_LONGNAME, encoding, errors)

        return buf + self._create_header(info, GNU_FORMAT, encoding, errors)

    def create_pax_header(self, info, encoding):
        """Return the object as a ustar header block. If it cannot be
           represented this way, prepend a pax extended header sequence
           with supplement information.
        """
        info["magic"] = POSIX_MAGIC
        pax_headers = self.pax_headers.copy()

        # Test string fields for values that exceed the field length or cannot
        # be represented in ASCII encoding.
        for name, hname, length in (
                ("name", "path", LENGTH_NAME), ("linkname", "linkpath", LENGTH_LINK),
                ("uname", "uname", 32), ("gname", "gname", 32)):

            if hname in pax_headers:
                # The pax header has priority.
                continue

            # Try to encode the string as ASCII.
            try:
                info[name].encode("ascii", "strict")
            except UnicodeEncodeError:
                pax_headers[hname] = info[name]
                continue

            if len(info[name]) > length:
                pax_headers[hname] = info[name]

        # Test number fields for values that exceed the field limit or values
        # that like to be stored as float.
        for name, digits in (("uid", 8), ("gid", 8), ("size", 12), ("mtime", 12)):
            if name in pax_headers:
                # The pax header has priority. Avoid overflow.
                info[name] = 0
                continue

            val = info[name]
            if not 0 <= val < 8 ** (digits - 1) or isinstance(val, float):
                pax_headers[name] = str(val)
                info[name] = 0

        # Create a pax extended header if necessary.
        if pax_headers:
            buf = self._create_pax_generic_header(pax_headers, XHDTYPE, encoding)
        else:
            buf = b""

        return buf + self._create_header(info, USTAR_FORMAT, "ascii", "replace")

    @classmethod
    def create_pax_global_header(cls, pax_headers):
        """Return the object as a pax global header block sequence.
        """
        return cls._create_pax_generic_header(pax_headers, XGLTYPE, "utf8")

    def _posix_split_name(self, name):
        """Split a name longer than 100 chars into a prefix
           and a name part.
        """
        prefix = name[:LENGTH_PREFIX + 1]
        while prefix and prefix[-1] != "/":
            prefix = prefix[:-1]

        name = name[len(prefix):]
        prefix = prefix[:-1]

        if not prefix or len(name) > LENGTH_NAME:
            raise ValueError("name is too long")
        return prefix, name

    @staticmethod
    def _create_header(info, format, encoding, errors):
        """Return a header block. info is a dictionary with file
           information, format must be one of the *_FORMAT constants.
        """
        parts = [
            stn(info.get("name", ""), 100, encoding, errors),
            itn(info.get("mode", 0) & 0o7777, 8, format),
            itn(info.get("uid", 0), 8, format),
            itn(info.get("gid", 0), 8, format),
            itn(info.get("size", 0), 12, format),
            itn(info.get("mtime", 0), 12, format),
            b"        ", # checksum field
            info.get("type", REGTYPE),
            stn(info.get("linkname", ""), 100, encoding, errors),
            info.get("magic", POSIX_MAGIC),
            stn(info.get("uname", ""), 32, encoding, errors),
            stn(info.get("gname", ""), 32, encoding, errors),
            itn(info.get("devmajor", 0), 8, format),
            itn(info.get("devminor", 0), 8, format),
            stn(info.get("prefix", ""), 155, encoding, errors)
        ]

        buf = struct.pack("%ds" % BLOCKSIZE, b"".join(parts))
        chksum = calc_chksums(buf[-BLOCKSIZE:])[0]
        buf = buf[:-364] + ("%06o\0" % chksum).encode("ascii") + buf[-357:]
        return buf

    @staticmethod
    def _create_payload(payload):
        """Return the string payload filled with zero bytes
           up to the next 512 byte border.
        """
        blocks, remainder = divmod(len(payload), BLOCKSIZE)
        if remainder > 0:
            payload += (BLOCKSIZE - remainder) * NUL
        return payload

    @classmethod
    def _create_gnu_long_header(cls, name, type, encoding, errors):
        """Return a GNUTYPE_LONGNAME or GNUTYPE_LONGLINK sequence
           for name.
        """
        name = name.encode(encoding, errors) + NUL

        info = {}
        info["name"] = "././@LongLink"
        info["type"] = type
        info["size"] = len(name)
        info["magic"] = GNU_MAGIC

        # create extended header + name blocks.
        return cls._create_header(info, USTAR_FORMAT, encoding, errors) + \
                cls._create_payload(name)

    @classmethod
    def _create_pax_generic_header(cls, pax_headers, type, encoding):
        """Return a POSIX.1-2008 extended or global header sequence
           that contains a list of keyword, value pairs. The values
           must be strings.
        """
        # Check if one of the fields contains surrogate characters and thereby
        # forces hdrcharset=BINARY, see _proc_pax() for more information.
        binary = False
        for keyword, value in pax_headers.items():
            try:
                value.encode("utf8", "strict")
            except UnicodeEncodeError:
                binary = True
                break

        records = b""
        if binary:
            # Put the hdrcharset field at the beginning of the header.
            records += b"21 hdrcharset=BINARY\n"

        for keyword, value in pax_headers.items():
            keyword = keyword.encode("utf8")
            if binary:
                # Try to restore the original byte representation of `value'.
                # Needless to say, that the encoding must match the string.
                value = value.encode(encoding, "surrogateescape")
            else:
                value = value.encode("utf8")

            l = len(keyword) + len(value) + 3   # ' ' + '=' + '\n'
            n = p = 0
            while True:
                n = l + len(str(p))
                if n == p:
                    break
                p = n
            records += bytes(str(p), "ascii") + b" " + keyword + b"=" + value + b"\n"

        # We use a hardcoded "././@PaxHeader" name like star does
        # instead of the one that POSIX recommends.
        info = {}
        info["name"] = "././@PaxHeader"
        info["type"] = type
        info["size"] = len(records)
        info["magic"] = POSIX_MAGIC

        # Create pax header + record blocks.
        return cls._create_header(info, USTAR_FORMAT, "ascii", "replace") + \
                cls._create_payload(records)

    @classmethod
    def frombuf(cls, buf, encoding, errors):
        """Construct a TarInfo object from a 512 byte bytes object.
        """
        if len(buf) == 0:
            raise EmptyHeaderError("empty header")
        if len(buf) != BLOCKSIZE:
            raise TruncatedHeaderError("truncated header")
        if buf.count(NUL) == BLOCKSIZE:
            raise EOFHeaderError("end of file header")

        chksum = nti(buf[148:156])
        if chksum not in calc_chksums(buf):
            raise InvalidHeaderError("bad checksum")

        obj = cls()
        obj.name = nts(buf[0:100], encoding, errors)
        obj.mode = nti(buf[100:108])
        obj.uid = nti(buf[108:116])
        obj.gid = nti(buf[116:124])
        obj.size = nti(buf[124:136])
        obj.mtime = nti(buf[136:148])
        obj.chksum = chksum
        obj.type = buf[156:157]
        obj.linkname = nts(buf[157:257], encoding, errors)
        obj.uname = nts(buf[265:297], encoding, errors)
        obj.gname = nts(buf[297:329], encoding, errors)
        obj.devmajor = nti(buf[329:337])
        obj.devminor = nti(buf[337:345])
        prefix = nts(buf[345:500], encoding, errors)

        # Old V7 tar format represents a directory as a regular
        # file with a trailing slash.
        if obj.type == AREGTYPE and obj.name.endswith("/"):
            obj.type = DIRTYPE

        # The old GNU sparse format occupies some of the unused
        # space in the buffer for up to 4 sparse structures.
        # Save the them for later processing in _proc_sparse().
        if obj.type == GNUTYPE_SPARSE:
            pos = 386
            structs = []
            for i in range(4):
                try:
                    offset = nti(buf[pos:pos + 12])
                    numbytes = nti(buf[pos + 12:pos + 24])
                except ValueError:
                    break
                structs.append((offset, numbytes))
                pos += 24
            isextended = bool(buf[482])
            origsize = nti(buf[483:495])
            obj._sparse_structs = (structs, isextended, origsize)

        # Remove redundant slashes from directories.
        if obj.isdir():
            obj.name = obj.name.rstrip("/")

        # Reconstruct a ustar longname.
        if prefix and obj.type not in GNU_TYPES:
            obj.name = prefix + "/" + obj.name
        return obj

    @classmethod
    def fromtarfile(cls, tarfile):
        """Return the next TarInfo object from TarFile object
           tarfile.
        """
        buf = tarfile.fileobj.read(BLOCKSIZE)
        obj = cls.frombuf(buf, tarfile.encoding, tarfile.errors)
        obj.offset = tarfile.fileobj.tell() - BLOCKSIZE
        return obj._proc_member(tarfile)

    #--------------------------------------------------------------------------
    # The following are methods that are called depending on the type of a
    # member. The entry point is _proc_member() which can be overridden in a
    # subclass to add custom _proc_*() methods. A _proc_*() method MUST
    # implement the following operations:
    # 1. Set self.offset_data to the position where the data blocks begin,
    #    if there is data that follows.
    # 2. Set tarfile.offset to the position where the next member's header will
    #    begin.
    # 3. Return self or another valid TarInfo object.
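    #
    # A minimal sketch of such a subclass (illustrative only; the type code
    # b"X" and the method name _proc_custom are hypothetical):
    #
    #     class MyTarInfo(TarInfo):
    #         def _proc_member(self, tarfile):
    #             if self.type == b"X":
    #                 return self._proc_custom(tarfile)
    #             return super(MyTarInfo, self)._proc_member(tarfile)
    #
    #         def _proc_custom(self, tarfile):
    #             self.offset_data = tarfile.fileobj.tell()
    #             tarfile.offset = self.offset_data + self._block(self.size)
    #             return self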
    def _proc_member(self, tarfile):
        """Choose the right processing method depending on
           the type and call it.
        """
        if self.type in (GNUTYPE_LONGNAME, GNUTYPE_LONGLINK):
            return self._proc_gnulong(tarfile)
        elif self.type == GNUTYPE_SPARSE:
            return self._proc_sparse(tarfile)
        elif self.type in (XHDTYPE, XGLTYPE, SOLARIS_XHDTYPE):
            return self._proc_pax(tarfile)
        else:
            return self._proc_builtin(tarfile)

    def _proc_builtin(self, tarfile):
        """Process a builtin type or an unknown type which
           will be treated as a regular file.
        """
        self.offset_data = tarfile.fileobj.tell()
        offset = self.offset_data
        if self.isreg() or self.type not in SUPPORTED_TYPES:
            # Skip the following data blocks.
            offset += self._block(self.size)
        tarfile.offset = offset

        # Patch the TarInfo object with saved global
        # header information.
        self._apply_pax_info(tarfile.pax_headers, tarfile.encoding, tarfile.errors)

        return self

    def _proc_gnulong(self, tarfile):
        """Process the blocks that hold a GNU longname
           or longlink member.
        """
        buf = tarfile.fileobj.read(self._block(self.size))

        # Fetch the next header and process it.
        try:
            next = self.fromtarfile(tarfile)
        except HeaderError:
            raise SubsequentHeaderError("missing or bad subsequent header")

        # Patch the TarInfo object from the next header with
        # the longname information.
        next.offset = self.offset
        if self.type == GNUTYPE_LONGNAME:
            next.name = nts(buf, tarfile.encoding, tarfile.errors)
        elif self.type == GNUTYPE_LONGLINK:
            next.linkname = nts(buf, tarfile.encoding, tarfile.errors)

        return next

    def _proc_sparse(self, tarfile):
        """Process a GNU sparse header plus extra headers.
        """
        # We already collected some sparse structures in frombuf().
        structs, isextended, origsize = self._sparse_structs
        del self._sparse_structs

        # Collect sparse structures from extended header blocks.
        while isextended:
            buf = tarfile.fileobj.read(BLOCKSIZE)
            pos = 0
            for i in range(21):
                try:
                    offset = nti(buf[pos:pos + 12])
                    numbytes = nti(buf[pos + 12:pos + 24])
                except ValueError:
                    break
                if offset and numbytes:
                    structs.append((offset, numbytes))
                pos += 24
            isextended = bool(buf[504])
        self.sparse = structs

        self.offset_data = tarfile.fileobj.tell()
        tarfile.offset = self.offset_data + self._block(self.size)
        self.size = origsize
        return self

    def _proc_pax(self, tarfile):
        """Process an extended or global header as described in
           POSIX.1-2008.
        """
        # Read the header information.
        buf = tarfile.fileobj.read(self._block(self.size))

        # A pax header stores supplemental information for either
        # the following file (extended) or all following files
        # (global).
        if self.type == XGLTYPE:
            pax_headers = tarfile.pax_headers
        else:
            pax_headers = tarfile.pax_headers.copy()

        # Check if the pax header contains a hdrcharset field. This tells us
        # the encoding of the path, linkpath, uname and gname fields. Normally,
        # these fields are UTF-8 encoded, but POSIX.1-2008 allows tar
        # implementations to store them as raw binary strings if the
        # translation to UTF-8 fails.
        match = re.search(br"\d+ hdrcharset=([^\n]+)\n", buf)
        if match is not None:
            pax_headers["hdrcharset"] = match.group(1).decode("utf8")

        # For the time being, we don't care about anything other than "BINARY".
        # The only other value that is currently allowed by the standard is
        # "ISO-IR 10646 2000 UTF-8" in other words UTF-8.
        hdrcharset = pax_headers.get("hdrcharset")
        if hdrcharset == "BINARY":
            encoding = tarfile.encoding
        else:
            encoding = "utf8"

        # Parse pax header information. A record looks like this:
        # "%d %s=%s\n" % (length, keyword, value). length is the size
        # of the complete record including the length field itself and
        # the newline. keyword and value are both UTF-8 encoded strings.
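        #
        # For example, the record b"27 mtime=1350486034.953123\n" has
        # length == 27 and keyword == b"mtime"; the slice below,
        # buf[match.end(2) + 1:match.start(1) + length - 1], picks out the
        # value b"1350486034.953123" (everything between the "=" and the
        # trailing newline).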
        regex = re.compile(br"(\d+) ([^=]+)=")
        pos = 0
        while True:
            match = regex.match(buf, pos)
            if not match:
                break

            length, keyword = match.groups()
            length = int(length)
            value = buf[match.end(2) + 1:match.start(1) + length - 1]

            # Normally, we could just use "utf8" as the encoding and "strict"
            # as the error handler, but we better not take the risk. For
            # example, GNU tar <= 1.23 is known to store filenames it cannot
            # translate to UTF-8 as raw strings (unfortunately without a
            # hdrcharset=BINARY header).
            # We first try the strict standard encoding, and if that fails we
            # fall back on the user's encoding and error handler.
            keyword = self._decode_pax_field(keyword, "utf8", "utf8",
                    tarfile.errors)
            if keyword in PAX_NAME_FIELDS:
                value = self._decode_pax_field(value, encoding, tarfile.encoding,
                        tarfile.errors)
            else:
                value = self._decode_pax_field(value, "utf8", "utf8",
                        tarfile.errors)

            pax_headers[keyword] = value
            pos += length

        # Fetch the next header.
        try:
            next = self.fromtarfile(tarfile)
        except HeaderError:
            raise SubsequentHeaderError("missing or bad subsequent header")

        # Process GNU sparse information.
        if "GNU.sparse.map" in pax_headers:
            # GNU extended sparse format version 0.1.
            self._proc_gnusparse_01(next, pax_headers)

        elif "GNU.sparse.size" in pax_headers:
            # GNU extended sparse format version 0.0.
            self._proc_gnusparse_00(next, pax_headers, buf)

        elif pax_headers.get("GNU.sparse.major") == "1" and pax_headers.get("GNU.sparse.minor") == "0":
            # GNU extended sparse format version 1.0.
            self._proc_gnusparse_10(next, pax_headers, tarfile)

        if self.type in (XHDTYPE, SOLARIS_XHDTYPE):
            # Patch the TarInfo object with the extended header info.
            next._apply_pax_info(pax_headers, tarfile.encoding, tarfile.errors)
            next.offset = self.offset

            if "size" in pax_headers:
                # If the extended header replaces the size field,
                # we need to recalculate the offset where the next
                # header starts.
                offset = next.offset_data
                if next.isreg() or next.type not in SUPPORTED_TYPES:
                    offset += next._block(next.size)
                tarfile.offset = offset

        return next

    def _proc_gnusparse_00(self, next, pax_headers, buf):
        """Process a GNU tar extended sparse header, version 0.0.
        """
        offsets = []
        for match in re.finditer(br"\d+ GNU.sparse.offset=(\d+)\n", buf):
            offsets.append(int(match.group(1)))
        numbytes = []
        for match in re.finditer(br"\d+ GNU.sparse.numbytes=(\d+)\n", buf):
            numbytes.append(int(match.group(1)))
        next.sparse = list(zip(offsets, numbytes))

    def _proc_gnusparse_01(self, next, pax_headers):
        """Process a GNU tar extended sparse header, version 0.1.
        """
        sparse = [int(x) for x in pax_headers["GNU.sparse.map"].split(",")]
        next.sparse = list(zip(sparse[::2], sparse[1::2]))
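
        # For example, a map of "0,512,4096,1024" yields
        # next.sparse == [(0, 512), (4096, 1024)], i.e. (offset, numbytes)
        # pairs.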

    def _proc_gnusparse_10(self, next, pax_headers, tarfile):
        """Process a GNU tar extended sparse header, version 1.0.
        """
        fields = None
        sparse = []
        buf = tarfile.fileobj.read(BLOCKSIZE)
        fields, buf = buf.split(b"\n", 1)
        fields = int(fields)
        while len(sparse) < fields * 2:
            if b"\n" not in buf:
                buf += tarfile.fileobj.read(BLOCKSIZE)
            number, buf = buf.split(b"\n", 1)
            sparse.append(int(number))
        next.offset_data = tarfile.fileobj.tell()
        next.sparse = list(zip(sparse[::2], sparse[1::2]))

    def _apply_pax_info(self, pax_headers, encoding, errors):
        """Replace fields with supplemental information from a previous
           pax extended or global header.
        """
        for keyword, value in pax_headers.items():
            if keyword == "GNU.sparse.name":
                setattr(self, "path", value)
            elif keyword == "GNU.sparse.size":
                setattr(self, "size", int(value))
            elif keyword == "GNU.sparse.realsize":
                setattr(self, "size", int(value))
            elif keyword in PAX_FIELDS:
                if keyword in PAX_NUMBER_FIELDS:
                    try:
                        value = PAX_NUMBER_FIELDS[keyword](value)
                    except ValueError:
                        value = 0
                if keyword == "path":
                    value = value.rstrip("/")
                setattr(self, keyword, value)

        self.pax_headers = pax_headers.copy()

    def _decode_pax_field(self, value, encoding, fallback_encoding, fallback_errors):
        """Decode a single field from a pax record.
        """
        try:
            return value.decode(encoding, "strict")
        except UnicodeDecodeError:
            return value.decode(fallback_encoding, fallback_errors)

    def _block(self, count):
        """Round up a byte count by BLOCKSIZE and return it,
           e.g. _block(834) => 1024.
        """
        blocks, remainder = divmod(count, BLOCKSIZE)
        if remainder:
            blocks += 1
        return blocks * BLOCKSIZE

    def isreg(self):
        return self.type in REGULAR_TYPES
    def isfile(self):
        return self.isreg()
    def isdir(self):
        return self.type == DIRTYPE
    def issym(self):
        return self.type == SYMTYPE
    def islnk(self):
        return self.type == LNKTYPE
    def ischr(self):
        return self.type == CHRTYPE
    def isblk(self):
        return self.type == BLKTYPE
    def isfifo(self):
        return self.type == FIFOTYPE
    def issparse(self):
        return self.sparse is not None
    def isdev(self):
        return self.type in (CHRTYPE, BLKTYPE, FIFOTYPE)
# class TarInfo

class TarFile(object):
    """The TarFile Class provides an interface to tar archives.
    """

    debug = 0                   # May be set from 0 (no msgs) to 3 (all msgs)

    dereference = False         # If true, add content of linked file to the
                                # tar file, else the link.

    ignore_zeros = False        # If true, skips empty or invalid blocks and
                                # continues processing.

    errorlevel = 1              # If 0, fatal errors only appear in debug
                                # messages (if debug >= 0). If > 0, errors
                                # are passed to the caller as exceptions.

    format = DEFAULT_FORMAT     # The format to use when creating an archive.

    encoding = ENCODING         # Encoding for 8-bit character strings.

    errors = None               # Error handler for unicode conversion.

    tarinfo = TarInfo           # The default TarInfo class to use.

    fileobject = ExFileObject   # The default ExFileObject class to use.

    def __init__(self, name=None, mode="r", fileobj=None, format=None,
            tarinfo=None, dereference=None, ignore_zeros=None, encoding=None,
            errors="surrogateescape", pax_headers=None, debug=None, errorlevel=None):
        """Open an (uncompressed) tar archive `name'. `mode' is either 'r' to
           read from an existing archive, 'a' to append data to an existing
           file or 'w' to create a new file overwriting an existing one. `mode'
           defaults to 'r'.
           If `fileobj' is given, it is used for reading or writing data. If it
           can be determined, `mode' is overridden by `fileobj's mode.
           `fileobj' is not closed when TarFile is closed.
        """
        if len(mode) > 1 or mode not in "raw":
            raise ValueError("mode must be 'r', 'a' or 'w'")
        self.mode = mode
        self._mode = {"r": "rb", "a": "r+b", "w": "wb"}[mode]

        if not fileobj:
            if self.mode == "a" and not os.path.exists(name):
                # Create nonexistent files in append mode.
                self.mode = "w"
                self._mode = "wb"
            fileobj = bltn_open(name, self._mode)
            self._extfileobj = False
        else:
            if name is None and hasattr(fileobj, "name"):
                name = fileobj.name
            if hasattr(fileobj, "mode"):
                self._mode = fileobj.mode
            self._extfileobj = True
        self.name = os.path.abspath(name) if name else None
        self.fileobj = fileobj

        # Init attributes.
        if format is not None:
            self.format = format
        if tarinfo is not None:
            self.tarinfo = tarinfo
        if dereference is not None:
            self.dereference = dereference
        if ignore_zeros is not None:
            self.ignore_zeros = ignore_zeros
        if encoding is not None:
            self.encoding = encoding
        self.errors = errors

        if pax_headers is not None and self.format == PAX_FORMAT:
            self.pax_headers = pax_headers
        else:
            self.pax_headers = {}

        if debug is not None:
            self.debug = debug
        if errorlevel is not None:
            self.errorlevel = errorlevel

        # Init datastructures.
        self.closed = False
        self.members = []       # list of members as TarInfo objects
        self._loaded = False    # flag if all members have been read
        self.offset = self.fileobj.tell()
                                # current position in the archive file
        self.inodes = {}        # dictionary caching the inodes of
                                # archive members already added

        try:
            if self.mode == "r":
                self.firstmember = None
                self.firstmember = self.next()

            if self.mode == "a":
                # Move to the end of the archive,
                # before the first empty block.
                while True:
                    self.fileobj.seek(self.offset)
                    try:
                        tarinfo = self.tarinfo.fromtarfile(self)
                        self.members.append(tarinfo)
                    except EOFHeaderError:
                        self.fileobj.seek(self.offset)
                        break
                    except HeaderError as e:
                        raise ReadError(str(e))

            if self.mode in "aw":
                self._loaded = True

                if self.pax_headers:
                    buf = self.tarinfo.create_pax_global_header(self.pax_headers.copy())
                    self.fileobj.write(buf)
                    self.offset += len(buf)
        except:
            if not self._extfileobj:
                self.fileobj.close()
            self.closed = True
            raise

    #--------------------------------------------------------------------------
    # Below are the classmethods which act as alternate constructors to the
    # TarFile class. The open() method is the only one that is needed for
    # public use; it is the "super"-constructor and is able to select an
    # adequate "sub"-constructor for a particular compression using the mapping
    # from OPEN_METH.
    #
    # This concept allows one to subclass TarFile without losing the comfort of
    # the super-constructor. A sub-constructor is registered and made available
    # by adding it to the mapping in OPEN_METH.

    @classmethod
    def open(cls, name=None, mode="r", fileobj=None, bufsize=RECORDSIZE, **kwargs):
        """Open a tar archive for reading, writing or appending. Return
           an appropriate TarFile class.

           mode:
           'r' or 'r:*' open for reading with transparent compression
           'r:'         open for reading exclusively uncompressed
           'r:gz'       open for reading with gzip compression
           'r:bz2'      open for reading with bzip2 compression
           'a' or 'a:'  open for appending, creating the file if necessary
           'w' or 'w:'  open for writing without compression
           'w:gz'       open for writing with gzip compression
           'w:bz2'      open for writing with bzip2 compression

           'r|*'        open a stream of tar blocks with transparent compression
           'r|'         open an uncompressed stream of tar blocks for reading
           'r|gz'       open a gzip compressed stream of tar blocks
           'r|bz2'      open a bzip2 compressed stream of tar blocks
           'w|'         open an uncompressed stream for writing
           'w|gz'       open a gzip compressed stream for writing
           'w|bz2'      open a bzip2 compressed stream for writing
        """

        if not name and not fileobj:
            raise ValueError("nothing to open")

        if mode in ("r", "r:*"):
            # Find out which *open() is appropriate for opening the file.
            for comptype in cls.OPEN_METH:
                func = getattr(cls, cls.OPEN_METH[comptype])
                if fileobj is not None:
                    saved_pos = fileobj.tell()
                try:
                    return func(name, "r", fileobj, **kwargs)
                except (ReadError, CompressionError) as e:
                    if fileobj is not None:
                        fileobj.seek(saved_pos)
                    continue
            raise ReadError("file could not be opened successfully")

        elif ":" in mode:
            filemode, comptype = mode.split(":", 1)
            filemode = filemode or "r"
            comptype = comptype or "tar"

            # Select the *open() function according to
            # given compression.
            if comptype in cls.OPEN_METH:
                func = getattr(cls, cls.OPEN_METH[comptype])
            else:
                raise CompressionError("unknown compression type %r" % comptype)
            return func(name, filemode, fileobj, **kwargs)

        elif "|" in mode:
            filemode, comptype = mode.split("|", 1)
            filemode = filemode or "r"
            comptype = comptype or "tar"

            if filemode not in "rw":
                raise ValueError("mode must be 'r' or 'w'")

            stream = _Stream(name, filemode, comptype, fileobj, bufsize)
            try:
                t = cls(name, filemode, stream, **kwargs)
            except:
                stream.close()
                raise
            t._extfileobj = False
            return t

        elif mode in "aw":
            return cls.taropen(name, mode, fileobj, **kwargs)

        raise ValueError("undiscernible mode")

    @classmethod
    def taropen(cls, name, mode="r", fileobj=None, **kwargs):
        """Open uncompressed tar archive name for reading or writing.
        """
        if len(mode) > 1 or mode not in "raw":
            raise ValueError("mode must be 'r', 'a' or 'w'")
        return cls(name, mode, fileobj, **kwargs)

    @classmethod
    def gzopen(cls, name, mode="r", fileobj=None, compresslevel=9, **kwargs):
        """Open gzip compressed tar archive name for reading or writing.
           Appending is not allowed.
        """
        if len(mode) > 1 or mode not in "rw":
            raise ValueError("mode must be 'r' or 'w'")

        try:
            import gzip
            gzip.GzipFile
        except (ImportError, AttributeError):
            raise CompressionError("gzip module is not available")

        extfileobj = fileobj is not None
        try:
            fileobj = gzip.GzipFile(name, mode + "b", compresslevel, fileobj)
            t = cls.taropen(name, mode, fileobj, **kwargs)
        except IOError:
            if not extfileobj and fileobj is not None:
                fileobj.close()
            if fileobj is None:
                raise
            raise ReadError("not a gzip file")
        except:
            if not extfileobj and fileobj is not None:
                fileobj.close()
            raise
        t._extfileobj = extfileobj
        return t

    @classmethod
    def bz2open(cls, name, mode="r", fileobj=None, compresslevel=9, **kwargs):
        """Open bzip2 compressed tar archive name for reading or writing.
           Appending is not allowed.
        """
        if len(mode) > 1 or mode not in "rw":
            raise ValueError("mode must be 'r' or 'w'.")

        try:
            import bz2
        except ImportError:
            raise CompressionError("bz2 module is not available")

        if fileobj is not None:
            fileobj = _BZ2Proxy(fileobj, mode)
        else:
            fileobj = bz2.BZ2File(name, mode, compresslevel=compresslevel)

        try:
            t = cls.taropen(name, mode, fileobj, **kwargs)
        except (IOError, EOFError):
            fileobj.close()
            raise ReadError("not a bzip2 file")
        t._extfileobj = False
        return t

    # All *open() methods are registered here.
    OPEN_METH = {
        "tar": "taropen",   # uncompressed tar
        "gz":  "gzopen",    # gzip compressed tar
        "bz2": "bz2open"    # bzip2 compressed tar
    }
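
    # A hedged sketch of how a subclass could register its own
    # sub-constructor (illustrative only; "xzopen" and the xz handling are
    # hypothetical and not provided by this module):
    #
    #     class XZTarFile(TarFile):
    #         OPEN_METH = dict(TarFile.OPEN_METH, xz="xzopen")
    #
    #         @classmethod
    #         def xzopen(cls, name, mode="r", fileobj=None, **kwargs):
    #             import lzma   # assumes a Python that provides lzma
    #             fileobj = lzma.LZMAFile(fileobj or name, mode)
    #             return cls.taropen(name, mode, fileobj, **kwargs)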

    #--------------------------------------------------------------------------
    # The public methods which TarFile provides:

    def close(self):
        """Close the TarFile. In write-mode, two finishing zero blocks are
           appended to the archive.
        """
        if self.closed:
            return

        if self.mode in "aw":
            self.fileobj.write(NUL * (BLOCKSIZE * 2))
            self.offset += (BLOCKSIZE * 2)
            # fill up the end with zero-blocks
            # (like option -b20 for tar does)
            blocks, remainder = divmod(self.offset, RECORDSIZE)
            if remainder > 0:
                self.fileobj.write(NUL * (RECORDSIZE - remainder))

        if not self._extfileobj:
            self.fileobj.close()
        self.closed = True

    def getmember(self, name):
        """Return a TarInfo object for member `name'. If `name' can not be
           found in the archive, KeyError is raised. If a member occurs more
           than once in the archive, its last occurrence is assumed to be the
           most up-to-date version.
        """
        tarinfo = self._getmember(name)
        if tarinfo is None:
            raise KeyError("filename %r not found" % name)
        return tarinfo

    def getmembers(self):
        """Return the members of the archive as a list of TarInfo objects. The
           list has the same order as the members in the archive.
        """
        self._check()
        if not self._loaded:    # if we want to obtain a list of
            self._load()        # all members, we first have to
                                # scan the whole archive.
        return self.members

    def getnames(self):
        """Return the members of the archive as a list of their names. It has
           the same order as the list returned by getmembers().
        """
        return [tarinfo.name for tarinfo in self.getmembers()]

    def gettarinfo(self, name=None, arcname=None, fileobj=None):
        """Create a TarInfo object for either the file `name' or the file
           object `fileobj' (using os.fstat on its file descriptor). You can
           modify some of the TarInfo's attributes before you add it using
           addfile(). If given, `arcname' specifies an alternative name for the
           file in the archive.
        """
        self._check("aw")

        # When fileobj is given, replace name by
        # fileobj's real name.
        if fileobj is not None:
            name = fileobj.name

        # Building the name of the member in the archive.
        # Backward slashes are converted to forward slashes,
        # Absolute paths are turned to relative paths.
        if arcname is None:
            arcname = name
        drv, arcname = os.path.splitdrive(arcname)
        arcname = arcname.replace(os.sep, "/")
        arcname = arcname.lstrip("/")

        # Now, fill the TarInfo object with
        # information specific for the file.
        tarinfo = self.tarinfo()
        tarinfo.tarfile = self

        # Use os.stat or os.lstat, depending on platform
        # and if symlinks shall be resolved.
        if fileobj is None:
            if hasattr(os, "lstat") and not self.dereference:
                statres = os.lstat(name)
            else:
                statres = os.stat(name)
        else:
            statres = os.fstat(fileobj.fileno())
        linkname = ""

        stmd = statres.st_mode
        if stat.S_ISREG(stmd):
            inode = (statres.st_ino, statres.st_dev)
            if not self.dereference and statres.st_nlink > 1 and \
                    inode in self.inodes and arcname != self.inodes[inode]:
                # Is it a hardlink to an already
                # archived file?
                type = LNKTYPE
                linkname = self.inodes[inode]
            else:
                # The inode is added only if it is valid.
                # For win32 it is always 0.
                type = REGTYPE
                if inode[0]:
                    self.inodes[inode] = arcname
        elif stat.S_ISDIR(stmd):
            type = DIRTYPE
        elif stat.S_ISFIFO(stmd):
            type = FIFOTYPE
        elif stat.S_ISLNK(stmd):
            type = SYMTYPE
            linkname = os.readlink(name)
        elif stat.S_ISCHR(stmd):
            type = CHRTYPE
        elif stat.S_ISBLK(stmd):
            type = BLKTYPE
        else:
            return None

        # Fill the TarInfo object with all
        # information we can get.
        tarinfo.name = arcname
        tarinfo.mode = stmd
        tarinfo.uid = statres.st_uid
        tarinfo.gid = statres.st_gid
        if type == REGTYPE:
            tarinfo.size = statres.st_size
        else:
            tarinfo.size = 0
        tarinfo.mtime = statres.st_mtime
        tarinfo.type = type
        tarinfo.linkname = linkname
        if pwd:
            try:
                tarinfo.uname = pwd.getpwuid(tarinfo.uid)[0]
            except KeyError:
                pass
        if grp:
            try:
                tarinfo.gname = grp.getgrgid(tarinfo.gid)[0]
            except KeyError:
                pass

        if type in (CHRTYPE, BLKTYPE):
            if hasattr(os, "major") and hasattr(os, "minor"):
                tarinfo.devmajor = os.major(statres.st_rdev)
                tarinfo.devminor = os.minor(statres.st_rdev)
        return tarinfo

    def list(self, verbose=True):
        """Print a table of contents to sys.stdout. If `verbose' is False, only
           the names of the members are printed. If it is True, an `ls -l'-like
           output is produced.
        """
        self._check()

        for tarinfo in self:
            if verbose:
                print(filemode(tarinfo.mode), end=' ')
                print("%s/%s" % (tarinfo.uname or tarinfo.uid,
                                 tarinfo.gname or tarinfo.gid), end=' ')
                if tarinfo.ischr() or tarinfo.isblk():
                    print("%10s" % ("%d,%d" \
                                    % (tarinfo.devmajor, tarinfo.devminor)), end=' ')
                else:
                    print("%10d" % tarinfo.size, end=' ')
                print("%d-%02d-%02d %02d:%02d:%02d" \
                      % time.localtime(tarinfo.mtime)[:6], end=' ')

            print(tarinfo.name + ("/" if tarinfo.isdir() else ""), end=' ')

            if verbose:
                if tarinfo.issym():
                    print("->", tarinfo.linkname, end=' ')
                if tarinfo.islnk():
                    print("link to", tarinfo.linkname, end=' ')
            print()

    def add(self, name, arcname=None, recursive=True, exclude=None, filter=None):
        """Add the file `name' to the archive. `name' may be any type of file
           (directory, fifo, symbolic link, etc.). If given, `arcname'
           specifies an alternative name for the file in the archive.
           Directories are added recursively by default. This can be avoided by
           setting `recursive' to False. `exclude' is a function that should
           return True for each filename to be excluded. `filter' is a function
           that expects a TarInfo object argument and returns the changed
           TarInfo object, if it returns None the TarInfo object will be
           excluded from the archive.
        """
        self._check("aw")

        if arcname is None:
            arcname = name

        # Exclude pathnames.
        if exclude is not None:
            import warnings
            warnings.warn("use the filter argument instead",
                    DeprecationWarning, 2)
            if exclude(name):
                self._dbg(2, "tarfile: Excluded %r" % name)
                return

        # Skip if somebody tries to archive the archive...
        if self.name is not None and os.path.abspath(name) == self.name:
            self._dbg(2, "tarfile: Skipped %r" % name)
            return

        self._dbg(1, name)

        # Create a TarInfo object from the file.
        tarinfo = self.gettarinfo(name, arcname)

        if tarinfo is None:
            self._dbg(1, "tarfile: Unsupported type %r" % name)
            return

        # Change or exclude the TarInfo object.
        if filter is not None:
            tarinfo = filter(tarinfo)
            if tarinfo is None:
                self._dbg(2, "tarfile: Excluded %r" % name)
                return

        # Append the tar header and data to the archive.
        if tarinfo.isreg():
            f = bltn_open(name, "rb")
            self.addfile(tarinfo, f)
            f.close()

        elif tarinfo.isdir():
            self.addfile(tarinfo)
            if recursive:
                for f in os.listdir(name):
                    self.add(os.path.join(name, f), os.path.join(arcname, f),
                            recursive, exclude, filter=filter)

        else:
            self.addfile(tarinfo)
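
    # An illustrative `filter' callable for add() (hypothetical example):
    # it drops editor backup files and strips owner information.
    #
    #     def scrub(tarinfo):
    #         if tarinfo.name.endswith("~"):
    #             return None          # exclude the member
    #         tarinfo.uid = tarinfo.gid = 0
    #         tarinfo.uname = tarinfo.gname = "root"
    #         return tarinfo
    #
    #     tar.add("src", filter=scrub)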

    def addfile(self, tarinfo, fileobj=None):
        """Add the TarInfo object `tarinfo' to the archive. If `fileobj' is
           given, tarinfo.size bytes are read from it and added to the archive.
           You can create TarInfo objects using gettarinfo().
           On Windows platforms, `fileobj' should always be opened with mode
           'rb' to avoid inconsistencies between the reported and the actual
           file size.
        """
        self._check("aw")

        tarinfo = copy.copy(tarinfo)

        buf = tarinfo.tobuf(self.format, self.encoding, self.errors)
        self.fileobj.write(buf)
        self.offset += len(buf)

        # If there's data to follow, append it.
        if fileobj is not None:
            copyfileobj(fileobj, self.fileobj, tarinfo.size)
            blocks, remainder = divmod(tarinfo.size, BLOCKSIZE)
            if remainder > 0:
                self.fileobj.write(NUL * (BLOCKSIZE - remainder))
                blocks += 1
            self.offset += blocks * BLOCKSIZE

        self.members.append(tarinfo)

    def extractall(self, path=".", members=None):
        """Extract all members from the archive to the current working
           directory and set owner, modification time and permissions on
           directories afterwards. `path' specifies a different directory
           to extract to. `members' is optional and must be a subset of the
           list returned by getmembers().
        """
        directories = []

        if members is None:
            members = self

        for tarinfo in members:
            if tarinfo.isdir():
                # Extract directories with a safe mode.
                directories.append(tarinfo)
                tarinfo = copy.copy(tarinfo)
                tarinfo.mode = 0o700
            # Do not set_attrs directories, as we will do that further down
            self.extract(tarinfo, path, set_attrs=not tarinfo.isdir())

        # Reverse sort directories.
        directories.sort(key=lambda a: a.name)
        directories.reverse()

        # Set correct owner, mtime and filemode on directories.
        for tarinfo in directories:
            dirpath = os.path.join(path, tarinfo.name)
            try:
                self.chown(tarinfo, dirpath)
                self.utime(tarinfo, dirpath)
                self.chmod(tarinfo, dirpath)
            except ExtractError as e:
                if self.errorlevel > 1:
                    raise
                else:
                    self._dbg(1, "tarfile: %s" % e)

    def extract(self, member, path="", set_attrs=True):
        """Extract a member from the archive to the current working directory,
           using its full name. Its file information is extracted as accurately
           as possible. `member' may be a filename or a TarInfo object. You can
           specify a different directory using `path'. File attributes (owner,
           mtime, mode) are set unless `set_attrs' is False.
        """
        self._check("r")

        if isinstance(member, str):
            tarinfo = self.getmember(member)
        else:
            tarinfo = member

        # Prepare the link target for makelink().
        if tarinfo.islnk():
            tarinfo._link_target = os.path.join(path, tarinfo.linkname)

        try:
            self._extract_member(tarinfo, os.path.join(path, tarinfo.name),
                                 set_attrs=set_attrs)
        except EnvironmentError as e:
            if self.errorlevel > 0:
                raise
            else:
                if e.filename is None:
                    self._dbg(1, "tarfile: %s" % e.strerror)
                else:
                    self._dbg(1, "tarfile: %s %r" % (e.strerror, e.filename))
        except ExtractError as e:
            if self.errorlevel > 1:
                raise
            else:
                self._dbg(1, "tarfile: %s" % e)

    def extractfile(self, member):
        """Extract a member from the archive as a file object. `member' may be
           a filename or a TarInfo object. If `member' is a regular file, a
           file-like object is returned. If `member' is a link, a file-like
           object is constructed from the link's target. If `member' is none of
           the above, None is returned.
           The file-like object is read-only and provides the following
           methods: read(), readline(), readlines(), seek() and tell()
        """
        self._check("r")

        if isinstance(member, str):
            tarinfo = self.getmember(member)
        else:
            tarinfo = member

        if tarinfo.isreg():
            return self.fileobject(self, tarinfo)

        elif tarinfo.type not in SUPPORTED_TYPES:
            # If a member's type is unknown, it is treated as a
            # regular file.
            return self.fileobject(self, tarinfo)

        elif tarinfo.islnk() or tarinfo.issym():
            if isinstance(self.fileobj, _Stream):
                # A small but ugly workaround for the case that someone tries
                # to extract a (sym)link as a file-object from a non-seekable
                # stream of tar blocks.
                raise StreamError("cannot extract (sym)link as file object")
            else:
                # A (sym)link's file object is its target's file object.
                return self.extractfile(self._find_link_target(tarinfo))
        else:
            # If there's no data associated with the member (directory, chrdev,
            # blkdev, etc.), return None instead of a file object.
            return None

    def _extract_member(self, tarinfo, targetpath, set_attrs=True):
        """Extract the TarInfo object tarinfo to a physical
           file called targetpath.
        """
        # Fetch the TarInfo object for the given name
        # and build the destination pathname, replacing
        # forward slashes to platform specific separators.
        targetpath = targetpath.rstrip("/")
        targetpath = targetpath.replace("/", os.sep)

        # Create all upper directories.
        upperdirs = os.path.dirname(targetpath)
        if upperdirs and not os.path.exists(upperdirs):
            # Create directories that are not part of the archive with
            # default permissions.
            os.makedirs(upperdirs)

        if tarinfo.islnk() or tarinfo.issym():
            self._dbg(1, "%s -> %s" % (tarinfo.name, tarinfo.linkname))
        else:
            self._dbg(1, tarinfo.name)

        if tarinfo.isreg():
            self.makefile(tarinfo, targetpath)
        elif tarinfo.isdir():
            self.makedir(tarinfo, targetpath)
        elif tarinfo.isfifo():
            self.makefifo(tarinfo, targetpath)
        elif tarinfo.ischr() or tarinfo.isblk():
            self.makedev(tarinfo, targetpath)
        elif tarinfo.islnk() or tarinfo.issym():
            self.makelink(tarinfo, targetpath)
        elif tarinfo.type not in SUPPORTED_TYPES:
            self.makeunknown(tarinfo, targetpath)
        else:
            self.makefile(tarinfo, targetpath)

        if set_attrs:
            self.chown(tarinfo, targetpath)
            if not tarinfo.issym():
                self.chmod(tarinfo, targetpath)
                self.utime(tarinfo, targetpath)

    #--------------------------------------------------------------------------
    # Below are the different file methods. They are called via
    # _extract_member() when extract() is called. They can be replaced in a
    # subclass to implement other functionality.

    def makedir(self, tarinfo, targetpath):
        """Make a directory called targetpath.
        """
        try:
            # Use a safe mode for the directory, the real mode is set
            # later in _extract_member().
            os.mkdir(targetpath, 0o700)
        except EnvironmentError as e:
            if e.errno != errno.EEXIST:
                raise

    def makefile(self, tarinfo, targetpath):
        """Make a file called targetpath.
        """
        source = self.fileobj
        source.seek(tarinfo.offset_data)
        target = bltn_open(targetpath, "wb")
        if tarinfo.sparse is not None:
            for offset, size in tarinfo.sparse:
                target.seek(offset)
                copyfileobj(source, target, size)
        else:
            copyfileobj(source, target, tarinfo.size)
        target.seek(tarinfo.size)
        target.truncate()
        target.close()

    def makeunknown(self, tarinfo, targetpath):
        """Make a file from a TarInfo object with an unknown type
           at targetpath.
        """
        self.makefile(tarinfo, targetpath)
        self._dbg(1, "tarfile: Unknown file type %r, " \
                     "extracted as regular file." % tarinfo.type)

    def makefifo(self, tarinfo, targetpath):
        """Make a fifo called targetpath.
        """
        if hasattr(os, "mkfifo"):
            os.mkfifo(targetpath)
        else:
            raise ExtractError("fifo not supported by system")

    def makedev(self, tarinfo, targetpath):
        """Make a character or block device called targetpath.
        """
        if not hasattr(os, "mknod") or not hasattr(os, "makedev"):
            raise ExtractError("special devices not supported by system")

        mode = tarinfo.mode
        if tarinfo.isblk():
            mode |= stat.S_IFBLK
        else:
            mode |= stat.S_IFCHR

        os.mknod(targetpath, mode,
                 os.makedev(tarinfo.devmajor, tarinfo.devminor))

    def makelink(self, tarinfo, targetpath):
        """Make a (symbolic) link called targetpath. If it cannot be created
          (platform limitation), we try to make a copy of the referenced file
          instead of a link.
        """
        try:
            # For systems that support symbolic and hard links.
            if tarinfo.issym():
                os.symlink(tarinfo.linkname, targetpath)
            else:
                # See extract().
                if os.path.exists(tarinfo._link_target):
                    os.link(tarinfo._link_target, targetpath)
                else:
                    self._extract_member(self._find_link_target(tarinfo),
                                         targetpath)
        except symlink_exception:
            # The link could not be created (e.g. platform limitation), so
            # fall back to extracting a copy of the member the link refers to.
            try:
                self._extract_member(self._find_link_target(tarinfo),
                                     targetpath)
            except KeyError:
                raise ExtractError("unable to resolve link inside archive")

    def chown(self, tarinfo, targetpath):
        """Set owner of targetpath according to tarinfo.
        """
        if pwd and hasattr(os, "geteuid") and os.geteuid() == 0:
            # We have to be root to do so.
            try:
                g = grp.getgrnam(tarinfo.gname)[2]
            except KeyError:
                g = tarinfo.gid
            try:
                u = pwd.getpwnam(tarinfo.uname)[2]
            except KeyError:
                u = tarinfo.uid
            try:
                if tarinfo.issym() and hasattr(os, "lchown"):
                    os.lchown(targetpath, u, g)
                else:
                    if sys.platform != "os2emx":
                        os.chown(targetpath, u, g)
            except EnvironmentError as e:
                raise ExtractError("could not change owner")

    def chmod(self, tarinfo, targetpath):
        """Set file permissions of targetpath according to tarinfo.
        """
        if hasattr(os, 'chmod'):
            try:
                os.chmod(targetpath, tarinfo.mode)
            except EnvironmentError as e:
                raise ExtractError("could not change mode")

    def utime(self, tarinfo, targetpath):
        """Set modification time of targetpath according to tarinfo.
        """
        if not hasattr(os, 'utime'):
            return
        try:
            os.utime(targetpath, (tarinfo.mtime, tarinfo.mtime))
        except EnvironmentError as e:
            raise ExtractError("could not change modification time")

    #--------------------------------------------------------------------------
    def next(self):
        """Return the next member of the archive as a TarInfo object, when
           TarFile is opened for reading. Return None if there is no more
           available.
        """
        self._check("ra")
        if self.firstmember is not None:
            m = self.firstmember
            self.firstmember = None
            return m

        # Read the next block.
        self.fileobj.seek(self.offset)
        tarinfo = None
        while True:
            try:
                tarinfo = self.tarinfo.fromtarfile(self)
            except EOFHeaderError as e:
                if self.ignore_zeros:
                    self._dbg(2, "0x%X: %s" % (self.offset, e))
                    self.offset += BLOCKSIZE
                    continue
            except InvalidHeaderError as e:
                if self.ignore_zeros:
                    self._dbg(2, "0x%X: %s" % (self.offset, e))
                    self.offset += BLOCKSIZE
                    continue
                elif self.offset == 0:
                    raise ReadError(str(e))
            except EmptyHeaderError:
                if self.offset == 0:
                    raise ReadError("empty file")
            except TruncatedHeaderError as e:
                if self.offset == 0:
                    raise ReadError(str(e))
            except SubsequentHeaderError as e:
                raise ReadError(str(e))
            break

        if tarinfo is not None:
            self.members.append(tarinfo)
        else:
            self._loaded = True

        return tarinfo

    #--------------------------------------------------------------------------
    # Little helper methods:

    def _getmember(self, name, tarinfo=None, normalize=False):
        """Find an archive member by name from bottom to top.
           If tarinfo is given, it is used as the starting point.
        """
        # Ensure that all members have been loaded.
        members = self.getmembers()

        # Limit the member search list up to tarinfo.
        if tarinfo is not None:
            members = members[:members.index(tarinfo)]

        if normalize:
            name = os.path.normpath(name)

        for member in reversed(members):
            if normalize:
                member_name = os.path.normpath(member.name)
            else:
                member_name = member.name

            if name == member_name:
                return member

    def _load(self):
        """Read through the entire archive file and look for readable
           members.
        """
        while True:
            tarinfo = self.next()
            if tarinfo is None:
                break
        self._loaded = True

    def _check(self, mode=None):
        """Check if TarFile is still open, and if the operation's mode
           corresponds to TarFile's mode.
        """
        if self.closed:
            raise IOError("%s is closed" % self.__class__.__name__)
        if mode is not None and self.mode not in mode:
            raise IOError("bad operation for mode %r" % self.mode)

    def _find_link_target(self, tarinfo):
        """Find the target member of a symlink or hardlink member in the
           archive.
        """
        if tarinfo.issym():
            # Always search the entire archive.
            linkname = os.path.dirname(tarinfo.name) + "/" + tarinfo.linkname
            limit = None
        else:
            # Search the archive before the link, because a hard link is
            # just a reference to an already archived file.
            linkname = tarinfo.linkname
            limit = tarinfo

        member = self._getmember(linkname, tarinfo=limit, normalize=True)
        if member is None:
            raise KeyError("linkname %r not found" % linkname)
        return member

    def __iter__(self):
        """Provide an iterator object.
        """
        if self._loaded:
            return iter(self.members)
        else:
            return TarIter(self)

    def _dbg(self, level, msg):
        """Write debugging output to sys.stderr.
        """
        if level <= self.debug:
            print(msg, file=sys.stderr)

    def __enter__(self):
        self._check()
        return self

    def __exit__(self, type, value, traceback):
        if type is None:
            self.close()
        else:
            # An exception occurred. We must not call close() because
            # it would try to write end-of-archive blocks and padding.
            if not self._extfileobj:
                self.fileobj.close()
            self.closed = True
# class TarFile

class TarIter(object):
    """Iterator Class.

       for tarinfo in TarFile(...):
           suite...
    """

    def __init__(self, tarfile):
        """Construct a TarIter object.
        """
        self.tarfile = tarfile
        self.index = 0
    def __iter__(self):
        """Return iterator object.
        """
        return self

    def __next__(self):
        """Return the next item using TarFile's next() method.
           When all members have been read, set TarFile as _loaded.
        """
        # Fix for SF #1100429: Under rare circumstances it can
        # happen that getmembers() is called during iteration,
        # which will cause TarIter to stop prematurely.
        if not self.tarfile._loaded:
            tarinfo = self.tarfile.next()
            if not tarinfo:
                self.tarfile._loaded = True
                raise StopIteration
        else:
            try:
                tarinfo = self.tarfile.members[self.index]
            except IndexError:
                raise StopIteration
        self.index += 1
        return tarinfo

    next = __next__ # for Python 2.x

#--------------------
# exported functions
#--------------------
def is_tarfile(name):
    """Return True if name points to a tar archive that we
       are able to handle, else return False.
    """
    try:
        t = open(name)
        t.close()
        return True
    except TarError:
        return False

bltn_open = open
open = TarFile.open
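
# A minimal, self-contained usage sketch (an illustrative addition, not part
# of the original module): write a one-member archive with the TarFile class
# defined above and read the member list back.  Runs only when this file is
# executed directly.
if __name__ == "__main__":  # pragma: no cover
    import tempfile

    demo_dir = tempfile.mkdtemp()
    sample = os.path.join(demo_dir, "hello.txt")
    with bltn_open(sample, "w") as f:
        f.write("hello tar\n")

    demo_tar = os.path.join(demo_dir, "demo.tar")
    tar = TarFile.open(demo_tar, "w")
    tar.add(sample, arcname="hello.txt")
    tar.close()

    tar = TarFile.open(demo_tar, "r")
    print(tar.getnames())       # expected output: ['hello.txt']
    tar.close()
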
site-packages/pip/_vendor/distlib/_backport/sysconfig.py
# -*- coding: utf-8 -*-
#
# Copyright (C) 2012 The Python Software Foundation.
# See LICENSE.txt and CONTRIBUTORS.txt.
#
"""Access to Python's configuration information."""

import codecs
import os
import re
import sys
from os.path import pardir, realpath
try:
    import configparser
except ImportError:
    import ConfigParser as configparser


__all__ = [
    'get_config_h_filename',
    'get_config_var',
    'get_config_vars',
    'get_makefile_filename',
    'get_path',
    'get_path_names',
    'get_paths',
    'get_platform',
    'get_python_version',
    'get_scheme_names',
    'parse_config_h',
]


def _safe_realpath(path):
    try:
        return realpath(path)
    except OSError:
        return path


if sys.executable:
    _PROJECT_BASE = os.path.dirname(_safe_realpath(sys.executable))
else:
    # sys.executable can be empty if argv[0] has been changed and Python is
    # unable to retrieve the real program name
    _PROJECT_BASE = _safe_realpath(os.getcwd())

if os.name == "nt" and "pcbuild" in _PROJECT_BASE[-8:].lower():
    _PROJECT_BASE = _safe_realpath(os.path.join(_PROJECT_BASE, pardir))
# PC/VS7.1
if os.name == "nt" and "\\pc\\v" in _PROJECT_BASE[-10:].lower():
    _PROJECT_BASE = _safe_realpath(os.path.join(_PROJECT_BASE, pardir, pardir))
# PC/AMD64
if os.name == "nt" and "\\pcbuild\\amd64" in _PROJECT_BASE[-14:].lower():
    _PROJECT_BASE = _safe_realpath(os.path.join(_PROJECT_BASE, pardir, pardir))


def is_python_build():
    for fn in ("Setup.dist", "Setup.local"):
        if os.path.isfile(os.path.join(_PROJECT_BASE, "Modules", fn)):
            return True
    return False

_PYTHON_BUILD = is_python_build()

_cfg_read = False

def _ensure_cfg_read():
    global _cfg_read
    if not _cfg_read:
        from ..resources import finder
        backport_package = __name__.rsplit('.', 1)[0]
        _finder = finder(backport_package)
        _cfgfile = _finder.find('sysconfig.cfg')
        assert _cfgfile, 'sysconfig.cfg exists'
        with _cfgfile.as_stream() as s:
            _SCHEMES.readfp(s)
        if _PYTHON_BUILD:
            for scheme in ('posix_prefix', 'posix_home'):
                _SCHEMES.set(scheme, 'include', '{srcdir}/Include')
                _SCHEMES.set(scheme, 'platinclude', '{projectbase}/.')

        _cfg_read = True


_SCHEMES = configparser.RawConfigParser()
_VAR_REPL = re.compile(r'\{([^{]*?)\}')

def _expand_globals(config):
    _ensure_cfg_read()
    if config.has_section('globals'):
        globals = config.items('globals')
    else:
        globals = tuple()

    sections = config.sections()
    for section in sections:
        if section == 'globals':
            continue
        for option, value in globals:
            if config.has_option(section, option):
                continue
            config.set(section, option, value)
    config.remove_section('globals')

    # now expanding local variables defined in the cfg file
    #
    for section in config.sections():
        variables = dict(config.items(section))

        def _replacer(matchobj):
            name = matchobj.group(1)
            if name in variables:
                return variables[name]
            return matchobj.group(0)

        for option, value in config.items(section):
            config.set(section, option, _VAR_REPL.sub(_replacer, value))

#_expand_globals(_SCHEMES)

# FIXME don't rely on sys.version here, its format is an implementation detail
# of CPython, use sys.version_info or sys.hexversion
_PY_VERSION = sys.version.split()[0]
_PY_VERSION_SHORT = sys.version[:3]
_PY_VERSION_SHORT_NO_DOT = _PY_VERSION[0] + _PY_VERSION[2]
_PREFIX = os.path.normpath(sys.prefix)
_EXEC_PREFIX = os.path.normpath(sys.exec_prefix)
_CONFIG_VARS = None
_USER_BASE = None


def _subst_vars(path, local_vars):
    """In the string `path`, replace tokens like {some.thing} with the
    corresponding value from the map `local_vars`.

    If there is no corresponding value, leave the token unchanged.
    """
    def _replacer(matchobj):
        name = matchobj.group(1)
        if name in local_vars:
            return local_vars[name]
        elif name in os.environ:
            return os.environ[name]
        return matchobj.group(0)
    return _VAR_REPL.sub(_replacer, path)
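

# A minimal usage sketch (illustrative only, not part of the vendored module);
# the token names here are hypothetical.  Unknown names fall back to os.environ
# and are otherwise left untouched, so the second assert assumes 'no_such_token'
# is not set in the environment.
def _example_subst_vars():
    assert _subst_vars('{base}/lib/python', {'base': '/usr'}) == '/usr/lib/python'
    assert _subst_vars('{no_such_token}/lib', {}) == '{no_such_token}/lib'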


def _extend_dict(target_dict, other_dict):
    target_keys = target_dict.keys()
    for key, value in other_dict.items():
        if key in target_keys:
            continue
        target_dict[key] = value


def _expand_vars(scheme, vars):
    res = {}
    if vars is None:
        vars = {}
    _extend_dict(vars, get_config_vars())

    for key, value in _SCHEMES.items(scheme):
        if os.name in ('posix', 'nt'):
            value = os.path.expanduser(value)
        res[key] = os.path.normpath(_subst_vars(value, vars))
    return res


def format_value(value, vars):
    def _replacer(matchobj):
        name = matchobj.group(1)
        if name in vars:
            return vars[name]
        return matchobj.group(0)
    return _VAR_REPL.sub(_replacer, value)
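

# Illustrative sketch only: format_value() is the simpler sibling of
# _subst_vars(); it expands {name} tokens from the supplied mapping but never
# falls back to os.environ.
def _example_format_value():
    assert format_value('{base}/include/python{version}',
                        {'base': '/usr', 'version': '2.7'}) == '/usr/include/python2.7'
    assert format_value('{unknown}', {}) == '{unknown}'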


def _get_default_scheme():
    if os.name == 'posix':
        # the default scheme for posix is posix_prefix
        return 'posix_prefix'
    return os.name


def _getuserbase():
    env_base = os.environ.get("PYTHONUSERBASE", None)

    def joinuser(*args):
        return os.path.expanduser(os.path.join(*args))

    # what about 'os2emx', 'riscos' ?
    if os.name == "nt":
        base = os.environ.get("APPDATA") or "~"
        if env_base:
            return env_base
        else:
            return joinuser(base, "Python")

    if sys.platform == "darwin":
        framework = get_config_var("PYTHONFRAMEWORK")
        if framework:
            if env_base:
                return env_base
            else:
                return joinuser("~", "Library", framework, "%d.%d" %
                                sys.version_info[:2])

    if env_base:
        return env_base
    else:
        return joinuser("~", ".local")


def _parse_makefile(filename, vars=None):
    """Parse a Makefile-style file.

    A dictionary containing name/value pairs is returned.  If an
    optional dictionary is passed in as the second argument, it is
    used instead of a new dictionary.
    """
    # Regexes needed for parsing Makefile (and similar syntaxes,
    # like old-style Setup files).
    _variable_rx = re.compile(r"([a-zA-Z][a-zA-Z0-9_]+)\s*=\s*(.*)")
    _findvar1_rx = re.compile(r"\$\(([A-Za-z][A-Za-z0-9_]*)\)")
    _findvar2_rx = re.compile(r"\${([A-Za-z][A-Za-z0-9_]*)}")

    if vars is None:
        vars = {}
    done = {}
    notdone = {}

    with codecs.open(filename, encoding='utf-8', errors="surrogateescape") as f:
        lines = f.readlines()

    for line in lines:
        if line.startswith('#') or line.strip() == '':
            continue
        m = _variable_rx.match(line)
        if m:
            n, v = m.group(1, 2)
            v = v.strip()
            # `$$' is a literal `$' in make
            tmpv = v.replace('$$', '')

            if "$" in tmpv:
                notdone[n] = v
            else:
                try:
                    v = int(v)
                except ValueError:
                    # insert literal `$'
                    done[n] = v.replace('$$', '$')
                else:
                    done[n] = v

    # do variable interpolation here
    variables = list(notdone.keys())

    # Variables with a 'PY_' prefix in the makefile. These need to
    # be made available without that prefix through sysconfig.
    # Special care is needed to ensure that variable expansion works, even
    # if the expansion uses the name without a prefix.
    renamed_variables = ('CFLAGS', 'LDFLAGS', 'CPPFLAGS')

    while len(variables) > 0:
        for name in tuple(variables):
            value = notdone[name]
            m = _findvar1_rx.search(value) or _findvar2_rx.search(value)
            if m is not None:
                n = m.group(1)
                found = True
                if n in done:
                    item = str(done[n])
                elif n in notdone:
                    # get it on a subsequent round
                    found = False
                elif n in os.environ:
                    # do it like make: fall back to environment
                    item = os.environ[n]

                elif n in renamed_variables:
                    if (name.startswith('PY_') and
                        name[3:] in renamed_variables):
                        item = ""

                    elif 'PY_' + n in notdone:
                        found = False

                    else:
                        item = str(done['PY_' + n])

                else:
                    done[n] = item = ""

                if found:
                    after = value[m.end():]
                    value = value[:m.start()] + item + after
                    if "$" in after:
                        notdone[name] = value
                    else:
                        try:
                            value = int(value)
                        except ValueError:
                            done[name] = value.strip()
                        else:
                            done[name] = value
                        variables.remove(name)

                        if (name.startswith('PY_') and
                            name[3:] in renamed_variables):

                            name = name[3:]
                            if name not in done:
                                done[name] = value

            else:
                # bogus variable reference (e.g. "prefix=$/opt/python");
                # just drop it since we can't deal
                done[name] = value
                variables.remove(name)

    # strip spurious spaces
    for k, v in done.items():
        if isinstance(v, str):
            done[k] = v.strip()

    # save the results in the global dictionary
    vars.update(done)
    return vars
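

# Illustrative sketch only (hypothetical file contents): _parse_makefile()
# resolves $(VAR) / ${VAR} references and converts purely numeric values to int.
def _example_parse_makefile():
    import tempfile
    fd, path = tempfile.mkstemp(suffix='.mk')
    try:
        with os.fdopen(fd, 'w') as f:
            f.write('prefix=/usr\nLIBDIR=$(prefix)/lib\nVERSION=3\n')
        parsed = _parse_makefile(path)
        assert parsed['LIBDIR'] == '/usr/lib' and parsed['VERSION'] == 3
    finally:
        os.remove(path)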


def get_makefile_filename():
    """Return the path of the Makefile."""
    if _PYTHON_BUILD:
        return os.path.join(_PROJECT_BASE, "Makefile")
    if hasattr(sys, 'abiflags'):
        config_dir_name = 'config-%s%s' % (_PY_VERSION_SHORT, sys.abiflags)
    else:
        config_dir_name = 'config'
    return os.path.join(get_path('stdlib'), config_dir_name, 'Makefile')


def _init_posix(vars):
    """Initialize the module as appropriate for POSIX systems."""
    # load the installed Makefile:
    makefile = get_makefile_filename()
    try:
        _parse_makefile(makefile, vars)
    except IOError as e:
        msg = "invalid Python installation: unable to open %s" % makefile
        if hasattr(e, "strerror"):
            msg = msg + " (%s)" % e.strerror
        raise IOError(msg)
    # load the installed pyconfig.h:
    config_h = get_config_h_filename()
    try:
        with open(config_h) as f:
            parse_config_h(f, vars)
    except IOError as e:
        msg = "invalid Python installation: unable to open %s" % config_h
        if hasattr(e, "strerror"):
            msg = msg + " (%s)" % e.strerror
        raise IOError(msg)
    # On AIX, there are wrong paths to the linker scripts in the Makefile
    # -- these paths are relative to the Python source, but when installed
    # the scripts are in another directory.
    if _PYTHON_BUILD:
        vars['LDSHARED'] = vars['BLDSHARED']


def _init_non_posix(vars):
    """Initialize the module as appropriate for NT"""
    # set basic install directories
    vars['LIBDEST'] = get_path('stdlib')
    vars['BINLIBDEST'] = get_path('platstdlib')
    vars['INCLUDEPY'] = get_path('include')
    vars['SO'] = '.pyd'
    vars['EXE'] = '.exe'
    vars['VERSION'] = _PY_VERSION_SHORT_NO_DOT
    vars['BINDIR'] = os.path.dirname(_safe_realpath(sys.executable))

#
# public APIs
#


def parse_config_h(fp, vars=None):
    """Parse a config.h-style file.

    A dictionary containing name/value pairs is returned.  If an
    optional dictionary is passed in as the second argument, it is
    used instead of a new dictionary.
    """
    if vars is None:
        vars = {}
    define_rx = re.compile("#define ([A-Z][A-Za-z0-9_]+) (.*)\n")
    undef_rx = re.compile("/[*] #undef ([A-Z][A-Za-z0-9_]+) [*]/\n")

    while True:
        line = fp.readline()
        if not line:
            break
        m = define_rx.match(line)
        if m:
            n, v = m.group(1, 2)
            try:
                v = int(v)
            except ValueError:
                pass
            vars[n] = v
        else:
            m = undef_rx.match(line)
            if m:
                vars[m.group(1)] = 0
    return vars
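

# Illustrative sketch only: "#define NAME value" lines become (possibly int)
# values and commented-out "#undef NAME" lines become 0.
def _example_parse_config_h():
    from io import StringIO
    sample = StringIO(u'#define HAVE_FOO 1\n/* #undef HAVE_BAR */\n')
    assert parse_config_h(sample) == {'HAVE_FOO': 1, 'HAVE_BAR': 0}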


def get_config_h_filename():
    """Return the path of pyconfig.h."""
    if _PYTHON_BUILD:
        if os.name == "nt":
            inc_dir = os.path.join(_PROJECT_BASE, "PC")
        else:
            inc_dir = _PROJECT_BASE
    else:
        inc_dir = get_path('platinclude')
    return os.path.join(inc_dir, 'pyconfig.h')


def get_scheme_names():
    """Return a tuple containing the schemes names."""
    return tuple(sorted(_SCHEMES.sections()))


def get_path_names():
    """Return a tuple containing the paths names."""
    # xxx see if we want a static list
    return _SCHEMES.options('posix_prefix')


def get_paths(scheme=_get_default_scheme(), vars=None, expand=True):
    """Return a mapping containing an install scheme.

    ``scheme`` is the install scheme name. If not provided, it will
    return the default scheme for the current platform.
    """
    _ensure_cfg_read()
    if expand:
        return _expand_vars(scheme, vars)
    else:
        return dict(_SCHEMES.items(scheme))


def get_path(name, scheme=_get_default_scheme(), vars=None, expand=True):
    """Return a path corresponding to the scheme.

    ``scheme`` is the install scheme name.
    """
    return get_paths(scheme, vars, expand)[name]
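

# Illustrative sketch only: both calls need the sysconfig.cfg shipped with
# distlib (loaded lazily via _ensure_cfg_read), so this is meaningful only when
# the package's resources are importable.
def _example_get_paths():
    paths = get_paths('posix_prefix', expand=False)   # raw {token} templates
    assert 'stdlib' in paths
    assert get_path('stdlib') == get_paths()['stdlib']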


def get_config_vars(*args):
    """With no arguments, return a dictionary of all configuration
    variables relevant for the current platform.

    On Unix, this means every variable defined in Python's installed Makefile;
    On Windows and Mac OS it's a much smaller set.

    With arguments, return a list of values that result from looking up
    each argument in the configuration variable dictionary.
    """
    global _CONFIG_VARS
    if _CONFIG_VARS is None:
        _CONFIG_VARS = {}
        # Normalized versions of prefix and exec_prefix are handy to have;
        # in fact, these are the standard versions used most places in the
        # distutils2 module.
        _CONFIG_VARS['prefix'] = _PREFIX
        _CONFIG_VARS['exec_prefix'] = _EXEC_PREFIX
        _CONFIG_VARS['py_version'] = _PY_VERSION
        _CONFIG_VARS['py_version_short'] = _PY_VERSION_SHORT
        _CONFIG_VARS['py_version_nodot'] = _PY_VERSION[0] + _PY_VERSION[2]
        _CONFIG_VARS['base'] = _PREFIX
        _CONFIG_VARS['platbase'] = _EXEC_PREFIX
        _CONFIG_VARS['projectbase'] = _PROJECT_BASE
        try:
            _CONFIG_VARS['abiflags'] = sys.abiflags
        except AttributeError:
            # sys.abiflags may not be defined on all platforms.
            _CONFIG_VARS['abiflags'] = ''

        if os.name in ('nt', 'os2'):
            _init_non_posix(_CONFIG_VARS)
        if os.name == 'posix':
            _init_posix(_CONFIG_VARS)
        # Setting 'userbase' is done below the call to the
        # init function to enable using 'get_config_var' in
        # the init-function.
        if sys.version >= '2.6':
            _CONFIG_VARS['userbase'] = _getuserbase()

        if 'srcdir' not in _CONFIG_VARS:
            _CONFIG_VARS['srcdir'] = _PROJECT_BASE
        else:
            _CONFIG_VARS['srcdir'] = _safe_realpath(_CONFIG_VARS['srcdir'])

        # Convert srcdir into an absolute path if it appears necessary.
        # Normally it is relative to the build directory.  However, during
        # testing, for example, we might be running a non-installed python
        # from a different directory.
        if _PYTHON_BUILD and os.name == "posix":
            base = _PROJECT_BASE
            try:
                cwd = os.getcwd()
            except OSError:
                cwd = None
            if (not os.path.isabs(_CONFIG_VARS['srcdir']) and
                base != cwd):
                # srcdir is relative and we are not in the same directory
                # as the executable. Assume executable is in the build
                # directory and make srcdir absolute.
                srcdir = os.path.join(base, _CONFIG_VARS['srcdir'])
                _CONFIG_VARS['srcdir'] = os.path.normpath(srcdir)

        if sys.platform == 'darwin':
            kernel_version = os.uname()[2]  # Kernel version (8.4.3)
            major_version = int(kernel_version.split('.')[0])

            if major_version < 8:
                # On macOS before 10.4, check if -arch and -isysroot
                # are in CFLAGS or LDFLAGS and remove them if they are.
                # This is needed when building extensions on a 10.3 system
                # using a universal build of python.
                for key in ('LDFLAGS', 'BASECFLAGS',
                        # a number of derived variables. These need to be
                        # patched up as well.
                        'CFLAGS', 'PY_CFLAGS', 'BLDSHARED'):
                    flags = _CONFIG_VARS[key]
                    flags = re.sub(r'-arch\s+\w+\s', ' ', flags)
                    flags = re.sub(r'-isysroot [^ \t]*', ' ', flags)
                    _CONFIG_VARS[key] = flags
            else:
                # Allow the user to override the architecture flags using
                # an environment variable.
                # NOTE: This name was introduced by Apple in OSX 10.5 and
                # is used by several scripting languages distributed with
                # that OS release.
                if 'ARCHFLAGS' in os.environ:
                    arch = os.environ['ARCHFLAGS']
                    for key in ('LDFLAGS', 'BASECFLAGS',
                        # a number of derived variables. These need to be
                        # patched up as well.
                        'CFLAGS', 'PY_CFLAGS', 'BLDSHARED'):

                        flags = _CONFIG_VARS[key]
                        flags = re.sub(r'-arch\s+\w+\s', ' ', flags)
                        flags = flags + ' ' + arch
                        _CONFIG_VARS[key] = flags

                # If we're on OSX 10.5 or later and the user tries to
                # compile an extension using an SDK that is not present
                # on the current machine it is better to not use an SDK
                # than to fail.
                #
                # The major usecase for this is users using a Python.org
                # binary installer  on OSX 10.6: that installer uses
                # the 10.4u SDK, but that SDK is not installed by default
                # when you install Xcode.
                #
                CFLAGS = _CONFIG_VARS.get('CFLAGS', '')
                m = re.search(r'-isysroot\s+(\S+)', CFLAGS)
                if m is not None:
                    sdk = m.group(1)
                    if not os.path.exists(sdk):
                        for key in ('LDFLAGS', 'BASECFLAGS',
                             # a number of derived variables. These need to be
                             # patched up as well.
                            'CFLAGS', 'PY_CFLAGS', 'BLDSHARED'):

                            flags = _CONFIG_VARS[key]
                            flags = re.sub(r'-isysroot\s+\S+(\s|$)', ' ', flags)
                            _CONFIG_VARS[key] = flags

    if args:
        vals = []
        for name in args:
            vals.append(_CONFIG_VARS.get(name))
        return vals
    else:
        return _CONFIG_VARS


def get_config_var(name):
    """Return the value of a single variable using the dictionary returned by
    'get_config_vars()'.

    Equivalent to get_config_vars().get(name)
    """
    return get_config_vars().get(name)
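

# Illustrative sketch only: get_config_var() is a thin wrapper over the cached
# dictionary built by get_config_vars(); unknown names simply yield None
# ('not_a_real_variable' below is a hypothetical name).
def _example_get_config_var():
    assert get_config_var('prefix') == get_config_vars()['prefix']
    assert get_config_var('not_a_real_variable') is None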


def get_platform():
    """Return a string that identifies the current platform.

    This is used mainly to distinguish platform-specific build directories and
    platform-specific built distributions.  Typically includes the OS name
    and version and the architecture (as supplied by 'os.uname()'),
    although the exact information included depends on the OS; eg. for IRIX
    the architecture isn't particularly important (IRIX only runs on SGI
    hardware), but for Linux the kernel version isn't particularly
    important.

    Examples of returned values:
       linux-i586
       linux-alpha (?)
       solaris-2.6-sun4u
       irix-5.3
       irix64-6.2

    Windows will return one of:
       win-amd64 (64bit Windows on AMD64, aka x86_64, Intel64, EM64T, etc.)
       win-ia64 (64bit Windows on Itanium)
       win32 (all others - specifically, sys.platform is returned)

    For other non-POSIX platforms, currently just returns 'sys.platform'.
    """
    if os.name == 'nt':
        # sniff sys.version for architecture.
        prefix = " bit ("
        i = sys.version.find(prefix)
        if i == -1:
            return sys.platform
        j = sys.version.find(")", i)
        look = sys.version[i+len(prefix):j].lower()
        if look == 'amd64':
            return 'win-amd64'
        if look == 'itanium':
            return 'win-ia64'
        return sys.platform

    if os.name != "posix" or not hasattr(os, 'uname'):
        # XXX what about the architecture? NT is Intel or Alpha,
        # Mac OS is M68k or PPC, etc.
        return sys.platform

    # Try to distinguish various flavours of Unix
    osname, host, release, version, machine = os.uname()

    # Convert the OS name to lowercase, remove '/' characters
    # (to accommodate BSD/OS), and translate spaces (for "Power Macintosh")
    osname = osname.lower().replace('/', '')
    machine = machine.replace(' ', '_')
    machine = machine.replace('/', '-')

    if osname[:5] == "linux":
        # At least on Linux/Intel, 'machine' is the processor --
        # i386, etc.
        # XXX what about Alpha, SPARC, etc?
        return  "%s-%s" % (osname, machine)
    elif osname[:5] == "sunos":
        if release[0] >= "5":           # SunOS 5 == Solaris 2
            osname = "solaris"
            release = "%d.%s" % (int(release[0]) - 3, release[2:])
        # fall through to standard osname-release-machine representation
    elif osname[:4] == "irix":              # could be "irix64"!
        return "%s-%s" % (osname, release)
    elif osname[:3] == "aix":
        return "%s-%s.%s" % (osname, version, release)
    elif osname[:6] == "cygwin":
        osname = "cygwin"
        rel_re = re.compile(r'[\d.]+')
        m = rel_re.match(release)
        if m:
            release = m.group()
    elif osname[:6] == "darwin":
        #
        # For our purposes, we'll assume that the system version from
        # distutils' perspective is what MACOSX_DEPLOYMENT_TARGET is set
        # to. This makes the compatibility story a bit more sane because the
        # machine is going to compile and link as if it were
        # MACOSX_DEPLOYMENT_TARGET.
        cfgvars = get_config_vars()
        macver = cfgvars.get('MACOSX_DEPLOYMENT_TARGET')

        if True:
            # Always calculate the release of the running machine,
            # needed to determine if we can build fat binaries or not.

            macrelease = macver
            # Get the system version. Reading this plist is a documented
            # way to get the system version (see the documentation for
            # the Gestalt Manager)
            try:
                f = open('/System/Library/CoreServices/SystemVersion.plist')
            except IOError:
                # We're on a plain darwin box, fall back to the default
                # behaviour.
                pass
            else:
                try:
                    m = re.search(r'<key>ProductUserVisibleVersion</key>\s*'
                                  r'<string>(.*?)</string>', f.read())
                finally:
                    f.close()
                if m is not None:
                    macrelease = '.'.join(m.group(1).split('.')[:2])
                # else: fall back to the default behaviour

        if not macver:
            macver = macrelease

        if macver:
            release = macver
            osname = "macosx"

            if ((macrelease + '.') >= '10.4.' and
                '-arch' in get_config_vars().get('CFLAGS', '').strip()):
                # The universal build will build fat binaries, but not on
                # systems before 10.4
                #
                # Try to detect 4-way universal builds, those have machine-type
                # 'universal' instead of 'fat'.

                machine = 'fat'
                cflags = get_config_vars().get('CFLAGS')

                archs = re.findall(r'-arch\s+(\S+)', cflags)
                archs = tuple(sorted(set(archs)))

                if len(archs) == 1:
                    machine = archs[0]
                elif archs == ('i386', 'ppc'):
                    machine = 'fat'
                elif archs == ('i386', 'x86_64'):
                    machine = 'intel'
                elif archs == ('i386', 'ppc', 'x86_64'):
                    machine = 'fat3'
                elif archs == ('ppc64', 'x86_64'):
                    machine = 'fat64'
                elif archs == ('i386', 'ppc', 'ppc64', 'x86_64'):
                    machine = 'universal'
                else:
                    raise ValueError(
                       "Don't know machine value for archs=%r" % (archs,))

            elif machine == 'i386':
                # On OSX the machine type returned by uname is always the
                # 32-bit variant, even if the executable architecture is
                # the 64-bit variant
                if sys.maxsize >= 2**32:
                    machine = 'x86_64'

            elif machine in ('PowerPC', 'Power_Macintosh'):
                # Pick a sane name for the PPC architecture.
                # See 'i386' case
                if sys.maxsize >= 2**32:
                    machine = 'ppc64'
                else:
                    machine = 'ppc'

    return "%s-%s-%s" % (osname, release, machine)


def get_python_version():
    return _PY_VERSION_SHORT


def _print_dict(title, data):
    for index, (key, value) in enumerate(sorted(data.items())):
        if index == 0:
            print('%s: ' % (title))
        print('\t%s = "%s"' % (key, value))


def _main():
    """Display all information sysconfig detains."""
    print('Platform: "%s"' % get_platform())
    print('Python version: "%s"' % get_python_version())
    print('Current installation scheme: "%s"' % _get_default_scheme())
    print()
    _print_dict('Paths', get_paths())
    print()
    _print_dict('Variables', get_config_vars())


if __name__ == '__main__':
    _main()
PKZzoo-site-packages/pip/_vendor/distlib/markers.pyonu[
PKZ_s`aa+site-packages/pip/_vendor/distlib/compat.pynu[# -*- coding: utf-8 -*-
#
# Copyright (C) 2013-2016 Vinay Sajip.
# Licensed to the Python Software Foundation under a contributor agreement.
# See LICENSE.txt and CONTRIBUTORS.txt.
#
from __future__ import absolute_import

import os
import re
import sys

try:
    import ssl
except ImportError:
    ssl = None

if sys.version_info[0] < 3:  # pragma: no cover
    from StringIO import StringIO
    string_types = basestring,
    text_type = unicode
    from types import FileType as file_type
    import __builtin__ as builtins
    import ConfigParser as configparser
    from ._backport import shutil
    from urlparse import urlparse, urlunparse, urljoin, urlsplit, urlunsplit
    from urllib import (urlretrieve, quote as _quote, unquote, url2pathname,
                        pathname2url, ContentTooShortError, splittype)

    def quote(s):
        if isinstance(s, unicode):
            s = s.encode('utf-8')
        return _quote(s)

    import urllib2
    from urllib2 import (Request, urlopen, URLError, HTTPError,
                         HTTPBasicAuthHandler, HTTPPasswordMgr,
                         HTTPHandler, HTTPRedirectHandler,
                         build_opener)
    if ssl:
        from urllib2 import HTTPSHandler
    import httplib
    import xmlrpclib
    import Queue as queue
    from HTMLParser import HTMLParser
    import htmlentitydefs
    raw_input = raw_input
    from itertools import ifilter as filter
    from itertools import ifilterfalse as filterfalse

    _userprog = None
    def splituser(host):
        """splituser('user[:passwd]@host[:port]') --> 'user[:passwd]', 'host[:port]'."""
        global _userprog
        if _userprog is None:
            import re
            _userprog = re.compile('^(.*)@(.*)$')

        match = _userprog.match(host)
        if match: return match.group(1, 2)
        return None, host
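
    # Illustrative sketch only (py2 branch): credentials, when present, are
    # split off the netloc; otherwise the user part is None.
    def _example_splituser():
        assert splituser('user:pw@example.com:8080') == ('user:pw', 'example.com:8080')
        assert splituser('example.com') == (None, 'example.com')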

else:  # pragma: no cover
    from io import StringIO
    string_types = str,
    text_type = str
    from io import TextIOWrapper as file_type
    import builtins
    import configparser
    import shutil
    from urllib.parse import (urlparse, urlunparse, urljoin, splituser, quote,
                              unquote, urlsplit, urlunsplit, splittype)
    from urllib.request import (urlopen, urlretrieve, Request, url2pathname,
                                pathname2url,
                                HTTPBasicAuthHandler, HTTPPasswordMgr,
                                HTTPHandler, HTTPRedirectHandler,
                                build_opener)
    if ssl:
        from urllib.request import HTTPSHandler
    from urllib.error import HTTPError, URLError, ContentTooShortError
    import http.client as httplib
    import urllib.request as urllib2
    import xmlrpc.client as xmlrpclib
    import queue
    from html.parser import HTMLParser
    import html.entities as htmlentitydefs
    raw_input = input
    from itertools import filterfalse
    filter = filter

try:
    from ssl import match_hostname, CertificateError
except ImportError: # pragma: no cover
    class CertificateError(ValueError):
        pass


    def _dnsname_match(dn, hostname, max_wildcards=1):
        """Matching according to RFC 6125, section 6.4.3

        http://tools.ietf.org/html/rfc6125#section-6.4.3
        """
        pats = []
        if not dn:
            return False

        parts = dn.split('.')
        leftmost, remainder = parts[0], parts[1:]

        wildcards = leftmost.count('*')
        if wildcards > max_wildcards:
            # Issue #17980: avoid denials of service by refusing more
            # than one wildcard per fragment.  A survey of established
            # policy among SSL implementations showed it to be a
            # reasonable choice.
            raise CertificateError(
                "too many wildcards in certificate DNS name: " + repr(dn))

        # speed up common case w/o wildcards
        if not wildcards:
            return dn.lower() == hostname.lower()

        # RFC 6125, section 6.4.3, subitem 1.
        # The client SHOULD NOT attempt to match a presented identifier in which
        # the wildcard character comprises a label other than the left-most label.
        if leftmost == '*':
            # When '*' is a fragment by itself, it matches a non-empty dotless
            # fragment.
            pats.append('[^.]+')
        elif leftmost.startswith('xn--') or hostname.startswith('xn--'):
            # RFC 6125, section 6.4.3, subitem 3.
            # The client SHOULD NOT attempt to match a presented identifier
            # where the wildcard character is embedded within an A-label or
            # U-label of an internationalized domain name.
            pats.append(re.escape(leftmost))
        else:
            # Otherwise, '*' matches any dotless string, e.g. www*
            pats.append(re.escape(leftmost).replace(r'\*', '[^.]*'))

        # add the remaining fragments, ignore any wildcards
        for frag in remainder:
            pats.append(re.escape(frag))

        pat = re.compile(r'\A' + r'\.'.join(pats) + r'\Z', re.IGNORECASE)
        return pat.match(hostname)


    def match_hostname(cert, hostname):
        """Verify that *cert* (in decoded format as returned by
        SSLSocket.getpeercert()) matches the *hostname*.  RFC 2818 and RFC 6125
        rules are followed, but IP addresses are not accepted for *hostname*.

        CertificateError is raised on failure. On success, the function
        returns nothing.
        """
        if not cert:
            raise ValueError("empty or no certificate, match_hostname needs a "
                             "SSL socket or SSL context with either "
                             "CERT_OPTIONAL or CERT_REQUIRED")
        dnsnames = []
        san = cert.get('subjectAltName', ())
        for key, value in san:
            if key == 'DNS':
                if _dnsname_match(value, hostname):
                    return
                dnsnames.append(value)
        if not dnsnames:
            # The subject is only checked when there is no dNSName entry
            # in subjectAltName
            for sub in cert.get('subject', ()):
                for key, value in sub:
                    # XXX according to RFC 2818, the most specific Common Name
                    # must be used.
                    if key == 'commonName':
                        if _dnsname_match(value, hostname):
                            return
                        dnsnames.append(value)
        if len(dnsnames) > 1:
            raise CertificateError("hostname %r "
                "doesn't match either of %s"
                % (hostname, ', '.join(map(repr, dnsnames))))
        elif len(dnsnames) == 1:
            raise CertificateError("hostname %r "
                "doesn't match %r"
                % (hostname, dnsnames[0]))
        else:
            raise CertificateError("no appropriate commonName or "
                "subjectAltName fields were found")


try:
    from types import SimpleNamespace as Container
except ImportError:  # pragma: no cover
    class Container(object):
        """
        A generic container for when multiple values need to be returned
        """
        def __init__(self, **kwargs):
            self.__dict__.update(kwargs)


try:
    from shutil import which
except ImportError:  # pragma: no cover
    # Implementation from Python 3.3
    def which(cmd, mode=os.F_OK | os.X_OK, path=None):
        """Given a command, mode, and a PATH string, return the path which
        conforms to the given mode on the PATH, or None if there is no such
        file.

        `mode` defaults to os.F_OK | os.X_OK. `path` defaults to the result
        of os.environ.get("PATH"), or can be overridden with a custom search
        path.

        """
        # Check that a given file can be accessed with the correct mode.
        # Additionally check that `file` is not a directory, as on Windows
        # directories pass the os.access check.
        def _access_check(fn, mode):
            return (os.path.exists(fn) and os.access(fn, mode)
                    and not os.path.isdir(fn))

        # If we're given a path with a directory part, look it up directly rather
        # than referring to PATH directories. This includes checking relative to the
        # current directory, e.g. ./script
        if os.path.dirname(cmd):
            if _access_check(cmd, mode):
                return cmd
            return None

        if path is None:
            path = os.environ.get("PATH", os.defpath)
        if not path:
            return None
        path = path.split(os.pathsep)

        if sys.platform == "win32":
            # The current directory takes precedence on Windows.
            if not os.curdir in path:
                path.insert(0, os.curdir)

            # PATHEXT is necessary to check on Windows.
            pathext = os.environ.get("PATHEXT", "").split(os.pathsep)
            # See if the given file matches any of the expected path extensions.
            # This will allow us to short circuit when given "python.exe".
            # If it does match, only test that one, otherwise we have to try
            # others.
            if any(cmd.lower().endswith(ext.lower()) for ext in pathext):
                files = [cmd]
            else:
                files = [cmd + ext for ext in pathext]
        else:
            # On other platforms you don't have things like PATHEXT to tell you
            # what file suffixes are executable, so just pass on cmd as-is.
            files = [cmd]

        seen = set()
        for dir in path:
            normdir = os.path.normcase(dir)
            if not normdir in seen:
                seen.add(normdir)
                for thefile in files:
                    name = os.path.join(dir, thefile)
                    if _access_check(name, mode):
                        return name
        return None


# ZipFile is a context manager in 2.7, but not in 2.6

from zipfile import ZipFile as BaseZipFile

if hasattr(BaseZipFile, '__enter__'):  # pragma: no cover
    ZipFile = BaseZipFile
else:
    from zipfile import ZipExtFile as BaseZipExtFile

    class ZipExtFile(BaseZipExtFile):
        def __init__(self, base):
            self.__dict__.update(base.__dict__)

        def __enter__(self):
            return self

        def __exit__(self, *exc_info):
            self.close()
            # return None, so if an exception occurred, it will propagate

    class ZipFile(BaseZipFile):
        def __enter__(self):
            return self

        def __exit__(self, *exc_info):
            self.close()
            # return None, so if an exception occurred, it will propagate

        def open(self, *args, **kwargs):
            base = BaseZipFile.open(self, *args, **kwargs)
            return ZipExtFile(base)

try:
    from platform import python_implementation
except ImportError: # pragma: no cover
    def python_implementation():
        """Return a string identifying the Python implementation."""
        if 'PyPy' in sys.version:
            return 'PyPy'
        if os.name == 'java':
            return 'Jython'
        if sys.version.startswith('IronPython'):
            return 'IronPython'
        return 'CPython'

try:
    import sysconfig
except ImportError: # pragma: no cover
    from ._backport import sysconfig

try:
    callable = callable
except NameError:   # pragma: no cover
    from collections import Callable

    def callable(obj):
        return isinstance(obj, Callable)


try:
    fsencode = os.fsencode
    fsdecode = os.fsdecode
except AttributeError:  # pragma: no cover
    _fsencoding = sys.getfilesystemencoding()
    if _fsencoding == 'mbcs':
        _fserrors = 'strict'
    else:
        _fserrors = 'surrogateescape'

    def fsencode(filename):
        if isinstance(filename, bytes):
            return filename
        elif isinstance(filename, text_type):
            return filename.encode(_fsencoding, _fserrors)
        else:
            raise TypeError("expect bytes or str, not %s" %
                            type(filename).__name__)

    def fsdecode(filename):
        if isinstance(filename, text_type):
            return filename
        elif isinstance(filename, bytes):
            return filename.decode(_fsencoding, _fserrors)
        else:
            raise TypeError("expect bytes or str, not %s" %
                            type(filename).__name__)
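
    # Illustrative sketch only: the two helpers round-trip between text and
    # bytes using the filesystem encoding, mirroring os.fsencode/os.fsdecode.
    def _example_fs_roundtrip():
        name = u'data.txt'
        assert fsdecode(fsencode(name)) == name
        assert fsencode(b'raw-bytes') == b'raw-bytes'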

try:
    from tokenize import detect_encoding
except ImportError: # pragma: no cover
    from codecs import BOM_UTF8, lookup
    import re

    cookie_re = re.compile(r"coding[:=]\s*([-\w.]+)")

    def _get_normal_name(orig_enc):
        """Imitates get_normal_name in tokenizer.c."""
        # Only care about the first 12 characters.
        enc = orig_enc[:12].lower().replace("_", "-")
        if enc == "utf-8" or enc.startswith("utf-8-"):
            return "utf-8"
        if enc in ("latin-1", "iso-8859-1", "iso-latin-1") or \
           enc.startswith(("latin-1-", "iso-8859-1-", "iso-latin-1-")):
            return "iso-8859-1"
        return orig_enc

    def detect_encoding(readline):
        """
        The detect_encoding() function is used to detect the encoding that should
        be used to decode a Python source file.  It requires one argument, readline,
        in the same way as the tokenize() generator.

        It will call readline a maximum of twice, and return the encoding used
        (as a string) and a list of any lines (left as bytes) it has read in.

        It detects the encoding from the presence of a utf-8 bom or an encoding
        cookie as specified in pep-0263.  If both a bom and a cookie are present,
        but disagree, a SyntaxError will be raised.  If the encoding cookie is an
        invalid charset, raise a SyntaxError.  Note that if a utf-8 bom is found,
        'utf-8-sig' is returned.

        If no encoding is specified, then the default of 'utf-8' will be returned.
        """
        try:
            filename = readline.__self__.name
        except AttributeError:
            filename = None
        bom_found = False
        encoding = None
        default = 'utf-8'
        def read_or_stop():
            try:
                return readline()
            except StopIteration:
                return b''

        def find_cookie(line):
            try:
                # Decode as UTF-8. Either the line is an encoding declaration,
                # in which case it should be pure ASCII, or it must be UTF-8
                # per default encoding.
                line_string = line.decode('utf-8')
            except UnicodeDecodeError:
                msg = "invalid or missing encoding declaration"
                if filename is not None:
                    msg = '{} for {!r}'.format(msg, filename)
                raise SyntaxError(msg)

            matches = cookie_re.findall(line_string)
            if not matches:
                return None
            encoding = _get_normal_name(matches[0])
            try:
                codec = lookup(encoding)
            except LookupError:
                # This behaviour mimics the Python interpreter
                if filename is None:
                    msg = "unknown encoding: " + encoding
                else:
                    msg = "unknown encoding for {!r}: {}".format(filename,
                            encoding)
                raise SyntaxError(msg)

            if bom_found:
                if codec.name != 'utf-8':
                    # This behaviour mimics the Python interpreter
                    if filename is None:
                        msg = 'encoding problem: utf-8'
                    else:
                        msg = 'encoding problem for {!r}: utf-8'.format(filename)
                    raise SyntaxError(msg)
                encoding += '-sig'
            return encoding

        first = read_or_stop()
        if first.startswith(BOM_UTF8):
            bom_found = True
            first = first[3:]
            default = 'utf-8-sig'
        if not first:
            return default, []

        encoding = find_cookie(first)
        if encoding:
            return encoding, [first]

        second = read_or_stop()
        if not second:
            return default, [first]

        encoding = find_cookie(second)
        if encoding:
            return encoding, [first, second]

        return default, [first, second]
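
    # Illustrative sketch of the fallback (hypothetical source text): a PEP 263
    # cookie on the first line is detected and normalised.
    def _example_detect_encoding():
        from io import BytesIO
        src = BytesIO(b'# -*- coding: latin-1 -*-\nx = 1\n')
        encoding, lines = detect_encoding(src.readline)
        assert encoding == 'iso-8859-1'
        assert lines == [b'# -*- coding: latin-1 -*-\n']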

# For converting & <-> &amp; etc.
try:
    from html import escape
except ImportError:
    from cgi import escape
if sys.version_info[:2] < (3, 4):
    unescape = HTMLParser().unescape
else:
    from html import unescape

try:
    from collections import ChainMap
except ImportError: # pragma: no cover
    from collections import MutableMapping

    try:
        from reprlib import recursive_repr as _recursive_repr
    except ImportError:
        def _recursive_repr(fillvalue='...'):
            '''
            Decorator to make a repr function return fillvalue for a recursive
            call
            '''

            # 'get_ident' is needed by wrapper() below, but reprlib (which would
            # normally provide recursive_repr) is unavailable in this branch, so
            # import it from the py2 thread module directly.
            try:
                from thread import get_ident
            except ImportError:
                from dummy_thread import get_ident

            def decorating_function(user_function):
                repr_running = set()

                def wrapper(self):
                    key = id(self), get_ident()
                    if key in repr_running:
                        return fillvalue
                    repr_running.add(key)
                    try:
                        result = user_function(self)
                    finally:
                        repr_running.discard(key)
                    return result

                # Can't use functools.wraps() here because of bootstrap issues
                wrapper.__module__ = getattr(user_function, '__module__')
                wrapper.__doc__ = getattr(user_function, '__doc__')
                wrapper.__name__ = getattr(user_function, '__name__')
                wrapper.__annotations__ = getattr(user_function, '__annotations__', {})
                return wrapper

            return decorating_function

    class ChainMap(MutableMapping):
        ''' A ChainMap groups multiple dicts (or other mappings) together
        to create a single, updateable view.

        The underlying mappings are stored in a list.  That list is public and can be
        accessed or updated using the *maps* attribute.  There is no other state.

        Lookups search the underlying mappings successively until a key is found.
        In contrast, writes, updates, and deletions only operate on the first
        mapping.

        '''

        def __init__(self, *maps):
            '''Initialize a ChainMap by setting *maps* to the given mappings.
            If no mappings are provided, a single empty dictionary is used.

            '''
            self.maps = list(maps) or [{}]          # always at least one map

        def __missing__(self, key):
            raise KeyError(key)

        def __getitem__(self, key):
            for mapping in self.maps:
                try:
                    return mapping[key]             # can't use 'key in mapping' with defaultdict
                except KeyError:
                    pass
            return self.__missing__(key)            # support subclasses that define __missing__

        def get(self, key, default=None):
            return self[key] if key in self else default

        def __len__(self):
            return len(set().union(*self.maps))     # reuses stored hash values if possible

        def __iter__(self):
            return iter(set().union(*self.maps))

        def __contains__(self, key):
            return any(key in m for m in self.maps)

        def __bool__(self):
            return any(self.maps)

        @_recursive_repr()
        def __repr__(self):
            return '{0.__class__.__name__}({1})'.format(
                self, ', '.join(map(repr, self.maps)))

        @classmethod
        def fromkeys(cls, iterable, *args):
            'Create a ChainMap with a single dict created from the iterable.'
            return cls(dict.fromkeys(iterable, *args))

        def copy(self):
            'New ChainMap or subclass with a new copy of maps[0] and refs to maps[1:]'
            return self.__class__(self.maps[0].copy(), *self.maps[1:])

        __copy__ = copy

        def new_child(self):                        # like Django's Context.push()
            'New ChainMap with a new dict followed by all previous maps.'
            return self.__class__({}, *self.maps)

        @property
        def parents(self):                          # like Django's Context.pop()
            'New ChainMap from maps[1:].'
            return self.__class__(*self.maps[1:])

        def __setitem__(self, key, value):
            self.maps[0][key] = value

        def __delitem__(self, key):
            try:
                del self.maps[0][key]
            except KeyError:
                raise KeyError('Key not found in the first mapping: {!r}'.format(key))

        def popitem(self):
            'Remove and return an item pair from maps[0]. Raise KeyError if maps[0] is empty.'
            try:
                return self.maps[0].popitem()
            except KeyError:
                raise KeyError('No keys found in the first mapping.')

        def pop(self, key, *args):
            'Remove *key* from maps[0] and return its value. Raise KeyError if *key* not in maps[0].'
            try:
                return self.maps[0].pop(key, *args)
            except KeyError:
                raise KeyError('Key not found in the first mapping: {!r}'.format(key))

        def clear(self):
            'Clear maps[0], leaving maps[1:] intact.'
            self.maps[0].clear()
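
    # Illustrative sketch only: lookups search the maps left to right, while
    # writes always go to maps[0].
    def _example_chainmap():
        defaults = {'colour': 'red', 'user': 'guest'}
        overrides = {'user': 'admin'}
        cm = ChainMap(overrides, defaults)
        assert cm['user'] == 'admin' and cm['colour'] == 'red'
        cm['lang'] = 'en'
        assert overrides['lang'] == 'en' and 'lang' not in defaults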

try:
    from imp import cache_from_source
except ImportError: # pragma: no cover
    def cache_from_source(path, debug_override=None):
        assert path.endswith('.py')
        if debug_override is None:
            debug_override = __debug__
        if debug_override:
            suffix = 'c'
        else:
            suffix = 'o'
        return path + suffix
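
    # Illustrative sketch only: without imp.cache_from_source the fallback just
    # appends 'c' or 'o' depending on the debug flag.
    def _example_cache_from_source():
        assert cache_from_source('pkg/mod.py', True) == 'pkg/mod.pyc'
        assert cache_from_source('pkg/mod.py', False) == 'pkg/mod.pyo'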

try:
    from collections import OrderedDict
except ImportError: # pragma: no cover
## {{{ http://code.activestate.com/recipes/576693/ (r9)
# Backport of OrderedDict() class that runs on Python 2.4, 2.5, 2.6, 2.7 and pypy.
# Passes Python2.7's test suite and incorporates all the latest updates.
    try:
        from thread import get_ident as _get_ident
    except ImportError:
        from dummy_thread import get_ident as _get_ident

    try:
        from _abcoll import KeysView, ValuesView, ItemsView
    except ImportError:
        pass


    class OrderedDict(dict):
        'Dictionary that remembers insertion order'
        # An inherited dict maps keys to values.
        # The inherited dict provides __getitem__, __len__, __contains__, and get.
        # The remaining methods are order-aware.
        # Big-O running times for all methods are the same as for regular dictionaries.

        # The internal self.__map dictionary maps keys to links in a doubly linked list.
        # The circular doubly linked list starts and ends with a sentinel element.
        # The sentinel element never gets deleted (this simplifies the algorithm).
        # Each link is stored as a list of length three:  [PREV, NEXT, KEY].

        def __init__(self, *args, **kwds):
            '''Initialize an ordered dictionary.  Signature is the same as for
            regular dictionaries, but keyword arguments are not recommended
            because their insertion order is arbitrary.

            '''
            if len(args) > 1:
                raise TypeError('expected at most 1 arguments, got %d' % len(args))
            try:
                self.__root
            except AttributeError:
                self.__root = root = []                     # sentinel node
                root[:] = [root, root, None]
                self.__map = {}
            self.__update(*args, **kwds)

        def __setitem__(self, key, value, dict_setitem=dict.__setitem__):
            'od.__setitem__(i, y) <==> od[i]=y'
            # Setting a new item creates a new link which goes at the end of the linked
            # list, and the inherited dictionary is updated with the new key/value pair.
            if key not in self:
                root = self.__root
                last = root[0]
                last[1] = root[0] = self.__map[key] = [last, root, key]
            dict_setitem(self, key, value)

        def __delitem__(self, key, dict_delitem=dict.__delitem__):
            'od.__delitem__(y) <==> del od[y]'
            # Deleting an existing item uses self.__map to find the link which is
            # then removed by updating the links in the predecessor and successor nodes.
            dict_delitem(self, key)
            link_prev, link_next, key = self.__map.pop(key)
            link_prev[1] = link_next
            link_next[0] = link_prev

        def __iter__(self):
            'od.__iter__() <==> iter(od)'
            root = self.__root
            curr = root[1]
            while curr is not root:
                yield curr[2]
                curr = curr[1]

        def __reversed__(self):
            'od.__reversed__() <==> reversed(od)'
            root = self.__root
            curr = root[0]
            while curr is not root:
                yield curr[2]
                curr = curr[0]

        def clear(self):
            'od.clear() -> None.  Remove all items from od.'
            try:
                for node in self.__map.itervalues():
                    del node[:]
                root = self.__root
                root[:] = [root, root, None]
                self.__map.clear()
            except AttributeError:
                pass
            dict.clear(self)

        def popitem(self, last=True):
            '''od.popitem() -> (k, v), return and remove a (key, value) pair.
            Pairs are returned in LIFO order if last is true or FIFO order if false.

            '''
            if not self:
                raise KeyError('dictionary is empty')
            root = self.__root
            if last:
                link = root[0]
                link_prev = link[0]
                link_prev[1] = root
                root[0] = link_prev
            else:
                link = root[1]
                link_next = link[1]
                root[1] = link_next
                link_next[0] = root
            key = link[2]
            del self.__map[key]
            value = dict.pop(self, key)
            return key, value

        # -- the following methods do not depend on the internal structure --

        def keys(self):
            'od.keys() -> list of keys in od'
            return list(self)

        def values(self):
            'od.values() -> list of values in od'
            return [self[key] for key in self]

        def items(self):
            'od.items() -> list of (key, value) pairs in od'
            return [(key, self[key]) for key in self]

        def iterkeys(self):
            'od.iterkeys() -> an iterator over the keys in od'
            return iter(self)

        def itervalues(self):
            'od.itervalues() -> an iterator over the values in od'
            for k in self:
                yield self[k]

        def iteritems(self):
            'od.iteritems() -> an iterator over the (key, value) pairs in od'
            for k in self:
                yield (k, self[k])

        def update(*args, **kwds):
            '''od.update(E, **F) -> None.  Update od from dict/iterable E and F.

            If E is a dict instance, does:           for k in E: od[k] = E[k]
            If E has a .keys() method, does:         for k in E.keys(): od[k] = E[k]
            Or if E is an iterable of items, does:   for k, v in E: od[k] = v
            In either case, this is followed by:     for k, v in F.items(): od[k] = v

            '''
            if len(args) > 2:
                raise TypeError('update() takes at most 2 positional '
                                'arguments (%d given)' % (len(args),))
            elif not args:
                raise TypeError('update() takes at least 1 argument (0 given)')
            self = args[0]
            # Make progressively weaker assumptions about "other"
            other = ()
            if len(args) == 2:
                other = args[1]
            if isinstance(other, dict):
                for key in other:
                    self[key] = other[key]
            elif hasattr(other, 'keys'):
                for key in other.keys():
                    self[key] = other[key]
            else:
                for key, value in other:
                    self[key] = value
            for key, value in kwds.items():
                self[key] = value
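        # Added example (not in the original): update() accepts a mapping, an
        # iterable of (key, value) pairs, or keyword arguments; keys keep the
        # order in which they are first seen, e.g.:
        #
        #   >>> od = OrderedDict()
        #   >>> od.update([('a', 1), ('b', 2)], c=3)
        #   >>> list(od.items())
        #   [('a', 1), ('b', 2), ('c', 3)]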

        __update = update  # let subclasses override update without breaking __init__

        __marker = object()

        def pop(self, key, default=__marker):
            '''od.pop(k[,d]) -> v, remove specified key and return the corresponding value.
            If key is not found, d is returned if given, otherwise KeyError is raised.

            '''
            if key in self:
                result = self[key]
                del self[key]
                return result
            if default is self.__marker:
                raise KeyError(key)
            return default

        def setdefault(self, key, default=None):
            'od.setdefault(k[,d]) -> od.get(k,d), also set od[k]=d if k not in od'
            if key in self:
                return self[key]
            self[key] = default
            return default

        def __repr__(self, _repr_running=None):
            'od.__repr__() <==> repr(od)'
            if not _repr_running: _repr_running = {}
            call_key = id(self), _get_ident()
            if call_key in _repr_running:
                return '...'
            _repr_running[call_key] = 1
            try:
                if not self:
                    return '%s()' % (self.__class__.__name__,)
                return '%s(%r)' % (self.__class__.__name__, self.items())
            finally:
                del _repr_running[call_key]

        def __reduce__(self):
            'Return state information for pickling'
            items = [[k, self[k]] for k in self]
            inst_dict = vars(self).copy()
            for k in vars(OrderedDict()):
                inst_dict.pop(k, None)
            if inst_dict:
                return (self.__class__, (items,), inst_dict)
            return self.__class__, (items,)

        def copy(self):
            'od.copy() -> a shallow copy of od'
            return self.__class__(self)

        @classmethod
        def fromkeys(cls, iterable, value=None):
            '''OD.fromkeys(S[, v]) -> New ordered dictionary with keys from S
            and values equal to v (which defaults to None).

            '''
            d = cls()
            for key in iterable:
                d[key] = value
            return d

        def __eq__(self, other):
            '''od.__eq__(y) <==> od==y.  Comparison to another OD is order-sensitive
            while comparison to a regular mapping is order-insensitive.

            '''
            if isinstance(other, OrderedDict):
                return len(self)==len(other) and self.items() == other.items()
            return dict.__eq__(self, other)
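        # Added example (not in the original): equality is order-sensitive
        # between OrderedDicts but order-insensitive against plain dicts:
        #
        #   >>> OrderedDict([('a', 1), ('b', 2)]) == OrderedDict([('b', 2), ('a', 1)])
        #   False
        #   >>> OrderedDict([('a', 1), ('b', 2)]) == {'b': 2, 'a': 1}
        #   True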

        def __ne__(self, other):
            return not self == other

        # -- the following methods are only used in Python 2.7 --

        def viewkeys(self):
            "od.viewkeys() -> a set-like object providing a view on od's keys"
            return KeysView(self)

        def viewvalues(self):
            "od.viewvalues() -> an object providing a view on od's values"
            return ValuesView(self)

        def viewitems(self):
            "od.viewitems() -> a set-like object providing a view on od's items"
            return ItemsView(self)

try:
    from logging.config import BaseConfigurator, valid_ident
except ImportError: # pragma: no cover
    IDENTIFIER = re.compile('^[a-z_][a-z0-9_]*$', re.I)


    def valid_ident(s):
        m = IDENTIFIER.match(s)
        if not m:
            raise ValueError('Not a valid Python identifier: %r' % s)
        return True
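    # Added example (not in the original): this fallback valid_ident() either
    # returns True or raises; it never returns False:
    #
    #   >>> valid_ident('console_handler')
    #   True
    #   >>> valid_ident('not-an-identifier')
    #   Traceback (most recent call last):
    #     ...
    #   ValueError: Not a valid Python identifier: 'not-an-identifier'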


    # The ConvertingXXX classes are wrappers around standard Python containers,
    # and they serve to convert any suitable values in the container. The
    # conversion converts base dicts, lists and tuples to their wrapped
    # equivalents, whereas strings which match a conversion format are converted
    # appropriately.
    #
    # Each wrapper should have a configurator attribute holding the actual
    # configurator to use for conversion.

    class ConvertingDict(dict):
        """A converting dictionary wrapper."""

        def __getitem__(self, key):
            value = dict.__getitem__(self, key)
            result = self.configurator.convert(value)
            #If the converted value is different, save for next time
            if value is not result:
                self[key] = result
                if type(result) in (ConvertingDict, ConvertingList,
                                    ConvertingTuple):
                    result.parent = self
                    result.key = key
            return result

        def get(self, key, default=None):
            value = dict.get(self, key, default)
            result = self.configurator.convert(value)
            #If the converted value is different, save for next time
            if value is not result:
                self[key] = result
                if type(result) in (ConvertingDict, ConvertingList,
                                    ConvertingTuple):
                    result.parent = self
                    result.key = key
            return result

        def pop(self, key, default=None):
            value = dict.pop(self, key, default)
            result = self.configurator.convert(value)
            # If the converted value is different, remember the relationship
            # to the parent container (as the other methods do).
            if value is not result:
                if type(result) in (ConvertingDict, ConvertingList,
                                    ConvertingTuple):
                    result.parent = self
                    result.key = key
            return result

    class ConvertingList(list):
        """A converting list wrapper."""
        def __getitem__(self, key):
            value = list.__getitem__(self, key)
            result = self.configurator.convert(value)
            #If the converted value is different, save for next time
            if value is not result:
                self[key] = result
                if type(result) in (ConvertingDict, ConvertingList,
                                    ConvertingTuple):
                    result.parent = self
                    result.key = key
            return result

        def pop(self, idx=-1):
            value = list.pop(self, idx)
            result = self.configurator.convert(value)
            if value is not result:
                if type(result) in (ConvertingDict, ConvertingList,
                                    ConvertingTuple):
                    result.parent = self
            return result

    class ConvertingTuple(tuple):
        """A converting tuple wrapper."""
        def __getitem__(self, key):
            value = tuple.__getitem__(self, key)
            result = self.configurator.convert(value)
            if value is not result:
                if type(result) in (ConvertingDict, ConvertingList,
                                    ConvertingTuple):
                    result.parent = self
                    result.key = key
            return result
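    # Added illustration (not part of the original source): the Converting*
    # wrappers are inert until a configurator is attached; BaseConfigurator
    # (below) does that for its top-level config dict, and nested containers
    # are wrapped lazily on first access:
    #
    #   >>> cfg = BaseConfigurator({'handlers': {'console': {'level': 'DEBUG'}}})
    #   >>> type(cfg.config['handlers']).__name__
    #   'ConvertingDict'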

    class BaseConfigurator(object):
        """
        The configurator base class which defines some useful defaults.
        """

        CONVERT_PATTERN = re.compile(r'^(?P<prefix>[a-z]+)://(?P<suffix>.*)$')

        WORD_PATTERN = re.compile(r'^\s*(\w+)\s*')
        DOT_PATTERN = re.compile(r'^\.\s*(\w+)\s*')
        INDEX_PATTERN = re.compile(r'^\[\s*(\w+)\s*\]\s*')
        DIGIT_PATTERN = re.compile(r'^\d+$')

        value_converters = {
            'ext' : 'ext_convert',
            'cfg' : 'cfg_convert',
        }

        # We might want to use a different one, e.g. importlib
        importer = staticmethod(__import__)

        def __init__(self, config):
            self.config = ConvertingDict(config)
            self.config.configurator = self

        def resolve(self, s):
            """
            Resolve strings to objects using standard import and attribute
            syntax.
            """
            name = s.split('.')
            used = name.pop(0)
            try:
                found = self.importer(used)
                for frag in name:
                    used += '.' + frag
                    try:
                        found = getattr(found, frag)
                    except AttributeError:
                        self.importer(used)
                        found = getattr(found, frag)
                return found
            except ImportError:
                e, tb = sys.exc_info()[1:]
                v = ValueError('Cannot resolve %r: %s' % (s, e))
                v.__cause__, v.__traceback__ = e, tb
                raise v
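        # Added example (not in the original): resolve() imports the longest
        # importable prefix of a dotted name and then walks the remaining
        # attributes:
        #
        #   >>> BaseConfigurator({}).resolve('logging.StreamHandler')
        #   <class 'logging.StreamHandler'>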

        def ext_convert(self, value):
            """Default converter for the ext:// protocol."""
            return self.resolve(value)

        def cfg_convert(self, value):
            """Default converter for the cfg:// protocol."""
            rest = value
            m = self.WORD_PATTERN.match(rest)
            if m is None:
                raise ValueError("Unable to convert %r" % value)
            else:
                rest = rest[m.end():]
                d = self.config[m.groups()[0]]
                #print d, rest
                while rest:
                    m = self.DOT_PATTERN.match(rest)
                    if m:
                        d = d[m.groups()[0]]
                    else:
                        m = self.INDEX_PATTERN.match(rest)
                        if m:
                            idx = m.groups()[0]
                            if not self.DIGIT_PATTERN.match(idx):
                                d = d[idx]
                            else:
                                try:
                                    n = int(idx) # try as number first (most likely)
                                    d = d[n]
                                except TypeError:
                                    d = d[idx]
                    if m:
                        rest = rest[m.end():]
                    else:
                        raise ValueError('Unable to convert '
                                         '%r at %r' % (value, rest))
            #rest should be empty
            return d
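        # Added example (not in the original): cfg_convert() receives the part
        # after 'cfg://' and walks the configuration using dotted and [index]
        # access:
        #
        #   >>> cfg = BaseConfigurator({'handlers': {'console': {'level': 'DEBUG'}}})
        #   >>> cfg.cfg_convert('handlers.console.level')
        #   'DEBUG'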

        def convert(self, value):
            """
            Convert values to an appropriate type. dicts, lists and tuples are
            replaced by their converting alternatives. Strings are checked to
            see if they have a conversion format and are converted if they do.
            """
            if not isinstance(value, ConvertingDict) and isinstance(value, dict):
                value = ConvertingDict(value)
                value.configurator = self
            elif not isinstance(value, ConvertingList) and isinstance(value, list):
                value = ConvertingList(value)
                value.configurator = self
            elif not isinstance(value, ConvertingTuple) and\
                     isinstance(value, tuple):
                value = ConvertingTuple(value)
                value.configurator = self
            elif isinstance(value, string_types):
                m = self.CONVERT_PATTERN.match(value)
                if m:
                    d = m.groupdict()
                    prefix = d['prefix']
                    converter = self.value_converters.get(prefix, None)
                    if converter:
                        suffix = d['suffix']
                        converter = getattr(self, converter)
                        value = converter(suffix)
            return value
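        # Added example (not in the original): convert() dispatches on the
        # 'ext://' and 'cfg://' prefixes and wraps plain containers:
        #
        #   >>> cfg = BaseConfigurator({'version': 1})
        #   >>> cfg.convert('ext://logging.StreamHandler')
        #   <class 'logging.StreamHandler'>
        #   >>> cfg.convert('cfg://version')
        #   1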

        def configure_custom(self, config):
            """Configure an object with a user-supplied factory."""
            c = config.pop('()')
            if not callable(c):
                c = self.resolve(c)
            props = config.pop('.', None)
            # Check for valid identifiers
            kwargs = dict([(k, config[k]) for k in config if valid_ident(k)])
            result = c(**kwargs)
            if props:
                for name, value in props.items():
                    setattr(result, name, value)
            return result

        def as_tuple(self, value):
            """Utility function which converts lists to tuples."""
            if isinstance(value, list):
                value = tuple(value)
            return value
# site-packages/pip/_vendor/distlib/metadata.py
# -*- coding: utf-8 -*-
#
# Copyright (C) 2012 The Python Software Foundation.
# See LICENSE.txt and CONTRIBUTORS.txt.
#
"""Implementation of the Metadata for Python packages PEPs.

Supports all metadata formats (1.0, 1.1, 1.2, and 2.0 experimental).
"""
from __future__ import unicode_literals

import codecs
from email import message_from_file
import json
import logging
import re


from . import DistlibException, __version__
from .compat import StringIO, string_types, text_type
from .markers import interpret
from .util import extract_by_key, get_extras
from .version import get_scheme, PEP440_VERSION_RE

logger = logging.getLogger(__name__)


class MetadataMissingError(DistlibException):
    """A required metadata is missing"""


class MetadataConflictError(DistlibException):
    """Attempt to read or write metadata fields that are conflictual."""


class MetadataUnrecognizedVersionError(DistlibException):
    """Unknown metadata version number."""


class MetadataInvalidError(DistlibException):
    """A metadata value is invalid"""

# public API of this module
__all__ = ['Metadata', 'PKG_INFO_ENCODING', 'PKG_INFO_PREFERRED_VERSION']

# Encoding used for the PKG-INFO files
PKG_INFO_ENCODING = 'utf-8'

# preferred version. Hopefully will be changed
# to 1.2 once PEP 345 is supported everywhere
PKG_INFO_PREFERRED_VERSION = '1.1'

_LINE_PREFIX_1_2 = re.compile('\n       \|')
_LINE_PREFIX_PRE_1_2 = re.compile('\n        ')
_241_FIELDS = ('Metadata-Version', 'Name', 'Version', 'Platform',
               'Summary', 'Description',
               'Keywords', 'Home-page', 'Author', 'Author-email',
               'License')

_314_FIELDS = ('Metadata-Version', 'Name', 'Version', 'Platform',
               'Supported-Platform', 'Summary', 'Description',
               'Keywords', 'Home-page', 'Author', 'Author-email',
               'License', 'Classifier', 'Download-URL', 'Obsoletes',
               'Provides', 'Requires')

_314_MARKERS = ('Obsoletes', 'Provides', 'Requires', 'Classifier',
                'Download-URL')

_345_FIELDS = ('Metadata-Version', 'Name', 'Version', 'Platform',
               'Supported-Platform', 'Summary', 'Description',
               'Keywords', 'Home-page', 'Author', 'Author-email',
               'Maintainer', 'Maintainer-email', 'License',
               'Classifier', 'Download-URL', 'Obsoletes-Dist',
               'Project-URL', 'Provides-Dist', 'Requires-Dist',
               'Requires-Python', 'Requires-External')

_345_MARKERS = ('Provides-Dist', 'Requires-Dist', 'Requires-Python',
                'Obsoletes-Dist', 'Requires-External', 'Maintainer',
                'Maintainer-email', 'Project-URL')

_426_FIELDS = ('Metadata-Version', 'Name', 'Version', 'Platform',
               'Supported-Platform', 'Summary', 'Description',
               'Keywords', 'Home-page', 'Author', 'Author-email',
               'Maintainer', 'Maintainer-email', 'License',
               'Classifier', 'Download-URL', 'Obsoletes-Dist',
               'Project-URL', 'Provides-Dist', 'Requires-Dist',
               'Requires-Python', 'Requires-External', 'Private-Version',
               'Obsoleted-By', 'Setup-Requires-Dist', 'Extension',
               'Provides-Extra')

_426_MARKERS = ('Private-Version', 'Provides-Extra', 'Obsoleted-By',
                'Setup-Requires-Dist', 'Extension')

_ALL_FIELDS = set()
_ALL_FIELDS.update(_241_FIELDS)
_ALL_FIELDS.update(_314_FIELDS)
_ALL_FIELDS.update(_345_FIELDS)
_ALL_FIELDS.update(_426_FIELDS)

EXTRA_RE = re.compile(r'''extra\s*==\s*("([^"]+)"|'([^']+)')''')


def _version2fieldlist(version):
    if version == '1.0':
        return _241_FIELDS
    elif version == '1.1':
        return _314_FIELDS
    elif version == '1.2':
        return _345_FIELDS
    elif version == '2.0':
        return _426_FIELDS
    raise MetadataUnrecognizedVersionError(version)


def _best_version(fields):
    """Detect the best version depending on the fields used."""
    def _has_marker(keys, markers):
        for marker in markers:
            if marker in keys:
                return True
        return False

    keys = []
    for key, value in fields.items():
        if value in ([], 'UNKNOWN', None):
            continue
        keys.append(key)

    possible_versions = ['1.0', '1.1', '1.2', '2.0']

    # first let's try to see if a field is not part of one of the version
    for key in keys:
        if key not in _241_FIELDS and '1.0' in possible_versions:
            possible_versions.remove('1.0')
        if key not in _314_FIELDS and '1.1' in possible_versions:
            possible_versions.remove('1.1')
        if key not in _345_FIELDS and '1.2' in possible_versions:
            possible_versions.remove('1.2')
        if key not in _426_FIELDS and '2.0' in possible_versions:
            possible_versions.remove('2.0')

    # possible_version contains qualified versions
    if len(possible_versions) == 1:
        return possible_versions[0]   # found !
    elif len(possible_versions) == 0:
        raise MetadataConflictError('Unknown metadata set')

    # let's see if one unique marker is found
    is_1_1 = '1.1' in possible_versions and _has_marker(keys, _314_MARKERS)
    is_1_2 = '1.2' in possible_versions and _has_marker(keys, _345_MARKERS)
    is_2_0 = '2.0' in possible_versions and _has_marker(keys, _426_MARKERS)
    if int(is_1_1) + int(is_1_2) + int(is_2_0) > 1:
        raise MetadataConflictError('You used incompatible 1.1/1.2/2.0 fields')

    # we have the choice, 1.0, or 1.2, or 2.0
    #   - 1.0 has a broken Summary field but works with all tools
    #   - 1.1 is to avoid
    #   - 1.2 fixes Summary but has little adoption
    #   - 2.0 adds more features and is very new
    if not is_1_1 and not is_1_2 and not is_2_0:
        # we couldn't find any specific marker
        if PKG_INFO_PREFERRED_VERSION in possible_versions:
            return PKG_INFO_PREFERRED_VERSION
    if is_1_1:
        return '1.1'
    if is_1_2:
        return '1.2'

    return '2.0'
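# Added example (not part of the original module): _best_version() narrows
# the candidate metadata versions to those whose field set covers every key
# actually used, then picks by marker fields:
#
#   >>> _best_version({'Name': 'foo', 'Version': '1.0'})
#   '1.1'
#   >>> _best_version({'Name': 'foo', 'Version': '1.0', 'Requires-Dist': ['bar']})
#   '1.2'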

_ATTR2FIELD = {
    'metadata_version': 'Metadata-Version',
    'name': 'Name',
    'version': 'Version',
    'platform': 'Platform',
    'supported_platform': 'Supported-Platform',
    'summary': 'Summary',
    'description': 'Description',
    'keywords': 'Keywords',
    'home_page': 'Home-page',
    'author': 'Author',
    'author_email': 'Author-email',
    'maintainer': 'Maintainer',
    'maintainer_email': 'Maintainer-email',
    'license': 'License',
    'classifier': 'Classifier',
    'download_url': 'Download-URL',
    'obsoletes_dist': 'Obsoletes-Dist',
    'provides_dist': 'Provides-Dist',
    'requires_dist': 'Requires-Dist',
    'setup_requires_dist': 'Setup-Requires-Dist',
    'requires_python': 'Requires-Python',
    'requires_external': 'Requires-External',
    'requires': 'Requires',
    'provides': 'Provides',
    'obsoletes': 'Obsoletes',
    'project_url': 'Project-URL',
    'private_version': 'Private-Version',
    'obsoleted_by': 'Obsoleted-By',
    'extension': 'Extension',
    'provides_extra': 'Provides-Extra',
}

_PREDICATE_FIELDS = ('Requires-Dist', 'Obsoletes-Dist', 'Provides-Dist')
_VERSIONS_FIELDS = ('Requires-Python',)
_VERSION_FIELDS = ('Version',)
_LISTFIELDS = ('Platform', 'Classifier', 'Obsoletes',
               'Requires', 'Provides', 'Obsoletes-Dist',
               'Provides-Dist', 'Requires-Dist', 'Requires-External',
               'Project-URL', 'Supported-Platform', 'Setup-Requires-Dist',
               'Provides-Extra', 'Extension')
_LISTTUPLEFIELDS = ('Project-URL',)

_ELEMENTSFIELD = ('Keywords',)

_UNICODEFIELDS = ('Author', 'Maintainer', 'Summary', 'Description')

_MISSING = object()

_FILESAFE = re.compile('[^A-Za-z0-9.]+')


def _get_name_and_version(name, version, for_filename=False):
    """Return the distribution name with version.

    If for_filename is true, return a filename-escaped form."""
    if for_filename:
        # For both name and version any runs of non-alphanumeric or '.'
        # characters are replaced with a single '-'.  Additionally any
        # spaces in the version string become '.'
        name = _FILESAFE.sub('-', name)
        version = _FILESAFE.sub('-', version.replace(' ', '.'))
    return '%s-%s' % (name, version)
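# Added example (not part of the original module): with for_filename=True,
# runs of characters other than letters, digits and '.' collapse to '-' and
# spaces in the version become '.':
#
#   >>> _get_name_and_version('my package', '1.0 beta', for_filename=True)
#   'my-package-1.0.beta'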


class LegacyMetadata(object):
    """The legacy metadata of a release.

    Supports versions 1.0, 1.1 and 1.2 (auto-detected). You can
    instantiate the class with one of these arguments (or none):
    - *path*, the path to a metadata file
    - *fileobj*, a file-like object with metadata as content
    - *mapping* is a dict-like object
    - *scheme* is a version scheme name
    """
    # TODO document the mapping API and UNKNOWN default key

    def __init__(self, path=None, fileobj=None, mapping=None,
                 scheme='default'):
        if [path, fileobj, mapping].count(None) < 2:
            raise TypeError('path, fileobj and mapping are exclusive')
        self._fields = {}
        self.requires_files = []
        self._dependencies = None
        self.scheme = scheme
        if path is not None:
            self.read(path)
        elif fileobj is not None:
            self.read_file(fileobj)
        elif mapping is not None:
            self.update(mapping)
            self.set_metadata_version()

    def set_metadata_version(self):
        self._fields['Metadata-Version'] = _best_version(self._fields)

    def _write_field(self, fileobj, name, value):
        fileobj.write('%s: %s\n' % (name, value))

    def __getitem__(self, name):
        return self.get(name)

    def __setitem__(self, name, value):
        return self.set(name, value)

    def __delitem__(self, name):
        field_name = self._convert_name(name)
        try:
            del self._fields[field_name]
        except KeyError:
            raise KeyError(name)

    def __contains__(self, name):
        return (name in self._fields or
                self._convert_name(name) in self._fields)

    def _convert_name(self, name):
        if name in _ALL_FIELDS:
            return name
        name = name.replace('-', '_').lower()
        return _ATTR2FIELD.get(name, name)

    def _default_value(self, name):
        if name in _LISTFIELDS or name in _ELEMENTSFIELD:
            return []
        return 'UNKNOWN'

    def _remove_line_prefix(self, value):
        if self.metadata_version in ('1.0', '1.1'):
            return _LINE_PREFIX_PRE_1_2.sub('\n', value)
        else:
            return _LINE_PREFIX_1_2.sub('\n', value)

    def __getattr__(self, name):
        if name in _ATTR2FIELD:
            return self[name]
        raise AttributeError(name)

    #
    # Public API
    #

#    dependencies = property(_get_dependencies, _set_dependencies)

    def get_fullname(self, filesafe=False):
        """Return the distribution name with version.

        If filesafe is true, return a filename-escaped form."""
        return _get_name_and_version(self['Name'], self['Version'], filesafe)

    def is_field(self, name):
        """return True if name is a valid metadata key"""
        name = self._convert_name(name)
        return name in _ALL_FIELDS

    def is_multi_field(self, name):
        name = self._convert_name(name)
        return name in _LISTFIELDS

    def read(self, filepath):
        """Read the metadata values from a file path."""
        fp = codecs.open(filepath, 'r', encoding='utf-8')
        try:
            self.read_file(fp)
        finally:
            fp.close()

    def read_file(self, fileob):
        """Read the metadata values from a file object."""
        msg = message_from_file(fileob)
        self._fields['Metadata-Version'] = msg['metadata-version']

        # When reading, get all the fields we can
        for field in _ALL_FIELDS:
            if field not in msg:
                continue
            if field in _LISTFIELDS:
                # we can have multiple lines
                values = msg.get_all(field)
                if field in _LISTTUPLEFIELDS and values is not None:
                    values = [tuple(value.split(',')) for value in values]
                self.set(field, values)
            else:
                # single line
                value = msg[field]
                if value is not None and value != 'UNKNOWN':
                    self.set(field, value)
        self.set_metadata_version()

    def write(self, filepath, skip_unknown=False):
        """Write the metadata fields to filepath."""
        fp = codecs.open(filepath, 'w', encoding='utf-8')
        try:
            self.write_file(fp, skip_unknown)
        finally:
            fp.close()

    def write_file(self, fileobject, skip_unknown=False):
        """Write the PKG-INFO format data to a file object."""
        self.set_metadata_version()

        for field in _version2fieldlist(self['Metadata-Version']):
            values = self.get(field)
            if skip_unknown and values in ('UNKNOWN', [], ['UNKNOWN']):
                continue
            if field in _ELEMENTSFIELD:
                self._write_field(fileobject, field, ','.join(values))
                continue
            if field not in _LISTFIELDS:
                if field == 'Description':
                    if self.metadata_version in ('1.0', '1.1'):
                        values = values.replace('\n', '\n        ')
                    else:
                        values = values.replace('\n', '\n       |')
                values = [values]

            if field in _LISTTUPLEFIELDS:
                values = [','.join(value) for value in values]

            for value in values:
                self._write_field(fileobject, field, value)

    def update(self, other=None, **kwargs):
        """Set metadata values from the given iterable `other` and kwargs.

        Behavior is like `dict.update`: If `other` has a ``keys`` method,
        they are looped over and ``self[key]`` is assigned ``other[key]``.
        Else, ``other`` is an iterable of ``(key, value)`` iterables.

        Keys that don't match a metadata field or that have an empty value are
        dropped.
        """
        def _set(key, value):
            if key in _ATTR2FIELD and value:
                self.set(self._convert_name(key), value)

        if not other:
            # other is None or empty container
            pass
        elif hasattr(other, 'keys'):
            for k in other.keys():
                _set(k, other[k])
        else:
            for k, v in other:
                _set(k, v)

        if kwargs:
            for k, v in kwargs.items():
                _set(k, v)

    def set(self, name, value):
        """Control then set a metadata field."""
        name = self._convert_name(name)

        if ((name in _ELEMENTSFIELD or name == 'Platform') and
            not isinstance(value, (list, tuple))):
            if isinstance(value, string_types):
                value = [v.strip() for v in value.split(',')]
            else:
                value = []
        elif (name in _LISTFIELDS and
              not isinstance(value, (list, tuple))):
            if isinstance(value, string_types):
                value = [value]
            else:
                value = []

        if logger.isEnabledFor(logging.WARNING):
            project_name = self['Name']

            scheme = get_scheme(self.scheme)
            if name in _PREDICATE_FIELDS and value is not None:
                for v in value:
                    # check that the values are valid
                    if not scheme.is_valid_matcher(v.split(';')[0]):
                        logger.warning(
                            "'%s': '%s' is not valid (field '%s')",
                            project_name, v, name)
            # FIXME this rejects UNKNOWN, is that right?
            elif name in _VERSIONS_FIELDS and value is not None:
                if not scheme.is_valid_constraint_list(value):
                    logger.warning("'%s': '%s' is not a valid version (field '%s')",
                                   project_name, value, name)
            elif name in _VERSION_FIELDS and value is not None:
                if not scheme.is_valid_version(value):
                    logger.warning("'%s': '%s' is not a valid version (field '%s')",
                                   project_name, value, name)

        if name in _UNICODEFIELDS:
            if name == 'Description':
                value = self._remove_line_prefix(value)

        self._fields[name] = value

    def get(self, name, default=_MISSING):
        """Get a metadata field."""
        name = self._convert_name(name)
        if name not in self._fields:
            if default is _MISSING:
                default = self._default_value(name)
            return default
        if name in _UNICODEFIELDS:
            value = self._fields[name]
            return value
        elif name in _LISTFIELDS:
            value = self._fields[name]
            if value is None:
                return []
            res = []
            for val in value:
                if name not in _LISTTUPLEFIELDS:
                    res.append(val)
                else:
                    # That's for Project-URL
                    res.append((val[0], val[1]))
            return res

        elif name in _ELEMENTSFIELD:
            value = self._fields[name]
            if isinstance(value, string_types):
                return value.split(',')
        return self._fields[name]

    def check(self, strict=False):
        """Check if the metadata is compliant. If strict is True then raise if
        no Name or Version are provided"""
        self.set_metadata_version()

        # XXX should check the versions (if the file was loaded)
        missing, warnings = [], []

        for attr in ('Name', 'Version'):  # required by PEP 345
            if attr not in self:
                missing.append(attr)

        if strict and missing != []:
            msg = 'missing required metadata: %s' % ', '.join(missing)
            raise MetadataMissingError(msg)

        for attr in ('Home-page', 'Author'):
            if attr not in self:
                missing.append(attr)

        # checking metadata 1.2 (XXX needs to check 1.1, 1.0)
        if self['Metadata-Version'] != '1.2':
            return missing, warnings

        scheme = get_scheme(self.scheme)

        def are_valid_constraints(value):
            for v in value:
                if not scheme.is_valid_matcher(v.split(';')[0]):
                    return False
            return True

        for fields, controller in ((_PREDICATE_FIELDS, are_valid_constraints),
                                   (_VERSIONS_FIELDS,
                                    scheme.is_valid_constraint_list),
                                   (_VERSION_FIELDS,
                                    scheme.is_valid_version)):
            for field in fields:
                value = self.get(field, None)
                if value is not None and not controller(value):
                    warnings.append("Wrong value for '%s': %s" % (field, value))

        return missing, warnings

    def todict(self, skip_missing=False):
        """Return fields as a dict.

        Field names will be converted to use the underscore-lowercase style
        instead of hyphen-mixed case (i.e. home_page instead of Home-page).
        """
        self.set_metadata_version()

        mapping_1_0 = (
            ('metadata_version', 'Metadata-Version'),
            ('name', 'Name'),
            ('version', 'Version'),
            ('summary', 'Summary'),
            ('home_page', 'Home-page'),
            ('author', 'Author'),
            ('author_email', 'Author-email'),
            ('license', 'License'),
            ('description', 'Description'),
            ('keywords', 'Keywords'),
            ('platform', 'Platform'),
            ('classifiers', 'Classifier'),
            ('download_url', 'Download-URL'),
        )

        data = {}
        for key, field_name in mapping_1_0:
            if not skip_missing or field_name in self._fields:
                data[key] = self[field_name]

        if self['Metadata-Version'] == '1.2':
            mapping_1_2 = (
                ('requires_dist', 'Requires-Dist'),
                ('requires_python', 'Requires-Python'),
                ('requires_external', 'Requires-External'),
                ('provides_dist', 'Provides-Dist'),
                ('obsoletes_dist', 'Obsoletes-Dist'),
                ('project_url', 'Project-URL'),
                ('maintainer', 'Maintainer'),
                ('maintainer_email', 'Maintainer-email'),
            )
            for key, field_name in mapping_1_2:
                if not skip_missing or field_name in self._fields:
                    if key != 'project_url':
                        data[key] = self[field_name]
                    else:
                        data[key] = [','.join(u) for u in self[field_name]]

        elif self['Metadata-Version'] == '1.1':
            mapping_1_1 = (
                ('provides', 'Provides'),
                ('requires', 'Requires'),
                ('obsoletes', 'Obsoletes'),
            )
            for key, field_name in mapping_1_1:
                if not skip_missing or field_name in self._fields:
                    data[key] = self[field_name]

        return data

    def add_requirements(self, requirements):
        if self['Metadata-Version'] == '1.1':
            # we can't have 1.1 metadata *and* Setuptools requires
            for field in ('Obsoletes', 'Requires', 'Provides'):
                if field in self:
                    del self[field]
        self['Requires-Dist'] += requirements

    # Mapping API
    # TODO could add iter* variants

    def keys(self):
        return list(_version2fieldlist(self['Metadata-Version']))

    def __iter__(self):
        for key in self.keys():
            yield key

    def values(self):
        return [self[key] for key in self.keys()]

    def items(self):
        return [(key, self[key]) for key in self.keys()]

    def __repr__(self):
        return '<%s %s %s>' % (self.__class__.__name__, self.name,
                               self.version)


METADATA_FILENAME = 'pydist.json'
WHEEL_METADATA_FILENAME = 'metadata.json'


class Metadata(object):
    """
    The metadata of a release. This implementation uses 2.0 (JSON)
    metadata where possible. If not possible, it wraps a LegacyMetadata
    instance which handles the key-value metadata format.
    """

    METADATA_VERSION_MATCHER = re.compile('^\d+(\.\d+)*$')

    NAME_MATCHER = re.compile('^[0-9A-Z]([0-9A-Z_.-]*[0-9A-Z])?$', re.I)

    VERSION_MATCHER = PEP440_VERSION_RE

    SUMMARY_MATCHER = re.compile('.{1,2047}')

    METADATA_VERSION = '2.0'

    GENERATOR = 'distlib (%s)' % __version__

    MANDATORY_KEYS = {
        'name': (),
        'version': (),
        'summary': ('legacy',),
    }

    INDEX_KEYS = ('name version license summary description author '
                  'author_email keywords platform home_page classifiers '
                  'download_url')

    DEPENDENCY_KEYS = ('extras run_requires test_requires build_requires '
                       'dev_requires provides meta_requires obsoleted_by '
                       'supports_environments')

    SYNTAX_VALIDATORS = {
        'metadata_version': (METADATA_VERSION_MATCHER, ()),
        'name': (NAME_MATCHER, ('legacy',)),
        'version': (VERSION_MATCHER, ('legacy',)),
        'summary': (SUMMARY_MATCHER, ('legacy',)),
    }

    __slots__ = ('_legacy', '_data', 'scheme')

    def __init__(self, path=None, fileobj=None, mapping=None,
                 scheme='default'):
        if [path, fileobj, mapping].count(None) < 2:
            raise TypeError('path, fileobj and mapping are exclusive')
        self._legacy = None
        self._data = None
        self.scheme = scheme
        #import pdb; pdb.set_trace()
        if mapping is not None:
            try:
                self._validate_mapping(mapping, scheme)
                self._data = mapping
            except MetadataUnrecognizedVersionError:
                self._legacy = LegacyMetadata(mapping=mapping, scheme=scheme)
                self.validate()
        else:
            data = None
            if path:
                with open(path, 'rb') as f:
                    data = f.read()
            elif fileobj:
                data = fileobj.read()
            if data is None:
                # Initialised with no args - to be added
                self._data = {
                    'metadata_version': self.METADATA_VERSION,
                    'generator': self.GENERATOR,
                }
            else:
                if not isinstance(data, text_type):
                    data = data.decode('utf-8')
                try:
                    self._data = json.loads(data)
                    self._validate_mapping(self._data, scheme)
                except ValueError:
                    # Note: MetadataUnrecognizedVersionError does not
                    # inherit from ValueError (it's a DistlibException,
                    # which should not inherit from ValueError).
                    # The ValueError comes from the json.load - if that
                    # succeeds and we get a validation error, we want
                    # that to propagate
                    self._legacy = LegacyMetadata(fileobj=StringIO(data),
                                                  scheme=scheme)
                    self.validate()

    common_keys = set(('name', 'version', 'license', 'keywords', 'summary'))

    none_list = (None, list)
    none_dict = (None, dict)

    mapped_keys = {
        'run_requires': ('Requires-Dist', list),
        'build_requires': ('Setup-Requires-Dist', list),
        'dev_requires': none_list,
        'test_requires': none_list,
        'meta_requires': none_list,
        'extras': ('Provides-Extra', list),
        'modules': none_list,
        'namespaces': none_list,
        'exports': none_dict,
        'commands': none_dict,
        'classifiers': ('Classifier', list),
        'source_url': ('Download-URL', None),
        'metadata_version': ('Metadata-Version', None),
    }

    del none_list, none_dict

    def __getattribute__(self, key):
        common = object.__getattribute__(self, 'common_keys')
        mapped = object.__getattribute__(self, 'mapped_keys')
        if key in mapped:
            lk, maker = mapped[key]
            if self._legacy:
                if lk is None:
                    result = None if maker is None else maker()
                else:
                    result = self._legacy.get(lk)
            else:
                value = None if maker is None else maker()
                if key not in ('commands', 'exports', 'modules', 'namespaces',
                               'classifiers'):
                    result = self._data.get(key, value)
                else:
                    # special cases for PEP 459
                    sentinel = object()
                    result = sentinel
                    d = self._data.get('extensions')
                    if d:
                        if key == 'commands':
                            result = d.get('python.commands', value)
                        elif key == 'classifiers':
                            d = d.get('python.details')
                            if d:
                                result = d.get(key, value)
                        else:
                            d = d.get('python.exports')
                            if not d:
                                d = self._data.get('python.exports')
                            if d:
                                result = d.get(key, value)
                    if result is sentinel:
                        result = value
        elif key not in common:
            result = object.__getattribute__(self, key)
        elif self._legacy:
            result = self._legacy.get(key)
        else:
            result = self._data.get(key)
        return result

    def _validate_value(self, key, value, scheme=None):
        if key in self.SYNTAX_VALIDATORS:
            pattern, exclusions = self.SYNTAX_VALIDATORS[key]
            if (scheme or self.scheme) not in exclusions:
                m = pattern.match(value)
                if not m:
                    raise MetadataInvalidError("'%s' is an invalid value for "
                                               "the '%s' property" % (value,
                                                                    key))

    def __setattr__(self, key, value):
        self._validate_value(key, value)
        common = object.__getattribute__(self, 'common_keys')
        mapped = object.__getattribute__(self, 'mapped_keys')
        if key in mapped:
            lk, _ = mapped[key]
            if self._legacy:
                if lk is None:
                    raise NotImplementedError
                self._legacy[lk] = value
            elif key not in ('commands', 'exports', 'modules', 'namespaces',
                             'classifiers'):
                self._data[key] = value
            else:
                # special cases for PEP 459
                d = self._data.setdefault('extensions', {})
                if key == 'commands':
                    d['python.commands'] = value
                elif key == 'classifiers':
                    d = d.setdefault('python.details', {})
                    d[key] = value
                else:
                    d = d.setdefault('python.exports', {})
                    d[key] = value
        elif key not in common:
            object.__setattr__(self, key, value)
        else:
            if key == 'keywords':
                if isinstance(value, string_types):
                    value = value.strip()
                    if value:
                        value = value.split()
                    else:
                        value = []
            if self._legacy:
                self._legacy[key] = value
            else:
                self._data[key] = value

    @property
    def name_and_version(self):
        return _get_name_and_version(self.name, self.version, True)

    @property
    def provides(self):
        if self._legacy:
            result = self._legacy['Provides-Dist']
        else:
            result = self._data.setdefault('provides', [])
        s = '%s (%s)' % (self.name, self.version)
        if s not in result:
            result.append(s)
        return result

    @provides.setter
    def provides(self, value):
        if self._legacy:
            self._legacy['Provides-Dist'] = value
        else:
            self._data['provides'] = value

    def get_requirements(self, reqts, extras=None, env=None):
        """
        Base method to get dependencies, given a set of extras
        to satisfy and an optional environment context.
        :param reqts: A list of sometimes-wanted dependencies,
                      perhaps dependent on extras and environment.
        :param extras: A list of optional components being requested.
        :param env: An optional environment for marker evaluation.
        """
        if self._legacy:
            result = reqts
        else:
            result = []
            extras = get_extras(extras or [], self.extras)
            for d in reqts:
                if 'extra' not in d and 'environment' not in d:
                    # unconditional
                    include = True
                else:
                    if 'extra' not in d:
                        # Not extra-dependent - only environment-dependent
                        include = True
                    else:
                        include = d.get('extra') in extras
                    if include:
                        # Not excluded because of extras, check environment
                        marker = d.get('environment')
                        if marker:
                            include = interpret(marker, env)
                if include:
                    result.extend(d['requires'])
            for key in ('build', 'dev', 'test'):
                e = ':%s:' % key
                if e in extras:
                    extras.remove(e)
                    # A recursive call, but it should terminate since 'test'
                    # has been removed from the extras
                    reqts = self._data.get('%s_requires' % key, [])
                    result.extend(self.get_requirements(reqts, extras=extras,
                                                        env=env))
        return result
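    # Added sketch (not part of the original source): in the 2.0 (JSON) form,
    # the reqts entries handled above look roughly like
    #
    #   [{'requires': ['requests']},
    #    {'extra': 'ssl', 'requires': ['pyOpenSSL']},
    #    {'environment': 'sys_platform == "win32"', 'requires': ['pywin32']}]
    #
    # get_requirements() collects the 'requires' lists whose extra/environment
    # conditions hold for the given extras and env.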

    @property
    def dictionary(self):
        if self._legacy:
            return self._from_legacy()
        return self._data

    @property
    def dependencies(self):
        if self._legacy:
            raise NotImplementedError
        else:
            return extract_by_key(self._data, self.DEPENDENCY_KEYS)

    @dependencies.setter
    def dependencies(self, value):
        if self._legacy:
            raise NotImplementedError
        else:
            self._data.update(value)

    def _validate_mapping(self, mapping, scheme):
        if mapping.get('metadata_version') != self.METADATA_VERSION:
            raise MetadataUnrecognizedVersionError()
        missing = []
        for key, exclusions in self.MANDATORY_KEYS.items():
            if key not in mapping:
                if scheme not in exclusions:
                    missing.append(key)
        if missing:
            msg = 'Missing metadata items: %s' % ', '.join(missing)
            raise MetadataMissingError(msg)
        for k, v in mapping.items():
            self._validate_value(k, v, scheme)

    def validate(self):
        if self._legacy:
            missing, warnings = self._legacy.check(True)
            if missing or warnings:
                logger.warning('Metadata: missing: %s, warnings: %s',
                               missing, warnings)
        else:
            self._validate_mapping(self._data, self.scheme)

    def todict(self):
        if self._legacy:
            return self._legacy.todict(True)
        else:
            result = extract_by_key(self._data, self.INDEX_KEYS)
            return result

    def _from_legacy(self):
        assert self._legacy and not self._data
        result = {
            'metadata_version': self.METADATA_VERSION,
            'generator': self.GENERATOR,
        }
        lmd = self._legacy.todict(True)     # skip missing ones
        for k in ('name', 'version', 'license', 'summary', 'description',
                  'classifier'):
            if k in lmd:
                if k == 'classifier':
                    nk = 'classifiers'
                else:
                    nk = k
                result[nk] = lmd[k]
        kw = lmd.get('Keywords', [])
        if kw == ['']:
            kw = []
        result['keywords'] = kw
        keys = (('requires_dist', 'run_requires'),
                ('setup_requires_dist', 'build_requires'))
        for ok, nk in keys:
            if ok in lmd and lmd[ok]:
                result[nk] = [{'requires': lmd[ok]}]
        result['provides'] = self.provides
        author = {}
        maintainer = {}
        return result

    LEGACY_MAPPING = {
        'name': 'Name',
        'version': 'Version',
        'license': 'License',
        'summary': 'Summary',
        'description': 'Description',
        'classifiers': 'Classifier',
    }

    def _to_legacy(self):
        def process_entries(entries):
            reqts = set()
            for e in entries:
                extra = e.get('extra')
                env = e.get('environment')
                rlist = e['requires']
                for r in rlist:
                    if not env and not extra:
                        reqts.add(r)
                    else:
                        marker = ''
                        if extra:
                            marker = 'extra == "%s"' % extra
                        if env:
                            if marker:
                                marker = '(%s) and %s' % (env, marker)
                            else:
                                marker = env
                        reqts.add(';'.join((r, marker)))
            return reqts

        assert self._data and not self._legacy
        result = LegacyMetadata()
        nmd = self._data
        for nk, ok in self.LEGACY_MAPPING.items():
            if nk in nmd:
                result[ok] = nmd[nk]
        r1 = process_entries(self.run_requires + self.meta_requires)
        r2 = process_entries(self.build_requires + self.dev_requires)
        if self.extras:
            result['Provides-Extra'] = sorted(self.extras)
        result['Requires-Dist'] = sorted(r1)
        result['Setup-Requires-Dist'] = sorted(r2)
        # TODO: other fields such as contacts
        return result

    def write(self, path=None, fileobj=None, legacy=False, skip_unknown=True):
        if [path, fileobj].count(None) != 1:
            raise ValueError('Exactly one of path and fileobj is needed')
        self.validate()
        if legacy:
            if self._legacy:
                legacy_md = self._legacy
            else:
                legacy_md = self._to_legacy()
            if path:
                legacy_md.write(path, skip_unknown=skip_unknown)
            else:
                legacy_md.write_file(fileobj, skip_unknown=skip_unknown)
        else:
            if self._legacy:
                d = self._from_legacy()
            else:
                d = self._data
            if fileobj:
                json.dump(d, fileobj, ensure_ascii=True, indent=2,
                          sort_keys=True)
            else:
                with codecs.open(path, 'w', 'utf-8') as f:
                    json.dump(d, f, ensure_ascii=True, indent=2,
                              sort_keys=True)

    def add_requirements(self, requirements):
        if self._legacy:
            self._legacy.add_requirements(requirements)
        else:
            run_requires = self._data.setdefault('run_requires', [])
            always = None
            for entry in run_requires:
                if 'environment' not in entry and 'extra' not in entry:
                    always = entry
                    break
            if always is None:
                always = { 'requires': requirements }
                run_requires.insert(0, always)
            else:
                rset = set(always['requires']) | set(requirements)
                always['requires'] = sorted(rset)

    def __repr__(self):
        name = self.name or '(no name)'
        version = self.version or 'no version'
        return '<%s %s %s (%s)>' % (self.__class__.__name__,
                                    self.metadata_version, name, version)
# site-packages/pip/_vendor/distlib/database.py
# -*- coding: utf-8 -*-
#
# Copyright (C) 2012-2016 The Python Software Foundation.
# See LICENSE.txt and CONTRIBUTORS.txt.
#
"""PEP 376 implementation."""

from __future__ import unicode_literals

import base64
import codecs
import contextlib
import hashlib
import logging
import os
import posixpath
import sys
import zipimport

from . import DistlibException, resources
from .compat import StringIO
from .version import get_scheme, UnsupportedVersionError
from .metadata import Metadata, METADATA_FILENAME, WHEEL_METADATA_FILENAME
from .util import (parse_requirement, cached_property, parse_name_and_version,
                   read_exports, write_exports, CSVReader, CSVWriter)


__all__ = ['Distribution', 'BaseInstalledDistribution',
           'InstalledDistribution', 'EggInfoDistribution',
           'DistributionPath']


logger = logging.getLogger(__name__)

EXPORTS_FILENAME = 'pydist-exports.json'
COMMANDS_FILENAME = 'pydist-commands.json'

DIST_FILES = ('INSTALLER', METADATA_FILENAME, 'RECORD', 'REQUESTED',
              'RESOURCES', EXPORTS_FILENAME, 'SHARED')

DISTINFO_EXT = '.dist-info'


class _Cache(object):
    """
    A simple cache mapping names and .dist-info paths to distributions
    """
    def __init__(self):
        """
        Initialise an instance. There is normally one for each DistributionPath.
        """
        self.name = {}
        self.path = {}
        self.generated = False

    def clear(self):
        """
        Clear the cache, setting it to its initial state.
        """
        self.name.clear()
        self.path.clear()
        self.generated = False

    def add(self, dist):
        """
        Add a distribution to the cache.
        :param dist: The distribution to add.
        """
        if dist.path not in self.path:
            self.path[dist.path] = dist
            self.name.setdefault(dist.key, []).append(dist)


class DistributionPath(object):
    """
    Represents a set of distributions installed on a path (typically sys.path).
    """
    def __init__(self, path=None, include_egg=False):
        """
        Create an instance from a path, optionally including legacy (distutils/
        setuptools/distribute) distributions.
        :param path: The path to use, as a list of directories. If not specified,
                     sys.path is used.
        :param include_egg: If True, this instance will look for and return legacy
                            distributions as well as those based on PEP 376.
        """
        if path is None:
            path = sys.path
        self.path = path
        self._include_dist = True
        self._include_egg = include_egg

        self._cache = _Cache()
        self._cache_egg = _Cache()
        self._cache_enabled = True
        self._scheme = get_scheme('default')

    def _get_cache_enabled(self):
        return self._cache_enabled

    def _set_cache_enabled(self, value):
        self._cache_enabled = value

    cache_enabled = property(_get_cache_enabled, _set_cache_enabled)

    def clear_cache(self):
        """
        Clears the internal cache.
        """
        self._cache.clear()
        self._cache_egg.clear()


    def _yield_distributions(self):
        """
        Yield .dist-info and/or .egg(-info) distributions.
        """
        # We need to check if we've seen some resources already, because on
        # some Linux systems (e.g. some Debian/Ubuntu variants) there are
        # symlinks which alias other files in the environment.
        seen = set()
        for path in self.path:
            finder = resources.finder_for_path(path)
            if finder is None:
                continue
            r = finder.find('')
            if not r or not r.is_container:
                continue
            rset = sorted(r.resources)
            for entry in rset:
                r = finder.find(entry)
                if not r or r.path in seen:
                    continue
                if self._include_dist and entry.endswith(DISTINFO_EXT):
                    possible_filenames = [METADATA_FILENAME, WHEEL_METADATA_FILENAME]
                    for metadata_filename in possible_filenames:
                        metadata_path = posixpath.join(entry, metadata_filename)
                        pydist = finder.find(metadata_path)
                        if pydist:
                            break
                    else:
                        continue

                    with contextlib.closing(pydist.as_stream()) as stream:
                        metadata = Metadata(fileobj=stream, scheme='legacy')
                    logger.debug('Found %s', r.path)
                    seen.add(r.path)
                    yield new_dist_class(r.path, metadata=metadata,
                                         env=self)
                elif self._include_egg and entry.endswith(('.egg-info',
                                                          '.egg')):
                    logger.debug('Found %s', r.path)
                    seen.add(r.path)
                    yield old_dist_class(r.path, self)

    def _generate_cache(self):
        """
        Scan the path for distributions and populate the cache with
        those that are found.
        """
        gen_dist = not self._cache.generated
        gen_egg = self._include_egg and not self._cache_egg.generated
        if gen_dist or gen_egg:
            for dist in self._yield_distributions():
                if isinstance(dist, InstalledDistribution):
                    self._cache.add(dist)
                else:
                    self._cache_egg.add(dist)

            if gen_dist:
                self._cache.generated = True
            if gen_egg:
                self._cache_egg.generated = True

    @classmethod
    def distinfo_dirname(cls, name, version):
        """
        The *name* and *version* parameters are converted into their
        filename-escaped form, i.e. any ``'-'`` characters are replaced
        with ``'_'`` other than the one in ``'dist-info'`` and the one
        separating the name from the version number.

        :parameter name: is converted to a standard distribution name by replacing
                         any runs of non-alphanumeric characters with a single
                         ``'-'``.
        :type name: string
        :parameter version: is converted to a standard version string. Spaces
                            become dots, and all other non-alphanumeric characters
                            (except dots) become dashes, with runs of multiple
                            dashes condensed to a single dash.
        :type version: string
        :returns: directory name
        :rtype: string"""
        name = name.replace('-', '_')
        return '-'.join([name, version]) + DISTINFO_EXT
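
    # Illustrative example (added; not part of the original source):
    # DistributionPath.distinfo_dirname('python-dateutil', '2.6.0') returns
    # 'python_dateutil-2.6.0.dist-info'.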

    def get_distributions(self):
        """
        Provides an iterator that looks for distributions and returns
        :class:`InstalledDistribution` or
        :class:`EggInfoDistribution` instances for each one of them.

        :rtype: iterator of :class:`InstalledDistribution` and
                :class:`EggInfoDistribution` instances
        """
        if not self._cache_enabled:
            for dist in self._yield_distributions():
                yield dist
        else:
            self._generate_cache()

            for dist in self._cache.path.values():
                yield dist

            if self._include_egg:
                for dist in self._cache_egg.path.values():
                    yield dist

    def get_distribution(self, name):
        """
        Looks for a named distribution on the path.

        This function only returns the first result found, as no more than one
        value is expected. If nothing is found, ``None`` is returned.

        :rtype: :class:`InstalledDistribution`, :class:`EggInfoDistribution`
                or ``None``
        """
        result = None
        name = name.lower()
        if not self._cache_enabled:
            for dist in self._yield_distributions():
                if dist.key == name:
                    result = dist
                    break
        else:
            self._generate_cache()

            if name in self._cache.name:
                result = self._cache.name[name][0]
            elif self._include_egg and name in self._cache_egg.name:
                result = self._cache_egg.name[name][0]
        return result

    def provides_distribution(self, name, version=None):
        """
        Iterates over all distributions to find which distributions provide *name*.
        If a *version* is provided, it will be used to filter the results.

        This method is a generator: it yields each distribution that provides
        a matching *name* (and, where *version* is given, a matching version).
        If nothing matches, nothing is yielded.

        :parameter version: a version specifier that indicates the version
                            required, conforming to the format in ``PEP-345``

        :type name: string
        :type version: string
        """
        matcher = None
        if version is not None:
            try:
                matcher = self._scheme.matcher('%s (%s)' % (name, version))
            except ValueError:
                raise DistlibException('invalid name or version: %r, %r' %
                                      (name, version))

        for dist in self.get_distributions():
            provided = dist.provides

            for p in provided:
                p_name, p_ver = parse_name_and_version(p)
                if matcher is None:
                    if p_name == name:
                        yield dist
                        break
                else:
                    if p_name == name and matcher.match(p_ver):
                        yield dist
                        break

    def get_file_path(self, name, relative_path):
        """
        Return the path to a resource file.
        """
        dist = self.get_distribution(name)
        if dist is None:
            raise LookupError('no distribution named %r found' % name)
        return dist.get_resource_path(relative_path)

    def get_exported_entries(self, category, name=None):
        """
        Return all of the exported entries in a particular category.

        :param category: The category to search for entries.
        :param name: If specified, only entries with that name are returned.
        """
        for dist in self.get_distributions():
            r = dist.exports
            if category in r:
                d = r[category]
                if name is not None:
                    if name in d:
                        yield d[name]
                else:
                    for v in d.values():
                        yield v
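
    # Usage sketch (added for illustration; 'requests' is just an example
    # name, any installed project name works the same way):
    #
    #     dp = DistributionPath(include_egg=True)
    #     for dist in dp.get_distributions():
    #         print(dist.name_and_version)
    #     dist = dp.get_distribution('requests')   # a distribution or None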


class Distribution(object):
    """
    A base class for distributions, whether installed or from indexes.
    Either way, it must have some metadata, so that's all that's needed
    for construction.
    """

    build_time_dependency = False
    """
    Set to True if it's known to be only a build-time dependency (i.e.
    not needed after installation).
    """

    requested = False
    """A boolean that indicates whether the ``REQUESTED`` metadata file is
    present (in other words, whether the package was installed by user
    request or it was installed as a dependency)."""

    def __init__(self, metadata):
        """
        Initialise an instance.
        :param metadata: The instance of :class:`Metadata` describing this
        distribution.
        """
        self.metadata = metadata
        self.name = metadata.name
        self.key = self.name.lower()    # for case-insensitive comparisons
        self.version = metadata.version
        self.locator = None
        self.digest = None
        self.extras = None      # additional features requested
        self.context = None     # environment marker overrides
        self.download_urls = set()
        self.digests = {}

    @property
    def source_url(self):
        """
        The source archive download URL for this distribution.
        """
        return self.metadata.source_url

    download_url = source_url   # Backward compatibility

    @property
    def name_and_version(self):
        """
        A utility property which displays the name and version in parentheses.
        """
        return '%s (%s)' % (self.name, self.version)

    @property
    def provides(self):
        """
        A set of distribution names and versions provided by this distribution.
        :return: A set of "name (version)" strings.
        """
        plist = self.metadata.provides
        s = '%s (%s)' % (self.name, self.version)
        if s not in plist:
            plist.append(s)
        return plist

    def _get_requirements(self, req_attr):
        md = self.metadata
        logger.debug('Getting requirements from metadata %r', md.todict())
        reqts = getattr(md, req_attr)
        return set(md.get_requirements(reqts, extras=self.extras,
                                       env=self.context))

    @property
    def run_requires(self):
        return self._get_requirements('run_requires')

    @property
    def meta_requires(self):
        return self._get_requirements('meta_requires')

    @property
    def build_requires(self):
        return self._get_requirements('build_requires')

    @property
    def test_requires(self):
        return self._get_requirements('test_requires')

    @property
    def dev_requires(self):
        return self._get_requirements('dev_requires')

    def matches_requirement(self, req):
        """
        Say if this instance matches (fulfills) a requirement.
        :param req: The requirement to match.
        :rtype req: str
        :return: True if it matches, else False.
        """
        # Requirement may contain extras - parse to lose those
        # from what's passed to the matcher
        r = parse_requirement(req)
        scheme = get_scheme(self.metadata.scheme)
        try:
            matcher = scheme.matcher(r.requirement)
        except UnsupportedVersionError:
            # XXX compat-mode if cannot read the version
            logger.warning('could not read version %r - using name only',
                           req)
            name = req.split()[0]
            matcher = scheme.matcher(name)

        name = matcher.key   # case-insensitive

        result = False
        for p in self.provides:
            p_name, p_ver = parse_name_and_version(p)
            if p_name != name:
                continue
            try:
                result = matcher.match(p_ver)
                break
            except UnsupportedVersionError:
                pass
        return result
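
    # Illustrative example (added; 'foo' is a hypothetical project): for a
    # distribution whose ``provides`` list contains 'foo (1.4)',
    #
    #     dist.matches_requirement('foo (>= 1.2)')   # -> True
    #     dist.matches_requirement('foo (>= 2.0)')   # -> False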

    def __repr__(self):
        """
        Return a textual representation of this instance.
        """
        if self.source_url:
            suffix = ' [%s]' % self.source_url
        else:
            suffix = ''
        return '<Distribution %s (%s)%s>' % (self.name, self.version, suffix)

    def __eq__(self, other):
        """
        See if this distribution is the same as another.
        :param other: The distribution to compare with. To be equal to one
                      another, distributions must have the same type, name,
                      version and source_url.
        :return: True if it is the same, else False.
        """
        if type(other) is not type(self):
            result = False
        else:
            result = (self.name == other.name and
                      self.version == other.version and
                      self.source_url == other.source_url)
        return result

    def __hash__(self):
        """
        Compute hash in a way which matches the equality test.
        """
        return hash(self.name) + hash(self.version) + hash(self.source_url)


class BaseInstalledDistribution(Distribution):
    """
    This is the base class for installed distributions (whether PEP 376 or
    legacy).
    """

    hasher = None

    def __init__(self, metadata, path, env=None):
        """
        Initialise an instance.
        :param metadata: An instance of :class:`Metadata` which describes the
                         distribution. This will normally have been initialised
                         from a metadata file in the ``path``.
        :param path:     The path of the ``.dist-info`` or ``.egg-info``
                         directory for the distribution.
        :param env:      This is normally the :class:`DistributionPath`
                         instance where this distribution was found.
        """
        super(BaseInstalledDistribution, self).__init__(metadata)
        self.path = path
        self.dist_path = env

    def get_hash(self, data, hasher=None):
        """
        Get the hash of some data, using a particular hash algorithm, if
        specified.

        :param data: The data to be hashed.
        :type data: bytes
        :param hasher: The name of a hash implementation, supported by hashlib,
                       or ``None``. Examples of valid values are ``'sha1'``,
                       ``'sha224'``, ``'sha384'``, '``sha256'``, ``'md5'`` and
                       ``'sha512'``. If no hasher is specified, the ``hasher``
                       attribute of the :class:`InstalledDistribution` instance
                       is used. If the hasher is determined to be ``None``, MD5
                       is used as the hashing algorithm.
        :returns: The hash of the data. If a hasher was explicitly specified,
                  the returned hash will be prefixed with the specified hasher
                  followed by '='.
        :rtype: str
        """
        if hasher is None:
            hasher = self.hasher
        if hasher is None:
            hasher = hashlib.md5
            prefix = ''
        else:
            # Capture the algorithm name before rebinding 'hasher' to the
            # hashlib constructor, so the prefix matches the algorithm used.
            prefix = '%s=' % hasher
            hasher = getattr(hashlib, hasher)
        digest = hasher(data).digest()
        digest = base64.urlsafe_b64encode(digest).rstrip(b'=').decode('ascii')
        return '%s%s' % (prefix, digest)
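
    # Shape of the return value (added for illustration): with
    # hasher='sha256' the result looks like 'sha256=<urlsafe-base64-digest>'
    # (trailing '=' padding stripped); when no hasher is specified and
    # ``self.hasher`` is ``None``, MD5 is used and only the bare digest is
    # returned, without any prefix.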


class InstalledDistribution(BaseInstalledDistribution):
    """
    Created with the *path* of the ``.dist-info`` directory provided to the
    constructor. It reads the metadata contained in ``pydist.json`` when it is
    instantiated, or uses a passed-in Metadata instance (useful for when
    dry-run mode is being used).
    """

    hasher = 'sha256'

    def __init__(self, path, metadata=None, env=None):
        self.finder = finder = resources.finder_for_path(path)
        if finder is None:
            raise ValueError('finder unavailable for %s' % path)
        if env and env._cache_enabled and path in env._cache.path:
            metadata = env._cache.path[path].metadata
        elif metadata is None:
            r = finder.find(METADATA_FILENAME)
            # Temporary - for Wheel 0.23 support
            if r is None:
                r = finder.find(WHEEL_METADATA_FILENAME)
            # Temporary - for legacy support
            if r is None:
                r = finder.find('METADATA')
            if r is None:
                raise ValueError('no %s found in %s' % (METADATA_FILENAME,
                                                        path))
            with contextlib.closing(r.as_stream()) as stream:
                metadata = Metadata(fileobj=stream, scheme='legacy')

        super(InstalledDistribution, self).__init__(metadata, path, env)

        if env and env._cache_enabled:
            env._cache.add(self)

        r = finder.find('REQUESTED')
        self.requested = r is not None

    def __repr__(self):
        return '<InstalledDistribution %r %s at %r>' % (
            self.name, self.version, self.path)

    def __str__(self):
        return "%s %s" % (self.name, self.version)

    def _get_records(self):
        """
        Get the list of installed files for the distribution
        :return: A list of tuples of path, hash and size. Note that hash and
                 size might be ``None`` for some entries. The path is exactly
                 as stored in the file (which is as in PEP 376).
        """
        results = []
        r = self.get_distinfo_resource('RECORD')
        with contextlib.closing(r.as_stream()) as stream:
            with CSVReader(stream=stream) as record_reader:
                # Base location is parent dir of .dist-info dir
                #base_location = os.path.dirname(self.path)
                #base_location = os.path.abspath(base_location)
                for row in record_reader:
                    missing = [None for i in range(len(row), 3)]
                    path, checksum, size = row + missing
                    #if not os.path.isabs(path):
                    #    path = path.replace('/', os.sep)
                    #    path = os.path.join(base_location, path)
                    results.append((path, checksum, size))
        return results

    @cached_property
    def exports(self):
        """
        Return the information exported by this distribution.
        :return: A dictionary of exports, mapping an export category to a dict
                 of :class:`ExportEntry` instances describing the individual
                 export entries, and keyed by name.
        """
        result = {}
        r = self.get_distinfo_resource(EXPORTS_FILENAME)
        if r:
            result = self.read_exports()
        return result

    def read_exports(self):
        """
        Read exports data from a file in .ini format.

        :return: A dictionary of exports, mapping an export category to a list
                 of :class:`ExportEntry` instances describing the individual
                 export entries.
        """
        result = {}
        r = self.get_distinfo_resource(EXPORTS_FILENAME)
        if r:
            with contextlib.closing(r.as_stream()) as stream:
                result = read_exports(stream)
        return result

    def write_exports(self, exports):
        """
        Write a dictionary of exports to a file in .ini format.
        :param exports: A dictionary of exports, mapping an export category to
                        a list of :class:`ExportEntry` instances describing the
                        individual export entries.
        """
        rf = self.get_distinfo_file(EXPORTS_FILENAME)
        with open(rf, 'w') as f:
            write_exports(exports, f)

    def get_resource_path(self, relative_path):
        """
        NOTE: This API may change in the future.

        Return the absolute path to a resource file with the given relative
        path.

        :param relative_path: The path, relative to .dist-info, of the resource
                              of interest.
        :return: The absolute path where the resource is to be found.
        """
        r = self.get_distinfo_resource('RESOURCES')
        with contextlib.closing(r.as_stream()) as stream:
            with CSVReader(stream=stream) as resources_reader:
                for relative, destination in resources_reader:
                    if relative == relative_path:
                        return destination
        raise KeyError('no resource file with relative path %r '
                       'is installed' % relative_path)

    def list_installed_files(self):
        """
        Iterates over the ``RECORD`` entries and returns a tuple
        ``(path, hash, size)`` for each line.

        :returns: iterator of (path, hash, size)
        """
        for result in self._get_records():
            yield result

    def write_installed_files(self, paths, prefix, dry_run=False):
        """
        Writes the ``RECORD`` file, using the ``paths`` iterable passed in. Any
        existing ``RECORD`` file is silently overwritten.

        prefix is used to determine when to write absolute paths.
        """
        prefix = os.path.join(prefix, '')
        base = os.path.dirname(self.path)
        base_under_prefix = base.startswith(prefix)
        base = os.path.join(base, '')
        record_path = self.get_distinfo_file('RECORD')
        logger.info('creating %s', record_path)
        if dry_run:
            return None
        with CSVWriter(record_path) as writer:
            for path in paths:
                if os.path.isdir(path) or path.endswith(('.pyc', '.pyo')):
                    # do not put size and hash, as in PEP-376
                    hash_value = size = ''
                else:
                    size = '%d' % os.path.getsize(path)
                    with open(path, 'rb') as fp:
                        hash_value = self.get_hash(fp.read())
                if path.startswith(base) or (base_under_prefix and
                                             path.startswith(prefix)):
                    path = os.path.relpath(path, base)
                writer.writerow((path, hash_value, size))

            # add the RECORD file itself
            if record_path.startswith(base):
                record_path = os.path.relpath(record_path, base)
            writer.writerow((record_path, '', ''))
        return record_path

    def check_installed_files(self):
        """
        Checks that the hashes and sizes of the files in ``RECORD`` are
        matched by the files themselves. Returns a (possibly empty) list of
        mismatches. Each entry in the mismatch list will be a tuple consisting
        of the path, 'exists', 'size' or 'hash' according to what didn't match
        (existence is checked first, then size, then hash), the expected
        value and the actual value.
        """
        mismatches = []
        base = os.path.dirname(self.path)
        record_path = self.get_distinfo_file('RECORD')
        for path, hash_value, size in self.list_installed_files():
            if not os.path.isabs(path):
                path = os.path.join(base, path)
            if path == record_path:
                continue
            if not os.path.exists(path):
                mismatches.append((path, 'exists', True, False))
            elif os.path.isfile(path):
                actual_size = str(os.path.getsize(path))
                if size and actual_size != size:
                    mismatches.append((path, 'size', size, actual_size))
                elif hash_value:
                    if '=' in hash_value:
                        hasher = hash_value.split('=', 1)[0]
                    else:
                        hasher = None

                    with open(path, 'rb') as f:
                        actual_hash = self.get_hash(f.read(), hasher)
                        if actual_hash != hash_value:
                            mismatches.append((path, 'hash', hash_value, actual_hash))
        return mismatches

    @cached_property
    def shared_locations(self):
        """
        A dictionary of shared locations whose keys are in the set 'prefix',
        'purelib', 'platlib', 'scripts', 'headers', 'data' and 'namespace'.
        The corresponding value is the absolute path of that category for
        this distribution, and takes into account any paths selected by the
        user at installation time (e.g. via command-line arguments). In the
        case of the 'namespace' key, this would be a list of absolute paths
        for the roots of namespace packages in this distribution.

        The first time this property is accessed, the relevant information is
        read from the SHARED file in the .dist-info directory.
        """
        result = {}
        shared_path = os.path.join(self.path, 'SHARED')
        if os.path.isfile(shared_path):
            with codecs.open(shared_path, 'r', encoding='utf-8') as f:
                lines = f.read().splitlines()
            for line in lines:
                key, value = line.split('=', 1)
                if key == 'namespace':
                    result.setdefault(key, []).append(value)
                else:
                    result[key] = value
        return result
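
    # Sketch of the SHARED file parsed above (added for illustration; the
    # paths are made up). It is a plain 'key=value' text file, and the
    # 'namespace' key may appear more than once:
    #
    #     prefix=/usr/local
    #     scripts=/usr/local/bin
    #     namespace=/usr/local/lib/python2.7/site-packages/somepkg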

    def write_shared_locations(self, paths, dry_run=False):
        """
        Write shared location information to the SHARED file in .dist-info.
        :param paths: A dictionary as described in the documentation for
        :meth:`shared_locations`.
        :param dry_run: If True, the action is logged but no file is actually
                        written.
        :return: The path of the file written to.
        """
        shared_path = os.path.join(self.path, 'SHARED')
        logger.info('creating %s', shared_path)
        if dry_run:
            return None
        lines = []
        for key in ('prefix', 'lib', 'headers', 'scripts', 'data'):
            path = paths[key]
            if os.path.isdir(paths[key]):
                lines.append('%s=%s' % (key, path))
        for ns in paths.get('namespace', ()):
            lines.append('namespace=%s' % ns)

        with codecs.open(shared_path, 'w', encoding='utf-8') as f:
            f.write('\n'.join(lines))
        return shared_path

    def get_distinfo_resource(self, path):
        if path not in DIST_FILES:
            raise DistlibException('invalid path for a dist-info file: '
                                   '%r at %r' % (path, self.path))
        finder = resources.finder_for_path(self.path)
        if finder is None:
            raise DistlibException('Unable to get a finder for %s' % self.path)
        return finder.find(path)

    def get_distinfo_file(self, path):
        """
        Returns a path located under the ``.dist-info`` directory. Returns a
        string representing the path.

        :parameter path: a ``'/'``-separated path relative to the
                         ``.dist-info`` directory or an absolute path;
                         If *path* is an absolute path and doesn't start
                         with the ``.dist-info`` directory path,
                         a :class:`DistlibException` is raised
        :type path: str
        :rtype: str
        """
        # Check if it is an absolute path  # XXX use relpath, add tests
        if path.find(os.sep) >= 0:
            # it's an absolute path?
            distinfo_dirname, path = path.split(os.sep)[-2:]
            if distinfo_dirname != self.path.split(os.sep)[-1]:
                raise DistlibException(
                    'dist-info file %r does not belong to the %r %s '
                    'distribution' % (path, self.name, self.version))

        # The file must be relative
        if path not in DIST_FILES:
            raise DistlibException('invalid path for a dist-info file: '
                                   '%r at %r' % (path, self.path))

        return os.path.join(self.path, path)

    def list_distinfo_files(self):
        """
        Iterates over the ``RECORD`` entries and returns paths for each line if
        the path is pointing to a file located in the ``.dist-info`` directory
        or one of its subdirectories.

        :returns: iterator of paths
        """
        base = os.path.dirname(self.path)
        for path, checksum, size in self._get_records():
            # XXX add separator or use real relpath algo
            if not os.path.isabs(path):
                path = os.path.join(base, path)
            if path.startswith(self.path):
                yield path

    def __eq__(self, other):
        return (isinstance(other, InstalledDistribution) and
                self.path == other.path)

    # See http://docs.python.org/reference/datamodel#object.__hash__
    __hash__ = object.__hash__


class EggInfoDistribution(BaseInstalledDistribution):
    """Created with the *path* of the ``.egg-info`` directory or file provided
    to the constructor. It reads the metadata contained in the file itself, or
    if the given path happens to be a directory, the metadata is read from the
    file ``PKG-INFO`` under that directory."""

    requested = True    # as we have no way of knowing, assume it was
    shared_locations = {}

    def __init__(self, path, env=None):
        def set_name_and_version(s, n, v):
            s.name = n
            s.key = n.lower()   # for case-insensitive comparisons
            s.version = v

        self.path = path
        self.dist_path = env
        if env and env._cache_enabled and path in env._cache_egg.path:
            metadata = env._cache_egg.path[path].metadata
            set_name_and_version(self, metadata.name, metadata.version)
        else:
            metadata = self._get_metadata(path)

            # Need to be set before caching
            set_name_and_version(self, metadata.name, metadata.version)

            if env and env._cache_enabled:
                env._cache_egg.add(self)
        super(EggInfoDistribution, self).__init__(metadata, path, env)

    def _get_metadata(self, path):
        requires = None

        def parse_requires_data(data):
            """Create a list of dependencies from a requires.txt file.

            *data*: the contents of a setuptools-produced requires.txt file.
            """
            reqs = []
            lines = data.splitlines()
            for line in lines:
                line = line.strip()
                if line.startswith('['):
                    logger.warning('Unexpected line: quitting requirement scan: %r',
                                   line)
                    break
                r = parse_requirement(line)
                if not r:
                    logger.warning('Not recognised as a requirement: %r', line)
                    continue
                if r.extras:
                    logger.warning('extra requirements in requires.txt are '
                                   'not supported')
                if not r.constraints:
                    reqs.append(r.name)
                else:
                    cons = ', '.join('%s%s' % c for c in r.constraints)
                    reqs.append('%s (%s)' % (r.name, cons))
            return reqs

        def parse_requires_path(req_path):
            """Create a list of dependencies from a requires.txt file.

            *req_path*: the path to a setuptools-produced requires.txt file.
            """

            reqs = []
            try:
                with codecs.open(req_path, 'r', 'utf-8') as fp:
                    reqs = parse_requires_data(fp.read())
            except IOError:
                pass
            return reqs

        if path.endswith('.egg'):
            if os.path.isdir(path):
                meta_path = os.path.join(path, 'EGG-INFO', 'PKG-INFO')
                metadata = Metadata(path=meta_path, scheme='legacy')
                req_path = os.path.join(path, 'EGG-INFO', 'requires.txt')
                requires = parse_requires_path(req_path)
            else:
                # FIXME handle the case where zipfile is not available
                zipf = zipimport.zipimporter(path)
                fileobj = StringIO(
                    zipf.get_data('EGG-INFO/PKG-INFO').decode('utf8'))
                metadata = Metadata(fileobj=fileobj, scheme='legacy')
                try:
                    data = zipf.get_data('EGG-INFO/requires.txt')
                    requires = parse_requires_data(data.decode('utf-8'))
                except IOError:
                    requires = None
        elif path.endswith('.egg-info'):
            if os.path.isdir(path):
                req_path = os.path.join(path, 'requires.txt')
                requires = parse_requires_path(req_path)
                path = os.path.join(path, 'PKG-INFO')
            metadata = Metadata(path=path, scheme='legacy')
        else:
            raise DistlibException('path must end with .egg-info or .egg, '
                                   'got %r' % path)

        if requires:
            metadata.add_requirements(requires)
        return metadata

    def __repr__(self):
        return '<EggInfoDistribution %r %s at %r>' % (
            self.name, self.version, self.path)

    def __str__(self):
        return "%s %s" % (self.name, self.version)

    def check_installed_files(self):
        """
        Checks that the hashes and sizes of the files in ``RECORD`` are
        matched by the files themselves. Returns a (possibly empty) list of
        mismatches. Each entry in the mismatch list will be a tuple consisting
        of the path, 'exists', 'size' or 'hash' according to what didn't match
        (existence is checked first, then size, then hash), the expected
        value and the actual value.
        """
        mismatches = []
        record_path = os.path.join(self.path, 'installed-files.txt')
        if os.path.exists(record_path):
            for path, _, _ in self.list_installed_files():
                if path == record_path:
                    continue
                if not os.path.exists(path):
                    mismatches.append((path, 'exists', True, False))
        return mismatches

    def list_installed_files(self):
        """
        Iterates over the ``installed-files.txt`` entries and returns a tuple
        ``(path, hash, size)`` for each line.

        :returns: a list of (path, hash, size)
        """

        def _md5(path):
            with open(path, 'rb') as f:
                content = f.read()
            return hashlib.md5(content).hexdigest()

        def _size(path):
            return os.stat(path).st_size

        record_path = os.path.join(self.path, 'installed-files.txt')
        result = []
        if os.path.exists(record_path):
            with codecs.open(record_path, 'r', encoding='utf-8') as f:
                for line in f:
                    line = line.strip()
                    p = os.path.normpath(os.path.join(self.path, line))
                    # "./" is present as a marker between installed files
                    # and installation metadata files
                    if not os.path.exists(p):
                        logger.warning('Non-existent file: %s', p)
                        if p.endswith(('.pyc', '.pyo')):
                            continue
                        #otherwise fall through and fail
                    if not os.path.isdir(p):
                        result.append((p, _md5(p), _size(p)))
            result.append((record_path, None, None))
        return result

    def list_distinfo_files(self, absolute=False):
        """
        Iterates over the ``installed-files.txt`` entries and returns paths for
        each line if the path is pointing to a file located in the
        ``.egg-info`` directory or one of its subdirectories.

        :parameter absolute: If *absolute* is ``True``, each returned path is
                          transformed into a local absolute path. Otherwise the
                          raw value from ``installed-files.txt`` is returned.
        :type absolute: boolean
        :returns: iterator of paths
        """
        record_path = os.path.join(self.path, 'installed-files.txt')
        skip = True
        with codecs.open(record_path, 'r', encoding='utf-8') as f:
            for line in f:
                line = line.strip()
                if line == './':
                    skip = False
                    continue
                if not skip:
                    p = os.path.normpath(os.path.join(self.path, line))
                    if p.startswith(self.path):
                        if absolute:
                            yield p
                        else:
                            yield line

    def __eq__(self, other):
        return (isinstance(other, EggInfoDistribution) and
                self.path == other.path)

    # See http://docs.python.org/reference/datamodel#object.__hash__
    __hash__ = object.__hash__

new_dist_class = InstalledDistribution
old_dist_class = EggInfoDistribution


class DependencyGraph(object):
    """
    Represents a dependency graph between distributions.

    The dependency relationships are stored in an ``adjacency_list`` that maps
    distributions to a list of ``(other, label)`` tuples where  ``other``
    is a distribution and the edge is labeled with ``label`` (i.e. the version
    specifier, if such was provided). Also, for more efficient traversal, for
    every distribution ``x``, a list of predecessors is kept in
    ``reverse_list[x]``. An edge from distribution ``a`` to
    distribution ``b`` means that ``a`` depends on ``b``. If any missing
    dependencies are found, they are stored in ``missing``, which is a
    dictionary that maps distributions to a list of requirements that were not
    provided by any other distributions.
    """

    def __init__(self):
        self.adjacency_list = {}
        self.reverse_list = {}
        self.missing = {}

    def add_distribution(self, distribution):
        """Add the *distribution* to the graph.

        :type distribution: :class:`distutils2.database.InstalledDistribution`
                            or :class:`distutils2.database.EggInfoDistribution`
        """
        self.adjacency_list[distribution] = []
        self.reverse_list[distribution] = []
        #self.missing[distribution] = []

    def add_edge(self, x, y, label=None):
        """Add an edge from distribution *x* to distribution *y* with the given
        *label*.

        :type x: :class:`distutils2.database.InstalledDistribution` or
                 :class:`distutils2.database.EggInfoDistribution`
        :type y: :class:`distutils2.database.InstalledDistribution` or
                 :class:`distutils2.database.EggInfoDistribution`
        :type label: ``str`` or ``None``
        """
        self.adjacency_list[x].append((y, label))
        # multiple edges are allowed, so be careful
        if x not in self.reverse_list[y]:
            self.reverse_list[y].append(x)

    def add_missing(self, distribution, requirement):
        """
        Add a missing *requirement* for the given *distribution*.

        :type distribution: :class:`distutils2.database.InstalledDistribution`
                            or :class:`distutils2.database.EggInfoDistribution`
        :type requirement: ``str``
        """
        logger.debug('%s missing %r', distribution, requirement)
        self.missing.setdefault(distribution, []).append(requirement)

    def _repr_dist(self, dist):
        return '%s %s' % (dist.name, dist.version)

    def repr_node(self, dist, level=1):
        """Prints only a subgraph"""
        output = [self._repr_dist(dist)]
        for other, label in self.adjacency_list[dist]:
            dist = self._repr_dist(other)
            if label is not None:
                dist = '%s [%s]' % (dist, label)
            output.append('    ' * level + str(dist))
            suboutput = self.repr_node(other, level + 1)
            subs = suboutput.split('\n')
            output.extend(subs[1:])
        return '\n'.join(output)

    def to_dot(self, f, skip_disconnected=True):
        """Writes a DOT output for the graph to the provided file *f*.

        If *skip_disconnected* is set to ``True``, then all distributions
        that are not dependent on any other distribution are skipped.

        :type f: has to support ``file``-like operations
        :type skip_disconnected: ``bool``
        """
        disconnected = []

        f.write("digraph dependencies {\n")
        for dist, adjs in self.adjacency_list.items():
            if len(adjs) == 0 and not skip_disconnected:
                disconnected.append(dist)
            for other, label in adjs:
                if label is not None:
                    f.write('"%s" -> "%s" [label="%s"]\n' %
                            (dist.name, other.name, label))
                else:
                    f.write('"%s" -> "%s"\n' % (dist.name, other.name))
        if not skip_disconnected and len(disconnected) > 0:
            f.write('subgraph disconnected {\n')
            f.write('label = "Disconnected"\n')
            f.write('bgcolor = red\n')

            for dist in disconnected:
                f.write('"%s"' % dist.name)
                f.write('\n')
            f.write('}\n')
        f.write('}\n')

    def topological_sort(self):
        """
        Perform a topological sort of the graph.
        :return: A tuple, the first element of which is a topologically sorted
                 list of distributions, and the second element of which is a
                 list of distributions that cannot be sorted because they have
                 circular dependencies and so form a cycle.
        """
        result = []
        # Make a shallow copy of the adjacency list
        alist = {}
        for k, v in self.adjacency_list.items():
            alist[k] = v[:]
        while True:
            # See what we can remove in this run
            to_remove = []
            for k, v in list(alist.items()):
                if not v:
                    to_remove.append(k)
                    del alist[k]
            if not to_remove:
                # What's left in alist (if anything) is a cycle.
                break
            # Remove from the adjacency list of others
            for k, v in alist.items():
                alist[k] = [(d, r) for d, r in v if d not in to_remove]
            logger.debug('Moving to result: %s',
                         ['%s (%s)' % (d.name, d.version) for d in to_remove])
            result.extend(to_remove)
        return result, list(alist.keys())

    def __repr__(self):
        """Representation of the graph"""
        output = []
        for dist, adjs in self.adjacency_list.items():
            output.append(self.repr_node(dist))
        return '\n'.join(output)


def make_graph(dists, scheme='default'):
    """Makes a dependency graph from the given distributions.

    :parameter dists: a list of distributions
    :type dists: list of :class:`distutils2.database.InstalledDistribution` and
                 :class:`distutils2.database.EggInfoDistribution` instances
    :rtype: a :class:`DependencyGraph` instance
    """
    scheme = get_scheme(scheme)
    graph = DependencyGraph()
    provided = {}  # maps names to lists of (version, dist) tuples

    # first, build the graph and find out what's provided
    for dist in dists:
        graph.add_distribution(dist)

        for p in dist.provides:
            name, version = parse_name_and_version(p)
            logger.debug('Add to provided: %s, %s, %s', name, version, dist)
            provided.setdefault(name, []).append((version, dist))

    # now make the edges
    for dist in dists:
        requires = (dist.run_requires | dist.meta_requires |
                    dist.build_requires | dist.dev_requires)
        for req in requires:
            try:
                matcher = scheme.matcher(req)
            except UnsupportedVersionError:
                # XXX compat-mode if cannot read the version
                logger.warning('could not read version %r - using name only',
                               req)
                name = req.split()[0]
                matcher = scheme.matcher(name)

            name = matcher.key   # case-insensitive

            matched = False
            if name in provided:
                for version, provider in provided[name]:
                    try:
                        match = matcher.match(version)
                    except UnsupportedVersionError:
                        match = False

                    if match:
                        graph.add_edge(dist, provider, req)
                        matched = True
                        break
            if not matched:
                graph.add_missing(dist, req)
    return graph
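
# Usage sketch for the graph utilities (added for illustration):
#
#     dists = list(DistributionPath().get_distributions())
#     graph = make_graph(dists)
#     ordered, cyclic = graph.topological_sort()
#     # 'ordered' lists distributions with their dependencies before their
#     # dependents; 'cyclic' holds any distributions caught in a dependency
#     # cycle and therefore left unsorted.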


def get_dependent_dists(dists, dist):
    """Recursively generate a list of distributions from *dists* that are
    dependent on *dist*.

    :param dists: a list of distributions
    :param dist: a distribution, member of *dists* for which we are interested
    """
    if dist not in dists:
        raise DistlibException('given distribution %r is not a member '
                               'of the list' % dist.name)
    graph = make_graph(dists)

    dep = [dist]  # dependent distributions
    todo = graph.reverse_list[dist]  # list of nodes we should inspect

    while todo:
        d = todo.pop()
        dep.append(d)
        for succ in graph.reverse_list[d]:
            if succ not in dep:
                todo.append(succ)

    dep.pop(0)  # remove dist from dep, was there to prevent infinite loops
    return dep


def get_required_dists(dists, dist):
    """Recursively generate a list of distributions from *dists* that are
    required by *dist*.

    :param dists: a list of distributions
    :param dist: a distribution, member of *dists* for which we are interested
    """
    if dist not in dists:
        raise DistlibException('given distribution %r is not a member '
                               'of the list' % dist.name)
    graph = make_graph(dists)

    req = []  # required distributions
    todo = graph.adjacency_list[dist]  # list of nodes we should inspect

    while todo:
        d = todo.pop()[0]
        req.append(d)
        for pred in graph.adjacency_list[d]:
            if pred not in req:
                todo.append(pred)

    return req


def make_dist(name, version, **kwargs):
    """
    A convenience method for making a dist given just a name and version.
    """
    summary = kwargs.pop('summary', 'Placeholder for summary')
    md = Metadata(**kwargs)
    md.name = name
    md.version = version
    md.summary = summary or 'Placeholder for summary'
    return Distribution(md)
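
# Example (added for illustration; 'foo' is a placeholder name):
# make_dist('foo', '1.0.0', summary='A test dist') returns a Distribution
# whose metadata carries the given name, version and summary.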
# site-packages/pip/_vendor/distlib/util.py
#
# Copyright (C) 2012-2016 The Python Software Foundation.
# See LICENSE.txt and CONTRIBUTORS.txt.
#
import codecs
from collections import deque
import contextlib
import csv
from glob import iglob as std_iglob
import io
import json
import logging
import os
import py_compile
import re
import shutil
import socket
try:
    import ssl
except ImportError:  # pragma: no cover
    ssl = None
import subprocess
import sys
import tarfile
import tempfile
import textwrap

try:
    import threading
except ImportError:  # pragma: no cover
    import dummy_threading as threading
import time

from . import DistlibException
from .compat import (string_types, text_type, shutil, raw_input, StringIO,
                     cache_from_source, urlopen, urljoin, httplib, xmlrpclib,
                     splittype, HTTPHandler, BaseConfigurator, valid_ident,
                     Container, configparser, URLError, ZipFile, fsdecode,
                     unquote)

logger = logging.getLogger(__name__)

#
# Requirement parsing code for name + optional constraints + optional extras
#
# e.g. 'foo >= 1.2, < 2.0 [bar, baz]'
#
# The regex can seem a bit hairy, so we build it up out of smaller pieces
# which are manageable.
#

COMMA = r'\s*,\s*'
COMMA_RE = re.compile(COMMA)

IDENT = r'(\w|[.-])+'
EXTRA_IDENT = r'(\*|:(\*|\w+):|' + IDENT + ')'
VERSPEC = IDENT + r'\*?'

RELOP = '([<>=!~]=)|[<>]'

#
# The first relop is optional - if absent, will be taken as '~='
#
BARE_CONSTRAINTS = ('(' + RELOP + r')?\s*(' + VERSPEC + ')(' + COMMA + '(' +
                    RELOP + r')\s*(' + VERSPEC + '))*')

DIRECT_REF = '(from\s+(?P<diref>.*))'

#
# Either the bare constraints or the bare constraints in parentheses
#
CONSTRAINTS = (r'\(\s*(?P<c1>' + BARE_CONSTRAINTS + '|' + DIRECT_REF +
               r')\s*\)|(?P<c2>' + BARE_CONSTRAINTS + '\s*)')

EXTRA_LIST = EXTRA_IDENT + '(' + COMMA + EXTRA_IDENT + ')*'
EXTRAS = r'\[\s*(?P<ex>' + EXTRA_LIST + r')?\s*\]'
REQUIREMENT = ('(?P<dn>'  + IDENT + r')\s*(' + EXTRAS + r'\s*)?(\s*' +
               CONSTRAINTS + ')?$')
REQUIREMENT_RE = re.compile(REQUIREMENT)

#
# Used to scan through the constraints
#
RELOP_IDENT = '(?P<op>' + RELOP + r')\s*(?P<vn>' + VERSPEC + ')'
RELOP_IDENT_RE = re.compile(RELOP_IDENT)

def parse_requirement(s):

    def get_constraint(m):
        d = m.groupdict()
        return d['op'], d['vn']

    result = None
    m = REQUIREMENT_RE.match(s)
    if m:
        d = m.groupdict()
        name = d['dn']
        cons = d['c1'] or d['c2']
        if not d['diref']:
            url = None
        else:
            # direct reference
            cons = None
            url = d['diref'].strip()
        if not cons:
            cons = None
            constr = ''
            rs = d['dn']
        else:
            if cons[0] not in '<>!=':
                cons = '~=' + cons
            iterator = RELOP_IDENT_RE.finditer(cons)
            cons = [get_constraint(m) for m in iterator]
            rs = '%s (%s)' % (name, ', '.join(['%s %s' % con for con in cons]))
        if not d['ex']:
            extras = None
        else:
            extras = COMMA_RE.split(d['ex'])
        result = Container(name=name, constraints=cons, extras=extras,
                           requirement=rs, source=s, url=url)
    return result
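
# Illustrative examples (added; 'foo', 'bar' and 'baz' are placeholders):
#
#     r = parse_requirement('foo (>= 1.2, < 2.0)')
#     # r.name        -> 'foo'
#     # r.constraints -> [('>=', '1.2'), ('<', '2.0')]
#     # r.requirement -> 'foo (>= 1.2, < 2.0)'
#
#     r = parse_requirement('foo [bar, baz]')
#     # r.extras      -> ['bar', 'baz']
#     # r.constraints -> None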


def get_resources_dests(resources_root, rules):
    """Find destinations for resources files"""

    def get_rel_path(base, path):
        # normalizes and returns a lstripped-/-separated path
        base = base.replace(os.path.sep, '/')
        path = path.replace(os.path.sep, '/')
        assert path.startswith(base)
        return path[len(base):].lstrip('/')


    destinations = {}
    for base, suffix, dest in rules:
        prefix = os.path.join(resources_root, base)
        for abs_base in iglob(prefix):
            abs_glob = os.path.join(abs_base, suffix)
            for abs_path in iglob(abs_glob):
                resource_file = get_rel_path(resources_root, abs_path)
                if dest is None:  # remove the entry if it was here
                    destinations.pop(resource_file, None)
                else:
                    rel_path = get_rel_path(abs_base, abs_path)
                    rel_dest = dest.replace(os.path.sep, '/').rstrip('/')
                    destinations[resource_file] = rel_dest + '/' + rel_path
    return destinations


def in_venv():
    if hasattr(sys, 'real_prefix'):
        # virtualenv venvs
        result = True
    else:
        # PEP 405 venvs
        result = sys.prefix != getattr(sys, 'base_prefix', sys.prefix)
    return result


def get_executable():
# The __PYVENV_LAUNCHER__ dance is apparently no longer needed, as
# changes to the stub launcher mean that sys.executable always points
# to the stub on macOS
#    if sys.platform == 'darwin' and ('__PYVENV_LAUNCHER__'
#                                     in os.environ):
#        result =  os.environ['__PYVENV_LAUNCHER__']
#    else:
#        result = sys.executable
#    return result
    result = os.path.normcase(sys.executable)
    if not isinstance(result, text_type):
        result = fsdecode(result)
    return result


def proceed(prompt, allowed_chars, error_prompt=None, default=None):
    p = prompt
    while True:
        s = raw_input(p)
        p = prompt
        if not s and default:
            s = default
        if s:
            c = s[0].lower()
            if c in allowed_chars:
                break
            if error_prompt:
                p = '%c: %s\n%s' % (c, error_prompt, prompt)
    return c
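
# Example (added for illustration): proceed('Overwrite? (y/n) ', 'yn',
# error_prompt='please answer y or n', default='n') keeps prompting until the
# first character of the (lower-cased) response is 'y' or 'n', then returns
# that character.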


def extract_by_key(d, keys):
    if isinstance(keys, string_types):
        keys = keys.split()
    result = {}
    for key in keys:
        if key in d:
            result[key] = d[key]
    return result

def read_exports(stream):
    if sys.version_info[0] >= 3:
        # needs to be a text stream
        stream = codecs.getreader('utf-8')(stream)
    # Try to load as JSON, falling back on legacy format
    data = stream.read()
    stream = StringIO(data)
    try:
        jdata = json.load(stream)
        result = jdata['extensions']['python.exports']['exports']
        for group, entries in result.items():
            for k, v in entries.items():
                s = '%s = %s' % (k, v)
                entry = get_export_entry(s)
                assert entry is not None
                entries[k] = entry
        return result
    except Exception:
        stream.seek(0, 0)

    def read_stream(cp, stream):
        if hasattr(cp, 'read_file'):
            cp.read_file(stream)
        else:
            cp.readfp(stream)

    cp = configparser.ConfigParser()
    try:
        read_stream(cp, stream)
    except configparser.MissingSectionHeaderError:
        stream.close()
        data = textwrap.dedent(data)
        stream = StringIO(data)
        read_stream(cp, stream)

    result = {}
    for key in cp.sections():
        result[key] = entries = {}
        for name, value in cp.items(key):
            s = '%s = %s' % (name, value)
            entry = get_export_entry(s)
            assert entry is not None
            #entry.dist = self
            entries[name] = entry
    return result


def write_exports(exports, stream):
    if sys.version_info[0] >= 3:
        # needs to be a text stream
        stream = codecs.getwriter('utf-8')(stream)
    cp = configparser.ConfigParser()
    for k, v in exports.items():
        # TODO check k, v for valid values
        cp.add_section(k)
        for entry in v.values():
            if entry.suffix is None:
                s = entry.prefix
            else:
                s = '%s:%s' % (entry.prefix, entry.suffix)
            if entry.flags:
                s = '%s [%s]' % (s, ', '.join(entry.flags))
            cp.set(k, entry.name, s)
    cp.write(stream)
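
# Sketch of the exports data handled by read_exports/write_exports in its
# .ini form (added for illustration; the names are made up): one section per
# export category, one 'name = prefix:suffix [flags]' entry per export.
#
#     [console_scripts]
#     mytool = mypkg.cli:main
#     othertool = mypkg.other:run [extra1, extra2]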


@contextlib.contextmanager
def tempdir():
    td = tempfile.mkdtemp()
    try:
        yield td
    finally:
        shutil.rmtree(td)

@contextlib.contextmanager
def chdir(d):
    cwd = os.getcwd()
    try:
        os.chdir(d)
        yield
    finally:
        os.chdir(cwd)


@contextlib.contextmanager
def socket_timeout(seconds=15):
    cto = socket.getdefaulttimeout()
    try:
        socket.setdefaulttimeout(seconds)
        yield
    finally:
        socket.setdefaulttimeout(cto)


class cached_property(object):
    def __init__(self, func):
        self.func = func
        #for attr in ('__name__', '__module__', '__doc__'):
        #    setattr(self, attr, getattr(func, attr, None))

    def __get__(self, obj, cls=None):
        if obj is None:
            return self
        value = self.func(obj)
        object.__setattr__(obj, self.func.__name__, value)
        #obj.__dict__[self.func.__name__] = value = self.func(obj)
        return value

def convert_path(pathname):
    """Return 'pathname' as a name that will work on the native filesystem.

    The path is split on '/' and put back together again using the current
    directory separator.  Needed because filenames in the setup script are
    always supplied in Unix style, and have to be converted to the local
    convention before we can actually use them in the filesystem.  Raises
    ValueError on non-Unix-ish systems if 'pathname' either starts or
    ends with a slash.
    """
    if os.sep == '/':
        return pathname
    if not pathname:
        return pathname
    if pathname[0] == '/':
        raise ValueError("path '%s' cannot be absolute" % pathname)
    if pathname[-1] == '/':
        raise ValueError("path '%s' cannot end with '/'" % pathname)

    paths = pathname.split('/')
    while os.curdir in paths:
        paths.remove(os.curdir)
    if not paths:
        return os.curdir
    return os.path.join(*paths)


class FileOperator(object):
    def __init__(self, dry_run=False):
        self.dry_run = dry_run
        self.ensured = set()
        self._init_record()

    def _init_record(self):
        self.record = False
        self.files_written = set()
        self.dirs_created = set()

    def record_as_written(self, path):
        if self.record:
            self.files_written.add(path)

    def newer(self, source, target):
        """Tell if the target is newer than the source.

        Returns true if 'source' exists and is more recently modified than
        'target', or if 'source' exists and 'target' doesn't.

        Returns false if both exist and 'target' is the same age or younger
        than 'source'. Raise PackagingFileError if 'source' does not exist.

        Note that this test is not very accurate: files created in the same
        second will have the same "age".
        """
        if not os.path.exists(source):
            raise DistlibException("file '%r' does not exist" %
                                   os.path.abspath(source))
        if not os.path.exists(target):
            return True

        return os.stat(source).st_mtime > os.stat(target).st_mtime

    def copy_file(self, infile, outfile, check=True):
        """Copy a file respecting dry-run and force flags.
        """
        self.ensure_dir(os.path.dirname(outfile))
        logger.info('Copying %s to %s', infile, outfile)
        if not self.dry_run:
            msg = None
            if check:
                if os.path.islink(outfile):
                    msg = '%s is a symlink' % outfile
                elif os.path.exists(outfile) and not os.path.isfile(outfile):
                    msg = '%s is a non-regular file' % outfile
            if msg:
                raise ValueError(msg + ' which would be overwritten')
            shutil.copyfile(infile, outfile)
        self.record_as_written(outfile)

    def copy_stream(self, instream, outfile, encoding=None):
        assert not os.path.isdir(outfile)
        self.ensure_dir(os.path.dirname(outfile))
        logger.info('Copying stream %s to %s', instream, outfile)
        if not self.dry_run:
            if encoding is None:
                outstream = open(outfile, 'wb')
            else:
                outstream = codecs.open(outfile, 'w', encoding=encoding)
            try:
                shutil.copyfileobj(instream, outstream)
            finally:
                outstream.close()
        self.record_as_written(outfile)

    def write_binary_file(self, path, data):
        self.ensure_dir(os.path.dirname(path))
        if not self.dry_run:
            with open(path, 'wb') as f:
                f.write(data)
        self.record_as_written(path)

    def write_text_file(self, path, data, encoding):
        self.ensure_dir(os.path.dirname(path))
        if not self.dry_run:
            with open(path, 'wb') as f:
                f.write(data.encode(encoding))
        self.record_as_written(path)

    def set_mode(self, bits, mask, files):
        if os.name == 'posix' or (os.name == 'java' and os._name == 'posix'):
            # Set the executable bits (owner, group, and world) on
            # all the files specified.
            for f in files:
                if self.dry_run:
                    logger.info("changing mode of %s", f)
                else:
                    mode = (os.stat(f).st_mode | bits) & mask
                    logger.info("changing mode of %s to %o", f, mode)
                    os.chmod(f, mode)

    set_executable_mode = lambda s, f: s.set_mode(0o555, 0o7777, f)

    def ensure_dir(self, path):
        path = os.path.abspath(path)
        if path not in self.ensured and not os.path.exists(path):
            self.ensured.add(path)
            d, f = os.path.split(path)
            self.ensure_dir(d)
            logger.info('Creating %s', path)
            if not self.dry_run:
                os.mkdir(path)
            if self.record:
                self.dirs_created.add(path)

    def byte_compile(self, path, optimize=False, force=False, prefix=None):
        dpath = cache_from_source(path, not optimize)
        logger.info('Byte-compiling %s to %s', path, dpath)
        if not self.dry_run:
            if force or self.newer(path, dpath):
                if not prefix:
                    diagpath = None
                else:
                    assert path.startswith(prefix)
                    diagpath = path[len(prefix):]
            py_compile.compile(path, dpath, diagpath, True)     # raise error
        self.record_as_written(dpath)
        return dpath

    def ensure_removed(self, path):
        if os.path.exists(path):
            if os.path.isdir(path) and not os.path.islink(path):
                logger.debug('Removing directory tree at %s', path)
                if not self.dry_run:
                    shutil.rmtree(path)
                if self.record:
                    if path in self.dirs_created:
                        self.dirs_created.remove(path)
            else:
                if os.path.islink(path):
                    s = 'link'
                else:
                    s = 'file'
                logger.debug('Removing %s %s', s, path)
                if not self.dry_run:
                    os.remove(path)
                if self.record:
                    if path in self.files_written:
                        self.files_written.remove(path)

    def is_writable(self, path):
        result = False
        while not result:
            if os.path.exists(path):
                result = os.access(path, os.W_OK)
                break
            parent = os.path.dirname(path)
            if parent == path:
                break
            path = parent
        return result

    def commit(self):
        """
        Commit recorded changes, turn off recording, return
        changes.
        """
        assert self.record
        result = self.files_written, self.dirs_created
        self._init_record()
        return result

    def rollback(self):
        if not self.dry_run:
            for f in list(self.files_written):
                if os.path.exists(f):
                    os.remove(f)
            # dirs should all be empty now, except perhaps for
            # __pycache__ subdirs
            # reverse so that subdirs appear before their parents
            dirs = sorted(self.dirs_created, reverse=True)
            for d in dirs:
                flist = os.listdir(d)
                if flist:
                    assert flist == ['__pycache__']
                    sd = os.path.join(d, flist[0])
                    os.rmdir(sd)
                os.rmdir(d)     # should fail if non-empty
        self._init_record()
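
# Illustrative use of FileOperator with recording enabled (a sketch, not
# executed here; the paths are hypothetical):
#
#     fo = FileOperator(dry_run=False)
#     fo.record = True
#     fo.write_text_file('/tmp/example/greeting.txt', u'hello', 'utf-8')
#     files_written, dirs_created = fo.commit()   # also switches recording off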

def resolve(module_name, dotted_path):
    if module_name in sys.modules:
        mod = sys.modules[module_name]
    else:
        mod = __import__(module_name)
    if dotted_path is None:
        result = mod
    else:
        parts = dotted_path.split('.')
        result = getattr(mod, parts.pop(0))
        for p in parts:
            result = getattr(result, p)
    return result
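
# Illustrative examples (not executed here):
#
#     resolve('logging', 'getLogger')   # -> the logging.getLogger function
#     resolve('os', 'path.join')        # -> os.path.join, via the dotted walk
#     resolve('os', None)               # -> the os module itself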


class ExportEntry(object):
    def __init__(self, name, prefix, suffix, flags):
        self.name = name
        self.prefix = prefix
        self.suffix = suffix
        self.flags = flags

    @cached_property
    def value(self):
        return resolve(self.prefix, self.suffix)

    def __repr__(self):  # pragma: no cover
        return '<ExportEntry %s = %s:%s %s>' % (self.name, self.prefix,
                                                self.suffix, self.flags)

    def __eq__(self, other):
        if not isinstance(other, ExportEntry):
            result = False
        else:
            result = (self.name == other.name and
                      self.prefix == other.prefix and
                      self.suffix == other.suffix and
                      self.flags == other.flags)
        return result

    __hash__ = object.__hash__


ENTRY_RE = re.compile(r'''(?P<name>(\w|[-.+])+)
                      \s*=\s*(?P<callable>(\w+)([:\.]\w+)*)
                      \s*(\[\s*(?P<flags>\w+(=\w+)?(,\s*\w+(=\w+)?)*)\s*\])?
                      ''', re.VERBOSE)

def get_export_entry(specification):
    m = ENTRY_RE.search(specification)
    if not m:
        result = None
        if '[' in specification or ']' in specification:
            raise DistlibException("Invalid specification "
                                   "'%s'" % specification)
    else:
        d = m.groupdict()
        name = d['name']
        path = d['callable']
        colons = path.count(':')
        if colons == 0:
            prefix, suffix = path, None
        else:
            if colons != 1:
                raise DistlibException("Invalid specification "
                                       "'%s'" % specification)
            prefix, suffix = path.split(':')
        flags = d['flags']
        if flags is None:
            if '[' in specification or ']' in specification:
                raise DistlibException("Invalid specification "
                                       "'%s'" % specification)
            flags = []
        else:
            flags = [f.strip() for f in flags.split(',')]
        result = ExportEntry(name, prefix, suffix, flags)
    return result
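
# Illustrative parse of an export specification (not executed here; the names
# are hypothetical):
#
#     entry = get_export_entry('foo = pkg.module:func [extra1, extra2]')
#     # entry.name   == 'foo'
#     # entry.prefix == 'pkg.module'
#     # entry.suffix == 'func'
#     # entry.flags  == ['extra1', 'extra2']
#     # entry.value lazily resolves to pkg.module.func on first access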


def get_cache_base(suffix=None):
    """
    Return the default base location for distlib caches. If the directory does
    not exist, it is created. Use the suffix provided for the base directory,
    and default to '.distlib' if it isn't provided.

    On Windows, if LOCALAPPDATA is defined in the environment, then it is
    assumed to be a directory, and will be the parent directory of the result.
    On POSIX, and on Windows if LOCALAPPDATA is not defined, the user's home
    directory (as given by os.path.expanduser('~')) will be the parent directory of
    the result.

    The result is just the directory '.distlib' in the parent directory as
    determined above, or with the name specified with ``suffix``.
    """
    if suffix is None:
        suffix = '.distlib'
    if os.name == 'nt' and 'LOCALAPPDATA' in os.environ:
        result = os.path.expandvars('$localappdata')
    else:
        # Assume posix, or old Windows
        result = os.path.expanduser('~')
    # we use 'isdir' instead of 'exists', because we want to
    # fail if there's a file with that name
    if os.path.isdir(result):
        usable = os.access(result, os.W_OK)
        if not usable:
            logger.warning('Directory exists but is not writable: %s', result)
    else:
        try:
            os.makedirs(result)
            usable = True
        except OSError:
            logger.warning('Unable to create %s', result, exc_info=True)
            usable = False
    if not usable:
        result = tempfile.mkdtemp()
        logger.warning('Default location unusable, using %s', result)
    return os.path.join(result, suffix)


def path_to_cache_dir(path):
    """
    Convert an absolute path to a directory name for use in a cache.

    The algorithm used is:

    #. On Windows, any ``':'`` in the drive is replaced with ``'---'``.
    #. Any occurrence of ``os.sep`` is replaced with ``'--'``.
    #. ``'.cache'`` is appended.
    """
    d, p = os.path.splitdrive(os.path.abspath(path))
    if d:
        d = d.replace(':', '---')
    p = p.replace(os.sep, '--')
    return d + p + '.cache'
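
# Illustrative conversion (not executed here):
#
#     path_to_cache_dir('/home/user/lib/libfoo.so')
#     # -> '--home--user--lib--libfoo.so.cache' on POSIX; on Windows the
#     #    drive's ':' becomes '---' before the separators are rewritten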


def ensure_slash(s):
    if not s.endswith('/'):
        return s + '/'
    return s


def parse_credentials(netloc):
    username = password = None
    if '@' in netloc:
        prefix, netloc = netloc.split('@', 1)
        if ':' not in prefix:
            username = prefix
        else:
            username, password = prefix.split(':', 1)
    return username, password, netloc
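
# Illustrative examples (not executed here):
#
#     parse_credentials('user:secret@example.com')  # -> ('user', 'secret', 'example.com')
#     parse_credentials('user@example.com')         # -> ('user', None, 'example.com')
#     parse_credentials('example.com')              # -> (None, None, 'example.com')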


def get_process_umask():
    result = os.umask(0o22)
    os.umask(result)
    return result

def is_string_sequence(seq):
    result = True
    i = None
    for i, s in enumerate(seq):
        if not isinstance(s, string_types):
            result = False
            break
    assert i is not None
    return result

PROJECT_NAME_AND_VERSION = re.compile('([a-z0-9_]+([.-][a-z_][a-z0-9_]*)*)-'
                                      '([a-z0-9_.+-]+)', re.I)
PYTHON_VERSION = re.compile(r'-py(\d\.?\d?)')


def split_filename(filename, project_name=None):
    """
    Extract name, version, python version from a filename (no extension)

    Return name, version, pyver or None
    """
    result = None
    pyver = None
    filename = unquote(filename).replace(' ', '-')
    m = PYTHON_VERSION.search(filename)
    if m:
        pyver = m.group(1)
        filename = filename[:m.start()]
    if project_name and len(filename) > len(project_name) + 1:
        m = re.match(re.escape(project_name) + r'\b', filename)
        if m:
            n = m.end()
            result = filename[:n], filename[n + 1:], pyver
    if result is None:
        m = PROJECT_NAME_AND_VERSION.match(filename)
        if m:
            result = m.group(1), m.group(3), pyver
    return result
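
# Illustrative example (not executed here; the filename is hypothetical):
#
#     split_filename('example_dist-0.1-py2.7')
#     # -> ('example_dist', '0.1', '2.7')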

# Allow spaces in name because of legacy dists like "Twisted Core"
NAME_VERSION_RE = re.compile(r'(?P<name>[\w .-]+)\s*'
                             r'\(\s*(?P<ver>[^\s)]+)\)$')

def parse_name_and_version(p):
    """
    A utility method used to get name and version from a string.

    From e.g. a Provides-Dist value.

    :param p: A value in the form 'foo (1.0)'.
    :return: The name and version as a tuple.
    """
    m = NAME_VERSION_RE.match(p)
    if not m:
        raise DistlibException('Ill-formed name/version string: \'%s\'' % p)
    d = m.groupdict()
    return d['name'].strip().lower(), d['ver']
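
# Illustrative example (not executed here):
#
#     parse_name_and_version('Example-Dist (1.0)')
#     # -> ('example-dist', '1.0')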

def get_extras(requested, available):
    result = set()
    requested = set(requested or [])
    available = set(available or [])
    if '*' in requested:
        requested.remove('*')
        result |= available
    for r in requested:
        if r == '-':
            result.add(r)
        elif r.startswith('-'):
            unwanted = r[1:]
            if unwanted not in available:
                logger.warning('undeclared extra: %s' % unwanted)
            if unwanted in result:
                result.remove(unwanted)
        else:
            if r not in available:
                logger.warning('undeclared extra: %s' % r)
            result.add(r)
    return result
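
# Illustrative example (not executed here):
#
#     get_extras(['*', '-tests'], ['docs', 'tests'])
#     # -> set(['docs'])   ('*' selects everything, '-tests' then removes it)
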
#
# Extended metadata functionality
#

def _get_external_data(url):
    result = {}
    try:
        # urlopen might fail if it runs into redirections,
        # because of Python issue #13696. Fixed in locators
        # using a custom redirect handler.
        resp = urlopen(url)
        headers = resp.info()
        ct = headers.get('Content-Type')
        if not ct.startswith('application/json'):
            logger.debug('Unexpected response for JSON request: %s', ct)
        else:
            reader = codecs.getreader('utf-8')(resp)
            #data = reader.read().decode('utf-8')
            #result = json.loads(data)
            result = json.load(reader)
    except Exception as e:
        logger.exception('Failed to get external data for %s: %s', url, e)
    return result

_external_data_base_url = 'https://www.red-dove.com/pypi/projects/'

def get_project_data(name):
    url = '%s/%s/project.json' % (name[0].upper(), name)
    url = urljoin(_external_data_base_url, url)
    result = _get_external_data(url)
    return result

def get_package_data(name, version):
    url = '%s/%s/package-%s.json' % (name[0].upper(), name, version)
    url = urljoin(_external_data_base_url, url)
    return _get_external_data(url)


class Cache(object):
    """
    A class implementing a cache for resources that need to live in the file
    system, e.g. shared libraries. This class was moved here from the resources
    module because it could be used by other modules, e.g. the wheel module.
    """

    def __init__(self, base):
        """
        Initialise an instance.

        :param base: The base directory where the cache should be located.
        """
        # we use 'isdir' instead of 'exists', because we want to
        # fail if there's a file with that name
        if not os.path.isdir(base):  # pragma: no cover
            os.makedirs(base)
        if (os.stat(base).st_mode & 0o77) != 0:
            logger.warning('Directory \'%s\' is not private', base)
        self.base = os.path.abspath(os.path.normpath(base))

    def prefix_to_dir(self, prefix):
        """
        Converts a resource prefix to a directory name in the cache.
        """
        return path_to_cache_dir(prefix)

    def clear(self):
        """
        Clear the cache.
        """
        not_removed = []
        for fn in os.listdir(self.base):
            fn = os.path.join(self.base, fn)
            try:
                if os.path.islink(fn) or os.path.isfile(fn):
                    os.remove(fn)
                elif os.path.isdir(fn):
                    shutil.rmtree(fn)
            except Exception:
                not_removed.append(fn)
        return not_removed


class EventMixin(object):
    """
    A very simple publish/subscribe system.
    """
    def __init__(self):
        self._subscribers = {}

    def add(self, event, subscriber, append=True):
        """
        Add a subscriber for an event.

        :param event: The name of an event.
        :param subscriber: The subscriber to be added (and called when the
                           event is published).
        :param append: Whether to append or prepend the subscriber to an
                       existing subscriber list for the event.
        """
        subs = self._subscribers
        if event not in subs:
            subs[event] = deque([subscriber])
        else:
            sq = subs[event]
            if append:
                sq.append(subscriber)
            else:
                sq.appendleft(subscriber)

    def remove(self, event, subscriber):
        """
        Remove a subscriber for an event.

        :param event: The name of an event.
        :param subscriber: The subscriber to be removed.
        """
        subs = self._subscribers
        if event not in subs:
            raise ValueError('No subscribers: %r' % event)
        subs[event].remove(subscriber)

    def get_subscribers(self, event):
        """
        Return an iterator for the subscribers for an event.

        :param event: The event to return subscribers for.
        """
        return iter(self._subscribers.get(event, ()))

    def publish(self, event, *args, **kwargs):
        """
        Publish an event and return a list of values returned by its
        subscribers.

        :param event: The event to publish.
        :param args: The positional arguments to pass to the event's
                     subscribers.
        :param kwargs: The keyword arguments to pass to the event's
                       subscribers.
        """
        result = []
        for subscriber in self.get_subscribers(event):
            try:
                value = subscriber(event, *args, **kwargs)
            except Exception:
                logger.exception('Exception during event publication')
                value = None
            result.append(value)
        logger.debug('publish %s: args = %s, kwargs = %s, result = %s',
                     event, args, kwargs, result)
        return result
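
# Illustrative publish/subscribe round trip (a sketch, not executed here):
#
#     class Publisher(EventMixin):
#         pass
#
#     pub = Publisher()
#     pub.add('ping', lambda event, who=None: 'pong to %s' % who)
#     pub.publish('ping', who='world')   # -> ['pong to world']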

#
# Simple sequencing
#
class Sequencer(object):
    def __init__(self):
        self._preds = {}
        self._succs = {}
        self._nodes = set()     # nodes with no preds/succs

    def add_node(self, node):
        self._nodes.add(node)

    def remove_node(self, node, edges=False):
        if node in self._nodes:
            self._nodes.remove(node)
        if edges:
            for p in set(self._preds.get(node, ())):
                self.remove(p, node)
            for s in set(self._succs.get(node, ())):
                self.remove(node, s)
            # Remove empties
            for k, v in list(self._preds.items()):
                if not v:
                    del self._preds[k]
            for k, v in list(self._succs.items()):
                if not v:
                    del self._succs[k]

    def add(self, pred, succ):
        assert pred != succ
        self._preds.setdefault(succ, set()).add(pred)
        self._succs.setdefault(pred, set()).add(succ)

    def remove(self, pred, succ):
        assert pred != succ
        try:
            preds = self._preds[succ]
            succs = self._succs[pred]
        except KeyError:  # pragma: no cover
            raise ValueError('%r not a successor of anything' % succ)
        try:
            preds.remove(pred)
            succs.remove(succ)
        except KeyError:  # pragma: no cover
            raise ValueError('%r not a successor of %r' % (succ, pred))

    def is_step(self, step):
        return (step in self._preds or step in self._succs or
                step in self._nodes)

    def get_steps(self, final):
        if not self.is_step(final):
            raise ValueError('Unknown: %r' % final)
        result = []
        todo = []
        seen = set()
        todo.append(final)
        while todo:
            step = todo.pop(0)
            if step in seen:
                # if a step was already seen,
                # move it to the end (so it will appear earlier
                # when reversed on return) ... but not for the
                # final step, as that would be confusing for
                # users
                if step != final:
                    result.remove(step)
                    result.append(step)
            else:
                seen.add(step)
                result.append(step)
                preds = self._preds.get(step, ())
                todo.extend(preds)
        return reversed(result)

    @property
    def strong_connections(self):
        #http://en.wikipedia.org/wiki/Tarjan%27s_strongly_connected_components_algorithm
        index_counter = [0]
        stack = []
        lowlinks = {}
        index = {}
        result = []

        graph = self._succs

        def strongconnect(node):
            # set the depth index for this node to the smallest unused index
            index[node] = index_counter[0]
            lowlinks[node] = index_counter[0]
            index_counter[0] += 1
            stack.append(node)

            # Consider successors
            try:
                successors = graph[node]
            except Exception:
                successors = []
            for successor in successors:
                if successor not in lowlinks:
                    # Successor has not yet been visited
                    strongconnect(successor)
                    lowlinks[node] = min(lowlinks[node],lowlinks[successor])
                elif successor in stack:
                    # the successor is in the stack and hence in the current
                    # strongly connected component (SCC)
                    lowlinks[node] = min(lowlinks[node],index[successor])

            # If `node` is a root node, pop the stack and generate an SCC
            if lowlinks[node] == index[node]:
                connected_component = []

                while True:
                    successor = stack.pop()
                    connected_component.append(successor)
                    if successor == node: break
                component = tuple(connected_component)
                # storing the result
                result.append(component)

        for node in graph:
            if node not in lowlinks:
                strongconnect(node)

        return result

    @property
    def dot(self):
        result = ['digraph G {']
        for succ in self._preds:
            preds = self._preds[succ]
            for pred in preds:
                result.append('  %s -> %s;' % (pred, succ))
        for node in self._nodes:
            result.append('  %s;' % node)
        result.append('}')
        return '\n'.join(result)
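
# Illustrative ordering of dependent steps (not executed here):
#
#     seq = Sequencer()
#     seq.add('build', 'test')      # 'test' must come after 'build'
#     seq.add('test', 'release')    # 'release' must come after 'test'
#     list(seq.get_steps('release'))   # -> ['build', 'test', 'release']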

#
# Unarchiving functionality for zip, tar, tgz, tbz, whl
#

ARCHIVE_EXTENSIONS = ('.tar.gz', '.tar.bz2', '.tar', '.zip',
                      '.tgz', '.tbz', '.whl')

def unarchive(archive_filename, dest_dir, format=None, check=True):

    def check_path(path):
        if not isinstance(path, text_type):
            path = path.decode('utf-8')
        p = os.path.abspath(os.path.join(dest_dir, path))
        if not p.startswith(dest_dir) or p[plen] != os.sep:
            raise ValueError('path outside destination: %r' % p)

    dest_dir = os.path.abspath(dest_dir)
    plen = len(dest_dir)
    archive = None
    if format is None:
        if archive_filename.endswith(('.zip', '.whl')):
            format = 'zip'
        elif archive_filename.endswith(('.tar.gz', '.tgz')):
            format = 'tgz'
            mode = 'r:gz'
        elif archive_filename.endswith(('.tar.bz2', '.tbz')):
            format = 'tbz'
            mode = 'r:bz2'
        elif archive_filename.endswith('.tar'):
            format = 'tar'
            mode = 'r'
        else:  # pragma: no cover
            raise ValueError('Unknown format for %r' % archive_filename)
    try:
        if format == 'zip':
            archive = ZipFile(archive_filename, 'r')
            if check:
                names = archive.namelist()
                for name in names:
                    check_path(name)
        else:
            archive = tarfile.open(archive_filename, mode)
            if check:
                names = archive.getnames()
                for name in names:
                    check_path(name)
        if format != 'zip' and sys.version_info[0] < 3:
            # See Python issue 17153. If the dest path contains Unicode,
            # tarfile extraction fails on Python 2.x if a member path name
            # contains non-ASCII characters - it leads to an implicit
            # bytes -> unicode conversion using ASCII to decode.
            for tarinfo in archive.getmembers():
                if not isinstance(tarinfo.name, text_type):
                    tarinfo.name = tarinfo.name.decode('utf-8')
        archive.extractall(dest_dir)

    finally:
        if archive:
            archive.close()


def zip_dir(directory):
    """zip a directory tree into a BytesIO object"""
    result = io.BytesIO()
    dlen = len(directory)
    with ZipFile(result, "w") as zf:
        for root, dirs, files in os.walk(directory):
            for name in files:
                full = os.path.join(root, name)
                rel = root[dlen:]
                dest = os.path.join(rel, name)
                zf.write(full, dest)
    return result
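
# Illustrative use (not executed here; the directory path is hypothetical):
#
#     buf = zip_dir('/tmp/build/wheel-contents')
#     with open('/tmp/build/archive.zip', 'wb') as f:
#         f.write(buf.getvalue())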

#
# Simple progress bar
#

UNITS = ('', 'K', 'M', 'G', 'T', 'P')


class Progress(object):
    unknown = 'UNKNOWN'

    def __init__(self, minval=0, maxval=100):
        assert maxval is None or maxval >= minval
        self.min = self.cur = minval
        self.max = maxval
        self.started = None
        self.elapsed = 0
        self.done = False

    def update(self, curval):
        assert self.min <= curval
        assert self.max is None or curval <= self.max
        self.cur = curval
        now = time.time()
        if self.started is None:
            self.started = now
        else:
            self.elapsed = now - self.started

    def increment(self, incr):
        assert incr >= 0
        self.update(self.cur + incr)

    def start(self):
        self.update(self.min)
        return self

    def stop(self):
        if self.max is not None:
            self.update(self.max)
        self.done = True

    @property
    def maximum(self):
        return self.unknown if self.max is None else self.max

    @property
    def percentage(self):
        if self.done:
            result = '100 %'
        elif self.max is None:
            result = ' ?? %'
        else:
            v = 100.0 * (self.cur - self.min) / (self.max - self.min)
            result = '%3d %%' % v
        return result

    def format_duration(self, duration):
        if (duration <= 0) and self.max is None or self.cur == self.min:
            result = '??:??:??'
        #elif duration < 1:
        #    result = '--:--:--'
        else:
            result = time.strftime('%H:%M:%S', time.gmtime(duration))
        return result

    @property
    def ETA(self):
        if self.done:
            prefix = 'Done'
            t = self.elapsed
            #import pdb; pdb.set_trace()
        else:
            prefix = 'ETA '
            if self.max is None:
                t = -1
            elif self.elapsed == 0 or (self.cur == self.min):
                t = 0
            else:
                #import pdb; pdb.set_trace()
                t = float(self.max - self.min)
                t /= self.cur - self.min
                t = (t - 1) * self.elapsed
        return '%s: %s' % (prefix, self.format_duration(t))

    @property
    def speed(self):
        if self.elapsed == 0:
            result = 0.0
        else:
            result = (self.cur - self.min) / self.elapsed
        for unit in UNITS:
            if result < 1000:
                break
            result /= 1000.0
        return '%d %sB/s' % (result, unit)
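
# Illustrative use of Progress (not executed here):
#
#     p = Progress(maxval=1000).start()
#     p.increment(250)
#     p.percentage    # -> ' 25 %'
#     p.stop()
#     p.percentage    # -> '100 %'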

#
# Glob functionality
#

RICH_GLOB = re.compile(r'\{([^}]*)\}')
_CHECK_RECURSIVE_GLOB = re.compile(r'[^/\\,{]\*\*|\*\*[^/\\,}]')
_CHECK_MISMATCH_SET = re.compile(r'^[^{]*\}|\{[^}]*$')


def iglob(path_glob):
    """Extended globbing function that supports ** and {opt1,opt2,opt3}."""
    if _CHECK_RECURSIVE_GLOB.search(path_glob):
        msg = """invalid glob %r: recursive glob "**" must be used alone"""
        raise ValueError(msg % path_glob)
    if _CHECK_MISMATCH_SET.search(path_glob):
        msg = """invalid glob %r: mismatching set marker '{' or '}'"""
        raise ValueError(msg % path_glob)
    return _iglob(path_glob)


def _iglob(path_glob):
    rich_path_glob = RICH_GLOB.split(path_glob, 1)
    if len(rich_path_glob) > 1:
        assert len(rich_path_glob) == 3, rich_path_glob
        prefix, set, suffix = rich_path_glob
        for item in set.split(','):
            for path in _iglob(''.join((prefix, item, suffix))):
                yield path
    else:
        if '**' not in path_glob:
            for item in std_iglob(path_glob):
                yield item
        else:
            prefix, radical = path_glob.split('**', 1)
            if prefix == '':
                prefix = '.'
            if radical == '':
                radical = '*'
            else:
                # support both '/' and '\' as the separator following '**'
                radical = radical.lstrip('/')
                radical = radical.lstrip('\\')
            for path, dir, files in os.walk(prefix):
                path = os.path.normpath(path)
                for fn in _iglob(os.path.join(path, radical)):
                    yield fn
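
# Illustrative patterns (not executed here; the paths are hypothetical):
#
#     list(iglob('src/**/*.py'))              # recursive match below 'src'
#     list(iglob('docs/{index,intro}.rst'))   # set expansion over alternatives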

if ssl:
    from .compat import (HTTPSHandler as BaseHTTPSHandler, match_hostname,
                         CertificateError)


#
# HTTPSConnection which verifies certificates/matches domains
#

    class HTTPSConnection(httplib.HTTPSConnection):
        ca_certs = None # set this to the path to the certs file (.pem)
        check_domain = True # only used if ca_certs is not None

        # noinspection PyPropertyAccess
        def connect(self):
            sock = socket.create_connection((self.host, self.port), self.timeout)
            if getattr(self, '_tunnel_host', False):
                self.sock = sock
                self._tunnel()

            if not hasattr(ssl, 'SSLContext'):
                # For 2.x
                if self.ca_certs:
                    cert_reqs = ssl.CERT_REQUIRED
                else:
                    cert_reqs = ssl.CERT_NONE
                self.sock = ssl.wrap_socket(sock, self.key_file, self.cert_file,
                                            cert_reqs=cert_reqs,
                                            ssl_version=ssl.PROTOCOL_SSLv23,
                                            ca_certs=self.ca_certs)
            else:  # pragma: no cover
                context = ssl.SSLContext(ssl.PROTOCOL_SSLv23)
                context.options |= ssl.OP_NO_SSLv2
                if self.cert_file:
                    context.load_cert_chain(self.cert_file, self.key_file)
                kwargs = {}
                if self.ca_certs:
                    context.verify_mode = ssl.CERT_REQUIRED
                    context.load_verify_locations(cafile=self.ca_certs)
                    if getattr(ssl, 'HAS_SNI', False):
                        kwargs['server_hostname'] = self.host
                self.sock = context.wrap_socket(sock, **kwargs)
            if self.ca_certs and self.check_domain:
                try:
                    match_hostname(self.sock.getpeercert(), self.host)
                    logger.debug('Host verified: %s', self.host)
                except CertificateError:  # pragma: no cover
                    self.sock.shutdown(socket.SHUT_RDWR)
                    self.sock.close()
                    raise

    class HTTPSHandler(BaseHTTPSHandler):
        def __init__(self, ca_certs, check_domain=True):
            BaseHTTPSHandler.__init__(self)
            self.ca_certs = ca_certs
            self.check_domain = check_domain

        def _conn_maker(self, *args, **kwargs):
            """
            This is called to create a connection instance. Normally you'd
            pass a connection class to do_open, but it doesn't actually check for
            a class, and just expects a callable. As long as we behave just as a
            constructor would have, we should be OK. If it ever changes so that
            we *must* pass a class, we'll create an UnsafeHTTPSConnection class
            which just sets check_domain to False in the class definition, and
            choose which one to pass to do_open.
            """
            result = HTTPSConnection(*args, **kwargs)
            if self.ca_certs:
                result.ca_certs = self.ca_certs
                result.check_domain = self.check_domain
            return result

        def https_open(self, req):
            try:
                return self.do_open(self._conn_maker, req)
            except URLError as e:
                if 'certificate verify failed' in str(e.reason):
                    raise CertificateError('Unable to verify server certificate '
                                           'for %s' % req.host)
                else:
                    raise

    #
    # To guard against mixing HTTP traffic with HTTPS (examples: a Man-In-The-
    # Middle proxy using HTTP listens on port 443, or an index mistakenly serves
    # HTML containing an http://xyz link when it should be https://xyz),
    # you can use the following handler class, which does not allow HTTP traffic.
    #
    # It works by inheriting from HTTPHandler - so build_opener won't add a
    # handler for HTTP itself.
    #
    class HTTPSOnlyHandler(HTTPSHandler, HTTPHandler):
        def http_open(self, req):
            raise URLError('Unexpected HTTP request on what should be a secure '
                           'connection: %s' % req)

#
# XML-RPC with timeouts
#

_ver_info = sys.version_info[:2]

if _ver_info == (2, 6):
    class HTTP(httplib.HTTP):
        def __init__(self, host='', port=None, **kwargs):
            if port == 0:   # 0 means use port 0, not the default port
                port = None
            self._setup(self._connection_class(host, port, **kwargs))


    if ssl:
        class HTTPS(httplib.HTTPS):
            def __init__(self, host='', port=None, **kwargs):
                if port == 0:   # 0 means use port 0, not the default port
                    port = None
                self._setup(self._connection_class(host, port, **kwargs))


class Transport(xmlrpclib.Transport):
    def __init__(self, timeout, use_datetime=0):
        self.timeout = timeout
        xmlrpclib.Transport.__init__(self, use_datetime)

    def make_connection(self, host):
        h, eh, x509 = self.get_host_info(host)
        if _ver_info == (2, 6):
            result = HTTP(h, timeout=self.timeout)
        else:
            if not self._connection or host != self._connection[0]:
                self._extra_headers = eh
                self._connection = host, httplib.HTTPConnection(h)
            result = self._connection[1]
        return result

if ssl:
    class SafeTransport(xmlrpclib.SafeTransport):
        def __init__(self, timeout, use_datetime=0):
            self.timeout = timeout
            xmlrpclib.SafeTransport.__init__(self, use_datetime)

        def make_connection(self, host):
            h, eh, kwargs = self.get_host_info(host)
            if not kwargs:
                kwargs = {}
            kwargs['timeout'] = self.timeout
            if _ver_info == (2, 6):
                result = HTTPS(host, None, **kwargs)
            else:
                if not self._connection or host != self._connection[0]:
                    self._extra_headers = eh
                    self._connection = host, httplib.HTTPSConnection(h, None,
                                                                     **kwargs)
                result = self._connection[1]
            return result


class ServerProxy(xmlrpclib.ServerProxy):
    def __init__(self, uri, **kwargs):
        self.timeout = timeout = kwargs.pop('timeout', None)
        # The above classes only come into play if a timeout
        # is specified
        if timeout is not None:
            scheme, _ = splittype(uri)
            use_datetime = kwargs.get('use_datetime', 0)
            if scheme == 'https':
                tcls = SafeTransport
            else:
                tcls = Transport
            kwargs['transport'] = t = tcls(timeout, use_datetime=use_datetime)
            self.transport = t
        xmlrpclib.ServerProxy.__init__(self, uri, **kwargs)

#
# CSV functionality. This is provided because on 2.x, the csv module can't
# handle Unicode. However, we need to deal with Unicode in e.g. RECORD files.
#

def _csv_open(fn, mode, **kwargs):
    if sys.version_info[0] < 3:
        mode += 'b'
    else:
        kwargs['newline'] = ''
    return open(fn, mode, **kwargs)


class CSVBase(object):
    defaults = {
        'delimiter': str(','),      # The strs are used because we need native
        'quotechar': str('"'),      # str in the csv API (2.x won't take
        'lineterminator': str('\n') # Unicode)
    }

    def __enter__(self):
        return self

    def __exit__(self, *exc_info):
        self.stream.close()


class CSVReader(CSVBase):
    def __init__(self, **kwargs):
        if 'stream' in kwargs:
            stream = kwargs['stream']
            if sys.version_info[0] >= 3:
                # needs to be a text stream
                stream = codecs.getreader('utf-8')(stream)
            self.stream = stream
        else:
            self.stream = _csv_open(kwargs['path'], 'r')
        self.reader = csv.reader(self.stream, **self.defaults)

    def __iter__(self):
        return self

    def next(self):
        result = next(self.reader)
        if sys.version_info[0] < 3:
            for i, item in enumerate(result):
                if not isinstance(item, text_type):
                    result[i] = item.decode('utf-8')
        return result

    __next__ = next

class CSVWriter(CSVBase):
    def __init__(self, fn, **kwargs):
        self.stream = _csv_open(fn, 'w')
        self.writer = csv.writer(self.stream, **self.defaults)

    def writerow(self, row):
        if sys.version_info[0] < 3:
            r = []
            for item in row:
                if isinstance(item, text_type):
                    item = item.encode('utf-8')
                r.append(item)
            row = r
        self.writer.writerow(row)
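
# Illustrative round trip through the CSV helpers (a sketch, not executed
# here; the path and row values are hypothetical):
#
#     with CSVWriter('/tmp/RECORD') as writer:
#         writer.writerow(['pkg/__init__.py', 'sha256=abc123', '123'])
#     with CSVReader(path='/tmp/RECORD') as reader:
#         rows = list(reader)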

#
#   Configurator functionality
#

class Configurator(BaseConfigurator):

    value_converters = dict(BaseConfigurator.value_converters)
    value_converters['inc'] = 'inc_convert'

    def __init__(self, config, base=None):
        super(Configurator, self).__init__(config)
        self.base = base or os.getcwd()

    def configure_custom(self, config):
        def convert(o):
            if isinstance(o, (list, tuple)):
                result = type(o)([convert(i) for i in o])
            elif isinstance(o, dict):
                if '()' in o:
                    result = self.configure_custom(o)
                else:
                    result = {}
                    for k in o:
                        result[k] = convert(o[k])
            else:
                result = self.convert(o)
            return result

        c = config.pop('()')
        if not callable(c):
            c = self.resolve(c)
        props = config.pop('.', None)
        # Check for valid identifiers
        args = config.pop('[]', ())
        if args:
            args = tuple([convert(o) for o in args])
        items = [(k, convert(config[k])) for k in config if valid_ident(k)]
        kwargs = dict(items)
        result = c(*args, **kwargs)
        if props:
            for n, v in props.items():
                setattr(result, n, convert(v))
        return result

    def __getitem__(self, key):
        result = self.config[key]
        if isinstance(result, dict) and '()' in result:
            self.config[key] = result = self.configure_custom(result)
        return result

    def inc_convert(self, value):
        """Default converter for the inc:// protocol."""
        if not os.path.isabs(value):
            value = os.path.join(self.base, value)
        with codecs.open(value, 'r', encoding='utf-8') as f:
            result = json.load(f)
        return result

#
# Mixin for running subprocesses and capturing their output
#

class SubprocessMixin(object):
    def __init__(self, verbose=False, progress=None):
        self.verbose = verbose
        self.progress = progress

    def reader(self, stream, context):
        """
        Read lines from a subprocess' output stream and either pass to a progress
        callable (if specified) or write progress information to sys.stderr.
        """
        progress = self.progress
        verbose = self.verbose
        while True:
            s = stream.readline()
            if not s:
                break
            if progress is not None:
                progress(s, context)
            else:
                if not verbose:
                    sys.stderr.write('.')
                else:
                    sys.stderr.write(s.decode('utf-8'))
                sys.stderr.flush()
        stream.close()

    def run_command(self, cmd, **kwargs):
        p = subprocess.Popen(cmd, stdout=subprocess.PIPE,
                             stderr=subprocess.PIPE, **kwargs)
        t1 = threading.Thread(target=self.reader, args=(p.stdout, 'stdout'))
        t1.start()
        t2 = threading.Thread(target=self.reader, args=(p.stderr, 'stderr'))
        t2.start()
        p.wait()
        t1.join()
        t2.join()
        if self.progress is not None:
            self.progress('done.', 'main')
        elif self.verbose:
            sys.stderr.write('done.\n')
        return p


def normalize_name(name):
    """Normalize a python package name a la PEP 503"""
    # https://www.python.org/dev/peps/pep-0503/#normalized-names
    return re.sub('[-_.]+', '-', name).lower()
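
# Illustrative examples (not executed here):
#
#     normalize_name('Django')             # -> 'django'
#     normalize_name('zope.interface')     # -> 'zope-interface'
#     normalize_name('Friendly__Bard')     # -> 'friendly-bard'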
PKZ46cycy+site-packages/pip/_vendor/distlib/wheel.pyonu[
abc@sddlmZddlZddlZddlZddlZddlmZddl	Z	ddl
Z
ddlZddlZddl
Z
ddlZddlZddlZddlZddlZddlZddlmZmZddlmZmZmZmZmZddlmZddlm Z m!Z!dd	l"m#Z#m$Z$m%Z%m&Z&m'Z'm(Z(m)Z)m*Z*m+Z+dd
l,m-Z-m.Z.ej/e0Z1e2a3e4edrdZ5n9ej6j7d
rdZ5nej6dkrdZ5ndZ5ej8dZ9e9rdej:d Z9nde9Z;e5e9Z<ej"j=j>ddj>ddZ?ej8dZ@e@oze@j7dre@j>ddZ@ndZAeAZ@[AejBdejCejDBZEejBdejCejDBZFejBdZGejBdZHd ZId!ZJe
jKd"kr$d#ZLn	d$ZLd%eMfd&YZNeNZOd'eMfd(YZPd)ZQeQZR[Qe2d*ZSdS(+i(tunicode_literalsN(tmessage_from_filei(t__version__tDistlibException(t	sysconfigtZipFiletfsdecodet	text_typetfilter(tInstalledDistribution(tMetadatatMETADATA_FILENAME(	tFileOperatortconvert_patht	CSVReadert	CSVWritertCachetcached_propertytget_cache_basetread_exportsttempdir(tNormalizedVersiontUnsupportedVersionErrorupypy_version_infouppujavaujyucliuipucpupy_version_nodotu%s%siupyu-u_u.uSOABIucpython-cCs|dtg}tjdr+|jdntjdrJ|jdntjddkro|jdnd	j|S(
NucpuPy_DEBUGudu
WITH_PYMALLOCumuPy_UNICODE_SIZEiuuu(t
VER_SUFFIXRtget_config_vartappendtjoin(tparts((s=/usr/lib/python2.7/site-packages/pip/_vendor/distlib/wheel.pyt_derive_abi;suz
(?P[^-]+)
-(?P\d+[^-]*)
(-(?P\d+[^-]*))?
-(?P\w+\d+(\.\w+\d+)*)
-(?P\w+)
-(?P\w+(\.\w+)*)
\.whl$
u7
(?P[^-]+)
-(?P\d+[^-]*)
(-(?P\d+[^-]*))?$
s
\s*#![^\r\n]*s^(\s*#!("[^"]+"|\S+))\s+(.*)$s#!pythons	#!pythonwu/cCs|S(N((to((s=/usr/lib/python2.7/site-packages/pip/_vendor/distlib/wheel.pyt]tcCs|jtjdS(Nu/(treplacetostsep(R((s=/usr/lib/python2.7/site-packages/pip/_vendor/distlib/wheel.pyR_RtMountercBs8eZdZdZdZddZdZRS(cCsi|_i|_dS(N(t
impure_wheelstlibs(tself((s=/usr/lib/python2.7/site-packages/pip/_vendor/distlib/wheel.pyt__init__cs	cCs!||j|<|jj|dS(N(R$R%tupdate(R&tpathnamet
extensions((s=/usr/lib/python2.7/site-packages/pip/_vendor/distlib/wheel.pytaddgs
cCsI|jj|}x0|D](\}}||jkr|j|=qqWdS(N(R$tpopR%(R&R)R*tktv((s=/usr/lib/python2.7/site-packages/pip/_vendor/distlib/wheel.pytremovekscCs"||jkr|}nd}|S(N(R%tNone(R&tfullnametpathtresult((s=/usr/lib/python2.7/site-packages/pip/_vendor/distlib/wheel.pytfind_moduleqs	cCs|tjkrtj|}nx||jkrAtd|ntj||j|}||_|jdd}t|dkr|d|_	n|S(Nuunable to find extension for %su.ii(
tsystmodulesR%tImportErrortimptload_dynamict
__loader__trsplittlent__package__(R&R1R3R((s=/usr/lib/python2.7/site-packages/pip/_vendor/distlib/wheel.pytload_modulexs	N(t__name__t
__module__R'R+R/R0R4R>(((s=/usr/lib/python2.7/site-packages/pip/_vendor/distlib/wheel.pyR#bs
			tWheelcBseZdZdZdZdeedZedZ	edZ
edZedZ
dZed	Zd
ZddZdZd
ZdZdddZdZdZdZdZdZedZdZdZddZRS(u@
    Class to build and install from Wheel files (PEP 427).
    iusha256cCs||_||_d|_tg|_dg|_dg|_tj|_	|dkr{d|_d|_|j
|_nEtj|}|r|jd}|d|_|djdd	|_|d
|_|j
|_ntjj|\}}tj|}|s!td|n|r?tjj||_	n||_|jd}|d|_|d|_|d
|_|djd
|_|djd
|_|djd
|_dS(uB
        Initialise an instance using a (valid) filename.
        uunoneuanyudummyu0.1unmuvnu_u-ubnuInvalid name or filename: %rupyu.ubiuarN(tsignt
should_verifytbuildvertPYVERtpyvertabitarchR!tgetcwdtdirnameR0tnametversiontfilenamet	_filenametNAME_VERSION_REtmatcht	groupdictR R2tsplittFILENAME_RERtabspath(R&RMRBtverifytmtinfoRJ((s=/usr/lib/python2.7/site-packages/pip/_vendor/distlib/wheel.pyR'sB					


	


cCs|jrd|j}nd}dj|j}dj|j}dj|j}|jjdd}d|j|||||fS(uJ
        Build and return a filename from the various components.
        u-uu.u_u%s-%s%s-%s-%s-%s.whl(RDRRFRGRHRLR RK(R&RDRFRGRHRL((s=/usr/lib/python2.7/site-packages/pip/_vendor/distlib/wheel.pyRMs	cCs+tjj|j|j}tjj|S(N(R!R2RRJRMtisfile(R&R2((s=/usr/lib/python2.7/site-packages/pip/_vendor/distlib/wheel.pytexistssccsNxG|jD]<}x3|jD](}x|jD]}|||fVq*WqWq
WdS(N(RFRGRH(R&RFRGRH((s=/usr/lib/python2.7/site-packages/pip/_vendor/distlib/wheel.pyttagsscCs8tjj|j|j}d|j|jf}d|}tjd}t	|d}|j
|}|djdd}tg|D]}t
|^q}	|	dkrd}
nt}
yItj||
}|j|"}||}
td	|
}WdQXWn!tk
r-td
|
nXWdQX|S(Nu%s-%su%s.dist-infouutf-8uru
Wheel-Versionu.iuMETADATAtfileobju$Invalid wheel, because %s is missing(ii(R!R2RRJRMRKRLtcodecst	getreaderRtget_wheel_metadataRRttupletintRt	posixpathtopenR
tKeyErrort
ValueError(R&R)tname_vertinfo_dirtwrappertzftwheel_metadatatwvtitfile_versiontfntmetadata_filenametbftwfR3((s=/usr/lib/python2.7/site-packages/pip/_vendor/distlib/wheel.pytmetadatas(
%	
cCsud|j|jf}d|}tj|d}|j|(}tjd|}t|}WdQXt|S(Nu%s-%su%s.dist-infouWHEELuutf-8(	RKRLRaRRbR\R]Rtdict(R&RhReRfRnRoRptmessage((s=/usr/lib/python2.7/site-packages/pip/_vendor/distlib/wheel.pyR^s
[site-packages/pip/_vendor/distlib/wheel.pyc: compiled CPython 2.7 bytecode for distlib's wheel module. The binary body cannot be reproduced as text; only the embedded docstrings survive. They describe the Wheel class API: process_shebang() and get_hash() (which raises "Unsupported hash algorithm" for unknown algorithms), write_record()/write_records() and build_zip() for writing the RECORD file and assembling the archive, build() ("Build a wheel from files in specified paths, and use any specified tags when determining the name of the wheel."), install() ("Install a wheel to the specified paths"; an optional warner callable is called with the software's and the file's wheel versions when they differ, and lib_only=True installs only the purelib/platlib files and makes the method return None instead of an InstalledDistribution), _get_dylib_cache() and _get_extensions(), is_compatible() ("Determine if a wheel is compatible with the running system."), is_mountable() ("Determine if a wheel is asserted as mountable by its metadata."), mount()/unmount(), update_version(), and the module-level compatible_tags() helper ("Return (pyver, abi, arch) tuples compatible with this Python.") that backs the standalone is_compatible() check.]
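The recovered docstrings above outline how distlib builds, checks and installs wheels. The following sketch, which is not part of the original archive, illustrates that API under stated assumptions: distlib is importable, './build/purelib' and './build/scripts' already contain staged files, and the project name 'demo', version '0.1' and all target paths are hypothetical.

# Illustrative sketch only; names and paths are placeholders, not from the archive.
from distlib.wheel import Wheel, compatible_tags
from distlib.scripts import ScriptMaker

w = Wheel()
w.name = 'demo'                      # hypothetical distribution name
w.version = '0.1'
# Map wheel categories to directories that already hold the files to package.
w.build({'purelib': './build/purelib', 'scripts': './build/scripts'})

print(compatible_tags())             # (pyver, abi, arch) tuples for this interpreter

if w.is_compatible():
    paths = {'purelib': './target/lib', 'platlib': './target/lib',
             'scripts': './target/bin', 'headers': './target/include',
             'data': './target/data'}
    maker = ScriptMaker(None, None)  # source/target dirs are supplied during install()
    w.install(paths, maker, lib_only=True)   # only the purelib/platlib files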
site-packages/pip/_vendor/distlib/__init__.py:

# -*- coding: utf-8 -*-
#
# Copyright (C) 2012-2016 Vinay Sajip.
# Licensed to the Python Software Foundation under a contributor agreement.
# See LICENSE.txt and CONTRIBUTORS.txt.
#
import logging

__version__ = '0.2.4'

class DistlibException(Exception):
    pass

try:
    from logging import NullHandler
except ImportError: # pragma: no cover
    class NullHandler(logging.Handler):
        def handle(self, record): pass
        def emit(self, record): pass
        def createLock(self): self.lock = None

logger = logging.getLogger(__name__)
logger.addHandler(NullHandler())
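The vendored package attaches a NullHandler to its logger so it stays silent unless the embedding application configures logging itself. A minimal sketch of how an application might surface distlib's log output (this is not part of the vendored file):

import logging

# Route distlib's messages to the root handlers and lower its threshold.
logging.basicConfig(format='%(name)s %(levelname)s: %(message)s')
logging.getLogger('distlib').setLevel(logging.DEBUG)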
[site-packages/pip/_vendor/distlib/database.pyo: compiled, optimized CPython 2.7 bytecode for distlib's database module (its PEP 376 support). The binary body cannot be reproduced as text; the surviving docstrings document: _Cache ("A simple cache mapping names and .dist-info paths to distributions"); DistributionPath ("Represents a set of distributions installed on a path (typically sys.path)") with get_distributions(), get_distribution(name), provides_distribution(name, version), get_file_path() and get_exported_entries(category, name); Distribution ("A base class for distributions, whether installed or from indexes") with provides, the run/meta/build/test/dev_requires properties and matches_requirement(req); BaseInstalledDistribution.get_hash(); InstalledDistribution (created with the path of a .dist-info directory, exposing exports, read_exports()/write_exports(), get_resource_path(), list_installed_files(), write_installed_files() and check_installed_files(), with RECORD handled as in PEP 376); EggInfoDistribution (the legacy .egg/.egg-info equivalent, driven by PKG-INFO, requires.txt and installed-files.txt); and DependencyGraph ("Represents a dependency graph between distributions", keeping adjacency_list, reverse_list and missing maps) together with its graph-building and DOT-output helpers.]
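The database docstrings recovered above describe how installed distributions are enumerated and queried. A small sketch of that usage, not taken from the archive; the distribution name 'requests' and the requirement string are placeholders.

# Illustrative sketch only: enumerate and query installed distributions.
from distlib.database import DistributionPath

dist_path = DistributionPath(include_egg=True)   # also pick up legacy .egg-info metadata
for dist in dist_path.get_distributions():
    print(dist.name, dist.version)

dist = dist_path.get_distribution('requests')    # placeholder name
if dist is not None:
    print(sorted(dist.provides))                 # "name (version)" strings
    print(dist.matches_requirement('requests (>= 1.0)'))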
[site-packages/pip/_vendor/distlib/database.pyc: compiled CPython 2.7 bytecode for the same database module; apart from the .pyc/.pyo optimization flag it duplicates the database.pyo entry above, so its binary body is not repeated here.]
[site-packages/pip/_vendor/distlib/index.pyo: compiled, optimized CPython 2.7 bytecode for distlib's index module. The binary body cannot be reproduced as text; the surviving docstrings document the PackageIndex class ("This class represents a package index compatible with PyPI, the Python Package Index"): __init__(url=None) ("Initialise an instance. :param url: The URL of the index. If not specified, the URL for PyPI is used."), read_configuration() and save_configuration() (which use distutils to read and store the .pypirc settings, populating username, password, realm and url), check_credentials(), register(metadata) ("Register a distribution on PyPI, using the provided metadata"), a _reader() thread helper for draining subprocess output, get_sign_command() and get_verify_command() (which build GnuPG command lines, honouring an optional keystore or the instance's gpg_home), verify_signature(), upload_documentation(metadata, doc_dir) (uploads the directory containing index.html), and download_file(url, destfile, digest=None, reporthook=None), a urlretrieve-like convenience method that can compute and check a digest while downloading.]
isinstancetlistttupletgetattrRRRGRtinfotintRLRtlenRnRRSRR(R"Rtdestfiletdigestt
reporthooktdigesterthashertdfptsfptheaderst	blocksizetsizeRtblocknumtblocktactual((s=/usr/lib/python2.7/site-packages/pip/_vendor/distlib/index.pyt
download_filevsV	

cCsWg}|jr"|j|jn|jr>|j|jnt|}|j|S(s
        Send a standard library :class:`Request` to PyPI and return its
        response.

        :param req: The request to send.
        :return: The HTTP response from PyPI (a standard library HTTPResponse).
        (RRPRRR(R"treqthandlerstopener((s=/usr/lib/python2.7/site-packages/pip/_vendor/distlib/index.pyRGs		cCs<g}|j}xy|D]q\}}t|ttfsC|g}nxA|D]9}|jd|d|jdd|jdfqJWqWxG|D]?\}}	}
|jd|d||	fjdd|
fqW|jd|ddfdj|}d|}i|d6tt|d	6}
t	|j
||
S(
s&
        Encode fields and files for posting to an HTTP server.

        :param fields: The fields to send as a list of (fieldname, value)
                       tuples.
        :param files: The files to send as a list of (fieldname, filename,
                      file_bytes) tuple.
        s--s)Content-Disposition: form-data; name="%s"sutf-8ts8Content-Disposition: form-data; name="%s"; filename="%s"s
smultipart/form-data; boundary=sContent-typesContent-length(tboundaryRRRR[RwR^tstrRRR(R"RRtpartsRtktvaluestvtkeyR`tvaluetbodytctR((s=/usr/lib/python2.7/site-packages/pip/_vendor/distlib/index.pyREs4	


cCsbt|tri|d6}n|jdkrIt|jdd|_n|jj||p^dS(NRTttimeoutg@tand(RRRRR
Rtsearch(R"ttermstoperator((s=/usr/lib/python2.7/site-packages/pip/_vendor/distlib/index.pyRs
N(t__name__t
__module__t__doc__RRR,R2RR=R;RKRWRgRvRyRRRRRRGRER(((s=/usr/lib/python2.7/site-packages/pip/_vendor/distlib/index.pyRs*	
					#8	M		+(RtloggingRRRR\t	threadingRtImportErrortdummy_threadingRRtcompatRRRRRRtutilRR	R
t	getLoggerRRQRt
DEFAULT_REALMtobjectR(((s=/usr/lib/python2.7/site-packages/pip/_vendor/distlib/index.pyts 
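
A minimal usage sketch of that API, inferred from the surviving docstrings rather than taken
from the bytecode; the URL, destination path and digest value are illustrative only:

    from pip._vendor.distlib.index import PackageIndex

    index = PackageIndex()                              # defaults to the public PyPI URL
    index.download_file(
        'https://example.com/files/foo-1.0.tar.gz',     # hypothetical download URL
        '/tmp/foo-1.0.tar.gz',                          # where to save it
        digest=('sha256', 'ab12...'),                   # (hasher name, expected hex digest);
    )                                                   # a mismatch raises an error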
.PKZ7JJ.site-packages/pip/_vendor/distlib/metadata.pycnu[
[compiled bytecode: pip/_vendor/distlib/metadata.pyc -- "Implementation of the Metadata for
 Python packages PEPs. Supports all metadata formats (1.0, 1.1, 1.2, and 2.0 experimental)."
 Recoverable names and docstrings:
   exceptions  MetadataMissingError ("A required metadata is missing"), MetadataConflictError,
               MetadataUnrecognizedVersionError, MetadataInvalidError
   constants   PKG_INFO_ENCODING = 'utf-8', PKG_INFO_PREFERRED_VERSION = '1.1',
               METADATA_FILENAME = 'pydist.json', WHEEL_METADATA_FILENAME = 'metadata.json'
   helpers     _version2fieldlist(version), _best_version(fields) ("Detect the best version
               depending on the fields used"), _get_name_and_version(name, version, for_filename)
   LegacyMetadata(path=None, fileobj=None, mapping=None, scheme='default')
               key/value (PKG-INFO) metadata for versions 1.0, 1.1 and 1.2 (auto-detected),
               with read/write/update/todict/check methods
   Metadata(path=None, fileobj=None, mapping=None, scheme='default')
               uses 2.0 (JSON) metadata where possible, otherwise wraps a LegacyMetadata
               instance which handles the key-value metadata format]
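
A small illustration of how the Metadata class is typically constructed, based on the signature
visible above; the field values are made up:

    from pip._vendor.distlib.metadata import Metadata

    md = Metadata(mapping={'name': 'foo', 'version': '1.0'})  # path/fileobj/mapping are exclusive
    md.name, md.version        # attribute-style access to fields
    d = md.todict()            # the metadata as a plain dictionary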
PKZ99-site-packages/pip/_vendor/distlib/manifest.pynu[
# -*- coding: utf-8 -*-
#
# Copyright (C) 2012-2013 Python Software Foundation.
# See LICENSE.txt and CONTRIBUTORS.txt.
#
"""
Class representing the list of files in a distribution.

Equivalent to distutils.filelist, but fixes some problems.
"""
import fnmatch
import logging
import os
import re
import sys

from . import DistlibException
from .compat import fsdecode
from .util import convert_path


__all__ = ['Manifest']

logger = logging.getLogger(__name__)

# a \ followed by optional word characters + EOL
_COLLAPSE_PATTERN = re.compile(r'\\\w*\n', re.M)
_COMMENTED_LINE = re.compile('#.*?(?=\n)|\n(?=$)', re.M | re.S)

#
# Due to the different results returned by fnmatch.translate, we need
# to do slightly different processing for Python 2.7 and 3.2 ... this needed
# to be brought in for Python 3.6 onwards.
#
_PYTHON_VERSION = sys.version_info[:2]

class Manifest(object):
    """A list of files built by exploring the filesystem and filtered by
    applying various patterns to what we find there.
    """

    def __init__(self, base=None):
        """
        Initialise an instance.

        :param base: The base directory to explore under.
        """
        self.base = os.path.abspath(os.path.normpath(base or os.getcwd()))
        self.prefix = self.base + os.sep
        self.allfiles = None
        self.files = set()

    #
    # Public API
    #

    def findall(self):
        """Find all files under the base and set ``allfiles`` to the absolute
        pathnames of files found.
        """
        from stat import S_ISREG, S_ISDIR, S_ISLNK

        self.allfiles = allfiles = []
        root = self.base
        stack = [root]
        pop = stack.pop
        push = stack.append

        while stack:
            root = pop()
            names = os.listdir(root)

            for name in names:
                fullname = os.path.join(root, name)

                # Avoid excess stat calls -- just one will do, thank you!
                stat = os.stat(fullname)
                mode = stat.st_mode
                if S_ISREG(mode):
                    allfiles.append(fsdecode(fullname))
                elif S_ISDIR(mode) and not S_ISLNK(mode):
                    push(fullname)

    def add(self, item):
        """
        Add a file to the manifest.

        :param item: The pathname to add. This can be relative to the base.
        """
        if not item.startswith(self.prefix):
            item = os.path.join(self.base, item)
        self.files.add(os.path.normpath(item))

    def add_many(self, items):
        """
        Add a list of files to the manifest.

        :param items: The pathnames to add. These can be relative to the base.
        """
        for item in items:
            self.add(item)

    def sorted(self, wantdirs=False):
        """
        Return sorted files in directory order
        """

        def add_dir(dirs, d):
            dirs.add(d)
            logger.debug('add_dir added %s', d)
            if d != self.base:
                parent, _ = os.path.split(d)
                assert parent not in ('', '/')
                add_dir(dirs, parent)

        result = set(self.files)    # make a copy!
        if wantdirs:
            dirs = set()
            for f in result:
                add_dir(dirs, os.path.dirname(f))
            result |= dirs
        return [os.path.join(*path_tuple) for path_tuple in
                sorted(os.path.split(path) for path in result)]

    def clear(self):
        """Clear all collected files."""
        self.files = set()
        self.allfiles = []

    def process_directive(self, directive):
        """
        Process a directive which either adds some files from ``allfiles`` to
        ``files``, or removes some files from ``files``.

        :param directive: The directive to process. This should be in a format
                     compatible with distutils ``MANIFEST.in`` files:

                     http://docs.python.org/distutils/sourcedist.html#commands
        """
        # Parse the line: split it up, make sure the right number of words
        # is there, and return the relevant words.  'action' is always
        # defined: it's the first word of the line.  Which of the other
        # three are defined depends on the action; it'll be either
        # patterns, (dir and patterns), or (dirpattern).
        action, patterns, thedir, dirpattern = self._parse_directive(directive)

        # OK, now we know that the action is valid and we have the
        # right number of words on the line for that action -- so we
        # can proceed with minimal error-checking.
        if action == 'include':
            for pattern in patterns:
                if not self._include_pattern(pattern, anchor=True):
                    logger.warning('no files found matching %r', pattern)

        elif action == 'exclude':
            for pattern in patterns:
                found = self._exclude_pattern(pattern, anchor=True)
                #if not found:
                #    logger.warning('no previously-included files '
                #                   'found matching %r', pattern)

        elif action == 'global-include':
            for pattern in patterns:
                if not self._include_pattern(pattern, anchor=False):
                    logger.warning('no files found matching %r '
                                   'anywhere in distribution', pattern)

        elif action == 'global-exclude':
            for pattern in patterns:
                found = self._exclude_pattern(pattern, anchor=False)
                #if not found:
                #    logger.warning('no previously-included files '
                #                   'matching %r found anywhere in '
                #                   'distribution', pattern)

        elif action == 'recursive-include':
            for pattern in patterns:
                if not self._include_pattern(pattern, prefix=thedir):
                    logger.warning('no files found matching %r '
                                   'under directory %r', pattern, thedir)

        elif action == 'recursive-exclude':
            for pattern in patterns:
                found = self._exclude_pattern(pattern, prefix=thedir)
                #if not found:
                #    logger.warning('no previously-included files '
                #                   'matching %r found under directory %r',
                #                   pattern, thedir)

        elif action == 'graft':
            if not self._include_pattern(None, prefix=dirpattern):
                logger.warning('no directories found matching %r',
                               dirpattern)

        elif action == 'prune':
            if not self._exclude_pattern(None, prefix=dirpattern):
                logger.warning('no previously-included directories found '
                               'matching %r', dirpattern)
        else:   # pragma: no cover
            # This should never happen, as it should be caught in
            # _parse_template_line
            raise DistlibException(
                'invalid action %r' % action)
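
    # A brief usage sketch (illustrative, not part of distlib) of the directive API
    # described above, driven with MANIFEST.in-style lines; the paths are made up.
    #
    #     from pip._vendor.distlib.manifest import Manifest
    #
    #     m = Manifest('/path/to/project')                     # hypothetical project root
    #     m.findall()                                          # populate 'allfiles'
    #     m.process_directive('include *.txt')                 # anchored include
    #     m.process_directive('recursive-include docs *.rst')
    #     m.process_directive('prune docs/_build')
    #     selected = m.sorted()                                # sorted list of kept paths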

    #
    # Private API
    #

    def _parse_directive(self, directive):
        """
        Validate a directive.
        :param directive: The directive to validate.
        :return: A tuple of action, patterns, thedir, dir_patterns
        """
        words = directive.split()
        if len(words) == 1 and words[0] not in ('include', 'exclude',
                                                'global-include',
                                                'global-exclude',
                                                'recursive-include',
                                                'recursive-exclude',
                                                'graft', 'prune'):
            # no action given, let's use the default 'include'
            words.insert(0, 'include')

        action = words[0]
        patterns = thedir = dir_pattern = None

        if action in ('include', 'exclude',
                      'global-include', 'global-exclude'):
            if len(words) < 2:
                raise DistlibException(
                    '%r expects <pattern1> <pattern2> ...' % action)

            patterns = [convert_path(word) for word in words[1:]]

        elif action in ('recursive-include', 'recursive-exclude'):
            if len(words) < 3:
                raise DistlibException(
                    '%r expects <dir> <pattern1> <pattern2> ...' % action)

            thedir = convert_path(words[1])
            patterns = [convert_path(word) for word in words[2:]]

        elif action in ('graft', 'prune'):
            if len(words) != 2:
                raise DistlibException(
                    '%r expects a single <dir_pattern>' % action)

            dir_pattern = convert_path(words[1])

        else:
            raise DistlibException('unknown action %r' % action)

        return action, patterns, thedir, dir_pattern

    def _include_pattern(self, pattern, anchor=True, prefix=None,
                         is_regex=False):
        """Select strings (presumably filenames) from 'self.files' that
        match 'pattern', a Unix-style wildcard (glob) pattern.

        Patterns are not quite the same as implemented by the 'fnmatch'
        module: '*' and '?'  match non-special characters, where "special"
        is platform-dependent: slash on Unix; colon, slash, and backslash on
        DOS/Windows; and colon on Mac OS.

        If 'anchor' is true (the default), then the pattern match is more
        stringent: "*.py" will match "foo.py" but not "foo/bar.py".  If
        'anchor' is false, both of these will match.

        If 'prefix' is supplied, then only filenames starting with 'prefix'
        (itself a pattern) and ending with 'pattern', with anything in between
        them, will match.  'anchor' is ignored in this case.

        If 'is_regex' is true, 'anchor' and 'prefix' are ignored, and
        'pattern' is assumed to be either a string containing a regex or a
        regex object -- no translation is done, the regex is just compiled
        and used as-is.

        Selected strings will be added to self.files.

        Return True if files are found.
        """
        # XXX docstring lying about what the special chars are?
        found = False
        pattern_re = self._translate_pattern(pattern, anchor, prefix, is_regex)

        # delayed loading of allfiles list
        if self.allfiles is None:
            self.findall()

        for name in self.allfiles:
            if pattern_re.search(name):
                self.files.add(name)
                found = True
        return found
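
    # Illustration of the anchor/prefix semantics described in the docstring above
    # (not part of distlib); 'm' is assumed to be a Manifest instance and the
    # patterns are made up.
    #
    #     m._include_pattern('*.py', anchor=True)    # matches 'foo.py' but not 'foo/bar.py'
    #     m._include_pattern('*.py', anchor=False)   # matches both
    #     m._include_pattern('*.py', prefix='src')   # matches '*.py' anywhere under 'src'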

    def _exclude_pattern(self, pattern, anchor=True, prefix=None,
                         is_regex=False):
        """Remove strings (presumably filenames) from 'files' that match
        'pattern'.

        Other parameters are the same as for 'include_pattern()', above.
        The list 'self.files' is modified in place. Return True if files are
        found.

        This API is public to allow e.g. exclusion of SCM subdirs, e.g. when
        packaging source distributions
        """
        found = False
        pattern_re = self._translate_pattern(pattern, anchor, prefix, is_regex)
        for f in list(self.files):
            if pattern_re.search(f):
                self.files.remove(f)
                found = True
        return found

    def _translate_pattern(self, pattern, anchor=True, prefix=None,
                           is_regex=False):
        """Translate a shell-like wildcard pattern to a compiled regular
        expression.

        Return the compiled regex.  If 'is_regex' true,
        then 'pattern' is directly compiled to a regex (if it's a string)
        or just returned as-is (assumes it's a regex object).
        """
        if is_regex:
            if isinstance(pattern, str):
                return re.compile(pattern)
            else:
                return pattern

        if _PYTHON_VERSION > (3, 2):
            # ditch start and end characters
            start, _, end = self._glob_to_re('_').partition('_')

        if pattern:
            pattern_re = self._glob_to_re(pattern)
            if _PYTHON_VERSION > (3, 2):
                assert pattern_re.startswith(start) and pattern_re.endswith(end)
        else:
            pattern_re = ''

        base = re.escape(os.path.join(self.base, ''))
        if prefix is not None:
            # ditch end of pattern character
            if _PYTHON_VERSION <= (3, 2):
                empty_pattern = self._glob_to_re('')
                prefix_re = self._glob_to_re(prefix)[:-len(empty_pattern)]
            else:
                prefix_re = self._glob_to_re(prefix)
                assert prefix_re.startswith(start) and prefix_re.endswith(end)
                prefix_re = prefix_re[len(start): len(prefix_re) - len(end)]
            sep = os.sep
            if os.sep == '\\':
                sep = r'\\'
            if _PYTHON_VERSION <= (3, 2):
                pattern_re = '^' + base + sep.join((prefix_re,
                                                    '.*' + pattern_re))
            else:
                pattern_re = pattern_re[len(start): len(pattern_re) - len(end)]
                pattern_re = r'%s%s%s%s.*%s%s' % (start, base, prefix_re, sep,
                                                  pattern_re, end)
        else:  # no prefix -- respect anchor flag
            if anchor:
                if _PYTHON_VERSION <= (3, 2):
                    pattern_re = '^' + base + pattern_re
                else:
                    pattern_re = r'%s%s%s' % (start, base, pattern_re[len(start):])

        return re.compile(pattern_re)

    def _glob_to_re(self, pattern):
        """Translate a shell-like glob pattern to a regular expression.

        Return a string containing the regex.  Differs from
        'fnmatch.translate()' in that '*' does not match "special characters"
        (which are platform-specific).
        """
        pattern_re = fnmatch.translate(pattern)

        # '?' and '*' in the glob pattern become '.' and '.*' in the RE, which
        # IMHO is wrong -- '?' and '*' aren't supposed to match slash in Unix,
        # and by extension they shouldn't match such "special characters" under
        # any OS.  So change all non-escaped dots in the RE to match any
        # character except the special characters (currently: just os.sep).
        sep = os.sep
        if os.sep == '\\':
            # we're using a regex to manipulate a regex, so we need
            # to escape the backslash twice
            sep = r'\\\\'
        escaped = r'\1[^%s]' % sep
        pattern_re = re.sub(r'((?<!\\)(\\\\)*)\.', escaped, pattern_re)
        return pattern_re

[archive entry boundary: site-packages/pip/_vendor/distlib/resources.py -- the entry header and
 the earlier part of this file were lost during extraction; only its tail survives below]

try:
    # In Python 3.6, _frozen_importlib -> _frozen_importlib_external
    try:
        import _frozen_importlib_external as _fi
    except ImportError:
        import _frozen_importlib as _fi
    _finder_registry[_fi.SourceFileLoader] = ResourceFinder
    _finder_registry[_fi.FileFinder] = ResourceFinder
    del _fi
except (ImportError, AttributeError):
    pass


def register_finder(loader, finder_maker):
    _finder_registry[type(loader)] = finder_maker

_finder_cache = {}


def finder(package):
    """
    Return a resource finder for a package.
    :param package: The name of the package.
    :return: A :class:`ResourceFinder` instance for the package.
    """
    if package in _finder_cache:
        result = _finder_cache[package]
    else:
        if package not in sys.modules:
            __import__(package)
        module = sys.modules[package]
        path = getattr(module, '__path__', None)
        if path is None:
            raise DistlibException('You cannot get a finder for a module, '
                                   'only for a package')
        loader = getattr(module, '__loader__', None)
        finder_maker = _finder_registry.get(type(loader))
        if finder_maker is None:
            raise DistlibException('Unable to locate finder for %r' % package)
        result = finder_maker(module)
        _finder_cache[package] = result
    return result


_dummy_module = types.ModuleType(str('__dummy__'))


def finder_for_path(path):
    """
    Return a resource finder for a path, which should represent a container.

    :param path: The path.
    :return: A :class:`ResourceFinder` instance for the path.
    """
    result = None
    # calls any path hooks, gets importer into cache
    pkgutil.get_importer(path)
    loader = sys.path_importer_cache.get(path)
    finder = _finder_registry.get(type(loader))
    if finder:
        module = _dummy_module
        module.__file__ = os.path.join(path, '')
        module.__loader__ = loader
        result = finder(module)
    return result
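
# A short usage sketch (illustrative, not part of distlib): reading a data file bundled
# inside a package via the finder API above; 'mypkg' and 'data.txt' are made-up names.
#
#     from pip._vendor.distlib.resources import finder
#
#     f = finder('mypkg')                  # ResourceFinder for the package
#     res = f.find('data.txt')             # a Resource, or None if not present
#     if res is not None:
#         payload = res.bytes              # the resource contents as bytes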
PKZ^00-site-packages/pip/_vendor/distlib/scripts.pyonu[
[compiled bytecode: pip/_vendor/distlib/scripts.pyo -- the byte stream itself is not
 reproducible as text. Recoverable text includes:
   SCRIPT_TEMPLATE    the wrapper written for generated console scripts; it resolves
                      '%(module)s' / '%(func)s', strips a '-script.py(w)' or '.exe' suffix
                      from sys.argv[0], calls the target callable and exits with its result
   FIRST_LINE_RE      r'^#!.*pythonw?[0-9.]*([ \t].*)?$'  (matches an existing shebang line)
   _enquote_executable(executable)
   ScriptMaker(source_dir, target_dir, add_launchers=True, dry_run=False, fileop=None)
                      "A class to copy or create scripts from source scripts or callable
                      specifications", with make(specification, options=None) and
                      make_multiple(specifications, options=None) returning the list of
                      absolute pathnames written, plus helpers such as _get_shebang,
                      _write_script, _copy_script and get_manifest]
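
A minimal illustration of that API, based on the names above; the target directory and the
export-entry specification are made up:

    from pip._vendor.distlib.scripts import ScriptMaker

    maker = ScriptMaker(source_dir=None, target_dir='/tmp/bin')  # hypothetical target
    written = maker.make('foo = mypkg.cli:main')                 # callable spec -> wrapper script(s)
    written += maker.make_multiple(['bar = mypkg.cli:bar'])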
PKZxEE-site-packages/pip/_vendor/distlib/locators.pynu[
# -*- coding: utf-8 -*-
#
# Copyright (C) 2012-2015 Vinay Sajip.
# Licensed to the Python Software Foundation under a contributor agreement.
# See LICENSE.txt and CONTRIBUTORS.txt.
#

import gzip
from io import BytesIO
import json
import logging
import os
import posixpath
import re
try:
    import threading
except ImportError:  # pragma: no cover
    import dummy_threading as threading
import zlib

from . import DistlibException
from .compat import (urljoin, urlparse, urlunparse, url2pathname, pathname2url,
                     queue, quote, unescape, string_types, build_opener,
                     HTTPRedirectHandler as BaseRedirectHandler, text_type,
                     Request, HTTPError, URLError)
from .database import Distribution, DistributionPath, make_dist
from .metadata import Metadata
from .util import (cached_property, parse_credentials, ensure_slash,
                   split_filename, get_project_data, parse_requirement,
                   parse_name_and_version, ServerProxy, normalize_name)
from .version import get_scheme, UnsupportedVersionError
from .wheel import Wheel, is_compatible

logger = logging.getLogger(__name__)

HASHER_HASH = re.compile(r'^(\w+)=([a-f0-9]+)')
CHARSET = re.compile(r';\s*charset\s*=\s*(.*)\s*$', re.I)
HTML_CONTENT_TYPE = re.compile('text/html|application/x(ht)?ml')
DEFAULT_INDEX = 'https://pypi.python.org/pypi'

def get_all_distribution_names(url=None):
    """
    Return all distribution names known by an index.
    :param url: The URL of the index.
    :return: A list of all known distribution names.
    """
    if url is None:
        url = DEFAULT_INDEX
    client = ServerProxy(url, timeout=3.0)
    return client.list_packages()

class RedirectHandler(BaseRedirectHandler):
    """
    A class to work around a bug in some Python 3.2.x releases.
    """
    # There's a bug in the base version for some 3.2.x
    # (e.g. 3.2.2 on Ubuntu Oneiric). If a Location header
    # returns e.g. /abc, it bails because it says the scheme ''
    # is bogus, when actually it should use the request's
    # URL for the scheme. See Python issue #13696.
    def http_error_302(self, req, fp, code, msg, headers):
        # Some servers (incorrectly) return multiple Location headers
        # (so probably same goes for URI).  Use first header.
        newurl = None
        for key in ('location', 'uri'):
            if key in headers:
                newurl = headers[key]
                break
        if newurl is None:
            return
        urlparts = urlparse(newurl)
        if urlparts.scheme == '':
            newurl = urljoin(req.get_full_url(), newurl)
            if hasattr(headers, 'replace_header'):
                headers.replace_header(key, newurl)
            else:
                headers[key] = newurl
        return BaseRedirectHandler.http_error_302(self, req, fp, code, msg,
                                                  headers)

    http_error_301 = http_error_303 = http_error_307 = http_error_302

class Locator(object):
    """
    A base class for locators - things that locate distributions.
    """
    source_extensions = ('.tar.gz', '.tar.bz2', '.tar', '.zip', '.tgz', '.tbz')
    binary_extensions = ('.egg', '.exe', '.whl')
    excluded_extensions = ('.pdf',)

    # A list of tags indicating which wheels you want to match. The default
    # value of None matches against the tags compatible with the running
    # Python. If you want to match other values, set wheel_tags on a locator
    # instance to a list of tuples (pyver, abi, arch) which you want to match.
    wheel_tags = None

    downloadable_extensions = source_extensions + ('.whl',)

    def __init__(self, scheme='default'):
        """
        Initialise an instance.
        :param scheme: Because locators look for most recent versions, they
                       need to know the version scheme to use. This specifies
                       the current PEP-recommended scheme - use ``'legacy'``
                       if you need to support existing distributions on PyPI.
        """
        self._cache = {}
        self.scheme = scheme
        # Because of bugs in some of the handlers on some of the platforms,
        # we use our own opener rather than just using urlopen.
        self.opener = build_opener(RedirectHandler())
        # If get_project() is called from locate(), the matcher instance
        # is set from the requirement passed to locate(). See issue #18 for
        # why this can be useful to know.
        self.matcher = None
        self.errors = queue.Queue()

    def get_errors(self):
        """
        Return any errors which have occurred.
        """
        result = []
        while not self.errors.empty():  # pragma: no cover
            try:
                e = self.errors.get(False)
                result.append(e)
            except self.errors.Empty:
                continue
            self.errors.task_done()
        return result

    def clear_errors(self):
        """
        Clear any errors which may have been logged.
        """
        # Just get the errors and throw them away
        self.get_errors()

    def clear_cache(self):
        self._cache.clear()

    def _get_scheme(self):
        return self._scheme

    def _set_scheme(self, value):
        self._scheme = value

    scheme = property(_get_scheme, _set_scheme)

    def _get_project(self, name):
        """
        For a given project, get a dictionary mapping available versions to Distribution
        instances.

        This should be implemented in subclasses.

        If called from a locate() request, self.matcher will be set to a
        matcher for the requirement to satisfy, otherwise it will be None.
        """
        raise NotImplementedError('Please implement in the subclass')

    def get_distribution_names(self):
        """
        Return all the distribution names known to this locator.
        """
        raise NotImplementedError('Please implement in the subclass')

    def get_project(self, name):
        """
        For a given project, get a dictionary mapping available versions to Distribution
        instances.

        This calls _get_project to do all the work, and just implements a caching layer on top.
        """
        if self._cache is None:
            result = self._get_project(name)
        elif name in self._cache:
            result = self._cache[name]
        else:
            self.clear_errors()
            result = self._get_project(name)
            self._cache[name] = result
        return result

    def score_url(self, url):
        """
        Give a URL a score which can be used to choose preferred URLs
        for a given project release.
        """
        t = urlparse(url)
        basename = posixpath.basename(t.path)
        compatible = True
        is_wheel = basename.endswith('.whl')
        if is_wheel:
            compatible = is_compatible(Wheel(basename), self.wheel_tags)
        return (t.scheme != 'https', 'pypi.python.org' in t.netloc,
                is_wheel, compatible, basename)

    def prefer_url(self, url1, url2):
        """
        Choose one of two URLs where both are candidates for distribution
        archives for the same version of a distribution (for example,
        .tar.gz vs. zip).

        The current implementation favours https:// URLs over http://, archives
        from PyPI over those from other locations, wheel compatibility (if a
        wheel) and then the archive name.
        """
        result = url2
        if url1:
            s1 = self.score_url(url1)
            s2 = self.score_url(url2)
            if s1 > s2:
                result = url1
            if result != url2:
                logger.debug('Not replacing %r with %r', url1, url2)
            else:
                logger.debug('Replacing %r with %r', url1, url2)
        return result

    def split_filename(self, filename, project_name):
        """
        Attempt to split a filename into project name, version and Python version.
        """
        return split_filename(filename, project_name)

    def convert_url_to_download_info(self, url, project_name):
        """
        See if a URL is a candidate for a download URL for a project (the URL
        has typically been scraped from an HTML page).

        If it is, a dictionary is returned with keys "name", "version",
        "filename" and "url"; otherwise, None is returned.
        """
        def same_project(name1, name2):
            return normalize_name(name1) == normalize_name(name2)

        result = None
        scheme, netloc, path, params, query, frag = urlparse(url)
        if frag.lower().startswith('egg='):
            logger.debug('%s: version hint in fragment: %r',
                         project_name, frag)
        m = HASHER_HASH.match(frag)
        if m:
            algo, digest = m.groups()
        else:
            algo, digest = None, None
        origpath = path
        if path and path[-1] == '/':
            path = path[:-1]
        if path.endswith('.whl'):
            try:
                wheel = Wheel(path)
                if is_compatible(wheel, self.wheel_tags):
                    if project_name is None:
                        include = True
                    else:
                        include = same_project(wheel.name, project_name)
                    if include:
                        result = {
                            'name': wheel.name,
                            'version': wheel.version,
                            'filename': wheel.filename,
                            'url': urlunparse((scheme, netloc, origpath,
                                               params, query, '')),
                            'python-version': ', '.join(
                                ['.'.join(list(v[2:])) for v in wheel.pyver]),
                        }
            except Exception as e:  # pragma: no cover
                logger.warning('invalid path for wheel: %s', path)
        elif path.endswith(self.downloadable_extensions):
            path = filename = posixpath.basename(path)
            for ext in self.downloadable_extensions:
                if path.endswith(ext):
                    path = path[:-len(ext)]
                    t = self.split_filename(path, project_name)
                    if not t:
                        logger.debug('No match for project/version: %s', path)
                    else:
                        name, version, pyver = t
                        if not project_name or same_project(project_name, name):
                            result = {
                                'name': name,
                                'version': version,
                                'filename': filename,
                                'url': urlunparse((scheme, netloc, origpath,
                                                   params, query, '')),
                                #'packagetype': 'sdist',
                            }
                            if pyver:
                                result['python-version'] = pyver
                    break
        if result and algo:
            result['%s_digest' % algo] = digest
        return result
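
    # Illustrative sketch (hypothetical URL; only the shape of the returned
    # dict is shown). For a compatible wheel link scraped from an index page:
    #
    #   convert_url_to_download_info(
    #       'https://example.com/packages/foo-1.0-py2.py3-none-any.whl', 'foo')
    #   -> {'name': 'foo', 'version': '1.0',
    #       'filename': 'foo-1.0-py2.py3-none-any.whl',
    #       'url': 'https://example.com/packages/foo-1.0-py2.py3-none-any.whl',
    #       'python-version': '2, 3'}
    #
    # A '#sha256=...' fragment on the URL would typically add a 'sha256_digest'
    # key to the result.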

    def _get_digest(self, info):
        """
        Get a digest from a dictionary by looking at keys of the form
        'algo_digest'.

        Returns a 2-tuple (algo, digest) if found, else None. Currently
        looks only for SHA256, then MD5.
        """
        result = None
        for algo in ('sha256', 'md5'):
            key = '%s_digest' % algo
            if key in info:
                result = (algo, info[key])
                break
        return result
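
    # Minimal sketch of the lookup: given the info dict gathered for one
    # archive,
    #
    #   self._get_digest({'url': '...', 'sha256_digest': 'abc123'})
    #       -> ('sha256', 'abc123')
    #   self._get_digest({'url': '...'})
    #       -> None
    #
    # SHA256 is preferred over MD5 when both keys are present.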

    def _update_version_data(self, result, info):
        """
        Update a result dictionary (the final result from _get_project) with a
        dictionary for a specific version, which typically holds information
        gleaned from a filename or URL for an archive for the distribution.
        """
        name = info.pop('name')
        version = info.pop('version')
        if version in result:
            dist = result[version]
            md = dist.metadata
        else:
            dist = make_dist(name, version, scheme=self.scheme)
            md = dist.metadata
        dist.digest = digest = self._get_digest(info)
        url = info['url']
        result['digests'][url] = digest
        if md.source_url != info['url']:
            md.source_url = self.prefer_url(md.source_url, url)
        result['urls'].setdefault(version, set()).add(url)
        dist.locator = self
        result[version] = dist

    def locate(self, requirement, prereleases=False):
        """
        Find the most recent distribution which matches the given
        requirement.

        :param requirement: A requirement of the form 'foo (1.0)' or perhaps
                            'foo (>= 1.0, < 2.0, != 1.3)'
        :param prereleases: If ``True``, allow pre-release versions
                            to be located. Otherwise, pre-release versions
                            are not returned.
        :return: A :class:`Distribution` instance, or ``None`` if no such
                 distribution could be located.
        """
        result = None
        r = parse_requirement(requirement)
        if r is None:
            raise DistlibException('Not a valid requirement: %r' % requirement)
        scheme = get_scheme(self.scheme)
        self.matcher = matcher = scheme.matcher(r.requirement)
        logger.debug('matcher: %s (%s)', matcher, type(matcher).__name__)
        versions = self.get_project(r.name)
        if len(versions) > 2:   # urls and digests keys are present
            # sometimes, versions are invalid
            slist = []
            vcls = matcher.version_class
            for k in versions:
                if k in ('urls', 'digests'):
                    continue
                try:
                    if not matcher.match(k):
                        logger.debug('%s did not match %r', matcher, k)
                    else:
                        if prereleases or not vcls(k).is_prerelease:
                            slist.append(k)
                        else:
                            logger.debug('skipping pre-release '
                                         'version %s of %s', k, matcher.name)
                except Exception:  # pragma: no cover
                    logger.warning('error matching %s with %r', matcher, k)
                    pass # slist.append(k)
            if len(slist) > 1:
                slist = sorted(slist, key=scheme.key)
            if slist:
                logger.debug('sorted list: %s', slist)
                version = slist[-1]
                result = versions[version]
        if result:
            if r.extras:
                result.extras = r.extras
            result.download_urls = versions.get('urls', {}).get(version, set())
            d = {}
            sd = versions.get('digests', {})
            for url in result.download_urls:
                if url in sd:
                    d[url] = sd[url]
            result.digests = d
        self.matcher = None
        return result


class PyPIRPCLocator(Locator):
    """
    This locator uses XML-RPC to locate distributions. It therefore
    cannot be used with simple mirrors (that only mirror file content).
    """
    def __init__(self, url, **kwargs):
        """
        Initialise an instance.

        :param url: The URL to use for XML-RPC.
        :param kwargs: Passed to the superclass constructor.
        """
        super(PyPIRPCLocator, self).__init__(**kwargs)
        self.base_url = url
        self.client = ServerProxy(url, timeout=3.0)

    def get_distribution_names(self):
        """
        Return all the distribution names known to this locator.
        """
        return set(self.client.list_packages())

    def _get_project(self, name):
        result = {'urls': {}, 'digests': {}}
        versions = self.client.package_releases(name, True)
        for v in versions:
            urls = self.client.release_urls(name, v)
            data = self.client.release_data(name, v)
            metadata = Metadata(scheme=self.scheme)
            metadata.name = data['name']
            metadata.version = data['version']
            metadata.license = data.get('license')
            metadata.keywords = data.get('keywords', [])
            metadata.summary = data.get('summary')
            dist = Distribution(metadata)
            if urls:
                info = urls[0]
                metadata.source_url = info['url']
                dist.digest = self._get_digest(info)
                dist.locator = self
                result[v] = dist
                for info in urls:
                    url = info['url']
                    digest = self._get_digest(info)
                    result['urls'].setdefault(v, set()).add(url)
                    result['digests'][url] = digest
        return result

class PyPIJSONLocator(Locator):
    """
    This locator uses PyPI's JSON interface. It's very limited in functionality
    and probably not worth using.
    """
    def __init__(self, url, **kwargs):
        super(PyPIJSONLocator, self).__init__(**kwargs)
        self.base_url = ensure_slash(url)

    def get_distribution_names(self):
        """
        Return all the distribution names known to this locator.
        """
        raise NotImplementedError('Not available from this locator')

    def _get_project(self, name):
        result = {'urls': {}, 'digests': {}}
        url = urljoin(self.base_url, '%s/json' % quote(name))
        try:
            resp = self.opener.open(url)
            data = resp.read().decode() # for now
            d = json.loads(data)
            md = Metadata(scheme=self.scheme)
            data = d['info']
            md.name = data['name']
            md.version = data['version']
            md.license = data.get('license')
            md.keywords = data.get('keywords', [])
            md.summary = data.get('summary')
            dist = Distribution(md)
            dist.locator = self
            urls = d['urls']
            result[md.version] = dist
            for info in d['urls']:
                url = info['url']
                dist.download_urls.add(url)
                dist.digests[url] = self._get_digest(info)
                result['urls'].setdefault(md.version, set()).add(url)
                result['digests'][url] = self._get_digest(info)
            # Now get other releases
            for version, infos in d['releases'].items():
                if version == md.version:
                    continue    # already done
                omd = Metadata(scheme=self.scheme)
                omd.name = md.name
                omd.version = version
                odist = Distribution(omd)
                odist.locator = self
                result[version] = odist
                for info in infos:
                    url = info['url']
                    odist.download_urls.add(url)
                    odist.digests[url] = self._get_digest(info)
                    result['urls'].setdefault(version, set()).add(url)
                    result['digests'][url] = self._get_digest(info)
#            for info in urls:
#                md.source_url = info['url']
#                dist.digest = self._get_digest(info)
#                dist.locator = self
#                for info in urls:
#                    url = info['url']
#                    result['urls'].setdefault(md.version, set()).add(url)
#                    result['digests'][url] = self._get_digest(info)
        except Exception as e:
            self.errors.put(text_type(e))
            logger.exception('JSON fetch failed: %s', e)
        return result


class Page(object):
    """
    This class represents a scraped HTML page.
    """
    # The following slightly hairy-looking regex just looks for the contents of
    # an anchor link, which has an attribute "href" either immediately preceded
    # or immediately followed by a "rel" attribute. The attribute values can be
    # declared with double quotes, single quotes or no quotes - which leads to
    # the length of the expression.
    _href = re.compile("""
(rel\s*=\s*(?:"(?P<rel1>[^"]*)"|'(?P<rel2>[^']*)'|(?P<rel3>[^>\s\n]*))\s+)?
href\s*=\s*(?:"(?P<url1>[^"]*)"|'(?P<url2>[^']*)'|(?P<url3>[^>\s\n]*))
(\s+rel\s*=\s*(?:"(?P<rel4>[^"]*)"|'(?P<rel5>[^']*)'|(?P<rel6>[^>\s\n]*)))?
""", re.I | re.S | re.X)
    _base = re.compile(r"""<base\s+href\s*=\s*['"]?([^'">]+)""", re.I | re.S)

    def __init__(self, data, url):
        """
        Initialise an instance with the Unicode page contents and the URL they
        came from.
        """
        self.data = data
        self.base_url = self.url = url
        m = self._base.search(self.data)
        if m:
            self.base_url = m.group(1)

    _clean_re = re.compile(r'[^a-z0-9$&+,/:;=?@.#%_\\|-]', re.I)

    @cached_property
    def links(self):
        """
        Return the URLs of all the links on a page together with information
        about their "rel" attribute, for determining which ones to treat as
        downloads and which ones to queue for further scraping.
        """
        def clean(url):
            "Tidy up an URL."
            scheme, netloc, path, params, query, frag = urlparse(url)
            return urlunparse((scheme, netloc, quote(path),
                               params, query, frag))

        result = set()
        for match in self._href.finditer(self.data):
            d = match.groupdict('')
            rel = (d['rel1'] or d['rel2'] or d['rel3'] or
                   d['rel4'] or d['rel5'] or d['rel6'])
            url = d['url1'] or d['url2'] or d['url3']
            url = urljoin(self.base_url, url)
            url = unescape(url)
            url = self._clean_re.sub(lambda m: '%%%2x' % ord(m.group(0)), url)
            result.add((url, rel))
        # We sort the result, hoping to bring the most recent versions
        # to the front
        result = sorted(result, key=lambda t: t[0], reverse=True)
        return result
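
    # Illustrative sketch (hypothetical page content): the anchor regex above
    # yields (url, rel) pairs, e.g.
    #
    #   page = Page('<a href="https://example.com/foo-1.0.tar.gz" '
    #               'rel="download">foo</a>',
    #               'https://example.com/simple/foo/')
    #   page.links   # [('https://example.com/foo-1.0.tar.gz', 'download')]
    #
    # Relative hrefs are resolved against base_url and HTML entities are
    # unescaped before the pairs are collected.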


class SimpleScrapingLocator(Locator):
    """
    A locator which scrapes HTML pages to locate downloads for a distribution.
    This runs multiple threads to do the I/O; performance is at least as good
    as pip's PackageFinder, which works in an analogous fashion.
    """

    # These are used to deal with various Content-Encoding schemes.
    decoders = {
        'deflate': zlib.decompress,
        'gzip': lambda b: gzip.GzipFile(fileobj=BytesIO(b)).read(),
        'none': lambda b: b,
    }

    def __init__(self, url, timeout=None, num_workers=10, **kwargs):
        """
        Initialise an instance.
        :param url: The root URL to use for scraping.
        :param timeout: The timeout, in seconds, to be applied to requests.
                        This defaults to ``None`` (no timeout specified).
        :param num_workers: The number of worker threads to use for I/O.
                            This defaults to 10.
        :param kwargs: Passed to the superclass.
        """
        super(SimpleScrapingLocator, self).__init__(**kwargs)
        self.base_url = ensure_slash(url)
        self.timeout = timeout
        self._page_cache = {}
        self._seen = set()
        self._to_fetch = queue.Queue()
        self._bad_hosts = set()
        self.skip_externals = False
        self.num_workers = num_workers
        self._lock = threading.RLock()
        # See issue #45: we need to be resilient when the locator is used
        # in a thread, e.g. with concurrent.futures. We can't use self._lock
        # as it is for coordinating our internal threads - the ones created
        # in _prepare_threads.
        self._gplock = threading.RLock()

    def _prepare_threads(self):
        """
        Threads are created only when get_project is called, and terminate
        before it returns. They are there primarily to parallelise I/O (i.e.
        fetching web pages).
        """
        self._threads = []
        for i in range(self.num_workers):
            t = threading.Thread(target=self._fetch)
            t.setDaemon(True)
            t.start()
            self._threads.append(t)

    def _wait_threads(self):
        """
        Tell all the threads to terminate (by sending a sentinel value) and
        wait for them to do so.
        """
        # Note that you need two loops, since you can't say which
        # thread will get each sentinel
        for t in self._threads:
            self._to_fetch.put(None)    # sentinel
        for t in self._threads:
            t.join()
        self._threads = []

    def _get_project(self, name):
        result = {'urls': {}, 'digests': {}}
        with self._gplock:
            self.result = result
            self.project_name = name
            url = urljoin(self.base_url, '%s/' % quote(name))
            self._seen.clear()
            self._page_cache.clear()
            self._prepare_threads()
            try:
                logger.debug('Queueing %s', url)
                self._to_fetch.put(url)
                self._to_fetch.join()
            finally:
                self._wait_threads()
            del self.result
        return result

    platform_dependent = re.compile(r'\b(linux-(i\d86|x86_64|arm\w+)|'
                                    r'win(32|-amd64)|macosx-?\d+)\b', re.I)

    def _is_platform_dependent(self, url):
        """
        Does an URL refer to a platform-specific download?
        """
        return self.platform_dependent.search(url)

    def _process_download(self, url):
        """
        See if an URL is a suitable download for a project.

        If it is, register information in the result dictionary (for
        _get_project) about the specific version it's for.

        Note that the return value isn't actually used other than as a boolean
        value.
        """
        if self._is_platform_dependent(url):
            info = None
        else:
            info = self.convert_url_to_download_info(url, self.project_name)
        logger.debug('process_download: %s -> %s', url, info)
        if info:
            with self._lock:    # needed because self.result is shared
                self._update_version_data(self.result, info)
        return info

    def _should_queue(self, link, referrer, rel):
        """
        Determine whether a link URL from a referring page and with a
        particular "rel" attribute should be queued for scraping.
        """
        scheme, netloc, path, _, _, _ = urlparse(link)
        if path.endswith(self.source_extensions + self.binary_extensions +
                         self.excluded_extensions):
            result = False
        elif self.skip_externals and not link.startswith(self.base_url):
            result = False
        elif not referrer.startswith(self.base_url):
            result = False
        elif rel not in ('homepage', 'download'):
            result = False
        elif scheme not in ('http', 'https', 'ftp'):
            result = False
        elif self._is_platform_dependent(link):
            result = False
        else:
            host = netloc.split(':', 1)[0]
            if host.lower() == 'localhost':
                result = False
            else:
                result = True
        logger.debug('should_queue: %s (%s) from %s -> %s', link, rel,
                     referrer, result)
        return result

    def _fetch(self):
        """
        Get a URL to fetch from the work queue, get the HTML page, examine its
        links for download candidates and candidates for further scraping.

        This is a handy method to run in a thread.
        """
        while True:
            url = self._to_fetch.get()
            try:
                if url:
                    page = self.get_page(url)
                    if page is None:    # e.g. after an error
                        continue
                    for link, rel in page.links:
                        if link not in self._seen:
                            self._seen.add(link)
                            if (not self._process_download(link) and
                                self._should_queue(link, url, rel)):
                                logger.debug('Queueing %s from %s', link, url)
                                self._to_fetch.put(link)
            except Exception as e:  # pragma: no cover
                self.errors.put(text_type(e))
            finally:
                # always do this, to avoid hangs :-)
                self._to_fetch.task_done()
            if not url:
                #logger.debug('Sentinel seen, quitting.')
                break

    def get_page(self, url):
        """
        Get the HTML for an URL, possibly from an in-memory cache.

        XXX TODO Note: this cache is never actually cleared. It's assumed that
        the data won't get stale over the lifetime of a locator instance (not
        necessarily true for the default_locator).
        """
        # http://peak.telecommunity.com/DevCenter/EasyInstall#package-index-api
        scheme, netloc, path, _, _, _ = urlparse(url)
        if scheme == 'file' and os.path.isdir(url2pathname(path)):
            url = urljoin(ensure_slash(url), 'index.html')

        if url in self._page_cache:
            result = self._page_cache[url]
            logger.debug('Returning %s from cache: %s', url, result)
        else:
            host = netloc.split(':', 1)[0]
            result = None
            if host in self._bad_hosts:
                logger.debug('Skipping %s due to bad host %s', url, host)
            else:
                req = Request(url, headers={'Accept-encoding': 'identity'})
                try:
                    logger.debug('Fetching %s', url)
                    resp = self.opener.open(req, timeout=self.timeout)
                    logger.debug('Fetched %s', url)
                    headers = resp.info()
                    content_type = headers.get('Content-Type', '')
                    if HTML_CONTENT_TYPE.match(content_type):
                        final_url = resp.geturl()
                        data = resp.read()
                        encoding = headers.get('Content-Encoding')
                        if encoding:
                            decoder = self.decoders[encoding]   # fail if not found
                            data = decoder(data)
                        encoding = 'utf-8'
                        m = CHARSET.search(content_type)
                        if m:
                            encoding = m.group(1)
                        try:
                            data = data.decode(encoding)
                        except UnicodeError:  # pragma: no cover
                            data = data.decode('latin-1')    # fallback
                        result = Page(data, final_url)
                        self._page_cache[final_url] = result
                except HTTPError as e:
                    if e.code != 404:
                        logger.exception('Fetch failed: %s: %s', url, e)
                except URLError as e:  # pragma: no cover
                    logger.exception('Fetch failed: %s: %s', url, e)
                    with self._lock:
                        self._bad_hosts.add(host)
                except Exception as e:  # pragma: no cover
                    logger.exception('Fetch failed: %s: %s', url, e)
                finally:
                    self._page_cache[url] = result   # even if None (failure)
        return result

    _distname_re = re.compile('<a href=[^>]*>([^<]+)<')

    def get_distribution_names(self):
        """
        Return all the distribution names known to this locator.
        """
        result = set()
        page = self.get_page(self.base_url)
        if not page:
            raise DistlibException('Unable to get %s' % self.base_url)
        for match in self._distname_re.finditer(page.data):
            result.add(match.group(1))
        return result

class DirectoryLocator(Locator):
    """
    This class locates distributions in a directory tree.
    """

    def __init__(self, path, **kwargs):
        """
        Initialise an instance.
        :param path: The root of the directory tree to search.
        :param kwargs: Passed to the superclass constructor,
                       except for:
                       * recursive - if True (the default), subdirectories are
                         recursed into. If False, only the top-level directory
                         is searched,
        """
        self.recursive = kwargs.pop('recursive', True)
        super(DirectoryLocator, self).__init__(**kwargs)
        path = os.path.abspath(path)
        if not os.path.isdir(path):  # pragma: no cover
            raise DistlibException('Not a directory: %r' % path)
        self.base_dir = path

    def should_include(self, filename, parent):
        """
        Should a filename be considered as a candidate for a distribution
        archive? As well as the filename, the directory which contains it
        is provided, though not used by the current implementation.
        """
        return filename.endswith(self.downloadable_extensions)

    def _get_project(self, name):
        result = {'urls': {}, 'digests': {}}
        for root, dirs, files in os.walk(self.base_dir):
            for fn in files:
                if self.should_include(fn, root):
                    fn = os.path.join(root, fn)
                    url = urlunparse(('file', '',
                                      pathname2url(os.path.abspath(fn)),
                                      '', '', ''))
                    info = self.convert_url_to_download_info(url, name)
                    if info:
                        self._update_version_data(result, info)
            if not self.recursive:
                break
        return result

    def get_distribution_names(self):
        """
        Return all the distribution names known to this locator.
        """
        result = set()
        for root, dirs, files in os.walk(self.base_dir):
            for fn in files:
                if self.should_include(fn, root):
                    fn = os.path.join(root, fn)
                    url = urlunparse(('file', '',
                                      pathname2url(os.path.abspath(fn)),
                                      '', '', ''))
                    info = self.convert_url_to_download_info(url, None)
                    if info:
                        result.add(info['name'])
            if not self.recursive:
                break
        return result
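
    # Illustrative usage sketch (hypothetical path): locate archives in a local
    # directory tree instead of an index.
    #
    #   locator = DirectoryLocator('/path/to/dists', recursive=False)
    #   locator.get_project('foo')           # versions found under the directory
    #   locator.get_distribution_names()     # e.g. {'foo', 'bar'}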

class JSONLocator(Locator):
    """
    This locator uses special extended metadata (not available on PyPI) and is
    the basis of performant dependency resolution in distlib. Other locators
    require archive downloads before dependencies can be determined! As you
    might imagine, that can be slow.
    """
    def get_distribution_names(self):
        """
        Return all the distribution names known to this locator.
        """
        raise NotImplementedError('Not available from this locator')

    def _get_project(self, name):
        result = {'urls': {}, 'digests': {}}
        data = get_project_data(name)
        if data:
            for info in data.get('files', []):
                if info['ptype'] != 'sdist' or info['pyversion'] != 'source':
                    continue
                # We don't store summary in project metadata as it makes
                # the data bigger for no benefit during dependency
                # resolution
                dist = make_dist(data['name'], info['version'],
                                 summary=data.get('summary',
                                                  'Placeholder for summary'),
                                 scheme=self.scheme)
                md = dist.metadata
                md.source_url = info['url']
                # TODO SHA256 digest
                if 'digest' in info and info['digest']:
                    dist.digest = ('md5', info['digest'])
                md.dependencies = info.get('requirements', {})
                dist.exports = info.get('exports', {})
                result[dist.version] = dist
                result['urls'].setdefault(dist.version, set()).add(info['url'])
        return result

class DistPathLocator(Locator):
    """
    This locator finds installed distributions in a path. It can be useful for
    adding to an :class:`AggregatingLocator`.
    """
    def __init__(self, distpath, **kwargs):
        """
        Initialise an instance.

        :param distpath: A :class:`DistributionPath` instance to search.
        """
        super(DistPathLocator, self).__init__(**kwargs)
        assert isinstance(distpath, DistributionPath)
        self.distpath = distpath

    def _get_project(self, name):
        dist = self.distpath.get_distribution(name)
        if dist is None:
            result = {'urls': {}, 'digests': {}}
        else:
            result = {
                dist.version: dist,
                'urls': {dist.version: set([dist.source_url])},
                'digests': {dist.version: set([None])}
            }
        return result


class AggregatingLocator(Locator):
    """
    This class allows you to chain and/or merge a list of locators.
    """
    def __init__(self, *locators, **kwargs):
        """
        Initialise an instance.

        :param locators: The list of locators to search.
        :param kwargs: Passed to the superclass constructor,
                       except for:
                       * merge - if False (the default), the first successful
                         search from any of the locators is returned. If True,
                         the results from all locators are merged (this can be
                         slow).
        """
        self.merge = kwargs.pop('merge', False)
        self.locators = locators
        super(AggregatingLocator, self).__init__(**kwargs)

    def clear_cache(self):
        super(AggregatingLocator, self).clear_cache()
        for locator in self.locators:
            locator.clear_cache()

    def _set_scheme(self, value):
        self._scheme = value
        for locator in self.locators:
            locator.scheme = value

    scheme = property(Locator.scheme.fget, _set_scheme)

    def _get_project(self, name):
        result = {}
        for locator in self.locators:
            d = locator.get_project(name)
            if d:
                if self.merge:
                    files = result.get('urls', {})
                    digests = result.get('digests', {})
                    # next line could overwrite result['urls'], result['digests']
                    result.update(d)
                    df = result.get('urls')
                    if files and df:
                        for k, v in files.items():
                            if k in df:
                                df[k] |= v
                            else:
                                df[k] = v
                    dd = result.get('digests')
                    if digests and dd:
                        dd.update(digests)
                else:
                    # See issue #18. If any dists are found and we're looking
                    # for specific constraints, we only return something if
                    # a match is found. For example, if a DirectoryLocator
                    # returns just foo (1.0) while we're looking for
                    # foo (>= 2.0), we'll pretend there was nothing there so
                    # that subsequent locators can be queried. Otherwise we
                    # would just return foo (1.0) which would then lead to a
                    # failure to find foo (>= 2.0), because other locators
                    # weren't searched. Note that this only matters when
                    # merge=False.
                    if self.matcher is None:
                        found = True
                    else:
                        found = False
                        for k in d:
                            if self.matcher.match(k):
                                found = True
                                break
                    if found:
                        result = d
                        break
        return result

    def get_distribution_names(self):
        """
        Return all the distribution names known to this locator.
        """
        result = set()
        for locator in self.locators:
            try:
                result |= locator.get_distribution_names()
            except NotImplementedError:
                pass
        return result
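
    # Illustrative sketch (hypothetical path; network access assumed for the
    # scraping locator): chain a local directory with a simple index. With
    # merge=False (the default) the first locator with a match wins; with
    # merge=True results from all locators are combined.
    #
    #   locator = AggregatingLocator(
    #       DirectoryLocator('/path/to/local/dists'),
    #       SimpleScrapingLocator('https://pypi.python.org/simple/', timeout=3.0),
    #       scheme='legacy')
    #   dist = locator.locate('foo (>= 1.0)')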


# We use a legacy scheme simply because most of the dists on PyPI use legacy
# versions which don't conform to PEP 426 / PEP 440.
default_locator = AggregatingLocator(
                    JSONLocator(),
                    SimpleScrapingLocator('https://pypi.python.org/simple/',
                                          timeout=3.0),
                    scheme='legacy')

locate = default_locator.locate
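
# Illustrative usage of the module-level convenience above (network access and
# the example project name are assumptions; assumes the stand-alone distlib
# package is importable):
#
#   from distlib.locators import locate
#   dist = locate('requests (>= 2.0)')
#   if dist is not None:
#       print(dist.name_and_version, dist.source_url)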

NAME_VERSION_RE = re.compile(r'(?P[\w-]+)\s*'
                             r'\(\s*(==\s*)?(?P[^)]+)\)$')

class DependencyFinder(object):
    """
    Locate dependencies for distributions.
    """

    def __init__(self, locator=None):
        """
        Initialise an instance, using the specified locator
        to locate distributions.
        """
        self.locator = locator or default_locator
        self.scheme = get_scheme(self.locator.scheme)

    def add_distribution(self, dist):
        """
        Add a distribution to the finder. This will update internal information
        about who provides what.
        :param dist: The distribution to add.
        """
        logger.debug('adding distribution %s', dist)
        name = dist.key
        self.dists_by_name[name] = dist
        self.dists[(name, dist.version)] = dist
        for p in dist.provides:
            name, version = parse_name_and_version(p)
            logger.debug('Add to provided: %s, %s, %s', name, version, dist)
            self.provided.setdefault(name, set()).add((version, dist))

    def remove_distribution(self, dist):
        """
        Remove a distribution from the finder. This will update internal
        information about who provides what.
        :param dist: The distribution to remove.
        """
        logger.debug('removing distribution %s', dist)
        name = dist.key
        del self.dists_by_name[name]
        del self.dists[(name, dist.version)]
        for p in dist.provides:
            name, version = parse_name_and_version(p)
            logger.debug('Remove from provided: %s, %s, %s', name, version, dist)
            s = self.provided[name]
            s.remove((version, dist))
            if not s:
                del self.provided[name]

    def get_matcher(self, reqt):
        """
        Get a version matcher for a requirement.
        :param reqt: The requirement
        :type reqt: str
        :return: A version matcher (an instance of
                 :class:`distlib.version.Matcher`).
        """
        try:
            matcher = self.scheme.matcher(reqt)
        except UnsupportedVersionError:  # pragma: no cover
            # XXX compat-mode if cannot read the version
            name = reqt.split()[0]
            matcher = self.scheme.matcher(name)
        return matcher

    def find_providers(self, reqt):
        """
        Find the distributions which can fulfill a requirement.

        :param reqt: The requirement.
        :type reqt: str
        :return: A set of distributions which can fulfill the requirement.
        """
        matcher = self.get_matcher(reqt)
        name = matcher.key   # case-insensitive
        result = set()
        provided = self.provided
        if name in provided:
            for version, provider in provided[name]:
                try:
                    match = matcher.match(version)
                except UnsupportedVersionError:
                    match = False

                if match:
                    result.add(provider)
                    break
        return result

    def try_to_replace(self, provider, other, problems):
        """
        Attempt to replace one provider with another. This is typically used
        when resolving dependencies from multiple sources, e.g. A requires
        (B >= 1.0) while C requires (B >= 1.1).

        For successful replacement, ``provider`` must meet all the requirements
        which ``other`` fulfills.

        :param provider: The provider we are trying to replace with.
        :param other: The provider we're trying to replace.
        :param problems: If False is returned, this will contain what
                         problems prevented replacement. This is currently
                         a tuple of the literal string 'cantreplace',
                         ``provider``, ``other``  and the set of requirements
                         that ``provider`` couldn't fulfill.
        :return: True if we can replace ``other`` with ``provider``, else
                 False.
        """
        rlist = self.reqts[other]
        unmatched = set()
        for s in rlist:
            matcher = self.get_matcher(s)
            if not matcher.match(provider.version):
                unmatched.add(s)
        if unmatched:
            # can't replace other with provider
            problems.add(('cantreplace', provider, other,
                          frozenset(unmatched)))
            result = False
        else:
            # can replace other with provider
            self.remove_distribution(other)
            del self.reqts[other]
            for s in rlist:
                self.reqts.setdefault(provider, set()).add(s)
            self.add_distribution(provider)
            result = True
        return result

    def find(self, requirement, meta_extras=None, prereleases=False):
        """
        Find a distribution and all distributions it depends on.

        :param requirement: The requirement specifying the distribution to
                            find, or a Distribution instance.
        :param meta_extras: A list of meta extras such as :test:, :build: and
                            so on.
        :param prereleases: If ``True``, allow pre-release versions to be
                            returned - otherwise, don't return prereleases
                            unless they're all that's available.

        Return a set of :class:`Distribution` instances and a set of
        problems.

        The distributions returned should be such that they have the
        :attr:`required` attribute set to ``True`` if they were
        from the ``requirement`` passed to ``find()``, and they have the
        :attr:`build_time_dependency` attribute set to ``True`` unless they
        are post-installation dependencies of the ``requirement``.

        The problems should be a tuple consisting of the string
        ``'unsatisfied'`` and the requirement which couldn't be satisfied
        by any distribution known to the locator.
        """

        self.provided = {}
        self.dists = {}
        self.dists_by_name = {}
        self.reqts = {}

        meta_extras = set(meta_extras or [])
        if ':*:' in meta_extras:
            meta_extras.remove(':*:')
            # :meta: and :run: are implicitly included
            meta_extras |= set([':test:', ':build:', ':dev:'])

        if isinstance(requirement, Distribution):
            dist = odist = requirement
            logger.debug('passed %s as requirement', odist)
        else:
            dist = odist = self.locator.locate(requirement,
                                               prereleases=prereleases)
            if dist is None:
                raise DistlibException('Unable to locate %r' % requirement)
            logger.debug('located %s', odist)
        dist.requested = True
        problems = set()
        todo = set([dist])
        install_dists = set([odist])
        while todo:
            dist = todo.pop()
            name = dist.key     # case-insensitive
            if name not in self.dists_by_name:
                self.add_distribution(dist)
            else:
                #import pdb; pdb.set_trace()
                other = self.dists_by_name[name]
                if other != dist:
                    self.try_to_replace(dist, other, problems)

            ireqts = dist.run_requires | dist.meta_requires
            sreqts = dist.build_requires
            ereqts = set()
            if dist in install_dists:
                for key in ('test', 'build', 'dev'):
                    e = ':%s:' % key
                    if e in meta_extras:
                        ereqts |= getattr(dist, '%s_requires' % key)
            all_reqts = ireqts | sreqts | ereqts
            for r in all_reqts:
                providers = self.find_providers(r)
                if not providers:
                    logger.debug('No providers found for %r', r)
                    provider = self.locator.locate(r, prereleases=prereleases)
                    # If no provider is found and we didn't consider
                    # prereleases, consider them now.
                    if provider is None and not prereleases:
                        provider = self.locator.locate(r, prereleases=True)
                    if provider is None:
                        logger.debug('Cannot satisfy %r', r)
                        problems.add(('unsatisfied', r))
                    else:
                        n, v = provider.key, provider.version
                        if (n, v) not in self.dists:
                            todo.add(provider)
                        providers.add(provider)
                        if r in ireqts and dist in install_dists:
                            install_dists.add(provider)
                            logger.debug('Adding %s to install_dists',
                                         provider.name_and_version)
                for p in providers:
                    name = p.key
                    if name not in self.dists_by_name:
                        self.reqts.setdefault(p, set()).add(r)
                    else:
                        other = self.dists_by_name[name]
                        if other != p:
                            # see if other can be replaced by p
                            self.try_to_replace(p, other, problems)

        dists = set(self.dists.values())
        for dist in dists:
            dist.build_time_dependency = dist not in install_dists
            if dist.build_time_dependency:
                logger.debug('%s is a build-time dependency only.',
                             dist.name_and_version)
        logger.debug('find done for %s', odist)
        return dists, problems
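
    # Illustrative sketch (network access and the example requirement are
    # assumptions): resolve a distribution and its dependency closure.
    #
    #   finder = DependencyFinder()               # uses default_locator
    #   dists, problems = finder.find('foo (>= 1.0)')
    #   for d in dists:
    #       print(d.name_and_version, d.build_time_dependency)
    #   # 'problems' holds tuples such as ('unsatisfied', 'bar (>= 2.0)')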
site-packages/pip/_vendor/distlib/version.py
# -*- coding: utf-8 -*-
#
# Copyright (C) 2012-2016 The Python Software Foundation.
# See LICENSE.txt and CONTRIBUTORS.txt.
#
"""
Implementation of a flexible versioning scheme providing support for PEP-440,
setuptools-compatible and semantic versioning.
"""

import logging
import re

from .compat import string_types

__all__ = ['NormalizedVersion', 'NormalizedMatcher',
           'LegacyVersion', 'LegacyMatcher',
           'SemanticVersion', 'SemanticMatcher',
           'UnsupportedVersionError', 'get_scheme']

logger = logging.getLogger(__name__)


class UnsupportedVersionError(ValueError):
    """This is an unsupported version."""
    pass


class Version(object):
    def __init__(self, s):
        self._string = s = s.strip()
        self._parts = parts = self.parse(s)
        assert isinstance(parts, tuple)
        assert len(parts) > 0

    def parse(self, s):
        raise NotImplementedError('please implement in a subclass')

    def _check_compatible(self, other):
        if type(self) != type(other):
            raise TypeError('cannot compare %r and %r' % (self, other))

    def __eq__(self, other):
        self._check_compatible(other)
        return self._parts == other._parts

    def __ne__(self, other):
        return not self.__eq__(other)

    def __lt__(self, other):
        self._check_compatible(other)
        return self._parts < other._parts

    def __gt__(self, other):
        return not (self.__lt__(other) or self.__eq__(other))

    def __le__(self, other):
        return self.__lt__(other) or self.__eq__(other)

    def __ge__(self, other):
        return self.__gt__(other) or self.__eq__(other)

    # See http://docs.python.org/reference/datamodel#object.__hash__
    def __hash__(self):
        return hash(self._parts)

    def __repr__(self):
        return "%s('%s')" % (self.__class__.__name__, self._string)

    def __str__(self):
        return self._string

    @property
    def is_prerelease(self):
        raise NotImplementedError('Please implement in subclasses.')


class Matcher(object):
    version_class = None

    dist_re = re.compile(r"^(\w[\s\w'.-]*)(\((.*)\))?")
    comp_re = re.compile(r'^(<=|>=|<|>|!=|={2,3}|~=)?\s*([^\s,]+)$')
    num_re = re.compile(r'^\d+(\.\d+)*$')

    # value is either a callable or the name of a method
    _operators = {
        '<': lambda v, c, p: v < c,
        '>': lambda v, c, p: v > c,
        '<=': lambda v, c, p: v == c or v < c,
        '>=': lambda v, c, p: v == c or v > c,
        '==': lambda v, c, p: v == c,
        '===': lambda v, c, p: v == c,
        # by default, compatible => >=.
        '~=': lambda v, c, p: v == c or v > c,
        '!=': lambda v, c, p: v != c,
    }

    def __init__(self, s):
        if self.version_class is None:
            raise ValueError('Please specify a version class')
        self._string = s = s.strip()
        m = self.dist_re.match(s)
        if not m:
            raise ValueError('Not valid: %r' % s)
        groups = m.groups('')
        self.name = groups[0].strip()
        self.key = self.name.lower()    # for case-insensitive comparisons
        clist = []
        if groups[2]:
            constraints = [c.strip() for c in groups[2].split(',')]
            for c in constraints:
                m = self.comp_re.match(c)
                if not m:
                    raise ValueError('Invalid %r in %r' % (c, s))
                groups = m.groups()
                op = groups[0] or '~='
                s = groups[1]
                if s.endswith('.*'):
                    if op not in ('==', '!='):
                        raise ValueError('\'.*\' not allowed for '
                                         '%r constraints' % op)
                    # Could be a partial version (e.g. for '2.*') which
                    # won't parse as a version, so keep it as a string
                    vn, prefix = s[:-2], True
                    if not self.num_re.match(vn):
                        # Just to check that vn is a valid version
                        self.version_class(vn)
                else:
                    # Should parse as a version, so we can create an
                    # instance for the comparison
                    vn, prefix = self.version_class(s), False
                clist.append((op, vn, prefix))
        self._parts = tuple(clist)

    def match(self, version):
        """
        Check if the provided version matches the constraints.

        :param version: The version to match against this instance.
        :type version: String or :class:`Version` instance.
        """
        if isinstance(version, string_types):
            version = self.version_class(version)
        for operator, constraint, prefix in self._parts:
            f = self._operators.get(operator)
            if isinstance(f, string_types):
                f = getattr(self, f)
            if not f:
                msg = ('%r not implemented '
                       'for %s' % (operator, self.__class__.__name__))
                raise NotImplementedError(msg)
            if not f(version, constraint, prefix):
                return False
        return True

    @property
    def exact_version(self):
        result = None
        if len(self._parts) == 1 and self._parts[0][0] in ('==', '==='):
            result = self._parts[0][1]
        return result

    def _check_compatible(self, other):
        if type(self) != type(other) or self.name != other.name:
            raise TypeError('cannot compare %s and %s' % (self, other))

    def __eq__(self, other):
        self._check_compatible(other)
        return self.key == other.key and self._parts == other._parts

    def __ne__(self, other):
        return not self.__eq__(other)

    # See http://docs.python.org/reference/datamodel#object.__hash__
    def __hash__(self):
        return hash(self.key) + hash(self._parts)

    def __repr__(self):
        return "%s(%r)" % (self.__class__.__name__, self._string)

    def __str__(self):
        return self._string


PEP440_VERSION_RE = re.compile(r'^v?(\d+!)?(\d+(\.\d+)*)((a|b|c|rc)(\d+))?'
                               r'(\.(post)(\d+))?(\.(dev)(\d+))?'
                               r'(\+([a-zA-Z\d]+(\.[a-zA-Z\d]+)?))?$')


def _pep_440_key(s):
    s = s.strip()
    m = PEP440_VERSION_RE.match(s)
    if not m:
        raise UnsupportedVersionError('Not a valid version: %s' % s)
    groups = m.groups()
    nums = tuple(int(v) for v in groups[1].split('.'))
    while len(nums) > 1 and nums[-1] == 0:
        nums = nums[:-1]

    if not groups[0]:
        epoch = 0
    else:
        epoch = int(groups[0])
    pre = groups[4:6]
    post = groups[7:9]
    dev = groups[10:12]
    local = groups[13]
    if pre == (None, None):
        pre = ()
    else:
        pre = pre[0], int(pre[1])
    if post == (None, None):
        post = ()
    else:
        post = post[0], int(post[1])
    if dev == (None, None):
        dev = ()
    else:
        dev = dev[0], int(dev[1])
    if local is None:
        local = ()
    else:
        parts = []
        for part in local.split('.'):
            # to ensure that numeric compares as > lexicographic, avoid
            # comparing them directly, but encode a tuple which ensures
            # correct sorting
            if part.isdigit():
                part = (1, int(part))
            else:
                part = (0, part)
            parts.append(part)
        local = tuple(parts)
    if not pre:
        # either before pre-release, or final release and after
        if not post and dev:
            # before pre-release
            pre = ('a', -1)     # to sort before a0
        else:
            pre = ('z',)        # to sort after all pre-releases
    # now look at the state of post and dev.
    if not post:
        post = ('_',)   # sort before 'a'
    if not dev:
        dev = ('final',)

    #print('%s -> %s' % (s, m.groups()))
    return epoch, nums, pre, post, dev, local


_normalized_key = _pep_440_key
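
# Illustrative ordering sketch: the key tuples sort dev releases before
# pre-releases, pre-releases before the final release, and post releases
# after it, e.g.
#
#   assert (_pep_440_key('1.0.dev1') < _pep_440_key('1.0a1')
#           < _pep_440_key('1.0') < _pep_440_key('1.0.post1'))
#
# This is the ordering NormalizedVersion relies on for comparisons.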


class NormalizedVersion(Version):
    """A rational version.

    Good:
        1.2         # equivalent to "1.2.0"
        1.2.0
        1.2a1
        1.2.3a2
        1.2.3b1
        1.2.3c1
        1.2.3.4
        TODO: fill this out

    Bad:
        1           # minimum two numbers
        1.2a        # release level must have a release serial
        1.2.3b
    """
    def parse(self, s):
        result = _normalized_key(s)
        # _normalized_key loses trailing zeroes in the release
        # clause, since that's needed to ensure that X.Y == X.Y.0 == X.Y.0.0
        # However, PEP 440 prefix matching needs it: for example,
        # (~= 1.4.5.0) matches differently to (~= 1.4.5.0.0).
        m = PEP440_VERSION_RE.match(s)      # must succeed
        groups = m.groups()
        self._release_clause = tuple(int(v) for v in groups[1].split('.'))
        return result

    PREREL_TAGS = set(['a', 'b', 'c', 'rc', 'dev'])

    @property
    def is_prerelease(self):
        return any(t[0] in self.PREREL_TAGS for t in self._parts if t)


def _match_prefix(x, y):
    x = str(x)
    y = str(y)
    if x == y:
        return True
    if not x.startswith(y):
        return False
    n = len(y)
    return x[n] == '.'
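
# For example, _match_prefix('1.4.5', '1.4') and _match_prefix('1.4', '1.4')
# are True, while _match_prefix('1.45', '1.4') is False: the prefix must end
# on a release-segment boundary.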


class NormalizedMatcher(Matcher):
    version_class = NormalizedVersion

    # value is either a callable or the name of a method
    _operators = {
        '~=': '_match_compatible',
        '<': '_match_lt',
        '>': '_match_gt',
        '<=': '_match_le',
        '>=': '_match_ge',
        '==': '_match_eq',
        '===': '_match_arbitrary',
        '!=': '_match_ne',
    }

    def _adjust_local(self, version, constraint, prefix):
        if prefix:
            strip_local = '+' not in constraint and version._parts[-1]
        else:
            # both constraint and version are
            # NormalizedVersion instances.
            # If constraint does not have a local component,
            # ensure the version doesn't, either.
            strip_local = not constraint._parts[-1] and version._parts[-1]
        if strip_local:
            s = version._string.split('+', 1)[0]
            version = self.version_class(s)
        return version, constraint

    def _match_lt(self, version, constraint, prefix):
        version, constraint = self._adjust_local(version, constraint, prefix)
        if version >= constraint:
            return False
        release_clause = constraint._release_clause
        pfx = '.'.join([str(i) for i in release_clause])
        return not _match_prefix(version, pfx)

    def _match_gt(self, version, constraint, prefix):
        version, constraint = self._adjust_local(version, constraint, prefix)
        if version <= constraint:
            return False
        release_clause = constraint._release_clause
        pfx = '.'.join([str(i) for i in release_clause])
        return not _match_prefix(version, pfx)

    def _match_le(self, version, constraint, prefix):
        version, constraint = self._adjust_local(version, constraint, prefix)
        return version <= constraint

    def _match_ge(self, version, constraint, prefix):
        version, constraint = self._adjust_local(version, constraint, prefix)
        return version >= constraint

    def _match_eq(self, version, constraint, prefix):
        version, constraint = self._adjust_local(version, constraint, prefix)
        if not prefix:
            result = (version == constraint)
        else:
            result = _match_prefix(version, constraint)
        return result

    def _match_arbitrary(self, version, constraint, prefix):
        return str(version) == str(constraint)

    def _match_ne(self, version, constraint, prefix):
        version, constraint = self._adjust_local(version, constraint, prefix)
        if not prefix:
            result = (version != constraint)
        else:
            result = not _match_prefix(version, constraint)
        return result

    def _match_compatible(self, version, constraint, prefix):
        version, constraint = self._adjust_local(version, constraint, prefix)
        if version == constraint:
            return True
        if version < constraint:
            return False
#        if not prefix:
#            return True
        release_clause = constraint._release_clause
        if len(release_clause) > 1:
            release_clause = release_clause[:-1]
        pfx = '.'.join([str(i) for i in release_clause])
        return _match_prefix(version, pfx)
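
    # Illustrative sketch of "compatible release" matching with the operator
    # table above (example values, not captured output):
    #
    #   m = NormalizedMatcher('foo (~= 1.4.2)')
    #   m.match('1.4.5')    # True  - within the 1.4 series and >= 1.4.2
    #   m.match('1.4.1')    # False - below the constraint
    #   m.match('1.5.0')    # False - outside the 1.4 series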

_REPLACEMENTS = (
    (re.compile('[.+-]$'), ''),                     # remove trailing puncts
    (re.compile(r'^[.](\d)'), r'0.\1'),             # .N -> 0.N at start
    (re.compile('^[.-]'), ''),                      # remove leading puncts
    (re.compile(r'^\((.*)\)$'), r'\1'),             # remove parentheses
    (re.compile(r'^v(ersion)?\s*(\d+)'), r'\2'),    # remove leading v(ersion)
    (re.compile(r'^r(ev)?\s*(\d+)'), r'\2'),        # remove leading r(ev)
    (re.compile('[.]{2,}'), '.'),                   # multiple runs of '.'
    (re.compile(r'\b(alfa|apha)\b'), 'alpha'),      # misspelt alpha
    (re.compile(r'\b(pre-alpha|prealpha)\b'),
                'pre.alpha'),                       # standardise
    (re.compile(r'\(beta\)$'), 'beta'),             # remove parentheses
)

_SUFFIX_REPLACEMENTS = (
    (re.compile('^[:~._+-]+'), ''),                   # remove leading puncts
    (re.compile('[,*")([\]]'), ''),                   # remove unwanted chars
    (re.compile('[~:+_ -]'), '.'),                    # replace illegal chars
    (re.compile('[.]{2,}'), '.'),                   # multiple runs of '.'
    (re.compile(r'\.$'), ''),                       # trailing '.'
)

_NUMERIC_PREFIX = re.compile(r'(\d+(\.\d+)*)')


def _suggest_semantic_version(s):
    """
    Try to suggest a semantic form for a version for which
    _suggest_normalized_version couldn't come up with anything.
    """
    result = s.strip().lower()
    for pat, repl in _REPLACEMENTS:
        result = pat.sub(repl, result)
    if not result:
        result = '0.0.0'

    # Now look for numeric prefix, and separate it out from
    # the rest.
    m = _NUMERIC_PREFIX.match(result)
    if not m:
        prefix = '0.0.0'
        suffix = result
    else:
        prefix = m.groups()[0].split('.')
        prefix = [int(i) for i in prefix]
        while len(prefix) < 3:
            prefix.append(0)
        if len(prefix) == 3:
            suffix = result[m.end():]
        else:
            suffix = '.'.join([str(i) for i in prefix[3:]]) + result[m.end():]
            prefix = prefix[:3]
        prefix = '.'.join([str(i) for i in prefix])
        suffix = suffix.strip()
    if suffix:
        # massage the suffix.
        for pat, repl in _SUFFIX_REPLACEMENTS:
            suffix = pat.sub(repl, suffix)

    if not suffix:
        result = prefix
    else:
        sep = '-' if 'dev' in suffix else '+'
        result = prefix + sep + suffix
    if not is_semver(result):
        result = None
    return result
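
# Editorial note (not part of the original source): illustrative expectations
# for _suggest_semantic_version, traced by hand through the replacement rules
# above; treat them as assumptions rather than documented behaviour:
#
#     _suggest_semantic_version('v1.0')        # expected: '1.0.0'
#     _suggest_semantic_version('1.0 (beta)')  # expected: '1.0.0+beta'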


def _suggest_normalized_version(s):
    """Suggest a normalized version close to the given version string.

    If you have a version string that isn't rational (i.e. NormalizedVersion
    doesn't like it) then you might be able to get an equivalent (or close)
    rational version from this function.

    This does a number of simple normalizations to the given string, based
    on observation of versions currently in use on PyPI. Given a dump of
    those versions during PyCon 2009, 4287 of them:

    - 2312 (53.93%) matched NormalizedVersion without change
    - 3474 (81.04%) matched when using this suggestion method

    @param s {str} An irrational version string.
    @returns A rational version string, or None if one could not be determined.
    """
    try:
        _normalized_key(s)
        return s   # already rational
    except UnsupportedVersionError:
        pass

    rs = s.lower()

    # part of this could use maketrans
    for orig, repl in (('-alpha', 'a'), ('-beta', 'b'), ('alpha', 'a'),
                       ('beta', 'b'), ('rc', 'c'), ('-final', ''),
                       ('-pre', 'c'),
                       ('-release', ''), ('.release', ''), ('-stable', ''),
                       ('+', '.'), ('_', '.'), (' ', ''), ('.final', ''),
                       ('final', '')):
        rs = rs.replace(orig, repl)

    # if something ends with dev or pre, we add a 0
    rs = re.sub(r"pre$", r"pre0", rs)
    rs = re.sub(r"dev$", r"dev0", rs)

    # if we have something like "b-2" or "a.2" at the end of the
    # version, that is probably beta, alpha, etc
    # let's remove the dash or dot
    rs = re.sub(r"([abc]|rc)[\-\.](\d+)$", r"\1\2", rs)

    # 1.0-dev-r371 -> 1.0.dev371
    # 0.1-dev-r79 -> 0.1.dev79
    rs = re.sub(r"[\-\.](dev)[\-\.]?r?(\d+)$", r".\1\2", rs)

    # Clean: 2.0.a.3, 2.0.b1, 0.9.0~c1
    rs = re.sub(r"[.~]?([abc])\.?", r"\1", rs)

    # Clean: v0.3, v1.0
    if rs.startswith('v'):
        rs = rs[1:]

    # Clean leading '0's on numbers.
    #TODO: unintended side-effect on, e.g., "2003.05.09"
    # PyPI stats: 77 (~2%) better
    rs = re.sub(r"\b0+(\d+)(?!\d)", r"\1", rs)

    # Clean a/b/c with no version. E.g. "1.0a" -> "1.0a0". Setuptools infers
    # zero.
    # PyPI stats: 245 (7.56%) better
    rs = re.sub(r"(\d+[abc])$", r"\g<1>0", rs)

    # the 'dev-rNNN' tag is a dev tag
    rs = re.sub(r"\.?(dev-r|dev\.r)\.?(\d+)$", r".dev\2", rs)

    # clean the - when used as a pre delimiter
    rs = re.sub(r"-(a|b|c)(\d+)$", r"\1\2", rs)

    # a terminal "dev" or "devel" can be changed into ".dev0"
    rs = re.sub(r"[\.\-](dev|devel)$", r".dev0", rs)

    # a terminal "dev" can be changed into ".dev0"
    rs = re.sub(r"(?![\.\-])dev$", r".dev0", rs)

    # a terminal "final" or "stable" can be removed
    rs = re.sub(r"(final|stable)$", "", rs)

    # The 'r' and the '-' tags are post release tags
    #   0.4a1.r10       ->  0.4a1.post10
    #   0.9.33-17222    ->  0.9.33.post17222
    #   0.9.33-r17222   ->  0.9.33.post17222
    rs = re.sub(r"\.?(r|-|-r)\.?(\d+)$", r".post\2", rs)

    # Clean 'r' instead of 'dev' usage:
    #   0.9.33+r17222   ->  0.9.33.dev17222
    #   1.0dev123       ->  1.0.dev123
    #   1.0.git123      ->  1.0.dev123
    #   1.0.bzr123      ->  1.0.dev123
    #   0.1a0dev.123    ->  0.1a0.dev123
    # PyPI stats:  ~150 (~4%) better
    rs = re.sub(r"\.?(dev|git|bzr)\.?(\d+)$", r".dev\2", rs)

    # Clean '.pre' (normalized from '-pre' above) instead of 'c' usage:
    #   0.2.pre1        ->  0.2c1
    #   0.2-c1         ->  0.2c1
    #   1.0preview123   ->  1.0c123
    # PyPI stats: ~21 (0.62%) better
    rs = re.sub(r"\.?(pre|preview|-c)(\d+)$", r"c\g<2>", rs)

    # Tcl/Tk uses "px" for their post release markers
    rs = re.sub(r"p(\d+)$", r".post\1", rs)

    try:
        _normalized_key(rs)
    except UnsupportedVersionError:
        rs = None
    return rs
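
# Editorial note (not part of the original source): expected results for two
# of the cases already documented in the comments above; assumptions, not
# guarantees:
#
#     _suggest_normalized_version('1.0-dev-r371')   # expected: '1.0.dev371'
#     _suggest_normalized_version('0.9.33-r17222')  # expected: '0.9.33.post17222'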

#
#   Legacy version processing (distribute-compatible)
#

_VERSION_PART = re.compile(r'([a-z]+|\d+|[\.-])', re.I)
_VERSION_REPLACE = {
    'pre': 'c',
    'preview': 'c',
    '-': 'final-',
    'rc': 'c',
    'dev': '@',
    '': None,
    '.': None,
}


def _legacy_key(s):
    def get_parts(s):
        result = []
        for p in _VERSION_PART.split(s.lower()):
            p = _VERSION_REPLACE.get(p, p)
            if p:
                if '0' <= p[:1] <= '9':
                    p = p.zfill(8)
                else:
                    p = '*' + p
                result.append(p)
        result.append('*final')
        return result

    result = []
    for p in get_parts(s):
        if p.startswith('*'):
            if p < '*final':
                while result and result[-1] == '*final-':
                    result.pop()
            while result and result[-1] == '00000000':
                result.pop()
        result.append(p)
    return tuple(result)
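
# Editorial note (not part of the original source): the tuples built above are
# only meant to be compared with one another.  Because pre-release markers are
# prefixed with '*' (which sorts before the zero-padded digits), pre-releases
# order before the corresponding final release -- an assumption, e.g.:
#
#     _legacy_key('1.0b1') < _legacy_key('1.0')   # expected: True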


class LegacyVersion(Version):
    def parse(self, s):
        return _legacy_key(s)

    @property
    def is_prerelease(self):
        result = False
        for x in self._parts:
            if (isinstance(x, string_types) and x.startswith('*') and
                x < '*final'):
                result = True
                break
        return result


class LegacyMatcher(Matcher):
    version_class = LegacyVersion

    _operators = dict(Matcher._operators)
    _operators['~='] = '_match_compatible'

    numeric_re = re.compile(r'^(\d+(\.\d+)*)')

    def _match_compatible(self, version, constraint, prefix):
        if version < constraint:
            return False
        m = self.numeric_re.match(str(constraint))
        if not m:
            logger.warning('Cannot compute compatible match for version %s '
                           'and constraint %s', version, constraint)
            return True
        s = m.groups()[0]
        if '.' in s:
            s = s.rsplit('.', 1)[0]
        return _match_prefix(version, s)
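
    # Editorial example (not part of the original source): under this legacy
    # fallback only the numeric prefix of the constraint, minus its last
    # component, is used for the prefix match, so:
    #
    #     LegacyMatcher('foo (~= 1.4.2)').match('1.4.7')   # expected: True
    #     LegacyMatcher('foo (~= 1.4.2)').match('1.5.0')   # expected: False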

#
#   Semantic versioning
#

_SEMVER_RE = re.compile(r'^(\d+)\.(\d+)\.(\d+)'
                        r'(-[a-z0-9]+(\.[a-z0-9-]+)*)?'
                        r'(\+[a-z0-9]+(\.[a-z0-9-]+)*)?$', re.I)


def is_semver(s):
    return _SEMVER_RE.match(s)


def _semantic_key(s):
    def make_tuple(s, absent):
        if s is None:
            result = (absent,)
        else:
            parts = s[1:].split('.')
            # We can't compare ints and strings on Python 3, so fudge it
            # by zero-filling numeric values to simulate a numeric comparison
            result = tuple([p.zfill(8) if p.isdigit() else p for p in parts])
        return result

    m = is_semver(s)
    if not m:
        raise UnsupportedVersionError(s)
    groups = m.groups()
    major, minor, patch = [int(i) for i in groups[:3]]
    # choose the '|' and '*' so that versions sort correctly
    pre, build = make_tuple(groups[3], '|'), make_tuple(groups[5], '*')
    return (major, minor, patch), pre, build
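
# Editorial note (not part of the original source): because '|' sorts after
# any pre-release identifier and '*' stands in for an absent build label, the
# keys compare the way semantic versioning expects -- an assumption, e.g.:
#
#     _semantic_key('1.0.0-alpha') < _semantic_key('1.0.0')   # expected: True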


class SemanticVersion(Version):
    def parse(self, s):
        return _semantic_key(s)

    @property
    def is_prerelease(self):
        return self._parts[1][0] != '|'


class SemanticMatcher(Matcher):
    version_class = SemanticVersion


class VersionScheme(object):
    def __init__(self, key, matcher, suggester=None):
        self.key = key
        self.matcher = matcher
        self.suggester = suggester

    def is_valid_version(self, s):
        try:
            self.matcher.version_class(s)
            result = True
        except UnsupportedVersionError:
            result = False
        return result

    def is_valid_matcher(self, s):
        try:
            self.matcher(s)
            result = True
        except UnsupportedVersionError:
            result = False
        return result

    def is_valid_constraint_list(self, s):
        """
        Used for processing some metadata fields
        """
        return self.is_valid_matcher('dummy_name (%s)' % s)

    def suggest(self, s):
        if self.suggester is None:
            result = None
        else:
            result = self.suggester(s)
        return result

_SCHEMES = {
    'normalized': VersionScheme(_normalized_key, NormalizedMatcher,
                                _suggest_normalized_version),
    # the legacy suggester is the identity function; it must accept a single
    # version string, since VersionScheme.suggest calls it as suggester(s)
    'legacy': VersionScheme(_legacy_key, LegacyMatcher, lambda s: s),
    'semantic': VersionScheme(_semantic_key, SemanticMatcher,
                              _suggest_semantic_version),
}

_SCHEMES['default'] = _SCHEMES['normalized']


def get_scheme(name):
    if name not in _SCHEMES:
        raise ValueError('unknown scheme name: %r' % name)
    return _SCHEMES[name]
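

# Editorial usage sketch (not part of the original distlib module).  It uses
# only names defined above; the expected values are assumptions based on a
# reading of this file rather than documented behaviour.
if __name__ == '__main__':  # pragma: no cover
    scheme = get_scheme('default')   # the 'normalized' (PEP 440-style) scheme
    print(scheme.is_valid_version('1.0.post2'))      # expected: True
    print(scheme.is_valid_matcher('foo (>= 1.0)'))   # expected: True
    print(scheme.suggest('1.0-alpha2'))              # expected: '1.0a2'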
Z
ddlZddlZddl
Z
ddlZddlZddlZddlmZyddlmZWn!ek
rddlmZnXydd	l
mZWn?ek
r=ydd	lmZWnek
r9eZnXnXd
ddd
ddddddddddddddddddd d!d"d#d$d%d&d'd(d)d*d+d,d-d.d/d0d1d2d3d4d5d6d7d8d9d:d;d<d=d>d?d@dAdBdCdDdEdFdGdHdIdJdKdLdMdNdOdPdQdRdSdTdUdVdWdXdYdZd[d\d]d^d_d`dadbdcdddedfdgdhdidjdkdldmdndodpdqdrgiZee	jds ZedtdskZere	jZ e!Z"e#Z$e!Z%e&e'e(e)e*ee+e,e-e.e/gZ0nre	j1Z e2Z3duZ%gZ0ddl4Z4xEdvj5D]7Z6ye0j7e8e4e6Wne9k
rZq$nXq$We:dwe3dxDZ;dyZ<dze=fd{YZ>ej?ej@ZAd|ZBeBd}ZCeAeBZDe#d~ZEdjFdejGDZHd!eIfdYZJd#eJfdYZKd%eJfdYZLd'eLfdYZMd*eIfdYZNde=fdYZOd&e=fdYZPe
jQjRePdZSdZTdZUdZVdZWdZXdZYddZZd(e=fdYZ[d0e[fdYZ\de\fdYZ]de\fdYZ^de\fdYZ_e_Z`e_e[_ade\fdYZbde_fdYZcdebfdYZddpe\fdYZed3e\fdYZfd+e\fdYZgd)e\fdYZhd
e\fdYZid2e\fdYZjde\fdYZkdekfdYZldekfdYZmdekfdYZnd.ekfdYZod-ekfdYZpd5ekfdYZqd4ekfdYZrd$e[fdYZsd
esfdYZtd esfdYZudesfdYZvdesfdYZwd"e[fdYZxdexfdYZydexfdYZzdexfdYZ{de{fdYZ|d6e{fdYZ}de=fdYZ~e~ZdexfdYZd,exfdYZdexfdYZdefdYZd1exfdYZdefdYZdefdYZdefdYZd/efdYZde=fdYZdZdedZedZdZdZdZdZeedZdZedZdZdZe]jdGZemjdMZenjdLZeojdeZepjddZefeEdddjdZegdjdZegdjdZeeBeBefeHddddxBegdejBZeeedeZe_dedjdee|eeBjddZdZdZdZdZdZedZedZdZdZdZdZe=e_ddZe>Ze=e_e=e_ededdZeZeegddjdZeegddjdZeegddegddBjdZee`dejjdZddeejdZedZedZedZeefeAeDdjd\ZZeedj5dZegddjFejdjdZdZeegddjdZegdjdZegd	jjd
ZegdjdZeegddeBjd
ZeZegdjdZee|efeHddeefde_denjjdZeeejeBddjd>ZdrfdYZedkrecdZecdZefeAeDdZeeddejeZeeejdZdeBZeeddejeZeeejdZededeedZejdejjdejjdejjd ddlZejjeejejjd!ndS("sS
pyparsing module - Classes and methods to define and execute parsing grammars

The pyparsing module is an alternative approach to creating and executing simple grammars,
vs. the traditional lex/yacc approach, or the use of regular expressions.  With pyparsing, you
don't need to learn a new syntax for defining grammars or matching expressions - the parsing module
provides a library of classes that you use to construct the grammar directly in Python.

Here is a program to parse "Hello, World!" (or any greeting of the form 
C{", !"}), built up using L{Word}, L{Literal}, and L{And} elements 
(L{'+'} operator gives L{And} expressions, strings are auto-converted to
L{Literal} expressions)::

    from pyparsing import Word, alphas

    # define grammar of a greeting
    greet = Word(alphas) + "," + Word(alphas) + "!"

    hello = "Hello, World!"
    print (hello, "->", greet.parseString(hello))

The program outputs the following::

    Hello, World! -> ['Hello', ',', 'World', '!']

The Python representation of the grammar is quite readable, owing to the self-explanatory
class names, and the use of '+', '|' and '^' operators.

The L{ParseResults} object returned from L{ParserElement.parseString} can be accessed as a nested list, a dictionary, or an
object with named attributes.

The pyparsing module handles some of the problems that are typically vexing when writing text parsers:
 - extra or missing whitespace (the above program will also handle "Hello,World!", "Hello  ,  World  !", etc.)
 - quoted strings
 - embedded comments
s2.1.10s07 Oct 2016 01:31 UTCs*Paul McGuire iN(tref(tdatetime(tRLock(tOrderedDicttAndtCaselessKeywordtCaselessLiteralt
CharsNotIntCombinetDicttEachtEmptyt
FollowedBytForwardt
GoToColumntGrouptKeywordtLineEndt	LineStarttLiteralt
MatchFirsttNoMatchtNotAnyt	OneOrMoretOnlyOncetOptionaltOrtParseBaseExceptiontParseElementEnhancetParseExceptiontParseExpressiontParseFatalExceptiontParseResultstParseSyntaxExceptiont
ParserElementtQuotedStringtRecursiveGrammarExceptiontRegextSkipTot	StringEndtStringStarttSuppresstTokentTokenConvertertWhitetWordtWordEndt	WordStartt
ZeroOrMoret	alphanumstalphast
alphas8bittanyCloseTagt
anyOpenTagt
cStyleCommenttcoltcommaSeparatedListtcommonHTMLEntitytcountedArraytcppStyleCommenttdblQuotedStringtdblSlashCommentt
delimitedListtdictOftdowncaseTokenstemptythexnumsthtmlCommenttjavaStyleCommenttlinetlineEndt	lineStarttlinenotmakeHTMLTagstmakeXMLTagstmatchOnlyAtColtmatchPreviousExprtmatchPreviousLiteralt
nestedExprtnullDebugActiontnumstoneOftopAssoctoperatorPrecedencet
printablestpunc8bittpythonStyleCommenttquotedStringtremoveQuotestreplaceHTMLEntitytreplaceWitht
restOfLinetsglQuotedStringtsranget	stringEndtstringStartttraceParseActiont
unicodeStringtupcaseTokenst
withAttributet
indentedBlocktoriginalTextFortungroupt
infixNotationtlocatedExprt	withClasst
CloseMatchttokenMaptpyparsing_commoniicCs}t|tr|Syt|SWnUtk
rxt|jtjd}td}|jd|j	|SXdS(sDrop-in replacement for str(obj) that tries to be Unicode friendly. It first tries
           str(obj). If that fails with a UnicodeEncodeError, then it tries unicode(obj). It
           then < returns the unicode object | encodes it with the default encoding | ... >.
        txmlcharrefreplaces&#\d+;cSs#dtt|ddd!dS(Ns\uiii(thextint(tt((s9/usr/lib/python2.7/site-packages/pip/_vendor/pyparsing.pyttN(
t
isinstancetunicodetstrtUnicodeEncodeErrortencodetsystgetdefaultencodingR%tsetParseActionttransformString(tobjtrett
xmlcharref((s9/usr/lib/python2.7/site-packages/pip/_vendor/pyparsing.pyt_ustrs
s6sum len sorted reversed list tuple set any all min maxccs|]}|VqdS(N((t.0ty((s9/usr/lib/python2.7/site-packages/pip/_vendor/pyparsing.pys	sicCsRd}ddjD}x/t||D]\}}|j||}q,W|S(s/Escape &, <, >, ", ', etc. in a string of data.s&><"'css|]}d|dVqdS(t&t;N((Rts((s9/usr/lib/python2.7/site-packages/pip/_vendor/pyparsing.pys	ssamp gt lt quot apos(tsplittziptreplace(tdatatfrom_symbolst
to_symbolstfrom_tto_((s9/usr/lib/python2.7/site-packages/pip/_vendor/pyparsing.pyt_xml_escapes
t
_ConstantscBseZRS((t__name__t
__module__(((s9/usr/lib/python2.7/site-packages/pip/_vendor/pyparsing.pyRst
0123456789tABCDEFabcdefi\Rrccs$|]}|tjkr|VqdS(N(tstringt
whitespace(Rtc((s9/usr/lib/python2.7/site-packages/pip/_vendor/pyparsing.pys	scBs_eZdZdd
d
dZedZdZdZdZ	ddZ
d	ZRS(s7base exception class for all parsing runtime exceptionsicCs[||_|dkr*||_d|_n||_||_||_|||f|_dS(NRr(tloctNonetmsgtpstrt
parserElementtargs(tselfRRRtelem((s9/usr/lib/python2.7/site-packages/pip/_vendor/pyparsing.pyt__init__s					cCs||j|j|j|jS(s
        internal factory method to simplify creating one type of ParseException 
        from another - avoids having __init__ signature conflicts among subclasses
        (RRRR(tclstpe((s9/usr/lib/python2.7/site-packages/pip/_vendor/pyparsing.pyt_from_exceptionscCsm|dkrt|j|jS|dkr>t|j|jS|dkr]t|j|jSt|dS(ssupported attributes by name are:
            - lineno - returns the line number of the exception text
            - col - returns the column number of the exception text
            - line - returns the line containing the exception text
        RHR7tcolumnREN(R7R(RHRRR7REtAttributeError(Rtaname((s9/usr/lib/python2.7/site-packages/pip/_vendor/pyparsing.pyt__getattr__scCs d|j|j|j|jfS(Ns"%s (at char %d), (line:%d, col:%d)(RRRHR(R((s9/usr/lib/python2.7/site-packages/pip/_vendor/pyparsing.pyt__str__scCs
t|S(N(R(R((s9/usr/lib/python2.7/site-packages/pip/_vendor/pyparsing.pyt__repr__ss>!} ('-' operator) indicates that parsing is to stop 
       immediately because an unbacktrackable syntax error has been found(RRR(((s9/usr/lib/python2.7/site-packages/pip/_vendor/pyparsing.pyR!scBs eZdZdZdZRS(sZexception thrown by L{ParserElement.validate} if the grammar could be improperly recursivecCs
||_dS(N(tparseElementTrace(RtparseElementList((s9/usr/lib/python2.7/site-packages/pip/_vendor/pyparsing.pyRscCsd|jS(NsRecursiveGrammarException: %s(R(R((s9/usr/lib/python2.7/site-packages/pip/_vendor/pyparsing.pyR s(RRRRR(((s9/usr/lib/python2.7/site-packages/pip/_vendor/pyparsing.pyR$s	t_ParseResultsWithOffsetcBs,eZdZdZdZdZRS(cCs||f|_dS(N(ttup(Rtp1tp2((s9/usr/lib/python2.7/site-packages/pip/_vendor/pyparsing.pyR$scCs|j|S(N(R(Rti((s9/usr/lib/python2.7/site-packages/pip/_vendor/pyparsing.pyt__getitem__&scCst|jdS(Ni(treprR(R((s9/usr/lib/python2.7/site-packages/pip/_vendor/pyparsing.pyR(scCs|jd|f|_dS(Ni(R(RR((s9/usr/lib/python2.7/site-packages/pip/_vendor/pyparsing.pyt	setOffset*s(RRRRRR(((s9/usr/lib/python2.7/site-packages/pip/_vendor/pyparsing.pyR#s			cBseZdZd-d-eedZd-d-eeedZdZedZ	dZ
dZdZdZ
e
Zd	Zd
ZdZdZd
ZereZeZeZn-eZeZeZdZdZdZdZdZd-dZdZdZdZ dZ!dZ"dZ#dZ$dZ%dZ&dZ'ddZ(d Z)d!Z*d"Z+d-e,ded#Z-d$Z.d%Z/dd&ed'Z0d(Z1d)Z2d*Z3d+Z4d,Z5RS(.sI
    Structured parse results, to provide multiple means of access to the parsed data:
       - as a list (C{len(results)})
       - by list index (C{results[0], results[1]}, etc.)
       - by attribute (C{results.} - see L{ParserElement.setResultsName})

    Example::
        integer = Word(nums)
        date_str = (integer.setResultsName("year") + '/' 
                        + integer.setResultsName("month") + '/' 
                        + integer.setResultsName("day"))
        # equivalent form:
        # date_str = integer("year") + '/' + integer("month") + '/' + integer("day")

        # parseString returns a ParseResults object
        result = date_str.parseString("1999/12/31")

        def test(s, fn=repr):
            print("%s -> %s" % (s, fn(eval(s))))
        test("list(result)")
        test("result[0]")
        test("result['month']")
        test("result.day")
        test("'month' in result")
        test("'minutes' in result")
        test("result.dump()", str)
    prints::
        list(result) -> ['1999', '/', '12', '/', '31']
        result[0] -> '1999'
        result['month'] -> '12'
        result.day -> '31'
        'month' in result -> True
        'minutes' in result -> False
        result.dump() -> ['1999', '/', '12', '/', '31']
        - day: 31
        - month: 12
        - year: 1999
    cCs/t||r|Stj|}t|_|S(N(Rstobjectt__new__tTruet_ParseResults__doinit(RttoklisttnametasListtmodaltretobj((s9/usr/lib/python2.7/site-packages/pip/_vendor/pyparsing.pyRTs
	cCs|jrt|_d|_d|_i|_||_||_|dkrTg}n||trp||_	n-||t
rt||_	n|g|_	t|_n|dk	r|r|sd|j|s(R(R((Rs9/usr/lib/python2.7/site-packages/pip/_vendor/pyparsing.pyt_itervaluesscsfdjDS(Nc3s|]}||fVqdS(N((RR(R(s9/usr/lib/python2.7/site-packages/pip/_vendor/pyparsing.pys	s(R(R((Rs9/usr/lib/python2.7/site-packages/pip/_vendor/pyparsing.pyt
_iteritemsscCst|jS(sVReturns all named result keys (as a list in Python 2.x, as an iterator in Python 3.x).(RR(R((s9/usr/lib/python2.7/site-packages/pip/_vendor/pyparsing.pytkeysscCst|jS(sXReturns all named result values (as a list in Python 2.x, as an iterator in Python 3.x).(Rt
itervalues(R((s9/usr/lib/python2.7/site-packages/pip/_vendor/pyparsing.pytvaluesscCst|jS(sfReturns all named result key-values (as a list of tuples in Python 2.x, as an iterator in Python 3.x).(Rt	iteritems(R((s9/usr/lib/python2.7/site-packages/pip/_vendor/pyparsing.pyRscCs
t|jS(sSince keys() returns an iterator, this method is helpful in bypassing
           code that looks for the existence of any defined results names.(tboolR(R((s9/usr/lib/python2.7/site-packages/pip/_vendor/pyparsing.pythaskeysscOs|sdg}nxI|jD];\}}|dkrJ|d|f}qtd|qWt|dtst|dks|d|kr|d}||}||=|S|d}|SdS(s
        Removes and returns item at specified index (default=C{last}).
        Supports both C{list} and C{dict} semantics for C{pop()}. If passed no
        argument or an integer argument, it will use C{list} semantics
        and pop tokens from the list of parsed tokens. If passed a 
        non-integer argument (most likely a string), it will use C{dict}
        semantics and pop the corresponding value from any defined 
        results names. A second default return value argument is 
        supported, just as in C{dict.pop()}.

        Example::
            def remove_first(tokens):
                tokens.pop(0)
            print(OneOrMore(Word(nums)).parseString("0 123 321")) # -> ['0', '123', '321']
            print(OneOrMore(Word(nums)).addParseAction(remove_first).parseString("0 123 321")) # -> ['123', '321']

            label = Word(alphas)
            patt = label("LABEL") + OneOrMore(Word(nums))
            print(patt.parseString("AAB 123 321").dump())

            # Use pop() in a parse action to remove named result (note that corresponding value is not
            # removed from list form of results)
            def remove_LABEL(tokens):
                tokens.pop("LABEL")
                return tokens
            patt.addParseAction(remove_LABEL)
            print(patt.parseString("AAB 123 321").dump())
        prints::
            ['AAB', '123', '321']
            - LABEL: AAB

            ['AAB', '123', '321']
        itdefaultis-pop() got an unexpected keyword argument '%s'iN(RRRsRoR(RRtkwargsRRtindexR}tdefaultvalue((s9/usr/lib/python2.7/site-packages/pip/_vendor/pyparsing.pytpops"


cCs||kr||S|SdS(si
        Returns named result matching the given key, or if there is no
        such name, then returns the given C{defaultValue} or C{None} if no
        C{defaultValue} is specified.

        Similar to C{dict.get()}.
        
        Example::
            integer = Word(nums)
            date_str = integer("year") + '/' + integer("month") + '/' + integer("day")           

            result = date_str.parseString("1999/12/31")
            print(result.get("year")) # -> '1999'
            print(result.get("hour", "not specified")) # -> 'not specified'
            print(result.get("hour")) # -> None
        N((RtkeytdefaultValue((s9/usr/lib/python2.7/site-packages/pip/_vendor/pyparsing.pyRscCsw|jj||x]|jjD]L\}}x=t|D]/\}\}}t||||k|| ['0', '123', '321']

            # use a parse action to insert the parse location in the front of the parsed results
            def insert_locn(locn, tokens):
                tokens.insert(0, locn)
            print(OneOrMore(Word(nums)).addParseAction(insert_locn).parseString("0 123 321")) # -> [0, '0', '123', '321']
        N(RtinsertRRRR(RRtinsStrRRRRR((s9/usr/lib/python2.7/site-packages/pip/_vendor/pyparsing.pyR2scCs|jj|dS(s
        Add single element to end of ParseResults list of elements.

        Example::
            print(OneOrMore(Word(nums)).parseString("0 123 321")) # -> ['0', '123', '321']
            
            # use a parse action to compute the sum of the parsed integers, and add it to the end
            def append_sum(tokens):
                tokens.append(sum(map(int, tokens)))
            print(OneOrMore(Word(nums)).addParseAction(append_sum).parseString("0 123 321")) # -> ['0', '123', '321', 444]
        N(Rtappend(Rtitem((s9/usr/lib/python2.7/site-packages/pip/_vendor/pyparsing.pyRFscCs0t|tr||7}n|jj|dS(s
        Add sequence of elements to end of ParseResults list of elements.

        Example::
            patt = OneOrMore(Word(alphas))
            
            # use a parse action to append the reverse of the matched strings, to make a palindrome
            def make_palindrome(tokens):
                tokens.extend(reversed([t[::-1] for t in tokens]))
                return ''.join(tokens)
            print(patt.addParseAction(make_palindrome).parseString("lskdj sdlkjf lksd")) # -> 'lskdjsdlkjflksddsklfjkldsjdksl'
        N(RsR Rtextend(Rtitemseq((s9/usr/lib/python2.7/site-packages/pip/_vendor/pyparsing.pyRTs

cCs|j2|jjdS(s7
        Clear all elements and results names.
        N(RRtclear(R((s9/usr/lib/python2.7/site-packages/pip/_vendor/pyparsing.pyRfscCsy||SWntk
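A minimal usage sketch of C{clear()}, added for clarity and assuming the standalone C{pyparsing} package::

    from pyparsing import OneOrMore, Word, nums

    result = OneOrMore(Word(nums))("values").parseString("0 123 321")
    print(result)            # -> ['0', '123', '321']
    result.clear()           # removes both the tokens and the "values" results name
    print(result)            # -> []
    print(result.haskeys())  # -> False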
r dSX||jkr}||jkrR|j|ddStg|j|D]}|d^qcSndSdS(NRrii(RRRR (RRR((s9/usr/lib/python2.7/site-packages/pip/_vendor/pyparsing.pyRms
+cCs|j}||7}|S(N(R(RtotherR}((s9/usr/lib/python2.7/site-packages/pip/_vendor/pyparsing.pyt__add__{s
c	s|jrt|jfd}|jj}g|D]<\}}|D])}|t|d||df^qMq=}xJ|D]?\}}|||st](RR(R((s9/usr/lib/python2.7/site-packages/pip/_vendor/pyparsing.pyRsRrcCsog}xb|jD]W}|r2|r2|j|nt|trT||j7}q|jt|qW|S(N(RRRsR t
_asStringListR(RtseptoutR((s9/usr/lib/python2.7/site-packages/pip/_vendor/pyparsing.pyRscCs5g|jD]'}t|tr+|jn|^q
S(s
        Returns the parse results as a nested list of matching tokens, all converted to strings.

        Example::
            patt = OneOrMore(Word(alphas))
            result = patt.parseString("sldkj lsdkj sldkj")
            # even though the result prints in string-like form, it is actually a pyparsing ParseResults
            print(type(result), result) # ->  ['sldkj', 'lsdkj', 'sldkj']
            
            # Use asList() to create an actual list
            result_list = result.asList()
            print(type(result_list), result_list) # ->  ['sldkj', 'lsdkj', 'sldkj']
        (RRsR R(Rtres((s9/usr/lib/python2.7/site-packages/pip/_vendor/pyparsing.pyRscsGtr|j}n	|j}fdtfd|DS(s
        Returns the named parse results as a nested dictionary.

        Example::
            integer = Word(nums)
            date_str = integer("year") + '/' + integer("month") + '/' + integer("day")
            
            result = date_str.parseString('12/31/1999')
            print(type(result), repr(result)) # ->  (['12', '/', '31', '/', '1999'], {'day': [('1999', 4)], 'year': [('12', 0)], 'month': [('31', 2)]})
            
            result_dict = result.asDict()
            print(type(result_dict), repr(result_dict)) # ->  {'day': '1999', 'year': '12', 'month': '31'}

            # even though a ParseResults supports dict-like access, sometimes you just need to have a dict
            import json
            print(json.dumps(result)) # -> Exception: TypeError: ... is not JSON serializable
            print(json.dumps(result.asDict())) # -> {"month": "31", "day": "1999", "year": "12"}
        csMt|trE|jr%|jSg|D]}|^q,Sn|SdS(N(RsR RtasDict(R|R(ttoItem(s9/usr/lib/python2.7/site-packages/pip/_vendor/pyparsing.pyRs

 c3s'|]\}}||fVqdS(N((RRR(R(s9/usr/lib/python2.7/site-packages/pip/_vendor/pyparsing.pys	s(tPY_3RRR(Rtitem_fn((Rs9/usr/lib/python2.7/site-packages/pip/_vendor/pyparsing.pyRs
		cCsPt|j}|jj|_|j|_|jj|j|j|_|S(sA
        Returns a new copy of a C{ParseResults} object.
        (R RRRRRR
R(RR}((s9/usr/lib/python2.7/site-packages/pip/_vendor/pyparsing.pyRscCsd}g}td|jjD}|d}|sPd}d}d}nd	}	|d	k	rk|}	n|jr|j}	n|	s|rdSd}	n|||d|	dg7}x	t|jD]\}
}t|trI|
|kr||j	||
|o|d	k||g7}q||j	d	|o6|d	k||g7}qd	}|
|krh||
}n|s|rzqqd}nt
t|}
|||d|d|
d|dg	7}qW|||d|	dg7}dj|S(
s
        (Deprecated) Returns the parse results as XML. Tags are created for tokens and lists that have defined results names.
        s
css2|](\}}|D]}|d|fVqqdS(iN((RRRR((s9/usr/lib/python2.7/site-packages/pip/_vendor/pyparsing.pys	s	s  RrtITEMtsgss
%s%s- %s: s  icss|]}t|tVqdS(N(RsR (Rtvv((s9/usr/lib/python2.7/site-packages/pip/_vendor/pyparsing.pys	sss
%s%s[%d]:
%s%s%sRr(
RRRRtsortedRRsR tdumpRtanyRR(RR$tdepthtfullRtNLRRRRR1((s9/usr/lib/python2.7/site-packages/pip/_vendor/pyparsing.pyR3Ps, B?cOstj|j||dS(s
        Pretty-printer for parsed results as a list, using the C{pprint} module.
        Accepts additional positional or keyword args as defined for the 
        C{pprint.pprint} method. (U{http://docs.python.org/3/library/pprint.html#pprint.pprint})

        Example::
            ident = Word(alphas, alphanums)
            num = Word(nums)
            func = Forward()
            term = ident | num | Group('(' + func + ')')
            func <<= ident + Group(Optional(delimitedList(term)))
            result = func.parseString("fna a,b,(fnb c,d,200),100")
            result.pprint(width=40)
        prints::
            ['fna',
             ['a',
              'b',
              ['(', 'fnb', ['c', 'd', '200'], ')'],
              '100']]
        N(tpprintR(RRR((s9/usr/lib/python2.7/site-packages/pip/_vendor/pyparsing.pyR8}scCsC|j|jj|jdk	r-|jp0d|j|jffS(N(RRRRRRR(R((s9/usr/lib/python2.7/site-packages/pip/_vendor/pyparsing.pyt__getstate__s
cCsm|d|_|d\|_}}|_i|_|jj||dk	r`t||_n	d|_dS(Nii(RRRRR
RRR(RtstateR/tinAccumNames((s9/usr/lib/python2.7/site-packages/pip/_vendor/pyparsing.pyt__setstate__s
	cCs|j|j|j|jfS(N(RRRR(R((s9/usr/lib/python2.7/site-packages/pip/_vendor/pyparsing.pyt__getnewargs__scCs tt|t|jS(N(RRRR(R((s9/usr/lib/python2.7/site-packages/pip/_vendor/pyparsing.pyRsN(6RRRRRRRsRRRRRRRt__nonzero__RRRRRRRRRRRRRRRRRRRRR
RRRRRRRRRR!R-R0R3R8R9R<R=R(((s9/usr/lib/python2.7/site-packages/pip/_vendor/pyparsing.pyR -sh&	'		
														4												#	=		%-			
	cCsW|}d|ko#t|knr@||ddkr@dS||jdd|S(sReturns current column within a string, counting newlines as line separators.
   The first column is number 1.

   Note: the default parsing behavior is to expand tabs in the input string
   before starting the parsing process.  See L{I{ParserElement.parseString}} for more information
   on parsing strings containing C{}s, and suggested methods to maintain a
   consistent view of the parsed string, the parse location, and line and column
   positions within the parsed string.
   iis
(Rtrfind(RtstrgR((s9/usr/lib/python2.7/site-packages/pip/_vendor/pyparsing.pyR7s
cCs|jdd|dS(sReturns current line number within a string, counting newlines as line separators.
   The first line is number 1.

   Note: the default parsing behavior is to expand tabs in the input string
   before starting the parsing process.  See L{I{ParserElement.parseString}} for more information
   on parsing strings containing C{}s, and suggested methods to maintain a
   consistent view of the parsed string, the parse location, and line and column
   positions within the parsed string.
   s
ii(tcount(RR@((s9/usr/lib/python2.7/site-packages/pip/_vendor/pyparsing.pyRHs
cCsR|jdd|}|jd|}|dkrB||d|!S||dSdS(sfReturns the line of text containing loc within a string, counting newlines as line separators.
       s
iiN(R?tfind(RR@tlastCRtnextCR((s9/usr/lib/python2.7/site-packages/pip/_vendor/pyparsing.pyREs
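A minimal sketch tying C{col}, C{lineno}, and C{line} together, added for clarity and assuming the standalone C{pyparsing} package::

    from pyparsing import Word, alphas, col, lineno, line

    data = "abc def\nghi jkl"

    def report(s, loc, toks):
        # loc is an offset into s; convert it to human-readable coordinates
        print("%s at line %d, col %d, in %r" % (toks[0], lineno(loc, s), col(loc, s), line(loc, s)))

    Word(alphas).addParseAction(report).searchString(data)
    # -> abc at line 1, col 1, in 'abc def'
    #    def at line 1, col 5, in 'abc def'
    #    ghi at line 2, col 1, in 'ghi jkl'
    #    jkl at line 2, col 5, in 'ghi jkl'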
cCsAdt|dt|dt||t||fGHdS(NsMatch s at loc s(%d,%d)(RRHR7(tinstringRtexpr((s9/usr/lib/python2.7/site-packages/pip/_vendor/pyparsing.pyt_defaultStartDebugActionscCs'dt|dt|jGHdS(NsMatched s -> (RRuR(REtstartloctendlocRFttoks((s9/usr/lib/python2.7/site-packages/pip/_vendor/pyparsing.pyt_defaultSuccessDebugActionscCsdt|GHdS(NsException raised:(R(RERRFtexc((s9/usr/lib/python2.7/site-packages/pip/_vendor/pyparsing.pyt_defaultExceptionDebugActionscGsdS(sG'Do-nothing' debug action, to suppress debugging output during parsing.N((R((s9/usr/lib/python2.7/site-packages/pip/_vendor/pyparsing.pyROsicstkrfdSdgtgtd dkrVdd}ddntj}tjd}|d	dd
}|d|d|ffd}d
}y"tdtdj}Wntk
rt	}nX||_|S(Ncs
|S(N((RtlRp(tfunc(s9/usr/lib/python2.7/site-packages/pip/_vendor/pyparsing.pyRqRriiiicSsJtdkrdnd}tjd||d|}|j|jfgS(	Niiiiitlimiti(iii(tsystem_versiont	tracebackt
extract_stacktfilenameRH(RPRt
frame_summary((s9/usr/lib/python2.7/site-packages/pip/_vendor/pyparsing.pyRSscSs2tj|d|}|d}|j|jfgS(NRPi(RRt
extract_tbRTRH(ttbRPtframesRU((s9/usr/lib/python2.7/site-packages/pip/_vendor/pyparsing.pyRVs
iRPiicsxy&|d}td<|SWqtk
rdrInAz:tjd}|dddd ksnWd~Xdkrdcd7Rt	__class__(ii(
tsingleArgBuiltinsRRQRRRSRVtgetattrRt	ExceptionRu(ROR[RSt	LINE_DIFFt	this_lineR]t	func_name((RVRZRORPR[R\s9/usr/lib/python2.7/site-packages/pip/_vendor/pyparsing.pyt_trim_aritys*
					
	cBseZdZdZeZedZedZedZ	dZ
dZedZe
dZd	Zd
ZdZdZd
ZdZe
dZdZe
e
dZdZdZdefdYZedFk	rdefdYZndefdYZiZe Z!ddgZ"e
e
dZ#eZ$edZ%eZ&eddZ'edZ(e)edZ*d Z+e)d!Z,e)ed"Z-d#Z.d$Z/d%Z0d&Z1d'Z2d(Z3d)Z4d*Z5d+Z6d,Z7d-Z8d.Z9d/Z:dFd0Z;d1Z<d2Z=d3Z>d4Z?d5Z@d6ZAe
d7ZBd8ZCd9ZDd:ZEd;ZFgd<ZGed=ZHd>ZId?ZJd@ZKdAZLdBZMe
dCZNe
dDe
e
edEZORS(Gs)Abstract base level parser element class.s 
	
cCs
|t_dS(s
        Overrides the default whitespace chars

        Example::
            # default whitespace chars are space,  and newline
            OneOrMore(Word(alphas)).parseString("abc def\nghi jkl")  # -> ['abc', 'def', 'ghi', 'jkl']
            
            # change to just treat newline as significant
            ParserElement.setDefaultWhitespaceChars(" \t")
            OneOrMore(Word(alphas)).parseString("abc def\nghi jkl")  # -> ['abc', 'def']
        N(R"tDEFAULT_WHITE_CHARS(tchars((s9/usr/lib/python2.7/site-packages/pip/_vendor/pyparsing.pytsetDefaultWhitespaceChars=s
cCs
|t_dS(s
        Set class to be used for inclusion of string literals into a parser.
        
        Example::
            # default literal class used is Literal
            integer = Word(nums)
            date_str = integer("year") + '/' + integer("month") + '/' + integer("day")           

            date_str.parseString("1999/12/31")  # -> ['1999', '/', '12', '/', '31']


            # change to Suppress
            ParserElement.inlineLiteralsUsing(Suppress)
            date_str = integer("year") + '/' + integer("month") + '/' + integer("day")           

            date_str.parseString("1999/12/31")  # -> ['1999', '12', '31']
        N(R"t_literalStringClass(R((s9/usr/lib/python2.7/site-packages/pip/_vendor/pyparsing.pytinlineLiteralsUsingLscCst|_d|_d|_d|_||_t|_t	j
|_t|_t
|_t
|_t|_t
|_t
|_t|_d|_t|_d|_d|_t|_t
|_dS(NRr(NNN(RtparseActionRt
failActiontstrReprtresultsNamet
saveAsListRtskipWhitespaceR"Rft
whiteCharstcopyDefaultWhiteCharsRtmayReturnEmptytkeepTabstignoreExprstdebugtstreamlinedt
mayIndexErrorterrmsgtmodalResultstdebugActionstretcallPreparset
callDuringTry(Rtsavelist((s9/usr/lib/python2.7/site-packages/pip/_vendor/pyparsing.pyRas(																cCsEtj|}|j|_|j|_|jrAtj|_n|S(s$
        Make a copy of this C{ParserElement}.  Useful for defining different parse actions
        for the same parsing pattern, using copies of the original parse element.
        
        Example::
            integer = Word(nums).setParseAction(lambda toks: int(toks[0]))
            integerK = integer.copy().addParseAction(lambda toks: toks[0]*1024) + Suppress("K")
            integerM = integer.copy().addParseAction(lambda toks: toks[0]*1024*1024) + Suppress("M")
            
            print(OneOrMore(integerK | integerM | integer).parseString("5K 100 640K 256M"))
        prints::
            [5120, 100, 655360, 268435456]
        Equivalent form of C{expr.copy()} is just C{expr()}::
            integerM = integer().addParseAction(lambda toks: toks[0]*1024*1024) + Suppress("M")
        (RRkRuRrR"RfRq(Rtcpy((s9/usr/lib/python2.7/site-packages/pip/_vendor/pyparsing.pyRxs

	cCs>||_d|j|_t|dr:|j|j_n|S(sf
        Define name for this expression, makes debugging and exception messages clearer.
        
        Example::
            Word(nums).parseString("ABC")  # -> Exception: Expected W:(0123...) (at char 0), (line:1, col:1)
            Word(nums).setName("integer").parseString("ABC")  # -> Exception: Expected integer (at char 0), (line:1, col:1)
        s	Expected t	exception(RRyRRR(RR((s9/usr/lib/python2.7/site-packages/pip/_vendor/pyparsing.pytsetNames
	cCsE|j}|jdr.|d }t}n||_||_|S(sP
        Define name for referencing matching tokens as a nested attribute
        of the returned parse results.
        NOTE: this returns a *copy* of the original C{ParserElement} object;
        this is so that the client can define a basic element, such as an
        integer, and reference it in multiple places with different names.

        You can also set results names using the abbreviated syntax,
        C{expr("name")} in place of C{expr.setResultsName("name")} - 
        see L{I{__call__}<__call__>}.

        Example::
            date_str = (integer.setResultsName("year") + '/' 
                        + integer.setResultsName("month") + '/' 
                        + integer.setResultsName("day"))

            # equivalent form:
            date_str = integer("year") + '/' + integer("month") + '/' + integer("day")
        t*i(RtendswithRRnRz(RRtlistAllMatchestnewself((s9/usr/lib/python2.7/site-packages/pip/_vendor/pyparsing.pytsetResultsNames
		
csa|r9|jttfd}|_||_n$t|jdr]|jj|_n|S(sMethod to invoke the Python pdb debugger when this element is
           about to be parsed. Set C{breakFlag} to True to enable, False to
           disable.
        cs)ddl}|j||||S(Ni(tpdbt	set_trace(RERt	doActionstcallPreParseR(t_parseMethod(s9/usr/lib/python2.7/site-packages/pip/_vendor/pyparsing.pytbreakers
t_originalParseMethod(t_parseRRR(Rt	breakFlagR((Rs9/usr/lib/python2.7/site-packages/pip/_vendor/pyparsing.pytsetBreaks		cOs7tttt||_|jdt|_|S(s
        Define action to perform when successfully matching parse element definition.
        Parse action fn is a callable method with 0-3 arguments, called as C{fn(s,loc,toks)},
        C{fn(loc,toks)}, C{fn(toks)}, or just C{fn()}, where:
         - s   = the original string being parsed (see note below)
         - loc = the location of the matching substring
         - toks = a list of the matched tokens, packaged as a C{L{ParseResults}} object
        If the functions in fns modify the tokens, they can return them as the return
        value from fn, and the modified list of tokens will replace the original.
        Otherwise, fn does not need to return any value.

        Optional keyword arguments:
         - callDuringTry = (default=C{False}) indicate if parse action should be run during lookaheads and alternate testing

        Note: the default parsing behavior is to expand tabs in the input string
        before starting the parsing process.  See L{I{parseString}} for more information
        on parsing strings containing C{}s, and suggested methods to maintain a
        consistent view of the parsed string, the parse location, and line and column
        positions within the parsed string.
        
        Example::
            integer = Word(nums)
            date_str = integer + '/' + integer + '/' + integer

            date_str.parseString("1999/12/31")  # -> ['1999', '/', '12', '/', '31']

            # use parse action to convert to ints at parse time
            integer = Word(nums).setParseAction(lambda toks: int(toks[0]))
            date_str = integer + '/' + integer + '/' + integer

            # note that integer fields are now ints, not strings
            date_str.parseString("1999/12/31")  # -> [1999, '/', 12, '/', 31]
        R~(RtmapReRkRRR~(RtfnsR((s9/usr/lib/python2.7/site-packages/pip/_vendor/pyparsing.pyRzs"cOsF|jtttt|7_|jp<|jdt|_|S(s
        Add parse action to expression's list of parse actions. See L{I{setParseAction}}.
        
        See examples in L{I{copy}}.
        R~(RkRRReR~RR(RRR((s9/usr/lib/python2.7/site-packages/pip/_vendor/pyparsing.pytaddParseActions$cs|jdd|jdtr*tntx3|D]+fd}|jj|q7W|jp~|jdt|_|S(sAdd a boolean predicate function to expression's list of parse actions. See 
        L{I{setParseAction}} for function call signatures. Unlike C{setParseAction}, 
        functions passed to C{addCondition} need to return boolean success/fail of the condition.

        Optional keyword arguments:
         - message = define a custom message to be used in the raised exception
         - fatal   = if True, will raise ParseFatalException to stop parsing immediately; otherwise will raise ParseException
         
        Example::
            integer = Word(nums).setParseAction(lambda toks: int(toks[0]))
            year_int = integer.copy()
            year_int.addCondition(lambda toks: toks[0] >= 2000, message="Only support years 2000 and later")
            date_str = year_int + '/' + integer + '/' + integer

            result = date_str.parseString("1999/12/31")  # -> Exception: Only support years 2000 and later (at char 0), (line:1, col:1)
        tmessagesfailed user-defined conditiontfatalcs7tt|||s3||ndS(N(RRe(RRNRp(texc_typetfnR(s9/usr/lib/python2.7/site-packages/pip/_vendor/pyparsing.pytpasR~(RRRRRkRR~(RRRR((RRRs9/usr/lib/python2.7/site-packages/pip/_vendor/pyparsing.pytaddConditions
cCs
||_|S(sDefine action to perform if parsing fails at this expression.
           Fail action fn is a callable function that takes the arguments
           C{fn(s,loc,expr,err)} where:
            - s = string being parsed
            - loc = location where expression match was attempted and failed
            - expr = the parse expression that failed
            - err = the exception thrown
           The function returns no value.  It may throw C{L{ParseFatalException}}
           if it is desired to stop parsing immediately.(Rl(RR((s9/usr/lib/python2.7/site-packages/pip/_vendor/pyparsing.pyt
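A minimal sketch of a fail action, added for clarity; it assumes the standalone C{pyparsing} package, and the reporting function name is illustrative only::

    from pyparsing import Word, nums, ParseException

    def report_failure(s, loc, expr, err):
        print("no match for %s at loc %d" % (expr, loc))

    integer = Word(nums).setFailAction(report_failure)
    try:
        integer.parseString("abc")
    except ParseException:
        pass   # the exception is still raised after the fail action runs
    # prints: no match for W:(0123...) at loc 0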
setFailActions
	cCsnt}xa|rit}xN|jD]C}y)x"|j||\}}t}q+WWqtk
raqXqWq	W|S(N(RRRuRR(RRERt
exprsFoundtetdummy((s9/usr/lib/python2.7/site-packages/pip/_vendor/pyparsing.pyt_skipIgnorables#s	
cCsp|jr|j||}n|jrl|j}t|}x-||krh|||krh|d7}q?Wn|S(Ni(RuRRpRqR(RRERtwttinstrlen((s9/usr/lib/python2.7/site-packages/pip/_vendor/pyparsing.pytpreParse0s			cCs
|gfS(N((RRERR((s9/usr/lib/python2.7/site-packages/pip/_vendor/pyparsing.pyt	parseImpl<scCs|S(N((RRERt	tokenlist((s9/usr/lib/python2.7/site-packages/pip/_vendor/pyparsing.pyt	postParse?sc	Cs|j}|s|jr,|jdr?|jd|||n|rc|jrc|j||}n|}|}yUy|j|||\}}Wn/tk
rt|t||j	|nXWqt
k
r(}	|jdr|jd||||	n|jr"|j||||	nqXn|rP|jrP|j||}n|}|}|jsw|t|kry|j|||\}}Wqtk
rt|t||j	|qXn|j|||\}}|j|||}t
||jd|jd|j}
|jrf|s7|jrf|ryrxk|jD]`}||||
}|dk	rJt
||jd|jot|t
tfd|j}
qJqJWWqct
k
r}	|jdr|jd||||	nqcXqfxn|jD]`}||||
}|dk	rt
||jd|joMt|t
tfd|j}
qqWn|r|jdr|jd|||||
qn||
fS(NiiRRi(RvRlR{R}RRRRRRyRRxRR RnRoRzRkR~RRsR(RRERRRt	debuggingtprelocttokensStartttokensterrt	retTokensR((s9/usr/lib/python2.7/site-packages/pip/_vendor/pyparsing.pyt
_parseNoCacheCsp	

&
	

%$	

	
#cCsNy|j||dtdSWn)tk
rIt|||j|nXdS(NRi(RRRRRy(RRER((s9/usr/lib/python2.7/site-packages/pip/_vendor/pyparsing.pyttryParses
cCs7y|j||Wnttfk
r.tSXtSdS(N(RRRRR(RRER((s9/usr/lib/python2.7/site-packages/pip/_vendor/pyparsing.pytcanParseNexts
t_UnboundedCachecBseZdZRS(csit|_fd}fd}fd}tj|||_tj|||_tj|||_dS(Ncsj|S(N(R(RR(tcachetnot_in_cache(s9/usr/lib/python2.7/site-packages/pip/_vendor/pyparsing.pyRscs||})
         - define your parse action using the full C{(s,loc,toks)} signature, and
           reference the input string using the parse action's C{s} argument
         - explictly expand the tabs in your input string before calling
           C{parseString}
        
        Example::
            Word('a').parseString('aaaaabaaa')  # -> ['aaaaa']
            Word('a').parseString('aaaaabaaa', parseAll=True)  # -> Exception: Expected end of text
        iN(
R"RRwt
streamlineRuRtt
expandtabsRRRR'Rtverbose_stacktrace(RREtparseAllRRRtseRL((s9/usr/lib/python2.7/site-packages/pip/_vendor/pyparsing.pytparseString#s$
	
		
ccs|js|jnx|jD]}|jq W|jsRt|j}nt|}d}|j}|j}t	j
d}	yx||kra|	|kray.|||}
|||
dt\}}Wntk
r|
d}qX||krT|	d7}	||
|fV|rK|||}
|
|kr>|}qQ|d7}q^|}q|
d}qWWn(t
k
r}t	jrq|nXdS(s
        Scan the input string for expression matches.  Each match will return the
        matching tokens, start location, and end location.  May be called with optional
        C{maxMatches} argument, to clip scanning after 'n' matches are found.  If
        C{overlap} is specified, then overlapping matches will be reported.

        Note that the start and end locations are reported relative to the string
        being parsed.  See L{I{parseString}} for more information on parsing
        strings with embedded tabs.

        Example::
            source = "sldjf123lsdjjkf345sldkjf879lkjsfd987"
            print(source)
            for tokens,start,end in Word(alphas).scanString(source):
                print(' '*start + '^'*(end-start))
                print(' '*start + tokens[0])
        
        prints::
        
            sldjf123lsdjjkf345sldkjf879lkjsfd987
            ^^^^^
            sldjf
                    ^^^^^^^
                    lsdjjkf
                              ^^^^^^
                              sldkjf
                                       ^^^^^^
                                       lkjsfd
        iRiN(RwRRuRtRRRRRR"RRRRR(RREt
maxMatchestoverlapRRRt
preparseFntparseFntmatchesRtnextLocRtnextlocRL((s9/usr/lib/python2.7/site-packages/pip/_vendor/pyparsing.pyt
scanStringUsB	
			


	
		c	Cs%g}d}t|_yx|j|D]}\}}}|j|||!|rt|trs||j7}qt|tr||7}q|j|n|}q(W|j||g|D]}|r|^q}djt	t
t|SWn(tk
r }t
jrq!|nXdS(sf
        Extension to C{L{scanString}}, to modify matching text with modified tokens that may
        be returned from a parse action.  To use C{transformString}, define a grammar and
        attach a parse action to it that modifies the returned token list.
        Invoking C{transformString()} on a target string will then scan for matches,
        and replace the matched text patterns according to the logic in the parse
        action.  C{transformString()} returns the resulting transformed string.
        
        Example::
            wd = Word(alphas)
            wd.setParseAction(lambda toks: toks[0].title())
            
            print(wd.transformString("now is the winter of our discontent made glorious summer by this sun of york."))
        Prints::
            Now Is The Winter Of Our Discontent Made Glorious Summer By This Sun Of York.
        iRrN(RRtRRRsR RRRRRt_flattenRR"R(	RRERtlastERpRRtoRL((s9/usr/lib/python2.7/site-packages/pip/_vendor/pyparsing.pyR{s(	

 	cCsey6tg|j||D]\}}}|^qSWn(tk
r`}tjrWqa|nXdS(s~
        Another extension to C{L{scanString}}, simplifying the access to the tokens found
        to match the given parse expression.  May be called with optional
        C{maxMatches} argument, to clip searching after 'n' matches are found.
        
        Example::
            # a capitalized word starts with an uppercase letter, followed by zero or more lowercase letters
            cap_word = Word(alphas.upper(), alphas.lower())
            
            print(cap_word.searchString("More than Iron, more than Lead, more than Gold I need Electricity"))
        prints::
            ['More', 'Iron', 'Lead', 'Gold', 'I']
        N(R RRR"R(RRERRpRRRL((s9/usr/lib/python2.7/site-packages/pip/_vendor/pyparsing.pytsearchStrings6	c	csfd}d}xJ|j|d|D]3\}}}|||!V|rO|dVn|}q"W||VdS(s[
        Generator method to split a string using the given expression as a separator.
        May be called with optional C{maxsplit} argument, to limit the number of splits;
        and the optional C{includeSeparators} argument (default=C{False}), if the separating
        matching text should be included in the split results.
        
        Example::        
            punc = oneOf(list(".,;:/-!?"))
            print(list(punc.split("This, this?, this sentence, is badly punctuated!")))
        prints::
            ['This', ' this', '', ' this sentence', ' is badly punctuated', '']
        iRN(R(	RREtmaxsplittincludeSeparatorstsplitstlastRpRR((s9/usr/lib/python2.7/site-packages/pip/_vendor/pyparsing.pyRs
%
cCsdt|tr!tj|}nt|tsTtjdt|tdddSt	||gS(s
        Implementation of + operator - returns C{L{And}}. Adding strings to a ParserElement
        converts them to L{Literal}s by default.
        
        Example::
            greet = Word(alphas) + "," + Word(alphas) + "!"
            hello = "Hello, World!"
            print (hello, "->", greet.parseString(hello))
        Prints::
            Hello, World! -> ['Hello', ',', 'World', '!']
        s4Cannot combine element of type %s with ParserElementt
stackleveliN(
RsRR"RitwarningstwarnRt
SyntaxWarningRR(RR	((s9/usr/lib/python2.7/site-packages/pip/_vendor/pyparsing.pyR
s
cCs\t|tr!tj|}nt|tsTtjdt|tdddS||S(s]
        Implementation of + operator when left operand is not a C{L{ParserElement}}
        s4Cannot combine element of type %s with ParserElementRiN(	RsRR"RiRRRRR(RR	((s9/usr/lib/python2.7/site-packages/pip/_vendor/pyparsing.pyRs
cCsmt|tr!tj|}nt|tsTtjdt|tdddSt	|t	j
|gS(sQ
        Implementation of - operator, returns C{L{And}} with error stop
        s4Cannot combine element of type %s with ParserElementRiN(RsRR"RiRRRRRRt
_ErrorStop(RR	((s9/usr/lib/python2.7/site-packages/pip/_vendor/pyparsing.pyt__sub__s
cCs\t|tr!tj|}nt|tsTtjdt|tdddS||S(s]
        Implementation of - operator when left operand is not a C{L{ParserElement}}
        s4Cannot combine element of type %s with ParserElementRiN(	RsRR"RiRRRRR(RR	((s9/usr/lib/python2.7/site-packages/pip/_vendor/pyparsing.pyt__rsub__ s
csEt|tr|d}}n-t|tr7|dd }|dd
kr_d|df}nt|dtr|dd
kr|ddkrtS|ddkrtS|dtSqLt|dtrt|dtr|\}}||8}qLtdt|dt|dntdt||dkrgtdn|dkrtdn||kodknrtdn|rfd	|r
|dkr|}qt	g||}qA|}n(|dkr.}nt	g|}|S(s
        Implementation of * operator, allows use of C{expr * 3} in place of
        C{expr + expr + expr}.  Expressions may also be multiplied by a 2-integer
        tuple, similar to C{{min,max}} multipliers in regular expressions.  Tuples
        may also include C{None} as in:
         - C{expr*(n,None)} or C{expr*(n,)} is equivalent
              to C{expr*n + L{ZeroOrMore}(expr)}
              (read as "at least n instances of C{expr}")
         - C{expr*(None,n)} is equivalent to C{expr*(0,n)}
              (read as "0 to n instances of C{expr}")
         - C{expr*(None,None)} is equivalent to C{L{ZeroOrMore}(expr)}
         - C{expr*(1,None)} is equivalent to C{L{OneOrMore}(expr)}

        Note that C{expr*(None,n)} does not raise an exception if
        more than n exprs exist in the input stream; that is,
        C{expr*(None,n)} does not enforce a maximum number of expr
        occurrences.  If this behavior is desired, then write
        C{expr*(None,n) + ~expr}
        iiis7cannot multiply 'ParserElement' and ('%s','%s') objectss0cannot multiply 'ParserElement' and '%s' objectss/cannot multiply ParserElement by negative values@second tuple value must be greater or equal to first tuple values+cannot multiply ParserElement by 0 or (0,0)cs2|dkr$t|dStSdS(Ni(R(tn(tmakeOptionalListR(s9/usr/lib/python2.7/site-packages/pip/_vendor/pyparsing.pyR]sN(NN(
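A minimal sketch of the tuple-repetition forms described above, added for clarity and assuming the standalone C{pyparsing} package::

    from pyparsing import Word, nums

    digit = Word(nums, exact=1)

    print((digit * 3).parseString("1 2 3 4"))       # -> ['1', '2', '3']  (exactly three)
    print((digit * (2, 3)).parseString("1 2 3 4"))  # -> ['1', '2', '3']  (two required, one optional)
    print((digit * (2,)).parseString("1 2 3 4"))    # -> ['1', '2', '3', '4']  (two or more)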
RsRottupleRR0RRRt
ValueErrorR(RR	tminElementstoptElementsR}((RRs9/usr/lib/python2.7/site-packages/pip/_vendor/pyparsing.pyt__mul__,sD#

&
) 	cCs
|j|S(N(R(RR	((s9/usr/lib/python2.7/site-packages/pip/_vendor/pyparsing.pyt__rmul__pscCsdt|tr!tj|}nt|tsTtjdt|tdddSt	||gS(sI
        Implementation of | operator - returns C{L{MatchFirst}}
        s4Cannot combine element of type %s with ParserElementRiN(
RsRR"RiRRRRRR(RR	((s9/usr/lib/python2.7/site-packages/pip/_vendor/pyparsing.pyt__or__ss
cCs\t|tr!tj|}nt|tsTtjdt|tdddS||BS(s]
        Implementation of | operator when left operand is not a C{L{ParserElement}}
        s4Cannot combine element of type %s with ParserElementRiN(	RsRR"RiRRRRR(RR	((s9/usr/lib/python2.7/site-packages/pip/_vendor/pyparsing.pyt__ror__s
cCsdt|tr!tj|}nt|tsTtjdt|tdddSt	||gS(sA
        Implementation of ^ operator - returns C{L{Or}}
        s4Cannot combine element of type %s with ParserElementRiN(
RsRR"RiRRRRRR(RR	((s9/usr/lib/python2.7/site-packages/pip/_vendor/pyparsing.pyt__xor__s
cCs\t|tr!tj|}nt|tsTtjdt|tdddS||AS(s]
        Implementation of ^ operator when left operand is not a C{L{ParserElement}}
        s4Cannot combine element of type %s with ParserElementRiN(	RsRR"RiRRRRR(RR	((s9/usr/lib/python2.7/site-packages/pip/_vendor/pyparsing.pyt__rxor__s
cCsdt|tr!tj|}nt|tsTtjdt|tdddSt	||gS(sC
        Implementation of & operator - returns C{L{Each}}
        s4Cannot combine element of type %s with ParserElementRiN(
RsRR"RiRRRRRR
(RR	((s9/usr/lib/python2.7/site-packages/pip/_vendor/pyparsing.pyt__and__s
cCs\t|tr!tj|}nt|tsTtjdt|tdddS||@S(s]
        Implementation of & operator when left operand is not a C{L{ParserElement}}
        s4Cannot combine element of type %s with ParserElementRiN(	RsRR"RiRRRRR(RR	((s9/usr/lib/python2.7/site-packages/pip/_vendor/pyparsing.pyt__rand__s
cCs
t|S(sE
        Implementation of ~ operator - returns C{L{NotAny}}
        (R(R((s9/usr/lib/python2.7/site-packages/pip/_vendor/pyparsing.pyt
__invert__scCs'|dk	r|j|S|jSdS(s

        Shortcut for C{L{setResultsName}}, with C{listAllMatches=False}.
        
        If C{name} is given with a trailing C{'*'} character, then C{listAllMatches} will be
        passed as C{True}.
           
        If C{name} is omitted, same as calling C{L{copy}}.

        Example::
            # these are equivalent
            userdata = Word(alphas).setResultsName("name") + Word(nums+"-").setResultsName("socsecno")
            userdata = Word(alphas)("name") + Word(nums+"-")("socsecno")             
        N(RRR(RR((s9/usr/lib/python2.7/site-packages/pip/_vendor/pyparsing.pyt__call__s
cCs
t|S(s
        Suppresses the output of this C{ParserElement}; useful to keep punctuation from
        cluttering up returned output.
        (R)(R((s9/usr/lib/python2.7/site-packages/pip/_vendor/pyparsing.pytsuppressscCs
t|_|S(s
        Disables the skipping of whitespace before matching the characters in the
        C{ParserElement}'s defined pattern.  This is normally only used internally by
        the pyparsing module, but may be needed in some whitespace-sensitive grammars.
        (RRp(R((s9/usr/lib/python2.7/site-packages/pip/_vendor/pyparsing.pytleaveWhitespaces	cCst|_||_t|_|S(s8
        Overrides the default whitespace chars
        (RRpRqRRr(RRg((s9/usr/lib/python2.7/site-packages/pip/_vendor/pyparsing.pytsetWhitespaceCharss			cCs
t|_|S(s
        Overrides default behavior to expand C{}s to spaces before parsing the input string.
        Must be called before C{parseString} when the input grammar contains elements that
        match C{} characters.
        (RRt(R((s9/usr/lib/python2.7/site-packages/pip/_vendor/pyparsing.pyt
parseWithTabss	cCsrt|trt|}nt|trR||jkrn|jj|qnn|jjt|j|S(s
        Define expression to be ignored (e.g., comments) while doing pattern
        matching; may be called repeatedly, to define multiple comment or other
        ignorable patterns.
        
        Example::
            patt = OneOrMore(Word(alphas))
            patt.parseString('ablaj /* comment */ lskjd') # -> ['ablaj']
            
            patt.ignore(cStyleComment)
            patt.parseString('ablaj /* comment */ lskjd') # -> ['ablaj', 'lskjd']
        (RsRR)RuRR(RR	((s9/usr/lib/python2.7/site-packages/pip/_vendor/pyparsing.pytignores
cCs1|p	t|pt|ptf|_t|_|S(sT
        Enable display of debugging messages while doing pattern matching.
        (RGRKRMR{RRv(RtstartActiont
successActiontexceptionAction((s9/usr/lib/python2.7/site-packages/pip/_vendor/pyparsing.pytsetDebugActions
s
			cCs)|r|jtttn	t|_|S(s
        Enable display of debugging messages while doing pattern matching.
        Set C{flag} to True to enable, False to disable.

        Example::
            wd = Word(alphas).setName("alphaword")
            integer = Word(nums).setName("numword")
            term = wd | integer
            
            # turn on debugging for wd
            wd.setDebug()

            OneOrMore(term).parseString("abc 123 xyz 890")
        
        prints::
            Match alphaword at loc 0(1,1)
            Matched alphaword -> ['abc']
            Match alphaword at loc 3(1,4)
            Exception raised:Expected alphaword (at char 4), (line:1, col:5)
            Match alphaword at loc 7(1,8)
            Matched alphaword -> ['xyz']
            Match alphaword at loc 11(1,12)
            Exception raised:Expected alphaword (at char 12), (line:1, col:13)
            Match alphaword at loc 15(1,16)
            Exception raised:Expected alphaword (at char 15), (line:1, col:16)

        The output shown is that produced by the default debug actions - custom debug actions can be
        specified using L{setDebugActions}. Prior to attempting
        to match the C{wd} expression, the debugging message C{"Match  at loc (,)"}
        is shown. Then if the parse succeeds, a C{"Matched"} message is shown, or an C{"Exception raised"}
        message is shown. Also note the use of L{setName} to assign a human-readable name to the expression,
        which makes debugging and exception messages easier to understand - for instance, the default
        name created for the C{Word} expression without calling C{setName} is C{"W:(ABCD...)"}.
        (RRGRKRMRRv(Rtflag((s9/usr/lib/python2.7/site-packages/pip/_vendor/pyparsing.pytsetDebugs#	cCs|jS(N(R(R((s9/usr/lib/python2.7/site-packages/pip/_vendor/pyparsing.pyR@scCs
t|S(N(R(R((s9/usr/lib/python2.7/site-packages/pip/_vendor/pyparsing.pyRCscCst|_d|_|S(N(RRwRRm(R((s9/usr/lib/python2.7/site-packages/pip/_vendor/pyparsing.pyRFs		cCsdS(N((RR((s9/usr/lib/python2.7/site-packages/pip/_vendor/pyparsing.pytcheckRecursionKscCs|jgdS(sj
        Check defined expressions for valid structure, check for infinite recursive definitions.
        N(R(Rt
validateTrace((s9/usr/lib/python2.7/site-packages/pip/_vendor/pyparsing.pytvalidateNscCsy|j}Wn5tk
rGt|d}|j}WdQXnXy|j||SWn(tk
r}tjr}q|nXdS(s
        Execute the parse expression on the given file or filename.
        If a filename is specified (instead of a file object),
        the entire file is opened, read, and closed before parsing.
        trN(treadRtopenRRR"R(Rtfile_or_filenameRt
file_contentstfRL((s9/usr/lib/python2.7/site-packages/pip/_vendor/pyparsing.pyt	parseFileTs
	cCsdt|tr1||kp0t|t|kSt|trM|j|Stt||kSdS(N(RsR"tvarsRRtsuper(RR	((s9/usr/lib/python2.7/site-packages/pip/_vendor/pyparsing.pyt__eq__hs
"
cCs||kS(N((RR	((s9/usr/lib/python2.7/site-packages/pip/_vendor/pyparsing.pyt__ne__pscCstt|S(N(thashtid(R((s9/usr/lib/python2.7/site-packages/pip/_vendor/pyparsing.pyt__hash__sscCs
||kS(N((RR	((s9/usr/lib/python2.7/site-packages/pip/_vendor/pyparsing.pyt__req__vscCs||kS(N((RR	((s9/usr/lib/python2.7/site-packages/pip/_vendor/pyparsing.pyt__rne__yscCs:y!|jt|d|tSWntk
r5tSXdS(s
        Method for quick testing of a parser against a test string. Good for simple 
        inline microtests of sub expressions while building up larger parser.
           
        Parameters:
         - testString - to test against this expression for a match
         - parseAll - (default=C{True}) - flag to pass to C{L{parseString}} when running tests
            
        Example::
            expr = Word(nums)
            assert expr.matches("100")
        RN(RRRRR(Rt
testStringR((s9/usr/lib/python2.7/site-packages/pip/_vendor/pyparsing.pyR|s


t#cCsyt|tr6tttj|jj}nt|trTt|}ng}g}t	}	x|D]}
|dk	r|j|
ts|r|
r|j
|
qmn|
sqmndj||
g}g}yQ|
jdd}
|j|
d|}|j
|jd||	o%|}	Wntk
r}
t|
trPdnd}d|
kr|j
t|
j|
|j
dt|
j|
dd	|n|j
d|
jd	||j
d
t|
|	o|}	|
}n<tk
r*}|j
dt||	o|}	|}nX|rX|rG|j
dndj|GHn|j
|
|fqmW|	|fS(
s3
        Execute the parse expression on a series of test strings, showing each
        test, the parsed results or where the parse failed. Quick and easy way to
        run a parse expression against a list of sample strings.
           
        Parameters:
         - tests - a list of separate test strings, or a multiline string of test strings
         - parseAll - (default=C{True}) - flag to pass to C{L{parseString}} when running tests           
         - comment - (default=C{'#'}) - expression for indicating embedded comments in the test 
              string; pass None to disable comment filtering
         - fullDump - (default=C{True}) - dump results as list followed by results names in nested outline;
              if False, only dump nested list
         - printResults - (default=C{True}) prints test output to stdout
         - failureTests - (default=C{False}) indicates if these tests are expected to fail parsing

        Returns: a (success, results) tuple, where success indicates that all tests succeeded
        (or failed if C{failureTests} is True), and the results contain a list of lines of each 
        test's output
        
        Example::
            number_expr = pyparsing_common.number.copy()

            result = number_expr.runTests('''
                # unsigned integer
                100
                # negative integer
                -100
                # float with scientific notation
                6.02e23
                # integer with scientific notation
                1e-12
                ''')
            print("Success" if result[0] else "Failed!")

            result = number_expr.runTests('''
                # stray character
                100Z
                # missing leading digit before '.'
                -.100
                # too many '.'
                3.14.159
                ''', failureTests=True)
            print("Success" if result[0] else "Failed!")
        prints::
            # unsigned integer
            100
            [100]

            # negative integer
            -100
            [-100]

            # float with scientific notation
            6.02e23
            [6.02e+23]

            # integer with scientific notation
            1e-12
            [1e-12]

            Success
            
            # stray character
            100Z
               ^
            FAIL: Expected end of text (at char 3), (line:1, col:4)

            # missing leading digit before '.'
            -.100
            ^
            FAIL: Expected {real number with scientific notation | real number | signed integer} (at char 0), (line:1, col:1)

            # too many '.'
            3.14.159
                ^
            FAIL: Expected end of text (at char 4), (line:1, col:5)

            Success

        Each test string must be on a single line. If you want to test a string that spans multiple
        lines, create a test like this::

            expr.runTests(r"this is a test\n of strings that spans \n 3 lines")
        
        (Note that this is a raw string literal, you must include the leading 'r'.)
        s
s\nRR6s(FATAL)Rrt it^sFAIL: sFAIL-EXCEPTION: N(RsRRRRuRtrstript
splitlinesRRRRRRRRRR3RRRERR7Ra(RttestsRtcommenttfullDumptprintResultstfailureTestst
allResultstcommentstsuccessRpRtresultRRRL((s9/usr/lib/python2.7/site-packages/pip/_vendor/pyparsing.pytrunTestssNW'
+
,	
N(PRRRRfRRtstaticmethodRhRjRRRRRRRzRRRRRRRRRRRRRRRRRRRRRRRRRt_MAX_INTRR{RRR
RRRRRRRRRRRRRRRRRRRRRRRRRR	RR
RRRRR"(((s9/usr/lib/python2.7/site-packages/pip/_vendor/pyparsing.pyR"8s			&	
		
	
		H			"2G	+					D																	
)									cBseZdZdZRS(sT
    Abstract C{ParserElement} subclass, for defining atomic matching patterns.
    cCstt|jdtdS(NR(RR*RR(R((s9/usr/lib/python2.7/site-packages/pip/_vendor/pyparsing.pyR	s(RRRR(((s9/usr/lib/python2.7/site-packages/pip/_vendor/pyparsing.pyR*	scBseZdZdZRS(s,
    An empty token, will always match.
    cCs2tt|jd|_t|_t|_dS(NR(RRRRRRsRRx(R((s9/usr/lib/python2.7/site-packages/pip/_vendor/pyparsing.pyR	s		(RRRR(((s9/usr/lib/python2.7/site-packages/pip/_vendor/pyparsing.pyR	scBs#eZdZdZedZRS(s(
    A token that will never match.
    cCs;tt|jd|_t|_t|_d|_dS(NRsUnmatchable token(	RRRRRRsRRxRy(R((s9/usr/lib/python2.7/site-packages/pip/_vendor/pyparsing.pyR*	s
			cCst|||j|dS(N(RRy(RRERR((s9/usr/lib/python2.7/site-packages/pip/_vendor/pyparsing.pyR1	s(RRRRRR(((s9/usr/lib/python2.7/site-packages/pip/_vendor/pyparsing.pyR&	s	cBs#eZdZdZedZRS(s
    Token to exactly match a specified string.
    
    Example::
        Literal('blah').parseString('blah')  # -> ['blah']
        Literal('blah').parseString('blahfooblah')  # -> ['blah']
        Literal('blah').parseString('bla')  # -> Exception: Expected "blah"
    
    For case-insensitive matching, use L{CaselessLiteral}.
    
    For keyword matching (force word break before and after the matched string),
    use L{Keyword} or L{CaselessKeyword}.
    cCstt|j||_t||_y|d|_Wn0tk
rntj	dt
ddt|_nXdt
|j|_d|j|_t|_t|_dS(Nis2null string passed to Literal; use Empty() insteadRis"%s"s	Expected (RRRtmatchRtmatchLentfirstMatchCharRRRRRR^RRRyRRsRx(RtmatchString((s9/usr/lib/python2.7/site-packages/pip/_vendor/pyparsing.pyRC	s	
	

	cCsg|||jkrK|jdks7|j|j|rK||j|jfSt|||j|dS(Ni(R'R&t
startswithR%RRy(RRERR((s9/usr/lib/python2.7/site-packages/pip/_vendor/pyparsing.pyRV	s$(RRRRRR(((s9/usr/lib/python2.7/site-packages/pip/_vendor/pyparsing.pyR5	s
	cBsKeZdZedZdedZedZ	dZ
edZRS(s\
    Token to exactly match a specified string as a keyword, that is, it must be
    immediately followed by a non-keyword character.  Compare with C{L{Literal}}:
     - C{Literal("if")} will match the leading C{'if'} in C{'ifAndOnlyIf'}.
     - C{Keyword("if")} will not; it will only match the leading C{'if'} in C{'if x=1'}, or C{'if(y==2)'}
    Accepts two optional constructor arguments in addition to the keyword string:
     - C{identChars} is a string of characters that would be valid identifier characters,
          defaulting to all alphanumerics + "_" and "$"
     - C{caseless} allows case-insensitive matching, default is C{False}.
       
    Example::
        Keyword("start").parseString("start")  # -> ['start']
        Keyword("start").parseString("starting")  # -> Exception

    For case-insensitive matching, use L{CaselessKeyword}.
    s_$cCstt|j|dkr+tj}n||_t||_y|d|_Wn't	k
r}t
jdtddnXd|j|_
d|j
|_t|_t|_||_|r|j|_|j}nt||_dS(Nis2null string passed to Keyword; use Empty() insteadRis"%s"s	Expected (RRRRtDEFAULT_KEYWORD_CHARSR%RR&R'RRRRRRyRRsRxtcaselesstuppert
caselessmatchRt
identChars(RR(R.R+((s9/usr/lib/python2.7/site-packages/pip/_vendor/pyparsing.pyRq	s&	
				cCsb|jr||||j!j|jkrF|t||jkse|||jj|jkrF|dks||dj|jkrF||j|jfSn|||jkrF|jdks|j|j|rF|t||jks|||j|jkrF|dks2||d|jkrF||j|jfSt	|||j
|dS(Nii(R+R&R,R-RR.R%R'R)RRy(RRERR((s9/usr/lib/python2.7/site-packages/pip/_vendor/pyparsing.pyR	s	#9)$3#cCs%tt|j}tj|_|S(N(RRRR*R.(RR((s9/usr/lib/python2.7/site-packages/pip/_vendor/pyparsing.pyR	scCs
|t_dS(s,Overrides the default Keyword chars
        N(RR*(Rg((s9/usr/lib/python2.7/site-packages/pip/_vendor/pyparsing.pytsetDefaultKeywordChars	sN(
RRRR1R*RRRRRRR#R/(((s9/usr/lib/python2.7/site-packages/pip/_vendor/pyparsing.pyR^	s
	cBs#eZdZdZedZRS(sl
    Token to match a specified string, ignoring case of letters.
    Note: the matched results will always be in the case of the given
    match string, NOT the case of the input text.

    Example::
        OneOrMore(CaselessLiteral("CMD")).parseString("cmd CMD Cmd10") # -> ['CMD', 'CMD', 'CMD']
        
    (Contrast with example for L{CaselessKeyword}.)
    cCsItt|j|j||_d|j|_d|j|_dS(Ns'%s's	Expected (RRRR,treturnStringRRy(RR(((s9/usr/lib/python2.7/site-packages/pip/_vendor/pyparsing.pyR	s	cCsS||||j!j|jkr7||j|jfSt|||j|dS(N(R&R,R%R0RRy(RRERR((s9/usr/lib/python2.7/site-packages/pip/_vendor/pyparsing.pyR	s#(RRRRRR(((s9/usr/lib/python2.7/site-packages/pip/_vendor/pyparsing.pyR	s
	cBs&eZdZddZedZRS(s
    Caseless version of L{Keyword}.

    Example::
        OneOrMore(CaselessKeyword("CMD")).parseString("cmd CMD Cmd10") # -> ['CMD', 'CMD']
        
    (Contrast with example for L{CaselessLiteral}.)
    cCs#tt|j||dtdS(NR+(RRRR(RR(R.((s9/usr/lib/python2.7/site-packages/pip/_vendor/pyparsing.pyR	scCs||||j!j|jkrp|t||jks\|||jj|jkrp||j|jfSt|||j|dS(N(R&R,R-RR.R%RRy(RRERR((s9/usr/lib/python2.7/site-packages/pip/_vendor/pyparsing.pyR	s#9N(RRRRRRR(((s9/usr/lib/python2.7/site-packages/pip/_vendor/pyparsing.pyR	scBs&eZdZddZedZRS(sx
    A variation on L{Literal} which matches "close" matches, that is, 
    strings with at most 'n' mismatching characters. C{CloseMatch} takes parameters:
     - C{match_string} - string to be matched
     - C{maxMismatches} - (C{default=1}) maximum number of mismatches allowed to count as a match
    
    The results from a successful parse will contain the matched text from the input string and the following named results:
     - C{mismatches} - a list of the positions within the match_string where mismatches were found
     - C{original} - the original match_string used to compare against the input string
    
    If C{mismatches} is an empty list, then the match was an exact match.
    
    Example::
        patt = CloseMatch("ATCATCGAATGGA")
        patt.parseString("ATCATCGAAXGGA") # -> (['ATCATCGAAXGGA'], {'mismatches': [[9]], 'original': ['ATCATCGAATGGA']})
        patt.parseString("ATCAXCGAAXGGA") # -> Exception: Expected 'ATCATCGAATGGA' (with up to 1 mismatches) (at char 0), (line:1, col:1)

        # exact match
        patt.parseString("ATCATCGAATGGA") # -> (['ATCATCGAATGGA'], {'mismatches': [[]], 'original': ['ATCATCGAATGGA']})

        # close match allowing up to 2 mismatches
        patt = CloseMatch("ATCATCGAATGGA", maxMismatches=2)
        patt.parseString("ATCAXCGAAXGGA") # -> (['ATCAXCGAAXGGA'], {'mismatches': [[4, 9]], 'original': ['ATCATCGAATGGA']})
    icCs]tt|j||_||_||_d|j|jf|_t|_t|_	dS(Ns&Expected %r (with up to %d mismatches)(
RRjRRtmatch_stringt
maxMismatchesRyRRxRs(RR1R2((s9/usr/lib/python2.7/site-packages/pip/_vendor/pyparsing.pyR	s				cCs|}t|}|t|j}||kr|j}d}g}	|j}
xtt|||!|jD]J\}}|\}}
||
kro|	j|t|	|
krPqqoqoW|d}t|||!g}|j|d<|	|d<||fSnt|||j|dS(Niitoriginalt
mismatches(	RR1R2RRRR RRy(RRERRtstartRtmaxlocR1tmatch_stringlocR4R2ts_mtsrctmattresults((s9/usr/lib/python2.7/site-packages/pip/_vendor/pyparsing.pyR	s(		,




(RRRRRR(((s9/usr/lib/python2.7/site-packages/pip/_vendor/pyparsing.pyRj	s	cBs>eZdZddddeddZedZdZRS(s	
    Token for matching words composed of allowed character sets.
    Defined with string containing all allowed initial characters,
    an optional string containing allowed body characters (if omitted,
    defaults to the initial character set), and an optional minimum,
    maximum, and/or exact length.  The default value for C{min} is 1 (a
    minimum value < 1 is not valid); the default values for C{max} and C{exact}
    are 0, meaning no maximum or exact length restriction. An optional
    C{excludeChars} parameter can list characters that might be found in 
    the input C{bodyChars} string; useful to define a word of all printables
    except for one or two characters, for instance.
    
    L{srange} is useful for defining custom character set strings for defining 
    C{Word} expressions, using range notation from regular expression character sets.
    
    A common mistake is to use C{Word} to match a specific literal string, as in 
    C{Word("Address")}. Remember that C{Word} uses the string argument to define
    I{sets} of matchable characters. This expression would match "Add", "AAA",
    "dAred", or any other word made up of the characters 'A', 'd', 'r', 'e', and 's'.
    To match an exact literal string, use L{Literal} or L{Keyword}.

    pyparsing includes helper strings for building Words:
     - L{alphas}
     - L{nums}
     - L{alphanums}
     - L{hexnums}
     - L{alphas8bit} (alphabetic characters in ASCII range 128-255 - accented, tilded, umlauted, etc.)
     - L{punc8bit} (non-alphabetic characters in ASCII range 128-255 - currency, symbols, superscripts, diacriticals, etc.)
     - L{printables} (any non-whitespace character)

    Example::
        # a word composed of digits
        integer = Word(nums) # equivalent to Word("0123456789") or Word(srange("0-9"))
        
        # a word with a leading capital, and zero or more lowercase
        capital_word = Word(alphas.upper(), alphas.lower())

        # hostnames are alphanumeric, with leading alpha, and '-'
        hostname = Word(alphas, alphanums+'-')
        
        # roman numeral (not a strict parser, accepts invalid mix of characters)
        roman = Word("IVXLCDM")
        
        # any string of non-whitespace characters, except for ','
        csv_value = Word(printables, excludeChars=",")
    iicstt|jrcdjfd|D}|rcdjfd|D}qcn||_t||_|r||_t||_n||_t||_|dk|_	|dkrt
dn||_|dkr||_n	t
|_|dkr)||_||_nt||_d|j|_t|_||_d|j|jkr}|dkr}|dkr}|dkr}|j|jkrd	t|j|_net|jdkrd
tj|jt|jf|_n%dt|jt|jf|_|jrDd|jd|_nytj|j|_Wq}tk
ryd|_q}XndS(
NRrc3s!|]}|kr|VqdS(N((RR(texcludeChars(s9/usr/lib/python2.7/site-packages/pip/_vendor/pyparsing.pys	7
sc3s!|]}|kr|VqdS(N((RR(R<(s9/usr/lib/python2.7/site-packages/pip/_vendor/pyparsing.pys	9
siisZcannot specify a minimum length < 1; use Optional(Word()) if zero-length word is permitteds	Expected Rs[%s]+s%s[%s]*s	[%s][%s]*s\b(RR-RRt
initCharsOrigRt	initCharst
bodyCharsOrigt	bodyCharstmaxSpecifiedRtminLentmaxLenR$RRRyRRxt	asKeywordt_escapeRegexRangeCharstreStringRR|tescapetcompileRaR(RR>R@tmintmaxtexactRDR<((R<s9/usr/lib/python2.7/site-packages/pip/_vendor/pyparsing.pyR4
sT%								:	
c
Cs|jr[|jj||}|s?t|||j|n|j}||jfS|||jkrt|||j|n|}|d7}t|}|j}||j	}t
||}x*||kr|||kr|d7}qWt}	|||jkrt
}	n|jrG||krG|||krGt
}	n|jr|dkrp||d|ks||kr|||krt
}	qn|	rt|||j|n||||!fS(Nii(R|R%RRytendtgroupR>RR@RCRIRRBRRARD(
RRERRR!R5Rt	bodycharsR6tthrowException((s9/usr/lib/python2.7/site-packages/pip/_vendor/pyparsing.pyRj
s6	
	
	%		<cCsytt|jSWntk
r*nX|jdkrd}|j|jkr}d||j||jf|_qd||j|_n|jS(NcSs&t|dkr|d dS|SdS(Nis...(R(R((s9/usr/lib/python2.7/site-packages/pip/_vendor/pyparsing.pyt
charsAsStr
ss	W:(%s,%s)sW:(%s)(RR-RRaRmRR=R?(RRP((s9/usr/lib/python2.7/site-packages/pip/_vendor/pyparsing.pyR
s
	(N(	RRRRRRRRR(((s9/usr/lib/python2.7/site-packages/pip/_vendor/pyparsing.pyR-
s.6#cBsDeZdZeejdZddZedZ	dZ
RS(s
    Token for matching strings that match a given regular expression.
    Defined with string specifying the regular expression in a form recognized by the inbuilt Python re module.
    If the given regex contains named groups (defined using C{(?P...)}), these will be preserved as 
    named parse results.

    Example::
        realnum = Regex(r"[+-]?\d+\.\d*")
        date = Regex(r'(?P\d{4})-(?P\d\d?)-(?P\d\d?)')
        # ref: http://stackoverflow.com/questions/267399/how-do-you-match-only-valid-roman-numerals-with-a-regular-expression
        roman = Regex(r"M{0,4}(CM|CD|D?C{0,3})(XC|XL|L?X{0,3})(IX|IV|V?I{0,3})")
    s[A-Z]icCs3tt|jt|tr|sAtjdtddn||_||_	y+t
j|j|j	|_
|j|_Wqt
jk
rtjd|tddqXnIt|tjr||_
t||_|_||_	ntdt||_d|j|_t|_t|_dS(sThe parameters C{pattern} and C{flags} are passed to the C{re.compile()} function as-is. See the Python C{re} module for an explanation of the acceptable patterns and flags.s0null string passed to Regex; use Empty() insteadRis$invalid pattern (%s) passed to RegexsCRegex may only be constructed with a string or a compiled RE objects	Expected N(RR%RRsRRRRtpatterntflagsR|RHRFt
sre_constantsterrortcompiledREtypeRuRRRRyRRxRRs(RRQRR((s9/usr/lib/python2.7/site-packages/pip/_vendor/pyparsing.pyR
s.			


		cCs|jj||}|s6t|||j|n|j}|j}t|j}|rx|D]}||||eZdZddeededZedZdZRS(s
    Token for matching strings that are delimited by quoting characters.
    
    Defined with the following parameters:
        - quoteChar - string of one or more characters defining the quote delimiting string
        - escChar - character to escape quotes, typically backslash (default=C{None})
        - escQuote - special quote sequence to escape an embedded quote string (such as SQL's "" to escape an embedded ") (default=C{None})
        - multiline - boolean indicating whether quotes can span multiple lines (default=C{False})
        - unquoteResults - boolean indicating whether the matched text should be unquoted (default=C{True})
        - endQuoteChar - string of one or more characters defining the end of the quote delimited string (default=C{None} => same as quoteChar)
        - convertWhitespaceEscapes - convert escaped whitespace (C{'\t'}, C{'\n'}, etc.) to actual whitespace (default=C{True})

    Example::
        qs = QuotedString('"')
        print(qs.searchString('lsjdf "This is the quote" sldjf'))
        complex_qs = QuotedString('{{', endQuoteChar='}}')
        print(complex_qs.searchString('lsjdf {{This is the "quote"}} sldjf'))
        sql_qs = QuotedString('"', escQuote='""')
        print(sql_qs.searchString('lsjdf "This is the quote with ""embedded"" quotes" sldjf'))
    prints::
        [['This is the quote']]
        [['This is the "quote"']]
        [['This is the quote with "embedded" quotes']]
    c	sttj|j}|sGtjdtddtn|dkr\|}n4|j}|stjdtddtn|_	t
|_|d_|_
t
|_|_|_|_|_|rTtjtjB_dtjj	tj
d|dk	rDt|pGdf_nPd_dtjj	tj
d|dk	rt|pdf_t
j
d	krjd
djfdtt
j
d	dd
Dd7_n|r*jdtj|7_n|rhjdtj|7_tjjd_njdtjj
7_y+tjjj_j_Wn4tj k
rtjdjtddnXt!_"dj"_#t$_%t&_'dS(Ns$quoteChar cannot be the empty stringRis'endQuoteChar cannot be the empty stringis%s(?:[^%s%s]Rrs%s(?:[^%s\n\r%s]is|(?:s)|(?:c3s<|]2}dtjj| tj|fVqdS(s%s[^%s]N(R|RGtendQuoteCharRE(RR(R(s9/usr/lib/python2.7/site-packages/pip/_vendor/pyparsing.pys	/sit)s|(?:%s)s|(?:%s.)s(.)s)*%ss$invalid pattern (%s) passed to Regexs	Expected ((RR#RRRRRtSyntaxErrorRt	quoteCharRtquoteCharLentfirstQuoteCharRXtendQuoteCharLentescChartescQuotetunquoteResultstconvertWhitespaceEscapesR|t	MULTILINEtDOTALLRRRGRERQRRtescCharReplacePatternRHRFRSRTRRRyRRxRRs(RR[R_R`t	multilineRaRXRb((Rs9/usr/lib/python2.7/site-packages/pip/_vendor/pyparsing.pyRsf		
					(	%E
	c	CsT|||jkr(|jj||p+d}|sOt|||j|n|j}|j}|jrJ||j	|j
!}t|trJd|kr|j
ridd6dd6dd6dd	6}x/|jD]\}}|j||}qWn|jr tj|jd
|}n|jrG|j|j|j}qGqJn||fS(Ns\s	s\ts
s\nss\fs
s\rs\g<1>(R]R|R%RRRyRLRMRaR\R^RsRRbRRR_RReR`RX(	RRERRR!R}tws_maptwslittwschar((s9/usr/lib/python2.7/site-packages/pip/_vendor/pyparsing.pyRGs*.	
		!cCs]ytt|jSWntk
r*nX|jdkrVd|j|jf|_n|jS(Ns.quoted string, starting with %s ending with %s(RR#RRaRmRR[RX(R((s9/usr/lib/python2.7/site-packages/pip/_vendor/pyparsing.pyRjs
N(	RRRRRRRRR(((s9/usr/lib/python2.7/site-packages/pip/_vendor/pyparsing.pyR#
sA#cBs5eZdZddddZedZdZRS(s
    Token for matching words composed of characters I{not} in a given set (will
    include whitespace in matched characters if not listed in the provided exclusion set - see example).
    Defined with string containing all disallowed characters, and an optional
    minimum, maximum, and/or exact length.  The default value for C{min} is 1 (a
    minimum value < 1 is not valid); the default values for C{max} and C{exact}
    are 0, meaning no maximum or exact length restriction.

    Example::
        # define a comma-separated-value as anything that is not a ','
        csv_value = CharsNotIn(',')
        print(delimitedList(csv_value).parseString("dkls,lsdkjf,s12 34,@!#,213"))
    prints::
        ['dkls', 'lsdkjf', 's12 34', '@!#', '213']
    iicCstt|jt|_||_|dkr@tdn||_|dkra||_n	t	|_|dkr||_||_nt
||_d|j|_|jdk|_
t|_dS(Nisfcannot specify a minimum length < 1; use Optional(CharsNotIn()) if zero-length char group is permittedis	Expected (RRRRRptnotCharsRRBRCR$RRRyRsRx(RRjRIRJRK((s9/usr/lib/python2.7/site-packages/pip/_vendor/pyparsing.pyRs 					cCs|||jkr.t|||j|n|}|d7}|j}t||jt|}x*||kr|||kr|d7}qfW|||jkrt|||j|n||||!fS(Ni(RjRRyRIRCRRB(RRERRR5tnotcharstmaxlen((s9/usr/lib/python2.7/site-packages/pip/_vendor/pyparsing.pyRs
	cCsytt|jSWntk
r*nX|jdkryt|jdkrfd|jd |_qyd|j|_n|jS(Nis
!W:(%s...)s!W:(%s)(RRRRaRmRRRj(R((s9/usr/lib/python2.7/site-packages/pip/_vendor/pyparsing.pyRs
(RRRRRRR(((s9/usr/lib/python2.7/site-packages/pip/_vendor/pyparsing.pyRvscBsXeZdZidd6dd6dd6dd6d	d
6Zddd
d
dZedZRS(s
    Special matching class for matching whitespace.  Normally, whitespace is ignored
    by pyparsing grammars.  This class is included when some whitespace structures
    are significant.  Define with a string containing the whitespace characters to be
    matched; default is C{" \t\r\n"}.  Also takes optional C{min}, C{max}, and C{exact} arguments,
    as defined for the C{L{Word}} class.
    sRss	ss
ss
sss 	
iicsttj|_jdjfdjDdjdjD_t_	dj_
|_|dkr|_n	t
_|dkr|_|_ndS(NRrc3s$|]}|jkr|VqdS(N(t
matchWhite(RR(R(s9/usr/lib/python2.7/site-packages/pip/_vendor/pyparsing.pys	scss|]}tj|VqdS(N(R,t	whiteStrs(RR((s9/usr/lib/python2.7/site-packages/pip/_vendor/pyparsing.pys	ss	Expected i(RR,RRmRRRqRRRsRyRBRCR$(RtwsRIRJRK((Rs9/usr/lib/python2.7/site-packages/pip/_vendor/pyparsing.pyRs	)				cCs|||jkr.t|||j|n|}|d7}||j}t|t|}x-||kr|||jkr|d7}qcW|||jkrt|||j|n||||!fS(Ni(RmRRyRCRIRRB(RRERRR5R6((s9/usr/lib/python2.7/site-packages/pip/_vendor/pyparsing.pyRs

"(RRRRnRRR(((s9/usr/lib/python2.7/site-packages/pip/_vendor/pyparsing.pyR,s
t_PositionTokencBseZdZRS(cCs8tt|j|jj|_t|_t|_	dS(N(
RRpRR^RRRRsRRx(R((s9/usr/lib/python2.7/site-packages/pip/_vendor/pyparsing.pyRs	(RRR(((s9/usr/lib/python2.7/site-packages/pip/_vendor/pyparsing.pyRpscBs,eZdZdZdZedZRS(sb
    Token to advance to a specific column of input text; useful for tabular report scraping.
    cCs tt|j||_dS(N(RRRR7(Rtcolno((s9/usr/lib/python2.7/site-packages/pip/_vendor/pyparsing.pyRscCst|||jkrt|}|jrB|j||}nxE||kr||jrt|||jkr|d7}qEWn|S(Ni(R7RRuRtisspace(RRERR((s9/usr/lib/python2.7/site-packages/pip/_vendor/pyparsing.pyRs	7cCs^t||}||jkr6t||d|n||j|}|||!}||fS(NsText not in expected column(R7R(RRERRtthiscoltnewlocR}((s9/usr/lib/python2.7/site-packages/pip/_vendor/pyparsing.pyRs
(RRRRRRR(((s9/usr/lib/python2.7/site-packages/pip/_vendor/pyparsing.pyRs			cBs#eZdZdZedZRS(s
    Matches if current position is at the beginning of a line within the parse string
    
    Example::
    
        test = '''        AAA this line
        AAA and this line
          AAA but not this one
        B AAA and definitely not this one
        '''

        for t in (LineStart() + 'AAA' + restOfLine).searchString(test):
            print(t)
    
    Prints::
        ['AAA', ' this line']
        ['AAA', ' and this line']    

    cCs tt|jd|_dS(NsExpected start of line(RRRRy(R((s9/usr/lib/python2.7/site-packages/pip/_vendor/pyparsing.pyR&scCs;t||dkr|gfSt|||j|dS(Ni(R7RRy(RRERR((s9/usr/lib/python2.7/site-packages/pip/_vendor/pyparsing.pyR*s
(RRRRRR(((s9/usr/lib/python2.7/site-packages/pip/_vendor/pyparsing.pyRs	cBs#eZdZdZedZRS(sU
    Matches if current position is at the end of a line within the parse string
    cCs<tt|j|jtjjddd|_dS(Ns
RrsExpected end of line(RRRRR"RfRRy(R((s9/usr/lib/python2.7/site-packages/pip/_vendor/pyparsing.pyR3scCs|t|krK||dkr0|ddfSt|||j|n8|t|krk|dgfSt|||j|dS(Ns
i(RRRy(RRERR((s9/usr/lib/python2.7/site-packages/pip/_vendor/pyparsing.pyR8s(RRRRRR(((s9/usr/lib/python2.7/site-packages/pip/_vendor/pyparsing.pyR/s	cBs#eZdZdZedZRS(sM
    Matches if current position is at the beginning of the parse string
    cCs tt|jd|_dS(NsExpected start of text(RR(RRy(R((s9/usr/lib/python2.7/site-packages/pip/_vendor/pyparsing.pyRGscCsL|dkrB||j|dkrBt|||j|qBn|gfS(Ni(RRRy(RRERR((s9/usr/lib/python2.7/site-packages/pip/_vendor/pyparsing.pyRKs(RRRRRR(((s9/usr/lib/python2.7/site-packages/pip/_vendor/pyparsing.pyR(Cs	cBs#eZdZdZedZRS(sG
    Matches if current position is at the end of the parse string
    cCs tt|jd|_dS(NsExpected end of text(RR'RRy(R((s9/usr/lib/python2.7/site-packages/pip/_vendor/pyparsing.pyRVscCs|t|kr-t|||j|nT|t|krM|dgfS|t|kri|gfSt|||j|dS(Ni(RRRy(RRERR((s9/usr/lib/python2.7/site-packages/pip/_vendor/pyparsing.pyRZs
(RRRRRR(((s9/usr/lib/python2.7/site-packages/pip/_vendor/pyparsing.pyR'Rs	cBs&eZdZedZedZRS(sp
    Matches if the current position is at the beginning of a Word, and
    is not preceded by any character in a given set of C{wordChars}
    (default=C{printables}). To emulate the C{\b} behavior of regular expressions,
    use C{WordStart(alphanums)}. C{WordStart} will also match at the beginning of
    the string being parsed, or at the beginning of a line.
    cCs/tt|jt||_d|_dS(NsNot at the start of a word(RR/RRt	wordCharsRy(RRu((s9/usr/lib/python2.7/site-packages/pip/_vendor/pyparsing.pyRlscCs^|dkrT||d|jks6|||jkrTt|||j|qTn|gfS(Nii(RuRRy(RRERR((s9/usr/lib/python2.7/site-packages/pip/_vendor/pyparsing.pyRqs
(RRRRTRRR(((s9/usr/lib/python2.7/site-packages/pip/_vendor/pyparsing.pyR/dscBs&eZdZedZedZRS(sZ
    Matches if the current position is at the end of a Word, and
    is not followed by any character in a given set of C{wordChars}
    (default=C{printables}). To emulate the C{\b} behavior of regular expressions,
    use C{WordEnd(alphanums)}. C{WordEnd} will also match at the end of
    the string being parsed, or at the end of a line.
    cCs8tt|jt||_t|_d|_dS(NsNot at the end of a word(RR.RRRuRRpRy(RRu((s9/usr/lib/python2.7/site-packages/pip/_vendor/pyparsing.pyRs	cCsvt|}|dkrl||krl|||jksN||d|jkrlt|||j|qln|gfS(Nii(RRuRRy(RRERRR((s9/usr/lib/python2.7/site-packages/pip/_vendor/pyparsing.pyRs(RRRRTRRR(((s9/usr/lib/python2.7/site-packages/pip/_vendor/pyparsing.pyR.xscBsqeZdZedZdZdZdZdZdZ	dZ
edZgd	Zd
Z
RS(s^
    Abstract subclass of ParserElement, for combining and post-processing parsed tokens.
    cCstt|j|t|tr4t|}nt|tr[tj|g|_	nt|t
jrt|}td|Drt
tj|}nt||_	n3yt||_	Wntk
r|g|_	nXt|_dS(Ncss|]}t|tVqdS(N(RsR(RRF((s9/usr/lib/python2.7/site-packages/pip/_vendor/pyparsing.pys	s(RRRRsRRRR"RitexprsRtIterabletallRRRR}(RRvR((s9/usr/lib/python2.7/site-packages/pip/_vendor/pyparsing.pyRs
cCs|j|S(N(Rv(RR((s9/usr/lib/python2.7/site-packages/pip/_vendor/pyparsing.pyRscCs|jj|d|_|S(N(RvRRRm(RR	((s9/usr/lib/python2.7/site-packages/pip/_vendor/pyparsing.pyRs	cCsPt|_g|jD]}|j^q|_x|jD]}|jq8W|S(s~Extends C{leaveWhitespace} defined in base class, and also invokes C{leaveWhitespace} on
           all contained expressions.(RRpRvRR(RR((s9/usr/lib/python2.7/site-packages/pip/_vendor/pyparsing.pyRs
	%cCst|trb||jkrtt|j|x(|jD]}|j|jdq>Wqn>tt|j|x%|jD]}|j|jdqW|S(Ni(RsR)RuRRRRv(RR	R((s9/usr/lib/python2.7/site-packages/pip/_vendor/pyparsing.pyRscCsfytt|jSWntk
r*nX|jdkr_d|jjt|j	f|_n|jS(Ns%s:(%s)(
RRRRaRmRR^RRRv(R((s9/usr/lib/python2.7/site-packages/pip/_vendor/pyparsing.pyRs
%cCswtt|jx|jD]}|jqWt|jdkr`|jd}t||jr|jr|jdkr|j
r|j|jdg|_d|_|j|jO_|j
|j
O_
n|jd}t||jr`|jr`|jdkr`|j
r`|jd |j|_d|_|j|jO_|j
|j
O_
q`ndt||_|S(Niiiis	Expected (RRRRvRRsR^RkRnRRvRmRsRxRRy(RRR	((s9/usr/lib/python2.7/site-packages/pip/_vendor/pyparsing.pyRs0


	


	cCstt|j||}|S(N(RRR(RRRR}((s9/usr/lib/python2.7/site-packages/pip/_vendor/pyparsing.pyRscCs@||g}x|jD]}|j|qW|jgdS(N(RvRR(RRttmpR((s9/usr/lib/python2.7/site-packages/pip/_vendor/pyparsing.pyRscCs>tt|j}g|jD]}|j^q|_|S(N(RRRRv(RR}R((s9/usr/lib/python2.7/site-packages/pip/_vendor/pyparsing.pyRs%(RRRRRRRRRRRRRR(((s9/usr/lib/python2.7/site-packages/pip/_vendor/pyparsing.pyRs						
	"cBsWeZdZdefdYZedZedZdZdZ	dZ
RS(s

    Requires all given C{ParseExpression}s to be found in the given order.
    Expressions may be separated by whitespace.
    May be constructed using the C{'+'} operator.
    May also be constructed using the C{'-'} operator, which will suppress backtracking.

    Example::
        integer = Word(nums)
        name_expr = OneOrMore(Word(alphas))

        expr = And([integer("id"),name_expr("name"),integer("age")])
        # more easily written as:
        expr = integer("id") + name_expr("name") + integer("age")
    RcBseZdZRS(cOs3ttj|j||d|_|jdS(Nt-(RRRRRR(RRR((s9/usr/lib/python2.7/site-packages/pip/_vendor/pyparsing.pyR
s	(RRR(((s9/usr/lib/python2.7/site-packages/pip/_vendor/pyparsing.pyR
scCsltt|j||td|jD|_|j|jdj|jdj|_t	|_
dS(Ncss|]}|jVqdS(N(Rs(RR((s9/usr/lib/python2.7/site-packages/pip/_vendor/pyparsing.pys	
si(RRRRxRvRsRRqRpRR}(RRvR((s9/usr/lib/python2.7/site-packages/pip/_vendor/pyparsing.pyR
s
c	Cs?|jdj|||dt\}}t}x|jdD]}t|tjr`t}q<n|ry|j|||\}}Wqtk
rqtk
r}d|_
tj|qtk
rt|t
||j|qXn|j|||\}}|s$|jr<||7}q<q<W||fS(NiRi(RvRRRsRRRR!RRt
__traceback__RRRRyR(	RRERRt
resultlistt	errorStopRt
exprtokensR((s9/usr/lib/python2.7/site-packages/pip/_vendor/pyparsing.pyR
s((
	
%cCs.t|tr!tj|}n|j|S(N(RsRR"RiR(RR	((s9/usr/lib/python2.7/site-packages/pip/_vendor/pyparsing.pyR5
scCs@||g}x+|jD] }|j||jsPqqWdS(N(RvRRs(RRtsubRecCheckListR((s9/usr/lib/python2.7/site-packages/pip/_vendor/pyparsing.pyR:
s

	cCsVt|dr|jS|jdkrOddjd|jDd|_n|jS(NRt{Rcss|]}t|VqdS(N(R(RR((s9/usr/lib/python2.7/site-packages/pip/_vendor/pyparsing.pys	F
st}(RRRmRRRv(R((s9/usr/lib/python2.7/site-packages/pip/_vendor/pyparsing.pyRA
s
*(RRRRRRRRRRR(((s9/usr/lib/python2.7/site-packages/pip/_vendor/pyparsing.pyRs		cBsAeZdZedZedZdZdZdZ	RS(s
    Requires that at least one C{ParseExpression} is found.
    If two expressions match, the expression that matches the longest string will be used.
    May be constructed using the C{'^'} operator.

    Example::
        # construct Or using '^' operator
        
        number = Word(nums) ^ Combine(Word(nums) + '.' + Word(nums))
        print(number.searchString("123 3.1416 789"))
    prints::
        [['123'], ['3.1416'], ['789']]
    cCsNtt|j|||jrAtd|jD|_n	t|_dS(Ncss|]}|jVqdS(N(Rs(RR((s9/usr/lib/python2.7/site-packages/pip/_vendor/pyparsing.pys	\
s(RRRRvR4RsR(RRvR((s9/usr/lib/python2.7/site-packages/pip/_vendor/pyparsing.pyRY
s	cCsd}d}g}x|jD]}y|j||}Wntk
rw}	d|	_|	j|kr|	}|	j}qqtk
rt||krt|t||j|}t|}qqX|j	||fqW|rh|j
ddxn|D]c\}
}y|j|||SWqtk
r`}	d|	_|	j|kra|	}|	j}qaqXqWn|dk	r|j|_|nt||d|dS(NiRcSs	|dS(Ni((tx((s9/usr/lib/python2.7/site-packages/pip/_vendor/pyparsing.pyRqu
Rrs no defined alternatives to match(
RRvRRR{RRRRyRtsortRR(RRERRt	maxExcLoctmaxExceptionRRtloc2Rt_((s9/usr/lib/python2.7/site-packages/pip/_vendor/pyparsing.pyR`
s<	
		cCs.t|tr!tj|}n|j|S(N(RsRR"RiR(RR	((s9/usr/lib/python2.7/site-packages/pip/_vendor/pyparsing.pyt__ixor__
scCsVt|dr|jS|jdkrOddjd|jDd|_n|jS(NRRs ^ css|]}t|VqdS(N(R(RR((s9/usr/lib/python2.7/site-packages/pip/_vendor/pyparsing.pys	
sR(RRRmRRRv(R((s9/usr/lib/python2.7/site-packages/pip/_vendor/pyparsing.pyR
s
*cCs3||g}x|jD]}|j|qWdS(N(RvR(RRRR((s9/usr/lib/python2.7/site-packages/pip/_vendor/pyparsing.pyR
s(
RRRRRRRRRR(((s9/usr/lib/python2.7/site-packages/pip/_vendor/pyparsing.pyRK
s
&			cBsAeZdZedZedZdZdZdZ	RS(s
    Requires that at least one C{ParseExpression} is found.
    If two expressions match, the first one listed is the one that will match.
    May be constructed using the C{'|'} operator.

    Example::
        # construct MatchFirst using '|' operator
        
        # watch the order of expressions to match
        number = Word(nums) | Combine(Word(nums) + '.' + Word(nums))
        print(number.searchString("123 3.1416 789")) #  Fail! -> [['123'], ['3'], ['1416'], ['789']]

        # put more selective expression first
        number = Combine(Word(nums) + '.' + Word(nums)) | Word(nums)
        print(number.searchString("123 3.1416 789")) #  Better -> [['123'], ['3.1416'], ['789']]
    cCsNtt|j|||jrAtd|jD|_n	t|_dS(Ncss|]}|jVqdS(N(Rs(RR((s9/usr/lib/python2.7/site-packages/pip/_vendor/pyparsing.pys	
s(RRRRvR4RsR(RRvR((s9/usr/lib/python2.7/site-packages/pip/_vendor/pyparsing.pyR
s	c	Csd}d}x|jD]}y|j|||}|SWqtk
ro}|j|kr|}|j}qqtk
rt||krt|t||j|}t|}qqXqW|dk	r|j|_|nt||d|dS(Nis no defined alternatives to match(	RRvRRRRRRyR(	RRERRRRRR}R((s9/usr/lib/python2.7/site-packages/pip/_vendor/pyparsing.pyR
s$
	cCs.t|tr!tj|}n|j|S(N(RsRR"RiR(RR	((s9/usr/lib/python2.7/site-packages/pip/_vendor/pyparsing.pyt__ior__
scCsVt|dr|jS|jdkrOddjd|jDd|_n|jS(NRRs | css|]}t|VqdS(N(R(RR((s9/usr/lib/python2.7/site-packages/pip/_vendor/pyparsing.pys	
sR(RRRmRRRv(R((s9/usr/lib/python2.7/site-packages/pip/_vendor/pyparsing.pyR
s
*cCs3||g}x|jD]}|j|qWdS(N(RvR(RRRR((s9/usr/lib/python2.7/site-packages/pip/_vendor/pyparsing.pyR
s(
RRRRRRRRRR(((s9/usr/lib/python2.7/site-packages/pip/_vendor/pyparsing.pyR
s			cBs8eZdZedZedZdZdZRS(sm
    Requires all given C{ParseExpression}s to be found, but in any order.
    Expressions may be separated by whitespace.
    May be constructed using the C{'&'} operator.

    Example::
        color = oneOf("RED ORANGE YELLOW GREEN BLUE PURPLE BLACK WHITE BROWN")
        shape_type = oneOf("SQUARE CIRCLE TRIANGLE STAR HEXAGON OCTAGON")
        integer = Word(nums)
        shape_attr = "shape:" + shape_type("shape")
        posn_attr = "posn:" + Group(integer("x") + ',' + integer("y"))("posn")
        color_attr = "color:" + color("color")
        size_attr = "size:" + integer("size")

        # use Each (using operator '&') to accept attributes in any order 
        # (shape and posn are required, color and size are optional)
        shape_spec = shape_attr & posn_attr & Optional(color_attr) & Optional(size_attr)

        shape_spec.runTests('''
            shape: SQUARE color: BLACK posn: 100, 120
            shape: CIRCLE size: 50 color: BLUE posn: 50,80
            color:GREEN size:20 shape:TRIANGLE posn:20,40
            '''
            )
    prints::
        shape: SQUARE color: BLACK posn: 100, 120
        ['shape:', 'SQUARE', 'color:', 'BLACK', 'posn:', ['100', ',', '120']]
        - color: BLACK
        - posn: ['100', ',', '120']
          - x: 100
          - y: 120
        - shape: SQUARE


        shape: CIRCLE size: 50 color: BLUE posn: 50,80
        ['shape:', 'CIRCLE', 'size:', '50', 'color:', 'BLUE', 'posn:', ['50', ',', '80']]
        - color: BLUE
        - posn: ['50', ',', '80']
          - x: 50
          - y: 80
        - shape: CIRCLE
        - size: 50


        color: GREEN size: 20 shape: TRIANGLE posn: 20,40
        ['color:', 'GREEN', 'size:', '20', 'shape:', 'TRIANGLE', 'posn:', ['20', ',', '40']]
        - color: GREEN
        - posn: ['20', ',', '40']
          - x: 20
          - y: 40
        - shape: TRIANGLE
        - size: 20
    cCsKtt|j||td|jD|_t|_t|_dS(Ncss|]}|jVqdS(N(Rs(RR((s9/usr/lib/python2.7/site-packages/pip/_vendor/pyparsing.pys	s(	RR
RRxRvRsRRptinitExprGroups(RRvR((s9/usr/lib/python2.7/site-packages/pip/_vendor/pyparsing.pyRs	cCs4|jrLtd|jD|_g|jD]}t|tr/|j^q/}g|jD]%}|jr]t|tr]|^q]}|||_g|jD]}t|t	r|j^q|_
g|jD]}t|tr|j^q|_g|jD]$}t|tt	tfs|^q|_
|j
|j7_
t|_n|}|j
}|j}	g}
t}x|r_||	|j
|j}g}
x|D]}y|j||}Wntk
r|
j|qX|
j|jjt||||kr|j|q||	kr|	j|qqWt|
t|krut}ququW|rdjd|D}t||d|n|
g|jD]*}t|tr|j|	kr|^q7}
g}x6|
D].}|j|||\}}|j|qWt|tg}||fS(Ncss3|])}t|trt|j|fVqdS(N(RsRRRF(RR((s9/usr/lib/python2.7/site-packages/pip/_vendor/pyparsing.pys	ss, css|]}t|VqdS(N(R(RR((s9/usr/lib/python2.7/site-packages/pip/_vendor/pyparsing.pys	=ss*Missing one or more required elements (%s)(RRRvtopt1mapRsRRFRst	optionalsR0tmultioptionalsRt
multirequiredtrequiredRRRRRRRtremoveRRRtsumR (RRERRRtopt1topt2ttmpLocttmpReqdttmpOptt
matchOrdertkeepMatchingttmpExprstfailedtmissingR|R;tfinalResults((s9/usr/lib/python2.7/site-packages/pip/_vendor/pyparsing.pyRsP	.5
117

	

"
>
cCsVt|dr|jS|jdkrOddjd|jDd|_n|jS(NRRs & css|]}t|VqdS(N(R(RR((s9/usr/lib/python2.7/site-packages/pip/_vendor/pyparsing.pys	PsR(RRRmRRRv(R((s9/usr/lib/python2.7/site-packages/pip/_vendor/pyparsing.pyRKs
*cCs3||g}x|jD]}|j|qWdS(N(RvR(RRRR((s9/usr/lib/python2.7/site-packages/pip/_vendor/pyparsing.pyRTs(RRRRRRRR(((s9/usr/lib/python2.7/site-packages/pip/_vendor/pyparsing.pyR

s
51		cBs_eZdZedZedZdZdZdZ	dZ
gdZdZRS(	sa
    Abstract subclass of C{ParserElement}, for combining and post-processing parsed tokens.
    cCstt|j|t|trattjtrItj|}qatjt	|}n||_
d|_|dk	r|j
|_
|j|_|j|j|j|_|j|_|j|_|jj|jndS(N(RRRRsRt
issubclassR"RiR*RRFRRmRxRsRRqRpRoR}RuR(RRFR((s9/usr/lib/python2.7/site-packages/pip/_vendor/pyparsing.pyR^s		cCsG|jdk	r+|jj|||dtStd||j|dS(NRRr(RFRRRRRy(RRERR((s9/usr/lib/python2.7/site-packages/pip/_vendor/pyparsing.pyRpscCs>t|_|jj|_|jdk	r:|jjn|S(N(RRpRFRRR(R((s9/usr/lib/python2.7/site-packages/pip/_vendor/pyparsing.pyRvs
	cCst|trc||jkrtt|j||jdk	r`|jj|jdq`qn?tt|j||jdk	r|jj|jdn|S(Ni(RsR)RuRRRRFR(RR	((s9/usr/lib/python2.7/site-packages/pip/_vendor/pyparsing.pyR}s cCs6tt|j|jdk	r2|jjn|S(N(RRRRFR(R((s9/usr/lib/python2.7/site-packages/pip/_vendor/pyparsing.pyRscCsV||kr"t||gn||g}|jdk	rR|jj|ndS(N(R$RFRR(RRR((s9/usr/lib/python2.7/site-packages/pip/_vendor/pyparsing.pyRs
cCsA||g}|jdk	r0|jj|n|jgdS(N(RFRRR(RRRy((s9/usr/lib/python2.7/site-packages/pip/_vendor/pyparsing.pyRscCsuytt|jSWntk
r*nX|jdkrn|jdk	rnd|jjt	|jf|_n|jS(Ns%s:(%s)(
RRRRaRmRRFR^RR(R((s9/usr/lib/python2.7/site-packages/pip/_vendor/pyparsing.pyRs
%(
RRRRRRRRRRRRR(((s9/usr/lib/python2.7/site-packages/pip/_vendor/pyparsing.pyRZs				cBs#eZdZdZedZRS(s
    Lookahead matching of the given parse expression.  C{FollowedBy}
    does I{not} advance the parsing position within the input string, it only
    verifies that the specified parse expression matches at the current
    position.  C{FollowedBy} always returns a null token list.

    Example::
        # use FollowedBy to match a label only if it is followed by a ':'
        data_word = Word(alphas)
        label = data_word + FollowedBy(':')
        attr_expr = Group(label + Suppress(':') + OneOrMore(data_word, stopOn=label).setParseAction(' '.join))
        
        OneOrMore(attr_expr).parseString("shape: SQUARE color: BLACK posn: upper left").pprint()
    prints::
        [['shape', 'SQUARE'], ['color', 'BLACK'], ['posn', 'upper left']]
    cCs#tt|j|t|_dS(N(RRRRRs(RRF((s9/usr/lib/python2.7/site-packages/pip/_vendor/pyparsing.pyRscCs|jj|||gfS(N(RFR(RRERR((s9/usr/lib/python2.7/site-packages/pip/_vendor/pyparsing.pyRs(RRRRRR(((s9/usr/lib/python2.7/site-packages/pip/_vendor/pyparsing.pyRs	cBs,eZdZdZedZdZRS(s
    Lookahead to disallow matching with the given parse expression.  C{NotAny}
    does I{not} advance the parsing position within the input string, it only
    verifies that the specified parse expression does I{not} match at the current
    position.  Also, C{NotAny} does I{not} skip over leading whitespace. C{NotAny}
    always returns a null token list.  May be constructed using the '~' operator.

    Example::
        
    cCsBtt|j|t|_t|_dt|j|_	dS(NsFound unwanted token, (
RRRRRpRRsRRFRy(RRF((s9/usr/lib/python2.7/site-packages/pip/_vendor/pyparsing.pyRs		cCs:|jj||r0t|||j|n|gfS(N(RFRRRy(RRERR((s9/usr/lib/python2.7/site-packages/pip/_vendor/pyparsing.pyRscCsIt|dr|jS|jdkrBdt|jd|_n|jS(NRs~{R(RRRmRRRF(R((s9/usr/lib/python2.7/site-packages/pip/_vendor/pyparsing.pyRs
(RRRRRRR(((s9/usr/lib/python2.7/site-packages/pip/_vendor/pyparsing.pyRs
	t_MultipleMatchcBs eZddZedZRS(cCsftt|j|t|_|}t|trFtj|}n|dk	rY|nd|_
dS(N(RRRRRoRsRR"RiRt	not_ender(RRFtstopOntender((s9/usr/lib/python2.7/site-packages/pip/_vendor/pyparsing.pyRs	cCs|jj}|j}|jdk	}|r9|jj}n|rO|||n||||dt\}}y|j}	xo|r|||n|	r|||}
n|}
|||
|\}}|s|jr~||7}q~q~WWnt	t
fk
rnX||fS(NR(RFRRRRRRRuRRR(RRERRtself_expr_parsetself_skip_ignorablestcheck_endert
try_not_enderRthasIgnoreExprsRt	tmptokens((s9/usr/lib/python2.7/site-packages/pip/_vendor/pyparsing.pyRs,	N(RRRRRR(((s9/usr/lib/python2.7/site-packages/pip/_vendor/pyparsing.pyRscBseZdZdZRS(s
    Repetition of one or more of the given expression.
    
    Parameters:
     - expr - expression that must match one or more times
     - stopOn - (default=C{None}) - expression for a terminating sentinel
          (only required if the sentinel would ordinarily match the repetition 
          expression)          

    Example::
        data_word = Word(alphas)
        label = data_word + FollowedBy(':')
        attr_expr = Group(label + Suppress(':') + OneOrMore(data_word).setParseAction(' '.join))

        text = "shape: SQUARE posn: upper left color: BLACK"
        OneOrMore(attr_expr).parseString(text).pprint()  # Fail! read 'color' as data instead of next label -> [['shape', 'SQUARE color']]

        # use stopOn attribute for OneOrMore to avoid reading label string as part of the data
        attr_expr = Group(label + Suppress(':') + OneOrMore(data_word, stopOn=label).setParseAction(' '.join))
        OneOrMore(attr_expr).parseString(text).pprint() # Better -> [['shape', 'SQUARE'], ['posn', 'upper left'], ['color', 'BLACK']]
        
        # could also be written as
        (attr_expr * (1,)).parseString(text).pprint()
    cCsIt|dr|jS|jdkrBdt|jd|_n|jS(NRRs}...(RRRmRRRF(R((s9/usr/lib/python2.7/site-packages/pip/_vendor/pyparsing.pyR!s
(RRRR(((s9/usr/lib/python2.7/site-packages/pip/_vendor/pyparsing.pyRscBs/eZdZddZedZdZRS(sw
    Optional repetition of zero or more of the given expression.
    
    Parameters:
     - expr - expression that must match zero or more times
     - stopOn - (default=C{None}) - expression for a terminating sentinel
          (only required if the sentinel would ordinarily match the repetition 
          expression)          

    Example: similar to L{OneOrMore}
    cCs)tt|j|d|t|_dS(NR(RR0RRRs(RRFR((s9/usr/lib/python2.7/site-packages/pip/_vendor/pyparsing.pyR6scCsEy tt|j|||SWnttfk
r@|gfSXdS(N(RR0RRR(RRERR((s9/usr/lib/python2.7/site-packages/pip/_vendor/pyparsing.pyR:s cCsIt|dr|jS|jdkrBdt|jd|_n|jS(NRRs]...(RRRmRRRF(R((s9/usr/lib/python2.7/site-packages/pip/_vendor/pyparsing.pyR@s
N(RRRRRRRR(((s9/usr/lib/python2.7/site-packages/pip/_vendor/pyparsing.pyR0*st
_NullTokencBs eZdZeZdZRS(cCstS(N(R(R((s9/usr/lib/python2.7/site-packages/pip/_vendor/pyparsing.pyRJscCsdS(NRr((R((s9/usr/lib/python2.7/site-packages/pip/_vendor/pyparsing.pyRMs(RRRR>R(((s9/usr/lib/python2.7/site-packages/pip/_vendor/pyparsing.pyRIs	cBs/eZdZedZedZdZRS(sa
    Optional matching of the given expression.

    Parameters:
     - expr - expression that must match zero or more times
     - default (optional) - value to be returned if the optional expression is not found.

    Example::
        # US postal code can be a 5-digit zip, plus optional 4-digit qualifier
        zip = Combine(Word(nums, exact=5) + Optional('-' + Word(nums, exact=4)))
        zip.runTests('''
            # traditional ZIP code
            12345
            
            # ZIP+4 form
            12101-0001
            
            # invalid ZIP
            98765-
            ''')
    prints::
        # traditional ZIP code
        12345
        ['12345']

        # ZIP+4 form
        12101-0001
        ['12101-0001']

        # invalid ZIP
        98765-
             ^
        FAIL: Expected end of text (at char 5), (line:1, col:6)
    cCsAtt|j|dt|jj|_||_t|_dS(NR(	RRRRRFRoRRRs(RRFR((s9/usr/lib/python2.7/site-packages/pip/_vendor/pyparsing.pyRts	cCsy(|jj|||dt\}}Wnottfk
r|jtk	r|jjrt|jg}|j||jj ['3', '.', '1416']
        # will also erroneously match the following
        print(real.parseString('3. 1416')) # -> ['3', '.', '1416']

        real = Combine(Word(nums) + '.' + Word(nums))
        print(real.parseString('3.1416')) # -> ['3.1416']
        # no match when there are internal spaces
        print(real.parseString('3. 1416')) # -> Exception: Expected W:(0123...)
    RrcCsQtt|j||r)|jn||_t|_||_t|_dS(N(	RRRRtadjacentRRpt
joinStringR}(RRFRR((s9/usr/lib/python2.7/site-packages/pip/_vendor/pyparsing.pyRrs
			cCs6|jrtj||ntt|j||S(N(RR"RRR(RR	((s9/usr/lib/python2.7/site-packages/pip/_vendor/pyparsing.pyR|s	cCse|j}|2|tdj|j|jgd|j7}|jr]|jr]|gS|SdS(NRrR(RR RRRRzRnR(RRERRtretToks((s9/usr/lib/python2.7/site-packages/pip/_vendor/pyparsing.pyRs1(RRRRRRR(((s9/usr/lib/python2.7/site-packages/pip/_vendor/pyparsing.pyRas
	cBs eZdZdZdZRS(s
    Converter to return the matched tokens as a list - useful for returning tokens of C{L{ZeroOrMore}} and C{L{OneOrMore}} expressions.

    Example::
        ident = Word(alphas)
        num = Word(nums)
        term = ident | num
        func = ident + Optional(delimitedList(term))
        print(func.parseString("fn a,b,100"))  # -> ['fn', 'a', 'b', '100']

        func = ident + Group(Optional(delimitedList(term)))
        print(func.parseString("fn a,b,100"))  # -> ['fn', ['a', 'b', '100']]
    cCs#tt|j|t|_dS(N(RRRRRo(RRF((s9/usr/lib/python2.7/site-packages/pip/_vendor/pyparsing.pyRscCs|gS(N((RRERR((s9/usr/lib/python2.7/site-packages/pip/_vendor/pyparsing.pyRs(RRRRR(((s9/usr/lib/python2.7/site-packages/pip/_vendor/pyparsing.pyRs
	cBs eZdZdZdZRS(sW
    Converter to return a repetitive expression as a list, but also as a dictionary.
    Each element can also be referenced using the first token in the expression as its key.
    Useful for tabular report scraping when the first column can be used as an item key.

    Example::
        data_word = Word(alphas)
        label = data_word + FollowedBy(':')
        attr_expr = Group(label + Suppress(':') + OneOrMore(data_word).setParseAction(' '.join))

        text = "shape: SQUARE posn: upper left color: light blue texture: burlap"
        attr_expr = (label + Suppress(':') + OneOrMore(data_word, stopOn=label).setParseAction(' '.join))
        
        # print attributes as plain groups
        print(OneOrMore(attr_expr).parseString(text).dump())
        
        # instead of OneOrMore(expr), parse using Dict(OneOrMore(Group(expr))) - Dict will auto-assign names
        result = Dict(OneOrMore(Group(attr_expr))).parseString(text)
        print(result.dump())
        
        # access named fields as dict entries, or output as dict
        print(result['shape'])        
        print(result.asDict())
    prints::
        ['shape', 'SQUARE', 'posn', 'upper left', 'color', 'light blue', 'texture', 'burlap']

        [['shape', 'SQUARE'], ['posn', 'upper left'], ['color', 'light blue'], ['texture', 'burlap']]
        - color: light blue
        - posn: upper left
        - shape: SQUARE
        - texture: burlap
        SQUARE
        {'color': 'light blue', 'posn': 'upper left', 'texture': 'burlap', 'shape': 'SQUARE'}
    See more examples at L{ParseResults} of accessing fields by results name.
    cCs#tt|j|t|_dS(N(RR	RRRo(RRF((s9/usr/lib/python2.7/site-packages/pip/_vendor/pyparsing.pyRscCsTx9t|D]+\}}t|dkr1q
n|d}t|trct|dj}nt|dkrtd|||nX|S(ss
    Decorator for debugging parse actions. 
    
    When the parse action is called, this decorator will print C{">> entering I{method-name}(line:I{current_source_line}, I{parse_location}, I{matched_tokens})".}
    When the parse action completes, the decorator will print C{"<<"} followed by the returned value, or any exception that the parse action raised.

    Example::
        wd = Word(alphas)

        @traceParseAction
        def remove_duplicate_chars(tokens):
            return ''.join(sorted(set(''.join(tokens))))

        wds = OneOrMore(wd).setParseAction(remove_duplicate_chars)
        print(wds.parseString("slkdjs sld sldd sdlf sdljf"))
    prints::
        >>entering remove_duplicate_chars(line: 'slkdjs sld sldd sdlf sdljf', 0, (['slkdjs', 'sld', 'sldd', 'sdlf', 'sdljf'], {}))
        <>entering %s(line: '%s', %d, %r)
s< ['aa', 'bb', 'cc']
        delimitedList(Word(hexnums), delim=':', combine=True).parseString("AA:BB:CC:DD:EE") # -> ['AA:BB:CC:DD:EE']
    s [Rs]...N(RRR0RR)(RFtdelimtcombinetdlName((s9/usr/lib/python2.7/site-packages/pip/_vendor/pyparsing.pyR>9s
,!cstfd}|dkrBttjd}n|j}|jd|j|dt|jdt	dS(s:
    Helper to define a counted list of expressions.
    This helper defines a pattern of the form::
        integer expr expr expr...
    where the leading integer tells how many expr expressions follow.
    The matched tokens are returned as a list of the expr tokens - the leading count token is suppressed.
    
    If C{intExpr} is specified, it should be a pyparsing expression that produces an integer value.

    Example::
        countedArray(Word(alphas)).parseString('2 ab cd ef')  # -> ['ab', 'cd']

        # in this parser, the leading integer value is given in binary,
        # '10' indicating that 2 values are in the array
        binaryConstant = Word('01').setParseAction(lambda t: int(t[0], 2))
        countedArray(Word(alphas), intExpr=binaryConstant).parseString('10 ab cd ef')  # -> ['ab', 'cd']
    cs;|d}|r,ttg|p5tt>gS(Ni(RRRA(RRNRpR(t	arrayExprRF(s9/usr/lib/python2.7/site-packages/pip/_vendor/pyparsing.pytcountFieldParseAction_s
-cSst|dS(Ni(Ro(Rp((s9/usr/lib/python2.7/site-packages/pip/_vendor/pyparsing.pyRqdRrtarrayLenR~s(len) s...N(
R
RR-RPRzRRRRR(RFtintExprR((RRFs9/usr/lib/python2.7/site-packages/pip/_vendor/pyparsing.pyR:Ls	
cCsMg}x@|D]8}t|tr8|jt|q
|j|q
W|S(N(RsRRRR(tLR}R((s9/usr/lib/python2.7/site-packages/pip/_vendor/pyparsing.pyRks
csFtfd}|j|dtjdt|S(s*
    Helper to define an expression that is indirectly defined from
    the tokens matched in a previous expression, that is, it looks
    for a 'repeat' of a previous expression.  For example::
        first = Word(nums)
        second = matchPreviousLiteral(first)
        matchExpr = first + ":" + second
    will match C{"1:1"}, but not C{"1:2"}.  Because this matches a
    previous literal, will also match the leading C{"1:1"} in C{"1:10"}.
    If this is not desired, use C{matchPreviousExpr}.
    Do I{not} use with packrat parsing enabled.
    csc|rTt|dkr'|d>q_t|j}td|D>nt>dS(Niicss|]}t|VqdS(N(R(Rttt((s9/usr/lib/python2.7/site-packages/pip/_vendor/pyparsing.pys	s(RRRRR(RRNRpttflat(trep(s9/usr/lib/python2.7/site-packages/pip/_vendor/pyparsing.pytcopyTokenToRepeatersR~s(prev) (R
RRRR(RFR((Rs9/usr/lib/python2.7/site-packages/pip/_vendor/pyparsing.pyRMts

	
cs\t|j}|Kfd}|j|dtjdt|S(sS
    Helper to define an expression that is indirectly defined from
    the tokens matched in a previous expression, that is, it looks
    for a 'repeat' of a previous expression.  For example::
        first = Word(nums)
        second = matchPreviousExpr(first)
        matchExpr = first + ":" + second
    will match C{"1:1"}, but not C{"1:2"}.  Because this matches by
    expressions, will I{not} match the leading C{"1:1"} in C{"1:10"};
    the expressions are evaluated first, and then compared, so
    C{"1"} is compared with C{"10"}.
    Do I{not} use with packrat parsing enabled.
    cs8t|jfd}j|dtdS(Ncs7t|j}|kr3tdddndS(NRri(RRR(RRNRpttheseTokens(tmatchTokens(s9/usr/lib/python2.7/site-packages/pip/_vendor/pyparsing.pytmustMatchTheseTokenssR~(RRRzR(RRNRpR(R(Rs9/usr/lib/python2.7/site-packages/pip/_vendor/pyparsing.pyRsR~s(prev) (R
RRRRR(RFte2R((Rs9/usr/lib/python2.7/site-packages/pip/_vendor/pyparsing.pyRLs	
cCsUx$dD]}|j|t|}qW|jdd}|jdd}t|S(Ns\^-]s
s\ns	s\t(Rt_bslashR(RR((s9/usr/lib/python2.7/site-packages/pip/_vendor/pyparsing.pyREs

c
sD|r!d}d}tnd}d}tg}t|tr]|j}n7t|tjr~t|}ntj	dt
dd|stSd}x|t|d	krV||}xt
||d	D]f\}}	||	|r
|||d	=Pq|||	r|||d	=|j||	|	}PqqW|d	7}qW|r|ryt|td
j|krtdd
jd|Djd
j|Stdjd|Djd
j|SWqtk
rtj	dt
ddqXntfd|Djd
j|S(s
    Helper to quickly define a set of alternative Literals, and makes sure to do
    longest-first testing when there is a conflict, regardless of the input order,
    but returns a C{L{MatchFirst}} for best performance.

    Parameters:
     - strs - a string of space-delimited literals, or a collection of string literals
     - caseless - (default=C{False}) - treat all literals as caseless
     - useRegex - (default=C{True}) - as an optimization, will generate a Regex
          object; otherwise, will generate a C{MatchFirst} object (if C{caseless=True}, or
          if creating a C{Regex} raises an exception)

    Example::
        comp_oper = oneOf("< = > <= >= !=")
        var = Word(alphas)
        number = Word(nums)
        term = var | number
        comparison_expr = term + comp_oper + term
        print(comparison_expr.searchString("B = 12  AA=23 B<=AA AA>12"))
    prints::
        [['B', '=', '12'], ['AA', '=', '23'], ['B', '<=', 'AA'], ['AA', '>', '12']]
    cSs|j|jkS(N(R,(Rtb((s9/usr/lib/python2.7/site-packages/pip/_vendor/pyparsing.pyRqRrcSs|jj|jS(N(R,R)(RR((s9/usr/lib/python2.7/site-packages/pip/_vendor/pyparsing.pyRqRrcSs
||kS(N((RR((s9/usr/lib/python2.7/site-packages/pip/_vendor/pyparsing.pyRqRrcSs
|j|S(N(R)(RR((s9/usr/lib/python2.7/site-packages/pip/_vendor/pyparsing.pyRqRrs6Invalid argument to oneOf, expected string or iterableRiiiRrs[%s]css|]}t|VqdS(N(RE(Rtsym((s9/usr/lib/python2.7/site-packages/pip/_vendor/pyparsing.pys	ss | t|css|]}tj|VqdS(N(R|RG(RR((s9/usr/lib/python2.7/site-packages/pip/_vendor/pyparsing.pys	ss7Exception creating Regex for oneOf, building MatchFirstc3s|]}|VqdS(N((RR(tparseElementClass(s9/usr/lib/python2.7/site-packages/pip/_vendor/pyparsing.pys	s(RRRsRRRRwRRRRRRRRRR%RRaR(
tstrsR+tuseRegextisequaltmaskstsymbolsRtcurRR	((Rs9/usr/lib/python2.7/site-packages/pip/_vendor/pyparsing.pyRQsL						

!
!33
	cCsttt||S(s
    Helper to easily and clearly define a dictionary by specifying the respective patterns
    for the key and value.  Takes care of defining the C{L{Dict}}, C{L{ZeroOrMore}}, and C{L{Group}} tokens
    in the proper order.  The key pattern can include delimiting markers or punctuation,
    as long as they are suppressed, thereby leaving the significant key text.  The value
    pattern can include named results, so that the C{Dict} results can include named token
    fields.

    Example::
        text = "shape: SQUARE posn: upper left color: light blue texture: burlap"
        attr_expr = (label + Suppress(':') + OneOrMore(data_word, stopOn=label).setParseAction(' '.join))
        print(OneOrMore(attr_expr).parseString(text).dump())
        
        attr_label = label
        attr_value = Suppress(':') + OneOrMore(data_word, stopOn=label).setParseAction(' '.join)

        # similar to Dict, but simpler call format
        result = dictOf(attr_label, attr_value).parseString(text)
        print(result.dump())
        print(result['shape'])
        print(result.shape)  # object attribute access works too
        print(result.asDict())
    prints::
        [['shape', 'SQUARE'], ['posn', 'upper left'], ['color', 'light blue'], ['texture', 'burlap']]
        - color: light blue
        - posn: upper left
        - shape: SQUARE
        - texture: burlap
        SQUARE
        SQUARE
        {'color': 'light blue', 'shape': 'SQUARE', 'posn': 'upper left', 'texture': 'burlap'}
    (R	R0R(RR((s9/usr/lib/python2.7/site-packages/pip/_vendor/pyparsing.pyR?s!cCs|tjd}|j}t|_|d||d}|rVd}n	d}|j||j|_|S(s
    Helper to return the original, untokenized text for a given expression.  Useful to
    restore the parsed fields of an HTML start tag into the raw tag text itself, or to
    revert separate tokens with intervening whitespace back to the original matching
    input text. By default, returns a string containing the original parsed text.  
       
    If the optional C{asString} argument is passed as C{False}, then the return value is a 
    C{L{ParseResults}} containing any results names that were originally matched, and a 
    single token containing the original matched text from the input string.  So if 
    the expression passed to C{L{originalTextFor}} contains expressions with defined
    results names, you must set C{asString} to C{False} if you want to preserve those
    results name values.

    Example::
        src = "this is test  bold text  normal text "
        for tag in ("b","i"):
            opener,closer = makeHTMLTags(tag)
            patt = originalTextFor(opener + SkipTo(closer) + closer)
            print(patt.searchString(src)[0])
    prints::
        ['<b> bold <i>text</i> </b>']
        ['<i>text</i>']
    cSs|S(N((RRRp((s9/usr/lib/python2.7/site-packages/pip/_vendor/pyparsing.pyRq8Rrt_original_startt
_original_endcSs||j|j!S(N(RR(RRNRp((s9/usr/lib/python2.7/site-packages/pip/_vendor/pyparsing.pyRq=RrcSs'||jd|jd!g|(dS(NRR(R(RRNRp((s9/usr/lib/python2.7/site-packages/pip/_vendor/pyparsing.pytextractText?s(RRzRRR}Ru(RFtasStringt	locMarkertendlocMarkert	matchExprR((s9/usr/lib/python2.7/site-packages/pip/_vendor/pyparsing.pyRe s		
cCst|jdS(sp
    Helper to undo pyparsing's default grouping of And expressions, even
    if all but one are non-empty.
    cSs|dS(Ni((Rp((s9/usr/lib/python2.7/site-packages/pip/_vendor/pyparsing.pyRqJRr(R+Rz(RF((s9/usr/lib/python2.7/site-packages/pip/_vendor/pyparsing.pyRfEscCsEtjd}t|d|d|jjdS(s
    Helper to decorate a returned token with its starting and ending locations in the input string.
    This helper adds the following results names:
     - locn_start = location where matched expression begins
     - locn_end = location where matched expression ends
     - value = the actual parsed results

    Be careful if the input text contains C{} characters, you may want to call
    C{L{ParserElement.parseWithTabs}}

    Example::
        wd = Word(alphas)
        for match in locatedExpr(wd).searchString("ljsdf123lksdjjf123lkkjj1222"):
            print(match)
    prints::
        [[0, 'ljsdf', 5]]
        [[8, 'lksdjjf', 15]]
        [[18, 'lkkjj', 23]]
    cSs|S(N((RRNRp((s9/usr/lib/python2.7/site-packages/pip/_vendor/pyparsing.pyRq`Rrt
locn_startRtlocn_end(RRzRRR(RFtlocator((s9/usr/lib/python2.7/site-packages/pip/_vendor/pyparsing.pyRhLss\[]-*.$+^?()~ RKcCs|ddS(Nii((RRNRp((s9/usr/lib/python2.7/site-packages/pip/_vendor/pyparsing.pyRqkRrs\\0?[xX][0-9a-fA-F]+cCs tt|djddS(Nis\0xi(tunichrRotlstrip(RRNRp((s9/usr/lib/python2.7/site-packages/pip/_vendor/pyparsing.pyRqlRrs	\\0[0-7]+cCstt|dddS(Niii(RRo(RRNRp((s9/usr/lib/python2.7/site-packages/pip/_vendor/pyparsing.pyRqmRrR<s\]s\wRzRRtnegatetbodyRcsOdy-djfdtj|jDSWntk
rJdSXdS(s
    Helper to easily define string ranges for use in Word construction.  Borrows
    syntax from regexp '[]' string range definitions::
        srange("[0-9]")   -> "0123456789"
        srange("[a-z]")   -> "abcdefghijklmnopqrstuvwxyz"
        srange("[a-z$_]") -> "abcdefghijklmnopqrstuvwxyz$_"
    The input string must be enclosed in []'s, and the returned string is the expanded
    character set joined into a single string.
    The values enclosed in the []'s may be:
     - a single character
     - an escaped character with a leading backslash (such as C{\-} or C{\]})
     - an escaped hex character with a leading C{'\x'} (C{\x21}, which is a C{'!'} character) 
         (C{\0x##} is also supported for backwards compatibility) 
     - an escaped octal character with a leading C{'\0'} (C{\041}, which is a C{'!'} character)
     - a range of any of the above, separated by a dash (C{'a-z'}, etc.)
     - any combination of the above (C{'aeiouy'}, C{'a-zA-Z0-9_$'}, etc.)
    cSsKt|ts|Sdjdtt|dt|ddDS(NRrcss|]}t|VqdS(N(R(RR((s9/usr/lib/python2.7/site-packages/pip/_vendor/pyparsing.pys	sii(RsR RRtord(tp((s9/usr/lib/python2.7/site-packages/pip/_vendor/pyparsing.pyRqRrRrc3s|]}|VqdS(N((Rtpart(t	_expanded(s9/usr/lib/python2.7/site-packages/pip/_vendor/pyparsing.pys	sN(Rt_reBracketExprRRRa(R((Rs9/usr/lib/python2.7/site-packages/pip/_vendor/pyparsing.pyR]rs
	-
csfd}|S(st
    Helper method for defining parse actions that require matching at a specific
    column in the input text.
    cs2t||kr.t||dndS(Nsmatched token not at column %d(R7R(R@tlocnRJ(R(s9/usr/lib/python2.7/site-packages/pip/_vendor/pyparsing.pyt	verifyCols((RR((Rs9/usr/lib/python2.7/site-packages/pip/_vendor/pyparsing.pyRKscs
fdS(s
    Helper method for common parse actions that simply return a literal value.  Especially
    useful when used with C{L{transformString}()}.

    Example::
        num = Word(nums).setParseAction(lambda toks: int(toks[0]))
        na = oneOf("N/A NA").setParseAction(replaceWith(math.nan))
        term = na | num
        
        OneOrMore(term).parseString("324 234 N/A 234") # -> [324, 234, nan, 234]
    csgS(N((RRNRp(treplStr(s9/usr/lib/python2.7/site-packages/pip/_vendor/pyparsing.pyRqRr((R((Rs9/usr/lib/python2.7/site-packages/pip/_vendor/pyparsing.pyRZscCs|ddd!S(s
    Helper parse action for removing quotation marks from parsed quoted strings.

    Example::
        # by default, quotation marks are included in parsed results
        quotedString.parseString("'Now is the Winter of our Discontent'") # -> ["'Now is the Winter of our Discontent'"]

        # use removeQuotes to strip quotation marks from parsed results
        quotedString.setParseAction(removeQuotes)
        quotedString.parseString("'Now is the Winter of our Discontent'") # -> ["Now is the Winter of our Discontent"]
    iii((RRNRp((s9/usr/lib/python2.7/site-packages/pip/_vendor/pyparsing.pyRXscsafd}y"tdtdj}Wntk
rSt}nX||_|S(sG
    Helper to define a parse action by mapping a function to all elements of a ParseResults list.If any additional 
    args are passed, they are forwarded to the given function as additional arguments after
    the token, as in C{hex_integer = Word(hexnums).setParseAction(tokenMap(int, 16))}, which will convert the
    parsed data to an integer using base 16.

    Example (compare the last to example in L{ParserElement.transformString}::
        hex_ints = OneOrMore(Word(hexnums)).setParseAction(tokenMap(int, 16))
        hex_ints.runTests('''
            00 11 22 aa FF 0a 0d 1a
            ''')
        
        upperword = Word(alphas).setParseAction(tokenMap(str.upper))
        OneOrMore(upperword).runTests('''
            my kingdom for a horse
            ''')

        wd = Word(alphas).setParseAction(tokenMap(str.title))
        OneOrMore(wd).setParseAction(' '.join).runTests('''
            now is the winter of our discontent made glorious summer by this sun of york
            ''')
    prints::
        00 11 22 aa FF 0a 0d 1a
        [0, 17, 34, 170, 255, 10, 13, 26]

        my kingdom for a horse
        ['MY', 'KINGDOM', 'FOR', 'A', 'HORSE']

        now is the winter of our discontent made glorious summer by this sun of york
        ['Now Is The Winter Of Our Discontent Made Glorious Summer By This Sun Of York']
    cs g|D]}|^qS(N((RRNRpttokn(RRO(s9/usr/lib/python2.7/site-packages/pip/_vendor/pyparsing.pyRsRR^(R`RRaRu(RORRRd((RROs9/usr/lib/python2.7/site-packages/pip/_vendor/pyparsing.pyRks 	
	cCst|jS(N(RR,(Rp((s9/usr/lib/python2.7/site-packages/pip/_vendor/pyparsing.pyRqRrcCst|jS(N(Rtlower(Rp((s9/usr/lib/python2.7/site-packages/pip/_vendor/pyparsing.pyRqRrcCs<t|tr+|}t|d|}n	|j}tttd}|rtjj	t
}td|dtt
t|td|tddtgjdj	d	td
}ndjdtD}tjj	t
t|B}td|dtt
t|j	tttd|tddtgjdj	d
td
}ttd|d
}|jddj|jddjjjd|}|jddj|jddjjjd|}||_||_||fS(sRInternal helper to construct opening and closing tag expressions, given a tag nameR+s_-:Rttagt=t/RRAcSs|ddkS(NiR((RRNRp((s9/usr/lib/python2.7/site-packages/pip/_vendor/pyparsing.pyRqRrR Rrcss!|]}|dkr|VqdS(R N((RR((s9/usr/lib/python2.7/site-packages/pip/_vendor/pyparsing.pys	scSs|ddkS(NiR((RRNRp((s9/usr/lib/python2.7/site-packages/pip/_vendor/pyparsing.pyRqRrsRLs(RsRRRR-R2R1R<RRzRXR)R	R0RRRRRRTRWR@Rt_LRttitleRRR(ttagStrtxmltresnamettagAttrNamettagAttrValuetopenTagtprintablesLessRAbracktcloseTag((s9/usr/lib/python2.7/site-packages/pip/_vendor/pyparsing.pyt	_makeTagss"	o{AA		cCs
t|tS(s 
    Helper to construct opening and closing tag expressions for HTML, given a tag name. Matches
    tags in either upper or lower case, attributes with namespaces and with quoted or unquoted values.

    Example::
        text = 'More info at the <a href="http://pyparsing.wikispaces.com">pyparsing</a> wiki page'
        # makeHTMLTags returns pyparsing expressions for the opening and closing tags as a 2-tuple
        a,a_end = makeHTMLTags("A")
        link_expr = a + SkipTo(a_end)("link_text") + a_end
        
        for link in link_expr.searchString(text):
            # attributes in the <A> tag (like "href" shown here) are also accessible as named results
            print(link.link_text, '->', link.href)
    prints::
        pyparsing -> http://pyparsing.wikispaces.com
    (RR(R((s9/usr/lib/python2.7/site-packages/pip/_vendor/pyparsing.pyRIscCs
t|tS(s
    Helper to construct opening and closing tag expressions for XML, given a tag name. Matches
    tags only in the given upper/lower case.

    Example: similar to L{makeHTMLTags}
    (RR(R((s9/usr/lib/python2.7/site-packages/pip/_vendor/pyparsing.pyRJscsT|r|n|jgD]\}}||f^q#fd}|S(s<
    Helper to create a validating parse action to be used with start tags created
    with C{L{makeXMLTags}} or C{L{makeHTMLTags}}. Use C{withAttribute} to qualify a starting tag
    with a required attribute value, to avoid false matches on common tags such as
    C{} or C{
}. Call C{withAttribute} with a series of attribute names and values. Specify the list of filter attributes names and values as: - keyword arguments, as in C{(align="right")}, or - as an explicit dict with C{**} operator, when an attribute name is also a Python reserved word, as in C{**{"class":"Customer", "align":"right"}} - a list of name-value tuples, as in ( ("ns1:class", "Customer"), ("ns2:align","right") ) For attribute names with a namespace prefix, you must use the second form. Attribute names are matched insensitive to upper/lower case. If just testing for C{class} (with or without a namespace), use C{L{withClass}}. To verify that the attribute exists, but without specifying a value, pass C{withAttribute.ANY_VALUE} as the value. Example:: html = '''
Some text
1 4 0 1 0
1,3 2,3 1,1
this has no type
''' div,div_end = makeHTMLTags("div") # only match div tag having a type attribute with value "grid" div_grid = div().setParseAction(withAttribute(type="grid")) grid_expr = div_grid + SkipTo(div | div_end)("body") for grid_header in grid_expr.searchString(html): print(grid_header.body) # construct a match with any div tag having a type attribute, regardless of the value div_any_type = div().setParseAction(withAttribute(type=withAttribute.ANY_VALUE)) div_expr = div_any_type + SkipTo(div | div_end)("body") for div_header in div_expr.searchString(html): print(div_header.body) prints:: 1 4 0 1 0 1 4 0 1 0 1,3 2,3 1,1 csx~D]v\}}||kr8t||d|n|tjkr|||krt||d||||fqqWdS(Nsno matching attribute s+attribute '%s' has value '%s', must be '%s'(RRct ANY_VALUE(RRNRtattrNamet attrValue(tattrs(s9/usr/lib/python2.7/site-packages/pip/_vendor/pyparsing.pyRRs   (R(RtattrDictRRR((Rs9/usr/lib/python2.7/site-packages/pip/_vendor/pyparsing.pyRcs 2  %cCs'|rd|nd}ti||6S(s Simplified version of C{L{withAttribute}} when matching on a div class - made difficult because C{class} is a reserved word in Python. Example:: html = '''
Some text
1 4 0 1 0
1,3 2,3 1,1
this <div> has no class
''' div,div_end = makeHTMLTags("div") div_grid = div().setParseAction(withClass("grid")) grid_expr = div_grid + SkipTo(div | div_end)("body") for grid_header in grid_expr.searchString(html): print(grid_header.body) div_any_type = div().setParseAction(withClass(withAttribute.ANY_VALUE)) div_expr = div_any_type + SkipTo(div | div_end)("body") for div_header in div_expr.searchString(html): print(div_header.body) prints:: 1 4 0 1 0 1 4 0 1 0 1,3 2,3 1,1 s%s:classtclass(Rc(t classnamet namespacet classattr((s9/usr/lib/python2.7/site-packages/pip/_vendor/pyparsing.pyRi\s t(RYcCs<t}||||B}xt|D]\}}|d d \}} } } | dkrdd|nd|} | dkr|d kst|dkrtdn|\} }ntj| }| tjkr| dkr t||t |t |}q| dkrx|d k rQt|||t |t ||}qt||t |t |}q| dkrt|| |||t || |||}qtdn+| tj kr| dkr)t |t st |}nt|j|t ||}q| dkr|d k rpt|||t |t ||}qt||t |t |}q| dkrt|| |||t || |||}qtdn td | r |j| n||j| |BK}|}q(W||K}|S( s Helper method for constructing grammars of expressions made up of operators working in a precedence hierarchy. Operators may be unary or binary, left- or right-associative. Parse actions can also be attached to operator expressions. The generated parser will also recognize the use of parentheses to override operator precedences (see example below). Note: if you define a deep operator list, you may see performance issues when using infixNotation. See L{ParserElement.enablePackrat} for a mechanism to potentially improve your parser performance. Parameters: - baseExpr - expression representing the most basic element for the nested - opList - list of tuples, one for each operator precedence level in the expression grammar; each tuple is of the form (opExpr, numTerms, rightLeftAssoc, parseAction), where: - opExpr is the pyparsing expression for the operator; may also be a string, which will be converted to a Literal; if numTerms is 3, opExpr is a tuple of two expressions, for the two operators separating the 3 terms - numTerms is the number of terms for this operator (must be 1, 2, or 3) - rightLeftAssoc is the indicator whether the operator is right or left associative, using the pyparsing-defined constants C{opAssoc.RIGHT} and C{opAssoc.LEFT}. 
- parseAction is the parse action to be associated with expressions matching this operator expression (the parse action tuple member may be omitted) - lpar - expression for matching left-parentheses (default=C{Suppress('(')}) - rpar - expression for matching right-parentheses (default=C{Suppress(')')}) Example:: # simple example of four-function arithmetic with ints and variable names integer = pyparsing_common.signed_integer varname = pyparsing_common.identifier arith_expr = infixNotation(integer | varname, [ ('-', 1, opAssoc.RIGHT), (oneOf('* /'), 2, opAssoc.LEFT), (oneOf('+ -'), 2, opAssoc.LEFT), ]) arith_expr.runTests(''' 5+3*6 (5+3)*6 -2--11 ''', fullDump=False) prints:: 5+3*6 [[5, '+', [3, '*', 6]]] (5+3)*6 [[[5, '+', 3], '*', 6]] -2--11 [[['-', 2], '-', ['-', 11]]] iis%s terms %s%s termis@if numterms=3, opExpr must be a tuple or list of two expressionsis6operator must be unary (1), binary (2), or ternary (3)s2operator must indicate right or left associativityN(N(R RRRRRRRtLEFTR RRtRIGHTRsRRFRz(tbaseExprtopListtlpartrparR}tlastExprRtoperDeftopExprtaritytrightLeftAssocRttermNametopExpr1topExpr2tthisExprR((s9/usr/lib/python2.7/site-packages/pip/_vendor/pyparsing.pyRgsR;    '  /'   $  /'     s4"(?:[^"\n\r\\]|(?:"")|(?:\\(?:[^x]|x[0-9a-fA-F]+)))*t"s string enclosed in double quotess4'(?:[^'\n\r\\]|(?:'')|(?:\\(?:[^x]|x[0-9a-fA-F]+)))*t's string enclosed in single quotess*quotedString using single or double quotestusunicode string literalcCs!||krtdn|d krt|trt|trt|dkrt|dkr|d k rtt|t||tj ddj d}q|t j t||tj j d}q|d k r9tt|t |t |ttj ddj d}qttt |t |ttj ddj d}qtdnt}|d k r|tt|t||B|Bt|K}n.|tt|t||Bt|K}|jd ||f|S( s~ Helper method for defining nested lists enclosed in opening and closing delimiters ("(" and ")" are the default). Parameters: - opener - opening character for a nested list (default=C{"("}); can also be a pyparsing expression - closer - closing character for a nested list (default=C{")"}); can also be a pyparsing expression - content - expression for items within the nested lists (default=C{None}) - ignoreExpr - expression for ignoring opening and closing delimiters (default=C{quotedString}) If an expression is not provided for the content argument, the nested expression will capture all whitespace-delimited content between delimiters as a list of separate values. Use the C{ignoreExpr} argument to define expressions that may contain opening or closing characters that should not be treated as opening or closing characters for nesting, such as quotedString or a comment expression. Specify multiple expressions using an C{L{Or}} or C{L{MatchFirst}}. The default is L{quotedString}, but if no expressions are to be ignored, then pass C{None} for this argument. 
Example:: data_type = oneOf("void int short long char float double") decl_data_type = Combine(data_type + Optional(Word('*'))) ident = Word(alphas+'_', alphanums+'_') number = pyparsing_common.number arg = Group(decl_data_type + ident) LPAR,RPAR = map(Suppress, "()") code_body = nestedExpr('{', '}', ignoreExpr=(quotedString | cStyleComment)) c_function = (decl_data_type("type") + ident("name") + LPAR + Optional(delimitedList(arg), [])("args") + RPAR + code_body("body")) c_function.ignore(cStyleComment) source_code = ''' int is_odd(int x) { return (x%2); } int dec_to_hex(char hchar) { if (hchar >= '0' && hchar <= '9') { return (ord(hchar)-ord('0')); } else { return (10+ord(hchar)-ord('A')); } } ''' for func in c_function.searchString(source_code): print("%(name)s (%(type)s) args: %(args)s" % func) prints:: is_odd (int) args: [['int', 'x']] dec_to_hex (int) args: [['char', 'hchar']] s.opening and closing strings cannot be the sameiRKcSs|djS(Ni(R(Rp((s9/usr/lib/python2.7/site-packages/pip/_vendor/pyparsing.pyRq9RrcSs|djS(Ni(R(Rp((s9/usr/lib/python2.7/site-packages/pip/_vendor/pyparsing.pyRq<RrcSs|djS(Ni(R(Rp((s9/usr/lib/python2.7/site-packages/pip/_vendor/pyparsing.pyRqBRrcSs|djS(Ni(R(Rp((s9/usr/lib/python2.7/site-packages/pip/_vendor/pyparsing.pyRqFRrsOopening and closing arguments must be strings if no content expression is givensnested %s%s expressionN(RRRsRRRRRR"RfRzRARRR RR)R0R(topenertclosertcontentRR}((s9/usr/lib/python2.7/site-packages/pip/_vendor/pyparsing.pyRNs4:  $  $    5.c s5fd}fd}fd}ttjdj}ttj|jd}tj|jd}tj|jd} |rtt||t|t|t|| } n0tt|t|t|t|} |j t t| jdS( s Helper method for defining space-delimited indentation blocks, such as those used to define block statements in Python source code. Parameters: - blockStatementExpr - expression defining syntax of statement that is repeated within the indented block - indentStack - list created by caller to manage indentation stack (multiple statementWithIndentedBlock expressions within a single grammar should share a common indentStack) - indent - boolean indicating whether block must be indented beyond the the current level; set to False for block of left-most statements (default=C{True}) A valid block must contain at least one C{blockStatement}. 
[compiled bytecode omitted: the remainder of site-packages/pip/_vendor/pyparsing.pyc -- binary .pyc content with embedded docstring fragments, not reproducible as readable text]
PKZ`W`W$site-packages/pip/_vendor/appdirs.pynu[
#!/usr/bin/env python
# -*- coding: utf-8 -*-
# Copyright (c) 2005-2010 ActiveState Software Inc.
# Copyright (c) 2013 Eddy Petrișor

"""Utilities for determining application-specific dirs.

See for details and usage.
"""
# Dev Notes:
# - MSDN on where to store app data files:
#   http://support.microsoft.com/default.aspx?scid=kb;en-us;310294#XSLTH3194121123120121120120
# - macOS: http://developer.apple.com/documentation/MacOSX/Conceptual/BPFileSystem/index.html
# - XDG spec for Un*x: http://standards.freedesktop.org/basedir-spec/basedir-spec-latest.html

__version_info__ = (1, 4, 0)
__version__ = '.'.join(map(str, __version_info__))


import sys
import os

PY3 = sys.version_info[0] == 3

if PY3:
    unicode = str

if sys.platform.startswith('java'):
    import platform
    os_name = platform.java_ver()[3][0]
    if os_name.startswith('Windows'): # "Windows XP", "Windows 7", etc.
        system = 'win32'
    elif os_name.startswith('Mac'): # "macOS", etc.
        system = 'darwin'
    else: # "Linux", "SunOS", "FreeBSD", etc.
# Setting this to "linux2" is not ideal, but only Windows or Mac # are actually checked for and the rest of the module expects # *sys.platform* style strings. system = 'linux2' else: system = sys.platform def user_data_dir(appname=None, appauthor=None, version=None, roaming=False): r"""Return full path to the user-specific data dir for this application. "appname" is the name of application. If None, just the system directory is returned. "appauthor" (only used on Windows) is the name of the appauthor or distributing body for this application. Typically it is the owning company name. This falls back to appname. You may pass False to disable it. "version" is an optional version path element to append to the path. You might want to use this if you want multiple versions of your app to be able to run independently. If used, this would typically be ".". Only applied when appname is present. "roaming" (boolean, default False) can be set True to use the Windows roaming appdata directory. That means that for users on a Windows network setup for roaming profiles, this user data will be sync'd on login. See for a discussion of issues. Typical user data directories are: macOS: ~/Library/Application Support/ Unix: ~/.local/share/ # or in $XDG_DATA_HOME, if defined Win XP (not roaming): C:\Documents and Settings\\Application Data\\ Win XP (roaming): C:\Documents and Settings\\Local Settings\Application Data\\ Win 7 (not roaming): C:\Users\\AppData\Local\\ Win 7 (roaming): C:\Users\\AppData\Roaming\\ For Unix, we follow the XDG spec and support $XDG_DATA_HOME. That means, by default "~/.local/share/". """ if system == "win32": if appauthor is None: appauthor = appname const = roaming and "CSIDL_APPDATA" or "CSIDL_LOCAL_APPDATA" path = os.path.normpath(_get_win_folder(const)) if appname: if appauthor is not False: path = os.path.join(path, appauthor, appname) else: path = os.path.join(path, appname) elif system == 'darwin': path = os.path.expanduser('~/Library/Application Support/') if appname: path = os.path.join(path, appname) else: path = os.getenv('XDG_DATA_HOME', os.path.expanduser("~/.local/share")) if appname: path = os.path.join(path, appname) if appname and version: path = os.path.join(path, version) return path def site_data_dir(appname=None, appauthor=None, version=None, multipath=False): """Return full path to the user-shared data dir for this application. "appname" is the name of application. If None, just the system directory is returned. "appauthor" (only used on Windows) is the name of the appauthor or distributing body for this application. Typically it is the owning company name. This falls back to appname. You may pass False to disable it. "version" is an optional version path element to append to the path. You might want to use this if you want multiple versions of your app to be able to run independently. If used, this would typically be ".". Only applied when appname is present. "multipath" is an optional parameter only applicable to *nix which indicates that the entire list of data dirs should be returned. By default, the first item from XDG_DATA_DIRS is returned, or '/usr/local/share/', if XDG_DATA_DIRS is not set Typical user data directories are: macOS: /Library/Application Support/ Unix: /usr/local/share/ or /usr/share/ Win XP: C:\Documents and Settings\All Users\Application Data\\ Vista: (Fail! "C:\ProgramData" is a hidden *system* directory on Vista.) Win 7: C:\ProgramData\\ # Hidden, but writeable on Win 7. For Unix, this is using the $XDG_DATA_DIRS[0] default. 
WARNING: Do not use this on Windows. See the Vista-Fail note above for why. """ if system == "win32": if appauthor is None: appauthor = appname path = os.path.normpath(_get_win_folder("CSIDL_COMMON_APPDATA")) if appname: if appauthor is not False: path = os.path.join(path, appauthor, appname) else: path = os.path.join(path, appname) elif system == 'darwin': path = os.path.expanduser('/Library/Application Support') if appname: path = os.path.join(path, appname) else: # XDG default for $XDG_DATA_DIRS # only first, if multipath is False path = os.getenv('XDG_DATA_DIRS', os.pathsep.join(['/usr/local/share', '/usr/share'])) pathlist = [os.path.expanduser(x.rstrip(os.sep)) for x in path.split(os.pathsep)] if appname: if version: appname = os.path.join(appname, version) pathlist = [os.sep.join([x, appname]) for x in pathlist] if multipath: path = os.pathsep.join(pathlist) else: path = pathlist[0] return path if appname and version: path = os.path.join(path, version) return path def user_config_dir(appname=None, appauthor=None, version=None, roaming=False): r"""Return full path to the user-specific config dir for this application. "appname" is the name of application. If None, just the system directory is returned. "appauthor" (only used on Windows) is the name of the appauthor or distributing body for this application. Typically it is the owning company name. This falls back to appname. You may pass False to disable it. "version" is an optional version path element to append to the path. You might want to use this if you want multiple versions of your app to be able to run independently. If used, this would typically be ".". Only applied when appname is present. "roaming" (boolean, default False) can be set True to use the Windows roaming appdata directory. That means that for users on a Windows network setup for roaming profiles, this user data will be sync'd on login. See for a discussion of issues. Typical user data directories are: macOS: same as user_data_dir Unix: ~/.config/ # or in $XDG_CONFIG_HOME, if defined Win *: same as user_data_dir For Unix, we follow the XDG spec and support $XDG_CONFIG_HOME. That means, by deafult "~/.config/". """ if system in ["win32", "darwin"]: path = user_data_dir(appname, appauthor, None, roaming) else: path = os.getenv('XDG_CONFIG_HOME', os.path.expanduser("~/.config")) if appname: path = os.path.join(path, appname) if appname and version: path = os.path.join(path, version) return path def site_config_dir(appname=None, appauthor=None, version=None, multipath=False): """Return full path to the user-shared data dir for this application. "appname" is the name of application. If None, just the system directory is returned. "appauthor" (only used on Windows) is the name of the appauthor or distributing body for this application. Typically it is the owning company name. This falls back to appname. You may pass False to disable it. "version" is an optional version path element to append to the path. You might want to use this if you want multiple versions of your app to be able to run independently. If used, this would typically be ".". Only applied when appname is present. "multipath" is an optional parameter only applicable to *nix which indicates that the entire list of config dirs should be returned. 
By default, the first item from XDG_CONFIG_DIRS is returned, or '/etc/xdg/', if XDG_CONFIG_DIRS is not set Typical user data directories are: macOS: same as site_data_dir Unix: /etc/xdg/ or $XDG_CONFIG_DIRS[i]/ for each value in $XDG_CONFIG_DIRS Win *: same as site_data_dir Vista: (Fail! "C:\ProgramData" is a hidden *system* directory on Vista.) For Unix, this is using the $XDG_CONFIG_DIRS[0] default, if multipath=False WARNING: Do not use this on Windows. See the Vista-Fail note above for why. """ if system in ["win32", "darwin"]: path = site_data_dir(appname, appauthor) if appname and version: path = os.path.join(path, version) else: # XDG default for $XDG_CONFIG_DIRS # only first, if multipath is False path = os.getenv('XDG_CONFIG_DIRS', '/etc/xdg') pathlist = [os.path.expanduser(x.rstrip(os.sep)) for x in path.split(os.pathsep)] if appname: if version: appname = os.path.join(appname, version) pathlist = [os.sep.join([x, appname]) for x in pathlist] if multipath: path = os.pathsep.join(pathlist) else: path = pathlist[0] return path def user_cache_dir(appname=None, appauthor=None, version=None, opinion=True): r"""Return full path to the user-specific cache dir for this application. "appname" is the name of application. If None, just the system directory is returned. "appauthor" (only used on Windows) is the name of the appauthor or distributing body for this application. Typically it is the owning company name. This falls back to appname. You may pass False to disable it. "version" is an optional version path element to append to the path. You might want to use this if you want multiple versions of your app to be able to run independently. If used, this would typically be ".". Only applied when appname is present. "opinion" (boolean) can be False to disable the appending of "Cache" to the base app data dir for Windows. See discussion below. Typical user cache directories are: macOS: ~/Library/Caches/ Unix: ~/.cache/ (XDG default) Win XP: C:\Documents and Settings\\Local Settings\Application Data\\\Cache Vista: C:\Users\\AppData\Local\\\Cache On Windows the only suggestion in the MSDN docs is that local settings go in the `CSIDL_LOCAL_APPDATA` directory. This is identical to the non-roaming app data dir (the default returned by `user_data_dir` above). Apps typically put cache data somewhere *under* the given dir here. Some examples: ...\Mozilla\Firefox\Profiles\\Cache ...\Acme\SuperApp\Cache\1.0 OPINION: This function appends "Cache" to the `CSIDL_LOCAL_APPDATA` value. This can be disabled with the `opinion=False` option. """ if system == "win32": if appauthor is None: appauthor = appname path = os.path.normpath(_get_win_folder("CSIDL_LOCAL_APPDATA")) if appname: if appauthor is not False: path = os.path.join(path, appauthor, appname) else: path = os.path.join(path, appname) if opinion: path = os.path.join(path, "Cache") elif system == 'darwin': path = os.path.expanduser('~/Library/Caches') if appname: path = os.path.join(path, appname) else: path = os.getenv('XDG_CACHE_HOME', os.path.expanduser('~/.cache')) if appname: path = os.path.join(path, appname) if appname and version: path = os.path.join(path, version) return path def user_log_dir(appname=None, appauthor=None, version=None, opinion=True): r"""Return full path to the user-specific log dir for this application. "appname" is the name of application. If None, just the system directory is returned. "appauthor" (only used on Windows) is the name of the appauthor or distributing body for this application. 
Typically it is the owning company name. This falls back to appname. You may pass False to disable it. "version" is an optional version path element to append to the path. You might want to use this if you want multiple versions of your app to be able to run independently. If used, this would typically be ".". Only applied when appname is present. "opinion" (boolean) can be False to disable the appending of "Logs" to the base app data dir for Windows, and "log" to the base cache dir for Unix. See discussion below. Typical user cache directories are: macOS: ~/Library/Logs/ Unix: ~/.cache//log # or under $XDG_CACHE_HOME if defined Win XP: C:\Documents and Settings\\Local Settings\Application Data\\\Logs Vista: C:\Users\\AppData\Local\\\Logs On Windows the only suggestion in the MSDN docs is that local settings go in the `CSIDL_LOCAL_APPDATA` directory. (Note: I'm interested in examples of what some windows apps use for a logs dir.) OPINION: This function appends "Logs" to the `CSIDL_LOCAL_APPDATA` value for Windows and appends "log" to the user cache dir for Unix. This can be disabled with the `opinion=False` option. """ if system == "darwin": path = os.path.join( os.path.expanduser('~/Library/Logs'), appname) elif system == "win32": path = user_data_dir(appname, appauthor, version) version = False if opinion: path = os.path.join(path, "Logs") else: path = user_cache_dir(appname, appauthor, version) version = False if opinion: path = os.path.join(path, "log") if appname and version: path = os.path.join(path, version) return path class AppDirs(object): """Convenience wrapper for getting application dirs.""" def __init__(self, appname, appauthor=None, version=None, roaming=False, multipath=False): self.appname = appname self.appauthor = appauthor self.version = version self.roaming = roaming self.multipath = multipath @property def user_data_dir(self): return user_data_dir(self.appname, self.appauthor, version=self.version, roaming=self.roaming) @property def site_data_dir(self): return site_data_dir(self.appname, self.appauthor, version=self.version, multipath=self.multipath) @property def user_config_dir(self): return user_config_dir(self.appname, self.appauthor, version=self.version, roaming=self.roaming) @property def site_config_dir(self): return site_config_dir(self.appname, self.appauthor, version=self.version, multipath=self.multipath) @property def user_cache_dir(self): return user_cache_dir(self.appname, self.appauthor, version=self.version) @property def user_log_dir(self): return user_log_dir(self.appname, self.appauthor, version=self.version) #---- internal support stuff def _get_win_folder_from_registry(csidl_name): """This is a fallback technique at best. I'm not sure if using the registry for this guarantees us the correct answer for all CSIDL_* names. """ import _winreg shell_folder_name = { "CSIDL_APPDATA": "AppData", "CSIDL_COMMON_APPDATA": "Common AppData", "CSIDL_LOCAL_APPDATA": "Local AppData", }[csidl_name] key = _winreg.OpenKey( _winreg.HKEY_CURRENT_USER, r"Software\Microsoft\Windows\CurrentVersion\Explorer\Shell Folders" ) dir, type = _winreg.QueryValueEx(key, shell_folder_name) return dir def _get_win_folder_with_pywin32(csidl_name): from win32com.shell import shellcon, shell dir = shell.SHGetFolderPath(0, getattr(shellcon, csidl_name), 0, 0) # Try to make this a unicode path because SHGetFolderPath does # not return unicode strings when there is unicode data in the # path. try: dir = unicode(dir) # Downgrade to short path name if have highbit chars. See # . 
has_high_char = False for c in dir: if ord(c) > 255: has_high_char = True break if has_high_char: try: import win32api dir = win32api.GetShortPathName(dir) except ImportError: pass except UnicodeError: pass return dir def _get_win_folder_with_ctypes(csidl_name): import ctypes csidl_const = { "CSIDL_APPDATA": 26, "CSIDL_COMMON_APPDATA": 35, "CSIDL_LOCAL_APPDATA": 28, }[csidl_name] buf = ctypes.create_unicode_buffer(1024) ctypes.windll.shell32.SHGetFolderPathW(None, csidl_const, None, 0, buf) # Downgrade to short path name if have highbit chars. See # . has_high_char = False for c in buf: if ord(c) > 255: has_high_char = True break if has_high_char: buf2 = ctypes.create_unicode_buffer(1024) if ctypes.windll.kernel32.GetShortPathNameW(buf.value, buf2, 1024): buf = buf2 return buf.value def _get_win_folder_with_jna(csidl_name): import array from com.sun import jna from com.sun.jna.platform import win32 buf_size = win32.WinDef.MAX_PATH * 2 buf = array.zeros('c', buf_size) shell = win32.Shell32.INSTANCE shell.SHGetFolderPath(None, getattr(win32.ShlObj, csidl_name), None, win32.ShlObj.SHGFP_TYPE_CURRENT, buf) dir = jna.Native.toString(buf.tostring()).rstrip("\0") # Downgrade to short path name if have highbit chars. See # . has_high_char = False for c in dir: if ord(c) > 255: has_high_char = True break if has_high_char: buf = array.zeros('c', buf_size) kernel = win32.Kernel32.INSTANCE if kernal.GetShortPathName(dir, buf, buf_size): dir = jna.Native.toString(buf.tostring()).rstrip("\0") return dir if system == "win32": try: import win32com.shell _get_win_folder = _get_win_folder_with_pywin32 except ImportError: try: from ctypes import windll _get_win_folder = _get_win_folder_with_ctypes except ImportError: try: import com.sun.jna _get_win_folder = _get_win_folder_with_jna except ImportError: _get_win_folder = _get_win_folder_from_registry #---- self test code if __name__ == "__main__": appname = "MyApp" appauthor = "MyCompany" props = ("user_data_dir", "site_data_dir", "user_config_dir", "site_config_dir", "user_cache_dir", "user_log_dir") print("-- app dirs (with optional 'version')") dirs = AppDirs(appname, appauthor, version="1.0") for prop in props: print("%s: %s" % (prop, getattr(dirs, prop))) print("\n-- app dirs (without optional 'version')") dirs = AppDirs(appname, appauthor) for prop in props: print("%s: %s" % (prop, getattr(dirs, prop))) print("\n-- app dirs (without optional 'appauthor')") dirs = AppDirs(appname) for prop in props: print("%s: %s" % (prop, getattr(dirs, prop))) print("\n-- app dirs (with disabled 'appauthor')") dirs = AppDirs(appname, appauthor=False) for prop in props: print("%s: %s" % (prop, getattr(dirs, prop))) PKZǞ+P/P//site-packages/pip/_vendor/lockfile/__init__.pycnu[ abc @@sdZddlmZddlZddlZddlZddlZddlZeedspej e_ neej dsej j ej _ ndddd d d d d dddddg ZdefdYZdefdYZdefdYZd efdYZd efdYZd efdYZd efdYZd efdYZdefdYZdefdYZdZdZd Zd!Zdd"Z eed#rd$d%l!m"Z#e#j$Z%nd$d&l!m&Z'e'j(Z%e%Z)dS('s lockfile.py - Platform-independent advisory file locks. Requires Python 2.5 unless you apply 2.4.diff Locking is done on a per-thread basis instead of a per-process basis. Usage: >>> lock = LockFile('somefile') >>> try: ... lock.acquire() ... except AlreadyLocked: ... print 'somefile', 'is locked already.' ... except LockFailed: ... print 'somefile', 'can\'t be locked.' ... else: ... print 'got lock' got lock >>> print lock.is_locked() True >>> lock.release() >>> lock = LockFile('somefile') >>> print lock.is_locked() False >>> with lock: ... 
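The appdirs module reproduced above ends with a small self-test; the sketch below shows the same API in minimal form. It assumes the vendored import path pip._vendor.appdirs, and "MyApp"/"MyCompany" are placeholder names, not anything defined by the file.

# Hypothetical usage of the appdirs module shown above ("MyApp"/"MyCompany" are made-up names).
from pip._vendor import appdirs

# Class-style API: one object, many directory properties.
dirs = appdirs.AppDirs("MyApp", appauthor="MyCompany", version="1.0")
print(dirs.user_data_dir)     # e.g. ~/.local/share/MyApp/1.0 on Linux
print(dirs.user_cache_dir)    # e.g. ~/.cache/MyApp/1.0 on Linux

# Function-style API: the same values without constructing an AppDirs object.
print(appdirs.user_config_dir("MyApp", "MyCompany"))
print(appdirs.site_data_dir("MyApp", "MyCompany", multipath=True))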
[compiled bytecode omitted: the remainder of lockfile/__init__.pyc plus mkdirlockfile.pyc, linklockfile.pyo and the beginning of sqlitelockfile.pyo -- binary .pyc/.pyo content; plain .py sources for most of these modules appear later in this archive]
R%(RRR*((sG/usr/lib/python2.7/site-packages/pip/_vendor/lockfile/sqlitelockfile.pyR$s    cC`s7|jj}|jd|j|jf|j S(Ns?select * from locks where lock_file = ? and unique_name = ?(RRRR R R%(RR((sG/usr/lib/python2.7/site-packages/pip/_vendor/lockfile/sqlitelockfile.pyR,s cC`s6|jj}|jd|jf|jjdS(Ns&delete from locks where lock_file = ?(RRRR R(RR((sG/usr/lib/python2.7/site-packages/pip/_vendor/lockfile/sqlitelockfile.pyt break_locks  N( t__name__t __module__t__doc__R R R#RR+R.R-R$R,R0(((sG/usr/lib/python2.7/site-packages/pip/_vendor/lockfile/sqlitelockfile.pyRs" ?   (t __future__RRR"RR t NameErrortstrtRRRRRR(((sG/usr/lib/python2.7/site-packages/pip/_vendor/lockfile/sqlitelockfile.pyts    (PKZ6$hb b 4site-packages/pip/_vendor/lockfile/mkdirlockfile.pyonu[ abc@`sddlmZmZddlZddlZddlZddlZddlmZm Z m Z m Z m Z m Z defdYZdS(i(tabsolute_importtdivisionNi(tLockBaset LockFailedt NotLockedt NotMyLockt LockTimeoutt AlreadyLockedt MkdirLockFilecB`sMeZdZeddZddZdZdZdZ dZ RS(s"Lock file by creating a directory.cC`sKtj||||tjj|jd|j|j|jf|_ dS(ss >>> lock = MkdirLockFile('somefile') >>> lock = MkdirLockFile('somefile', threaded=False) s%s.%s%sN( Rt__init__tostpathtjoint lock_filethostnamettnametpidt unique_name(tselfR tthreadedttimeout((sF/usr/lib/python2.7/site-packages/pip/_vendor/lockfile/mkdirlockfile.pyR s  cC`s{|dk r|n|j}tj}|dk rL|dkrL||7}n|dkrad}ntd|d}xtrvytj|jWntk rXt j d}|j t j krBtj j|jrdS|dk r2tj|kr2|dkrtd|j q2td|j ntj|qstd|jqwXt|jdjdSqwWdS( Nig?i is&Timeout waiting to acquire lock for %ss%s is already lockedsfailed to create %stwb(tNoneRttimetmaxtTrueR tmkdirR tOSErrortsystexc_infoterrnotEEXISTR texistsRRRtsleepRtopentclose(RRtend_timetwaitterr((sF/usr/lib/python2.7/site-packages/pip/_vendor/lockfile/mkdirlockfile.pytacquires2       cC`sq|js"td|jn+tjj|jsMtd|jntj|jtj|j dS(Ns%s is not lockeds%s is locked, but not by me( t is_lockedRR R R RRtunlinktrmdirR (R((sF/usr/lib/python2.7/site-packages/pip/_vendor/lockfile/mkdirlockfile.pytreleaseAs  cC`stjj|jS(N(R R R R (R((sF/usr/lib/python2.7/site-packages/pip/_vendor/lockfile/mkdirlockfile.pyR(IscC`s|jotjj|jS(N(R(R R R R(R((sF/usr/lib/python2.7/site-packages/pip/_vendor/lockfile/mkdirlockfile.pyt i_am_lockingLs cC`shtjj|jrdx9tj|jD]%}tjtjj|j|q(Wtj|jndS(N(R R R R tlistdirR)R R*(Rtname((sF/usr/lib/python2.7/site-packages/pip/_vendor/lockfile/mkdirlockfile.pyt break_lockPs#N( t__name__t __module__t__doc__RRR R'R+R(R,R/(((sF/usr/lib/python2.7/site-packages/pip/_vendor/lockfile/mkdirlockfile.pyR s &   (t __future__RRRR RRtRRRRRRR(((sF/usr/lib/python2.7/site-packages/pip/_vendor/lockfile/mkdirlockfile.pyts     .PKZ# 8 8 5site-packages/pip/_vendor/lockfile/symlinklockfile.pynu[from __future__ import absolute_import import os import time from . import (LockBase, NotLocked, NotMyLock, LockTimeout, AlreadyLocked) class SymlinkLockFile(LockBase): """Lock access to a file using symlink(2).""" def __init__(self, path, threaded=True, timeout=None): # super(SymlinkLockFile).__init(...) LockBase.__init__(self, path, threaded, timeout) # split it back! self.unique_name = os.path.split(self.unique_name)[1] def acquire(self, timeout=None): # Hopefully unnecessary for symlink. # try: # open(self.unique_name, "wb").close() # except IOError: # raise LockFailed("failed to create %s" % self.unique_name) timeout = timeout if timeout is not None else self.timeout end_time = time.time() if timeout is not None and timeout > 0: end_time += timeout while True: # Try and create a symbolic link to it. try: os.symlink(self.unique_name, self.lock_file) except OSError: # Link creation failed. 
Maybe we've double-locked? if self.i_am_locking(): # Linked to out unique name. Proceed. return else: # Otherwise the lock creation failed. if timeout is not None and time.time() > end_time: if timeout > 0: raise LockTimeout("Timeout waiting to acquire" " lock for %s" % self.path) else: raise AlreadyLocked("%s is already locked" % self.path) time.sleep(timeout / 10 if timeout is not None else 0.1) else: # Link creation succeeded. We're good to go. return def release(self): if not self.is_locked(): raise NotLocked("%s is not locked" % self.path) elif not self.i_am_locking(): raise NotMyLock("%s is locked, but not by me" % self.path) os.unlink(self.lock_file) def is_locked(self): return os.path.islink(self.lock_file) def i_am_locking(self): return (os.path.islink(self.lock_file) and os.readlink(self.lock_file) == self.unique_name) def break_lock(self): if os.path.islink(self.lock_file): # exists && link os.unlink(self.lock_file) PKZ|2site-packages/pip/_vendor/lockfile/pidlockfile.pycnu[ abc@@sdZddlmZddlZddlZddlZddlmZmZm Z m Z m Z m Z defdYZ dZd Zd ZdS( s8 Lockfile behaviour implemented via Unix PID files. i(tabsolute_importNi(tLockBaset AlreadyLockedt LockFailedt NotLockedt NotMyLockt LockTimeoutt PIDLockFilecB@sVeZdZeddZdZdZdZddZ dZ dZ RS( sA Lockfile implemented as a Unix PID file. The lock file is a normal file named by the attribute `path`. A lock's PID file contains a single line of text, containing the process ID (PID) of the process that acquired the lock. >>> lock = PIDLockFile('somefile') >>> lock = PIDLockFile('somefile') cC@s&tj||t||j|_dS(N(Rt__init__tFalsetpatht unique_name(tselfR tthreadedttimeout((sD/usr/lib/python2.7/site-packages/pip/_vendor/lockfile/pidlockfile.pyR$scC@s t|jS(s- Get the PID from the lock file. (tread_pid_from_pidfileR (R ((sD/usr/lib/python2.7/site-packages/pip/_vendor/lockfile/pidlockfile.pytread_pid*scC@stjj|jS(sv Test if the lock is currently held. The lock is held if the PID file for this lock exists. (tosR texists(R ((sD/usr/lib/python2.7/site-packages/pip/_vendor/lockfile/pidlockfile.pyt is_locked/scC@s"|jo!tj|jkS(s Test if the lock is held by the current process. Returns ``True`` if the current process ID matches the number stored in the PID file. (RRtgetpidR(R ((sD/usr/lib/python2.7/site-packages/pip/_vendor/lockfile/pidlockfile.pyt i_am_locking7scC@s)|dk r|n|j}tj}|dk rL|dkrL||7}nxtr$yt|jWntk r}|jtjkrtj|kr|dk r|dkrt d|jqt d|jntj |dk r|dpdq!t d|jqOXdSqOWdS(s Acquire the lock. Creates the PID file for this lock, or raises an error if the lock could not be acquired. is&Timeout waiting to acquire lock for %ss%s is already lockedi g?sfailed to create %sN( tNoneRttimetTruetwrite_pid_to_pidfileR tOSErrorterrnotEEXISTRRtsleepR(R Rtend_timetexc((sD/usr/lib/python2.7/site-packages/pip/_vendor/lockfile/pidlockfile.pytacquire?s$   &cC@sU|js"td|jn|jsDtd|jnt|jdS(s Release the lock. Removes the PID file to release the lock, or raises an error if the current process does not hold the lock. s%s is not lockeds%s is locked, but not by meN(RRR RRtremove_existing_pidfile(R ((sD/usr/lib/python2.7/site-packages/pip/_vendor/lockfile/pidlockfile.pytrelease_s   cC@st|jdS(s Break an existing lock. Removes the PID file if it already exists, otherwise does nothing. N(R!R (R ((sD/usr/lib/python2.7/site-packages/pip/_vendor/lockfile/pidlockfile.pyt break_locklsN( t__name__t __module__t__doc__R RRRRRR R"R#(((sD/usr/lib/python2.7/site-packages/pip/_vendor/lockfile/pidlockfile.pyRs     cC@sqd}yt|d}Wntk r,nAX|jj}yt|}Wntk rbnX|j|S(s Read the PID recorded in the named PID file. 
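The symlinklockfile.py source above implements the usual LockBase interface (acquire/release/is_locked/break_lock). A minimal sketch of how it might be used follows; the lock path and timeout are arbitrary example values, and the vendored import path pip._vendor.lockfile is assumed.

# Sketch only: exercising SymlinkLockFile from the source above.
# "/tmp/somefile" and the 10-second timeout are arbitrary example values.
from pip._vendor.lockfile import LockTimeout
from pip._vendor.lockfile.symlinklockfile import SymlinkLockFile

lock = SymlinkLockFile("/tmp/somefile", threaded=True)
try:
    lock.acquire(timeout=10)   # raises LockTimeout if the symlink is still held
except LockTimeout:
    print("could not lock /tmp/somefile")
else:
    try:
        pass                   # ... work on /tmp/somefile while holding the lock ...
    finally:
        lock.release()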
[compiled bytecode omitted: the remainder of lockfile/pidlockfile.pyc, all of pidlockfile.pyo and most of lockfile/__init__.pyo -- binary content not reproducible as text; lockfile/__init__.py appears in source form below]
simplement in subclassN(R(R((sA/usr/lib/python2.7/site-packages/pip/_vendor/lockfile/__init__.pyt break_lockscC@sd|jj|j|jfS(Ns<%s: %r -- %r>(RRR2R(R((sA/usr/lib/python2.7/site-packages/pip/_vendor/lockfile/__init__.pyRsN( RRRtTrueR RR5R6R7R(((sA/usr/lib/python2.7/site-packages/pip/_vendor/lockfile/__init__.pyRs !   cO@smtjd|tddt|dts:|d}nt|dkr`| r`t|d4sF                 -:    PKZ!&~  3site-packages/pip/_vendor/lockfile/mkdirlockfile.pynu[from __future__ import absolute_import, division import time import os import sys import errno from . import (LockBase, LockFailed, NotLocked, NotMyLock, LockTimeout, AlreadyLocked) class MkdirLockFile(LockBase): """Lock file by creating a directory.""" def __init__(self, path, threaded=True, timeout=None): """ >>> lock = MkdirLockFile('somefile') >>> lock = MkdirLockFile('somefile', threaded=False) """ LockBase.__init__(self, path, threaded, timeout) # Lock file itself is a directory. Place the unique file name into # it. self.unique_name = os.path.join(self.lock_file, "%s.%s%s" % (self.hostname, self.tname, self.pid)) def acquire(self, timeout=None): timeout = timeout if timeout is not None else self.timeout end_time = time.time() if timeout is not None and timeout > 0: end_time += timeout if timeout is None: wait = 0.1 else: wait = max(0, timeout / 10) while True: try: os.mkdir(self.lock_file) except OSError: err = sys.exc_info()[1] if err.errno == errno.EEXIST: # Already locked. if os.path.exists(self.unique_name): # Already locked by me. return if timeout is not None and time.time() > end_time: if timeout > 0: raise LockTimeout("Timeout waiting to acquire" " lock for %s" % self.path) else: # Someone else has the lock. raise AlreadyLocked("%s is already locked" % self.path) time.sleep(wait) else: # Couldn't create the lock for some other reason raise LockFailed("failed to create %s" % self.lock_file) else: open(self.unique_name, "wb").close() return def release(self): if not self.is_locked(): raise NotLocked("%s is not locked" % self.path) elif not os.path.exists(self.unique_name): raise NotMyLock("%s is locked, but not by me" % self.path) os.unlink(self.unique_name) os.rmdir(self.lock_file) def is_locked(self): return os.path.exists(self.lock_file) def i_am_locking(self): return (self.is_locked() and os.path.exists(self.unique_name)) def break_lock(self): if os.path.exists(self.lock_file): for name in os.listdir(self.lock_file): os.unlink(os.path.join(self.lock_file, name)) os.rmdir(self.lock_file) PKZ4site-packages/pip/_vendor/lockfile/sqlitelockfile.pynu[from __future__ import absolute_import, division import time import os try: unicode except NameError: unicode = str from . import LockBase, NotLocked, NotMyLock, LockTimeout, AlreadyLocked class SQLiteLockFile(LockBase): "Demonstrate SQL-based locking." 
testdb = None def __init__(self, path, threaded=True, timeout=None): """ >>> lock = SQLiteLockFile('somefile') >>> lock = SQLiteLockFile('somefile', threaded=False) """ LockBase.__init__(self, path, threaded, timeout) self.lock_file = unicode(self.lock_file) self.unique_name = unicode(self.unique_name) if SQLiteLockFile.testdb is None: import tempfile _fd, testdb = tempfile.mkstemp() os.close(_fd) os.unlink(testdb) del _fd, tempfile SQLiteLockFile.testdb = testdb import sqlite3 self.connection = sqlite3.connect(SQLiteLockFile.testdb) c = self.connection.cursor() try: c.execute("create table locks" "(" " lock_file varchar(32)," " unique_name varchar(32)" ")") except sqlite3.OperationalError: pass else: self.connection.commit() import atexit atexit.register(os.unlink, SQLiteLockFile.testdb) def acquire(self, timeout=None): timeout = timeout if timeout is not None else self.timeout end_time = time.time() if timeout is not None and timeout > 0: end_time += timeout if timeout is None: wait = 0.1 elif timeout <= 0: wait = 0 else: wait = timeout / 10 cursor = self.connection.cursor() while True: if not self.is_locked(): # Not locked. Try to lock it. cursor.execute("insert into locks" " (lock_file, unique_name)" " values" " (?, ?)", (self.lock_file, self.unique_name)) self.connection.commit() # Check to see if we are the only lock holder. cursor.execute("select * from locks" " where unique_name = ?", (self.unique_name,)) rows = cursor.fetchall() if len(rows) > 1: # Nope. Someone else got there. Remove our lock. cursor.execute("delete from locks" " where unique_name = ?", (self.unique_name,)) self.connection.commit() else: # Yup. We're done, so go home. return else: # Check to see if we are the only lock holder. cursor.execute("select * from locks" " where unique_name = ?", (self.unique_name,)) rows = cursor.fetchall() if len(rows) == 1: # We're the locker, so go home. return # Maybe we should wait a bit longer. if timeout is not None and time.time() > end_time: if timeout > 0: # No more waiting. raise LockTimeout("Timeout waiting to acquire" " lock for %s" % self.path) else: # Someone else has the lock and we are impatient.. raise AlreadyLocked("%s is already locked" % self.path) # Well, okay. We'll give it a bit longer. time.sleep(wait) def release(self): if not self.is_locked(): raise NotLocked("%s is not locked" % self.path) if not self.i_am_locking(): raise NotMyLock("%s is locked, but not by me (by %s)" % (self.unique_name, self._who_is_locking())) cursor = self.connection.cursor() cursor.execute("delete from locks" " where unique_name = ?", (self.unique_name,)) self.connection.commit() def _who_is_locking(self): cursor = self.connection.cursor() cursor.execute("select unique_name from locks" " where lock_file = ?", (self.lock_file,)) return cursor.fetchone()[0] def is_locked(self): cursor = self.connection.cursor() cursor.execute("select * from locks" " where lock_file = ?", (self.lock_file,)) rows = cursor.fetchall() return not not rows def i_am_locking(self): cursor = self.connection.cursor() cursor.execute("select * from locks" " where lock_file = ?" 
" and unique_name = ?", (self.lock_file, self.unique_name)) return not not cursor.fetchall() def break_lock(self): cursor = self.connection.cursor() cursor.execute("delete from locks" " where lock_file = ?", (self.lock_file,)) self.connection.commit() PKZ_] 6site-packages/pip/_vendor/lockfile/symlinklockfile.pyonu[ abc@@sjddlmZddlZddlZddlmZmZmZmZm Z defdYZ dS(i(tabsolute_importNi(tLockBaset NotLockedt NotMyLockt LockTimeoutt AlreadyLockedtSymlinkLockFilecB@sMeZdZeddZddZdZdZdZ dZ RS(s'Lock access to a file using symlink(2).cC@s6tj||||tjj|jd|_dS(Ni(Rt__init__tostpathtsplitt unique_name(tselfR tthreadedttimeout((sH/usr/lib/python2.7/site-packages/pip/_vendor/lockfile/symlinklockfile.pyR scC@s|dk r|n|j}tj}|dk rL|dkrL||7}nxtrytj|j|jWntk r |j rdS|dk rtj|kr|dkrt d|j qt d|j ntj |dk r|dndqOXdSqOWdS(Nis&Timeout waiting to acquire lock for %ss%s is already lockedi g?(tNoneRttimetTrueRtsymlinkR t lock_filetOSErrort i_am_lockingRR Rtsleep(R Rtend_time((sH/usr/lib/python2.7/site-packages/pip/_vendor/lockfile/symlinklockfile.pytacquires$      'cC@sX|js"td|jn"|jsDtd|jntj|jdS(Ns%s is not lockeds%s is locked, but not by me(t is_lockedRR RRRtunlinkR(R ((sH/usr/lib/python2.7/site-packages/pip/_vendor/lockfile/symlinklockfile.pytrelease6s   cC@stjj|jS(N(RR tislinkR(R ((sH/usr/lib/python2.7/site-packages/pip/_vendor/lockfile/symlinklockfile.pyR=scC@s.tjj|jo-tj|j|jkS(N(RR RRtreadlinkR (R ((sH/usr/lib/python2.7/site-packages/pip/_vendor/lockfile/symlinklockfile.pyR@scC@s,tjj|jr(tj|jndS(N(RR RRR(R ((sH/usr/lib/python2.7/site-packages/pip/_vendor/lockfile/symlinklockfile.pyt break_lockDsN( t__name__t __module__t__doc__RRRRRRRR(((sH/usr/lib/python2.7/site-packages/pip/_vendor/lockfile/symlinklockfile.pyR s #   ( t __future__RRRtRRRRRR(((sH/usr/lib/python2.7/site-packages/pip/_vendor/lockfile/symlinklockfile.pyts  (PKZ#$$.site-packages/pip/_vendor/lockfile/__init__.pynu[# -*- coding: utf-8 -*- """ lockfile.py - Platform-independent advisory file locks. Requires Python 2.5 unless you apply 2.4.diff Locking is done on a per-thread basis instead of a per-process basis. Usage: >>> lock = LockFile('somefile') >>> try: ... lock.acquire() ... except AlreadyLocked: ... print 'somefile', 'is locked already.' ... except LockFailed: ... print 'somefile', 'can\\'t be locked.' ... else: ... print 'got lock' got lock >>> print lock.is_locked() True >>> lock.release() >>> lock = LockFile('somefile') >>> print lock.is_locked() False >>> with lock: ... print lock.is_locked() True >>> print lock.is_locked() False >>> lock = LockFile('somefile') >>> # It is okay to lock twice from the same thread... >>> with lock: ... lock.acquire() ... >>> # Though no counter is kept, so you can't unlock multiple times... >>> print lock.is_locked() False Exceptions: Error - base class for other exceptions LockError - base class for all locking exceptions AlreadyLocked - Another thread or process already holds the lock LockFailed - Lock failed for some other reason UnlockError - base class for all unlocking exceptions AlreadyUnlocked - File was not locked. NotMyLock - File was locked but not by the current thread/process """ from __future__ import absolute_import import functools import os import socket import threading import warnings # Work with PEP8 and non-PEP8 versions of threading module. 
if not hasattr(threading, "current_thread"): threading.current_thread = threading.currentThread if not hasattr(threading.Thread, "get_name"): threading.Thread.get_name = threading.Thread.getName __all__ = ['Error', 'LockError', 'LockTimeout', 'AlreadyLocked', 'LockFailed', 'UnlockError', 'NotLocked', 'NotMyLock', 'LinkFileLock', 'MkdirFileLock', 'SQLiteFileLock', 'LockBase', 'locked'] class Error(Exception): """ Base class for other exceptions. >>> try: ... raise Error ... except Exception: ... pass """ pass class LockError(Error): """ Base class for error arising from attempts to acquire the lock. >>> try: ... raise LockError ... except Error: ... pass """ pass class LockTimeout(LockError): """Raised when lock creation fails within a user-defined period of time. >>> try: ... raise LockTimeout ... except LockError: ... pass """ pass class AlreadyLocked(LockError): """Some other thread/process is locking the file. >>> try: ... raise AlreadyLocked ... except LockError: ... pass """ pass class LockFailed(LockError): """Lock file creation failed for some other reason. >>> try: ... raise LockFailed ... except LockError: ... pass """ pass class UnlockError(Error): """ Base class for errors arising from attempts to release the lock. >>> try: ... raise UnlockError ... except Error: ... pass """ pass class NotLocked(UnlockError): """Raised when an attempt is made to unlock an unlocked file. >>> try: ... raise NotLocked ... except UnlockError: ... pass """ pass class NotMyLock(UnlockError): """Raised when an attempt is made to unlock a file someone else locked. >>> try: ... raise NotMyLock ... except UnlockError: ... pass """ pass class _SharedBase(object): def __init__(self, path): self.path = path def acquire(self, timeout=None): """ Acquire the lock. * If timeout is omitted (or None), wait forever trying to lock the file. * If timeout > 0, try to acquire the lock for that many seconds. If the lock period expires and the file is still locked, raise LockTimeout. * If timeout <= 0, raise AlreadyLocked immediately if the file is already locked. """ raise NotImplemented("implement in subclass") def release(self): """ Release the lock. If the file is not locked, raise NotLocked. """ raise NotImplemented("implement in subclass") def __enter__(self): """ Context manager support. """ self.acquire() return self def __exit__(self, *_exc): """ Context manager support. """ self.release() def __repr__(self): return "<%s: %r>" % (self.__class__.__name__, self.path) class LockBase(_SharedBase): """Base class for platform-specific lock classes.""" def __init__(self, path, threaded=True, timeout=None): """ >>> lock = LockBase('somefile') >>> lock = LockBase('somefile', threaded=False) """ super(LockBase, self).__init__(path) self.lock_file = os.path.abspath(path) + ".lock" self.hostname = socket.gethostname() self.pid = os.getpid() if threaded: t = threading.current_thread() # Thread objects in Python 2.4 and earlier do not have ident # attrs. Worm around that. ident = getattr(t, "ident", hash(t)) self.tname = "-%x" % (ident & 0xffffffff) else: self.tname = "" dirname = os.path.dirname(self.lock_file) # unique name is mostly about the current process, but must # also contain the path -- otherwise, two adjacent locked # files conflict (one file gets locked, creating lock-file and # unique file, the other one gets locked, creating lock-file # and overwriting the already existing lock-file, then one # gets unlocked, deleting both lock-file and unique file, # finally the last lock errors out upon releasing. 
self.unique_name = os.path.join(dirname, "%s%s.%s%s" % (self.hostname, self.tname, self.pid, hash(self.path))) self.timeout = timeout def is_locked(self): """ Tell whether or not the file is locked. """ raise NotImplemented("implement in subclass") def i_am_locking(self): """ Return True if this object is locking the file. """ raise NotImplemented("implement in subclass") def break_lock(self): """ Remove a lock. Useful if a locking thread failed to unlock. """ raise NotImplemented("implement in subclass") def __repr__(self): return "<%s: %r -- %r>" % (self.__class__.__name__, self.unique_name, self.path) def _fl_helper(cls, mod, *args, **kwds): warnings.warn("Import from %s module instead of lockfile package" % mod, DeprecationWarning, stacklevel=2) # This is a bit funky, but it's only for awhile. The way the unit tests # are constructed this function winds up as an unbound method, so it # actually takes three args, not two. We want to toss out self. if not isinstance(args[0], str): # We are testing, avoid the first arg args = args[1:] if len(args) == 1 and not kwds: kwds["threaded"] = True return cls(*args, **kwds) def LinkFileLock(*args, **kwds): """Factory function provided for backwards compatibility. Do not use in new code. Instead, import LinkLockFile from the lockfile.linklockfile module. """ from . import linklockfile return _fl_helper(linklockfile.LinkLockFile, "lockfile.linklockfile", *args, **kwds) def MkdirFileLock(*args, **kwds): """Factory function provided for backwards compatibility. Do not use in new code. Instead, import MkdirLockFile from the lockfile.mkdirlockfile module. """ from . import mkdirlockfile return _fl_helper(mkdirlockfile.MkdirLockFile, "lockfile.mkdirlockfile", *args, **kwds) def SQLiteFileLock(*args, **kwds): """Factory function provided for backwards compatibility. Do not use in new code. Instead, import SQLiteLockFile from the lockfile.mkdirlockfile module. """ from . import sqlitelockfile return _fl_helper(sqlitelockfile.SQLiteLockFile, "lockfile.sqlitelockfile", *args, **kwds) def locked(path, timeout=None): """Decorator which enables locks for decorated function. Arguments: - path: path for lockfile. - timeout (optional): Timeout for acquiring lock. Usage: @locked('/var/run/myname', timeout=0) def myname(...): ... """ def decor(func): @functools.wraps(func) def wrapper(*args, **kwargs): lock = FileLock(path, timeout=timeout) lock.acquire() try: return func(*args, **kwargs) finally: lock.release() return wrapper return decor if hasattr(os, "link"): from . import linklockfile as _llf LockFile = _llf.LinkLockFile else: from . 
import mkdirlockfile as _mlf LockFile = _mlf.MkdirLockFile FileLock = LockFile PKZg}Ha__5site-packages/pip/_vendor/lockfile/sqlitelockfile.pycnu[ abc@`sddlmZmZddlZddlZyeWnek rOeZnXddlm Z m Z m Z m Z m Z de fdYZdS(i(tabsolute_importtdivisionNi(tLockBaset NotLockedt NotMyLockt LockTimeoutt AlreadyLockedtSQLiteLockFilecB`s\eZdZdZeddZddZdZdZ dZ dZ dZ RS( sDemonstrate SQL-based locking.c C`stj||||t|j|_t|j|_tjdkrddl}|j \}}t j |t j |~~|t_nddl }|jtj|_|jj}y|jdWn|jk rn0X|jjddl} | jt j tjdS(su >>> lock = SQLiteLockFile('somefile') >>> lock = SQLiteLockFile('somefile', threaded=False) iNsGcreate table locks( lock_file varchar(32), unique_name varchar(32))(Rt__init__tunicodet lock_filet unique_nameRttestdbtNonettempfiletmkstemptostclosetunlinktsqlite3tconnectt connectiontcursortexecutetOperationalErrortcommittatexittregister( tselftpathtthreadedttimeoutRt_fdR RtcR((sG/usr/lib/python2.7/site-packages/pip/_vendor/lockfile/sqlitelockfile.pyRs(       cC`s|dk r|n|j}tj}|dk rL|dkrL||7}n|dkrad}n|dkrvd}n |d}|jj}x;tr|js.|jd|j|j f|jj |jd|j f|j }t |dkr'|jd|j f|jj qfdSn8|jd|j f|j }t |dkrfdS|dk rtj|kr|dkrt d|jqtd |jntj|qWdS( Nig?i s;insert into locks (lock_file, unique_name) values (?, ?)s*select * from locks where unique_name = ?is(delete from locks where unique_name = ?s&Timeout waiting to acquire lock for %ss%s is already locked(R RttimeRRtTruet is_lockedRR R RtfetchalltlenRRRtsleep(RRtend_timetwaitRtrows((sG/usr/lib/python2.7/site-packages/pip/_vendor/lockfile/sqlitelockfile.pytacquire5sD                    cC`s|js"td|jn|jsPtd|j|jfn|jj}|j d|jf|jj dS(Ns%s is not lockeds#%s is locked, but not by me (by %s)s(delete from locks where unique_name = ?( R$RRt i_am_lockingRR t_who_is_lockingRRRR(RR((sG/usr/lib/python2.7/site-packages/pip/_vendor/lockfile/sqlitelockfile.pytreleasets    cC`s3|jj}|jd|jf|jdS(Ns2select unique_name from locks where lock_file = ?i(RRRR tfetchone(RR((sG/usr/lib/python2.7/site-packages/pip/_vendor/lockfile/sqlitelockfile.pyR-s  cC`s7|jj}|jd|jf|j}| S(Ns(select * from locks where lock_file = ?(RRRR R%(RRR*((sG/usr/lib/python2.7/site-packages/pip/_vendor/lockfile/sqlitelockfile.pyR$s    cC`s7|jj}|jd|j|jf|j S(Ns?select * from locks where lock_file = ? and unique_name = ?(RRRR R R%(RR((sG/usr/lib/python2.7/site-packages/pip/_vendor/lockfile/sqlitelockfile.pyR,s cC`s6|jj}|jd|jf|jjdS(Ns&delete from locks where lock_file = ?(RRRR R(RR((sG/usr/lib/python2.7/site-packages/pip/_vendor/lockfile/sqlitelockfile.pyt break_locks  N( t__name__t __module__t__doc__R R R#RR+R.R-R$R,R0(((sG/usr/lib/python2.7/site-packages/pip/_vendor/lockfile/sqlitelockfile.pyRs" ?   (t __future__RRR"RR t NameErrortstrtRRRRRR(((sG/usr/lib/python2.7/site-packages/pip/_vendor/lockfile/sqlitelockfile.pyts    (PKZiY] ] 3site-packages/pip/_vendor/lockfile/linklockfile.pycnu[ abc@@spddlmZddlZddlZddlmZmZmZmZm Z m Z defdYZ dS(i(tabsolute_importNi(tLockBaset LockFailedt NotLockedt NotMyLockt LockTimeoutt AlreadyLockedt LinkLockFilecB@s>eZdZddZdZdZdZdZRS(sLock access to a file using atomic property of link(2). 
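A minimal usage sketch of the lockfile API assembled above (LockFile/FileLock, the exception hierarchy, and the locked() decorator from lockfile/__init__.py). This is editorial illustration, not part of the vendored archive: the paths 'somefile' and '/var/run/myname' and the function body are placeholders taken from the module's own docstrings, and the import line assumes the vendored copy under pip._vendor.

    from pip._vendor import lockfile

    # LockFile is bound at import time to the platform-appropriate class:
    # LinkLockFile when os.link exists, MkdirLockFile otherwise. FileLock is an alias.
    lock = lockfile.LockFile('somefile')
    try:
        # timeout <= 0 raises AlreadyLocked immediately if another thread/process
        # holds the lock; a positive timeout would raise LockTimeout after that
        # many seconds instead, and timeout=None would wait forever.
        lock.acquire(timeout=0)
    except lockfile.AlreadyLocked:
        print('somefile is locked already.')
    except lockfile.LockFailed:
        print("somefile can't be locked.")
    else:
        print('got lock')
        lock.release()

    # LockBase also supports the context-manager protocol
    # (acquire on enter, release on exit).
    with lockfile.LockFile('somefile'):
        pass

    # The locked() decorator wraps every call to the function in acquire()/release().
    @lockfile.locked('/var/run/myname', timeout=0)
    def myname():
        pass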
>>> lock = LinkLockFile('somefile') >>> lock = LinkLockFile('somefile', threaded=False) cC@s~yt|jdjWn$tk r@td|jnX|dk rS|n|j}tj}|dk r|dkr||7}nxtryyt j |j|j Wnt k rqt j |jj}|dkrdS|dk rKtj|krKt j|j|dkr5td|jqKtd|jntj|dk rg|dpjdqXdSqWdS( Ntwbsfailed to create %siis&Timeout waiting to acquire lock for %ss%s is already lockedi g?(topent unique_nametclosetIOErrorRtNonettimeoutttimetTruetostlinkt lock_filetOSErrortstattst_nlinktunlinkRtpathRtsleep(tselfRtend_timetnlinks((sE/usr/lib/python2.7/site-packages/pip/_vendor/lockfile/linklockfile.pytacquires0       'cC@sq|js"td|jn+tjj|jsMtd|jntj|jtj|jdS(Ns%s is not lockeds%s is locked, but not by me( t is_lockedRRRtexistsR RRR(R((sE/usr/lib/python2.7/site-packages/pip/_vendor/lockfile/linklockfile.pytrelease7s  cC@stjj|jS(N(RRRR(R((sE/usr/lib/python2.7/site-packages/pip/_vendor/lockfile/linklockfile.pyR?scC@s:|jo9tjj|jo9tj|jjdkS(Ni(RRRRR RR(R((sE/usr/lib/python2.7/site-packages/pip/_vendor/lockfile/linklockfile.pyt i_am_lockingBs cC@s,tjj|jr(tj|jndS(N(RRRRR(R((sE/usr/lib/python2.7/site-packages/pip/_vendor/lockfile/linklockfile.pyt break_lockGsN( t__name__t __module__t__doc__R RR RR!R"(((sE/usr/lib/python2.7/site-packages/pip/_vendor/lockfile/linklockfile.pyR s  &   ( t __future__RRRtRRRRRRR(((sE/usr/lib/python2.7/site-packages/pip/_vendor/lockfile/linklockfile.pyts  .PKZ # # This is free software: you may copy, modify, and/or distribute this work # under the terms of the Python Software Foundation License, version 2 or # later as published by the Python Software Foundation. # No warranty expressed or implied. See the file LICENSE.PSF-2 for details. """ Lockfile behaviour implemented via Unix PID files. """ from __future__ import absolute_import import errno import os import time from . import (LockBase, AlreadyLocked, LockFailed, NotLocked, NotMyLock, LockTimeout) class PIDLockFile(LockBase): """ Lockfile implemented as a Unix PID file. The lock file is a normal file named by the attribute `path`. A lock's PID file contains a single line of text, containing the process ID (PID) of the process that acquired the lock. >>> lock = PIDLockFile('somefile') >>> lock = PIDLockFile('somefile') """ def __init__(self, path, threaded=False, timeout=None): # pid lockfiles don't support threaded operation, so always force # False as the threaded arg. LockBase.__init__(self, path, False, timeout) self.unique_name = self.path def read_pid(self): """ Get the PID from the lock file. """ return read_pid_from_pidfile(self.path) def is_locked(self): """ Test if the lock is currently held. The lock is held if the PID file for this lock exists. """ return os.path.exists(self.path) def i_am_locking(self): """ Test if the lock is held by the current process. Returns ``True`` if the current process ID matches the number stored in the PID file. """ return self.is_locked() and os.getpid() == self.read_pid() def acquire(self, timeout=None): """ Acquire the lock. Creates the PID file for this lock, or raises an error if the lock could not be acquired. """ timeout = timeout if timeout is not None else self.timeout end_time = time.time() if timeout is not None and timeout > 0: end_time += timeout while True: try: write_pid_to_pidfile(self.path) except OSError as exc: if exc.errno == errno.EEXIST: # The lock creation failed. Maybe sleep a bit. 
if time.time() > end_time: if timeout is not None and timeout > 0: raise LockTimeout("Timeout waiting to acquire" " lock for %s" % self.path) else: raise AlreadyLocked("%s is already locked" % self.path) time.sleep(timeout is not None and timeout / 10 or 0.1) else: raise LockFailed("failed to create %s" % self.path) else: return def release(self): """ Release the lock. Removes the PID file to release the lock, or raises an error if the current process does not hold the lock. """ if not self.is_locked(): raise NotLocked("%s is not locked" % self.path) if not self.i_am_locking(): raise NotMyLock("%s is locked, but not by me" % self.path) remove_existing_pidfile(self.path) def break_lock(self): """ Break an existing lock. Removes the PID file if it already exists, otherwise does nothing. """ remove_existing_pidfile(self.path) def read_pid_from_pidfile(pidfile_path): """ Read the PID recorded in the named PID file. Read and return the numeric PID recorded as text in the named PID file. If the PID file cannot be read, or if the content is not a valid PID, return ``None``. """ pid = None try: pidfile = open(pidfile_path, 'r') except IOError: pass else: # According to the FHS 2.3 section on PID files in /var/run: # # The file must consist of the process identifier in # ASCII-encoded decimal, followed by a newline character. # # Programs that read PID files should be somewhat flexible # in what they accept; i.e., they should ignore extra # whitespace, leading zeroes, absence of the trailing # newline, or additional lines in the PID file. line = pidfile.readline().strip() try: pid = int(line) except ValueError: pass pidfile.close() return pid def write_pid_to_pidfile(pidfile_path): """ Write the PID in the named PID file. Get the numeric process ID (“PID”) of the current process and write it to the named file as a line of text. """ open_flags = (os.O_CREAT | os.O_EXCL | os.O_WRONLY) open_mode = 0o644 pidfile_fd = os.open(pidfile_path, open_flags, open_mode) pidfile = os.fdopen(pidfile_fd, 'w') # According to the FHS 2.3 section on PID files in /var/run: # # The file must consist of the process identifier in # ASCII-encoded decimal, followed by a newline character. For # example, if crond was process number 25, /var/run/crond.pid # would contain three characters: two, five, and newline. pid = os.getpid() pidfile.write("%s\n" % pid) pidfile.close() def remove_existing_pidfile(pidfile_path): """ Remove the named PID file if it exists. Removing a PID file that doesn't already exist puts us in the desired state, so we ignore the condition if the file does not exist. """ try: os.remove(pidfile_path) except OSError as exc: if exc.errno == errno.ENOENT: pass else: raise PKZ+'\ \ 2site-packages/pip/_vendor/lockfile/linklockfile.pynu[from __future__ import absolute_import import time import os from . import (LockBase, LockFailed, NotLocked, NotMyLock, LockTimeout, AlreadyLocked) class LinkLockFile(LockBase): """Lock access to a file using atomic property of link(2). >>> lock = LinkLockFile('somefile') >>> lock = LinkLockFile('somefile', threaded=False) """ def acquire(self, timeout=None): try: open(self.unique_name, "wb").close() except IOError: raise LockFailed("failed to create %s" % self.unique_name) timeout = timeout if timeout is not None else self.timeout end_time = time.time() if timeout is not None and timeout > 0: end_time += timeout while True: # Try and create a hard link to it. try: os.link(self.unique_name, self.lock_file) except OSError: # Link creation failed. Maybe we've double-locked? 
nlinks = os.stat(self.unique_name).st_nlink if nlinks == 2: # The original link plus the one I created == 2. We're # good to go. return else: # Otherwise the lock creation failed. if timeout is not None and time.time() > end_time: os.unlink(self.unique_name) if timeout > 0: raise LockTimeout("Timeout waiting to acquire" " lock for %s" % self.path) else: raise AlreadyLocked("%s is already locked" % self.path) time.sleep(timeout is not None and timeout / 10 or 0.1) else: # Link creation succeeded. We're good to go. return def release(self): if not self.is_locked(): raise NotLocked("%s is not locked" % self.path) elif not os.path.exists(self.unique_name): raise NotMyLock("%s is locked, but not by me" % self.path) os.unlink(self.unique_name) os.unlink(self.lock_file) def is_locked(self): return os.path.exists(self.lock_file) def i_am_locking(self): return (self.is_locked() and os.path.exists(self.unique_name) and os.stat(self.unique_name).st_nlink == 2) def break_lock(self): if os.path.exists(self.lock_file): os.unlink(self.lock_file) PKZ_] 6site-packages/pip/_vendor/lockfile/symlinklockfile.pycnu[ abc@@sjddlmZddlZddlZddlmZmZmZmZm Z defdYZ dS(i(tabsolute_importNi(tLockBaset NotLockedt NotMyLockt LockTimeoutt AlreadyLockedtSymlinkLockFilecB@sMeZdZeddZddZdZdZdZ dZ RS(s'Lock access to a file using symlink(2).cC@s6tj||||tjj|jd|_dS(Ni(Rt__init__tostpathtsplitt unique_name(tselfR tthreadedttimeout((sH/usr/lib/python2.7/site-packages/pip/_vendor/lockfile/symlinklockfile.pyR scC@s|dk r|n|j}tj}|dk rL|dkrL||7}nxtrytj|j|jWntk r |j rdS|dk rtj|kr|dkrt d|j qt d|j ntj |dk r|dndqOXdSqOWdS(Nis&Timeout waiting to acquire lock for %ss%s is already lockedi g?(tNoneRttimetTrueRtsymlinkR t lock_filetOSErrort i_am_lockingRR Rtsleep(R Rtend_time((sH/usr/lib/python2.7/site-packages/pip/_vendor/lockfile/symlinklockfile.pytacquires$      'cC@sX|js"td|jn"|jsDtd|jntj|jdS(Ns%s is not lockeds%s is locked, but not by me(t is_lockedRR RRRtunlinkR(R ((sH/usr/lib/python2.7/site-packages/pip/_vendor/lockfile/symlinklockfile.pytrelease6s   cC@stjj|jS(N(RR tislinkR(R ((sH/usr/lib/python2.7/site-packages/pip/_vendor/lockfile/symlinklockfile.pyR=scC@s.tjj|jo-tj|j|jkS(N(RR RRtreadlinkR (R ((sH/usr/lib/python2.7/site-packages/pip/_vendor/lockfile/symlinklockfile.pyR@scC@s,tjj|jr(tj|jndS(N(RR RRR(R ((sH/usr/lib/python2.7/site-packages/pip/_vendor/lockfile/symlinklockfile.pyt break_lockDsN( t__name__t __module__t__doc__RRRRRRRR(((sH/usr/lib/python2.7/site-packages/pip/_vendor/lockfile/symlinklockfile.pyR s #   ( t __future__RRRtRRRRRR(((sH/usr/lib/python2.7/site-packages/pip/_vendor/lockfile/symlinklockfile.pyts  (PKZnڲFyFy!site-packages/pip/_vendor/six.pycnu[ abcA@@sKdZddlmZddlZddlZddlZddlZddlZdZdZ ej ddkZ ej ddkZ ej dd!dakZ e refZefZefZeZeZejZnefZeefZeejfZeZeZejjd r$edcZnVd efd YZ ye!e Wne"k rjedeZn XedgZ[ dZ#dZ$defdYZ%de%fdYZ&dej'fdYZ(de%fdYZ)defdYZ*e*e+Z,de(fdYZ-e)dddde)d d!d"d#d e)d$d!d!d%d$e)d&d'd"d(d&e)d)d'd*e)d+d!d"d,d+e)d-d.d.d/d-e)d0d.d.d-d0e)d1d'd"d2d1e)d3d'e 
rd4nd5d6e)d7d'd8e)d9d:d;d<e)ddde)d=d=d>e)d?d?d>e)d@d@d>e)d2d'd"d2d1e)dAd!d"dBdAe)dCd!d!dDdCe&d"d'e&dEdFe&dGdHe&dIdJdKe&dLdMdLe&dNdOdPe&dQdRdSe&dTdUdVe&dWdXdYe&dZd[d\e&d]d^d_e&d`dadbe&dcdddee&dfdgdhe&dididje&dkdkdje&dldldje&dmdmdne&dodpe&dqdre&dsdte&dudvdue&dwdxe&dydzd{e&d|d}d~e&ddde&ddde&ddde&ddde&ddde&ddde&ddde&ddde&ddd~e&ddde&ddde&ddde&de+dde&de+dde&de+de+de&ddde&ddde&dddg>Z.ejdkr;e.e&ddg7Z.nxJe.D]BZ/e0e-e/j1e/e2e/e&rBe,j3e/de/j1qBqBW[/e.e-_.e-e+dZ4e,j3e4dde(fdYZ5e)ddde)ddde)ddde)ddde)ddde)ddde)ddde)ddde)ddde)ddde)d<dde)ddde)ddde)ddde)ddde)ddde)ddde)ddde)ddde)ddde)ddde)ddde)dddgZ6x!e6D]Z/e0e5e/j1e/q0W[/e6e5_.e,j3e5e+dddde(fdYZ7e)ddde)ddde)dddgZ8x!e8D]Z/e0e7e/j1e/qW[/e8e7_.e,j3e7e+dddde(fdYZ9e)ddde)ddde)ddde)ddde)ddde)ddde)ddde)ddde)ddde)ddde)ddde)ddde)ddde)ddde)ddde)ddde)ddde)ddde)ddde)ddde)ddde)ddde)ddde)ddde)ddde)ddde)ddde)ddde)ddde)ddde)ddde)ddde)dddg!Z:x!e:D]Z/e0e9e/j1e/q W[/e:e9_.e,j3e9e+dddde(fdYZ;e)ddde)ddde)ddde)dddgZ<x!e<D]Z/e0e;e/j1e/q W[/e<e;_.e,j3e;e+d d d d e(fd YZ=e)dddgZ>x!e>D]Z/e0e=e/j1e/q; W[/e>e=_.e,j3e=e+ddddej'fdYZ?e,j3e?e+dddZ@dZAe r dZBdZCdZDdZEdZFdZGn$dZBdZCdZDd ZEd!ZFd"ZGy eHZIWneJk r= d#ZInXeIZHy eKZKWneJk rj d$ZKnXe r d%ZLejMZNd&ZOeZPn7d'ZLd(ZNd)ZOd*efd+YZPeKZKe#eLd,ejQeBZRejQeCZSejQeDZTejQeEZUejQeFZVejQeGZWe rd-ZXd.ZYd/ZZd0Z[ej\d1Z]ej\d2Z^ej\d3Z_nQd4ZXd5ZYd6ZZd7Z[ej\d8Z]ej\d9Z^ej\d:Z_e#eXd;e#eYd<e#eZd=e#e[d>e rd?Z`d@ZaebZcddldZdedjedAjfZg[dejhdZiejjZkelZmddlnZnenjoZoenjpZpdBZqej d d krdCZrdDZsq4dEZrdFZsnpdGZ`dHZaecZcebZgdIZidJZkejtejuevZmddloZoeojoZoZpdKZqdCZrdDZse#e`dLe#eadMdNZwdOZxdPZye reze4j{dQZ|ddRZ~ndddSZ|e|dTej d dhkre|dUn)ej d dikre|dVn dWZeze4j{dXdZedkrdYZnej d djkrDeZdZZne#e~d[ej dd!dkkrejejd\Zn ejZd]Zd^Zd_ZgZe+Zejd`dk rge_nejr7xOeejD]>\ZZeej+dkrej1e+kreje=PqqW[[nejje,dS(ls6Utilities for writing code that runs on Python 2 and 3i(tabsolute_importNs'Benjamin Peterson s1.10.0iiitjavaiitXcB@seZdZRS(cC@sdS(NiiI((tself((s3/usr/lib/python2.7/site-packages/pip/_vendor/six.pyt__len__>s(t__name__t __module__R(((s3/usr/lib/python2.7/site-packages/pip/_vendor/six.pyR<si?cC@s ||_dS(s Add documentation to a function.N(t__doc__(tfunctdoc((s3/usr/lib/python2.7/site-packages/pip/_vendor/six.pyt_add_docKscC@st|tj|S(s7Import module, returning the module after the last dot.(t __import__tsystmodules(tname((s3/usr/lib/python2.7/site-packages/pip/_vendor/six.pyt_import_modulePs t _LazyDescrcB@seZdZdZRS(cC@s ||_dS(N(R(RR((s3/usr/lib/python2.7/site-packages/pip/_vendor/six.pyt__init__XscC@sN|j}t||j|yt|j|jWntk rInX|S(N(t_resolvetsetattrRtdelattrt __class__tAttributeError(Rtobjttptresult((s3/usr/lib/python2.7/site-packages/pip/_vendor/six.pyt__get__[s  (RRRR(((s3/usr/lib/python2.7/site-packages/pip/_vendor/six.pyRVs t MovedModulecB@s&eZddZdZdZRS(cC@sJtt|j|tr=|dkr1|}n||_n ||_dS(N(tsuperRRtPY3tNonetmod(RRtoldtnew((s3/usr/lib/python2.7/site-packages/pip/_vendor/six.pyRis    cC@s t|jS(N(RR(R((s3/usr/lib/python2.7/site-packages/pip/_vendor/six.pyRrscC@s/|j}t||}t||||S(N(RtgetattrR(Rtattrt_moduletvalue((s3/usr/lib/python2.7/site-packages/pip/_vendor/six.pyt __getattr__us N(RRRRRR&(((s3/usr/lib/python2.7/site-packages/pip/_vendor/six.pyRgs t _LazyModulecB@s eZdZdZgZRS(cC@s)tt|j||jj|_dS(N(RR'RRR(RR((s3/usr/lib/python2.7/site-packages/pip/_vendor/six.pyR~scC@s3ddg}|g|jD]}|j^q7}|S(NRR(t_moved_attributesR(RtattrsR#((s3/usr/lib/python2.7/site-packages/pip/_vendor/six.pyt__dir__s #(RRRR*R((((s3/usr/lib/python2.7/site-packages/pip/_vendor/six.pyR'|s  tMovedAttributecB@s 
eZdddZdZRS(cC@stt|j|trp|dkr1|}n||_|dkrd|dkr[|}qd|}n||_n'||_|dkr|}n||_dS(N(RR+RRRRR#(RRtold_modtnew_modtold_attrtnew_attr((s3/usr/lib/python2.7/site-packages/pip/_vendor/six.pyRs           cC@st|j}t||jS(N(RRR"R#(Rtmodule((s3/usr/lib/python2.7/site-packages/pip/_vendor/six.pyRsN(RRRRR(((s3/usr/lib/python2.7/site-packages/pip/_vendor/six.pyR+st_SixMetaPathImportercB@s_eZdZdZdZdZd dZdZdZ dZ dZ e Z RS( s A meta path importer to import six.moves and its submodules. This class implements a PEP302 finder and loader. It should be compatible with Python 2.5 and all existing versions of Python3 cC@s||_i|_dS(N(Rt known_modules(Rtsix_module_name((s3/usr/lib/python2.7/site-packages/pip/_vendor/six.pyRs cG@s-x&|D]}||j|jd|(RR6((s3/usr/lib/python2.7/site-packages/pip/_vendor/six.pyt is_packagescC@s|j|dS(s;Return None Required, if is_package is implementedN(R>R(RR6((s3/usr/lib/python2.7/site-packages/pip/_vendor/six.pytget_codes N( RRRRR7R8RR:R>RARDREt get_source(((s3/usr/lib/python2.7/site-packages/pip/_vendor/six.pyR1s       t _MovedItemscB@seZdZgZRS(sLazy loading of moved objects(RRRRB(((s3/usr/lib/python2.7/site-packages/pip/_vendor/six.pyRGst cStringIOtiotStringIOtfiltert itertoolstbuiltinstifiltert filterfalset ifilterfalsetinputt __builtin__t raw_inputtinternR tmaptimaptgetcwdtostgetcwdutgetcwdbtrangetxranget reload_modulet importlibtimptreloadtreducet functoolst shlex_quotetpipestshlextquotetUserDictt collectionstUserListt UserStringtziptizipt zip_longestt izip_longestt configparsert ConfigParsertcopyregtcopy_regtdbm_gnutgdbmsdbm.gnut _dummy_threadt dummy_threadthttp_cookiejart cookielibshttp.cookiejart http_cookiestCookies http.cookiest html_entitiesthtmlentitydefss html.entitiest html_parsert HTMLParsers html.parsert http_clientthttplibs http.clienttemail_mime_multipartsemail.MIMEMultipartsemail.mime.multiparttemail_mime_nonmultipartsemail.MIMENonMultipartsemail.mime.nonmultiparttemail_mime_textsemail.MIMETextsemail.mime.texttemail_mime_basesemail.MIMEBasesemail.mime.basetBaseHTTPServers http.servert CGIHTTPServertSimpleHTTPServertcPickletpickletqueuetQueuetreprlibtreprt socketservert SocketServert_threadtthreadttkintertTkinterttkinter_dialogtDialogstkinter.dialogttkinter_filedialogt FileDialogstkinter.filedialogttkinter_scrolledtextt ScrolledTextstkinter.scrolledtextttkinter_simpledialogt SimpleDialogstkinter.simpledialogt tkinter_tixtTixs tkinter.tixt tkinter_ttktttks tkinter.ttkttkinter_constantst Tkconstantsstkinter.constantst tkinter_dndtTkdnds tkinter.dndttkinter_colorchooserttkColorChooserstkinter.colorchooserttkinter_commondialogttkCommonDialogstkinter.commondialogttkinter_tkfiledialogt tkFileDialogt tkinter_fontttkFonts tkinter.fontttkinter_messageboxt tkMessageBoxstkinter.messageboxttkinter_tksimpledialogttkSimpleDialogt urllib_parses.moves.urllib_parses urllib.parset urllib_errors.moves.urllib_errors urllib.errorturllibs .moves.urllibturllib_robotparsert robotparsersurllib.robotparsert xmlrpc_clientt xmlrpclibs xmlrpc.clientt xmlrpc_servertSimpleXMLRPCServers xmlrpc.servertwin32twinregt_winregsmoves.s.movestmovestModule_six_moves_urllib_parsecB@seZdZRS(s7Lazy loading of moved objects in six.moves.urllib_parse(RRR(((s3/usr/lib/python2.7/site-packages/pip/_vendor/six.pyR@st ParseResultturlparset SplitResulttparse_qst parse_qslt urldefragturljointurlsplitt urlunparset urlunsplitt quote_plustunquotet unquote_plust urlencodet splitquerytsplittagt splitusert uses_fragmentt uses_netloct uses_paramst uses_queryt 
uses_relativesmoves.urllib_parsesmoves.urllib.parsetModule_six_moves_urllib_errorcB@seZdZRS(s7Lazy loading of moved objects in six.moves.urllib_error(RRR(((s3/usr/lib/python2.7/site-packages/pip/_vendor/six.pyRhstURLErrorturllib2t HTTPErrortContentTooShortErrors.moves.urllib.errorsmoves.urllib_errorsmoves.urllib.errortModule_six_moves_urllib_requestcB@seZdZRS(s9Lazy loading of moved objects in six.moves.urllib_request(RRR(((s3/usr/lib/python2.7/site-packages/pip/_vendor/six.pyR|sturlopensurllib.requesttinstall_openert build_openert pathname2urlt url2pathnamet getproxiestRequesttOpenerDirectortHTTPDefaultErrorHandlertHTTPRedirectHandlertHTTPCookieProcessort ProxyHandlert BaseHandlertHTTPPasswordMgrtHTTPPasswordMgrWithDefaultRealmtAbstractBasicAuthHandlertHTTPBasicAuthHandlertProxyBasicAuthHandlertAbstractDigestAuthHandlertHTTPDigestAuthHandlertProxyDigestAuthHandlert HTTPHandlert HTTPSHandlert FileHandlert FTPHandlertCacheFTPHandlertUnknownHandlertHTTPErrorProcessort urlretrievet urlcleanupt URLopenertFancyURLopenert proxy_bypasss.moves.urllib.requestsmoves.urllib_requestsmoves.urllib.requestt Module_six_moves_urllib_responsecB@seZdZRS(s:Lazy loading of moved objects in six.moves.urllib_response(RRR(((s3/usr/lib/python2.7/site-packages/pip/_vendor/six.pyRstaddbasesurllib.responset addclosehooktaddinfot addinfourls.moves.urllib.responsesmoves.urllib_responsesmoves.urllib.responset#Module_six_moves_urllib_robotparsercB@seZdZRS(s=Lazy loading of moved objects in six.moves.urllib_robotparser(RRR(((s3/usr/lib/python2.7/site-packages/pip/_vendor/six.pyRstRobotFileParsers.moves.urllib.robotparsersmoves.urllib_robotparsersmoves.urllib.robotparsertModule_six_moves_urllibcB@sheZdZgZejdZejdZejdZejdZ ejdZ dZ RS(sICreate a six.moves.urllib namespace that resembles the Python 3 namespacesmoves.urllib_parsesmoves.urllib_errorsmoves.urllib_requestsmoves.urllib_responsesmoves.urllib_robotparsercC@sdddddgS(NtparseterrortrequesttresponseR((R((s3/usr/lib/python2.7/site-packages/pip/_vendor/six.pyR*s( RRRRBt _importerR8RRRRRR*(((s3/usr/lib/python2.7/site-packages/pip/_vendor/six.pyRss moves.urllibcC@stt|j|dS(sAdd an item to six.moves.N(RRGR(tmove((s3/usr/lib/python2.7/site-packages/pip/_vendor/six.pytadd_movescC@s^ytt|WnFtk rYytj|=WqZtk rUtd|fqZXnXdS(sRemove item from six.moves.sno such move, %rN(RRGRRt__dict__R;(R((s3/usr/lib/python2.7/site-packages/pip/_vendor/six.pyt remove_moves  t__func__t__self__t __closure__t__code__t __defaults__t __globals__tim_functim_selft func_closuret func_codet func_defaultst func_globalscC@s |jS(N(tnext(tit((s3/usr/lib/python2.7/site-packages/pip/_vendor/six.pytadvance_iterator scC@stdt|jDS(Ncs@s|]}d|jkVqdS(t__call__N(R (t.0tklass((s3/usr/lib/python2.7/site-packages/pip/_vendor/six.pys s(tanyttypet__mro__(R((s3/usr/lib/python2.7/site-packages/pip/_vendor/six.pytcallablescC@s|S(N((tunbound((s3/usr/lib/python2.7/site-packages/pip/_vendor/six.pytget_unbound_functionscC@s|S(N((Rtcls((s3/usr/lib/python2.7/site-packages/pip/_vendor/six.pytcreate_unbound_methodscC@s|jS(N(R(R"((s3/usr/lib/python2.7/site-packages/pip/_vendor/six.pyR#"scC@stj|||jS(N(ttypest MethodTypeR(RR((s3/usr/lib/python2.7/site-packages/pip/_vendor/six.pytcreate_bound_method%scC@stj|d|S(N(R&R'R(RR$((s3/usr/lib/python2.7/site-packages/pip/_vendor/six.pyR%(stIteratorcB@seZdZRS(cC@st|j|S(N(Rt__next__(R((s3/usr/lib/python2.7/site-packages/pip/_vendor/six.pyR-s(RRR(((s3/usr/lib/python2.7/site-packages/pip/_vendor/six.pyR)+ss3Get the function out of a possibly unbound 
functioncK@st|j|S(N(titertkeys(tdtkw((s3/usr/lib/python2.7/site-packages/pip/_vendor/six.pytiterkeys>scK@st|j|S(N(R+tvalues(R-R.((s3/usr/lib/python2.7/site-packages/pip/_vendor/six.pyt itervaluesAscK@st|j|S(N(R+titems(R-R.((s3/usr/lib/python2.7/site-packages/pip/_vendor/six.pyt iteritemsDscK@st|j|S(N(R+tlists(R-R.((s3/usr/lib/python2.7/site-packages/pip/_vendor/six.pyt iterlistsGsR,R0R2cK@s |j|S(N(R/(R-R.((s3/usr/lib/python2.7/site-packages/pip/_vendor/six.pyR/PscK@s |j|S(N(R1(R-R.((s3/usr/lib/python2.7/site-packages/pip/_vendor/six.pyR1SscK@s |j|S(N(R3(R-R.((s3/usr/lib/python2.7/site-packages/pip/_vendor/six.pyR3VscK@s |j|S(N(R5(R-R.((s3/usr/lib/python2.7/site-packages/pip/_vendor/six.pyR5Ystviewkeyst viewvaluest viewitemss1Return an iterator over the keys of a dictionary.s3Return an iterator over the values of a dictionary.s?Return an iterator over the (key, value) pairs of a dictionary.sBReturn an iterator over the (key, [values]) pairs of a dictionary.cC@s |jdS(Nslatin-1(tencode(ts((s3/usr/lib/python2.7/site-packages/pip/_vendor/six.pytbkscC@s|S(N((R:((s3/usr/lib/python2.7/site-packages/pip/_vendor/six.pytunss>BtassertCountEqualtassertRaisesRegexptassertRegexpMatchestassertRaisesRegext assertRegexcC@s|S(N((R:((s3/usr/lib/python2.7/site-packages/pip/_vendor/six.pyR;scC@st|jdddS(Ns\\s\\\\tunicode_escape(tunicodetreplace(R:((s3/usr/lib/python2.7/site-packages/pip/_vendor/six.pyR<scC@st|dS(Ni(tord(tbs((s3/usr/lib/python2.7/site-packages/pip/_vendor/six.pytbyte2intscC@st||S(N(RE(tbufti((s3/usr/lib/python2.7/site-packages/pip/_vendor/six.pyt indexbytesstassertItemsEquals Byte literals Text literalcO@st|t||S(N(R"t_assertCountEqual(Rtargstkwargs((s3/usr/lib/python2.7/site-packages/pip/_vendor/six.pyR=scO@st|t||S(N(R"t_assertRaisesRegex(RRMRN((s3/usr/lib/python2.7/site-packages/pip/_vendor/six.pyR@scO@st|t||S(N(R"t _assertRegex(RRMRN((s3/usr/lib/python2.7/site-packages/pip/_vendor/six.pyRAstexeccC@sC|dkr|}n|j|k r9|j|n|dS(N(Rt __traceback__twith_traceback(RR%ttb((s3/usr/lib/python2.7/site-packages/pip/_vendor/six.pytreraises   cB@sc|dkrBejd}|j}|dkr<|j}n~n|dkrW|}nddUdS(sExecute code in a namespace.isexec _code_ in _globs_, _locs_N(RR t _getframet f_globalstf_locals(t_code_t_globs_t_locs_tframe((s3/usr/lib/python2.7/site-packages/pip/_vendor/six.pytexec_s      s9def reraise(tp, value, tb=None): raise tp, value, tb srdef raise_from(value, from_value): if from_value is None: raise value raise value from from_value sCdef raise_from(value, from_value): raise value from from_value cC@s |dS(N((R%t from_value((s3/usr/lib/python2.7/site-packages/pip/_vendor/six.pyt raise_fromstprintc @s|jdtjdkr%dSfd}t}|jdd}|dk rt|trpt}qt|tst dqn|jdd}|dk rt|trt}qt|tst dqn|rt dn|s0x*|D]}t|tr t}Pq q Wn|rQtd }td }n d }d }|dkrr|}n|dkr|}nx7t |D])\} }| r||n||qW||dS( s4The new-style print function for Python 2.4 and 2.5.tfileNc@st|tst|}nttrt|trjdk rtdd}|dkrrd}n|jj|}nj |dS(Nterrorststrict( R?t basestringtstrRaRCtencodingRR"R9twrite(tdataRb(tfp(s3/usr/lib/python2.7/site-packages/pip/_vendor/six.pyRgs  tsepssep must be None or a stringtendsend must be None or a strings$invalid keyword arguments to print()s t ( tpopR tstdoutRtFalseR?RCtTrueRet TypeErrort enumerate( RMRNRgt want_unicodeRjRktargtnewlinetspaceRI((Ris3/usr/lib/python2.7/site-packages/pip/_vendor/six.pytprint_sL              cO@sW|jdtj}|jdt}t|||rS|dk rS|jndS(NRatflush(tgetR RnRmRot_printRRx(RMRNRiRx((s3/usr/lib/python2.7/site-packages/pip/_vendor/six.pyRw s  sReraise an exception.c@sfd}|S(Nc@s(tj|}|_|S(N(Rbtwrapst 
__wrapped__(tf(tassignedtupdatedtwrapped(s3/usr/lib/python2.7/site-packages/pip/_vendor/six.pytwrappers ((RR~RR((R~RRs3/usr/lib/python2.7/site-packages/pip/_vendor/six.pyR{sc@s5dffdY}tj|ddiS(s%Create a base class with a metaclass.t metaclassc@seZfdZRS(c@s||S(N((R$Rt this_basesR-(tbasestmeta(s3/usr/lib/python2.7/site-packages/pip/_vendor/six.pyt__new__'s(RRR((RR(s3/usr/lib/python2.7/site-packages/pip/_vendor/six.pyR%sttemporary_class((RR(RRR((RRs3/usr/lib/python2.7/site-packages/pip/_vendor/six.pytwith_metaclass sc@sfd}|S(s6Class decorator for creating a class with a metaclass.c@s|jj}|jd}|dk rft|trE|g}nx|D]}|j|qLWn|jdd|jdd|j|j|S(Nt __slots__R t __weakref__( R tcopyRyRR?ReRmRt __bases__(R$t orig_varstslotst slots_var(R(s3/usr/lib/python2.7/site-packages/pip/_vendor/six.pyR.s   ((RR((Rs3/usr/lib/python2.7/site-packages/pip/_vendor/six.pyt add_metaclass,s cC@sJtrFd|jkr+td|jn|j|_d|_n|S(s A decorator that defines __unicode__ and __str__ methods under Python 2. Under Python 3 it does nothing. To support Python 2 and 3 with a single code base, define a __str__ method returning text and apply this decorator to the class. t__str__sY@python_2_unicode_compatible cannot be applied to %s because it doesn't define __str__().cS@s|jjdS(Nsutf-8(t __unicode__R9(R((s3/usr/lib/python2.7/site-packages/pip/_vendor/six.pytJt(tPY2R t ValueErrorRRR(R((s3/usr/lib/python2.7/site-packages/pip/_vendor/six.pytpython_2_unicode_compatible<s t__spec__(iiIiIill(ii(ii(ii(ii(Rt __future__RRbRLtoperatorR R&t __author__t __version__t version_infoRRtPY34Ret string_typestintt integer_typesRt class_typest text_typetbytest binary_typetmaxsizetMAXSIZERdtlongt ClassTypeRCtplatformt startswithtobjectRtlent OverflowErrorR RRRt ModuleTypeR'R+R1RRRGR(R#RRR?R7RRt_urllib_parse_moved_attributesRt_urllib_error_moved_attributesRt _urllib_request_moved_attributesRt!_urllib_response_moved_attributesRt$_urllib_robotparser_moved_attributesRR R t _meth_funct _meth_selft _func_closuret _func_codet_func_defaultst _func_globalsRRt NameErrorR!R#R'R(R%R)t attrgettertget_method_functiontget_method_selftget_function_closuretget_function_codetget_function_defaultstget_function_globalsR/R1R3R5t methodcallerR6R7R8R;R<tchrtunichrtstructtStructtpacktint2bytet itemgetterRGtgetitemRJR+t iterbytesRIRJtBytesIORLRORPtpartialRVRER=R@RAR"RMR]RRUR_RwRztWRAPPER_ASSIGNMENTStWRAPPER_UPDATESR{RRRRBt __package__tglobalsRyRtsubmodule_search_locationst meta_pathRrRItimportertappend(((s3/usr/lib/python2.7/site-packages/pip/_vendor/six.pyts               >                                                                                 5         PKZ`&site-packages/pip/_vendor/re-vendor.pynu[import os import sys import pip import glob import shutil here = os.path.abspath(os.path.dirname(__file__)) def usage(): print("Usage: re-vendor.py [clean|vendor]") sys.exit(1) def clean(): for fn in os.listdir(here): dirname = os.path.join(here, fn) if os.path.isdir(dirname): shutil.rmtree(dirname) # six is a single file, not a package os.unlink(os.path.join(here, 'six.py')) def vendor(): pip.main(['install', '-t', here, '-r', 'vendor.txt']) for dirname in glob.glob('*.egg-info'): shutil.rmtree(dirname) if __name__ == '__main__': if len(sys.argv) != 2: usage() if sys.argv[1] == 'clean': clean() elif sys.argv[1] == 'vendor': vendor() else: usage() PKZY &site-packages/pip/_vendor/__init__.pycnu[ abc@@sKdZddlmZddlZddlZddlZeZej j ej j e Z dZerGejej je dej ej (edededed ed ed ed ed ededededededededededededededededededed ed!ed"ed#ed$ed%ed&ed'ed(ed)ed*ed+ed,ed-ed.ed/ed0ndS(1s pip._vendor is for 
vendoring dependencies of pip to prevent needing pip to depend on something external. Files inside of pip._vendor should be considered immutable and should only be updated to versions from upstream. i(tabsolute_importNcC@sdjt|}y t|ttddWntk ry t|ttddWntk ruqXtj|tj|<|jdd\}}t tj||tj|nXdS(Ns{0}.{1}tlevelit.i( tformatt__name__t __import__tglobalstlocalst ImportErrortsystmodulestrsplittsetattr(t modulenamet vendored_nametbasethead((s8/usr/lib/python2.7/site-packages/pip/_vendor/__init__.pytvendoreds    s*.whlt cachecontroltcoloramatdistlibtdistrothtml5libtlockfiletsixs six.movesssix.moves.urllibt packagingspackaging.versionspackaging.specifierst pkg_resourcestprogresstretryingtrequestssrequests.packagessrequests.packages.urllib3s&requests.packages.urllib3._collectionss$requests.packages.urllib3.connections(requests.packages.urllib3.connectionpools!requests.packages.urllib3.contribs*requests.packages.urllib3.contrib.ntlmpools+requests.packages.urllib3.contrib.pyopenssls$requests.packages.urllib3.exceptionss requests.packages.urllib3.fieldss"requests.packages.urllib3.fileposts"requests.packages.urllib3.packagess/requests.packages.urllib3.packages.ordered_dicts&requests.packages.urllib3.packages.sixs5requests.packages.urllib3.packages.ssl_match_hostnamesErequests.packages.urllib3.packages.ssl_match_hostname._implementations%requests.packages.urllib3.poolmanagers!requests.packages.urllib3.requests"requests.packages.urllib3.responsesrequests.packages.urllib3.utils)requests.packages.urllib3.util.connections&requests.packages.urllib3.util.requests'requests.packages.urllib3.util.responses$requests.packages.urllib3.util.retrys#requests.packages.urllib3.util.ssl_s&requests.packages.urllib3.util.timeouts"requests.packages.urllib3.util.url(t__doc__t __future__Rtglobtos.pathtosR tFalset DEBUNDLEDtpathtabspathtdirnamet__file__t WHEEL_DIRRtjoin(((s8/usr/lib/python2.7/site-packages/pip/_vendor/__init__.pytsh    )                                          PKZwR{""-site-packages/pip/_vendor/progress/spinner.pynu[# -*- coding: utf-8 -*- # Copyright (c) 2012 Giorgos Verigakis # # Permission to use, copy, modify, and distribute this software for any # purpose with or without fee is hereby granted, provided that the above # copyright notice and this permission notice appear in all copies. # # THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES # WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF # MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR # ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES # WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN # ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF # OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. from . 
import Infinite from .helpers import WriteMixin class Spinner(WriteMixin, Infinite): message = '' phases = ('-', '\\', '|', '/') hide_cursor = True def update(self): i = self.index % len(self.phases) self.write(self.phases[i]) class PieSpinner(Spinner): phases = [u'◷', u'◶', u'◵', u'◴'] class MoonSpinner(Spinner): phases = [u'◑', u'◒', u'◐', u'◓'] class LineSpinner(Spinner): phases = [u'⎺', u'⎻', u'⎼', u'⎽', u'⎼', u'⎻'] PKZcWW.site-packages/pip/_vendor/progress/counter.pycnu[ abc@sddlmZmZddlmZdeefdYZdeefdYZdeefdYZd efd YZd S( i(tInfinitetProgress(t WriteMixintCountercBseZdZeZdZRS(tcCs|jt|jdS(N(twritetstrtindex(tself((s@/usr/lib/python2.7/site-packages/pip/_vendor/progress/counter.pytupdates(t__name__t __module__tmessagetTruet hide_cursorR (((s@/usr/lib/python2.7/site-packages/pip/_vendor/progress/counter.pyRst CountdowncBseZeZdZRS(cCs|jt|jdS(N(RRt remaining(R((s@/usr/lib/python2.7/site-packages/pip/_vendor/progress/counter.pyR s(R R R RR (((s@/usr/lib/python2.7/site-packages/pip/_vendor/progress/counter.pyRstStackc BseZd ZeZd ZRS( u u▁u▂u▃u▄u▅u▆u▇u█cCsGt|j}t|dt|j|}|j|j|dS(Ni(tlentphasestmintinttprogressR(Rtnphasesti((s@/usr/lib/python2.7/site-packages/pip/_vendor/progress/counter.pyR (s ( u u▁u▂u▃u▄u▅u▆u▇u█(R R RR RR (((s@/usr/lib/python2.7/site-packages/pip/_vendor/progress/counter.pyR$stPiecBseZdZRS(u○u◔u◑u◕u●(u○u◔u◑u◕u●(R R R(((s@/usr/lib/python2.7/site-packages/pip/_vendor/progress/counter.pyR.sN( RRRthelpersRRRRR(((s@/usr/lib/python2.7/site-packages/pip/_vendor/progress/counter.pyts  PKZ>>.site-packages/pip/_vendor/progress/spinner.pycnu[ abc@sddlmZddlmZdeefdYZdefdYZdefdYZd efd YZd S( i(tInfinite(t WriteMixintSpinnercBs#eZdZdZeZdZRS(tt-s\t|t/cCs.|jt|j}|j|j|dS(N(tindextlentphasestwrite(tselfti((s@/usr/lib/python2.7/site-packages/pip/_vendor/progress/spinner.pytupdates(Rs\RR(t__name__t __module__tmessageR tTruet hide_cursorR (((s@/usr/lib/python2.7/site-packages/pip/_vendor/progress/spinner.pyRst PieSpinnercBseZddddgZRS(u◷u◶u◵u◴(RRR (((s@/usr/lib/python2.7/site-packages/pip/_vendor/progress/spinner.pyRst MoonSpinnercBseZddddgZRS(u◑u◒u◐u◓(RRR (((s@/usr/lib/python2.7/site-packages/pip/_vendor/progress/spinner.pyR#st LineSpinnercBs eZddddddgZRS(u⎺u⎻u⎼u⎽(RRR (((s@/usr/lib/python2.7/site-packages/pip/_vendor/progress/spinner.pyR'sN(RRthelpersRRRRR(((s@/usr/lib/python2.7/site-packages/pip/_vendor/progress/spinner.pyts  PKZ/site-packages/pip/_vendor/progress/__init__.pycnu[ abc@ sddlmZddlmZddlmZddlmZddlm Z ddl m Z dZ de fd YZ d e fd YZd S( i(tdivision(tdeque(t timedelta(tceil(tstderr(ttimes1.2tInfinitecB seZeZdZdZdZedZedZ edZ dZ dZ dZ d d Zd ZRS( i cO sgd|_t|_|j|_td|j|_x*|jD]\}}t|||qCWdS(Nitmaxlen( tindexRtstart_tst_tsRt sma_windowt_dttitemstsetattr(tselftargstkwargstkeytval((sA/usr/lib/python2.7/site-packages/pip/_vendor/progress/__init__.pyt__init__s    cC s#|jdrdSt||dS(Nt_(t startswithtNonetgetattr(RR((sA/usr/lib/python2.7/site-packages/pip/_vendor/progress/__init__.pyt __getitem__'scC s'|jr#t|jt|jSdS(Ni(R tsumtlen(R((sA/usr/lib/python2.7/site-packages/pip/_vendor/progress/__init__.pytavg,scC stt|jS(N(tintRR (R((sA/usr/lib/python2.7/site-packages/pip/_vendor/progress/__init__.pytelapsed0scC std|jS(Ntseconds(RR(R((sA/usr/lib/python2.7/site-packages/pip/_vendor/progress/__init__.pyt elapsed_td4scC sdS(N((R((sA/usr/lib/python2.7/site-packages/pip/_vendor/progress/__init__.pytupdate8scC sdS(N((R((sA/usr/lib/python2.7/site-packages/pip/_vendor/progress/__init__.pytstart;scC sdS(N((R((sA/usr/lib/python2.7/site-packages/pip/_vendor/progress/__init__.pytfinish>sicC 
s`|dkrBt}||j|}|jj|||_n|j||_|jdS(Ni(RR R tappendRR!(Rtntnowtdt((sA/usr/lib/python2.7/site-packages/pip/_vendor/progress/__init__.pytnextAs   cc s.x|D]}|V|jqW|jdS(N(R(R#(Rtittx((sA/usr/lib/python2.7/site-packages/pip/_vendor/progress/__init__.pytiterKs (t__name__t __module__RtfileR RRtpropertyRRR R!R"R#R(R+(((sA/usr/lib/python2.7/site-packages/pip/_vendor/progress/__init__.pyRs      tProgresscB sweZdZedZedZedZedZedZdZ dZ dZ RS( cO s2tt|j|||jdd|_dS(Ntmaxid(tsuperR0RtgetR1(RRR((sA/usr/lib/python2.7/site-packages/pip/_vendor/progress/__init__.pyRSscC stt|j|jS(N(RRRt remaining(R((sA/usr/lib/python2.7/site-packages/pip/_vendor/progress/__init__.pytetaWscC std|jS(NR(RR5(R((sA/usr/lib/python2.7/site-packages/pip/_vendor/progress/__init__.pyteta_td[scC s |jdS(Nid(tprogress(R((sA/usr/lib/python2.7/site-packages/pip/_vendor/progress/__init__.pytpercent_scC std|j|jS(Ni(tminRR1(R((sA/usr/lib/python2.7/site-packages/pip/_vendor/progress/__init__.pyR7cscC st|j|jdS(Ni(R1R(R((sA/usr/lib/python2.7/site-packages/pip/_vendor/progress/__init__.pyR4gscC s|jdS(N(R!(R((sA/usr/lib/python2.7/site-packages/pip/_vendor/progress/__init__.pyR"kscC s||j}|j|dS(N(RR((RRtincr((sA/usr/lib/python2.7/site-packages/pip/_vendor/progress/__init__.pytgotons cc sUyt||_Wntk r&nXx|D]}|V|jq.W|jdS(N(RR1t TypeErrorR(R#(RR)R*((sA/usr/lib/python2.7/site-packages/pip/_vendor/progress/__init__.pyR+rs  ( R,R-RR/R5R6R8R7R4R"R;R+(((sA/usr/lib/python2.7/site-packages/pip/_vendor/progress/__init__.pyR0Rs   N(t __future__Rt collectionsRtdatetimeRtmathRtsysRRt __version__tobjectRR0(((sA/usr/lib/python2.7/site-packages/pip/_vendor/progress/__init__.pyts7PKZG>a a *site-packages/pip/_vendor/progress/bar.pyonu[ abc@sddlmZddlmZdeefdYZdefdYZdefdYZd efd YZd efd YZd efdYZ dS(i(tProgress(t WritelnMixintBarcBsAeZdZdZdZdZdZdZdZe Z dZ RS(i ts%(index)d/%(max)ds |s| t t#cCst|j|j}|j|}|j|}|j|}|j|}|j|}dj||j|||j |g}|j |dS(NR( tinttwidthtprogresstmessagetfillt empty_filltsuffixtjoint bar_prefixt bar_suffixtwriteln(tselft filled_lengtht empty_lengthR tbartemptyR tline((s</usr/lib/python2.7/site-packages/pip/_vendor/progress/bar.pytupdates      ( t__name__t __module__RR R RRR R tTruet hide_cursorR(((s</usr/lib/python2.7/site-packages/pip/_vendor/progress/bar.pyRst ChargingBarcBs&eZdZdZdZdZdZRS(s %(percent)d%%Ru∙u█(RRR RRR R (((s</usr/lib/python2.7/site-packages/pip/_vendor/progress/bar.pyR,s tFillingSquaresBarcBseZdZdZRS(u▢u▣(RRR R (((s</usr/lib/python2.7/site-packages/pip/_vendor/progress/bar.pyR4stFillingCirclesBarcBseZdZdZRS(u◯u◉(RRR R (((s</usr/lib/python2.7/site-packages/pip/_vendor/progress/bar.pyR9stIncrementalBarc BseZd Zd ZRS( u u▏u▎u▍u▌u▋u▊u▉u█c Cst|j}t||j|j}t|j|j}|j|}|||}|j|}|jd|}|dkr|j|nd}|jtd|t|} |j|} dj ||j ||| |j | g} |j | dS(NiiR( tlentphasesRRRR R tmaxR R RRR( Rtnphasestexpanded_lengthRRtphaseR RtcurrentRR R((s</usr/lib/python2.7/site-packages/pip/_vendor/progress/bar.pyRAs    ( u u▏u▎u▍u▌u▋u▊u▉u█(RRR!R(((s</usr/lib/python2.7/site-packages/pip/_vendor/progress/bar.pyR>stShadyBarcBseZdZRS(u u░u▒u▓u█(u u░u▒u▓u█(RRR!(((s</usr/lib/python2.7/site-packages/pip/_vendor/progress/bar.pyR'RsN( RRthelpersRRRRRRR'(((s</usr/lib/python2.7/site-packages/pip/_vendor/progress/bar.pytsPKZ>>.site-packages/pip/_vendor/progress/spinner.pyonu[ abc@sddlmZddlmZdeefdYZdefdYZdefdYZd efd YZd S( i(tInfinite(t WriteMixintSpinnercBs#eZdZdZeZdZRS(tt-s\t|t/cCs.|jt|j}|j|j|dS(N(tindextlentphasestwrite(tselfti((s@/usr/lib/python2.7/site-packages/pip/_vendor/progress/spinner.pytupdates(Rs\RR(t__name__t __module__tmessageR tTruet hide_cursorR 
(((s@/usr/lib/python2.7/site-packages/pip/_vendor/progress/spinner.pyRst PieSpinnercBseZddddgZRS(u◷u◶u◵u◴(RRR (((s@/usr/lib/python2.7/site-packages/pip/_vendor/progress/spinner.pyRst MoonSpinnercBseZddddgZRS(u◑u◒u◐u◓(RRR (((s@/usr/lib/python2.7/site-packages/pip/_vendor/progress/spinner.pyR#st LineSpinnercBs eZddddddgZRS(u⎺u⎻u⎼u⎽(RRR (((s@/usr/lib/python2.7/site-packages/pip/_vendor/progress/spinner.pyR'sN(RRthelpersRRRRR(((s@/usr/lib/python2.7/site-packages/pip/_vendor/progress/spinner.pyts  PKZcWW.site-packages/pip/_vendor/progress/counter.pyonu[ abc@sddlmZmZddlmZdeefdYZdeefdYZdeefdYZd efd YZd S( i(tInfinitetProgress(t WriteMixintCountercBseZdZeZdZRS(tcCs|jt|jdS(N(twritetstrtindex(tself((s@/usr/lib/python2.7/site-packages/pip/_vendor/progress/counter.pytupdates(t__name__t __module__tmessagetTruet hide_cursorR (((s@/usr/lib/python2.7/site-packages/pip/_vendor/progress/counter.pyRst CountdowncBseZeZdZRS(cCs|jt|jdS(N(RRt remaining(R((s@/usr/lib/python2.7/site-packages/pip/_vendor/progress/counter.pyR s(R R R RR (((s@/usr/lib/python2.7/site-packages/pip/_vendor/progress/counter.pyRstStackc BseZd ZeZd ZRS( u u▁u▂u▃u▄u▅u▆u▇u█cCsGt|j}t|dt|j|}|j|j|dS(Ni(tlentphasestmintinttprogressR(Rtnphasesti((s@/usr/lib/python2.7/site-packages/pip/_vendor/progress/counter.pyR (s ( u u▁u▂u▃u▄u▅u▆u▇u█(R R RR RR (((s@/usr/lib/python2.7/site-packages/pip/_vendor/progress/counter.pyR$stPiecBseZdZRS(u○u◔u◑u◕u●(u○u◔u◑u◕u●(R R R(((s@/usr/lib/python2.7/site-packages/pip/_vendor/progress/counter.pyR.sN( RRRthelpersRRRRR(((s@/usr/lib/python2.7/site-packages/pip/_vendor/progress/counter.pyts  PKZG>a a *site-packages/pip/_vendor/progress/bar.pycnu[ abc@sddlmZddlmZdeefdYZdefdYZdefdYZd efd YZd efd YZd efdYZ dS(i(tProgress(t WritelnMixintBarcBsAeZdZdZdZdZdZdZdZe Z dZ RS(i ts%(index)d/%(max)ds |s| t t#cCst|j|j}|j|}|j|}|j|}|j|}|j|}dj||j|||j |g}|j |dS(NR( tinttwidthtprogresstmessagetfillt empty_filltsuffixtjoint bar_prefixt bar_suffixtwriteln(tselft filled_lengtht empty_lengthR tbartemptyR tline((s</usr/lib/python2.7/site-packages/pip/_vendor/progress/bar.pytupdates      ( t__name__t __module__RR R RRR R tTruet hide_cursorR(((s</usr/lib/python2.7/site-packages/pip/_vendor/progress/bar.pyRst ChargingBarcBs&eZdZdZdZdZdZRS(s %(percent)d%%Ru∙u█(RRR RRR R (((s</usr/lib/python2.7/site-packages/pip/_vendor/progress/bar.pyR,s tFillingSquaresBarcBseZdZdZRS(u▢u▣(RRR R (((s</usr/lib/python2.7/site-packages/pip/_vendor/progress/bar.pyR4stFillingCirclesBarcBseZdZdZRS(u◯u◉(RRR R (((s</usr/lib/python2.7/site-packages/pip/_vendor/progress/bar.pyR9stIncrementalBarc BseZd Zd ZRS( u u▏u▎u▍u▌u▋u▊u▉u█c Cst|j}t||j|j}t|j|j}|j|}|||}|j|}|jd|}|dkr|j|nd}|jtd|t|} |j|} dj ||j ||| |j | g} |j | dS(NiiR( tlentphasesRRRR R tmaxR R RRR( Rtnphasestexpanded_lengthRRtphaseR RtcurrentRR R((s</usr/lib/python2.7/site-packages/pip/_vendor/progress/bar.pyRAs    ( u u▏u▎u▍u▌u▋u▊u▉u█(RRR!R(((s</usr/lib/python2.7/site-packages/pip/_vendor/progress/bar.pyR>stShadyBarcBseZdZRS(u u░u▒u▓u█(u u░u▒u▓u█(RRR!(((s</usr/lib/python2.7/site-packages/pip/_vendor/progress/bar.pyR'RsN( RRthelpersRRRRRRR'(((s</usr/lib/python2.7/site-packages/pip/_vendor/progress/bar.pytsPKZ*#.site-packages/pip/_vendor/progress/helpers.pyonu[ abc@sddlmZdZdZdefdYZdefdYZddlmZmZdd l m Z d efd YZ d S( i(tprint_functions[?25ls[?25ht WriteMixincBs,eZeZddZdZdZRS(cKstt|j|d|_|r1||_n|jjr|jrett ddd|jnt|jddd|j|jj ndS(Nitendttfile( tsuperRt__init__t_widthtmessageRtisattyt hide_cursortprintt HIDE_CURSORtflush(tselfRtkwargs((s@/usr/lib/python2.7/site-packages/pip/_vendor/progress/helpers.pyRs   
cCsz|jjrvd|j}|j|j}t||ddd|jt|jt||_|jjndS(NsRRR(RR RtljustR tmaxtlenR (Rtstbtc((s@/usr/lib/python2.7/site-packages/pip/_vendor/progress/helpers.pytwrite%s  cCs8|jjr4|jr4ttddd|jndS(NRRR(RR R R t SHOW_CURSOR(R((s@/usr/lib/python2.7/site-packages/pip/_vendor/progress/helpers.pytfinish-sN(t__name__t __module__tFalseR tNoneRRR(((s@/usr/lib/python2.7/site-packages/pip/_vendor/progress/helpers.pyRs t WritelnMixincBs5eZeZddZdZdZdZRS(cKs`tt|j||r(||_n|jjr\|jr\ttddd|jndS(NRRR( RRRRRR R R R (RRR((s@/usr/lib/python2.7/site-packages/pip/_vendor/progress/helpers.pyR5s  cCs/|jjr+tdddd|jndS(Ns RRR(RR R (R((s@/usr/lib/python2.7/site-packages/pip/_vendor/progress/helpers.pytclearln=scCsF|jjrB|jt|ddd|j|jjndS(NRRR(RR RR R (Rtline((s@/usr/lib/python2.7/site-packages/pip/_vendor/progress/helpers.pytwritelnAs cCsK|jjrGtd|j|jrGttddd|jqGndS(NRRR(RR R R R(R((s@/usr/lib/python2.7/site-packages/pip/_vendor/progress/helpers.pyRGs N( RRRR RRRR R(((s@/usr/lib/python2.7/site-packages/pip/_vendor/progress/helpers.pyR2s    (tsignaltSIGINT(texitt SigIntMixincBs eZdZdZdZRS(s6Registers a signal handler that calls finish on SIGINTcOs-tt|j||tt|jdS(N(RR$RR!R"t_sigint_handler(RtargsR((s@/usr/lib/python2.7/site-packages/pip/_vendor/progress/helpers.pyRUscCs|jtddS(Ni(RR#(Rtsignumtframe((s@/usr/lib/python2.7/site-packages/pip/_vendor/progress/helpers.pyR%Ys (RRt__doc__RR%(((s@/usr/lib/python2.7/site-packages/pip/_vendor/progress/helpers.pyR$Rs N( t __future__RR RtobjectRRR!R"tsysR#R$(((s@/usr/lib/python2.7/site-packages/pip/_vendor/progress/helpers.pytsPKZ/site-packages/pip/_vendor/progress/__init__.pyonu[ abc@ sddlmZddlmZddlmZddlmZddlm Z ddl m Z dZ de fd YZ d e fd YZd S( i(tdivision(tdeque(t timedelta(tceil(tstderr(ttimes1.2tInfinitecB seZeZdZdZdZedZedZ edZ dZ dZ dZ d d Zd ZRS( i cO sgd|_t|_|j|_td|j|_x*|jD]\}}t|||qCWdS(Nitmaxlen( tindexRtstart_tst_tsRt sma_windowt_dttitemstsetattr(tselftargstkwargstkeytval((sA/usr/lib/python2.7/site-packages/pip/_vendor/progress/__init__.pyt__init__s    cC s#|jdrdSt||dS(Nt_(t startswithtNonetgetattr(RR((sA/usr/lib/python2.7/site-packages/pip/_vendor/progress/__init__.pyt __getitem__'scC s'|jr#t|jt|jSdS(Ni(R tsumtlen(R((sA/usr/lib/python2.7/site-packages/pip/_vendor/progress/__init__.pytavg,scC stt|jS(N(tintRR (R((sA/usr/lib/python2.7/site-packages/pip/_vendor/progress/__init__.pytelapsed0scC std|jS(Ntseconds(RR(R((sA/usr/lib/python2.7/site-packages/pip/_vendor/progress/__init__.pyt elapsed_td4scC sdS(N((R((sA/usr/lib/python2.7/site-packages/pip/_vendor/progress/__init__.pytupdate8scC sdS(N((R((sA/usr/lib/python2.7/site-packages/pip/_vendor/progress/__init__.pytstart;scC sdS(N((R((sA/usr/lib/python2.7/site-packages/pip/_vendor/progress/__init__.pytfinish>sicC s`|dkrBt}||j|}|jj|||_n|j||_|jdS(Ni(RR R tappendRR!(Rtntnowtdt((sA/usr/lib/python2.7/site-packages/pip/_vendor/progress/__init__.pytnextAs   cc s.x|D]}|V|jqW|jdS(N(R(R#(Rtittx((sA/usr/lib/python2.7/site-packages/pip/_vendor/progress/__init__.pytiterKs (t__name__t __module__RtfileR RRtpropertyRRR R!R"R#R(R+(((sA/usr/lib/python2.7/site-packages/pip/_vendor/progress/__init__.pyRs      tProgresscB sweZdZedZedZedZedZedZdZ dZ dZ RS( cO s2tt|j|||jdd|_dS(Ntmaxid(tsuperR0RtgetR1(RRR((sA/usr/lib/python2.7/site-packages/pip/_vendor/progress/__init__.pyRSscC stt|j|jS(N(RRRt remaining(R((sA/usr/lib/python2.7/site-packages/pip/_vendor/progress/__init__.pytetaWscC std|jS(NR(RR5(R((sA/usr/lib/python2.7/site-packages/pip/_vendor/progress/__init__.pyteta_td[scC s |jdS(Nid(tprogress(R((sA/usr/lib/python2.7/site-packages/pip/_vendor/progress/__init__.pytpercent_scC 
std|j|jS(Ni(tminRR1(R((sA/usr/lib/python2.7/site-packages/pip/_vendor/progress/__init__.pyR7cscC st|j|jdS(Ni(R1R(R((sA/usr/lib/python2.7/site-packages/pip/_vendor/progress/__init__.pyR4gscC s|jdS(N(R!(R((sA/usr/lib/python2.7/site-packages/pip/_vendor/progress/__init__.pyR"kscC s||j}|j|dS(N(RR((RRtincr((sA/usr/lib/python2.7/site-packages/pip/_vendor/progress/__init__.pytgotons cc sUyt||_Wntk r&nXx|D]}|V|jq.W|jdS(N(RR1t TypeErrorR(R#(RR)R*((sA/usr/lib/python2.7/site-packages/pip/_vendor/progress/__init__.pyR+rs  ( R,R-RR/R5R6R8R7R4R"R;R+(((sA/usr/lib/python2.7/site-packages/pip/_vendor/progress/__init__.pyR0Rs   N(t __future__Rt collectionsRtdatetimeRtmathRtsysRRt __version__tobjectRR0(((sA/usr/lib/python2.7/site-packages/pip/_vendor/progress/__init__.pyts7PKZF} } )site-packages/pip/_vendor/progress/bar.pynu[# -*- coding: utf-8 -*- # Copyright (c) 2012 Giorgos Verigakis # # Permission to use, copy, modify, and distribute this software for any # purpose with or without fee is hereby granted, provided that the above # copyright notice and this permission notice appear in all copies. # # THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES # WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF # MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR # ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES # WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN # ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF # OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. from . import Progress from .helpers import WritelnMixin class Bar(WritelnMixin, Progress): width = 32 message = '' suffix = '%(index)d/%(max)d' bar_prefix = ' |' bar_suffix = '| ' empty_fill = ' ' fill = '#' hide_cursor = True def update(self): filled_length = int(self.width * self.progress) empty_length = self.width - filled_length message = self.message % self bar = self.fill * filled_length empty = self.empty_fill * empty_length suffix = self.suffix % self line = ''.join([message, self.bar_prefix, bar, empty, self.bar_suffix, suffix]) self.writeln(line) class ChargingBar(Bar): suffix = '%(percent)d%%' bar_prefix = ' ' bar_suffix = ' ' empty_fill = u'∙' fill = u'█' class FillingSquaresBar(ChargingBar): empty_fill = u'▢' fill = u'▣' class FillingCirclesBar(ChargingBar): empty_fill = u'◯' fill = u'◉' class IncrementalBar(Bar): phases = (u' ', u'▏', u'▎', u'▍', u'▌', u'▋', u'▊', u'▉', u'█') def update(self): nphases = len(self.phases) expanded_length = int(nphases * self.width * self.progress) filled_length = int(self.width * self.progress) empty_length = self.width - filled_length phase = expanded_length - (filled_length * nphases) message = self.message % self bar = self.phases[-1] * filled_length current = self.phases[phase] if phase > 0 else '' empty = self.empty_fill * max(0, empty_length - len(current)) suffix = self.suffix % self line = ''.join([message, self.bar_prefix, bar, current, empty, self.bar_suffix, suffix]) self.writeln(line) class ShadyBar(IncrementalBar): phases = (u' ', u'░', u'▒', u'▓', u'█') PKZN& & -site-packages/pip/_vendor/progress/helpers.pynu[# Copyright (c) 2012 Giorgos Verigakis # # Permission to use, copy, modify, and distribute this software for any # purpose with or without fee is hereby granted, provided that the above # copyright notice and this permission notice appear in all copies. 
# # THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES # WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF # MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR # ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES # WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN # ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF # OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. from __future__ import print_function HIDE_CURSOR = '\x1b[?25l' SHOW_CURSOR = '\x1b[?25h' class WriteMixin(object): hide_cursor = False def __init__(self, message=None, **kwargs): super(WriteMixin, self).__init__(**kwargs) self._width = 0 if message: self.message = message if self.file.isatty(): if self.hide_cursor: print(HIDE_CURSOR, end='', file=self.file) print(self.message, end='', file=self.file) self.file.flush() def write(self, s): if self.file.isatty(): b = '\b' * self._width c = s.ljust(self._width) print(b + c, end='', file=self.file) self._width = max(self._width, len(s)) self.file.flush() def finish(self): if self.file.isatty() and self.hide_cursor: print(SHOW_CURSOR, end='', file=self.file) class WritelnMixin(object): hide_cursor = False def __init__(self, message=None, **kwargs): super(WritelnMixin, self).__init__(**kwargs) if message: self.message = message if self.file.isatty() and self.hide_cursor: print(HIDE_CURSOR, end='', file=self.file) def clearln(self): if self.file.isatty(): print('\r\x1b[K', end='', file=self.file) def writeln(self, line): if self.file.isatty(): self.clearln() print(line, end='', file=self.file) self.file.flush() def finish(self): if self.file.isatty(): print(file=self.file) if self.hide_cursor: print(SHOW_CURSOR, end='', file=self.file) from signal import signal, SIGINT from sys import exit class SigIntMixin(object): """Registers a signal handler that calls finish on SIGINT""" def __init__(self, *args, **kwargs): super(SigIntMixin, self).__init__(*args, **kwargs) signal(SIGINT, self._sigint_handler) def _sigint_handler(self, signum, frame): self.finish() exit(0) PKZZq .site-packages/pip/_vendor/progress/__init__.pynu[# Copyright (c) 2012 Giorgos Verigakis # # Permission to use, copy, modify, and distribute this software for any # purpose with or without fee is hereby granted, provided that the above # copyright notice and this permission notice appear in all copies. # # THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES # WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF # MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR # ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES # WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN # ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF # OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. 
from __future__ import division from collections import deque from datetime import timedelta from math import ceil from sys import stderr from time import time __version__ = '1.2' class Infinite(object): file = stderr sma_window = 10 def __init__(self, *args, **kwargs): self.index = 0 self.start_ts = time() self._ts = self.start_ts self._dt = deque(maxlen=self.sma_window) for key, val in kwargs.items(): setattr(self, key, val) def __getitem__(self, key): if key.startswith('_'): return None return getattr(self, key, None) @property def avg(self): return sum(self._dt) / len(self._dt) if self._dt else 0 @property def elapsed(self): return int(time() - self.start_ts) @property def elapsed_td(self): return timedelta(seconds=self.elapsed) def update(self): pass def start(self): pass def finish(self): pass def next(self, n=1): if n > 0: now = time() dt = (now - self._ts) / n self._dt.append(dt) self._ts = now self.index = self.index + n self.update() def iter(self, it): for x in it: yield x self.next() self.finish() class Progress(Infinite): def __init__(self, *args, **kwargs): super(Progress, self).__init__(*args, **kwargs) self.max = kwargs.get('max', 100) @property def eta(self): return int(ceil(self.avg * self.remaining)) @property def eta_td(self): return timedelta(seconds=self.eta) @property def percent(self): return self.progress * 100 @property def progress(self): return min(1, self.index / self.max) @property def remaining(self): return max(self.max - self.index, 0) def start(self): self.update() def goto(self, index): incr = index - self.index self.next(incr) def iter(self, it): try: self.max = len(it) except TypeError: pass for x in it: yield x self.next() self.finish() PKZ*0-site-packages/pip/_vendor/progress/counter.pynu[# -*- coding: utf-8 -*- # Copyright (c) 2012 Giorgos Verigakis # # Permission to use, copy, modify, and distribute this software for any # purpose with or without fee is hereby granted, provided that the above # copyright notice and this permission notice appear in all copies. # # THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES # WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF # MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR # ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES # WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN # ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF # OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. from . 
import Infinite, Progress from .helpers import WriteMixin class Counter(WriteMixin, Infinite): message = '' hide_cursor = True def update(self): self.write(str(self.index)) class Countdown(WriteMixin, Progress): hide_cursor = True def update(self): self.write(str(self.remaining)) class Stack(WriteMixin, Progress): phases = (u' ', u'▁', u'▂', u'▃', u'▄', u'▅', u'▆', u'▇', u'█') hide_cursor = True def update(self): nphases = len(self.phases) i = min(nphases - 1, int(self.progress * nphases)) self.write(self.phases[i]) class Pie(Stack): phases = (u'○', u'◔', u'◑', u'◕', u'●') PKZ*#.site-packages/pip/_vendor/progress/helpers.pycnu[ abc@sddlmZdZdZdefdYZdefdYZddlmZmZdd l m Z d efd YZ d S( i(tprint_functions[?25ls[?25ht WriteMixincBs,eZeZddZdZdZRS(cKstt|j|d|_|r1||_n|jjr|jrett ddd|jnt|jddd|j|jj ndS(Nitendttfile( tsuperRt__init__t_widthtmessageRtisattyt hide_cursortprintt HIDE_CURSORtflush(tselfRtkwargs((s@/usr/lib/python2.7/site-packages/pip/_vendor/progress/helpers.pyRs   cCsz|jjrvd|j}|j|j}t||ddd|jt|jt||_|jjndS(NsRRR(RR RtljustR tmaxtlenR (Rtstbtc((s@/usr/lib/python2.7/site-packages/pip/_vendor/progress/helpers.pytwrite%s  cCs8|jjr4|jr4ttddd|jndS(NRRR(RR R R t SHOW_CURSOR(R((s@/usr/lib/python2.7/site-packages/pip/_vendor/progress/helpers.pytfinish-sN(t__name__t __module__tFalseR tNoneRRR(((s@/usr/lib/python2.7/site-packages/pip/_vendor/progress/helpers.pyRs t WritelnMixincBs5eZeZddZdZdZdZRS(cKs`tt|j||r(||_n|jjr\|jr\ttddd|jndS(NRRR( RRRRRR R R R (RRR((s@/usr/lib/python2.7/site-packages/pip/_vendor/progress/helpers.pyR5s  cCs/|jjr+tdddd|jndS(Ns RRR(RR R (R((s@/usr/lib/python2.7/site-packages/pip/_vendor/progress/helpers.pytclearln=scCsF|jjrB|jt|ddd|j|jjndS(NRRR(RR RR R (Rtline((s@/usr/lib/python2.7/site-packages/pip/_vendor/progress/helpers.pytwritelnAs cCsK|jjrGtd|j|jrGttddd|jqGndS(NRRR(RR R R R(R((s@/usr/lib/python2.7/site-packages/pip/_vendor/progress/helpers.pyRGs N( RRRR RRRR R(((s@/usr/lib/python2.7/site-packages/pip/_vendor/progress/helpers.pyR2s    (tsignaltSIGINT(texitt SigIntMixincBs eZdZdZdZRS(s6Registers a signal handler that calls finish on SIGINTcOs-tt|j||tt|jdS(N(RR$RR!R"t_sigint_handler(RtargsR((s@/usr/lib/python2.7/site-packages/pip/_vendor/progress/helpers.pyRUscCs|jtddS(Ni(RR#(Rtsignumtframe((s@/usr/lib/python2.7/site-packages/pip/_vendor/progress/helpers.pyR%Ys (RRt__doc__RR%(((s@/usr/lib/python2.7/site-packages/pip/_vendor/progress/helpers.pyR$Rs N( t __future__RR RtobjectRRR!R"tsysR#R$(((s@/usr/lib/python2.7/site-packages/pip/_vendor/progress/helpers.pytsPKZXMZuu site-packages/pip/_vendor/six.pynu["""Utilities for writing code that runs on Python 2 and 3""" # Copyright (c) 2010-2015 Benjamin Peterson # # Permission is hereby granted, free of charge, to any person obtaining a copy # of this software and associated documentation files (the "Software"), to deal # in the Software without restriction, including without limitation the rights # to use, copy, modify, merge, publish, distribute, sublicense, and/or sell # copies of the Software, and to permit persons to whom the Software is # furnished to do so, subject to the following conditions: # # The above copyright notice and this permission notice shall be included in all # copies or substantial portions of the Software. # # THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR # IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, # FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. 
IN NO EVENT SHALL THE # AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER # LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, # OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE # SOFTWARE. from __future__ import absolute_import import functools import itertools import operator import sys import types __author__ = "Benjamin Peterson " __version__ = "1.10.0" # Useful for very coarse version differentiation. PY2 = sys.version_info[0] == 2 PY3 = sys.version_info[0] == 3 PY34 = sys.version_info[0:2] >= (3, 4) if PY3: string_types = str, integer_types = int, class_types = type, text_type = str binary_type = bytes MAXSIZE = sys.maxsize else: string_types = basestring, integer_types = (int, long) class_types = (type, types.ClassType) text_type = unicode binary_type = str if sys.platform.startswith("java"): # Jython always uses 32 bits. MAXSIZE = int((1 << 31) - 1) else: # It's possible to have sizeof(long) != sizeof(Py_ssize_t). class X(object): def __len__(self): return 1 << 31 try: len(X()) except OverflowError: # 32-bit MAXSIZE = int((1 << 31) - 1) else: # 64-bit MAXSIZE = int((1 << 63) - 1) del X def _add_doc(func, doc): """Add documentation to a function.""" func.__doc__ = doc def _import_module(name): """Import module, returning the module after the last dot.""" __import__(name) return sys.modules[name] class _LazyDescr(object): def __init__(self, name): self.name = name def __get__(self, obj, tp): result = self._resolve() setattr(obj, self.name, result) # Invokes __set__. try: # This is a bit ugly, but it avoids running this again by # removing this descriptor. delattr(obj.__class__, self.name) except AttributeError: pass return result class MovedModule(_LazyDescr): def __init__(self, name, old, new=None): super(MovedModule, self).__init__(name) if PY3: if new is None: new = name self.mod = new else: self.mod = old def _resolve(self): return _import_module(self.mod) def __getattr__(self, attr): _module = self._resolve() value = getattr(_module, attr) setattr(self, attr, value) return value class _LazyModule(types.ModuleType): def __init__(self, name): super(_LazyModule, self).__init__(name) self.__doc__ = self.__class__.__doc__ def __dir__(self): attrs = ["__doc__", "__name__"] attrs += [attr.name for attr in self._moved_attributes] return attrs # Subclasses should override this _moved_attributes = [] class MovedAttribute(_LazyDescr): def __init__(self, name, old_mod, new_mod, old_attr=None, new_attr=None): super(MovedAttribute, self).__init__(name) if PY3: if new_mod is None: new_mod = name self.mod = new_mod if new_attr is None: if old_attr is None: new_attr = name else: new_attr = old_attr self.attr = new_attr else: self.mod = old_mod if old_attr is None: old_attr = name self.attr = old_attr def _resolve(self): module = _import_module(self.mod) return getattr(module, self.attr) class _SixMetaPathImporter(object): """ A meta path importer to import six.moves and its submodules. This class implements a PEP302 finder and loader. It should be compatible with Python 2.5 and all existing versions of Python3 """ def __init__(self, six_module_name): self.name = six_module_name self.known_modules = {} def _add_module(self, mod, *fullnames): for fullname in fullnames: self.known_modules[self.name + "." + fullname] = mod def _get_module(self, fullname): return self.known_modules[self.name + "." 
+ fullname] def find_module(self, fullname, path=None): if fullname in self.known_modules: return self return None def __get_module(self, fullname): try: return self.known_modules[fullname] except KeyError: raise ImportError("This loader does not know module " + fullname) def load_module(self, fullname): try: # in case of a reload return sys.modules[fullname] except KeyError: pass mod = self.__get_module(fullname) if isinstance(mod, MovedModule): mod = mod._resolve() else: mod.__loader__ = self sys.modules[fullname] = mod return mod def is_package(self, fullname): """ Return true, if the named module is a package. We need this method to get correct spec objects with Python 3.4 (see PEP451) """ return hasattr(self.__get_module(fullname), "__path__") def get_code(self, fullname): """Return None Required, if is_package is implemented""" self.__get_module(fullname) # eventually raises ImportError return None get_source = get_code # same as get_code _importer = _SixMetaPathImporter(__name__) class _MovedItems(_LazyModule): """Lazy loading of moved objects""" __path__ = [] # mark as package _moved_attributes = [ MovedAttribute("cStringIO", "cStringIO", "io", "StringIO"), MovedAttribute("filter", "itertools", "builtins", "ifilter", "filter"), MovedAttribute("filterfalse", "itertools", "itertools", "ifilterfalse", "filterfalse"), MovedAttribute("input", "__builtin__", "builtins", "raw_input", "input"), MovedAttribute("intern", "__builtin__", "sys"), MovedAttribute("map", "itertools", "builtins", "imap", "map"), MovedAttribute("getcwd", "os", "os", "getcwdu", "getcwd"), MovedAttribute("getcwdb", "os", "os", "getcwd", "getcwdb"), MovedAttribute("range", "__builtin__", "builtins", "xrange", "range"), MovedAttribute("reload_module", "__builtin__", "importlib" if PY34 else "imp", "reload"), MovedAttribute("reduce", "__builtin__", "functools"), MovedAttribute("shlex_quote", "pipes", "shlex", "quote"), MovedAttribute("StringIO", "StringIO", "io"), MovedAttribute("UserDict", "UserDict", "collections"), MovedAttribute("UserList", "UserList", "collections"), MovedAttribute("UserString", "UserString", "collections"), MovedAttribute("xrange", "__builtin__", "builtins", "xrange", "range"), MovedAttribute("zip", "itertools", "builtins", "izip", "zip"), MovedAttribute("zip_longest", "itertools", "itertools", "izip_longest", "zip_longest"), MovedModule("builtins", "__builtin__"), MovedModule("configparser", "ConfigParser"), MovedModule("copyreg", "copy_reg"), MovedModule("dbm_gnu", "gdbm", "dbm.gnu"), MovedModule("_dummy_thread", "dummy_thread", "_dummy_thread"), MovedModule("http_cookiejar", "cookielib", "http.cookiejar"), MovedModule("http_cookies", "Cookie", "http.cookies"), MovedModule("html_entities", "htmlentitydefs", "html.entities"), MovedModule("html_parser", "HTMLParser", "html.parser"), MovedModule("http_client", "httplib", "http.client"), MovedModule("email_mime_multipart", "email.MIMEMultipart", "email.mime.multipart"), MovedModule("email_mime_nonmultipart", "email.MIMENonMultipart", "email.mime.nonmultipart"), MovedModule("email_mime_text", "email.MIMEText", "email.mime.text"), MovedModule("email_mime_base", "email.MIMEBase", "email.mime.base"), MovedModule("BaseHTTPServer", "BaseHTTPServer", "http.server"), MovedModule("CGIHTTPServer", "CGIHTTPServer", "http.server"), MovedModule("SimpleHTTPServer", "SimpleHTTPServer", "http.server"), MovedModule("cPickle", "cPickle", "pickle"), MovedModule("queue", "Queue"), MovedModule("reprlib", "repr"), MovedModule("socketserver", "SocketServer"), 
MovedModule("_thread", "thread", "_thread"), MovedModule("tkinter", "Tkinter"), MovedModule("tkinter_dialog", "Dialog", "tkinter.dialog"), MovedModule("tkinter_filedialog", "FileDialog", "tkinter.filedialog"), MovedModule("tkinter_scrolledtext", "ScrolledText", "tkinter.scrolledtext"), MovedModule("tkinter_simpledialog", "SimpleDialog", "tkinter.simpledialog"), MovedModule("tkinter_tix", "Tix", "tkinter.tix"), MovedModule("tkinter_ttk", "ttk", "tkinter.ttk"), MovedModule("tkinter_constants", "Tkconstants", "tkinter.constants"), MovedModule("tkinter_dnd", "Tkdnd", "tkinter.dnd"), MovedModule("tkinter_colorchooser", "tkColorChooser", "tkinter.colorchooser"), MovedModule("tkinter_commondialog", "tkCommonDialog", "tkinter.commondialog"), MovedModule("tkinter_tkfiledialog", "tkFileDialog", "tkinter.filedialog"), MovedModule("tkinter_font", "tkFont", "tkinter.font"), MovedModule("tkinter_messagebox", "tkMessageBox", "tkinter.messagebox"), MovedModule("tkinter_tksimpledialog", "tkSimpleDialog", "tkinter.simpledialog"), MovedModule("urllib_parse", __name__ + ".moves.urllib_parse", "urllib.parse"), MovedModule("urllib_error", __name__ + ".moves.urllib_error", "urllib.error"), MovedModule("urllib", __name__ + ".moves.urllib", __name__ + ".moves.urllib"), MovedModule("urllib_robotparser", "robotparser", "urllib.robotparser"), MovedModule("xmlrpc_client", "xmlrpclib", "xmlrpc.client"), MovedModule("xmlrpc_server", "SimpleXMLRPCServer", "xmlrpc.server"), ] # Add windows specific modules. if sys.platform == "win32": _moved_attributes += [ MovedModule("winreg", "_winreg"), ] for attr in _moved_attributes: setattr(_MovedItems, attr.name, attr) if isinstance(attr, MovedModule): _importer._add_module(attr, "moves." + attr.name) del attr _MovedItems._moved_attributes = _moved_attributes moves = _MovedItems(__name__ + ".moves") _importer._add_module(moves, "moves") class Module_six_moves_urllib_parse(_LazyModule): """Lazy loading of moved objects in six.moves.urllib_parse""" _urllib_parse_moved_attributes = [ MovedAttribute("ParseResult", "urlparse", "urllib.parse"), MovedAttribute("SplitResult", "urlparse", "urllib.parse"), MovedAttribute("parse_qs", "urlparse", "urllib.parse"), MovedAttribute("parse_qsl", "urlparse", "urllib.parse"), MovedAttribute("urldefrag", "urlparse", "urllib.parse"), MovedAttribute("urljoin", "urlparse", "urllib.parse"), MovedAttribute("urlparse", "urlparse", "urllib.parse"), MovedAttribute("urlsplit", "urlparse", "urllib.parse"), MovedAttribute("urlunparse", "urlparse", "urllib.parse"), MovedAttribute("urlunsplit", "urlparse", "urllib.parse"), MovedAttribute("quote", "urllib", "urllib.parse"), MovedAttribute("quote_plus", "urllib", "urllib.parse"), MovedAttribute("unquote", "urllib", "urllib.parse"), MovedAttribute("unquote_plus", "urllib", "urllib.parse"), MovedAttribute("urlencode", "urllib", "urllib.parse"), MovedAttribute("splitquery", "urllib", "urllib.parse"), MovedAttribute("splittag", "urllib", "urllib.parse"), MovedAttribute("splituser", "urllib", "urllib.parse"), MovedAttribute("uses_fragment", "urlparse", "urllib.parse"), MovedAttribute("uses_netloc", "urlparse", "urllib.parse"), MovedAttribute("uses_params", "urlparse", "urllib.parse"), MovedAttribute("uses_query", "urlparse", "urllib.parse"), MovedAttribute("uses_relative", "urlparse", "urllib.parse"), ] for attr in _urllib_parse_moved_attributes: setattr(Module_six_moves_urllib_parse, attr.name, attr) del attr Module_six_moves_urllib_parse._moved_attributes = _urllib_parse_moved_attributes 
_importer._add_module(Module_six_moves_urllib_parse(__name__ + ".moves.urllib_parse"), "moves.urllib_parse", "moves.urllib.parse") class Module_six_moves_urllib_error(_LazyModule): """Lazy loading of moved objects in six.moves.urllib_error""" _urllib_error_moved_attributes = [ MovedAttribute("URLError", "urllib2", "urllib.error"), MovedAttribute("HTTPError", "urllib2", "urllib.error"), MovedAttribute("ContentTooShortError", "urllib", "urllib.error"), ] for attr in _urllib_error_moved_attributes: setattr(Module_six_moves_urllib_error, attr.name, attr) del attr Module_six_moves_urllib_error._moved_attributes = _urllib_error_moved_attributes _importer._add_module(Module_six_moves_urllib_error(__name__ + ".moves.urllib.error"), "moves.urllib_error", "moves.urllib.error") class Module_six_moves_urllib_request(_LazyModule): """Lazy loading of moved objects in six.moves.urllib_request""" _urllib_request_moved_attributes = [ MovedAttribute("urlopen", "urllib2", "urllib.request"), MovedAttribute("install_opener", "urllib2", "urllib.request"), MovedAttribute("build_opener", "urllib2", "urllib.request"), MovedAttribute("pathname2url", "urllib", "urllib.request"), MovedAttribute("url2pathname", "urllib", "urllib.request"), MovedAttribute("getproxies", "urllib", "urllib.request"), MovedAttribute("Request", "urllib2", "urllib.request"), MovedAttribute("OpenerDirector", "urllib2", "urllib.request"), MovedAttribute("HTTPDefaultErrorHandler", "urllib2", "urllib.request"), MovedAttribute("HTTPRedirectHandler", "urllib2", "urllib.request"), MovedAttribute("HTTPCookieProcessor", "urllib2", "urllib.request"), MovedAttribute("ProxyHandler", "urllib2", "urllib.request"), MovedAttribute("BaseHandler", "urllib2", "urllib.request"), MovedAttribute("HTTPPasswordMgr", "urllib2", "urllib.request"), MovedAttribute("HTTPPasswordMgrWithDefaultRealm", "urllib2", "urllib.request"), MovedAttribute("AbstractBasicAuthHandler", "urllib2", "urllib.request"), MovedAttribute("HTTPBasicAuthHandler", "urllib2", "urllib.request"), MovedAttribute("ProxyBasicAuthHandler", "urllib2", "urllib.request"), MovedAttribute("AbstractDigestAuthHandler", "urllib2", "urllib.request"), MovedAttribute("HTTPDigestAuthHandler", "urllib2", "urllib.request"), MovedAttribute("ProxyDigestAuthHandler", "urllib2", "urllib.request"), MovedAttribute("HTTPHandler", "urllib2", "urllib.request"), MovedAttribute("HTTPSHandler", "urllib2", "urllib.request"), MovedAttribute("FileHandler", "urllib2", "urllib.request"), MovedAttribute("FTPHandler", "urllib2", "urllib.request"), MovedAttribute("CacheFTPHandler", "urllib2", "urllib.request"), MovedAttribute("UnknownHandler", "urllib2", "urllib.request"), MovedAttribute("HTTPErrorProcessor", "urllib2", "urllib.request"), MovedAttribute("urlretrieve", "urllib", "urllib.request"), MovedAttribute("urlcleanup", "urllib", "urllib.request"), MovedAttribute("URLopener", "urllib", "urllib.request"), MovedAttribute("FancyURLopener", "urllib", "urllib.request"), MovedAttribute("proxy_bypass", "urllib", "urllib.request"), ] for attr in _urllib_request_moved_attributes: setattr(Module_six_moves_urllib_request, attr.name, attr) del attr Module_six_moves_urllib_request._moved_attributes = _urllib_request_moved_attributes _importer._add_module(Module_six_moves_urllib_request(__name__ + ".moves.urllib.request"), "moves.urllib_request", "moves.urllib.request") class Module_six_moves_urllib_response(_LazyModule): """Lazy loading of moved objects in six.moves.urllib_response""" _urllib_response_moved_attributes = [ 
MovedAttribute("addbase", "urllib", "urllib.response"), MovedAttribute("addclosehook", "urllib", "urllib.response"), MovedAttribute("addinfo", "urllib", "urllib.response"), MovedAttribute("addinfourl", "urllib", "urllib.response"), ] for attr in _urllib_response_moved_attributes: setattr(Module_six_moves_urllib_response, attr.name, attr) del attr Module_six_moves_urllib_response._moved_attributes = _urllib_response_moved_attributes _importer._add_module(Module_six_moves_urllib_response(__name__ + ".moves.urllib.response"), "moves.urllib_response", "moves.urllib.response") class Module_six_moves_urllib_robotparser(_LazyModule): """Lazy loading of moved objects in six.moves.urllib_robotparser""" _urllib_robotparser_moved_attributes = [ MovedAttribute("RobotFileParser", "robotparser", "urllib.robotparser"), ] for attr in _urllib_robotparser_moved_attributes: setattr(Module_six_moves_urllib_robotparser, attr.name, attr) del attr Module_six_moves_urllib_robotparser._moved_attributes = _urllib_robotparser_moved_attributes _importer._add_module(Module_six_moves_urllib_robotparser(__name__ + ".moves.urllib.robotparser"), "moves.urllib_robotparser", "moves.urllib.robotparser") class Module_six_moves_urllib(types.ModuleType): """Create a six.moves.urllib namespace that resembles the Python 3 namespace""" __path__ = [] # mark as package parse = _importer._get_module("moves.urllib_parse") error = _importer._get_module("moves.urllib_error") request = _importer._get_module("moves.urllib_request") response = _importer._get_module("moves.urllib_response") robotparser = _importer._get_module("moves.urllib_robotparser") def __dir__(self): return ['parse', 'error', 'request', 'response', 'robotparser'] _importer._add_module(Module_six_moves_urllib(__name__ + ".moves.urllib"), "moves.urllib") def add_move(move): """Add an item to six.moves.""" setattr(_MovedItems, move.name, move) def remove_move(name): """Remove item from six.moves.""" try: delattr(_MovedItems, name) except AttributeError: try: del moves.__dict__[name] except KeyError: raise AttributeError("no such move, %r" % (name,)) if PY3: _meth_func = "__func__" _meth_self = "__self__" _func_closure = "__closure__" _func_code = "__code__" _func_defaults = "__defaults__" _func_globals = "__globals__" else: _meth_func = "im_func" _meth_self = "im_self" _func_closure = "func_closure" _func_code = "func_code" _func_defaults = "func_defaults" _func_globals = "func_globals" try: advance_iterator = next except NameError: def advance_iterator(it): return it.next() next = advance_iterator try: callable = callable except NameError: def callable(obj): return any("__call__" in klass.__dict__ for klass in type(obj).__mro__) if PY3: def get_unbound_function(unbound): return unbound create_bound_method = types.MethodType def create_unbound_method(func, cls): return func Iterator = object else: def get_unbound_function(unbound): return unbound.im_func def create_bound_method(func, obj): return types.MethodType(func, obj, obj.__class__) def create_unbound_method(func, cls): return types.MethodType(func, None, cls) class Iterator(object): def next(self): return type(self).__next__(self) callable = callable _add_doc(get_unbound_function, """Get the function out of a possibly unbound function""") get_method_function = operator.attrgetter(_meth_func) get_method_self = operator.attrgetter(_meth_self) get_function_closure = operator.attrgetter(_func_closure) get_function_code = operator.attrgetter(_func_code) get_function_defaults = operator.attrgetter(_func_defaults) 
get_function_globals = operator.attrgetter(_func_globals) if PY3: def iterkeys(d, **kw): return iter(d.keys(**kw)) def itervalues(d, **kw): return iter(d.values(**kw)) def iteritems(d, **kw): return iter(d.items(**kw)) def iterlists(d, **kw): return iter(d.lists(**kw)) viewkeys = operator.methodcaller("keys") viewvalues = operator.methodcaller("values") viewitems = operator.methodcaller("items") else: def iterkeys(d, **kw): return d.iterkeys(**kw) def itervalues(d, **kw): return d.itervalues(**kw) def iteritems(d, **kw): return d.iteritems(**kw) def iterlists(d, **kw): return d.iterlists(**kw) viewkeys = operator.methodcaller("viewkeys") viewvalues = operator.methodcaller("viewvalues") viewitems = operator.methodcaller("viewitems") _add_doc(iterkeys, "Return an iterator over the keys of a dictionary.") _add_doc(itervalues, "Return an iterator over the values of a dictionary.") _add_doc(iteritems, "Return an iterator over the (key, value) pairs of a dictionary.") _add_doc(iterlists, "Return an iterator over the (key, [values]) pairs of a dictionary.") if PY3: def b(s): return s.encode("latin-1") def u(s): return s unichr = chr import struct int2byte = struct.Struct(">B").pack del struct byte2int = operator.itemgetter(0) indexbytes = operator.getitem iterbytes = iter import io StringIO = io.StringIO BytesIO = io.BytesIO _assertCountEqual = "assertCountEqual" if sys.version_info[1] <= 1: _assertRaisesRegex = "assertRaisesRegexp" _assertRegex = "assertRegexpMatches" else: _assertRaisesRegex = "assertRaisesRegex" _assertRegex = "assertRegex" else: def b(s): return s # Workaround for standalone backslash def u(s): return unicode(s.replace(r'\\', r'\\\\'), "unicode_escape") unichr = unichr int2byte = chr def byte2int(bs): return ord(bs[0]) def indexbytes(buf, i): return ord(buf[i]) iterbytes = functools.partial(itertools.imap, ord) import StringIO StringIO = BytesIO = StringIO.StringIO _assertCountEqual = "assertItemsEqual" _assertRaisesRegex = "assertRaisesRegexp" _assertRegex = "assertRegexpMatches" _add_doc(b, """Byte literal""") _add_doc(u, """Text literal""") def assertCountEqual(self, *args, **kwargs): return getattr(self, _assertCountEqual)(*args, **kwargs) def assertRaisesRegex(self, *args, **kwargs): return getattr(self, _assertRaisesRegex)(*args, **kwargs) def assertRegex(self, *args, **kwargs): return getattr(self, _assertRegex)(*args, **kwargs) if PY3: exec_ = getattr(moves.builtins, "exec") def reraise(tp, value, tb=None): if value is None: value = tp() if value.__traceback__ is not tb: raise value.with_traceback(tb) raise value else: def exec_(_code_, _globs_=None, _locs_=None): """Execute code in a namespace.""" if _globs_ is None: frame = sys._getframe(1) _globs_ = frame.f_globals if _locs_ is None: _locs_ = frame.f_locals del frame elif _locs_ is None: _locs_ = _globs_ exec("""exec _code_ in _globs_, _locs_""") exec_("""def reraise(tp, value, tb=None): raise tp, value, tb """) if sys.version_info[:2] == (3, 2): exec_("""def raise_from(value, from_value): if from_value is None: raise value raise value from from_value """) elif sys.version_info[:2] > (3, 2): exec_("""def raise_from(value, from_value): raise value from from_value """) else: def raise_from(value, from_value): raise value print_ = getattr(moves.builtins, "print", None) if print_ is None: def print_(*args, **kwargs): """The new-style print function for Python 2.4 and 2.5.""" fp = kwargs.pop("file", sys.stdout) if fp is None: return def write(data): if not isinstance(data, basestring): data = str(data) # If the file has 
an encoding, encode unicode with it. if (isinstance(fp, file) and isinstance(data, unicode) and fp.encoding is not None): errors = getattr(fp, "errors", None) if errors is None: errors = "strict" data = data.encode(fp.encoding, errors) fp.write(data) want_unicode = False sep = kwargs.pop("sep", None) if sep is not None: if isinstance(sep, unicode): want_unicode = True elif not isinstance(sep, str): raise TypeError("sep must be None or a string") end = kwargs.pop("end", None) if end is not None: if isinstance(end, unicode): want_unicode = True elif not isinstance(end, str): raise TypeError("end must be None or a string") if kwargs: raise TypeError("invalid keyword arguments to print()") if not want_unicode: for arg in args: if isinstance(arg, unicode): want_unicode = True break if want_unicode: newline = unicode("\n") space = unicode(" ") else: newline = "\n" space = " " if sep is None: sep = space if end is None: end = newline for i, arg in enumerate(args): if i: write(sep) write(arg) write(end) if sys.version_info[:2] < (3, 3): _print = print_ def print_(*args, **kwargs): fp = kwargs.get("file", sys.stdout) flush = kwargs.pop("flush", False) _print(*args, **kwargs) if flush and fp is not None: fp.flush() _add_doc(reraise, """Reraise an exception.""") if sys.version_info[0:2] < (3, 4): def wraps(wrapped, assigned=functools.WRAPPER_ASSIGNMENTS, updated=functools.WRAPPER_UPDATES): def wrapper(f): f = functools.wraps(wrapped, assigned, updated)(f) f.__wrapped__ = wrapped return f return wrapper else: wraps = functools.wraps def with_metaclass(meta, *bases): """Create a base class with a metaclass.""" # This requires a bit of explanation: the basic idea is to make a dummy # metaclass for one level of class instantiation that replaces itself with # the actual metaclass. class metaclass(meta): def __new__(cls, name, this_bases, d): return meta(name, bases, d) return type.__new__(metaclass, 'temporary_class', (), {}) def add_metaclass(metaclass): """Class decorator for creating a class with a metaclass.""" def wrapper(cls): orig_vars = cls.__dict__.copy() slots = orig_vars.get('__slots__') if slots is not None: if isinstance(slots, str): slots = [slots] for slots_var in slots: orig_vars.pop(slots_var) orig_vars.pop('__dict__', None) orig_vars.pop('__weakref__', None) return metaclass(cls.__name__, cls.__bases__, orig_vars) return wrapper def python_2_unicode_compatible(klass): """ A decorator that defines __unicode__ and __str__ methods under Python 2. Under Python 3 it does nothing. To support Python 2 and 3 with a single code base, define a __str__ method returning text and apply this decorator to the class. """ if PY2: if '__str__' not in klass.__dict__: raise ValueError("@python_2_unicode_compatible cannot be applied " "to %s because it doesn't define __str__()." % klass.__name__) klass.__unicode__ = klass.__str__ klass.__str__ = lambda self: self.__unicode__().encode('utf-8') return klass # Complete the moves implementation. # This code is at the end of this module to speed up module loading. # Turn this module into a package. __path__ = [] # required for PEP 302 and PEP 451 __package__ = __name__ # see PEP 366 @ReservedAssignment if globals().get("__spec__") is not None: __spec__.submodule_search_locations = [] # PEP 451 @UndefinedVariable # Remove other six meta path importers, since they cause problems. This can # happen if six is removed from sys.modules and then reloaded. (Setuptools does # this for some reason.) 
if sys.meta_path: for i, importer in enumerate(sys.meta_path): # Here's some real nastiness: Another "instance" of the six module might # be floating around. Therefore, we can't use isinstance() to check for # the six meta path importer, since the other six instance will have # inserted an importer with different class. if (type(importer).__name__ == "_SixMetaPathImporter" and importer.name == __name__): del sys.meta_path[i] break del i, importer # Finally, add the importer to the meta path import hook. sys.meta_path.append(_importer) PKZ m=+site-packages/pip/_vendor/colorama/win32.pynu[# Copyright Jonathan Hartley 2013. BSD 3-Clause license, see LICENSE file. # from winbase.h STDOUT = -11 STDERR = -12 try: import ctypes from ctypes import LibraryLoader windll = LibraryLoader(ctypes.WinDLL) from ctypes import wintypes except (AttributeError, ImportError): windll = None SetConsoleTextAttribute = lambda *_: None winapi_test = lambda *_: None else: from ctypes import byref, Structure, c_char, POINTER COORD = wintypes._COORD class CONSOLE_SCREEN_BUFFER_INFO(Structure): """struct in wincon.h.""" _fields_ = [ ("dwSize", COORD), ("dwCursorPosition", COORD), ("wAttributes", wintypes.WORD), ("srWindow", wintypes.SMALL_RECT), ("dwMaximumWindowSize", COORD), ] def __str__(self): return '(%d,%d,%d,%d,%d,%d,%d,%d,%d,%d,%d)' % ( self.dwSize.Y, self.dwSize.X , self.dwCursorPosition.Y, self.dwCursorPosition.X , self.wAttributes , self.srWindow.Top, self.srWindow.Left, self.srWindow.Bottom, self.srWindow.Right , self.dwMaximumWindowSize.Y, self.dwMaximumWindowSize.X ) _GetStdHandle = windll.kernel32.GetStdHandle _GetStdHandle.argtypes = [ wintypes.DWORD, ] _GetStdHandle.restype = wintypes.HANDLE _GetConsoleScreenBufferInfo = windll.kernel32.GetConsoleScreenBufferInfo _GetConsoleScreenBufferInfo.argtypes = [ wintypes.HANDLE, POINTER(CONSOLE_SCREEN_BUFFER_INFO), ] _GetConsoleScreenBufferInfo.restype = wintypes.BOOL _SetConsoleTextAttribute = windll.kernel32.SetConsoleTextAttribute _SetConsoleTextAttribute.argtypes = [ wintypes.HANDLE, wintypes.WORD, ] _SetConsoleTextAttribute.restype = wintypes.BOOL _SetConsoleCursorPosition = windll.kernel32.SetConsoleCursorPosition _SetConsoleCursorPosition.argtypes = [ wintypes.HANDLE, COORD, ] _SetConsoleCursorPosition.restype = wintypes.BOOL _FillConsoleOutputCharacterA = windll.kernel32.FillConsoleOutputCharacterA _FillConsoleOutputCharacterA.argtypes = [ wintypes.HANDLE, c_char, wintypes.DWORD, COORD, POINTER(wintypes.DWORD), ] _FillConsoleOutputCharacterA.restype = wintypes.BOOL _FillConsoleOutputAttribute = windll.kernel32.FillConsoleOutputAttribute _FillConsoleOutputAttribute.argtypes = [ wintypes.HANDLE, wintypes.WORD, wintypes.DWORD, COORD, POINTER(wintypes.DWORD), ] _FillConsoleOutputAttribute.restype = wintypes.BOOL _SetConsoleTitleW = windll.kernel32.SetConsoleTitleA _SetConsoleTitleW.argtypes = [ wintypes.LPCSTR ] _SetConsoleTitleW.restype = wintypes.BOOL handles = { STDOUT: _GetStdHandle(STDOUT), STDERR: _GetStdHandle(STDERR), } def winapi_test(): handle = handles[STDOUT] csbi = CONSOLE_SCREEN_BUFFER_INFO() success = _GetConsoleScreenBufferInfo( handle, byref(csbi)) return bool(success) def GetConsoleScreenBufferInfo(stream_id=STDOUT): handle = handles[stream_id] csbi = CONSOLE_SCREEN_BUFFER_INFO() success = _GetConsoleScreenBufferInfo( handle, byref(csbi)) return csbi def SetConsoleTextAttribute(stream_id, attrs): handle = handles[stream_id] return _SetConsoleTextAttribute(handle, attrs) def SetConsoleCursorPosition(stream_id, position, adjust=True): 
position = COORD(*position) # If the position is out of range, do nothing. if position.Y <= 0 or position.X <= 0: return # Adjust for Windows' SetConsoleCursorPosition: # 1. being 0-based, while ANSI is 1-based. # 2. expecting (x,y), while ANSI uses (y,x). adjusted_position = COORD(position.Y - 1, position.X - 1) if adjust: # Adjust for viewport's scroll position sr = GetConsoleScreenBufferInfo(STDOUT).srWindow adjusted_position.Y += sr.Top adjusted_position.X += sr.Left # Resume normal processing handle = handles[stream_id] return _SetConsoleCursorPosition(handle, adjusted_position) def FillConsoleOutputCharacter(stream_id, char, length, start): handle = handles[stream_id] char = c_char(char.encode()) length = wintypes.DWORD(length) num_written = wintypes.DWORD(0) # Note that this is hard-coded for ANSI (vs wide) bytes. success = _FillConsoleOutputCharacterA( handle, char, length, start, byref(num_written)) return num_written.value def FillConsoleOutputAttribute(stream_id, attr, length, start): ''' FillConsoleOutputAttribute( hConsole, csbi.wAttributes, dwConSize, coordScreen, &cCharsWritten )''' handle = handles[stream_id] attribute = wintypes.WORD(attr) length = wintypes.DWORD(length) num_written = wintypes.DWORD(0) # Note that this is hard-coded for ANSI (vs wide) bytes. return _FillConsoleOutputAttribute( handle, attribute, length, start, byref(num_written)) def SetConsoleTitle(title): return _SetConsoleTitleW(title) PKZc%%1site-packages/pip/_vendor/colorama/ansitowin32.pynu[# Copyright Jonathan Hartley 2013. BSD 3-Clause license, see LICENSE file. import re import sys import os from .ansi import AnsiFore, AnsiBack, AnsiStyle, Style from .winterm import WinTerm, WinColor, WinStyle from .win32 import windll, winapi_test winterm = None if windll is not None: winterm = WinTerm() def is_stream_closed(stream): return not hasattr(stream, 'closed') or stream.closed def is_a_tty(stream): return hasattr(stream, 'isatty') and stream.isatty() class StreamWrapper(object): ''' Wraps a stream (such as stdout), acting as a transparent proxy for all attribute access apart from method 'write()', which is delegated to our Converter instance. ''' def __init__(self, wrapped, converter): # double-underscore everything to prevent clashes with names of # attributes on the wrapped stream object. self.__wrapped = wrapped self.__convertor = converter def __getattr__(self, name): return getattr(self.__wrapped, name) def write(self, text): self.__convertor.write(text) class AnsiToWin32(object): ''' Implements a 'write()' method which, on Windows, will strip ANSI character sequences from the text, and if outputting to a tty, will convert them into win32 function calls. ''' ANSI_CSI_RE = re.compile('\001?\033\[((?:\d|;)*)([a-zA-Z])\002?') # Control Sequence Introducer ANSI_OSC_RE = re.compile('\001?\033\]((?:.|;)*?)(\x07)\002?') # Operating System Command def __init__(self, wrapped, convert=None, strip=None, autoreset=False): # The wrapped stream (normally sys.stdout or sys.stderr) self.wrapped = wrapped # should we reset colors to defaults after every .write() self.autoreset = autoreset # create the proxy wrapping our output stream self.stream = StreamWrapper(wrapped, self) on_windows = os.name == 'nt' # We test if the WinAPI works, because even if we are on Windows # we may be using a terminal that doesn't support the WinAPI # (e.g. Cygwin Terminal). In this case it's up to the terminal # to support the ANSI codes. 
conversion_supported = on_windows and winapi_test() # should we strip ANSI sequences from our output? if strip is None: strip = conversion_supported or (not is_stream_closed(wrapped) and not is_a_tty(wrapped)) self.strip = strip # should we should convert ANSI sequences into win32 calls? if convert is None: convert = conversion_supported and not is_stream_closed(wrapped) and is_a_tty(wrapped) self.convert = convert # dict of ansi codes to win32 functions and parameters self.win32_calls = self.get_win32_calls() # are we wrapping stderr? self.on_stderr = self.wrapped is sys.stderr def should_wrap(self): ''' True if this class is actually needed. If false, then the output stream will not be affected, nor will win32 calls be issued, so wrapping stdout is not actually required. This will generally be False on non-Windows platforms, unless optional functionality like autoreset has been requested using kwargs to init() ''' return self.convert or self.strip or self.autoreset def get_win32_calls(self): if self.convert and winterm: return { AnsiStyle.RESET_ALL: (winterm.reset_all, ), AnsiStyle.BRIGHT: (winterm.style, WinStyle.BRIGHT), AnsiStyle.DIM: (winterm.style, WinStyle.NORMAL), AnsiStyle.NORMAL: (winterm.style, WinStyle.NORMAL), AnsiFore.BLACK: (winterm.fore, WinColor.BLACK), AnsiFore.RED: (winterm.fore, WinColor.RED), AnsiFore.GREEN: (winterm.fore, WinColor.GREEN), AnsiFore.YELLOW: (winterm.fore, WinColor.YELLOW), AnsiFore.BLUE: (winterm.fore, WinColor.BLUE), AnsiFore.MAGENTA: (winterm.fore, WinColor.MAGENTA), AnsiFore.CYAN: (winterm.fore, WinColor.CYAN), AnsiFore.WHITE: (winterm.fore, WinColor.GREY), AnsiFore.RESET: (winterm.fore, ), AnsiFore.LIGHTBLACK_EX: (winterm.fore, WinColor.BLACK, True), AnsiFore.LIGHTRED_EX: (winterm.fore, WinColor.RED, True), AnsiFore.LIGHTGREEN_EX: (winterm.fore, WinColor.GREEN, True), AnsiFore.LIGHTYELLOW_EX: (winterm.fore, WinColor.YELLOW, True), AnsiFore.LIGHTBLUE_EX: (winterm.fore, WinColor.BLUE, True), AnsiFore.LIGHTMAGENTA_EX: (winterm.fore, WinColor.MAGENTA, True), AnsiFore.LIGHTCYAN_EX: (winterm.fore, WinColor.CYAN, True), AnsiFore.LIGHTWHITE_EX: (winterm.fore, WinColor.GREY, True), AnsiBack.BLACK: (winterm.back, WinColor.BLACK), AnsiBack.RED: (winterm.back, WinColor.RED), AnsiBack.GREEN: (winterm.back, WinColor.GREEN), AnsiBack.YELLOW: (winterm.back, WinColor.YELLOW), AnsiBack.BLUE: (winterm.back, WinColor.BLUE), AnsiBack.MAGENTA: (winterm.back, WinColor.MAGENTA), AnsiBack.CYAN: (winterm.back, WinColor.CYAN), AnsiBack.WHITE: (winterm.back, WinColor.GREY), AnsiBack.RESET: (winterm.back, ), AnsiBack.LIGHTBLACK_EX: (winterm.back, WinColor.BLACK, True), AnsiBack.LIGHTRED_EX: (winterm.back, WinColor.RED, True), AnsiBack.LIGHTGREEN_EX: (winterm.back, WinColor.GREEN, True), AnsiBack.LIGHTYELLOW_EX: (winterm.back, WinColor.YELLOW, True), AnsiBack.LIGHTBLUE_EX: (winterm.back, WinColor.BLUE, True), AnsiBack.LIGHTMAGENTA_EX: (winterm.back, WinColor.MAGENTA, True), AnsiBack.LIGHTCYAN_EX: (winterm.back, WinColor.CYAN, True), AnsiBack.LIGHTWHITE_EX: (winterm.back, WinColor.GREY, True), } return dict() def write(self, text): if self.strip or self.convert: self.write_and_convert(text) else: self.wrapped.write(text) self.wrapped.flush() if self.autoreset: self.reset_all() def reset_all(self): if self.convert: self.call_win32('m', (0,)) elif not self.strip and not is_stream_closed(self.wrapped): self.wrapped.write(Style.RESET_ALL) def write_and_convert(self, text): ''' Write the given text to our wrapped stream, stripping any ANSI sequences from the text, and optionally 
converting them into win32 calls. ''' cursor = 0 text = self.convert_osc(text) for match in self.ANSI_CSI_RE.finditer(text): start, end = match.span() self.write_plain_text(text, cursor, start) self.convert_ansi(*match.groups()) cursor = end self.write_plain_text(text, cursor, len(text)) def write_plain_text(self, text, start, end): if start < end: self.wrapped.write(text[start:end]) self.wrapped.flush() def convert_ansi(self, paramstring, command): if self.convert: params = self.extract_params(command, paramstring) self.call_win32(command, params) def extract_params(self, command, paramstring): if command in 'Hf': params = tuple(int(p) if len(p) != 0 else 1 for p in paramstring.split(';')) while len(params) < 2: # defaults: params = params + (1,) else: params = tuple(int(p) for p in paramstring.split(';') if len(p) != 0) if len(params) == 0: # defaults: if command in 'JKm': params = (0,) elif command in 'ABCD': params = (1,) return params def call_win32(self, command, params): if command == 'm': for param in params: if param in self.win32_calls: func_args = self.win32_calls[param] func = func_args[0] args = func_args[1:] kwargs = dict(on_stderr=self.on_stderr) func(*args, **kwargs) elif command in 'J': winterm.erase_screen(params[0], on_stderr=self.on_stderr) elif command in 'K': winterm.erase_line(params[0], on_stderr=self.on_stderr) elif command in 'Hf': # cursor position - absolute winterm.set_cursor_position(params, on_stderr=self.on_stderr) elif command in 'ABCD': # cursor position - relative n = params[0] # A - up, B - down, C - forward, D - back x, y = {'A': (0, -n), 'B': (0, n), 'C': (n, 0), 'D': (-n, 0)}[command] winterm.cursor_adjust(x, y, on_stderr=self.on_stderr) def convert_osc(self, text): for match in self.ANSI_OSC_RE.finditer(text): start, end = match.span() text = text[:start] + text[end:] paramstring, command = match.groups() if command in '\x07': # \x07 = BEL params = paramstring.split(";") # 0 - change title and icon (we will only change title) # 1 - change icon (we don't support this) # 2 - change title if params[0] in '02': winterm.set_title(params[1]) return text PKZΦ&$$2site-packages/pip/_vendor/colorama/ansitowin32.pycnu[ abc@sddlZddlZddlZddlmZmZmZmZddlm Z m Z m Z ddl m Z mZdZe dk re ZndZdZdefd YZd efd YZdS( iNi(tAnsiForetAnsiBackt AnsiStyletStyle(tWinTermtWinColortWinStyle(twindllt winapi_testcCst|d p|jS(Ntclosed(thasattrR (tstream((sD/usr/lib/python2.7/site-packages/pip/_vendor/colorama/ansitowin32.pytis_stream_closedscCst|do|jS(Ntisatty(R R (R ((sD/usr/lib/python2.7/site-packages/pip/_vendor/colorama/ansitowin32.pytis_a_ttyst StreamWrappercBs)eZdZdZdZdZRS(s Wraps a stream (such as stdout), acting as a transparent proxy for all attribute access apart from method 'write()', which is delegated to our Converter instance. cCs||_||_dS(N(t_StreamWrapper__wrappedt_StreamWrapper__convertor(tselftwrappedt converter((sD/usr/lib/python2.7/site-packages/pip/_vendor/colorama/ansitowin32.pyt__init__s cCst|j|S(N(tgetattrR(Rtname((sD/usr/lib/python2.7/site-packages/pip/_vendor/colorama/ansitowin32.pyt __getattr__$scCs|jj|dS(N(Rtwrite(Rttext((sD/usr/lib/python2.7/site-packages/pip/_vendor/colorama/ansitowin32.pyR's(t__name__t __module__t__doc__RRR(((sD/usr/lib/python2.7/site-packages/pip/_vendor/colorama/ansitowin32.pyRs  t AnsiToWin32cBseZdZejdZejdZddedZ dZ dZ dZ dZ dZd Zd Zd Zd Zd ZRS(s Implements a 'write()' method which, on Windows, will strip ANSI character sequences from the text, and if outputting to a tty, will convert them into win32 function calls. 
iN(t convert_osct ANSI_CSI_REtfinditertspantwrite_plain_textt convert_ansitgroupstlen(RRtcursortmatchtstarttend((sD/usr/lib/python2.7/site-packages/pip/_vendor/colorama/ansitowin32.pyRJs cCs7||kr3|jj|||!|jjndS(N(RRRK(RRRXRY((sD/usr/lib/python2.7/site-packages/pip/_vendor/colorama/ansitowin32.pyRRs cCs2|jr.|j||}|j||ndS(N(R$textract_paramsRM(Rt paramstringtcommandtparams((sD/usr/lib/python2.7/site-packages/pip/_vendor/colorama/ansitowin32.pyRSs cCs|dkrQtd|jdD}xt|dkrM|d }q.Wn^td|jdD}t|dkr|dkrd }q|d krd }qn|S( NtHfcss3|])}t|dkr't|ndVqdS(iiN(RUtint(t.0tp((sD/usr/lib/python2.7/site-packages/pip/_vendor/colorama/ansitowin32.pys st;iicss-|]#}t|dkrt|VqdS(iN(RUR_(R`Ra((sD/usr/lib/python2.7/site-packages/pip/_vendor/colorama/ansitowin32.pys sitJKmtABCD(i(i(i(ttupletsplitRU(RR\R[R]((sD/usr/lib/python2.7/site-packages/pip/_vendor/colorama/ansitowin32.pyRZs     c Cse|dkrrxR|D]X}||jkr|j|}|d}|d}td|j}|||qqWn|dkrtj|dd|jn|dkrtj|dd|jn|dkrtj|d|jnx|dkra|d}id| fd 6d|fd 6|dfd 6| dfd 6|\} } tj| | d|jndS( NRLiiR)tJtKR^RdtAtBtCtD(R&RIR)R-t erase_screent erase_linetset_cursor_positiont cursor_adjust( RR\R]tparamt func_argstfunctargstkwargstntxty((sD/usr/lib/python2.7/site-packages/pip/_vendor/colorama/ansitowin32.pyRMs$          FcCsx|jj|D]~}|j\}}|| ||}|j\}}|dkr|jd}|ddkrtj|dqqqW|S(NsRbit02i(t ANSI_OSC_RERPRQRTRfR-t set_title(RRRWRXRYR[R\R]((sD/usr/lib/python2.7/site-packages/pip/_vendor/colorama/ansitowin32.pyRNs N(RRRtretcompileRORzR"tFalseRR,R%RR.RJRRRSRZRMRN(((sD/usr/lib/python2.7/site-packages/pip/_vendor/colorama/ansitowin32.pyR+s! ,      (R|R'R!tansiRRRRR-RRRtwin32RRR"R RtobjectRR(((sD/usr/lib/python2.7/site-packages/pip/_vendor/colorama/ansitowin32.pyts   "    PKZ$Ek/site-packages/pip/_vendor/colorama/__init__.pycnu[ abc@s^ddlmZmZmZmZddlmZmZmZm Z ddl m Z dZ dS(i(tinittdeinittreinitt colorama_text(tForetBacktStyletCursor(t AnsiToWin32s0.3.7N( t initialiseRRRRtansiRRRRt ansitowin32Rt __version__(((sA/usr/lib/python2.7/site-packages/pip/_vendor/colorama/__init__.pyts""PKZw{g!-site-packages/pip/_vendor/colorama/winterm.pynu[# Copyright Jonathan Hartley 2013. BSD 3-Clause license, see LICENSE file. from . import win32 # from wincon.h class WinColor(object): BLACK = 0 BLUE = 1 GREEN = 2 CYAN = 3 RED = 4 MAGENTA = 5 YELLOW = 6 GREY = 7 # from wincon.h class WinStyle(object): NORMAL = 0x00 # dim text, dim background BRIGHT = 0x08 # bright text, dim background BRIGHT_BACKGROUND = 0x80 # dim text, bright background class WinTerm(object): def __init__(self): self._default = win32.GetConsoleScreenBufferInfo(win32.STDOUT).wAttributes self.set_attrs(self._default) self._default_fore = self._fore self._default_back = self._back self._default_style = self._style # In order to emulate LIGHT_EX in windows, we borrow the BRIGHT style. # So that LIGHT_EX colors and BRIGHT style do not clobber each other, # we track them separately, since LIGHT_EX is overwritten by Fore/Back # and BRIGHT is overwritten by Style codes. 
self._light = 0 def get_attrs(self): return self._fore + self._back * 16 + (self._style | self._light) def set_attrs(self, value): self._fore = value & 7 self._back = (value >> 4) & 7 self._style = value & (WinStyle.BRIGHT | WinStyle.BRIGHT_BACKGROUND) def reset_all(self, on_stderr=None): self.set_attrs(self._default) self.set_console(attrs=self._default) def fore(self, fore=None, light=False, on_stderr=False): if fore is None: fore = self._default_fore self._fore = fore # Emulate LIGHT_EX with BRIGHT Style if light: self._light |= WinStyle.BRIGHT else: self._light &= ~WinStyle.BRIGHT self.set_console(on_stderr=on_stderr) def back(self, back=None, light=False, on_stderr=False): if back is None: back = self._default_back self._back = back # Emulate LIGHT_EX with BRIGHT_BACKGROUND Style if light: self._light |= WinStyle.BRIGHT_BACKGROUND else: self._light &= ~WinStyle.BRIGHT_BACKGROUND self.set_console(on_stderr=on_stderr) def style(self, style=None, on_stderr=False): if style is None: style = self._default_style self._style = style self.set_console(on_stderr=on_stderr) def set_console(self, attrs=None, on_stderr=False): if attrs is None: attrs = self.get_attrs() handle = win32.STDOUT if on_stderr: handle = win32.STDERR win32.SetConsoleTextAttribute(handle, attrs) def get_position(self, handle): position = win32.GetConsoleScreenBufferInfo(handle).dwCursorPosition # Because Windows coordinates are 0-based, # and win32.SetConsoleCursorPosition expects 1-based. position.X += 1 position.Y += 1 return position def set_cursor_position(self, position=None, on_stderr=False): if position is None: # I'm not currently tracking the position, so there is no default. # position = self.get_position() return handle = win32.STDOUT if on_stderr: handle = win32.STDERR win32.SetConsoleCursorPosition(handle, position) def cursor_adjust(self, x, y, on_stderr=False): handle = win32.STDOUT if on_stderr: handle = win32.STDERR position = self.get_position(handle) adjusted_position = (position.Y + y, position.X + x) win32.SetConsoleCursorPosition(handle, adjusted_position, adjust=False) def erase_screen(self, mode=0, on_stderr=False): # 0 should clear from the cursor to the end of the screen. # 1 should clear from the cursor to the beginning of the screen. # 2 should clear the entire screen, and move cursor to (1,1) handle = win32.STDOUT if on_stderr: handle = win32.STDERR csbi = win32.GetConsoleScreenBufferInfo(handle) # get the number of character cells in the current buffer cells_in_screen = csbi.dwSize.X * csbi.dwSize.Y # get number of character cells before current cursor position cells_before_cursor = csbi.dwSize.X * csbi.dwCursorPosition.Y + csbi.dwCursorPosition.X if mode == 0: from_coord = csbi.dwCursorPosition cells_to_erase = cells_in_screen - cells_before_cursor if mode == 1: from_coord = win32.COORD(0, 0) cells_to_erase = cells_before_cursor elif mode == 2: from_coord = win32.COORD(0, 0) cells_to_erase = cells_in_screen # fill the entire screen with blanks win32.FillConsoleOutputCharacter(handle, ' ', cells_to_erase, from_coord) # now set the buffer's attributes accordingly win32.FillConsoleOutputAttribute(handle, self.get_attrs(), cells_to_erase, from_coord) if mode == 2: # put the cursor where needed win32.SetConsoleCursorPosition(handle, (1, 1)) def erase_line(self, mode=0, on_stderr=False): # 0 should clear from the cursor to the end of the line. # 1 should clear from the cursor to the beginning of the line. # 2 should clear the entire line. 
handle = win32.STDOUT if on_stderr: handle = win32.STDERR csbi = win32.GetConsoleScreenBufferInfo(handle) if mode == 0: from_coord = csbi.dwCursorPosition cells_to_erase = csbi.dwSize.X - csbi.dwCursorPosition.X if mode == 1: from_coord = win32.COORD(0, csbi.dwCursorPosition.Y) cells_to_erase = csbi.dwCursorPosition.X elif mode == 2: from_coord = win32.COORD(0, csbi.dwCursorPosition.Y) cells_to_erase = csbi.dwSize.X # fill the entire screen with blanks win32.FillConsoleOutputCharacter(handle, ' ', cells_to_erase, from_coord) # now set the buffer's attributes accordingly win32.FillConsoleOutputAttribute(handle, self.get_attrs(), cells_to_erase, from_coord) def set_title(self, title): win32.SetConsoleTitle(title) PKZw=2 2 1site-packages/pip/_vendor/colorama/initialise.pyonu[ abc@sddlZddlZddlZddlmZdadadada e a dZ e dde dZdZejdZdZd ZdS( iNi(t AnsiToWin32cCs#tdk rttjndS(N(RtNonet orig_stdoutt reset_all(((sC/usr/lib/python2.7/site-packages/pip/_vendor/colorama/initialise.pyRs cCs| r+t|||gr+tdntjatjatjdkrUdant t||||t_atjdkrda nt t||||t_a t st j tta ndS(Ns,wrap=False conflicts with any other arg=True(tanyt ValueErrortsyststdoutRtstderrt orig_stderrRtwrapped_stdoutt wrap_streamtwrapped_stderrt atexit_donetatexittregisterRtTrue(t autoresettconverttstriptwrap((sC/usr/lib/python2.7/site-packages/pip/_vendor/colorama/initialise.pytinits     cCs4tdk rtt_ntdk r0tt_ndS(N(RRRRR R(((sC/usr/lib/python2.7/site-packages/pip/_vendor/colorama/initialise.pytdeinit3s   cos%t||z dVWdtXdS(N(RR(targstkwargs((sC/usr/lib/python2.7/site-packages/pip/_vendor/colorama/initialise.pyt colorama_text:s  cCs4tdk rtt_ntdk r0tt_ndS(N(R RRRR R(((sC/usr/lib/python2.7/site-packages/pip/_vendor/colorama/initialise.pytreinitCs   cCsC|r?t|d|d|d|}|jr?|j}q?n|S(NRRR(Rt should_wraptstream(RRRRRtwrapper((sC/usr/lib/python2.7/site-packages/pip/_vendor/colorama/initialise.pyR Js   (Rt contextlibRt ansitowin32RRRR R R tFalseR RRRRtcontextmanagerRRR (((sC/usr/lib/python2.7/site-packages/pip/_vendor/colorama/initialise.pyts      PKZ$Ek/site-packages/pip/_vendor/colorama/__init__.pyonu[ abc@s^ddlmZmZmZmZddlmZmZmZm Z ddl m Z dZ dS(i(tinittdeinittreinitt colorama_text(tForetBacktStyletCursor(t AnsiToWin32s0.3.7N( t initialiseRRRRtansiRRRRt ansitowin32Rt __version__(((sA/usr/lib/python2.7/site-packages/pip/_vendor/colorama/__init__.pyts""PKZ! jPP+site-packages/pip/_vendor/colorama/ansi.pycnu[ abc@sdZdZdZdZdZdZddZddZd efd YZ d efd YZ d e fdYZ de fdYZ de fdYZ e Ze Ze Ze ZdS(s This module generates ANSI character codes to printing colors to terminals. 
See: http://en.wikipedia.org/wiki/ANSI_escape_code s]scCstt|dS(Ntm(tCSItstr(tcode((s=/usr/lib/python2.7/site-packages/pip/_vendor/colorama/ansi.pyt code_to_chars scCstd|tS(Ns2;(tOSCtBEL(ttitle((s=/usr/lib/python2.7/site-packages/pip/_vendor/colorama/ansi.pyt set_titlesicCstt|dS(NtJ(RR(tmode((s=/usr/lib/python2.7/site-packages/pip/_vendor/colorama/ansi.pyt clear_screenscCstt|dS(NtK(RR(R ((s=/usr/lib/python2.7/site-packages/pip/_vendor/colorama/ansi.pyt clear_linest AnsiCodescBseZdZRS(cCsRxKt|D]=}|jds t||}t||t|q q WdS(Nt_(tdirt startswithtgetattrtsetattrR(tselftnametvalue((s=/usr/lib/python2.7/site-packages/pip/_vendor/colorama/ansi.pyt__init__s(t__name__t __module__R(((s=/usr/lib/python2.7/site-packages/pip/_vendor/colorama/ansi.pyRst AnsiCursorcBsGeZddZddZddZddZdddZRS(icCstt|dS(NtA(RR(Rtn((s=/usr/lib/python2.7/site-packages/pip/_vendor/colorama/ansi.pytUP%scCstt|dS(NtB(RR(RR((s=/usr/lib/python2.7/site-packages/pip/_vendor/colorama/ansi.pytDOWN'scCstt|dS(NtC(RR(RR((s=/usr/lib/python2.7/site-packages/pip/_vendor/colorama/ansi.pytFORWARD)scCstt|dS(NtD(RR(RR((s=/usr/lib/python2.7/site-packages/pip/_vendor/colorama/ansi.pytBACK+scCs tt|dt|dS(Nt;tH(RR(Rtxty((s=/usr/lib/python2.7/site-packages/pip/_vendor/colorama/ansi.pytPOS-s(RRRRR!R#R((((s=/usr/lib/python2.7/site-packages/pip/_vendor/colorama/ansi.pyR$s     tAnsiForecBsneZdZdZdZdZdZdZdZdZ dZ d Z d Z d Z d Zd ZdZdZdZRS(iii i!i"i#i$i%i'iZi[i\i]i^i_i`ia(RRtBLACKtREDtGREENtYELLOWtBLUEtMAGENTAtCYANtWHITEtRESETt LIGHTBLACK_EXt LIGHTRED_EXt LIGHTGREEN_EXtLIGHTYELLOW_EXt LIGHTBLUE_EXtLIGHTMAGENTA_EXt LIGHTCYAN_EXt LIGHTWHITE_EX(((s=/usr/lib/python2.7/site-packages/pip/_vendor/colorama/ansi.pyR)1s"tAnsiBackcBsneZdZdZdZdZdZdZdZdZ dZ d Z d Z d Z d Zd ZdZdZdZRS(i(i)i*i+i,i-i.i/i1idieifigihiiijik(RRR*R+R,R-R.R/R0R1R2R3R4R5R6R7R8R9R:(((s=/usr/lib/python2.7/site-packages/pip/_vendor/colorama/ansi.pyR;Gs"t AnsiStylecBs eZdZdZdZdZRS(iiii(RRtBRIGHTtDIMtNORMALt RESET_ALL(((s=/usr/lib/python2.7/site-packages/pip/_vendor/colorama/ansi.pyR<]sN(t__doc__RRRRRR R tobjectRRR)R;R<tForetBacktStyletCursor(((s=/usr/lib/python2.7/site-packages/pip/_vendor/colorama/ansi.pyts          PKZ.,site-packages/pip/_vendor/colorama/win32.pyonu[ abc@s}dZdZy?ddlZddlmZeejZddlmZWn/eefk r|dZdZ dZ nXddlm Z m Z mZmZejZd e fd YZejjZejge_eje_ejjZejeege_eje_ejj Zejejge_eje_ejjZ ejege _eje _ejj!Z"ejeejeeejge"_eje"_ejj#Z$ejejejeeejge$_eje$_ejj%Z&ej'ge&_eje&_ieee6eee6Z(d Z ed Zd Z e)dZdZ*dZ#dZ+dS(iiiN(t LibraryLoader(twintypescGsdS(N(tNone(t_((s>/usr/lib/python2.7/site-packages/pip/_vendor/colorama/win32.pyttcGsdS(N(R(R((s>/usr/lib/python2.7/site-packages/pip/_vendor/colorama/win32.pyRR(tbyreft Structuretc_chartPOINTERtCONSOLE_SCREEN_BUFFER_INFOcBsPeZdZdefdefdejfdejfdefgZdZRS(sstruct in wincon.h.tdwSizetdwCursorPositiont wAttributestsrWindowtdwMaximumWindowSizec Cshd|jj|jj|jj|jj|j|jj|jj|jj|jj |j j|j jf S(Ns"(%d,%d,%d,%d,%d,%d,%d,%d,%d,%d,%d)( R tYtXR R RtToptLefttBottomtRightR(tself((s>/usr/lib/python2.7/site-packages/pip/_vendor/colorama/win32.pyt__str__s $( t__name__t __module__t__doc__tCOORDRtWORDt SMALL_RECTt_fields_R(((s>/usr/lib/python2.7/site-packages/pip/_vendor/colorama/win32.pyR s    cCs2tt}t}t|t|}t|S(N(thandlestSTDOUTR t_GetConsoleScreenBufferInfoRtbool(thandletcsbitsuccess((s>/usr/lib/python2.7/site-packages/pip/_vendor/colorama/win32.pyt winapi_testas   cCs,t|}t}t|t|}|S(N(RR R!R(t stream_idR#R$R%((s>/usr/lib/python2.7/site-packages/pip/_vendor/colorama/win32.pytGetConsoleScreenBufferInfohs   
[remainder of the compiled CPython 2.7 bytecode for win32.pyo; binary payload not reproducible as text -- see the win32.py entries elsewhere in this archive]

PK site-packages/pip/_vendor/colorama/initialise.py
# Copyright Jonathan Hartley 2013. BSD 3-Clause license, see LICENSE file.
import atexit
import contextlib
import sys

from .ansitowin32 import AnsiToWin32


orig_stdout = None
orig_stderr = None

wrapped_stdout = None
wrapped_stderr = None

atexit_done = False


def reset_all():
    if AnsiToWin32 is not None:    # Issue #74: objects might become None at exit
        AnsiToWin32(orig_stdout).reset_all()


def init(autoreset=False, convert=None, strip=None, wrap=True):

    if not wrap and any([autoreset, convert, strip]):
        raise ValueError('wrap=False conflicts with any other arg=True')

    global wrapped_stdout, wrapped_stderr
    global orig_stdout, orig_stderr

    orig_stdout = sys.stdout
    orig_stderr = sys.stderr

    if sys.stdout is None:
        wrapped_stdout = None
    else:
        sys.stdout = wrapped_stdout = \
            wrap_stream(orig_stdout, convert, strip, autoreset, wrap)
    if sys.stderr is None:
        wrapped_stderr = None
    else:
        sys.stderr = wrapped_stderr = \
            wrap_stream(orig_stderr, convert, strip, autoreset, wrap)

    global atexit_done
    if not atexit_done:
        atexit.register(reset_all)
        atexit_done = True


def deinit():
    if orig_stdout is not None:
        sys.stdout = orig_stdout
    if orig_stderr is not None:
        sys.stderr = orig_stderr


@contextlib.contextmanager
def colorama_text(*args, **kwargs):
    init(*args, **kwargs)
    try:
        yield
    finally:
        deinit()


def reinit():
    if wrapped_stdout is not None:
        sys.stdout = wrapped_stdout
    if wrapped_stderr is not None:
        sys.stderr = wrapped_stderr


def wrap_stream(stream, convert, strip, autoreset, wrap):
    if wrap:
        wrapper = AnsiToWin32(stream,
            convert=convert, strip=strip, autoreset=autoreset)
        if wrapper.should_wrap():
            stream = wrapper.stream
    return stream

PK site-packages/pip/_vendor/colorama/ansi.py
# Copyright Jonathan Hartley 2013. BSD 3-Clause license, see LICENSE file.
'''
This module generates ANSI character codes to printing colors to terminals.
See: http://en.wikipedia.org/wiki/ANSI_escape_code
'''

CSI = '\033['
OSC = '\033]'
BEL = '\007'


def code_to_chars(code):
    return CSI + str(code) + 'm'

def set_title(title):
    return OSC + '2;' + title + BEL

def clear_screen(mode=2):
    return CSI + str(mode) + 'J'

def clear_line(mode=2):
    return CSI + str(mode) + 'K'


class AnsiCodes(object):
    def __init__(self):
        # the subclasses declare class attributes which are numbers.
        # Upon instantiation we define instance attributes, which are the same
        # as the class attributes but wrapped with the ANSI escape sequence
        for name in dir(self):
            if not name.startswith('_'):
                value = getattr(self, name)
                setattr(self, name, code_to_chars(value))


class AnsiCursor(object):
    def UP(self, n=1):
        return CSI + str(n) + 'A'
    def DOWN(self, n=1):
        return CSI + str(n) + 'B'
    def FORWARD(self, n=1):
        return CSI + str(n) + 'C'
    def BACK(self, n=1):
        return CSI + str(n) + 'D'
    def POS(self, x=1, y=1):
        return CSI + str(y) + ';' + str(x) + 'H'


class AnsiFore(AnsiCodes):
    BLACK           = 30
    RED             = 31
    GREEN           = 32
    YELLOW          = 33
    BLUE            = 34
    MAGENTA         = 35
    CYAN            = 36
    WHITE           = 37
    RESET           = 39

    # These are fairly well supported, but not part of the standard.
    LIGHTBLACK_EX   = 90
    LIGHTRED_EX     = 91
    LIGHTGREEN_EX   = 92
    LIGHTYELLOW_EX  = 93
    LIGHTBLUE_EX    = 94
    LIGHTMAGENTA_EX = 95
    LIGHTCYAN_EX    = 96
    LIGHTWHITE_EX   = 97


class AnsiBack(AnsiCodes):
    BLACK           = 40
    RED             = 41
    GREEN           = 42
    YELLOW          = 43
    BLUE            = 44
    MAGENTA         = 45
    CYAN            = 46
    WHITE           = 47
    RESET           = 49

    # These are fairly well supported, but not part of the standard.
    LIGHTBLACK_EX   = 100
    LIGHTRED_EX     = 101
    LIGHTGREEN_EX   = 102
    LIGHTYELLOW_EX  = 103
    LIGHTBLUE_EX    = 104
    LIGHTMAGENTA_EX = 105
    LIGHTCYAN_EX    = 106
    LIGHTWHITE_EX   = 107


class AnsiStyle(AnsiCodes):
    BRIGHT    = 1
    DIM       = 2
    NORMAL    = 22
    RESET_ALL = 0

Fore   = AnsiFore()
Back   = AnsiBack()
Style  = AnsiStyle()
Cursor = AnsiCursor()
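The two source files above, initialise.py and ansi.py, make up most of colorama's public API: init() wraps sys.stdout/sys.stderr through wrap_stream() and AnsiToWin32, while the Fore, Back, Style and Cursor singletons expose the escape sequences that the AnsiCodes classes build from their numeric class attributes. The short sketch below is not part of the vendored files; it only illustrates how those pieces fit together, and the import path is an assumption based on the file paths in this archive (the same names are importable from a regular "colorama" install).

# Illustrative usage sketch (not part of the archive), assuming the vendored
# package is importable as pip._vendor.colorama:
from pip._vendor.colorama import init, deinit, Fore, Back, Style

init(autoreset=True)                  # wrap the standard streams; emit a reset after each write
print(Fore.RED + 'this renders in red')
print(Back.GREEN + Fore.BLACK + 'black text on a green background')
print(Style.BRIGHT + 'bright text')   # autoreset takes care of Style.RESET_ALL here
deinit()                              # restore the original sys.stdout / sys.stderr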
PK site-packages/pip/_vendor/colorama/winterm.pyc [compiled CPython 2.7 bytecode for winterm.py; binary payload not reproducible as text]

PK site-packages/pip/_vendor/colorama/__init__.py
# Copyright Jonathan Hartley 2013. BSD 3-Clause license, see LICENSE file.
from .initialise import init, deinit, reinit, colorama_text
from .ansi import Fore, Back, Style, Cursor
from .ansitowin32 import AnsiToWin32

__version__ = '0.3.7'

PK site-packages/pip/_vendor/colorama/winterm.pyo [compiled CPython 2.7 bytecode for winterm.py; binary payload continues on the following lines]
get_positionTscCs?|dkrdStj}|r+tj}ntj||dS(N(R%RRR*tSetConsoleCursorPosition(RR0R#R,((s@/usr/lib/python2.7/site-packages/pip/_vendor/colorama/winterm.pytset_cursor_position\s    cCs^tj}|rtj}n|j|}|j||j|f}tj||dtdS(Ntadjust(RRR*R1R/R.R2tFalse(RtxtyR#R,R0tadjusted_position((s@/usr/lib/python2.7/site-packages/pip/_vendor/colorama/winterm.pyt cursor_adjustfs   ic Cs%tj}|rtj}ntj|}|jj|jj}|jj|jj|jj}|dkr|j}||}n|dkrtjdd}|}n'|dkrtjdd}|}ntj |d||tj ||j |||dkr!tj |dndS(Niiit (ii( RRR*RtdwSizeR.R/R-tCOORDtFillConsoleOutputCharactertFillConsoleOutputAttributeRR2( RtmodeR#R,tcsbitcells_in_screentcells_before_cursort from_coordtcells_to_erase((s@/usr/lib/python2.7/site-packages/pip/_vendor/colorama/winterm.pyt erase_screenns&           cCstj}|rtj}ntj|}|dkrX|j}|jj|jj}n|dkrtjd|jj}|jj}n3|dkrtjd|jj}|jj}ntj |d||tj ||j ||dS(NiiiR:( RRR*RR-R;R.R<R/R=R>R(RR?R#R,R@RCRD((s@/usr/lib/python2.7/site-packages/pip/_vendor/colorama/winterm.pyt erase_lines      cCstj|dS(N(RtSetConsoleTitle(Rttitle((s@/usr/lib/python2.7/site-packages/pip/_vendor/colorama/winterm.pyt set_titlesN(RRRRRR%R$R5R&R(R)R"R1R3R9RERFRI(((s@/usr/lib/python2.7/site-packages/pip/_vendor/colorama/winterm.pyRs       N(tRtobjectRR R(((s@/usr/lib/python2.7/site-packages/pip/_vendor/colorama/winterm.pyts PKZw=2 2 1site-packages/pip/_vendor/colorama/initialise.pycnu[ abc@sddlZddlZddlZddlmZdadadada e a dZ e dde dZdZejdZdZd ZdS( iNi(t AnsiToWin32cCs#tdk rttjndS(N(RtNonet orig_stdoutt reset_all(((sC/usr/lib/python2.7/site-packages/pip/_vendor/colorama/initialise.pyRs cCs| r+t|||gr+tdntjatjatjdkrUdant t||||t_atjdkrda nt t||||t_a t st j tta ndS(Ns,wrap=False conflicts with any other arg=True(tanyt ValueErrortsyststdoutRtstderrt orig_stderrRtwrapped_stdoutt wrap_streamtwrapped_stderrt atexit_donetatexittregisterRtTrue(t autoresettconverttstriptwrap((sC/usr/lib/python2.7/site-packages/pip/_vendor/colorama/initialise.pytinits     cCs4tdk rtt_ntdk r0tt_ndS(N(RRRRR R(((sC/usr/lib/python2.7/site-packages/pip/_vendor/colorama/initialise.pytdeinit3s   cos%t||z dVWdtXdS(N(RR(targstkwargs((sC/usr/lib/python2.7/site-packages/pip/_vendor/colorama/initialise.pyt colorama_text:s  cCs4tdk rtt_ntdk r0tt_ndS(N(R RRRR R(((sC/usr/lib/python2.7/site-packages/pip/_vendor/colorama/initialise.pytreinitCs   cCsC|r?t|d|d|d|}|jr?|j}q?n|S(NRRR(Rt should_wraptstream(RRRRRtwrapper((sC/usr/lib/python2.7/site-packages/pip/_vendor/colorama/initialise.pyR Js   (Rt contextlibRt ansitowin32RRRR R R tFalseR RRRRtcontextmanagerRRR (((sC/usr/lib/python2.7/site-packages/pip/_vendor/colorama/initialise.pyts      PKZ! jPP+site-packages/pip/_vendor/colorama/ansi.pyonu[ abc@sdZdZdZdZdZdZddZddZd efd YZ d efd YZ d e fdYZ de fdYZ de fdYZ e Ze Ze Ze ZdS(s This module generates ANSI character codes to printing colors to terminals. 
See: http://en.wikipedia.org/wiki/ANSI_escape_code s]scCstt|dS(Ntm(tCSItstr(tcode((s=/usr/lib/python2.7/site-packages/pip/_vendor/colorama/ansi.pyt code_to_chars scCstd|tS(Ns2;(tOSCtBEL(ttitle((s=/usr/lib/python2.7/site-packages/pip/_vendor/colorama/ansi.pyt set_titlesicCstt|dS(NtJ(RR(tmode((s=/usr/lib/python2.7/site-packages/pip/_vendor/colorama/ansi.pyt clear_screenscCstt|dS(NtK(RR(R ((s=/usr/lib/python2.7/site-packages/pip/_vendor/colorama/ansi.pyt clear_linest AnsiCodescBseZdZRS(cCsRxKt|D]=}|jds t||}t||t|q q WdS(Nt_(tdirt startswithtgetattrtsetattrR(tselftnametvalue((s=/usr/lib/python2.7/site-packages/pip/_vendor/colorama/ansi.pyt__init__s(t__name__t __module__R(((s=/usr/lib/python2.7/site-packages/pip/_vendor/colorama/ansi.pyRst AnsiCursorcBsGeZddZddZddZddZdddZRS(icCstt|dS(NtA(RR(Rtn((s=/usr/lib/python2.7/site-packages/pip/_vendor/colorama/ansi.pytUP%scCstt|dS(NtB(RR(RR((s=/usr/lib/python2.7/site-packages/pip/_vendor/colorama/ansi.pytDOWN'scCstt|dS(NtC(RR(RR((s=/usr/lib/python2.7/site-packages/pip/_vendor/colorama/ansi.pytFORWARD)scCstt|dS(NtD(RR(RR((s=/usr/lib/python2.7/site-packages/pip/_vendor/colorama/ansi.pytBACK+scCs tt|dt|dS(Nt;tH(RR(Rtxty((s=/usr/lib/python2.7/site-packages/pip/_vendor/colorama/ansi.pytPOS-s(RRRRR!R#R((((s=/usr/lib/python2.7/site-packages/pip/_vendor/colorama/ansi.pyR$s     tAnsiForecBsneZdZdZdZdZdZdZdZdZ dZ d Z d Z d Z d Zd ZdZdZdZRS(iii i!i"i#i$i%i'iZi[i\i]i^i_i`ia(RRtBLACKtREDtGREENtYELLOWtBLUEtMAGENTAtCYANtWHITEtRESETt LIGHTBLACK_EXt LIGHTRED_EXt LIGHTGREEN_EXtLIGHTYELLOW_EXt LIGHTBLUE_EXtLIGHTMAGENTA_EXt LIGHTCYAN_EXt LIGHTWHITE_EX(((s=/usr/lib/python2.7/site-packages/pip/_vendor/colorama/ansi.pyR)1s"tAnsiBackcBsneZdZdZdZdZdZdZdZdZ dZ d Z d Z d Z d Zd ZdZdZdZRS(i(i)i*i+i,i-i.i/i1idieifigihiiijik(RRR*R+R,R-R.R/R0R1R2R3R4R5R6R7R8R9R:(((s=/usr/lib/python2.7/site-packages/pip/_vendor/colorama/ansi.pyR;Gs"t AnsiStylecBs eZdZdZdZdZRS(iiii(RRtBRIGHTtDIMtNORMALt RESET_ALL(((s=/usr/lib/python2.7/site-packages/pip/_vendor/colorama/ansi.pyR<]sN(t__doc__RRRRRR R tobjectRRR)R;R<tForetBacktStyletCursor(((s=/usr/lib/python2.7/site-packages/pip/_vendor/colorama/ansi.pyts          PKZ.,site-packages/pip/_vendor/colorama/win32.pycnu[ abc@s}dZdZy?ddlZddlmZeejZddlmZWn/eefk r|dZdZ dZ nXddlm Z m Z mZmZejZd e fd YZejjZejge_eje_ejjZejeege_eje_ejj Zejejge_eje_ejjZ ejege _eje _ejj!Z"ejeejeeejge"_eje"_ejj#Z$ejejejeeejge$_eje$_ejj%Z&ej'ge&_eje&_ieee6eee6Z(d Z ed Zd Z e)dZdZ*dZ#dZ+dS(iiiN(t LibraryLoader(twintypescGsdS(N(tNone(t_((s>/usr/lib/python2.7/site-packages/pip/_vendor/colorama/win32.pyttcGsdS(N(R(R((s>/usr/lib/python2.7/site-packages/pip/_vendor/colorama/win32.pyRR(tbyreft Structuretc_chartPOINTERtCONSOLE_SCREEN_BUFFER_INFOcBsPeZdZdefdefdejfdejfdefgZdZRS(sstruct in wincon.h.tdwSizetdwCursorPositiont wAttributestsrWindowtdwMaximumWindowSizec Cshd|jj|jj|jj|jj|j|jj|jj|jj|jj |j j|j jf S(Ns"(%d,%d,%d,%d,%d,%d,%d,%d,%d,%d,%d)( R tYtXR R RtToptLefttBottomtRightR(tself((s>/usr/lib/python2.7/site-packages/pip/_vendor/colorama/win32.pyt__str__s $( t__name__t __module__t__doc__tCOORDRtWORDt SMALL_RECTt_fields_R(((s>/usr/lib/python2.7/site-packages/pip/_vendor/colorama/win32.pyR s    cCs2tt}t}t|t|}t|S(N(thandlestSTDOUTR t_GetConsoleScreenBufferInfoRtbool(thandletcsbitsuccess((s>/usr/lib/python2.7/site-packages/pip/_vendor/colorama/win32.pyt winapi_testas   cCs,t|}t}t|t|}|S(N(RR R!R(t stream_idR#R$R%((s>/usr/lib/python2.7/site-packages/pip/_vendor/colorama/win32.pytGetConsoleScreenBufferInfohs   
cCst|}t||S(N(Rt_SetConsoleTextAttribute(R'tattrsR#((s>/usr/lib/python2.7/site-packages/pip/_vendor/colorama/win32.pytSetConsoleTextAttributeos cCst|}|jdks*|jdkr.dSt|jd|jd}|rttj}|j|j7_|j|j7_nt|}t ||S(Nii( RRRR(R RRRRt_SetConsoleCursorPosition(R'tpositiontadjusttadjusted_positiontsrR#((s>/usr/lib/python2.7/site-packages/pip/_vendor/colorama/win32.pytSetConsoleCursorPositionss  cCs_t|}t|j}tj|}tjd}t||||t|}|jS(Ni(RRtencodeRtDWORDt_FillConsoleOutputCharacterARtvalue(R'tchartlengthtstartR#t num_writtenR%((s>/usr/lib/python2.7/site-packages/pip/_vendor/colorama/win32.pytFillConsoleOutputCharacters cCsSt|}tj|}tj|}tjd}t||||t|S(sa FillConsoleOutputAttribute( hConsole, csbi.wAttributes, dwConSize, coordScreen, &cCharsWritten )i(RRRR3t_FillConsoleOutputAttributeR(R'tattrR7R8R#t attributeR9((s>/usr/lib/python2.7/site-packages/pip/_vendor/colorama/win32.pytFillConsoleOutputAttributes  cCs t|S(N(t_SetConsoleTitleW(ttitle((s>/usr/lib/python2.7/site-packages/pip/_vendor/colorama/win32.pytSetConsoleTitles(,R tSTDERRtctypesRtWinDLLtwindllRtAttributeErrort ImportErrorRR+R&RRRR t_COORDRR tkernel32t GetStdHandlet _GetStdHandleR3targtypestHANDLEtrestypeR(R!tBOOLR)RR1R,tFillConsoleOutputCharacterAR4R>R;tSetConsoleTitleAR?tLPCSTRRtTrueR:RA(((s>/usr/lib/python2.7/site-packages/pip/_vendor/colorama/win32.pytsn   "                      PKZΦ&$$2site-packages/pip/_vendor/colorama/ansitowin32.pyonu[ abc@sddlZddlZddlZddlmZmZmZmZddlm Z m Z m Z ddl m Z mZdZe dk re ZndZdZdefd YZd efd YZdS( iNi(tAnsiForetAnsiBackt AnsiStyletStyle(tWinTermtWinColortWinStyle(twindllt winapi_testcCst|d p|jS(Ntclosed(thasattrR (tstream((sD/usr/lib/python2.7/site-packages/pip/_vendor/colorama/ansitowin32.pytis_stream_closedscCst|do|jS(Ntisatty(R R (R ((sD/usr/lib/python2.7/site-packages/pip/_vendor/colorama/ansitowin32.pytis_a_ttyst StreamWrappercBs)eZdZdZdZdZRS(s Wraps a stream (such as stdout), acting as a transparent proxy for all attribute access apart from method 'write()', which is delegated to our Converter instance. cCs||_||_dS(N(t_StreamWrapper__wrappedt_StreamWrapper__convertor(tselftwrappedt converter((sD/usr/lib/python2.7/site-packages/pip/_vendor/colorama/ansitowin32.pyt__init__s cCst|j|S(N(tgetattrR(Rtname((sD/usr/lib/python2.7/site-packages/pip/_vendor/colorama/ansitowin32.pyt __getattr__$scCs|jj|dS(N(Rtwrite(Rttext((sD/usr/lib/python2.7/site-packages/pip/_vendor/colorama/ansitowin32.pyR's(t__name__t __module__t__doc__RRR(((sD/usr/lib/python2.7/site-packages/pip/_vendor/colorama/ansitowin32.pyRs  t AnsiToWin32cBseZdZejdZejdZddedZ dZ dZ dZ dZ dZd Zd Zd Zd Zd ZRS(s Implements a 'write()' method which, on Windows, will strip ANSI character sequences from the text, and if outputting to a tty, will convert them into win32 function calls. s?\[((?:\d|;)*)([a-zA-Z])?s?\]((?:.|;)*?)()?cCs||_||_t|||_tjdk}|o?t}|dkrq|pkt| okt | }n||_ |dkr|ot| ot |}n||_ |j |_ |jtjk|_dS(Ntnt(Rt autoresetRR tosRRtNoneR Rtstriptconverttget_win32_callst win32_callstsyststderrt on_stderr(RRR$R#R t on_windowstconversion_supported((sD/usr/lib/python2.7/site-packages/pip/_vendor/colorama/ansitowin32.pyR4s   #  " cCs|jp|jp|jS(sj True if this class is actually needed. If false, then the output stream will not be affected, nor will win32 calls be issued, so wrapping stdout is not actually required. 
This will generally be False on non-Windows platforms, unless optional functionality like autoreset has been requested using kwargs to init() (R$R#R (R((sD/usr/lib/python2.7/site-packages/pip/_vendor/colorama/ansitowin32.pyt should_wrapUscCs||jrutrui&tjftj6tjtjftj6tjtjftj 6tjtjftj6tj t j ft j 6tj t jft j6tj t jft j6tj t jft j6tj t jft j6tj t jft j6tj t jft j6tj t jft j6tj ft j6tj t j tft j6tj t jtft j6tj t jtft j6tj t jtft j6tj t jtft j6tj t jtft j6tj t jtft j6tj t jtft j6tj t j ft!j 6tj t jft!j6tj t jft!j6tj t jft!j6tj t jft!j6tj t jft!j6tj t jft!j6tj t jft!j6tj ft!j6tj t j tft!j6tj t jtft!j6tj t jtft!j6tj t jtft!j6tj t jtft!j6tj t jtft!j6tj t jtft!j6tj t jtft!j6St"S(N(#R$twintermt reset_allRt RESET_ALLtstyleRtBRIGHTtNORMALtDIMtforeRtBLACKRtREDtGREENtYELLOWtBLUEtMAGENTAtCYANtGREYtWHITEtRESETtTruet LIGHTBLACK_EXt LIGHTRED_EXt LIGHTGREEN_EXtLIGHTYELLOW_EXt LIGHTBLUE_EXtLIGHTMAGENTA_EXt LIGHTCYAN_EXt LIGHTWHITE_EXtbackRtdict(R((sD/usr/lib/python2.7/site-packages/pip/_vendor/colorama/ansitowin32.pyR%_sRcCsY|js|jr"|j|n|jj||jj|jrU|jndS(N(R#R$twrite_and_convertRRtflushR R.(RR((sD/usr/lib/python2.7/site-packages/pip/_vendor/colorama/ansitowin32.pyRs   cCsP|jr|jddn0|j rLt|j rL|jjtjndS(Ntmi(i(R$t call_win32R#R RRRR/(R((sD/usr/lib/python2.7/site-packages/pip/_vendor/colorama/ansitowin32.pyR.s cCsd}|j|}xX|jj|D]D}|j\}}|j||||j|j|}q(W|j||t|dS(s Write the given text to our wrapped stream, stripping any ANSI sequences from the text, and optionally converting them into win32 calls. iN(t convert_osct ANSI_CSI_REtfinditertspantwrite_plain_textt convert_ansitgroupstlen(RRtcursortmatchtstarttend((sD/usr/lib/python2.7/site-packages/pip/_vendor/colorama/ansitowin32.pyRJs cCs7||kr3|jj|||!|jjndS(N(RRRK(RRRXRY((sD/usr/lib/python2.7/site-packages/pip/_vendor/colorama/ansitowin32.pyRRs cCs2|jr.|j||}|j||ndS(N(R$textract_paramsRM(Rt paramstringtcommandtparams((sD/usr/lib/python2.7/site-packages/pip/_vendor/colorama/ansitowin32.pyRSs cCs|dkrQtd|jdD}xt|dkrM|d }q.Wn^td|jdD}t|dkr|dkrd }q|d krd }qn|S( NtHfcss3|])}t|dkr't|ndVqdS(iiN(RUtint(t.0tp((sD/usr/lib/python2.7/site-packages/pip/_vendor/colorama/ansitowin32.pys st;iicss-|]#}t|dkrt|VqdS(iN(RUR_(R`Ra((sD/usr/lib/python2.7/site-packages/pip/_vendor/colorama/ansitowin32.pys sitJKmtABCD(i(i(i(ttupletsplitRU(RR\R[R]((sD/usr/lib/python2.7/site-packages/pip/_vendor/colorama/ansitowin32.pyRZs     c Cse|dkrrxR|D]X}||jkr|j|}|d}|d}td|j}|||qqWn|dkrtj|dd|jn|dkrtj|dd|jn|dkrtj|d|jnx|dkra|d}id| fd 6d|fd 6|dfd 6| dfd 6|\} } tj| | d|jndS( NRLiiR)tJtKR^RdtAtBtCtD(R&RIR)R-t erase_screent erase_linetset_cursor_positiont cursor_adjust( RR\R]tparamt func_argstfunctargstkwargstntxty((sD/usr/lib/python2.7/site-packages/pip/_vendor/colorama/ansitowin32.pyRMs$          FcCsx|jj|D]~}|j\}}|| ||}|j\}}|dkr|jd}|ddkrtj|dqqqW|S(NsRbit02i(t ANSI_OSC_RERPRQRTRfR-t set_title(RRRWRXRYR[R\R]((sD/usr/lib/python2.7/site-packages/pip/_vendor/colorama/ansitowin32.pyRNs N(RRRtretcompileRORzR"tFalseRR,R%RR.RJRRRSRZRMRN(((sD/usr/lib/python2.7/site-packages/pip/_vendor/colorama/ansitowin32.pyR+s! 
,      (R|R'R!tansiRRRRR-RRRtwin32RRR"R RtobjectRR(((sD/usr/lib/python2.7/site-packages/pip/_vendor/colorama/ansitowin32.pyts   "    PKZs{'site-packages/pip/_vendor/re-vendor.pycnu[ abc@sddlZddlZddlZddlZddlZejjejjeZ dZ dZ dZ e dkreejdkre nejddkre qejdd kre qe ndS( iNcCsdGHtjddS(Ns"Usage: re-vendor.py [clean|vendor]i(tsystexit(((s9/usr/lib/python2.7/site-packages/pip/_vendor/re-vendor.pytusage scCsqxNtjtD]=}tjjt|}tjj|rtj|qqWtjtjjtddS(Nssix.py( tostlistdirtheretpathtjointisdirtshutiltrmtreetunlink(tfntdirname((s9/usr/lib/python2.7/site-packages/pip/_vendor/re-vendor.pytclean s cCsGtjddtddgx$tjdD]}tj|q,WdS(Ntinstalls-ts-rs vendor.txts *.egg-info(tpiptmainRtglobR R (R ((s9/usr/lib/python2.7/site-packages/pip/_vendor/re-vendor.pytvendorst__main__iiRR(RRRRR RtabspathR t__file__RRRRt__name__tlentargv(((s9/usr/lib/python2.7/site-packages/pip/_vendor/re-vendor.pyts            PKZi`胟4site-packages/pip/_vendor/pkg_resources/__init__.pycnu[ abcF@@s dZddlmZddlZddlZddlZddlZddlZddlZddl Z ddl Z ddl Z ddl Z ddl Z ddlZddlZddlZddlZddlZddlZddlZddlZddlZddlmZyddlZWnek rEddlZnXddlmZddlmZm Z m!Z!ddlm"Z"y&ddlm#Z#m$Z$m%Z%e&Z'Wnek re(Z'nXdd lm)Z*dd l+m,Z,m-Z-yddl.j/Z0e0j1Wnek re2Z0nXdd lm3Z3dd lm4Z4e5d e5de5de5dddfej6koddfknrdZ7e j8e7ne2Z9e2Z:de;fdYZ<de=fdYZ>de>e4j?j@fdYZAde>e4j?jBfdYZCdZDiZEdZFdZGdZHdZId ZJd!ZKd"ZLd#ZMZNd$ZOd%d&d'd(d)d*d+d,d-d.d/d0d1d2d3d4d5d6d7d8d9d:d;d<d=d>d?d@dAdBdCdDddEdFdGdHdIdJdKdLdMdNdOdPdQdRdSdTdUdVdWdXdYdZd[d\d]d^d_d`dadbdcdddedfdgdhdigFZPd@eQfdjYZRdAeRfdkYZSdleSfdmYZTdBeRfdnYZUdCeRfdoYZViZWej?d ZXdZYdpZZdqZ[dZ\drZ]dsZ^dtZ_gduZ`dvZadwZbejcdxZdejcdyZeebZfdzZgd{ZhehZid|Zjd}Zke2d~ZldZmdXfdYZndYenfdYZod;e=fdYZpdeqfdYZrd:e=fdYZsesZtdDeufdYZvd<fdYZwdZxdZydZzdZ{dZ|dZ}e2dZ~d_fdYZe^e=ed`efdYZdaefdYZejd]efdYZeZdeqfdYZdefdYZde jfdYZdbefdYZe^e jedZefdYZd[efdYZd\efdYZeFddidZe(dZe(dZee jee(dZee=edZe(dZeejeee0dr(ee0jeneFddieFddidZdZdZdZe2dZdZeejeee jeee0dree0jendZee=edZidZdZdZdZejcdjZejcdejejBjZd?e=fdYZdZdZd=e=fdYZdefdYZdefdYZied6ed6ed6ZdZdefdYZdZd>e4jjfdYZdZdZdZdZdZdZe jdde<de&dZeedZedZdS(sZ Package resource API -------------------- A resource is a logical file contained within a package, or a logical subdirectory thereof. The package resource API expects resource names to have their path parts separated with ``/``, *not* whatever the local path separator is. Do not use os.path operations to manipulate resource names being passed into the API. The package resource API is designed to work with normal filesystem packages, .egg files, and unpacked .egg files. It can also work in a limited way with .zip files and with custom PEP 302 loaders that support the ``get_data()`` method. i(tabsolute_importN(t get_importer(tsix(turllibtmaptfilter(tutime(tmkdirtrenametunlink(topen(tisdirtsplit(tappdirs(t packagingspip._vendor.packaging.versions pip._vendor.packaging.specifierss"pip._vendor.packaging.requirementsspip._vendor.packaging.markersisLSupport for Python 3.0-3.2 has been dropped. Future versions will fail here.t PEP440WarningcB@seZdZRS(sa Used when there is an issue with a version or specifier not complying with PEP 440. 
(t__name__t __module__t__doc__(((sF/usr/lib/python2.7/site-packages/pip/_vendor/pkg_resources/__init__.pyR[st_SetuptoolsVersionMixincB@sYeZdZdZdZdZdZdZdZdZ dZ RS( cC@stt|jS(N(tsuperRt__hash__(tself((sF/usr/lib/python2.7/site-packages/pip/_vendor/pkg_resources/__init__.pyRcscC@s9t|trt||kStt|j|SdS(N(t isinstancettupleRRt__lt__(Rtother((sF/usr/lib/python2.7/site-packages/pip/_vendor/pkg_resources/__init__.pyRfscC@s9t|trt||kStt|j|SdS(N(RRRRt__le__(RR((sF/usr/lib/python2.7/site-packages/pip/_vendor/pkg_resources/__init__.pyRlscC@s9t|trt||kStt|j|SdS(N(RRRRt__eq__(RR((sF/usr/lib/python2.7/site-packages/pip/_vendor/pkg_resources/__init__.pyRrscC@s9t|trt||kStt|j|SdS(N(RRRRt__ge__(RR((sF/usr/lib/python2.7/site-packages/pip/_vendor/pkg_resources/__init__.pyRxscC@s9t|trt||kStt|j|SdS(N(RRRRt__gt__(RR((sF/usr/lib/python2.7/site-packages/pip/_vendor/pkg_resources/__init__.pyR~scC@s9t|trt||kStt|j|SdS(N(RRRRt__ne__(RR((sF/usr/lib/python2.7/site-packages/pip/_vendor/pkg_resources/__init__.pyRscC@st||S(N(R(Rtkey((sF/usr/lib/python2.7/site-packages/pip/_vendor/pkg_resources/__init__.pyt __getitem__sc#@stjdtjidd6dd6dd6dd6dd 6jfd fd }tjd td dx|t|D] }|VqWdS(Ns(\d+ | [a-z]+ | \.| -)tctpretpreviewsfinal-t-trct@tdevc3@suxij|D]X}||}| s|dkr>qn|d dkr_|jdVqd|VqWdVdS(Nt.it 0123456789it*s*final(R tzfill(tstpart(t component_retreplace(sF/usr/lib/python2.7/site-packages/pip/_vendor/pkg_resources/__init__.pyt_parse_version_partss c@sg}x|jD]}|jdr|dkrdx'|r`|ddkr`|jq=Wnx'|r|ddkr|jqgWn|j|qWt|S(NR+s*finalis*final-t00000000(tlowert startswithtpoptappendR(R-tpartsR.(R1(sF/usr/lib/python2.7/site-packages/pip/_vendor/pkg_resources/__init__.pytold_parse_versions sYou have iterated over the result of pkg_resources.parse_version. This is a legacy behavior which is inconsistent with the new version class introduced in setuptools 8.0. In most cases, conversion to a tuple is unnecessary. For comparison of versions, sort the Version instances directly. If you have another use case requiring the tuple, please file a bug with the setuptools project describing that need.t stackleveli(tretcompiletVERBOSEtgettwarningstwarntRuntimeWarningtstr(RR8R.((R1R/R0sF/usr/lib/python2.7/site-packages/pip/_vendor/pkg_resources/__init__.pyt__iter__s ( RRRRRRRRRR!RB(((sF/usr/lib/python2.7/site-packages/pip/_vendor/pkg_resources/__init__.pyRbs        tSetuptoolsVersioncB@seZRS((RR(((sF/usr/lib/python2.7/site-packages/pip/_vendor/pkg_resources/__init__.pyRCstSetuptoolsLegacyVersioncB@seZRS((RR(((sF/usr/lib/python2.7/site-packages/pip/_vendor/pkg_resources/__init__.pyRDscC@s3yt|SWntjjk r.t|SXdS(N(RCRtversiontInvalidVersionRD(tv((sF/usr/lib/python2.7/site-packages/pip/_vendor/pkg_resources/__init__.pyt parse_versionscK@s-tj|tjtj||dS(N(tglobalstupdatet _state_varstdicttfromkeys(tvartypetkw((sF/usr/lib/python2.7/site-packages/pip/_vendor/pkg_resources/__init__.pyt_declare_statescC@sLi}t}x6tjD](\}}|d|||||tcC@s{t}tj|}|dk rwtjdkrwy-ddjtd |jdf}Wqwt k rsqwXn|S(sZReturn this platform's maximum compatible version. distutils.util.get_platform() normally reports the minimum version of Mac OS X that would be required to *use* extensions produced by distutils. But what we want when checking compatibility is to know the version of Mac OS X that we are *running*. To allow usage of packages that explicitly require a newer version of Mac OS X, we must also know the current version of the OS. If this condition occurs for any other platform with a version in its platform strings, this function should be extended accordingly. 
tdarwins macosx-%s-%sR)iiN( tget_build_platformtmacosVersionStringtmatchRatsystplatformtjoint _macosx_verstgroupt ValueError(tplattm((sF/usr/lib/python2.7/site-packages/pip/_vendor/pkg_resources/__init__.pytget_supported_platforms - trequiret run_scriptt get_providertget_distributiontload_entry_pointt get_entry_maptget_entry_infotiter_entry_pointstresource_stringtresource_streamtresource_filenametresource_listdirtresource_existstresource_isdirtdeclare_namespacet working_settadd_activation_listenertfind_distributionstset_extraction_pathtcleanup_resourcestget_default_cachet Environmentt WorkingSettResourceManagert Distributiont Requirementt EntryPointtResolutionErrortVersionConflicttDistributionNotFoundt UnknownExtratExtractionErrortparse_requirementsRHt safe_namet safe_versiont get_platformtcompatible_platformst yield_linestsplit_sectionst safe_extrat to_filenametinvalid_markertevaluate_markertensure_directorytnormalize_pathtEGG_DISTt BINARY_DISTt SOURCE_DISTt CHECKOUT_DISTt DEVELOP_DISTtIMetadataProvidertIResourceProvidert FileMetadatat PathMetadatat EggMetadatat EmptyProvidertempty_providert NullProvidert EggProvidertDefaultProvidert ZipProvidertregister_findertregister_namespace_handlertregister_loader_typetfixup_namespace_packagesRtrun_maintAvailableDistributionscB@seZdZdZRS(s.Abstract base for dependency resolution errorscC@s|jjt|jS(N(t __class__RtreprRb(R((sF/usr/lib/python2.7/site-packages/pip/_vendor/pkg_resources/__init__.pyt__repr__Is(RRRR(((sF/usr/lib/python2.7/site-packages/pip/_vendor/pkg_resources/__init__.pyRFscB@sDeZdZdZedZedZdZdZRS(s An already-installed version conflicts with the requested version. Should be initialized with the installed Distribution and the requested Requirement. s3{self.dist} is installed but {self.req} is requiredcC@s |jdS(Ni(Rb(R((sF/usr/lib/python2.7/site-packages/pip/_vendor/pkg_resources/__init__.pytdistWscC@s |jdS(Ni(Rb(R((sF/usr/lib/python2.7/site-packages/pip/_vendor/pkg_resources/__init__.pytreq[scC@s|jjtS(N(t _templatetformattlocals(R((sF/usr/lib/python2.7/site-packages/pip/_vendor/pkg_resources/__init__.pytreport_scC@s$|s |S|j|f}t|S(st If required_by is non-empty, return a version of self that is a ContextualVersionConflict. (RbtContextualVersionConflict(Rt required_byRb((sF/usr/lib/python2.7/site-packages/pip/_vendor/pkg_resources/__init__.pyt with_contextbs( RRRRtpropertyRRRR(((sF/usr/lib/python2.7/site-packages/pip/_vendor/pkg_resources/__init__.pyRMs  RcB@s*eZdZejdZedZRS(s A VersionConflict that accepts a third parameter, the set of the requirements that required the installed Distribution. 
s by {self.required_by}cC@s |jdS(Ni(Rb(R((sF/usr/lib/python2.7/site-packages/pip/_vendor/pkg_resources/__init__.pyRus(RRRRRRR(((sF/usr/lib/python2.7/site-packages/pip/_vendor/pkg_resources/__init__.pyRms cB@sSeZdZdZedZedZedZdZdZ RS(s&A requested distribution was not foundsSThe '{self.req}' distribution was not found and is required by {self.requirers_str}cC@s |jdS(Ni(Rb(R((sF/usr/lib/python2.7/site-packages/pip/_vendor/pkg_resources/__init__.pyRscC@s |jdS(Ni(Rb(R((sF/usr/lib/python2.7/site-packages/pip/_vendor/pkg_resources/__init__.pyt requirersscC@s|js dSdj|jS(Nsthe applications, (RRk(R((sF/usr/lib/python2.7/site-packages/pip/_vendor/pkg_resources/__init__.pyt requirers_strs cC@s|jjtS(N(RRR(R((sF/usr/lib/python2.7/site-packages/pip/_vendor/pkg_resources/__init__.pyRscC@s |jS(N(R(R((sF/usr/lib/python2.7/site-packages/pip/_vendor/pkg_resources/__init__.pyt__str__s( RRRRRRRRRR(((sF/usr/lib/python2.7/site-packages/pip/_vendor/pkg_resources/__init__.pyRzs cB@seZdZRS(s>Distribution doesn't have an "extra feature" of the given name(RRR(((sF/usr/lib/python2.7/site-packages/pip/_vendor/pkg_resources/__init__.pyRsiiicC@s|t|scB@s>eZdZdZdZdZdZdZRS(cC@sdS(s;Does the package's distribution contain the named metadata?N((R((sF/usr/lib/python2.7/site-packages/pip/_vendor/pkg_resources/__init__.pyt has_metadataDRdcC@sdS(s'The named metadata resource as a stringN((R((sF/usr/lib/python2.7/site-packages/pip/_vendor/pkg_resources/__init__.pyt get_metadataGRdcC@sdS(sYield named metadata resource as list of non-blank non-comment lines Leading and trailing whitespace is stripped from each line, and lines with ``#`` as the first non-blank character are omitted.N((R((sF/usr/lib/python2.7/site-packages/pip/_vendor/pkg_resources/__init__.pytget_metadata_linesJRdcC@sdS(s>Is the named metadata a directory? (like ``os.path.isdir()``)N((R((sF/usr/lib/python2.7/site-packages/pip/_vendor/pkg_resources/__init__.pytmetadata_isdirPRdcC@sdS(s?List of metadata names in the directory (like ``os.listdir()``)N((R((sF/usr/lib/python2.7/site-packages/pip/_vendor/pkg_resources/__init__.pytmetadata_listdirSRdcC@sdS(s=Execute the named script in the supplied namespace dictionaryN((Rt namespace((sF/usr/lib/python2.7/site-packages/pip/_vendor/pkg_resources/__init__.pyRsVRd(RRRRRRRRs(((sF/usr/lib/python2.7/site-packages/pip/_vendor/pkg_resources/__init__.pyRCs      cB@sDeZdZdZdZdZdZdZdZRS(s3An object that provides access to package resourcescC@sdS(sdReturn a true filesystem path for `resource_name` `manager` must be an ``IResourceManager``N((tmanagert resource_name((sF/usr/lib/python2.7/site-packages/pip/_vendor/pkg_resources/__init__.pytget_resource_filename]RdcC@sdS(siReturn a readable file-like object for `resource_name` `manager` must be an ``IResourceManager``N((RR((sF/usr/lib/python2.7/site-packages/pip/_vendor/pkg_resources/__init__.pytget_resource_streambRdcC@sdS(smReturn a string containing the contents of `resource_name` `manager` must be an ``IResourceManager``N((RR((sF/usr/lib/python2.7/site-packages/pip/_vendor/pkg_resources/__init__.pytget_resource_stringgRdcC@sdS(s,Does the package contain the named resource?N((R((sF/usr/lib/python2.7/site-packages/pip/_vendor/pkg_resources/__init__.pyt has_resourcelRdcC@sdS(s>Is the named resource a directory? 
(like ``os.path.isdir()``)N((R((sF/usr/lib/python2.7/site-packages/pip/_vendor/pkg_resources/__init__.pyRoRdcC@sdS(s?List of resource names in the directory (like ``os.listdir()``)N((R((sF/usr/lib/python2.7/site-packages/pip/_vendor/pkg_resources/__init__.pyR}rRd( RRRRRRRRR}(((sF/usr/lib/python2.7/site-packages/pip/_vendor/pkg_resources/__init__.pyRZs     cB@seZdZddZedZedZdZdZ dZ ddZ dZ d Z deed Zdded Zdded Zd ZedZdZdZdZRS(sDA collection of active distributions on sys.path (or a similar list)cC@s^g|_i|_i|_g|_|dkr<tj}nx|D]}|j|qCWdS(s?Create working set from list of path entries (default=sys.path)N(tentriest entry_keystby_keyt callbacksRaRiRt add_entry(RRtentry((sF/usr/lib/python2.7/site-packages/pip/_vendor/pkg_resources/__init__.pyt__init__ys       cC@se|}yddlm}Wntk r1|SXy|j|Wntk r`|j|SX|S(s1 Prepare the master working set. i(t __requires__(t__main__RRRrRt_build_from_requirements(tclstwsR((sF/usr/lib/python2.7/site-packages/pip/_vendor/pkg_resources/__init__.pyt _build_masters   cC@s|g}t|}|j|t}x|D]}|j|q4Wx0tjD]%}||jkrU|j|qUqUW|jtj(|S(sQ Build a working set from a requirement spec. Rewrites sys.path. (RtresolveRtaddRiRRR (Rtreq_specRtreqstdistsRR ((sF/usr/lib/python2.7/site-packages/pip/_vendor/pkg_resources/__init__.pyRs    cC@sT|jj|g|jj|x*t|tD]}|j||tq3WdS(sAdd a path item to ``.entries``, finding any distributions on it ``find_distributions(entry, True)`` is used to find distributions corresponding to the path entry, and they are added. `entry` is always appended to ``.entries``, even if it is already present. (This is because ``sys.path`` can contain the same value more than once, and the ``.entries`` of the ``sys.path`` WorkingSet should always equal ``sys.path``.) N(R t setdefaultRR6RRRR(RR R((sF/usr/lib/python2.7/site-packages/pip/_vendor/pkg_resources/__init__.pyR s cC@s|jj|j|kS(s9True if `dist` is the active distribution for its project(R R=R (RR((sF/usr/lib/python2.7/site-packages/pip/_vendor/pkg_resources/__init__.pyt __contains__scC@sC|jj|j}|dk r?||kr?t||n|S(sFind a distribution matching requirement `req` If there is an active distribution for the requested project, this returns it as long as it meets the version requirement specified by `req`. But, if there is an active distribution for the project and it does *not* meet the `req` requirement, ``VersionConflict`` is raised. If there is no active distribution for the requested project, ``None`` is returned. N(R R=R RaR(RRR((sF/usr/lib/python2.7/site-packages/pip/_vendor/pkg_resources/__init__.pyRs cc@sgx`|D]X}|j|}|dkrGx4|jD] }|Vq5Wq||kr||VqqWdS(sYield entry point objects from `group` matching `name` If `name` is None, yields all entry points in `group` from all distributions in the working set, otherwise only ones matching both `group` and `name` are yielded (in distribution order). N(RwRatvalues(RRmRRRtep((sF/usr/lib/python2.7/site-packages/pip/_vendor/pkg_resources/__init__.pyRys    cC@sQtjdj}|d}|j||d<|j|dj||dS(s?Locate distribution for `requires` and run `script_name` scriptiRiN(RiRRR\RrRs(RtrequiresRRR((sF/usr/lib/python2.7/site-packages/pip/_vendor/pkg_resources/__init__.pyRss    cc@spi}xc|jD]X}||jkr+qnx:|j|D]+}||kr9d||<|j|Vq9q9WqWdS(sYield distributions for non-duplicate projects in the working set The yield order is the order in which the items' path entries were added to the working set. 
iN(RR R (RtseentitemR ((sF/usr/lib/python2.7/site-packages/pip/_vendor/pkg_resources/__init__.pyRBs  cC@s|r"|j|j|d|n|dkr:|j}n|jj|g}|jj|jg}| r|j|jkrdS||j|j<|j|kr|j|jn|j|kr|j|jn|j |dS(sAdd `dist` to working set, associated with `entry` If `entry` is unspecified, it defaults to the ``.location`` of `dist`. On exit from this routine, `entry` is added to the end of the working set's ``.entries`` (if it wasn't already present). `dist` is only added to the working set if it's for a project that doesn't already have a distribution in the set, unless `replace=True`. If it's added, any callbacks registered with the ``subscribe()`` method will be called. R0N( t insert_onRRatlocationR RR R R6t _added_new(RRR tinsertR0tkeystkeys2((sF/usr/lib/python2.7/site-packages/pip/_vendor/pkg_resources/__init__.pyRs   cC@s"t|ddd}i}i}g}t}tjt} x|r|jd} | |krmqFn|j| sqFn|j| j} | dkr|j j| j} | dks| | krq|rq|} |dkr| dkrt |j }qt g}t g} n|j| | |} || j<| dkrq| j| d} t| | qqn|j| n| | kr| | }t| | j|n| j| jddd}|j|x/|D]'}| |j| j| j|| Map each requirement to the extras that demanded it. c@s:fd|jddD}j p9t|S(s Evaluate markers for req against each extra that demanded it. Return False if the req has a marker and fails evaluation. Otherwise, return True. c3@s(|]}jji|d6VqdS(textraN(tmarkertevaluate(t.0RM(R(sF/usr/lib/python2.7/site-packages/pip/_vendor/pkg_resources/__init__.pys s(N(N(R=RaRNtany(RRt extra_evals((RsF/usr/lib/python2.7/site-packages/pip/_vendor/pkg_resources/__init__.pyR,s (RRRR,(((sF/usr/lib/python2.7/site-packages/pip/_vendor/pkg_resources/__init__.pyR(scB@seZdZd eedZdZdZd dZ dZ dZ d dZ d dZ d Zd Zd ZRS( s5Searchable snapshot of distributions on a search pathcC@s,i|_||_||_|j|dS(s!Snapshot distributions available on a search path Any distributions found on `search_path` are added to the environment. `search_path` should be a sequence of ``sys.path`` items. If not supplied, ``sys.path`` is used. `platform` is an optional string specifying the name of the platform that platform-specific distributions must be compatible with. If unspecified, it defaults to the current platform. `python` is an optional string naming the desired version of Python (e.g. ``'3.3'``); it defaults to the current version. You may explicitly set `platform` (and/or `python`) to ``None`` if you wish to map *all* distributions, not just those compatible with the running platform or Python version. N(t_distmapRjtpythontscan(Rt search_pathRjRT((sF/usr/lib/python2.7/site-packages/pip/_vendor/pkg_resources/__init__.pyRs   cC@sC|jdks0|jdks0|j|jkoBt|j|jS(sIs distribution `dist` acceptable for this environment? The distribution must match the platform and python version requirements specified when this environment was created, or False is returned. N(RTRat py_versionRRj(RR((sF/usr/lib/python2.7/site-packages/pip/_vendor/pkg_resources/__init__.pytcan_addscC@s|j|jj|dS(s"Remove `dist` from the environmentN(RSR tremove(RR((sF/usr/lib/python2.7/site-packages/pip/_vendor/pkg_resources/__init__.pyRY(scC@sQ|dkrtj}nx2|D]*}x!t|D]}|j|q2WqWdS(sdScan `search_path` for distributions usable in this environment Any distributions found are added to the environment. `search_path` should be a sequence of ``sys.path`` items. If not supplied, ``sys.path`` is used. Only distributions conforming to the platform/python version defined at initialization are added. 
N(RaRiRRR(RRVR R((sF/usr/lib/python2.7/site-packages/pip/_vendor/pkg_resources/__init__.pyRU,s    cC@s|j}|jj|gS(sReturn a newest-to-oldest list of distributions for `project_name` Uses case-insensitive `project_name` comparison, assuming all the project's distributions use their project's name converted to all lowercase as their key. (R3RSR=(RR0tdistribution_key((sF/usr/lib/python2.7/site-packages/pip/_vendor/pkg_resources/__init__.pyR!;s cC@su|j|rq|jrq|jj|jg}||krq|j||jdtjddt qqndS(sLAdd `dist` if we ``can_add()`` it and it has not already been added R thashcmptreverseN( RXt has_versionRSRR R6R<toperatort attrgetterR(RRR((sF/usr/lib/python2.7/site-packages/pip/_vendor/pkg_resources/__init__.pyRFs   cC@sW|j|}|dk r|Sx%||jD]}||kr-|Sq-W|j||S(sFind distribution best matching `req` and usable on `working_set` This calls the ``find(req)`` method of the `working_set` to see if a suitable distribution is already active. (This may raise ``VersionConflict`` if an unsuitable version of the project is already active in the specified `working_set`.) If a suitable distribution isn't active, this method returns the newest distribution in the environment that meets the ``Requirement`` in `req`. If no suitable distribution is found, and `installer` is supplied, then the result of calling the environment's ``obtain(req, installer)`` method will be returned. N(RRaR tobtain(RRRR3R((sF/usr/lib/python2.7/site-packages/pip/_vendor/pkg_resources/__init__.pyR-Os   cC@s|dk r||SdS(sObtain a distribution matching `requirement` (e.g. via download) Obtain a distro that matches requirement (e.g. via download). In the base ``Environment`` class, this routine just returns ``installer(requirement)``, unless `installer` is None, in which case None is returned instead. This method is a hook that allows subclasses to attempt other ways of obtaining a distribution before falling back to the `installer` argument.N(Ra(Rt requirementR3((sF/usr/lib/python2.7/site-packages/pip/_vendor/pkg_resources/__init__.pyR`es cc@s0x)|jjD]}||r|VqqWdS(s=Yield the unique project names of the available distributionsN(RSR%(RR ((sF/usr/lib/python2.7/site-packages/pip/_vendor/pkg_resources/__init__.pyRBqs cC@s{t|tr|j|nXt|trdxF|D](}x||D]}|j|qFWq5Wntd|f|S(s2In-place addition of a distribution or environmentsCan't add %r to environment(RRRRR(RRtprojectR((sF/usr/lib/python2.7/site-packages/pip/_vendor/pkg_resources/__init__.pyt__iadd__ws cC@s@|jgdddd}x||fD]}||7}q(W|S(s4Add an environment or distribution to an environmentRjRTN(RRa(RRtnewR2((sF/usr/lib/python2.7/site-packages/pip/_vendor/pkg_resources/__init__.pyt__add__sN(RRRRaRqtPY_MAJORRRXRYRUR!RR-R`RBRcRe(((sF/usr/lib/python2.7/site-packages/pip/_vendor/pkg_resources/__init__.pyRs       cB@seZdZRS(sTAn error occurred extracting a resource The following attributes are available from instances of this exception: manager The resource manager that raised this exception cache_path The base directory for resource extraction original_error The exception instance that caused extraction to fail (RRR(((sF/usr/lib/python2.7/site-packages/pip/_vendor/pkg_resources/__init__.pyRs cB@seZdZdZdZdZdZdZdZ dZ dZ dZ dd Z ed Zd Zd Zed ZRS(s'Manage resource extraction and packagescC@s i|_dS(N(t cached_files(R((sF/usr/lib/python2.7/site-packages/pip/_vendor/pkg_resources/__init__.pyRscC@st|j|S(sDoes the named resource exist?(RtR(Rtpackage_or_requirementR((sF/usr/lib/python2.7/site-packages/pip/_vendor/pkg_resources/__init__.pyR~scC@st|j|S(s,Is the named resource an existing 
directory?(RtR(RRhR((sF/usr/lib/python2.7/site-packages/pip/_vendor/pkg_resources/__init__.pyRs cC@st|j||S(s4Return a true filesystem path for specified resource(RtR(RRhR((sF/usr/lib/python2.7/site-packages/pip/_vendor/pkg_resources/__init__.pyR|s cC@st|j||S(s9Return a readable file-like object for specified resource(RtR(RRhR((sF/usr/lib/python2.7/site-packages/pip/_vendor/pkg_resources/__init__.pyR{s cC@st|j||S(s%Return specified resource as a string(RtR(RRhR((sF/usr/lib/python2.7/site-packages/pip/_vendor/pkg_resources/__init__.pyRzs cC@st|j|S(s1List the contents of the named resource directory(RtR}(RRhR((sF/usr/lib/python2.7/site-packages/pip/_vendor/pkg_resources/__init__.pyR}s cC@sttjd}|jpt}tjdj}t|jt }||_ ||_ ||_ |dS(s5Give an error message for problems extracting file(s)is Can't extract file(s) to egg cache The following error occurred while trying to extract file(s) to the Python egg cache: {old_exc} The Python egg cache directory is currently set to: {cache_path} Perhaps your account does not have write access to this directory? You can change the cache directory by setting the PYTHON_EGG_CACHE environment variable to point to an accessible directory. N( Ritexc_infotextraction_pathRttextwraptdedenttlstripRRRRt cache_pathtoriginal_error(Rtold_excRnttmplterr((sF/usr/lib/python2.7/site-packages/pip/_vendor/pkg_resources/__init__.pytextraction_errors   cC@sn|jpt}tjj||d|}yt|Wn|jnX|j|d|j|<|S(sReturn absolute location in cache for `archive_name` and `names` The parent directory of the resulting path will be created if it does not already exist. `archive_name` should be the base filename of the enclosing egg (which may not be the name of the enclosing zipfile!), including its ".egg" extension. `names`, if provided, should be a sequence of path name parts "under" the egg's extraction location. This method should only be called by resource providers that need to obtain an extraction location, and only for names they intend to extract, as it tracks the generated names for possible cleanup later. s-tmpi( RjRRRRkt_bypass_ensure_directoryRst_warn_unsafe_extraction_pathRg(Rt archive_nametnamest extract_patht target_path((sF/usr/lib/python2.7/site-packages/pip/_vendor/pkg_resources/__init__.pytget_cache_paths   cC@swtjdkr*|jtjd r*dStj|j}|tj@sV|tj@rsd|}tj |t ndS(sN If the default extraction path is overridden and set to an insecure location, such as /tmp, it opens up an opportunity for an attacker to replace an extracted file with an unauthorized payload. Warn the user if a known insecure location is used. See Distribute #375 for more details. tnttwindirNs%s is writable by group/others and vulnerable to attack when used with get_resource_filename. Consider a more secure location (set with .set_extraction_path or the PYTHON_EGG_CACHE environment variable).( RRR4tenvirontstattst_modetS_IWOTHtS_IWGRPR>R?t UserWarning(Rtmodetmsg((sF/usr/lib/python2.7/site-packages/pip/_vendor/pkg_resources/__init__.pyRus &cC@s@tjdkr<tj|jdBd@}tj||ndS(s4Perform any platform-specific postprocessing of `tempname` This is where Mac header rewrites should be done; other platforms don't have anything special they should do. Resource providers should call this method ONLY after successfully extracting a compressed resource. They must NOT call it on resources that are already in the filesystem. `tempname` is the current (temporary) name of the file, and `filename` is the name it will be renamed to by the caller after this routine returns. 
tposiximiN(RRR~Rtchmod(RttempnametfilenameR((sF/usr/lib/python2.7/site-packages/pip/_vendor/pkg_resources/__init__.pyt postprocessscC@s%|jrtdn||_dS(sSet the base path where resources will be extracted to, if needed. If you do not call this routine before any extractions take place, the path defaults to the return value of ``get_default_cache()``. (Which is based on the ``PYTHON_EGG_CACHE`` environment variable, with various platform-specific fallbacks. See that routine's documentation for more details.) Resources are extracted to subdirectories of this path based upon information given by the ``IResourceProvider``. You may set this to a temporary directory, but then you must call ``cleanup_resources()`` to delete the extracted files when done. There is no guarantee that ``cleanup_resources()`` will be able to remove all extracted files. (Note: you may not change the extraction path for a given resource manager once resources have been extracted, unless you first call ``cleanup_resources()``.) s5Can't change extraction path, files already extractedN(RgRnRj(RR((sF/usr/lib/python2.7/site-packages/pip/_vendor/pkg_resources/__init__.pyR)s  cC@sdS(sB Delete all extracted resource files and directories, returning a list of the file and directory names that could not be successfully removed. This function does not have any concurrency protection, so it should generally only be called when the extraction path is a temporary directory exclusive to a single process. This method is not automatically called; you must call it explicitly or register it as an ``atexit`` function if you wish to ensure cleanup of a temporary directory used for extractions. N((Rtforce((sF/usr/lib/python2.7/site-packages/pip/_vendor/pkg_resources/__init__.pyRCRdN((RRRRaRjRR~RR|R{RzR}RsRzt staticmethodRuRRRR(((sF/usr/lib/python2.7/site-packages/pip/_vendor/pkg_resources/__init__.pyRs           cC@s"tjjdp!tjddS(s Return the ``PYTHON_EGG_CACHE`` environment variable or a platform-relevant user cache dir for an app named "Python-Eggs". tPYTHON_EGG_CACHEtappnames Python-Eggs(RR}R=R tuser_cache_dir(((sF/usr/lib/python2.7/site-packages/pip/_vendor/pkg_resources/__init__.pyRQscC@stjdd|S(sConvert an arbitrary string to a standard distribution name Any runs of non-alphanumeric/. characters are replaced with a single '-'. s[^A-Za-z0-9.]+R%(R:tsub(R((sF/usr/lib/python2.7/site-packages/pip/_vendor/pkg_resources/__init__.pyR]scC@sZyttjj|SWn9tjjk rU|jdd}tjdd|SXdS(sB Convert an arbitrary string to a standard version string RR)s[^A-Za-z0-9.]+R%N(RARREtVersionRFR0R:R(RE((sF/usr/lib/python2.7/site-packages/pip/_vendor/pkg_resources/__init__.pyRes cC@stjdd|jS(sConvert an arbitrary string to a standard 'extra' name Any runs of non-alphanumeric characters are replaced with a single '_', and the result is always lowercased. s[^A-Za-z0-9.-]+R(R:RR3(RM((sF/usr/lib/python2.7/site-packages/pip/_vendor/pkg_resources/__init__.pyRqscC@s|jddS(s|Convert a project or version name to its filename-escaped form Any '-' characters are currently replaced with '_'. R%R(R0(R((sF/usr/lib/python2.7/site-packages/pip/_vendor/pkg_resources/__init__.pyRzscC@s;yt|Wn&tk r6}d|_d|_|SXtS(so Validate text as a PEP 508 environment marker; return an exception if invalid or False otherwise. N(Rt SyntaxErrorRaRtlinenoR(ttextte((sF/usr/lib/python2.7/site-packages/pip/_vendor/pkg_resources/__init__.pyRs  cC@sLy tjj|}|jSWn%tjjk rG}t|nXdS(s Evaluate a PEP 508 environment marker. Return a boolean indicating the marker result in this environment. Raise SyntaxError if marker is invalid. 
[binary archive data removed: compiled bytecode members site-packages/pip/_vendor/pkg_resources/__init__.pyc and site-packages/pip/_vendor/pkg_resources/__init__.pyo; the bytecode duplicates the readable source module that follows]
PKZ-@&>>3site-packages/pip/_vendor/pkg_resources/__init__.pynu[
# coding: utf-8
"""
Package resource API
--------------------

A resource is a logical file contained within a package, or a logical
subdirectory thereof. The package resource API expects resource names to have
their path parts separated with ``/``, *not* whatever the local path separator
is. Do not use os.path operations to manipulate resource names being passed
into the API.

The package resource API is designed to work with normal filesystem packages,
.egg files, and unpacked .egg files. It can also work in a limited way with
.zip files and with custom PEP 302 loaders that support the ``get_data()``
method.
"""

from __future__ import absolute_import

import sys
import os
import io
import time
import re
import types
import zipfile
import zipimport
import warnings
import stat
import functools
import pkgutil
import operator
import platform
import collections
import plistlib
import email.parser
import tempfile
import textwrap
import itertools

from pkgutil import get_importer

try:
    import _imp
except ImportError:
    # Python 3.2 compatibility
    import imp as _imp

from pip._vendor import six
from pip._vendor.six.moves import urllib, map, filter

# capture these to bypass sandboxing
from os import utime
try:
    from os import mkdir, rename, unlink
    WRITE_SUPPORT = True
except ImportError:
    # no write support, probably under GAE
    WRITE_SUPPORT = False

from os import open as os_open
from os.path import isdir, split

try:
    import importlib.machinery as importlib_machinery
    # access attribute to force import under delayed import mechanisms.
    importlib_machinery.__name__
except ImportError:
    importlib_machinery = None

from pip._vendor import appdirs
from pip._vendor import packaging
__import__('pip._vendor.packaging.version')
__import__('pip._vendor.packaging.specifiers')
__import__('pip._vendor.packaging.requirements')
__import__('pip._vendor.packaging.markers')

if (3, 0) < sys.version_info < (3, 3):
    msg = (
        "Support for Python 3.0-3.2 has been dropped. Future versions "
        "will fail here."
    )
    warnings.warn(msg)

# declare some globals that will be defined later to
# satisfy the linters.
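
# ---------------------------------------------------------------------------
# Editorial aside -- not part of the original module.  The module docstring
# above stresses that resource names always use '/' as the separator, never
# the local path separator.  A minimal usage sketch of the top-level API,
# assuming a hypothetical installed package ``mypkg`` that ships a
# ``data/config.json`` resource:
#
#     import pkg_resources
#
#     if pkg_resources.resource_exists('mypkg', 'data/config.json'):
#         raw = pkg_resources.resource_string('mypkg', 'data/config.json')
#         path = pkg_resources.resource_filename('mypkg', 'data/config.json')
#
# ``mypkg`` and ``data/config.json`` are illustrative placeholders only.
# ---------------------------------------------------------------------------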
require = None working_set = None class PEP440Warning(RuntimeWarning): """ Used when there is an issue with a version or specifier not complying with PEP 440. """ class _SetuptoolsVersionMixin(object): def __hash__(self): return super(_SetuptoolsVersionMixin, self).__hash__() def __lt__(self, other): if isinstance(other, tuple): return tuple(self) < other else: return super(_SetuptoolsVersionMixin, self).__lt__(other) def __le__(self, other): if isinstance(other, tuple): return tuple(self) <= other else: return super(_SetuptoolsVersionMixin, self).__le__(other) def __eq__(self, other): if isinstance(other, tuple): return tuple(self) == other else: return super(_SetuptoolsVersionMixin, self).__eq__(other) def __ge__(self, other): if isinstance(other, tuple): return tuple(self) >= other else: return super(_SetuptoolsVersionMixin, self).__ge__(other) def __gt__(self, other): if isinstance(other, tuple): return tuple(self) > other else: return super(_SetuptoolsVersionMixin, self).__gt__(other) def __ne__(self, other): if isinstance(other, tuple): return tuple(self) != other else: return super(_SetuptoolsVersionMixin, self).__ne__(other) def __getitem__(self, key): return tuple(self)[key] def __iter__(self): component_re = re.compile(r'(\d+ | [a-z]+ | \.| -)', re.VERBOSE) replace = { 'pre': 'c', 'preview': 'c', '-': 'final-', 'rc': 'c', 'dev': '@', }.get def _parse_version_parts(s): for part in component_re.split(s): part = replace(part, part) if not part or part == '.': continue if part[:1] in '0123456789': # pad for numeric comparison yield part.zfill(8) else: yield '*' + part # ensure that alpha/beta/candidate are before final yield '*final' def old_parse_version(s): parts = [] for part in _parse_version_parts(s.lower()): if part.startswith('*'): # remove '-' before a prerelease tag if part < '*final': while parts and parts[-1] == '*final-': parts.pop() # remove trailing zeros from each series of numeric parts while parts and parts[-1] == '00000000': parts.pop() parts.append(part) return tuple(parts) # Warn for use of this function warnings.warn( "You have iterated over the result of " "pkg_resources.parse_version. This is a legacy behavior which is " "inconsistent with the new version class introduced in setuptools " "8.0. In most cases, conversion to a tuple is unnecessary. For " "comparison of versions, sort the Version instances directly. 
If " "you have another use case requiring the tuple, please file a " "bug with the setuptools project describing that need.", RuntimeWarning, stacklevel=1, ) for part in old_parse_version(str(self)): yield part class SetuptoolsVersion(_SetuptoolsVersionMixin, packaging.version.Version): pass class SetuptoolsLegacyVersion(_SetuptoolsVersionMixin, packaging.version.LegacyVersion): pass def parse_version(v): try: return SetuptoolsVersion(v) except packaging.version.InvalidVersion: return SetuptoolsLegacyVersion(v) _state_vars = {} def _declare_state(vartype, **kw): globals().update(kw) _state_vars.update(dict.fromkeys(kw, vartype)) def __getstate__(): state = {} g = globals() for k, v in _state_vars.items(): state[k] = g['_sget_' + v](g[k]) return state def __setstate__(state): g = globals() for k, v in state.items(): g['_sset_' + _state_vars[k]](k, g[k], v) return state def _sget_dict(val): return val.copy() def _sset_dict(key, ob, state): ob.clear() ob.update(state) def _sget_object(val): return val.__getstate__() def _sset_object(key, ob, state): ob.__setstate__(state) _sget_none = _sset_none = lambda *args: None def get_supported_platform(): """Return this platform's maximum compatible version. distutils.util.get_platform() normally reports the minimum version of Mac OS X that would be required to *use* extensions produced by distutils. But what we want when checking compatibility is to know the version of Mac OS X that we are *running*. To allow usage of packages that explicitly require a newer version of Mac OS X, we must also know the current version of the OS. If this condition occurs for any other platform with a version in its platform strings, this function should be extended accordingly. """ plat = get_build_platform() m = macosVersionString.match(plat) if m is not None and sys.platform == "darwin": try: plat = 'macosx-%s-%s' % ('.'.join(_macosx_vers()[:2]), m.group(3)) except ValueError: # not Mac OS X pass return plat __all__ = [ # Basic resource access and distribution/entry point discovery 'require', 'run_script', 'get_provider', 'get_distribution', 'load_entry_point', 'get_entry_map', 'get_entry_info', 'iter_entry_points', 'resource_string', 'resource_stream', 'resource_filename', 'resource_listdir', 'resource_exists', 'resource_isdir', # Environmental control 'declare_namespace', 'working_set', 'add_activation_listener', 'find_distributions', 'set_extraction_path', 'cleanup_resources', 'get_default_cache', # Primary implementation classes 'Environment', 'WorkingSet', 'ResourceManager', 'Distribution', 'Requirement', 'EntryPoint', # Exceptions 'ResolutionError', 'VersionConflict', 'DistributionNotFound', 'UnknownExtra', 'ExtractionError', # Warnings 'PEP440Warning', # Parsing functions and string utilities 'parse_requirements', 'parse_version', 'safe_name', 'safe_version', 'get_platform', 'compatible_platforms', 'yield_lines', 'split_sections', 'safe_extra', 'to_filename', 'invalid_marker', 'evaluate_marker', # filesystem utilities 'ensure_directory', 'normalize_path', # Distribution "precedence" constants 'EGG_DIST', 'BINARY_DIST', 'SOURCE_DIST', 'CHECKOUT_DIST', 'DEVELOP_DIST', # "Provider" interfaces, implementations, and registration/lookup APIs 'IMetadataProvider', 'IResourceProvider', 'FileMetadata', 'PathMetadata', 'EggMetadata', 'EmptyProvider', 'empty_provider', 'NullProvider', 'EggProvider', 'DefaultProvider', 'ZipProvider', 'register_finder', 'register_namespace_handler', 'register_loader_type', 'fixup_namespace_packages', 'get_importer', # Deprecated/backward 
compatibility only 'run_main', 'AvailableDistributions', ] class ResolutionError(Exception): """Abstract base for dependency resolution errors""" def __repr__(self): return self.__class__.__name__ + repr(self.args) class VersionConflict(ResolutionError): """ An already-installed version conflicts with the requested version. Should be initialized with the installed Distribution and the requested Requirement. """ _template = "{self.dist} is installed but {self.req} is required" @property def dist(self): return self.args[0] @property def req(self): return self.args[1] def report(self): return self._template.format(**locals()) def with_context(self, required_by): """ If required_by is non-empty, return a version of self that is a ContextualVersionConflict. """ if not required_by: return self args = self.args + (required_by,) return ContextualVersionConflict(*args) class ContextualVersionConflict(VersionConflict): """ A VersionConflict that accepts a third parameter, the set of the requirements that required the installed Distribution. """ _template = VersionConflict._template + ' by {self.required_by}' @property def required_by(self): return self.args[2] class DistributionNotFound(ResolutionError): """A requested distribution was not found""" _template = ("The '{self.req}' distribution was not found " "and is required by {self.requirers_str}") @property def req(self): return self.args[0] @property def requirers(self): return self.args[1] @property def requirers_str(self): if not self.requirers: return 'the application' return ', '.join(self.requirers) def report(self): return self._template.format(**locals()) def __str__(self): return self.report() class UnknownExtra(ResolutionError): """Distribution doesn't have an "extra feature" of the given name""" _provider_factories = {} PY_MAJOR = sys.version[:3] EGG_DIST = 3 BINARY_DIST = 2 SOURCE_DIST = 1 CHECKOUT_DIST = 0 DEVELOP_DIST = -1 def register_loader_type(loader_type, provider_factory): """Register `provider_factory` to make providers for `loader_type` `loader_type` is the type or class of a PEP 302 ``module.__loader__``, and `provider_factory` is a function that, passed a *module* object, returns an ``IResourceProvider`` for that module. """ _provider_factories[loader_type] = provider_factory def get_provider(moduleOrReq): """Return an IResourceProvider for the named module or requirement""" if isinstance(moduleOrReq, Requirement): return working_set.find(moduleOrReq) or require(str(moduleOrReq))[0] try: module = sys.modules[moduleOrReq] except KeyError: __import__(moduleOrReq) module = sys.modules[moduleOrReq] loader = getattr(module, '__loader__', None) return _find_adapter(_provider_factories, loader)(module) def _macosx_vers(_cache=[]): if not _cache: version = platform.mac_ver()[0] # fallback for MacPorts if version == '': plist = '/System/Library/CoreServices/SystemVersion.plist' if os.path.exists(plist): if hasattr(plistlib, 'readPlist'): plist_content = plistlib.readPlist(plist) if 'ProductVersion' in plist_content: version = plist_content['ProductVersion'] _cache.append(version.split('.')) return _cache[0] def _macosx_arch(machine): return {'PowerPC': 'ppc', 'Power_Macintosh': 'ppc'}.get(machine, machine) def get_build_platform(): """Return this platform's string for platform-specific distributions XXX Currently this is the same as ``distutils.util.get_platform()``, but it needs some hacks for Linux and Mac OS X. 
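
    Illustrative values (added for clarity; the exact string varies by
    system): a 64-bit Linux interpreter typically reports ``linux-x86_64``,
    while the Mac OS X branch below produces strings of the form
    ``macosx-<major>.<minor>-<machine>``, e.g. ``macosx-10.12-x86_64``.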
""" try: # Python 2.7 or >=3.2 from sysconfig import get_platform except ImportError: from distutils.util import get_platform plat = get_platform() if sys.platform == "darwin" and not plat.startswith('macosx-'): try: version = _macosx_vers() machine = os.uname()[4].replace(" ", "_") return "macosx-%d.%d-%s" % (int(version[0]), int(version[1]), _macosx_arch(machine)) except ValueError: # if someone is running a non-Mac darwin system, this will fall # through to the default implementation pass return plat macosVersionString = re.compile(r"macosx-(\d+)\.(\d+)-(.*)") darwinVersionString = re.compile(r"darwin-(\d+)\.(\d+)\.(\d+)-(.*)") # XXX backward compat get_platform = get_build_platform def compatible_platforms(provided, required): """Can code for the `provided` platform run on the `required` platform? Returns true if either platform is ``None``, or the platforms are equal. XXX Needs compatibility checks for Linux and other unixy OSes. """ if provided is None or required is None or provided == required: # easy case return True # Mac OS X special cases reqMac = macosVersionString.match(required) if reqMac: provMac = macosVersionString.match(provided) # is this a Mac package? if not provMac: # this is backwards compatibility for packages built before # setuptools 0.6. All packages built after this point will # use the new macosx designation. provDarwin = darwinVersionString.match(provided) if provDarwin: dversion = int(provDarwin.group(1)) macosversion = "%s.%s" % (reqMac.group(1), reqMac.group(2)) if dversion == 7 and macosversion >= "10.3" or \ dversion == 8 and macosversion >= "10.4": return True # egg isn't macosx or legacy darwin return False # are they the same major version and machine type? if provMac.group(1) != reqMac.group(1) or \ provMac.group(3) != reqMac.group(3): return False # is the required OS major update >= the provided one? 
if int(provMac.group(2)) > int(reqMac.group(2)): return False return True # XXX Linux and other platforms' special cases should go here return False def run_script(dist_spec, script_name): """Locate distribution `dist_spec` and run its `script_name` script""" ns = sys._getframe(1).f_globals name = ns['__name__'] ns.clear() ns['__name__'] = name require(dist_spec)[0].run_script(script_name, ns) # backward compatibility run_main = run_script def get_distribution(dist): """Return a current distribution object for a Requirement or string""" if isinstance(dist, six.string_types): dist = Requirement.parse(dist) if isinstance(dist, Requirement): dist = get_provider(dist) if not isinstance(dist, Distribution): raise TypeError("Expected string, Requirement, or Distribution", dist) return dist def load_entry_point(dist, group, name): """Return `name` entry point of `group` for `dist` or raise ImportError""" return get_distribution(dist).load_entry_point(group, name) def get_entry_map(dist, group=None): """Return the entry point map for `group`, or the full entry map""" return get_distribution(dist).get_entry_map(group) def get_entry_info(dist, group, name): """Return the EntryPoint object for `group`+`name`, or ``None``""" return get_distribution(dist).get_entry_info(group, name) class IMetadataProvider: def has_metadata(name): """Does the package's distribution contain the named metadata?""" def get_metadata(name): """The named metadata resource as a string""" def get_metadata_lines(name): """Yield named metadata resource as list of non-blank non-comment lines Leading and trailing whitespace is stripped from each line, and lines with ``#`` as the first non-blank character are omitted.""" def metadata_isdir(name): """Is the named metadata a directory? (like ``os.path.isdir()``)""" def metadata_listdir(name): """List of metadata names in the directory (like ``os.listdir()``)""" def run_script(script_name, namespace): """Execute the named script in the supplied namespace dictionary""" class IResourceProvider(IMetadataProvider): """An object that provides access to package resources""" def get_resource_filename(manager, resource_name): """Return a true filesystem path for `resource_name` `manager` must be an ``IResourceManager``""" def get_resource_stream(manager, resource_name): """Return a readable file-like object for `resource_name` `manager` must be an ``IResourceManager``""" def get_resource_string(manager, resource_name): """Return a string containing the contents of `resource_name` `manager` must be an ``IResourceManager``""" def has_resource(resource_name): """Does the package contain the named resource?""" def resource_isdir(resource_name): """Is the named resource a directory? (like ``os.path.isdir()``)""" def resource_listdir(resource_name): """List of resource names in the directory (like ``os.listdir()``)""" class WorkingSet(object): """A collection of active distributions on sys.path (or a similar list)""" def __init__(self, entries=None): """Create working set from list of path entries (default=sys.path)""" self.entries = [] self.entry_keys = {} self.by_key = {} self.callbacks = [] if entries is None: entries = sys.path for entry in entries: self.add_entry(entry) @classmethod def _build_master(cls): """ Prepare the master working set. 
""" ws = cls() try: from __main__ import __requires__ except ImportError: # The main program does not list any requirements return ws # ensure the requirements are met try: ws.require(__requires__) except VersionConflict: return cls._build_from_requirements(__requires__) return ws @classmethod def _build_from_requirements(cls, req_spec): """ Build a working set from a requirement spec. Rewrites sys.path. """ # try it without defaults already on sys.path # by starting with an empty path ws = cls([]) reqs = parse_requirements(req_spec) dists = ws.resolve(reqs, Environment()) for dist in dists: ws.add(dist) # add any missing entries from sys.path for entry in sys.path: if entry not in ws.entries: ws.add_entry(entry) # then copy back to sys.path sys.path[:] = ws.entries return ws def add_entry(self, entry): """Add a path item to ``.entries``, finding any distributions on it ``find_distributions(entry, True)`` is used to find distributions corresponding to the path entry, and they are added. `entry` is always appended to ``.entries``, even if it is already present. (This is because ``sys.path`` can contain the same value more than once, and the ``.entries`` of the ``sys.path`` WorkingSet should always equal ``sys.path``.) """ self.entry_keys.setdefault(entry, []) self.entries.append(entry) for dist in find_distributions(entry, True): self.add(dist, entry, False) def __contains__(self, dist): """True if `dist` is the active distribution for its project""" return self.by_key.get(dist.key) == dist def find(self, req): """Find a distribution matching requirement `req` If there is an active distribution for the requested project, this returns it as long as it meets the version requirement specified by `req`. But, if there is an active distribution for the project and it does *not* meet the `req` requirement, ``VersionConflict`` is raised. If there is no active distribution for the requested project, ``None`` is returned. """ dist = self.by_key.get(req.key) if dist is not None and dist not in req: # XXX add more info raise VersionConflict(dist, req) return dist def iter_entry_points(self, group, name=None): """Yield entry point objects from `group` matching `name` If `name` is None, yields all entry points in `group` from all distributions in the working set, otherwise only ones matching both `group` and `name` are yielded (in distribution order). """ for dist in self: entries = dist.get_entry_map(group) if name is None: for ep in entries.values(): yield ep elif name in entries: yield entries[name] def run_script(self, requires, script_name): """Locate distribution for `requires` and run `script_name` script""" ns = sys._getframe(1).f_globals name = ns['__name__'] ns.clear() ns['__name__'] = name self.require(requires)[0].run_script(script_name, ns) def __iter__(self): """Yield distributions for non-duplicate projects in the working set The yield order is the order in which the items' path entries were added to the working set. """ seen = {} for item in self.entries: if item not in self.entry_keys: # workaround a cache issue continue for key in self.entry_keys[item]: if key not in seen: seen[key] = 1 yield self.by_key[key] def add(self, dist, entry=None, insert=True, replace=False): """Add `dist` to working set, associated with `entry` If `entry` is unspecified, it defaults to the ``.location`` of `dist`. On exit from this routine, `entry` is added to the end of the working set's ``.entries`` (if it wasn't already present). 
`dist` is only added to the working set if it's for a project that doesn't already have a distribution in the set, unless `replace=True`. If it's added, any callbacks registered with the ``subscribe()`` method will be called. """ if insert: dist.insert_on(self.entries, entry, replace=replace) if entry is None: entry = dist.location keys = self.entry_keys.setdefault(entry, []) keys2 = self.entry_keys.setdefault(dist.location, []) if not replace and dist.key in self.by_key: # ignore hidden distros return self.by_key[dist.key] = dist if dist.key not in keys: keys.append(dist.key) if dist.key not in keys2: keys2.append(dist.key) self._added_new(dist) def resolve(self, requirements, env=None, installer=None, replace_conflicting=False): """List all distributions needed to (recursively) meet `requirements` `requirements` must be a sequence of ``Requirement`` objects. `env`, if supplied, should be an ``Environment`` instance. If not supplied, it defaults to all distributions available within any entry or distribution in the working set. `installer`, if supplied, will be invoked with each requirement that cannot be met by an already-installed distribution; it should return a ``Distribution`` or ``None``. Unless `replace_conflicting=True`, raises a VersionConflict exception if any requirements are found on the path that have the correct name but the wrong version. Otherwise, if an `installer` is supplied it will be invoked to obtain the correct version of the requirement and activate it. """ # set up the stack requirements = list(requirements)[::-1] # set of processed requirements processed = {} # key -> dist best = {} to_activate = [] req_extras = _ReqExtras() # Mapping of requirement to set of distributions that required it; # useful for reporting info about conflicts. 
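        # -------------------------------------------------------------------
        # Editorial aside -- illustrative only, not part of the original
        # code.  A typical external call of this method looks like:
        #
        #     ws = WorkingSet()
        #     reqs = parse_requirements('examplepkg>=1.0')  # hypothetical name
        #     dists = ws.resolve(list(reqs), Environment())
        #
        # and returns the Distribution objects that would need activating.
        # -------------------------------------------------------------------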
required_by = collections.defaultdict(set) while requirements: # process dependencies breadth-first req = requirements.pop(0) if req in processed: # Ignore cyclic or redundant dependencies continue if not req_extras.markers_pass(req): continue dist = best.get(req.key) if dist is None: # Find the best distribution and add it to the map dist = self.by_key.get(req.key) if dist is None or (dist not in req and replace_conflicting): ws = self if env is None: if dist is None: env = Environment(self.entries) else: # Use an empty environment and workingset to avoid # any further conflicts with the conflicting # distribution env = Environment([]) ws = WorkingSet([]) dist = best[req.key] = env.best_match(req, ws, installer) if dist is None: requirers = required_by.get(req, None) raise DistributionNotFound(req, requirers) to_activate.append(dist) if dist not in req: # Oops, the "best" so far conflicts with a dependency dependent_req = required_by[req] raise VersionConflict(dist, req).with_context(dependent_req) # push the new requirements onto the stack new_requirements = dist.requires(req.extras)[::-1] requirements.extend(new_requirements) # Register the new requirements needed by req for new_requirement in new_requirements: required_by[new_requirement].add(req.project_name) req_extras[new_requirement] = req.extras processed[req] = True # return list of distros to activate return to_activate def find_plugins(self, plugin_env, full_env=None, installer=None, fallback=True): """Find all activatable distributions in `plugin_env` Example usage:: distributions, errors = working_set.find_plugins( Environment(plugin_dirlist) ) # add plugins+libs to sys.path map(working_set.add, distributions) # display errors print('Could not load', errors) The `plugin_env` should be an ``Environment`` instance that contains only distributions that are in the project's "plugin directory" or directories. The `full_env`, if supplied, should be an ``Environment`` contains all currently-available distributions. If `full_env` is not supplied, one is created automatically from the ``WorkingSet`` this method is called on, which will typically mean that every directory on ``sys.path`` will be scanned for distributions. `installer` is a standard installer callback as used by the ``resolve()`` method. The `fallback` flag indicates whether we should attempt to resolve older versions of a plugin if the newest version cannot be resolved. This method returns a 2-tuple: (`distributions`, `error_info`), where `distributions` is a list of the distributions found in `plugin_env` that were loadable, along with any other distributions that are needed to resolve their dependencies. `error_info` is a dictionary mapping unloadable plugin distributions to an exception instance describing the error that occurred. Usually this will be a ``DistributionNotFound`` or ``VersionConflict`` instance. 
""" plugin_projects = list(plugin_env) # scan project names in alphabetic order plugin_projects.sort() error_info = {} distributions = {} if full_env is None: env = Environment(self.entries) env += plugin_env else: env = full_env + plugin_env shadow_set = self.__class__([]) # put all our entries in shadow_set list(map(shadow_set.add, self)) for project_name in plugin_projects: for dist in plugin_env[project_name]: req = [dist.as_requirement()] try: resolvees = shadow_set.resolve(req, env, installer) except ResolutionError as v: # save error info error_info[dist] = v if fallback: # try the next older version of project continue else: # give up on this project, keep going break else: list(map(shadow_set.add, resolvees)) distributions.update(dict.fromkeys(resolvees)) # success, no need to try any more versions of this project break distributions = list(distributions) distributions.sort() return distributions, error_info def require(self, *requirements): """Ensure that distributions matching `requirements` are activated `requirements` must be a string or a (possibly-nested) sequence thereof, specifying the distributions and versions required. The return value is a sequence of the distributions that needed to be activated to fulfill the requirements; all relevant distributions are included, even if they were already activated in this working set. """ needed = self.resolve(parse_requirements(requirements)) for dist in needed: self.add(dist) return needed def subscribe(self, callback, existing=True): """Invoke `callback` for all distributions If `existing=True` (default), call on all existing ones, as well. """ if callback in self.callbacks: return self.callbacks.append(callback) if not existing: return for dist in self: callback(dist) def _added_new(self, dist): for callback in self.callbacks: callback(dist) def __getstate__(self): return ( self.entries[:], self.entry_keys.copy(), self.by_key.copy(), self.callbacks[:] ) def __setstate__(self, e_k_b_c): entries, keys, by_key, callbacks = e_k_b_c self.entries = entries[:] self.entry_keys = keys.copy() self.by_key = by_key.copy() self.callbacks = callbacks[:] class _ReqExtras(dict): """ Map each requirement to the extras that demanded it. """ def markers_pass(self, req): """ Evaluate markers for req against each extra that demanded it. Return False if the req has a marker and fails evaluation. Otherwise, return True. """ extra_evals = ( req.marker.evaluate({'extra': extra}) for extra in self.get(req, ()) + (None,) ) return not req.marker or any(extra_evals) class Environment(object): """Searchable snapshot of distributions on a search path""" def __init__(self, search_path=None, platform=get_supported_platform(), python=PY_MAJOR): """Snapshot distributions available on a search path Any distributions found on `search_path` are added to the environment. `search_path` should be a sequence of ``sys.path`` items. If not supplied, ``sys.path`` is used. `platform` is an optional string specifying the name of the platform that platform-specific distributions must be compatible with. If unspecified, it defaults to the current platform. `python` is an optional string naming the desired version of Python (e.g. ``'3.3'``); it defaults to the current version. You may explicitly set `platform` (and/or `python`) to ``None`` if you wish to map *all* distributions, not just those compatible with the running platform or Python version. 
""" self._distmap = {} self.platform = platform self.python = python self.scan(search_path) def can_add(self, dist): """Is distribution `dist` acceptable for this environment? The distribution must match the platform and python version requirements specified when this environment was created, or False is returned. """ return (self.python is None or dist.py_version is None or dist.py_version == self.python) \ and compatible_platforms(dist.platform, self.platform) def remove(self, dist): """Remove `dist` from the environment""" self._distmap[dist.key].remove(dist) def scan(self, search_path=None): """Scan `search_path` for distributions usable in this environment Any distributions found are added to the environment. `search_path` should be a sequence of ``sys.path`` items. If not supplied, ``sys.path`` is used. Only distributions conforming to the platform/python version defined at initialization are added. """ if search_path is None: search_path = sys.path for item in search_path: for dist in find_distributions(item): self.add(dist) def __getitem__(self, project_name): """Return a newest-to-oldest list of distributions for `project_name` Uses case-insensitive `project_name` comparison, assuming all the project's distributions use their project's name converted to all lowercase as their key. """ distribution_key = project_name.lower() return self._distmap.get(distribution_key, []) def add(self, dist): """Add `dist` if we ``can_add()`` it and it has not already been added """ if self.can_add(dist) and dist.has_version(): dists = self._distmap.setdefault(dist.key, []) if dist not in dists: dists.append(dist) dists.sort(key=operator.attrgetter('hashcmp'), reverse=True) def best_match(self, req, working_set, installer=None): """Find distribution best matching `req` and usable on `working_set` This calls the ``find(req)`` method of the `working_set` to see if a suitable distribution is already active. (This may raise ``VersionConflict`` if an unsuitable version of the project is already active in the specified `working_set`.) If a suitable distribution isn't active, this method returns the newest distribution in the environment that meets the ``Requirement`` in `req`. If no suitable distribution is found, and `installer` is supplied, then the result of calling the environment's ``obtain(req, installer)`` method will be returned. """ dist = working_set.find(req) if dist is not None: return dist for dist in self[req.key]: if dist in req: return dist # try to download/install return self.obtain(req, installer) def obtain(self, requirement, installer=None): """Obtain a distribution matching `requirement` (e.g. via download) Obtain a distro that matches requirement (e.g. via download). In the base ``Environment`` class, this routine just returns ``installer(requirement)``, unless `installer` is None, in which case None is returned instead. 
This method is a hook that allows subclasses to attempt other ways of obtaining a distribution before falling back to the `installer` argument.""" if installer is not None: return installer(requirement) def __iter__(self): """Yield the unique project names of the available distributions""" for key in self._distmap.keys(): if self[key]: yield key def __iadd__(self, other): """In-place addition of a distribution or environment""" if isinstance(other, Distribution): self.add(other) elif isinstance(other, Environment): for project in other: for dist in other[project]: self.add(dist) else: raise TypeError("Can't add %r to environment" % (other,)) return self def __add__(self, other): """Add an environment or distribution to an environment""" new = self.__class__([], platform=None, python=None) for env in self, other: new += env return new # XXX backward compatibility AvailableDistributions = Environment class ExtractionError(RuntimeError): """An error occurred extracting a resource The following attributes are available from instances of this exception: manager The resource manager that raised this exception cache_path The base directory for resource extraction original_error The exception instance that caused extraction to fail """ class ResourceManager: """Manage resource extraction and packages""" extraction_path = None def __init__(self): self.cached_files = {} def resource_exists(self, package_or_requirement, resource_name): """Does the named resource exist?""" return get_provider(package_or_requirement).has_resource(resource_name) def resource_isdir(self, package_or_requirement, resource_name): """Is the named resource an existing directory?""" return get_provider(package_or_requirement).resource_isdir( resource_name ) def resource_filename(self, package_or_requirement, resource_name): """Return a true filesystem path for specified resource""" return get_provider(package_or_requirement).get_resource_filename( self, resource_name ) def resource_stream(self, package_or_requirement, resource_name): """Return a readable file-like object for specified resource""" return get_provider(package_or_requirement).get_resource_stream( self, resource_name ) def resource_string(self, package_or_requirement, resource_name): """Return specified resource as a string""" return get_provider(package_or_requirement).get_resource_string( self, resource_name ) def resource_listdir(self, package_or_requirement, resource_name): """List the contents of the named resource directory""" return get_provider(package_or_requirement).resource_listdir( resource_name ) def extraction_error(self): """Give an error message for problems extracting file(s)""" old_exc = sys.exc_info()[1] cache_path = self.extraction_path or get_default_cache() tmpl = textwrap.dedent(""" Can't extract file(s) to egg cache The following error occurred while trying to extract file(s) to the Python egg cache: {old_exc} The Python egg cache directory is currently set to: {cache_path} Perhaps your account does not have write access to this directory? You can change the cache directory by setting the PYTHON_EGG_CACHE environment variable to point to an accessible directory. """).lstrip() err = ExtractionError(tmpl.format(**locals())) err.manager = self err.cache_path = cache_path err.original_error = old_exc raise err def get_cache_path(self, archive_name, names=()): """Return absolute location in cache for `archive_name` and `names` The parent directory of the resulting path will be created if it does not already exist. 
`archive_name` should be the base filename of the enclosing egg (which may not be the name of the enclosing zipfile!), including its ".egg" extension. `names`, if provided, should be a sequence of path name parts "under" the egg's extraction location. This method should only be called by resource providers that need to obtain an extraction location, and only for names they intend to extract, as it tracks the generated names for possible cleanup later. """ extract_path = self.extraction_path or get_default_cache() target_path = os.path.join(extract_path, archive_name + '-tmp', *names) try: _bypass_ensure_directory(target_path) except: self.extraction_error() self._warn_unsafe_extraction_path(extract_path) self.cached_files[target_path] = 1 return target_path @staticmethod def _warn_unsafe_extraction_path(path): """ If the default extraction path is overridden and set to an insecure location, such as /tmp, it opens up an opportunity for an attacker to replace an extracted file with an unauthorized payload. Warn the user if a known insecure location is used. See Distribute #375 for more details. """ if os.name == 'nt' and not path.startswith(os.environ['windir']): # On Windows, permissions are generally restrictive by default # and temp directories are not writable by other users, so # bypass the warning. return mode = os.stat(path).st_mode if mode & stat.S_IWOTH or mode & stat.S_IWGRP: msg = ("%s is writable by group/others and vulnerable to attack " "when " "used with get_resource_filename. Consider a more secure " "location (set with .set_extraction_path or the " "PYTHON_EGG_CACHE environment variable)." % path) warnings.warn(msg, UserWarning) def postprocess(self, tempname, filename): """Perform any platform-specific postprocessing of `tempname` This is where Mac header rewrites should be done; other platforms don't have anything special they should do. Resource providers should call this method ONLY after successfully extracting a compressed resource. They must NOT call it on resources that are already in the filesystem. `tempname` is the current (temporary) name of the file, and `filename` is the name it will be renamed to by the caller after this routine returns. """ if os.name == 'posix': # Make the resource executable mode = ((os.stat(tempname).st_mode) | 0o555) & 0o7777 os.chmod(tempname, mode) def set_extraction_path(self, path): """Set the base path where resources will be extracted to, if needed. If you do not call this routine before any extractions take place, the path defaults to the return value of ``get_default_cache()``. (Which is based on the ``PYTHON_EGG_CACHE`` environment variable, with various platform-specific fallbacks. See that routine's documentation for more details.) Resources are extracted to subdirectories of this path based upon information given by the ``IResourceProvider``. You may set this to a temporary directory, but then you must call ``cleanup_resources()`` to delete the extracted files when done. There is no guarantee that ``cleanup_resources()`` will be able to remove all extracted files. (Note: you may not change the extraction path for a given resource manager once resources have been extracted, unless you first call ``cleanup_resources()``.) 
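
        Illustrative usage (the directory name is hypothetical)::

            manager = ResourceManager()
            manager.set_extraction_path('/srv/app/egg-cache')
            # ... perform extractions ...
            manager.cleanup_resources()  # only if the path was temporary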
""" if self.cached_files: raise ValueError( "Can't change extraction path, files already extracted" ) self.extraction_path = path def cleanup_resources(self, force=False): """ Delete all extracted resource files and directories, returning a list of the file and directory names that could not be successfully removed. This function does not have any concurrency protection, so it should generally only be called when the extraction path is a temporary directory exclusive to a single process. This method is not automatically called; you must call it explicitly or register it as an ``atexit`` function if you wish to ensure cleanup of a temporary directory used for extractions. """ # XXX def get_default_cache(): """ Return the ``PYTHON_EGG_CACHE`` environment variable or a platform-relevant user cache dir for an app named "Python-Eggs". """ return ( os.environ.get('PYTHON_EGG_CACHE') or appdirs.user_cache_dir(appname='Python-Eggs') ) def safe_name(name): """Convert an arbitrary string to a standard distribution name Any runs of non-alphanumeric/. characters are replaced with a single '-'. """ return re.sub('[^A-Za-z0-9.]+', '-', name) def safe_version(version): """ Convert an arbitrary string to a standard version string """ try: # normalize the version return str(packaging.version.Version(version)) except packaging.version.InvalidVersion: version = version.replace(' ', '.') return re.sub('[^A-Za-z0-9.]+', '-', version) def safe_extra(extra): """Convert an arbitrary string to a standard 'extra' name Any runs of non-alphanumeric characters are replaced with a single '_', and the result is always lowercased. """ return re.sub('[^A-Za-z0-9.-]+', '_', extra).lower() def to_filename(name): """Convert a project or version name to its filename-escaped form Any '-' characters are currently replaced with '_'. """ return name.replace('-', '_') def invalid_marker(text): """ Validate text as a PEP 508 environment marker; return an exception if invalid or False otherwise. """ try: evaluate_marker(text) except SyntaxError as e: e.filename = None e.lineno = None return e return False def evaluate_marker(text, extra=None): """ Evaluate a PEP 508 environment marker. Return a boolean indicating the marker result in this environment. Raise SyntaxError if marker is invalid. This implementation uses the 'pyparsing' module. 
""" try: marker = packaging.markers.Marker(text) return marker.evaluate() except packaging.markers.InvalidMarker as e: raise SyntaxError(e) class NullProvider: """Try to implement resources and metadata for arbitrary PEP 302 loaders""" egg_name = None egg_info = None loader = None def __init__(self, module): self.loader = getattr(module, '__loader__', None) self.module_path = os.path.dirname(getattr(module, '__file__', '')) def get_resource_filename(self, manager, resource_name): return self._fn(self.module_path, resource_name) def get_resource_stream(self, manager, resource_name): return io.BytesIO(self.get_resource_string(manager, resource_name)) def get_resource_string(self, manager, resource_name): return self._get(self._fn(self.module_path, resource_name)) def has_resource(self, resource_name): return self._has(self._fn(self.module_path, resource_name)) def has_metadata(self, name): return self.egg_info and self._has(self._fn(self.egg_info, name)) def get_metadata(self, name): if not self.egg_info: return "" value = self._get(self._fn(self.egg_info, name)) return value.decode('utf-8') if six.PY3 else value def get_metadata_lines(self, name): return yield_lines(self.get_metadata(name)) def resource_isdir(self, resource_name): return self._isdir(self._fn(self.module_path, resource_name)) def metadata_isdir(self, name): return self.egg_info and self._isdir(self._fn(self.egg_info, name)) def resource_listdir(self, resource_name): return self._listdir(self._fn(self.module_path, resource_name)) def metadata_listdir(self, name): if self.egg_info: return self._listdir(self._fn(self.egg_info, name)) return [] def run_script(self, script_name, namespace): script = 'scripts/' + script_name if not self.has_metadata(script): raise ResolutionError("No script named %r" % script_name) script_text = self.get_metadata(script).replace('\r\n', '\n') script_text = script_text.replace('\r', '\n') script_filename = self._fn(self.egg_info, script) namespace['__file__'] = script_filename if os.path.exists(script_filename): source = open(script_filename).read() code = compile(source, script_filename, 'exec') exec(code, namespace, namespace) else: from linecache import cache cache[script_filename] = ( len(script_text), 0, script_text.split('\n'), script_filename ) script_code = compile(script_text, script_filename, 'exec') exec(script_code, namespace, namespace) def _has(self, path): raise NotImplementedError( "Can't perform this operation for unregistered loader type" ) def _isdir(self, path): raise NotImplementedError( "Can't perform this operation for unregistered loader type" ) def _listdir(self, path): raise NotImplementedError( "Can't perform this operation for unregistered loader type" ) def _fn(self, base, resource_name): if resource_name: return os.path.join(base, *resource_name.split('/')) return base def _get(self, path): if hasattr(self.loader, 'get_data'): return self.loader.get_data(path) raise NotImplementedError( "Can't perform this operation for loaders without 'get_data()'" ) register_loader_type(object, NullProvider) class EggProvider(NullProvider): """Provider based on a virtual filesystem""" def __init__(self, module): NullProvider.__init__(self, module) self._setup_prefix() def _setup_prefix(self): # we assume here that our metadata may be nested inside a "basket" # of multiple eggs; that's why we use module_path instead of .archive path = self.module_path old = None while path != old: if _is_unpacked_egg(path): self.egg_name = os.path.basename(path) self.egg_info = os.path.join(path, 
'EGG-INFO') self.egg_root = path break old = path path, base = os.path.split(path) class DefaultProvider(EggProvider): """Provides access to package resources in the filesystem""" def _has(self, path): return os.path.exists(path) def _isdir(self, path): return os.path.isdir(path) def _listdir(self, path): return os.listdir(path) def get_resource_stream(self, manager, resource_name): return open(self._fn(self.module_path, resource_name), 'rb') def _get(self, path): with open(path, 'rb') as stream: return stream.read() @classmethod def _register(cls): loader_cls = getattr(importlib_machinery, 'SourceFileLoader', type(None)) register_loader_type(loader_cls, cls) DefaultProvider._register() class EmptyProvider(NullProvider): """Provider that returns nothing for all requests""" _isdir = _has = lambda self, path: False _get = lambda self, path: '' _listdir = lambda self, path: [] module_path = None def __init__(self): pass empty_provider = EmptyProvider() class ZipManifests(dict): """ zip manifest builder """ @classmethod def build(cls, path): """ Build a dictionary similar to the zipimport directory caches, except instead of tuples, store ZipInfo objects. Use a platform-specific path separator (os.sep) for the path keys for compatibility with pypy on Windows. """ with ContextualZipFile(path) as zfile: items = ( ( name.replace('/', os.sep), zfile.getinfo(name), ) for name in zfile.namelist() ) return dict(items) load = build class MemoizedZipManifests(ZipManifests): """ Memoized zipfile manifests. """ manifest_mod = collections.namedtuple('manifest_mod', 'manifest mtime') def load(self, path): """ Load a manifest at path or return a suitable manifest already loaded. """ path = os.path.normpath(path) mtime = os.stat(path).st_mtime if path not in self or self[path].mtime != mtime: manifest = self.build(path) self[path] = self.manifest_mod(manifest, mtime) return self[path].manifest class ContextualZipFile(zipfile.ZipFile): """ Supplement ZipFile class to support context manager for Python 2.6 """ def __enter__(self): return self def __exit__(self, type, value, traceback): self.close() def __new__(cls, *args, **kwargs): """ Construct a ZipFile or ContextualZipFile as appropriate """ if hasattr(zipfile.ZipFile, '__exit__'): return zipfile.ZipFile(*args, **kwargs) return super(ContextualZipFile, cls).__new__(cls) class ZipProvider(EggProvider): """Resource support for zips and eggs""" eagers = None _zip_manifests = MemoizedZipManifests() def __init__(self, module): EggProvider.__init__(self, module) self.zip_pre = self.loader.archive + os.sep def _zipinfo_name(self, fspath): # Convert a virtual filename (full path to file) into a zipfile subpath # usable with the zipimport directory cache for our target archive if fspath.startswith(self.zip_pre): return fspath[len(self.zip_pre):] raise AssertionError( "%s is not a subpath of %s" % (fspath, self.zip_pre) ) def _parts(self, zip_path): # Convert a zipfile subpath into an egg-relative path part list. 
# pseudo-fs path fspath = self.zip_pre + zip_path if fspath.startswith(self.egg_root + os.sep): return fspath[len(self.egg_root) + 1:].split(os.sep) raise AssertionError( "%s is not a subpath of %s" % (fspath, self.egg_root) ) @property def zipinfo(self): return self._zip_manifests.load(self.loader.archive) def get_resource_filename(self, manager, resource_name): if not self.egg_name: raise NotImplementedError( "resource_filename() only supported for .egg, not .zip" ) # no need to lock for extraction, since we use temp names zip_path = self._resource_to_zip(resource_name) eagers = self._get_eager_resources() if '/'.join(self._parts(zip_path)) in eagers: for name in eagers: self._extract_resource(manager, self._eager_to_zip(name)) return self._extract_resource(manager, zip_path) @staticmethod def _get_date_and_size(zip_stat): size = zip_stat.file_size # ymdhms+wday, yday, dst date_time = zip_stat.date_time + (0, 0, -1) # 1980 offset already done timestamp = time.mktime(date_time) return timestamp, size def _extract_resource(self, manager, zip_path): if zip_path in self._index(): for name in self._index()[zip_path]: last = self._extract_resource( manager, os.path.join(zip_path, name) ) # return the extracted directory name return os.path.dirname(last) timestamp, size = self._get_date_and_size(self.zipinfo[zip_path]) if not WRITE_SUPPORT: raise IOError('"os.rename" and "os.unlink" are not supported ' 'on this platform') try: real_path = manager.get_cache_path( self.egg_name, self._parts(zip_path) ) if self._is_current(real_path, zip_path): return real_path outf, tmpnam = _mkstemp(".$extract", dir=os.path.dirname(real_path)) os.write(outf, self.loader.get_data(zip_path)) os.close(outf) utime(tmpnam, (timestamp, timestamp)) manager.postprocess(tmpnam, real_path) try: rename(tmpnam, real_path) except os.error: if os.path.isfile(real_path): if self._is_current(real_path, zip_path): # the file became current since it was checked above, # so proceed. 
return real_path # Windows, del old file and retry elif os.name == 'nt': unlink(real_path) rename(tmpnam, real_path) return real_path raise except os.error: # report a user-friendly error manager.extraction_error() return real_path def _is_current(self, file_path, zip_path): """ Return True if the file_path is current for this zip_path """ timestamp, size = self._get_date_and_size(self.zipinfo[zip_path]) if not os.path.isfile(file_path): return False stat = os.stat(file_path) if stat.st_size != size or stat.st_mtime != timestamp: return False # check that the contents match zip_contents = self.loader.get_data(zip_path) with open(file_path, 'rb') as f: file_contents = f.read() return zip_contents == file_contents def _get_eager_resources(self): if self.eagers is None: eagers = [] for name in ('native_libs.txt', 'eager_resources.txt'): if self.has_metadata(name): eagers.extend(self.get_metadata_lines(name)) self.eagers = eagers return self.eagers def _index(self): try: return self._dirindex except AttributeError: ind = {} for path in self.zipinfo: parts = path.split(os.sep) while parts: parent = os.sep.join(parts[:-1]) if parent in ind: ind[parent].append(parts[-1]) break else: ind[parent] = [parts.pop()] self._dirindex = ind return ind def _has(self, fspath): zip_path = self._zipinfo_name(fspath) return zip_path in self.zipinfo or zip_path in self._index() def _isdir(self, fspath): return self._zipinfo_name(fspath) in self._index() def _listdir(self, fspath): return list(self._index().get(self._zipinfo_name(fspath), ())) def _eager_to_zip(self, resource_name): return self._zipinfo_name(self._fn(self.egg_root, resource_name)) def _resource_to_zip(self, resource_name): return self._zipinfo_name(self._fn(self.module_path, resource_name)) register_loader_type(zipimport.zipimporter, ZipProvider) class FileMetadata(EmptyProvider): """Metadata handler for standalone PKG-INFO files Usage:: metadata = FileMetadata("/path/to/PKG-INFO") This provider rejects all data and metadata requests except for PKG-INFO, which is treated as existing, and will be the contents of the file at the provided location. 
""" def __init__(self, path): self.path = path def has_metadata(self, name): return name == 'PKG-INFO' and os.path.isfile(self.path) def get_metadata(self, name): if name != 'PKG-INFO': raise KeyError("No metadata except PKG-INFO is available") with io.open(self.path, encoding='utf-8', errors="replace") as f: metadata = f.read() self._warn_on_replacement(metadata) return metadata def _warn_on_replacement(self, metadata): # Python 2.6 and 3.2 compat for: replacement_char = '�' replacement_char = b'\xef\xbf\xbd'.decode('utf-8') if replacement_char in metadata: tmpl = "{self.path} could not be properly decoded in UTF-8" msg = tmpl.format(**locals()) warnings.warn(msg) def get_metadata_lines(self, name): return yield_lines(self.get_metadata(name)) class PathMetadata(DefaultProvider): """Metadata provider for egg directories Usage:: # Development eggs: egg_info = "/path/to/PackageName.egg-info" base_dir = os.path.dirname(egg_info) metadata = PathMetadata(base_dir, egg_info) dist_name = os.path.splitext(os.path.basename(egg_info))[0] dist = Distribution(basedir, project_name=dist_name, metadata=metadata) # Unpacked egg directories: egg_path = "/path/to/PackageName-ver-pyver-etc.egg" metadata = PathMetadata(egg_path, os.path.join(egg_path,'EGG-INFO')) dist = Distribution.from_filename(egg_path, metadata=metadata) """ def __init__(self, path, egg_info): self.module_path = path self.egg_info = egg_info class EggMetadata(ZipProvider): """Metadata provider for .egg files""" def __init__(self, importer): """Create a metadata provider from a zipimporter""" self.zip_pre = importer.archive + os.sep self.loader = importer if importer.prefix: self.module_path = os.path.join(importer.archive, importer.prefix) else: self.module_path = importer.archive self._setup_prefix() _declare_state('dict', _distribution_finders={}) def register_finder(importer_type, distribution_finder): """Register `distribution_finder` to find distributions in sys.path items `importer_type` is the type or class of a PEP 302 "Importer" (sys.path item handler), and `distribution_finder` is a callable that, passed a path item and the importer instance, yields ``Distribution`` instances found on that path item. See ``pkg_resources.find_on_path`` for an example.""" _distribution_finders[importer_type] = distribution_finder def find_distributions(path_item, only=False): """Yield distributions accessible via `path_item`""" importer = get_importer(path_item) finder = _find_adapter(_distribution_finders, importer) return finder(importer, path_item, only) def find_eggs_in_zip(importer, path_item, only=False): """ Find eggs in zip files; possibly multiple nested eggs. """ if importer.archive.endswith('.whl'): # wheels are not supported with this finder # they don't have PKG-INFO metadata, and won't ever contain eggs return metadata = EggMetadata(importer) if metadata.has_metadata('PKG-INFO'): yield Distribution.from_filename(path_item, metadata=metadata) if only: # don't yield nested distros return for subitem in metadata.resource_listdir('/'): if _is_unpacked_egg(subitem): subpath = os.path.join(path_item, subitem) for dist in find_eggs_in_zip(zipimport.zipimporter(subpath), subpath): yield dist register_finder(zipimport.zipimporter, find_eggs_in_zip) def find_nothing(importer, path_item, only=False): return () register_finder(object, find_nothing) def _by_version_descending(names): """ Given a list of filenames, return them in descending order by version number. 
>>> names = 'bar', 'foo', 'Python-2.7.10.egg', 'Python-2.7.2.egg' >>> _by_version_descending(names) ['Python-2.7.10.egg', 'Python-2.7.2.egg', 'foo', 'bar'] >>> names = 'Setuptools-1.2.3b1.egg', 'Setuptools-1.2.3.egg' >>> _by_version_descending(names) ['Setuptools-1.2.3.egg', 'Setuptools-1.2.3b1.egg'] >>> names = 'Setuptools-1.2.3b1.egg', 'Setuptools-1.2.3.post1.egg' >>> _by_version_descending(names) ['Setuptools-1.2.3.post1.egg', 'Setuptools-1.2.3b1.egg'] """ def _by_version(name): """ Parse each component of the filename """ name, ext = os.path.splitext(name) parts = itertools.chain(name.split('-'), [ext]) return [packaging.version.parse(part) for part in parts] return sorted(names, key=_by_version, reverse=True) def find_on_path(importer, path_item, only=False): """Yield distributions accessible on a sys.path directory""" path_item = _normalize_cached(path_item) if os.path.isdir(path_item) and os.access(path_item, os.R_OK): if _is_unpacked_egg(path_item): yield Distribution.from_filename( path_item, metadata=PathMetadata( path_item, os.path.join(path_item, 'EGG-INFO') ) ) else: # scan for .egg and .egg-info in directory path_item_entries = _by_version_descending(os.listdir(path_item)) for entry in path_item_entries: lower = entry.lower() if lower.endswith('.egg-info') or lower.endswith('.dist-info'): fullpath = os.path.join(path_item, entry) if os.path.isdir(fullpath): # egg-info directory, allow getting metadata if len(os.listdir(fullpath)) == 0: # Empty egg directory, skip. continue metadata = PathMetadata(path_item, fullpath) else: metadata = FileMetadata(fullpath) yield Distribution.from_location( path_item, entry, metadata, precedence=DEVELOP_DIST ) elif not only and _is_unpacked_egg(entry): dists = find_distributions(os.path.join(path_item, entry)) for dist in dists: yield dist elif not only and lower.endswith('.egg-link'): with open(os.path.join(path_item, entry)) as entry_file: entry_lines = entry_file.readlines() for line in entry_lines: if not line.strip(): continue path = os.path.join(path_item, line.rstrip()) dists = find_distributions(path) for item in dists: yield item break register_finder(pkgutil.ImpImporter, find_on_path) if hasattr(importlib_machinery, 'FileFinder'): register_finder(importlib_machinery.FileFinder, find_on_path) _declare_state('dict', _namespace_handlers={}) _declare_state('dict', _namespace_packages={}) def register_namespace_handler(importer_type, namespace_handler): """Register `namespace_handler` to declare namespace packages `importer_type` is the type or class of a PEP 302 "Importer" (sys.path item handler), and `namespace_handler` is a callable like this:: def namespace_handler(importer, path_entry, moduleName, module): # return a path_entry to use for child packages Namespace handlers are only called if the importer object has already agreed that it can handle the relevant path item, and they should only return a subpath if the module __path__ does not already contain an equivalent subpath. For an example namespace handler, see ``pkg_resources.file_ns_handler``. 
""" _namespace_handlers[importer_type] = namespace_handler def _handle_ns(packageName, path_item): """Ensure that named package includes a subpath of path_item (if needed)""" importer = get_importer(path_item) if importer is None: return None loader = importer.find_module(packageName) if loader is None: return None module = sys.modules.get(packageName) if module is None: module = sys.modules[packageName] = types.ModuleType(packageName) module.__path__ = [] _set_parent_ns(packageName) elif not hasattr(module, '__path__'): raise TypeError("Not a package:", packageName) handler = _find_adapter(_namespace_handlers, importer) subpath = handler(importer, path_item, packageName, module) if subpath is not None: path = module.__path__ path.append(subpath) loader.load_module(packageName) _rebuild_mod_path(path, packageName, module) return subpath def _rebuild_mod_path(orig_path, package_name, module): """ Rebuild module.__path__ ensuring that all entries are ordered corresponding to their sys.path order """ sys_path = [_normalize_cached(p) for p in sys.path] def safe_sys_path_index(entry): """ Workaround for #520 and #513. """ try: return sys_path.index(entry) except ValueError: return float('inf') def position_in_sys_path(path): """ Return the ordinal of the path based on its position in sys.path """ path_parts = path.split(os.sep) module_parts = package_name.count('.') + 1 parts = path_parts[:-module_parts] return safe_sys_path_index(_normalize_cached(os.sep.join(parts))) orig_path.sort(key=position_in_sys_path) module.__path__[:] = [_normalize_cached(p) for p in orig_path] def declare_namespace(packageName): """Declare that package 'packageName' is a namespace package""" _imp.acquire_lock() try: if packageName in _namespace_packages: return path, parent = sys.path, None if '.' 
in packageName: parent = '.'.join(packageName.split('.')[:-1]) declare_namespace(parent) if parent not in _namespace_packages: __import__(parent) try: path = sys.modules[parent].__path__ except AttributeError: raise TypeError("Not a package:", parent) # Track what packages are namespaces, so when new path items are added, # they can be updated _namespace_packages.setdefault(parent, []).append(packageName) _namespace_packages.setdefault(packageName, []) for path_item in path: # Ensure all the parent's path items are reflected in the child, # if they apply _handle_ns(packageName, path_item) finally: _imp.release_lock() def fixup_namespace_packages(path_item, parent=None): """Ensure that previously-declared namespace packages include path_item""" _imp.acquire_lock() try: for package in _namespace_packages.get(parent, ()): subpath = _handle_ns(package, path_item) if subpath: fixup_namespace_packages(subpath, package) finally: _imp.release_lock() def file_ns_handler(importer, path_item, packageName, module): """Compute an ns-package subpath for a filesystem or zipfile importer""" subpath = os.path.join(path_item, packageName.split('.')[-1]) normalized = _normalize_cached(subpath) for item in module.__path__: if _normalize_cached(item) == normalized: break else: # Only return the path if it's not already there return subpath register_namespace_handler(pkgutil.ImpImporter, file_ns_handler) register_namespace_handler(zipimport.zipimporter, file_ns_handler) if hasattr(importlib_machinery, 'FileFinder'): register_namespace_handler(importlib_machinery.FileFinder, file_ns_handler) def null_ns_handler(importer, path_item, packageName, module): return None register_namespace_handler(object, null_ns_handler) def normalize_path(filename): """Normalize a file/dir name for comparison purposes""" return os.path.normcase(os.path.realpath(filename)) def _normalize_cached(filename, _cache={}): try: return _cache[filename] except KeyError: _cache[filename] = result = normalize_path(filename) return result def _is_unpacked_egg(path): """ Determine if given path appears to be an unpacked egg. """ return ( path.lower().endswith('.egg') ) def _set_parent_ns(packageName): parts = packageName.split('.') name = parts.pop() if parts: parent = '.'.join(parts) setattr(sys.modules[parent], name, sys.modules[packageName]) def yield_lines(strs): """Yield non-empty/non-comment lines of a string or sequence""" if isinstance(strs, six.string_types): for s in strs.splitlines(): s = s.strip() # skip blank lines/comments if s and not s.startswith('#'): yield s else: for ss in strs: for s in yield_lines(ss): yield s MODULE = re.compile(r"\w+(\.\w+)*$").match EGG_NAME = re.compile( r""" (?P[^-]+) ( -(?P[^-]+) ( -py(?P[^-]+) ( -(?P.+) )? )? )? """, re.VERBOSE | re.IGNORECASE, ).match class EntryPoint(object): """Object representing an advertised importable object""" def __init__(self, name, module_name, attrs=(), extras=(), dist=None): if not MODULE(module_name): raise ValueError("Invalid module name", module_name) self.name = name self.module_name = module_name self.attrs = tuple(attrs) self.extras = Requirement.parse(("x[%s]" % ','.join(extras))).extras self.dist = dist def __str__(self): s = "%s = %s" % (self.name, self.module_name) if self.attrs: s += ':' + '.'.join(self.attrs) if self.extras: s += ' [%s]' % ','.join(self.extras) return s def __repr__(self): return "EntryPoint.parse(%r)" % str(self) def load(self, require=True, *args, **kwargs): """ Require packages for this EntryPoint, then resolve it. 
""" if not require or args or kwargs: warnings.warn( "Parameters to load are deprecated. Call .resolve and " ".require separately.", DeprecationWarning, stacklevel=2, ) if require: self.require(*args, **kwargs) return self.resolve() def resolve(self): """ Resolve the entry point from its module and attrs. """ module = __import__(self.module_name, fromlist=['__name__'], level=0) try: return functools.reduce(getattr, self.attrs, module) except AttributeError as exc: raise ImportError(str(exc)) def require(self, env=None, installer=None): if self.extras and not self.dist: raise UnknownExtra("Can't require() without a distribution", self) reqs = self.dist.requires(self.extras) items = working_set.resolve(reqs, env, installer) list(map(working_set.add, items)) pattern = re.compile( r'\s*' r'(?P.+?)\s*' r'=\s*' r'(?P[\w.]+)\s*' r'(:\s*(?P[\w.]+))?\s*' r'(?P\[.*\])?\s*$' ) @classmethod def parse(cls, src, dist=None): """Parse a single entry point from string `src` Entry point syntax follows the form:: name = some.module:some.attr [extra1, extra2] The entry name and module name are required, but the ``:attrs`` and ``[extras]`` parts are optional """ m = cls.pattern.match(src) if not m: msg = "EntryPoint must be in 'name=module:attrs [extras]' format" raise ValueError(msg, src) res = m.groupdict() extras = cls._parse_extras(res['extras']) attrs = res['attr'].split('.') if res['attr'] else () return cls(res['name'], res['module'], attrs, extras, dist) @classmethod def _parse_extras(cls, extras_spec): if not extras_spec: return () req = Requirement.parse('x' + extras_spec) if req.specs: raise ValueError() return req.extras @classmethod def parse_group(cls, group, lines, dist=None): """Parse an entry point group""" if not MODULE(group): raise ValueError("Invalid group name", group) this = {} for line in yield_lines(lines): ep = cls.parse(line, dist) if ep.name in this: raise ValueError("Duplicate entry point", group, ep.name) this[ep.name] = ep return this @classmethod def parse_map(cls, data, dist=None): """Parse a map of entry point groups""" if isinstance(data, dict): data = data.items() else: data = split_sections(data) maps = {} for group, lines in data: if group is None: if not lines: continue raise ValueError("Entry points must be listed in groups") group = group.strip() if group in maps: raise ValueError("Duplicate group name", group) maps[group] = cls.parse_group(group, lines, dist) return maps def _remove_md5_fragment(location): if not location: return '' parsed = urllib.parse.urlparse(location) if parsed[-1].startswith('md5='): return urllib.parse.urlunparse(parsed[:-1] + ('',)) return location def _version_from_file(lines): """ Given an iterable of lines from a Metadata file, return the value of the Version field, if present, or None otherwise. 
""" is_version_line = lambda line: line.lower().startswith('version:') version_lines = filter(is_version_line, lines) line = next(iter(version_lines), '') _, _, value = line.partition(':') return safe_version(value.strip()) or None class Distribution(object): """Wrap an actual or potential sys.path entry w/metadata""" PKG_INFO = 'PKG-INFO' def __init__(self, location=None, metadata=None, project_name=None, version=None, py_version=PY_MAJOR, platform=None, precedence=EGG_DIST): self.project_name = safe_name(project_name or 'Unknown') if version is not None: self._version = safe_version(version) self.py_version = py_version self.platform = platform self.location = location self.precedence = precedence self._provider = metadata or empty_provider @classmethod def from_location(cls, location, basename, metadata=None, **kw): project_name, version, py_version, platform = [None] * 4 basename, ext = os.path.splitext(basename) if ext.lower() in _distributionImpl: cls = _distributionImpl[ext.lower()] match = EGG_NAME(basename) if match: project_name, version, py_version, platform = match.group( 'name', 'ver', 'pyver', 'plat' ) return cls( location, metadata, project_name=project_name, version=version, py_version=py_version, platform=platform, **kw )._reload_version() def _reload_version(self): return self @property def hashcmp(self): return ( self.parsed_version, self.precedence, self.key, _remove_md5_fragment(self.location), self.py_version or '', self.platform or '', ) def __hash__(self): return hash(self.hashcmp) def __lt__(self, other): return self.hashcmp < other.hashcmp def __le__(self, other): return self.hashcmp <= other.hashcmp def __gt__(self, other): return self.hashcmp > other.hashcmp def __ge__(self, other): return self.hashcmp >= other.hashcmp def __eq__(self, other): if not isinstance(other, self.__class__): # It's not a Distribution, so they are not equal return False return self.hashcmp == other.hashcmp def __ne__(self, other): return not self == other # These properties have to be lazy so that we don't have to load any # metadata until/unless it's actually needed. (i.e., some distributions # may not know their name or version without loading PKG-INFO) @property def key(self): try: return self._key except AttributeError: self._key = key = self.project_name.lower() return key @property def parsed_version(self): if not hasattr(self, "_parsed_version"): self._parsed_version = parse_version(self.version) return self._parsed_version def _warn_legacy_version(self): LV = packaging.version.LegacyVersion is_legacy = isinstance(self._parsed_version, LV) if not is_legacy: return # While an empty version is technically a legacy version and # is not a valid PEP 440 version, it's also unlikely to # actually come from someone and instead it is more likely that # it comes from setuptools attempting to parse a filename and # including it in the list. So for that we'll gate this warning # on if the version is anything at all or not. if not self.version: return tmpl = textwrap.dedent(""" '{project_name} ({version})' is being parsed as a legacy, non PEP 440, version. You may find odd behavior and sort order. In particular it will be sorted as less than 0.0. It is recommended to migrate to PEP 440 compatible versions. 
""").strip().replace('\n', ' ') warnings.warn(tmpl.format(**vars(self)), PEP440Warning) @property def version(self): try: return self._version except AttributeError: version = _version_from_file(self._get_metadata(self.PKG_INFO)) if version is None: tmpl = "Missing 'Version:' header and/or %s file" raise ValueError(tmpl % self.PKG_INFO, self) return version @property def _dep_map(self): try: return self.__dep_map except AttributeError: dm = self.__dep_map = {None: []} for name in 'requires.txt', 'depends.txt': for extra, reqs in split_sections(self._get_metadata(name)): if extra: if ':' in extra: extra, marker = extra.split(':', 1) if invalid_marker(marker): # XXX warn reqs = [] elif not evaluate_marker(marker): reqs = [] extra = safe_extra(extra) or None dm.setdefault(extra, []).extend(parse_requirements(reqs)) return dm def requires(self, extras=()): """List of Requirements needed for this distro if `extras` are used""" dm = self._dep_map deps = [] deps.extend(dm.get(None, ())) for ext in extras: try: deps.extend(dm[safe_extra(ext)]) except KeyError: raise UnknownExtra( "%s has no such extra feature %r" % (self, ext) ) return deps def _get_metadata(self, name): if self.has_metadata(name): for line in self.get_metadata_lines(name): yield line def activate(self, path=None, replace=False): """Ensure distribution is importable on `path` (default=sys.path)""" if path is None: path = sys.path self.insert_on(path, replace=replace) if path is sys.path: fixup_namespace_packages(self.location) for pkg in self._get_metadata('namespace_packages.txt'): if pkg in sys.modules: declare_namespace(pkg) def egg_name(self): """Return what this distribution's standard .egg filename should be""" filename = "%s-%s-py%s" % ( to_filename(self.project_name), to_filename(self.version), self.py_version or PY_MAJOR ) if self.platform: filename += '-' + self.platform return filename def __repr__(self): if self.location: return "%s (%s)" % (self, self.location) else: return str(self) def __str__(self): try: version = getattr(self, 'version', None) except ValueError: version = None version = version or "[unknown version]" return "%s %s" % (self.project_name, version) def __getattr__(self, attr): """Delegate all unrecognized public attributes to .metadata provider""" if attr.startswith('_'): raise AttributeError(attr) return getattr(self._provider, attr) @classmethod def from_filename(cls, filename, metadata=None, **kw): return cls.from_location( _normalize_cached(filename), os.path.basename(filename), metadata, **kw ) def as_requirement(self): """Return a ``Requirement`` that matches this distribution exactly""" if isinstance(self.parsed_version, packaging.version.Version): spec = "%s==%s" % (self.project_name, self.parsed_version) else: spec = "%s===%s" % (self.project_name, self.parsed_version) return Requirement.parse(spec) def load_entry_point(self, group, name): """Return the `name` entry point of `group` or raise ImportError""" ep = self.get_entry_info(group, name) if ep is None: raise ImportError("Entry point %r not found" % ((group, name),)) return ep.load() def get_entry_map(self, group=None): """Return the entry point map for `group`, or the full entry map""" try: ep_map = self._ep_map except AttributeError: ep_map = self._ep_map = EntryPoint.parse_map( self._get_metadata('entry_points.txt'), self ) if group is not None: return ep_map.get(group, {}) return ep_map def get_entry_info(self, group, name): """Return the EntryPoint object for `group`+`name`, or ``None``""" return self.get_entry_map(group).get(name) 
def insert_on(self, path, loc=None, replace=False): """Ensure self.location is on path If replace=False (default): - If location is already in path anywhere, do nothing. - Else: - If it's an egg and its parent directory is on path, insert just ahead of the parent. - Else: add to the end of path. If replace=True: - If location is already on path anywhere (not eggs) or higher priority than its parent (eggs) do nothing. - Else: - If it's an egg and its parent directory is on path, insert just ahead of the parent, removing any lower-priority entries. - Else: add it to the front of path. """ loc = loc or self.location if not loc: return nloc = _normalize_cached(loc) bdir = os.path.dirname(nloc) npath = [(p and _normalize_cached(p) or p) for p in path] for p, item in enumerate(npath): if item == nloc: if replace: break else: # don't modify path (even removing duplicates) if found and not replace return elif item == bdir and self.precedence == EGG_DIST: # if it's an .egg, give it precedence over its directory # UNLESS it's already been added to sys.path and replace=False if (not replace) and nloc in npath[p:]: return if path is sys.path: self.check_version_conflict() path.insert(p, loc) npath.insert(p, nloc) break else: if path is sys.path: self.check_version_conflict() if replace: path.insert(0, loc) else: path.append(loc) return # p is the spot where we found or inserted loc; now remove duplicates while True: try: np = npath.index(nloc, p + 1) except ValueError: break else: del npath[np], path[np] # ha! p = np return def check_version_conflict(self): if self.key == 'setuptools': # ignore the inevitable setuptools self-conflicts :( return nsp = dict.fromkeys(self._get_metadata('namespace_packages.txt')) loc = normalize_path(self.location) for modname in self._get_metadata('top_level.txt'): if (modname not in sys.modules or modname in nsp or modname in _namespace_packages): continue if modname in ('pkg_resources', 'setuptools', 'site'): continue fn = getattr(sys.modules[modname], '__file__', None) if fn and (normalize_path(fn).startswith(loc) or fn.startswith(self.location)): continue issue_warning( "Module %s was already imported from %s, but %s is being added" " to sys.path" % (modname, fn, self.location), ) def has_version(self): try: self.version except ValueError: issue_warning("Unbuilt egg for " + repr(self)) return False return True def clone(self, **kw): """Copy this distribution, substituting in any changed keyword args""" names = 'project_name version py_version platform location precedence' for attr in names.split(): kw.setdefault(attr, getattr(self, attr, None)) kw.setdefault('metadata', self._provider) return self.__class__(**kw) @property def extras(self): return [dep for dep in self._dep_map if dep] class EggInfoDistribution(Distribution): def _reload_version(self): """ Packages installed by distutils (e.g. numpy or scipy), which uses an old safe_version, and so their version numbers can get mangled when converted to filenames (e.g., 1.11.0.dev0+2329eae to 1.11.0.dev0_2329eae). These distributions will not be parsed properly downstream by Distribution and safe_version, so take an extra step and try to get the version number from the metadata file itself instead of the filename. 
""" md_version = _version_from_file(self._get_metadata(self.PKG_INFO)) if md_version: self._version = md_version return self class DistInfoDistribution(Distribution): """Wrap an actual or potential sys.path entry w/metadata, .dist-info style""" PKG_INFO = 'METADATA' EQEQ = re.compile(r"([\(,])\s*(\d.*?)\s*([,\)])") @property def _parsed_pkg_info(self): """Parse and cache metadata""" try: return self._pkg_info except AttributeError: metadata = self.get_metadata(self.PKG_INFO) self._pkg_info = email.parser.Parser().parsestr(metadata) return self._pkg_info @property def _dep_map(self): try: return self.__dep_map except AttributeError: self.__dep_map = self._compute_dependencies() return self.__dep_map def _compute_dependencies(self): """Recompute this distribution's dependencies.""" dm = self.__dep_map = {None: []} reqs = [] # Including any condition expressions for req in self._parsed_pkg_info.get_all('Requires-Dist') or []: reqs.extend(parse_requirements(req)) def reqs_for_extra(extra): for req in reqs: if not req.marker or req.marker.evaluate({'extra': extra}): yield req common = frozenset(reqs_for_extra(None)) dm[None].extend(common) for extra in self._parsed_pkg_info.get_all('Provides-Extra') or []: s_extra = safe_extra(extra.strip()) dm[s_extra] = list(frozenset(reqs_for_extra(extra)) - common) return dm _distributionImpl = { '.egg': Distribution, '.egg-info': EggInfoDistribution, '.dist-info': DistInfoDistribution, } def issue_warning(*args, **kw): level = 1 g = globals() try: # find the first stack frame that is *not* code in # the pkg_resources module, to use for the warning while sys._getframe(level).f_globals is g: level += 1 except ValueError: pass warnings.warn(stacklevel=level + 1, *args, **kw) class RequirementParseError(ValueError): def __str__(self): return ' '.join(self.args) def parse_requirements(strs): """Yield ``Requirement`` objects for each specification in `strs` `strs` must be a string, or a (possibly-nested) iterable thereof. """ # create a steppable iterator, so we can handle \-continuations lines = iter(yield_lines(strs)) for line in lines: # Drop comments -- a hash without a space may be in a URL. if ' #' in line: line = line[:line.find(' #')] # If there is a line continuation, drop it, and append the next line. if line.endswith('\\'): line = line[:-2].strip() line += next(lines) yield Requirement(line) class Requirement(packaging.requirements.Requirement): def __init__(self, requirement_string): """DO NOT CALL THIS UNDOCUMENTED METHOD; use Requirement.parse()!""" try: super(Requirement, self).__init__(requirement_string) except packaging.requirements.InvalidRequirement as e: raise RequirementParseError(str(e)) self.unsafe_name = self.name project_name = safe_name(self.name) self.project_name, self.key = project_name, project_name.lower() self.specs = [ (spec.operator, spec.version) for spec in self.specifier] self.extras = tuple(map(safe_extra, self.extras)) self.hashCmp = ( self.key, self.specifier, frozenset(self.extras), str(self.marker) if self.marker else None, ) self.__hash = hash(self.hashCmp) def __eq__(self, other): return ( isinstance(other, Requirement) and self.hashCmp == other.hashCmp ) def __ne__(self, other): return not self == other def __contains__(self, item): if isinstance(item, Distribution): if item.key != self.key: return False item = item.version # Allow prereleases always in order to match the previous behavior of # this method. In the future this should be smarter and follow PEP 440 # more accurately. 
return self.specifier.contains(item, prereleases=True) def __hash__(self): return self.__hash def __repr__(self): return "Requirement.parse(%r)" % str(self) @staticmethod def parse(s): req, = parse_requirements(s) return req def _get_mro(cls): """Get an mro for a type or classic class""" if not isinstance(cls, type): class cls(cls, object): pass return cls.__mro__[1:] return cls.__mro__ def _find_adapter(registry, ob): """Return an adapter factory for `ob` from `registry`""" for t in _get_mro(getattr(ob, '__class__', type(ob))): if t in registry: return registry[t] def ensure_directory(path): """Ensure that the parent directory of `path` exists""" dirname = os.path.dirname(path) if not os.path.isdir(dirname): os.makedirs(dirname) def _bypass_ensure_directory(path): """Sandbox-bypassing version of ensure_directory()""" if not WRITE_SUPPORT: raise IOError('"os.mkdir" not supported on this platform.') dirname, filename = split(path) if dirname and filename and not isdir(dirname): _bypass_ensure_directory(dirname) mkdir(dirname, 0o755) def split_sections(s): """Split a string or iterable thereof into (section, content) pairs Each ``section`` is a stripped version of the section header ("[section]") and each ``content`` is a list of stripped lines excluding blank lines and comment-only lines. If there are any such lines before the first section header, they're returned in a first ``section`` of ``None``. """ section = None content = [] for line in yield_lines(s): if line.startswith("["): if line.endswith("]"): if section or content: yield section, content section = line[1:-1].strip() content = [] else: raise ValueError("Invalid section heading", line) else: content.append(line) # wrap up last segment yield section, content def _mkstemp(*args, **kw): old_open = os.open try: # temporarily bypass sandboxing os.open = os_open return tempfile.mkstemp(*args, **kw) finally: # and then put it back os.open = old_open # Silence the PEP440Warning by default, so that end users don't get hit by it # randomly just because they use pkg_resources. We want to append the rule # because we want earlier uses of filterwarnings to take precedence over this # one. warnings.filterwarnings("ignore", category=PEP440Warning, append=True) # from jaraco.functools 1.3 def _call_aside(f, *args, **kwargs): f(*args, **kwargs) return f @_call_aside def _initialize(g=globals()): "Set up global resource manager (deliberately not state-saved)" manager = ResourceManager() g['_manager'] = manager for name in dir(manager): if not name.startswith('_'): g[name] = getattr(manager, name) @_call_aside def _initialize_master_working_set(): """ Prepare the master working set and make the ``require()`` API available. This function has explicit effects on the global state of pkg_resources. It is intended to be invoked once at the initialization of this module. Invocation by other packages is unsupported and done at their own risk. """ working_set = WorkingSet._build_master() _declare_state('object', working_set=working_set) require = working_set.require iter_entry_points = working_set.iter_entry_points add_activation_listener = working_set.subscribe run_script = working_set.run_script # backward compatibility run_main = run_script # Activate all distributions already on sys.path with replace=False and # ensure that all distributions added to the working set in the future # (e.g. by calling ``require()``) will get activated as well, # with higher priority (replace=True). 
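    # Illustrative note on the hook used just below: add_activation_listener (bound to
    # working_set.subscribe above) registers a callback invoked for distributions added
    # to the master working set; passing existing=False, as done below, skips entries
    # that are already present.  A hypothetical caller could likewise register its own
    # callback, e.g. add_activation_listener(my_callback), where my_callback takes the
    # activated Distribution as its only argument.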
dist = None # ensure dist is defined for del dist below for dist in working_set: dist.activate(replace=False) del dist add_activation_listener(lambda dist: dist.activate(replace=True), existing=False) working_set.entries = [] # match order list(map(working_set.add_entry, sys.path)) globals().update(locals())
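# Illustrative sketch (separate from the module above, using only the public helpers it
# defines): parse requirement strings and inspect them, then use the module-level
# working-set API bound by _initialize_master_working_set().  The project names and the
# 'console_scripts' group are only examples, not part of the module.
import pkg_resources

reqs = list(pkg_resources.parse_requirements([
    "example-dist>=1.0,<2.0",        # hypothetical project
    "other-dist[extra1]==3.2.1",     # hypothetical project with an extra
]))
for req in reqs:
    print(req.key, req.specs, req.extras)

pkg_resources.require("setuptools")  # resolve and activate an installed project
for ep in pkg_resources.iter_entry_points("console_scripts"):
    print(ep.name, ep.module_name)   # EntryPoint objects advertised by installed dists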
site-packages/pip/_vendor/webencodings/labels.pyc
site-packages/pip/_vendor/webencodings/__init__.pyc
site-packages/pip/_vendor/webencodings/x_user_defined.pyo
site-packages/pip/_vendor/webencodings/mklabels.pyc
site-packages/pip/_vendor/webencodings/__init__.pyo
site-packages/pip/_vendor/webencodings/labels.py
""" webencodings.labels ~~~~~~~~~~~~~~~~~~~ Map encoding labels to their name. :copyright: Copyright 2012 by Simon Sapin :license: BSD, see LICENSE for details. """ # XXX Do not edit!
# This file is automatically generated by mklabels.py LABELS = { 'unicode-1-1-utf-8': 'utf-8', 'utf-8': 'utf-8', 'utf8': 'utf-8', '866': 'ibm866', 'cp866': 'ibm866', 'csibm866': 'ibm866', 'ibm866': 'ibm866', 'csisolatin2': 'iso-8859-2', 'iso-8859-2': 'iso-8859-2', 'iso-ir-101': 'iso-8859-2', 'iso8859-2': 'iso-8859-2', 'iso88592': 'iso-8859-2', 'iso_8859-2': 'iso-8859-2', 'iso_8859-2:1987': 'iso-8859-2', 'l2': 'iso-8859-2', 'latin2': 'iso-8859-2', 'csisolatin3': 'iso-8859-3', 'iso-8859-3': 'iso-8859-3', 'iso-ir-109': 'iso-8859-3', 'iso8859-3': 'iso-8859-3', 'iso88593': 'iso-8859-3', 'iso_8859-3': 'iso-8859-3', 'iso_8859-3:1988': 'iso-8859-3', 'l3': 'iso-8859-3', 'latin3': 'iso-8859-3', 'csisolatin4': 'iso-8859-4', 'iso-8859-4': 'iso-8859-4', 'iso-ir-110': 'iso-8859-4', 'iso8859-4': 'iso-8859-4', 'iso88594': 'iso-8859-4', 'iso_8859-4': 'iso-8859-4', 'iso_8859-4:1988': 'iso-8859-4', 'l4': 'iso-8859-4', 'latin4': 'iso-8859-4', 'csisolatincyrillic': 'iso-8859-5', 'cyrillic': 'iso-8859-5', 'iso-8859-5': 'iso-8859-5', 'iso-ir-144': 'iso-8859-5', 'iso8859-5': 'iso-8859-5', 'iso88595': 'iso-8859-5', 'iso_8859-5': 'iso-8859-5', 'iso_8859-5:1988': 'iso-8859-5', 'arabic': 'iso-8859-6', 'asmo-708': 'iso-8859-6', 'csiso88596e': 'iso-8859-6', 'csiso88596i': 'iso-8859-6', 'csisolatinarabic': 'iso-8859-6', 'ecma-114': 'iso-8859-6', 'iso-8859-6': 'iso-8859-6', 'iso-8859-6-e': 'iso-8859-6', 'iso-8859-6-i': 'iso-8859-6', 'iso-ir-127': 'iso-8859-6', 'iso8859-6': 'iso-8859-6', 'iso88596': 'iso-8859-6', 'iso_8859-6': 'iso-8859-6', 'iso_8859-6:1987': 'iso-8859-6', 'csisolatingreek': 'iso-8859-7', 'ecma-118': 'iso-8859-7', 'elot_928': 'iso-8859-7', 'greek': 'iso-8859-7', 'greek8': 'iso-8859-7', 'iso-8859-7': 'iso-8859-7', 'iso-ir-126': 'iso-8859-7', 'iso8859-7': 'iso-8859-7', 'iso88597': 'iso-8859-7', 'iso_8859-7': 'iso-8859-7', 'iso_8859-7:1987': 'iso-8859-7', 'sun_eu_greek': 'iso-8859-7', 'csiso88598e': 'iso-8859-8', 'csisolatinhebrew': 'iso-8859-8', 'hebrew': 'iso-8859-8', 'iso-8859-8': 'iso-8859-8', 'iso-8859-8-e': 'iso-8859-8', 'iso-ir-138': 'iso-8859-8', 'iso8859-8': 'iso-8859-8', 'iso88598': 'iso-8859-8', 'iso_8859-8': 'iso-8859-8', 'iso_8859-8:1988': 'iso-8859-8', 'visual': 'iso-8859-8', 'csiso88598i': 'iso-8859-8-i', 'iso-8859-8-i': 'iso-8859-8-i', 'logical': 'iso-8859-8-i', 'csisolatin6': 'iso-8859-10', 'iso-8859-10': 'iso-8859-10', 'iso-ir-157': 'iso-8859-10', 'iso8859-10': 'iso-8859-10', 'iso885910': 'iso-8859-10', 'l6': 'iso-8859-10', 'latin6': 'iso-8859-10', 'iso-8859-13': 'iso-8859-13', 'iso8859-13': 'iso-8859-13', 'iso885913': 'iso-8859-13', 'iso-8859-14': 'iso-8859-14', 'iso8859-14': 'iso-8859-14', 'iso885914': 'iso-8859-14', 'csisolatin9': 'iso-8859-15', 'iso-8859-15': 'iso-8859-15', 'iso8859-15': 'iso-8859-15', 'iso885915': 'iso-8859-15', 'iso_8859-15': 'iso-8859-15', 'l9': 'iso-8859-15', 'iso-8859-16': 'iso-8859-16', 'cskoi8r': 'koi8-r', 'koi': 'koi8-r', 'koi8': 'koi8-r', 'koi8-r': 'koi8-r', 'koi8_r': 'koi8-r', 'koi8-u': 'koi8-u', 'csmacintosh': 'macintosh', 'mac': 'macintosh', 'macintosh': 'macintosh', 'x-mac-roman': 'macintosh', 'dos-874': 'windows-874', 'iso-8859-11': 'windows-874', 'iso8859-11': 'windows-874', 'iso885911': 'windows-874', 'tis-620': 'windows-874', 'windows-874': 'windows-874', 'cp1250': 'windows-1250', 'windows-1250': 'windows-1250', 'x-cp1250': 'windows-1250', 'cp1251': 'windows-1251', 'windows-1251': 'windows-1251', 'x-cp1251': 'windows-1251', 'ansi_x3.4-1968': 'windows-1252', 'ascii': 'windows-1252', 'cp1252': 'windows-1252', 'cp819': 'windows-1252', 'csisolatin1': 
'windows-1252', 'ibm819': 'windows-1252', 'iso-8859-1': 'windows-1252', 'iso-ir-100': 'windows-1252', 'iso8859-1': 'windows-1252', 'iso88591': 'windows-1252', 'iso_8859-1': 'windows-1252', 'iso_8859-1:1987': 'windows-1252', 'l1': 'windows-1252', 'latin1': 'windows-1252', 'us-ascii': 'windows-1252', 'windows-1252': 'windows-1252', 'x-cp1252': 'windows-1252', 'cp1253': 'windows-1253', 'windows-1253': 'windows-1253', 'x-cp1253': 'windows-1253', 'cp1254': 'windows-1254', 'csisolatin5': 'windows-1254', 'iso-8859-9': 'windows-1254', 'iso-ir-148': 'windows-1254', 'iso8859-9': 'windows-1254', 'iso88599': 'windows-1254', 'iso_8859-9': 'windows-1254', 'iso_8859-9:1989': 'windows-1254', 'l5': 'windows-1254', 'latin5': 'windows-1254', 'windows-1254': 'windows-1254', 'x-cp1254': 'windows-1254', 'cp1255': 'windows-1255', 'windows-1255': 'windows-1255', 'x-cp1255': 'windows-1255', 'cp1256': 'windows-1256', 'windows-1256': 'windows-1256', 'x-cp1256': 'windows-1256', 'cp1257': 'windows-1257', 'windows-1257': 'windows-1257', 'x-cp1257': 'windows-1257', 'cp1258': 'windows-1258', 'windows-1258': 'windows-1258', 'x-cp1258': 'windows-1258', 'x-mac-cyrillic': 'x-mac-cyrillic', 'x-mac-ukrainian': 'x-mac-cyrillic', 'chinese': 'gbk', 'csgb2312': 'gbk', 'csiso58gb231280': 'gbk', 'gb2312': 'gbk', 'gb_2312': 'gbk', 'gb_2312-80': 'gbk', 'gbk': 'gbk', 'iso-ir-58': 'gbk', 'x-gbk': 'gbk', 'gb18030': 'gb18030', 'hz-gb-2312': 'hz-gb-2312', 'big5': 'big5', 'big5-hkscs': 'big5', 'cn-big5': 'big5', 'csbig5': 'big5', 'x-x-big5': 'big5', 'cseucpkdfmtjapanese': 'euc-jp', 'euc-jp': 'euc-jp', 'x-euc-jp': 'euc-jp', 'csiso2022jp': 'iso-2022-jp', 'iso-2022-jp': 'iso-2022-jp', 'csshiftjis': 'shift_jis', 'ms_kanji': 'shift_jis', 'shift-jis': 'shift_jis', 'shift_jis': 'shift_jis', 'sjis': 'shift_jis', 'windows-31j': 'shift_jis', 'x-sjis': 'shift_jis', 'cseuckr': 'euc-kr', 'csksc56011987': 'euc-kr', 'euc-kr': 'euc-kr', 'iso-ir-149': 'euc-kr', 'korean': 'euc-kr', 'ks_c_5601-1987': 'euc-kr', 'ks_c_5601-1989': 'euc-kr', 'ksc5601': 'euc-kr', 'ksc_5601': 'euc-kr', 'windows-949': 'euc-kr', 'csiso2022kr': 'iso-2022-kr', 'iso-2022-kr': 'iso-2022-kr', 'utf-16be': 'utf-16be', 'utf-16': 'utf-16le', 'utf-16le': 'utf-16le', 'x-user-defined': 'x-user-defined', } PKZHoE8site-packages/pip/_vendor/webencodings/x_user_defined.pynu[# coding: utf8 """ webencodings.x_user_defined ~~~~~~~~~~~~~~~~~~~~~~~~~~~ An implementation of the x-user-defined encoding. :copyright: Copyright 2012 by Simon Sapin :license: BSD, see LICENSE for details. 
""" from __future__ import unicode_literals import codecs ### Codec APIs class Codec(codecs.Codec): def encode(self, input, errors='strict'): return codecs.charmap_encode(input, errors, encoding_table) def decode(self, input, errors='strict'): return codecs.charmap_decode(input, errors, decoding_table) class IncrementalEncoder(codecs.IncrementalEncoder): def encode(self, input, final=False): return codecs.charmap_encode(input, self.errors, encoding_table)[0] class IncrementalDecoder(codecs.IncrementalDecoder): def decode(self, input, final=False): return codecs.charmap_decode(input, self.errors, decoding_table)[0] class StreamWriter(Codec, codecs.StreamWriter): pass class StreamReader(Codec, codecs.StreamReader): pass ### encodings module API codec_info = codecs.CodecInfo( name='x-user-defined', encode=Codec().encode, decode=Codec().decode, incrementalencoder=IncrementalEncoder, incrementaldecoder=IncrementalDecoder, streamreader=StreamReader, streamwriter=StreamWriter, ) ### Decoding Table # Python 3: # for c in range(256): print(' %r' % chr(c if c < 128 else c + 0xF700)) decoding_table = ( '\x00' '\x01' '\x02' '\x03' '\x04' '\x05' '\x06' '\x07' '\x08' '\t' '\n' '\x0b' '\x0c' '\r' '\x0e' '\x0f' '\x10' '\x11' '\x12' '\x13' '\x14' '\x15' '\x16' '\x17' '\x18' '\x19' '\x1a' '\x1b' '\x1c' '\x1d' '\x1e' '\x1f' ' ' '!' '"' '#' '$' '%' '&' "'" '(' ')' '*' '+' ',' '-' '.' '/' '0' '1' '2' '3' '4' '5' '6' '7' '8' '9' ':' ';' '<' '=' '>' '?' '@' 'A' 'B' 'C' 'D' 'E' 'F' 'G' 'H' 'I' 'J' 'K' 'L' 'M' 'N' 'O' 'P' 'Q' 'R' 'S' 'T' 'U' 'V' 'W' 'X' 'Y' 'Z' '[' '\\' ']' '^' '_' '`' 'a' 'b' 'c' 'd' 'e' 'f' 'g' 'h' 'i' 'j' 'k' 'l' 'm' 'n' 'o' 'p' 'q' 'r' 's' 't' 'u' 'v' 'w' 'x' 'y' 'z' '{' '|' '}' '~' '\x7f' '\uf780' '\uf781' '\uf782' '\uf783' '\uf784' '\uf785' '\uf786' '\uf787' '\uf788' '\uf789' '\uf78a' '\uf78b' '\uf78c' '\uf78d' '\uf78e' '\uf78f' '\uf790' '\uf791' '\uf792' '\uf793' '\uf794' '\uf795' '\uf796' '\uf797' '\uf798' '\uf799' '\uf79a' '\uf79b' '\uf79c' '\uf79d' '\uf79e' '\uf79f' '\uf7a0' '\uf7a1' '\uf7a2' '\uf7a3' '\uf7a4' '\uf7a5' '\uf7a6' '\uf7a7' '\uf7a8' '\uf7a9' '\uf7aa' '\uf7ab' '\uf7ac' '\uf7ad' '\uf7ae' '\uf7af' '\uf7b0' '\uf7b1' '\uf7b2' '\uf7b3' '\uf7b4' '\uf7b5' '\uf7b6' '\uf7b7' '\uf7b8' '\uf7b9' '\uf7ba' '\uf7bb' '\uf7bc' '\uf7bd' '\uf7be' '\uf7bf' '\uf7c0' '\uf7c1' '\uf7c2' '\uf7c3' '\uf7c4' '\uf7c5' '\uf7c6' '\uf7c7' '\uf7c8' '\uf7c9' '\uf7ca' '\uf7cb' '\uf7cc' '\uf7cd' '\uf7ce' '\uf7cf' '\uf7d0' '\uf7d1' '\uf7d2' '\uf7d3' '\uf7d4' '\uf7d5' '\uf7d6' '\uf7d7' '\uf7d8' '\uf7d9' '\uf7da' '\uf7db' '\uf7dc' '\uf7dd' '\uf7de' '\uf7df' '\uf7e0' '\uf7e1' '\uf7e2' '\uf7e3' '\uf7e4' '\uf7e5' '\uf7e6' '\uf7e7' '\uf7e8' '\uf7e9' '\uf7ea' '\uf7eb' '\uf7ec' '\uf7ed' '\uf7ee' '\uf7ef' '\uf7f0' '\uf7f1' '\uf7f2' '\uf7f3' '\uf7f4' '\uf7f5' '\uf7f6' '\uf7f7' '\uf7f8' '\uf7f9' '\uf7fa' '\uf7fb' '\uf7fc' '\uf7fd' '\uf7fe' '\uf7ff' ) ### Encoding table encoding_table = codecs.charmap_build(decoding_table) PKZ9*0 0site-packages/pip/_vendor/webencodings/tests.pyonu[ abc@sdZddlmZddlmZmZmZmZmZm Z m Z m Z m Z dZ dZdZdZd Zd Zd Zd Zd ZdS(u webencodings.tests ~~~~~~~~~~~~~~~~~~ A basic test suite for Encoding. :copyright: Copyright 2012 by Simon Sapin :license: BSD, see LICENSE for details. 
site-packages/pip/_vendor/webencodings/tests.pyo
site-packages/pip/_vendor/webencodings/__init__.py
# coding: utf8 """ webencodings ~~~~~~~~~~~~ This is a Python implementation of the `WHATWG Encoding standard `. See README for details. :copyright: Copyright 2012 by Simon Sapin :license: BSD, see LICENSE for details. """ from __future__ import unicode_literals import codecs from .labels import LABELS VERSION = '0.5' # Some names in Encoding are not valid Python aliases. Remap these. PYTHON_NAMES = { 'iso-8859-8-i': 'iso-8859-8', 'x-mac-cyrillic': 'mac-cyrillic', 'macintosh': 'mac-roman', 'windows-874': 'cp874'} CACHE = {} def ascii_lower(string): r"""Transform (only) ASCII letters to lower case: A-Z is mapped to a-z. :param string: An Unicode string. :returns: A new Unicode string. This is used for `ASCII case-insensitive `_ matching of encoding labels. The same matching is also used, among other things, for `CSS keywords `_. This is different from the :meth:`~py:str.lower` method of Unicode strings which also affect non-ASCII characters, sometimes mapping them into the ASCII range: >>> keyword = u'Bac\N{KELVIN SIGN}ground' >>> assert keyword.lower() == u'background' >>> assert ascii_lower(keyword) != keyword.lower() >>> assert ascii_lower(keyword) == u'bac\N{KELVIN SIGN}ground' """ # This turns out to be faster than unicode.translate() return string.encode('utf8').lower().decode('utf8') def lookup(label): """ Look for an encoding by its label. This is the spec’s `get an encoding `_ algorithm. Supported labels are listed there. :param label: A string. :returns: An :class:`Encoding` object, or :obj:`None` for an unknown label. """ # Only strip ASCII whitespace: U+0009, U+000A, U+000C, U+000D, and U+0020.
label = ascii_lower(label.strip('\t\n\f\r ')) name = LABELS.get(label) if name is None: return None encoding = CACHE.get(name) if encoding is None: if name == 'x-user-defined': from .x_user_defined import codec_info else: python_name = PYTHON_NAMES.get(name, name) # Any python_name value that gets to here should be valid. codec_info = codecs.lookup(python_name) encoding = Encoding(name, codec_info) CACHE[name] = encoding return encoding def _get_encoding(encoding_or_label): """ Accept either an encoding object or label. :param encoding: An :class:`Encoding` object or a label string. :returns: An :class:`Encoding` object. :raises: :exc:`~exceptions.LookupError` for an unknown label. """ if hasattr(encoding_or_label, 'codec_info'): return encoding_or_label encoding = lookup(encoding_or_label) if encoding is None: raise LookupError('Unknown encoding label: %r' % encoding_or_label) return encoding class Encoding(object): """Reresents a character encoding such as UTF-8, that can be used for decoding or encoding. .. attribute:: name Canonical name of the encoding .. attribute:: codec_info The actual implementation of the encoding, a stdlib :class:`~codecs.CodecInfo` object. See :func:`codecs.register`. """ def __init__(self, name, codec_info): self.name = name self.codec_info = codec_info def __repr__(self): return '' % self.name #: The UTF-8 encoding. Should be used for new content and formats. UTF8 = lookup('utf-8') _UTF16LE = lookup('utf-16le') _UTF16BE = lookup('utf-16be') def decode(input, fallback_encoding, errors='replace'): """ Decode a single string. :param input: A byte string :param fallback_encoding: An :class:`Encoding` object or a label string. The encoding to use if :obj:`input` does note have a BOM. :param errors: Type of error handling. See :func:`codecs.register`. :raises: :exc:`~exceptions.LookupError` for an unknown encoding label. :return: A ``(output, encoding)`` tuple of an Unicode string and an :obj:`Encoding`. """ # Fail early if `encoding` is an invalid label. fallback_encoding = _get_encoding(fallback_encoding) bom_encoding, input = _detect_bom(input) encoding = bom_encoding or fallback_encoding return encoding.codec_info.decode(input, errors)[0], encoding def _detect_bom(input): """Return (bom_encoding, input), with any BOM removed from the input.""" if input.startswith(b'\xFF\xFE'): return _UTF16LE, input[2:] if input.startswith(b'\xFE\xFF'): return _UTF16BE, input[2:] if input.startswith(b'\xEF\xBB\xBF'): return UTF8, input[3:] return None, input def encode(input, encoding=UTF8, errors='strict'): """ Encode a single string. :param input: An Unicode string. :param encoding: An :class:`Encoding` object or a label string. :param errors: Type of error handling. See :func:`codecs.register`. :raises: :exc:`~exceptions.LookupError` for an unknown encoding label. :return: A byte string. """ return _get_encoding(encoding).codec_info.encode(input, errors)[0] def iter_decode(input, fallback_encoding, errors='replace'): """ "Pull"-based decoder. :param input: An iterable of byte strings. The input is first consumed just enough to determine the encoding based on the precense of a BOM, then consumed on demand when the return value is. :param fallback_encoding: An :class:`Encoding` object or a label string. The encoding to use if :obj:`input` does note have a BOM. :param errors: Type of error handling. See :func:`codecs.register`. :raises: :exc:`~exceptions.LookupError` for an unknown encoding label. :returns: An ``(output, encoding)`` tuple. 
:obj:`output` is an iterable of Unicode strings, :obj:`encoding` is the :obj:`Encoding` that is being used. """ decoder = IncrementalDecoder(fallback_encoding, errors) generator = _iter_decode_generator(input, decoder) encoding = next(generator) return generator, encoding def _iter_decode_generator(input, decoder): """Return a generator that first yields the :obj:`Encoding`, then yields output chukns as Unicode strings. """ decode = decoder.decode input = iter(input) for chunck in input: output = decode(chunck) if output: assert decoder.encoding is not None yield decoder.encoding yield output break else: # Input exhausted without determining the encoding output = decode(b'', final=True) assert decoder.encoding is not None yield decoder.encoding if output: yield output return for chunck in input: output = decode(chunck) if output: yield output output = decode(b'', final=True) if output: yield output def iter_encode(input, encoding=UTF8, errors='strict'): """ “Pull”-based encoder. :param input: An iterable of Unicode strings. :param encoding: An :class:`Encoding` object or a label string. :param errors: Type of error handling. See :func:`codecs.register`. :raises: :exc:`~exceptions.LookupError` for an unknown encoding label. :returns: An iterable of byte strings. """ # Fail early if `encoding` is an invalid label. encode = IncrementalEncoder(encoding, errors).encode return _iter_encode_generator(input, encode) def _iter_encode_generator(input, encode): for chunck in input: output = encode(chunck) if output: yield output output = encode('', final=True) if output: yield output class IncrementalDecoder(object): """ “Push”-based decoder. :param fallback_encoding: An :class:`Encoding` object or a label string. The encoding to use if :obj:`input` does note have a BOM. :param errors: Type of error handling. See :func:`codecs.register`. :raises: :exc:`~exceptions.LookupError` for an unknown encoding label. """ def __init__(self, fallback_encoding, errors='replace'): # Fail early if `encoding` is an invalid label. self._fallback_encoding = _get_encoding(fallback_encoding) self._errors = errors self._buffer = b'' self._decoder = None #: The actual :class:`Encoding` that is being used, #: or :obj:`None` if that is not determined yet. #: (Ie. if there is not enough input yet to determine #: if there is a BOM.) self.encoding = None # Not known yet. def decode(self, input, final=False): """Decode one chunk of the input. :param input: A byte string. :param final: Indicate that no more input is available. Must be :obj:`True` if this is the last call. :returns: An Unicode string. """ decoder = self._decoder if decoder is not None: return decoder(input, final) input = self._buffer + input encoding, input = _detect_bom(input) if encoding is None: if len(input) < 3 and not final: # Not enough data yet. self._buffer = input return '' else: # No BOM encoding = self._fallback_encoding decoder = encoding.codec_info.incrementaldecoder(self._errors).decode self._decoder = decoder self.encoding = encoding return decoder(input, final) class IncrementalEncoder(object): """ “Push”-based encoder. :param encoding: An :class:`Encoding` object or a label string. :param errors: Type of error handling. See :func:`codecs.register`. :raises: :exc:`~exceptions.LookupError` for an unknown encoding label. .. method:: encode(input, final=False) :param input: An Unicode string. :param final: Indicate that no more input is available. Must be :obj:`True` if this is the last call. :returns: A byte string. 
""" def __init__(self, encoding=UTF8, errors='strict'): encoding = _get_encoding(encoding) self.encode = encoding.codec_info.incrementalencoder(errors).encode PKZ!1site-packages/pip/_vendor/webencodings/labels.pyonu[ abc@sdZidd6dd6dd6dd6dd6dd6dd6dd 6dd6dd 6dd 6dd 6dd 6dd6dd6dd6dd6dd6dd6dd6dd6dd6dd6dd6dd6dd6dd6dd6dd6dd6dd6dd 6dd!6dd"6d#d$6d#d%6d#d#6d#d&6d#d'6d#d(6d#d)6d#d*6d+d,6d+d-6d+d.6d+d/6d+d06d+d16d+d+6d+d26d+d36d+d46d+d56d+d66d+d76d+d86d9d:6d9d;6d9d<6d9d=6d9d>6d9d96d9d?6d9d@6d9dA6d9dB6d9dC6d9dD6dEdF6dEdG6dEdH6dEdE6dEdI6dEdJ6dEdK6dEdL6dEdM6dEdN6dEdO6dPdQ6dPdP6dPdR6dSdT6dSdS6dSdU6dSdV6dSdW6dSdX6dSdY6dZdZ6dZd[6dZd\6d]d]6d]d^6d]d_6d`da6d`d`6d`db6d`dc6d`dd6d`de6dfdf6dgdh6dgdi6dgdj6dgdg6dgdk6dldl6dmdn6dmdo6dmdm6dmdp6dqdr6dqds6dqdt6dqdu6dqdv6dqdq6dwdx6dwdw6dwdy6dzd{6dzdz6dzd|6d}d~6d}d6d}d6d}d6d}d6d}d6d}d6d}d6d}d6d}d6d}d6d}d6d}d6d}d6d}d6d}d}6d}d6dd6dd6dd6dd6dd6dd6dd6dd6dd6dd6dd6dd6dd6dd6dd6dd6dd6dd6dd6dd6dd6dd6dd6dd6dd6dd6dd6dd6dd6dd6dd6dd6dd6dd6dd6dd6dd6dd6dd6dd6dd6dd6dd6dd6dd6dd6dd6dd6dd6dd6dd6dd6dd6dd6dd6dd6dd6dd6dd6dd6dd6dd6dd6dd6dd6dd6dd6dd6dd6dd6dd6dd6dd6ZdS(s webencodings.labels ~~~~~~~~~~~~~~~~~~~ Map encoding labels to their name. :copyright: Copyright 2012 by Simon Sapin :license: BSD, see LICENSE for details. sutf-8sunicode-1-1-utf-8tutf8tibm866t866tcp866tcsibm866s iso-8859-2t csisolatin2s iso-ir-101s iso8859-2tiso88592s iso_8859-2siso_8859-2:1987tl2tlatin2s iso-8859-3t csisolatin3s iso-ir-109s iso8859-3tiso88593s iso_8859-3siso_8859-3:1988tl3tlatin3s iso-8859-4t csisolatin4s iso-ir-110s iso8859-4tiso88594s iso_8859-4siso_8859-4:1988tl4tlatin4s iso-8859-5tcsisolatincyrillictcyrillics iso-ir-144s iso8859-5tiso88595s iso_8859-5siso_8859-5:1988s iso-8859-6tarabicsasmo-708t csiso88596et csiso88596itcsisolatinarabicsecma-114s iso-8859-6-es iso-8859-6-is iso-ir-127s iso8859-6tiso88596s iso_8859-6siso_8859-6:1987s iso-8859-7tcsisolatingreeksecma-118telot_928tgreektgreek8s iso-ir-126s iso8859-7tiso88597s iso_8859-7siso_8859-7:1987t sun_eu_greeks iso-8859-8t csiso88598etcsisolatinhebrewthebrews iso-8859-8-es iso-ir-138s iso8859-8tiso88598s iso_8859-8siso_8859-8:1988tvisuals iso-8859-8-it csiso88598itlogicals iso-8859-10t csisolatin6s iso-ir-157s iso8859-10t iso885910tl6tlatin6s iso-8859-13s iso8859-13t iso885913s iso-8859-14s iso8859-14t iso885914s iso-8859-15t csisolatin9s iso8859-15t iso885915s iso_8859-15tl9s iso-8859-16skoi8-rtcskoi8rtkoitkoi8tkoi8_rskoi8-ut macintosht csmacintoshtmacs x-mac-romans windows-874sdos-874s iso-8859-11s iso8859-11t iso885911stis-620s windows-1250tcp1250sx-cp1250s windows-1251tcp1251sx-cp1251s windows-1252sansi_x3.4-1968tasciitcp1252tcp819t csisolatin1tibm819s iso-8859-1s iso-ir-100s iso8859-1tiso88591s iso_8859-1siso_8859-1:1987tl1tlatin1sus-asciisx-cp1252s windows-1253tcp1253sx-cp1253s windows-1254tcp1254t csisolatin5s iso-8859-9s iso-ir-148s iso8859-9tiso88599s iso_8859-9siso_8859-9:1989tl5tlatin5sx-cp1254s windows-1255tcp1255sx-cp1255s windows-1256tcp1256sx-cp1256s windows-1257tcp1257sx-cp1257s windows-1258tcp1258sx-cp1258sx-mac-cyrillicsx-mac-ukrainiantgbktchinesetcsgb2312tcsiso58gb231280tgb2312tgb_2312s gb_2312-80s iso-ir-58sx-gbktgb18030s hz-gb-2312tbig5s big5-hkscsscn-big5tcsbig5sx-x-big5seuc-jptcseucpkdfmtjapanesesx-euc-jps iso-2022-jpt csiso2022jpt shift_jist csshiftjistms_kanjis shift-jistsjiss windows-31jsx-sjisseuc-krtcseuckrt csksc56011987s iso-ir-149tkoreansks_c_5601-1987sks_c_5601-1989tksc5601tksc_5601s windows-949s iso-2022-krt 
i(tunicode_literalsNtCodeccBs eZddZddZRS(ustrictcCstj||tS(N(tcodecstcharmap_encodetencoding_table(tselftinputterrors((sK/usr/lib/python2.7/site-packages/pip/_vendor/webencodings/x_user_defined.pytencodescCstj||tS(N(Rtcharmap_decodetdecoding_table(RRR((sK/usr/lib/python2.7/site-packages/pip/_vendor/webencodings/x_user_defined.pytdecodes(t__name__t __module__RR (((sK/usr/lib/python2.7/site-packages/pip/_vendor/webencodings/x_user_defined.pyRs tIncrementalEncodercBseZedZRS(cCstj||jtdS(Ni(RRRR(RRtfinal((sK/usr/lib/python2.7/site-packages/pip/_vendor/webencodings/x_user_defined.pyRs(R R tFalseR(((sK/usr/lib/python2.7/site-packages/pip/_vendor/webencodings/x_user_defined.pyRstIncrementalDecodercBseZedZRS(cCstj||jtdS(Ni(RR RR (RRR((sK/usr/lib/python2.7/site-packages/pip/_vendor/webencodings/x_user_defined.pyR $s(R R RR (((sK/usr/lib/python2.7/site-packages/pip/_vendor/webencodings/x_user_defined.pyR#st StreamWritercBseZRS((R R (((sK/usr/lib/python2.7/site-packages/pip/_vendor/webencodings/x_user_defined.pyR(st StreamReadercBseZRS((R R (((sK/usr/lib/python2.7/site-packages/pip/_vendor/webencodings/x_user_defined.pyR,stnameux-user-definedRR tincrementalencodertincrementaldecodert streamreadert streamwriteru  !"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\]^_`abcdefghijklmnopqrstuvwxyz{|}~(t__doc__t __future__RRRRRRRt CodecInfoRR t codec_infoR t charmap_buildR(((sK/usr/lib/python2.7/site-packages/pip/_vendor/webencodings/x_user_defined.pyt s$      PKZ>/site-packages/pip/_vendor/webencodings/tests.pynu[# coding: utf8 """ webencodings.tests ~~~~~~~~~~~~~~~~~~ A basic test suite for Encoding. :copyright: Copyright 2012 by Simon Sapin :license: BSD, see LICENSE for details. """ from __future__ import unicode_literals from . import (lookup, LABELS, decode, encode, iter_decode, iter_encode, IncrementalDecoder, IncrementalEncoder, UTF8) def assert_raises(exception, function, *args, **kwargs): try: function(*args, **kwargs) except exception: return else: # pragma: no cover raise AssertionError('Did not raise %s.' % exception) def test_labels(): assert lookup('utf-8').name == 'utf-8' assert lookup('Utf-8').name == 'utf-8' assert lookup('UTF-8').name == 'utf-8' assert lookup('utf8').name == 'utf-8' assert lookup('utf8').name == 'utf-8' assert lookup('utf8 ').name == 'utf-8' assert lookup(' \r\nutf8\t').name == 'utf-8' assert lookup('u8') is None # Python label. assert lookup('utf-8 ') is None # Non-ASCII white space. assert lookup('US-ASCII').name == 'windows-1252' assert lookup('iso-8859-1').name == 'windows-1252' assert lookup('latin1').name == 'windows-1252' assert lookup('LATIN1').name == 'windows-1252' assert lookup('latin-1') is None assert lookup('LATİN1') is None # ASCII-only case insensitivity. 
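# --- Illustrative sketch (not from upstream webencodings): shows how the
# --- lookup()/decode() behaviour exercised by test_labels() above composes in
# --- ordinary use. The helper name below is hypothetical and is not collected
# --- by the test runner; it relies only on the public API imported at the top
# --- of this module.
def _example_label_lookup_and_decode():
    # 'latin1' is only a label; the Encoding standard maps it to windows-1252.
    encoding = lookup('latin1')
    assert encoding.name == 'windows-1252'
    # decode() returns the text together with the Encoding actually used
    # (here the fallback object itself, since the input carries no BOM).
    text, used = decode(b'caf\xe9', encoding)
    assert text == 'caf\xe9'
    assert used is encoding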
def test_all_labels(): for label in LABELS: assert decode(b'', label) == ('', lookup(label)) assert encode('', label) == b'' for repeat in [0, 1, 12]: output, _ = iter_decode([b''] * repeat, label) assert list(output) == [] assert list(iter_encode([''] * repeat, label)) == [] decoder = IncrementalDecoder(label) assert decoder.decode(b'') == '' assert decoder.decode(b'', final=True) == '' encoder = IncrementalEncoder(label) assert encoder.encode('') == b'' assert encoder.encode('', final=True) == b'' # All encoding names are valid labels too: for name in set(LABELS.values()): assert lookup(name).name == name def test_invalid_label(): assert_raises(LookupError, decode, b'\xEF\xBB\xBF\xc3\xa9', 'invalid') assert_raises(LookupError, encode, 'é', 'invalid') assert_raises(LookupError, iter_decode, [], 'invalid') assert_raises(LookupError, iter_encode, [], 'invalid') assert_raises(LookupError, IncrementalDecoder, 'invalid') assert_raises(LookupError, IncrementalEncoder, 'invalid') def test_decode(): assert decode(b'\x80', 'latin1') == ('€', lookup('latin1')) assert decode(b'\x80', lookup('latin1')) == ('€', lookup('latin1')) assert decode(b'\xc3\xa9', 'utf8') == ('é', lookup('utf8')) assert decode(b'\xc3\xa9', UTF8) == ('é', lookup('utf8')) assert decode(b'\xc3\xa9', 'ascii') == ('é', lookup('ascii')) assert decode(b'\xEF\xBB\xBF\xc3\xa9', 'ascii') == ('é', lookup('utf8')) # UTF-8 with BOM assert decode(b'\xFE\xFF\x00\xe9', 'ascii') == ('é', lookup('utf-16be')) # UTF-16-BE with BOM assert decode(b'\xFF\xFE\xe9\x00', 'ascii') == ('é', lookup('utf-16le')) # UTF-16-LE with BOM assert decode(b'\xFE\xFF\xe9\x00', 'ascii') == ('\ue900', lookup('utf-16be')) assert decode(b'\xFF\xFE\x00\xe9', 'ascii') == ('\ue900', lookup('utf-16le')) assert decode(b'\x00\xe9', 'UTF-16BE') == ('é', lookup('utf-16be')) assert decode(b'\xe9\x00', 'UTF-16LE') == ('é', lookup('utf-16le')) assert decode(b'\xe9\x00', 'UTF-16') == ('é', lookup('utf-16le')) assert decode(b'\xe9\x00', 'UTF-16BE') == ('\ue900', lookup('utf-16be')) assert decode(b'\x00\xe9', 'UTF-16LE') == ('\ue900', lookup('utf-16le')) assert decode(b'\x00\xe9', 'UTF-16') == ('\ue900', lookup('utf-16le')) def test_encode(): assert encode('é', 'latin1') == b'\xe9' assert encode('é', 'utf8') == b'\xc3\xa9' assert encode('é', 'utf8') == b'\xc3\xa9' assert encode('é', 'utf-16') == b'\xe9\x00' assert encode('é', 'utf-16le') == b'\xe9\x00' assert encode('é', 'utf-16be') == b'\x00\xe9' def test_iter_decode(): def iter_decode_to_string(input, fallback_encoding): output, _encoding = iter_decode(input, fallback_encoding) return ''.join(output) assert iter_decode_to_string([], 'latin1') == '' assert iter_decode_to_string([b''], 'latin1') == '' assert iter_decode_to_string([b'\xe9'], 'latin1') == 'é' assert iter_decode_to_string([b'hello'], 'latin1') == 'hello' assert iter_decode_to_string([b'he', b'llo'], 'latin1') == 'hello' assert iter_decode_to_string([b'hell', b'o'], 'latin1') == 'hello' assert iter_decode_to_string([b'\xc3\xa9'], 'latin1') == 'é' assert iter_decode_to_string([b'\xEF\xBB\xBF\xc3\xa9'], 'latin1') == 'é' assert iter_decode_to_string([ b'\xEF\xBB\xBF', b'\xc3', b'\xa9'], 'latin1') == 'é' assert iter_decode_to_string([ b'\xEF\xBB\xBF', b'a', b'\xc3'], 'latin1') == 'a\uFFFD' assert iter_decode_to_string([ b'', b'\xEF', b'', b'', b'\xBB\xBF\xc3', b'\xa9'], 'latin1') == 'é' assert iter_decode_to_string([b'\xEF\xBB\xBF'], 'latin1') == '' assert iter_decode_to_string([b'\xEF\xBB'], 'latin1') == 'ï»' assert iter_decode_to_string([b'\xFE\xFF\x00\xe9'], 'latin1') == 
'é' assert iter_decode_to_string([b'\xFF\xFE\xe9\x00'], 'latin1') == 'é' assert iter_decode_to_string([ b'', b'\xFF', b'', b'', b'\xFE\xe9', b'\x00'], 'latin1') == 'é' assert iter_decode_to_string([ b'', b'h\xe9', b'llo'], 'x-user-defined') == 'h\uF7E9llo' def test_iter_encode(): assert b''.join(iter_encode([], 'latin1')) == b'' assert b''.join(iter_encode([''], 'latin1')) == b'' assert b''.join(iter_encode(['é'], 'latin1')) == b'\xe9' assert b''.join(iter_encode(['', 'é', '', ''], 'latin1')) == b'\xe9' assert b''.join(iter_encode(['', 'é', '', ''], 'utf-16')) == b'\xe9\x00' assert b''.join(iter_encode(['', 'é', '', ''], 'utf-16le')) == b'\xe9\x00' assert b''.join(iter_encode(['', 'é', '', ''], 'utf-16be')) == b'\x00\xe9' assert b''.join(iter_encode([ '', 'h\uF7E9', '', 'llo'], 'x-user-defined')) == b'h\xe9llo' def test_x_user_defined(): encoded = b'2,\x0c\x0b\x1aO\xd9#\xcb\x0f\xc9\xbbt\xcf\xa8\xca' decoded = '2,\x0c\x0b\x1aO\uf7d9#\uf7cb\x0f\uf7c9\uf7bbt\uf7cf\uf7a8\uf7ca' encoded = b'aa' decoded = 'aa' assert decode(encoded, 'x-user-defined') == (decoded, lookup('x-user-defined')) assert encode(decoded, 'x-user-defined') == encoded PKZP12site-packages/pip/_vendor/webencodings/mklabels.pynu[""" webencodings.mklabels ~~~~~~~~~~~~~~~~~~~~~ Regenarate the webencodings.labels module. :copyright: Copyright 2012 by Simon Sapin :license: BSD, see LICENSE for details. """ import json try: from urllib import urlopen except ImportError: from urllib.request import urlopen def assert_lower(string): assert string == string.lower() return string def generate(url): parts = ['''\ """ webencodings.labels ~~~~~~~~~~~~~~~~~~~ Map encoding labels to their name. :copyright: Copyright 2012 by Simon Sapin :license: BSD, see LICENSE for details. """ # XXX Do not edit! # This file is automatically generated by mklabels.py LABELS = { '''] labels = [ (repr(assert_lower(label)).lstrip('u'), repr(encoding['name']).lstrip('u')) for category in json.loads(urlopen(url).read().decode('ascii')) for encoding in category['encodings'] for label in encoding['labels']] max_len = max(len(label) for label, name in labels) parts.extend( ' %s:%s %s,\n' % (label, ' ' * (max_len - len(label)), name) for label, name in labels) parts.append('}') return ''.join(parts) if __name__ == '__main__': print(generate('http://encoding.spec.whatwg.org/encodings.json')) PKZcS^0site-packages/pip/_vendor/webencodings/tests.pycnu[ abc@sdZddlmZddlmZmZmZmZmZm Z m Z m Z m Z dZ dZdZdZd Zd Zd Zd Zd ZdS(u webencodings.tests ~~~~~~~~~~~~~~~~~~ A basic test suite for Encoding. :copyright: Copyright 2012 by Simon Sapin :license: BSD, see LICENSE for details. 
i(tabsolute_importNcC@sdjt|}y t|ttddWntk ry t|ttddWntk ruqXtj|tj|<|jdd\}}t tj||tj|nXdS(Ns{0}.{1}tlevelit.i( tformatt__name__t __import__tglobalstlocalst ImportErrortsystmodulestrsplittsetattr(t modulenamet vendored_nametbasethead((s8/usr/lib/python2.7/site-packages/pip/_vendor/__init__.pytvendoreds    s*.whlt cachecontroltcoloramatdistlibtdistrothtml5libtlockfiletsixs six.movesssix.moves.urllibt packagingspackaging.versionspackaging.specifierst pkg_resourcestprogresstretryingtrequestssrequests.packagessrequests.packages.urllib3s&requests.packages.urllib3._collectionss$requests.packages.urllib3.connections(requests.packages.urllib3.connectionpools!requests.packages.urllib3.contribs*requests.packages.urllib3.contrib.ntlmpools+requests.packages.urllib3.contrib.pyopenssls$requests.packages.urllib3.exceptionss requests.packages.urllib3.fieldss"requests.packages.urllib3.fileposts"requests.packages.urllib3.packagess/requests.packages.urllib3.packages.ordered_dicts&requests.packages.urllib3.packages.sixs5requests.packages.urllib3.packages.ssl_match_hostnamesErequests.packages.urllib3.packages.ssl_match_hostname._implementations%requests.packages.urllib3.poolmanagers!requests.packages.urllib3.requests"requests.packages.urllib3.responsesrequests.packages.urllib3.utils)requests.packages.urllib3.util.connections&requests.packages.urllib3.util.requests'requests.packages.urllib3.util.responses$requests.packages.urllib3.util.retrys#requests.packages.urllib3.util.ssl_s&requests.packages.urllib3.util.timeouts"requests.packages.urllib3.util.url(t__doc__t __future__Rtglobtos.pathtosR tFalset DEBUNDLEDtpathtabspathtdirnamet__file__t WHEEL_DIRRtjoin(((s8/usr/lib/python2.7/site-packages/pip/_vendor/__init__.pytsh    )                                          PKZ͕͕#site-packages/pip/_vendor/distro.pynu[# Copyright 2015,2016 Nir Cohen # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. """ The ``distro`` package (``distro`` stands for Linux Distribution) provides information about the Linux distribution it runs on, such as a reliable machine-readable distro ID, or version information. It is a renewed alternative implementation for Python's original :py:func:`platform.linux_distribution` function, but it provides much more functionality. An alternative implementation became necessary because Python 3.5 deprecated this function, and Python 3.7 is expected to remove it altogether. Its predecessor function :py:func:`platform.dist` was already deprecated since Python 2.6 and is also expected to be removed in Python 3.7. Still, there are many cases in which access to Linux distribution information is needed. See `Python issue 1322 `_ for more information. """ import os import re import sys import json import shlex import logging import subprocess if not sys.platform.startswith('linux'): raise ImportError('Unsupported platform: {0}'.format(sys.platform)) _UNIXCONFDIR = '/etc' _OS_RELEASE_BASENAME = 'os-release' #: Translation table for normalizing the "ID" attribute defined in os-release #: files, for use by the :func:`distro.id` method. 
#: #: * Key: Value as defined in the os-release file, translated to lower case, #: with blanks translated to underscores. #: #: * Value: Normalized value. NORMALIZED_OS_ID = {} #: Translation table for normalizing the "Distributor ID" attribute returned by #: the lsb_release command, for use by the :func:`distro.id` method. #: #: * Key: Value as returned by the lsb_release command, translated to lower #: case, with blanks translated to underscores. #: #: * Value: Normalized value. NORMALIZED_LSB_ID = { 'enterpriseenterprise': 'oracle', # Oracle Enterprise Linux 'redhatenterpriseworkstation': 'rhel', # RHEL 6.7 } #: Translation table for normalizing the distro ID derived from the file name #: of distro release files, for use by the :func:`distro.id` method. #: #: * Key: Value as derived from the file name of a distro release file, #: translated to lower case, with blanks translated to underscores. #: #: * Value: Normalized value. NORMALIZED_DISTRO_ID = { 'redhat': 'rhel', # RHEL 6.x, 7.x } # Pattern for content of distro release file (reversed) _DISTRO_RELEASE_CONTENT_REVERSED_PATTERN = re.compile( r'(?:[^)]*\)(.*)\()? *(?:STL )?([\d.+\-a-z]*\d) *(?:esaeler *)?(.+)') # Pattern for base file name of distro release file _DISTRO_RELEASE_BASENAME_PATTERN = re.compile( r'(\w+)[-_](release|version)$') # Base file names to be ignored when searching for distro release file _DISTRO_RELEASE_IGNORE_BASENAMES = ( 'debian_version', 'lsb-release', 'oem-release', _OS_RELEASE_BASENAME, 'system-release' ) def linux_distribution(full_distribution_name=True): """ Return information about the current Linux distribution as a tuple ``(id_name, version, codename)`` with items as follows: * ``id_name``: If *full_distribution_name* is false, the result of :func:`distro.id`. Otherwise, the result of :func:`distro.name`. * ``version``: The result of :func:`distro.version`. * ``codename``: The result of :func:`distro.codename`. The interface of this function is compatible with the original :py:func:`platform.linux_distribution` function, supporting a subset of its parameters. The data it returns may not exactly be the same, because it uses more data sources than the original function, and that may lead to different data if the Linux distribution is not consistent across multiple data sources it provides (there are indeed such distributions ...). Another reason for differences is the fact that the :func:`distro.id` method normalizes the distro ID string to a reliable machine-readable value for a number of popular Linux distributions. """ return _distro.linux_distribution(full_distribution_name) def id(): """ Return the distro ID of the current Linux distribution, as a machine-readable string. For a number of Linux distributions, the returned distro ID value is *reliable*, in the sense that it is documented and that it does not change across releases of the distribution. 
This package maintains the following reliable distro ID values: ============== ========================================= Distro ID Distribution ============== ========================================= "ubuntu" Ubuntu "debian" Debian "rhel" RedHat Enterprise Linux "centos" CentOS "fedora" Fedora "sles" SUSE Linux Enterprise Server "opensuse" openSUSE "amazon" Amazon Linux "arch" Arch Linux "cloudlinux" CloudLinux OS "exherbo" Exherbo Linux "gentoo" GenToo Linux "ibm_powerkvm" IBM PowerKVM "kvmibm" KVM for IBM z Systems "linuxmint" Linux Mint "mageia" Mageia "mandriva" Mandriva Linux "parallels" Parallels "pidora" Pidora "raspbian" Raspbian "oracle" Oracle Linux (and Oracle Enterprise Linux) "scientific" Scientific Linux "slackware" Slackware "xenserver" XenServer ============== ========================================= If you have a need to get distros for reliable IDs added into this set, or if you find that the :func:`distro.id` function returns a different distro ID for one of the listed distros, please create an issue in the `distro issue tracker`_. **Lookup hierarchy and transformations:** First, the ID is obtained from the following sources, in the specified order. The first available and non-empty value is used: * the value of the "ID" attribute of the os-release file, * the value of the "Distributor ID" attribute returned by the lsb_release command, * the first part of the file name of the distro release file, The so determined ID value then passes the following transformations, before it is returned by this method: * it is translated to lower case, * blanks (which should not be there anyway) are translated to underscores, * a normalization of the ID is performed, based upon `normalization tables`_. The purpose of this normalization is to ensure that the ID is as reliable as possible, even across incompatible changes in the Linux distributions. A common reason for an incompatible change is the addition of an os-release file, or the addition of the lsb_release command, with ID values that differ from what was previously determined from the distro release file name. """ return _distro.id() def name(pretty=False): """ Return the name of the current Linux distribution, as a human-readable string. If *pretty* is false, the name is returned without version or codename. (e.g. "CentOS Linux") If *pretty* is true, the version and codename are appended. (e.g. "CentOS Linux 7.1.1503 (Core)") **Lookup hierarchy:** The name is obtained from the following sources, in the specified order. The first available and non-empty value is used: * If *pretty* is false: - the value of the "NAME" attribute of the os-release file, - the value of the "Distributor ID" attribute returned by the lsb_release command, - the value of the "" field of the distro release file. * If *pretty* is true: - the value of the "PRETTY_NAME" attribute of the os-release file, - the value of the "Description" attribute returned by the lsb_release command, - the value of the "" field of the distro release file, appended with the value of the pretty version ("" and "" fields) of the distro release file, if available. """ return _distro.name(pretty) def version(pretty=False, best=False): """ Return the version of the current Linux distribution, as a human-readable string. If *pretty* is false, the version is returned without codename (e.g. "7.0"). If *pretty* is true, the codename in parenthesis is appended, if the codename is non-empty (e.g. "7.0 (Maipo)"). 
Some distributions provide version numbers with different precisions in the different sources of distribution information. Examining the different sources in a fixed priority order does not always yield the most precise version (e.g. for Debian 8.2, or CentOS 7.1). The *best* parameter can be used to control the approach for the returned version: If *best* is false, the first non-empty version number in priority order of the examined sources is returned. If *best* is true, the most precise version number out of all examined sources is returned. **Lookup hierarchy:** In all cases, the version number is obtained from the following sources. If *best* is false, this order represents the priority order: * the value of the "VERSION_ID" attribute of the os-release file, * the value of the "Release" attribute returned by the lsb_release command, * the version number parsed from the "" field of the first line of the distro release file, * the version number parsed from the "PRETTY_NAME" attribute of the os-release file, if it follows the format of the distro release files. * the version number parsed from the "Description" attribute returned by the lsb_release command, if it follows the format of the distro release files. """ return _distro.version(pretty, best) def version_parts(best=False): """ Return the version of the current Linux distribution as a tuple ``(major, minor, build_number)`` with items as follows: * ``major``: The result of :func:`distro.major_version`. * ``minor``: The result of :func:`distro.minor_version`. * ``build_number``: The result of :func:`distro.build_number`. For a description of the *best* parameter, see the :func:`distro.version` method. """ return _distro.version_parts(best) def major_version(best=False): """ Return the major version of the current Linux distribution, as a string, if provided. Otherwise, the empty string is returned. The major version is the first part of the dot-separated version string. For a description of the *best* parameter, see the :func:`distro.version` method. """ return _distro.major_version(best) def minor_version(best=False): """ Return the minor version of the current Linux distribution, as a string, if provided. Otherwise, the empty string is returned. The minor version is the second part of the dot-separated version string. For a description of the *best* parameter, see the :func:`distro.version` method. """ return _distro.minor_version(best) def build_number(best=False): """ Return the build number of the current Linux distribution, as a string, if provided. Otherwise, the empty string is returned. The build number is the third part of the dot-separated version string. For a description of the *best* parameter, see the :func:`distro.version` method. """ return _distro.build_number(best) def like(): """ Return a space-separated list of distro IDs of distributions that are closely related to the current Linux distribution in regards to packaging and programming interfaces, for example distributions the current distribution is a derivative from. **Lookup hierarchy:** This information item is only provided by the os-release file. For details, see the description of the "ID_LIKE" attribute in the `os-release man page `_. """ return _distro.like() def codename(): """ Return the codename for the release of the current Linux distribution, as a string. If the distribution does not have a codename, an empty string is returned. Note that the returned codename is not always really a codename. For example, openSUSE returns "x86_64". 
This function does not handle such cases in any special way and just returns the string it finds, if any. **Lookup hierarchy:** * the codename within the "VERSION" attribute of the os-release file, if provided, * the value of the "Codename" attribute returned by the lsb_release command, * the value of the "" field of the distro release file. """ return _distro.codename() def info(pretty=False, best=False): """ Return certain machine-readable information items about the current Linux distribution in a dictionary, as shown in the following example: .. sourcecode:: python { 'id': 'rhel', 'version': '7.0', 'version_parts': { 'major': '7', 'minor': '0', 'build_number': '' }, 'like': 'fedora', 'codename': 'Maipo' } The dictionary structure and keys are always the same, regardless of which information items are available in the underlying data sources. The values for the various keys are as follows: * ``id``: The result of :func:`distro.id`. * ``version``: The result of :func:`distro.version`. * ``version_parts -> major``: The result of :func:`distro.major_version`. * ``version_parts -> minor``: The result of :func:`distro.minor_version`. * ``version_parts -> build_number``: The result of :func:`distro.build_number`. * ``like``: The result of :func:`distro.like`. * ``codename``: The result of :func:`distro.codename`. For a description of the *pretty* and *best* parameters, see the :func:`distro.version` method. """ return _distro.info(pretty, best) def os_release_info(): """ Return a dictionary containing key-value pairs for the information items from the os-release file data source of the current Linux distribution. See `os-release file`_ for details about these information items. """ return _distro.os_release_info() def lsb_release_info(): """ Return a dictionary containing key-value pairs for the information items from the lsb_release command data source of the current Linux distribution. See `lsb_release command output`_ for details about these information items. """ return _distro.lsb_release_info() def distro_release_info(): """ Return a dictionary containing key-value pairs for the information items from the distro release file data source of the current Linux distribution. See `distro release file`_ for details about these information items. """ return _distro.distro_release_info() def os_release_attr(attribute): """ Return a single named information item from the os-release file data source of the current Linux distribution. Parameters: * ``attribute`` (string): Key of the information item. Returns: * (string): Value of the information item, if the item exists. The empty string, if the item does not exist. See `os-release file`_ for details about these information items. """ return _distro.os_release_attr(attribute) def lsb_release_attr(attribute): """ Return a single named information item from the lsb_release command output data source of the current Linux distribution. Parameters: * ``attribute`` (string): Key of the information item. Returns: * (string): Value of the information item, if the item exists. The empty string, if the item does not exist. See `lsb_release command output`_ for details about these information items. """ return _distro.lsb_release_attr(attribute) def distro_release_attr(attribute): """ Return a single named information item from the distro release file data source of the current Linux distribution. Parameters: * ``attribute`` (string): Key of the information item. Returns: * (string): Value of the information item, if the item exists. 
The empty string, if the item does not exist. See `distro release file`_ for details about these information items. """ return _distro.distro_release_attr(attribute) class LinuxDistribution(object): """ Provides information about a Linux distribution. This package creates a private module-global instance of this class with default initialization arguments, that is used by the `consolidated accessor functions`_ and `single source accessor functions`_. By using default initialization arguments, that module-global instance returns data about the current Linux distribution (i.e. the distro this package runs on). Normally, it is not necessary to create additional instances of this class. However, in situations where control is needed over the exact data sources that are used, instances of this class can be created with a specific distro release file, or a specific os-release file, or without invoking the lsb_release command. """ def __init__(self, include_lsb=True, os_release_file='', distro_release_file=''): """ The initialization method of this class gathers information from the available data sources, and stores that in private instance attributes. Subsequent access to the information items uses these private instance attributes, so that the data sources are read only once. Parameters: * ``include_lsb`` (bool): Controls whether the `lsb_release command output`_ is included as a data source. If the lsb_release command is not available in the program execution path, the data source for the lsb_release command will be empty. * ``os_release_file`` (string): The path name of the `os-release file`_ that is to be used as a data source. An empty string (the default) will cause the default path name to be used (see `os-release file`_ for details). If the specified or defaulted os-release file does not exist, the data source for the os-release file will be empty. * ``distro_release_file`` (string): The path name of the `distro release file`_ that is to be used as a data source. An empty string (the default) will cause a default search algorithm to be used (see `distro release file`_ for details). If the specified distro release file does not exist, or if no default distro release file can be found, the data source for the distro release file will be empty. Public instance attributes: * ``os_release_file`` (string): The path name of the `os-release file`_ that is actually used as a data source. The empty string if no distro release file is used as a data source. * ``distro_release_file`` (string): The path name of the `distro release file`_ that is actually used as a data source. The empty string if no distro release file is used as a data source. Raises: * :py:exc:`IOError`: Some I/O issue with an os-release file or distro release file. * :py:exc:`subprocess.CalledProcessError`: The lsb_release command had some issue (other than not being available in the program execution path). * :py:exc:`UnicodeError`: A data source has unexpected characters or uses an unexpected encoding. 
""" self.os_release_file = os_release_file or \ os.path.join(_UNIXCONFDIR, _OS_RELEASE_BASENAME) self.distro_release_file = distro_release_file or '' # updated later self._os_release_info = self._get_os_release_info() self._lsb_release_info = self._get_lsb_release_info() \ if include_lsb else {} self._distro_release_info = self._get_distro_release_info() def __repr__(self): """Return repr of all info """ return \ "LinuxDistribution(" \ "os_release_file={0!r}, " \ "distro_release_file={1!r}, " \ "_os_release_info={2!r}, " \ "_lsb_release_info={3!r}, " \ "_distro_release_info={4!r})".format( self.os_release_file, self.distro_release_file, self._os_release_info, self._lsb_release_info, self._distro_release_info) def linux_distribution(self, full_distribution_name=True): """ Return information about the Linux distribution that is compatible with Python's :func:`platform.linux_distribution`, supporting a subset of its parameters. For details, see :func:`distro.linux_distribution`. """ return ( self.name() if full_distribution_name else self.id(), self.version(), self.codename() ) def id(self): """Return the distro ID of the Linux distribution, as a string. For details, see :func:`distro.id`. """ def normalize(distro_id, table): distro_id = distro_id.lower().replace(' ', '_') return table.get(distro_id, distro_id) distro_id = self.os_release_attr('id') if distro_id: return normalize(distro_id, NORMALIZED_OS_ID) distro_id = self.lsb_release_attr('distributor_id') if distro_id: return normalize(distro_id, NORMALIZED_LSB_ID) distro_id = self.distro_release_attr('id') if distro_id: return normalize(distro_id, NORMALIZED_DISTRO_ID) return '' def name(self, pretty=False): """ Return the name of the Linux distribution, as a string. For details, see :func:`distro.name`. """ name = self.os_release_attr('name') \ or self.lsb_release_attr('distributor_id') \ or self.distro_release_attr('name') if pretty: name = self.os_release_attr('pretty_name') \ or self.lsb_release_attr('description') if not name: name = self.distro_release_attr('name') version = self.version(pretty=True) if version: name = name + ' ' + version return name or '' def version(self, pretty=False, best=False): """ Return the version of the Linux distribution, as a string. For details, see :func:`distro.version`. """ versions = [ self.os_release_attr('version_id'), self.lsb_release_attr('release'), self.distro_release_attr('version_id'), self._parse_distro_release_content( self.os_release_attr('pretty_name')).get('version_id', ''), self._parse_distro_release_content( self.lsb_release_attr('description')).get('version_id', '') ] version = '' if best: # This algorithm uses the last version in priority order that has # the best precision. If the versions are not in conflict, that # does not matter; otherwise, using the last one instead of the # first one might be considered a surprise. for v in versions: if v.count(".") > version.count(".") or version == '': version = v else: for v in versions: if v != '': version = v break if pretty and version and self.codename(): version = u'{0} ({1})'.format(version, self.codename()) return version def version_parts(self, best=False): """ Return the version of the Linux distribution, as a tuple of version numbers. For details, see :func:`distro.version_parts`. 
""" version_str = self.version(best=best) if version_str: version_regex = re.compile(r'(\d+)\.?(\d+)?\.?(\d+)?') matches = version_regex.match(version_str) if matches: major, minor, build_number = matches.groups() return major, minor or '', build_number or '' return '', '', '' def major_version(self, best=False): """ Return the major version number of the current distribution. For details, see :func:`distro.major_version`. """ return self.version_parts(best)[0] def minor_version(self, best=False): """ Return the minor version number of the Linux distribution. For details, see :func:`distro.minor_version`. """ return self.version_parts(best)[1] def build_number(self, best=False): """ Return the build number of the Linux distribution. For details, see :func:`distro.build_number`. """ return self.version_parts(best)[2] def like(self): """ Return the IDs of distributions that are like the Linux distribution. For details, see :func:`distro.like`. """ return self.os_release_attr('id_like') or '' def codename(self): """ Return the codename of the Linux distribution. For details, see :func:`distro.codename`. """ return self.os_release_attr('codename') \ or self.lsb_release_attr('codename') \ or self.distro_release_attr('codename') \ or '' def info(self, pretty=False, best=False): """ Return certain machine-readable information about the Linux distribution. For details, see :func:`distro.info`. """ return dict( id=self.id(), version=self.version(pretty, best), version_parts=dict( major=self.major_version(best), minor=self.minor_version(best), build_number=self.build_number(best) ), like=self.like(), codename=self.codename(), ) def os_release_info(self): """ Return a dictionary containing key-value pairs for the information items from the os-release file data source of the Linux distribution. For details, see :func:`distro.os_release_info`. """ return self._os_release_info def lsb_release_info(self): """ Return a dictionary containing key-value pairs for the information items from the lsb_release command data source of the Linux distribution. For details, see :func:`distro.lsb_release_info`. """ return self._lsb_release_info def distro_release_info(self): """ Return a dictionary containing key-value pairs for the information items from the distro release file data source of the Linux distribution. For details, see :func:`distro.distro_release_info`. """ return self._distro_release_info def os_release_attr(self, attribute): """ Return a single named information item from the os-release file data source of the Linux distribution. For details, see :func:`distro.os_release_attr`. """ return self._os_release_info.get(attribute, '') def lsb_release_attr(self, attribute): """ Return a single named information item from the lsb_release command output data source of the Linux distribution. For details, see :func:`distro.lsb_release_attr`. """ return self._lsb_release_info.get(attribute, '') def distro_release_attr(self, attribute): """ Return a single named information item from the distro release file data source of the Linux distribution. For details, see :func:`distro.distro_release_attr`. """ return self._distro_release_info.get(attribute, '') def _get_os_release_info(self): """ Get the information items from the specified os-release file. Returns: A dictionary containing all information items. 
""" if os.path.isfile(self.os_release_file): with open(self.os_release_file) as release_file: return self._parse_os_release_content(release_file) return {} @staticmethod def _parse_os_release_content(lines): """ Parse the lines of an os-release file. Parameters: * lines: Iterable through the lines in the os-release file. Each line must be a unicode string or a UTF-8 encoded byte string. Returns: A dictionary containing all information items. """ props = {} lexer = shlex.shlex(lines, posix=True) lexer.whitespace_split = True # The shlex module defines its `wordchars` variable using literals, # making it dependent on the encoding of the Python source file. # In Python 2.6 and 2.7, the shlex source file is encoded in # 'iso-8859-1', and the `wordchars` variable is defined as a byte # string. This causes a UnicodeDecodeError to be raised when the # parsed content is a unicode object. The following fix resolves that # (... but it should be fixed in shlex...): if sys.version_info[0] == 2 and isinstance(lexer.wordchars, bytes): lexer.wordchars = lexer.wordchars.decode('iso-8859-1') tokens = list(lexer) for token in tokens: # At this point, all shell-like parsing has been done (i.e. # comments processed, quotes and backslash escape sequences # processed, multi-line values assembled, trailing newlines # stripped, etc.), so the tokens are now either: # * variable assignments: var=value # * commands or their arguments (not allowed in os-release) if '=' in token: k, v = token.split('=', 1) if isinstance(v, bytes): v = v.decode('utf-8') props[k.lower()] = v if k == 'VERSION': # this handles cases in which the codename is in # the `(CODENAME)` (rhel, centos, fedora) format # or in the `, CODENAME` format (Ubuntu). codename = re.search(r'(\(\D+\))|,(\s+)?\D+', v) if codename: codename = codename.group() codename = codename.strip('()') codename = codename.strip(',') codename = codename.strip() # codename appears within paranthese. props['codename'] = codename else: props['codename'] = '' else: # Ignore any tokens that are not variable assignments pass return props def _get_lsb_release_info(self): """ Get the information items from the lsb_release command output. Returns: A dictionary containing all information items. """ cmd = 'lsb_release -a' process = subprocess.Popen( cmd, shell=True, stdout=subprocess.PIPE, stderr=subprocess.PIPE) stdout, stderr = process.communicate() stdout, stderr = stdout.decode('utf-8'), stderr.decode('utf-8') code = process.returncode if code == 0: content = stdout.splitlines() return self._parse_lsb_release_content(content) elif code == 127: # Command not found return {} else: if sys.version_info[:2] >= (3, 5): raise subprocess.CalledProcessError(code, cmd, stdout, stderr) elif sys.version_info[:2] >= (2, 7): raise subprocess.CalledProcessError(code, cmd, stdout) elif sys.version_info[:2] == (2, 6): raise subprocess.CalledProcessError(code, cmd) @staticmethod def _parse_lsb_release_content(lines): """ Parse the output of the lsb_release command. Parameters: * lines: Iterable through the lines of the lsb_release output. Each line must be a unicode string or a UTF-8 encoded byte string. Returns: A dictionary containing all information items. """ props = {} for line in lines: line = line.decode('utf-8') if isinstance(line, bytes) else line kv = line.strip('\n').split(':', 1) if len(kv) != 2: # Ignore lines without colon. 
                continue
            k, v = kv
            props.update({k.replace(' ', '_').lower(): v.strip()})
        return props

    def _get_distro_release_info(self):
        """
        Get the information items from the specified distro release file.

        Returns:
            A dictionary containing all information items.
        """
        if self.distro_release_file:
            # If it was specified, we use it and parse what we can, even if
            # its file name or content does not match the expected pattern.
            distro_info = self._parse_distro_release_file(
                self.distro_release_file)
            basename = os.path.basename(self.distro_release_file)
            # The file name pattern for user-specified distro release files
            # is somewhat more tolerant (compared to when searching for the
            # file), because we want to use what was specified as best as
            # possible.
            match = _DISTRO_RELEASE_BASENAME_PATTERN.match(basename)
            if match:
                distro_info['id'] = match.group(1)
            return distro_info
        else:
            basenames = os.listdir(_UNIXCONFDIR)
            # We sort for repeatability in cases where there are multiple
            # distro specific files; e.g. CentOS, Oracle, Enterprise all
            # containing `redhat-release` on top of their own.
            basenames.sort()
            for basename in basenames:
                if basename in _DISTRO_RELEASE_IGNORE_BASENAMES:
                    continue
                match = _DISTRO_RELEASE_BASENAME_PATTERN.match(basename)
                if match:
                    filepath = os.path.join(_UNIXCONFDIR, basename)
                    distro_info = self._parse_distro_release_file(filepath)
                    if 'name' in distro_info:
                        # The name is always present if the pattern matches
                        self.distro_release_file = filepath
                        distro_info['id'] = match.group(1)
                        return distro_info
            return {}

    def _parse_distro_release_file(self, filepath):
        """
        Parse a distro release file.

        Parameters:

        * filepath: Path name of the distro release file.

        Returns:
            A dictionary containing all information items.
        """
        if os.path.isfile(filepath):
            with open(filepath) as fp:
                # Only parse the first line. For instance, on SLES there
                # are multiple lines. We don't want them...
                return self._parse_distro_release_content(fp.readline())
        return {}

    @staticmethod
    def _parse_distro_release_content(line):
        """
        Parse a line from a distro release file.

        Parameters:

        * line: Line from the distro release file. Must be a unicode string
                or a UTF-8 encoded byte string.

        Returns:
            A dictionary containing all information items.
        """
        if isinstance(line, bytes):
            line = line.decode('utf-8')
        matches = _DISTRO_RELEASE_CONTENT_REVERSED_PATTERN.match(
            line.strip()[::-1])
        distro_info = {}
        if matches:
            # regexp ensures non-None
            distro_info['name'] = matches.group(3)[::-1]
            if matches.group(2):
                distro_info['version_id'] = matches.group(2)[::-1]
            if matches.group(1):
                distro_info['codename'] = matches.group(1)[::-1]
        elif line:
            distro_info['name'] = line.strip()
        return distro_info


_distro = LinuxDistribution()


def main():
    import argparse

    logger = logging.getLogger(__name__)
    logger.setLevel(logging.DEBUG)
    logger.addHandler(logging.StreamHandler(sys.stdout))

    parser = argparse.ArgumentParser(description="Linux distro info tool")
    parser.add_argument(
        '--json',
        '-j',
        help="Output in machine readable format",
        action="store_true")
    args = parser.parse_args()

    if args.json:
        logger.info(json.dumps(info(), indent=4, sort_keys=True))
    else:
        logger.info('Name: %s', name(pretty=True))
        distribution_version = version(pretty=True)
        if distribution_version:
            logger.info('Version: %s', distribution_version)
        distribution_codename = codename()
        if distribution_codename:
            logger.info('Codename: %s', distribution_codename)


if __name__ == '__main__':
    main()
[site-packages/pip/_vendor/ipaddress.pyc: compiled CPython 2.7 bytecode of the vendored ipaddress backport ("A fast, lightweight IPv4/IPv6 manipulation library in Python", version 1.0.17); raw bytecode not reproduced here.]

[site-packages/pip/_vendor/requests/status_codes.pyo: compiled CPython 2.7 bytecode of requests.status_codes, which builds the codes LookupDict mapping symbolic names such as ok, not_found and gateway_timeout to their numeric HTTP status codes; raw bytecode not reproduced here.]
[site-packages/pip/_vendor/requests/adapters.pyo: compiled CPython 2.7 bytecode of requests.adapters ("This module contains the transport adapters that Requests uses to define and maintain connections"), including the BaseAdapter and HTTPAdapter classes; raw bytecode not reproduced here.]
[site-packages/pip/_vendor/requests/packages.py]

import sys

# This code exists for backwards compatibility reasons.
# I don't like it either. Just look the other way. :)

for package in ('urllib3', 'idna', 'chardet'):
    vendored_package = "pip._vendor." + package
    locals()[package] = __import__(vendored_package)
    # This traversal is apparently necessary such that the identities are
    # preserved (requests.packages.urllib3.* is urllib3.*)
    for mod in list(sys.modules):
        if mod == vendored_package or mod.startswith(vendored_package + '.'):
            unprefixed_mod = mod[len("pip._vendor."):]
            sys.modules['pip._vendor.requests.packages.' + unprefixed_mod] = sys.modules[mod]

# Kinda cool, though, right?
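
# ---------------------------------------------------------------------------
# Editor-added illustration, not part of the file above: a minimal sketch of
# the aliasing effect produced by the loop above. It assumes an environment
# where this vendored copy of pip is importable; the helper name
# `_alias_sketch` is hypothetical, and the assertion simply restates the
# "identities are preserved" comment.
def _alias_sketch():
    import sys
    import pip._vendor.requests.packages  # noqa: F401  (runs the loop above)
    # Both module names resolve to the very same module object.
    assert (sys.modules['pip._vendor.requests.packages.urllib3']
            is sys.modules['pip._vendor.urllib3'])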
[compiled bytecode: site-packages/pip/_vendor/requests/cookies.pyc — requests.cookies, compatibility code for using cookielib.CookieJar with requests (MockRequest wrapper, cookiejar_from_dict, merge_cookies)]
[compiled bytecode: site-packages/pip/_vendor/requests/exceptions.pyc — requests.exceptions, the set of Requests' exceptions (RequestException and its subclasses: HTTPError, ConnectionError, ProxyError, SSLError, Timeout, ConnectTimeout, ReadTimeout, URLRequired, TooManyRedirects, MissingSchema, InvalidURL, and the RequestsWarning family)]
[compiled bytecode: site-packages/pip/_vendor/requests/__init__.pyc — requests package initializer; docstring: "Requests HTTP Library ... Basic GET usage: requests.get('https://www.python.org')"; checks that the bundled urllib3/chardet versions are supported and installs a logging NullHandler]
[compiled bytecode: site-packages/pip/_vendor/requests/compat.pyo — requests.compat, import-compatibility shims between Python 2 and Python 3]
[compiled bytecode: site-packages/pip/_vendor/requests/api.pyc — requests.api, the top-level request/get/options/head/post/put/patch/delete helpers; a usage sketch follows below]
[compiled bytecode: site-packages/pip/_vendor/requests/utils.pyo — requests.utils, utility functions used within Requests and useful for external consumption (netrc auth, proxy bypass, header and encoding parsing, URL re-quoting, default headers, JSON encoding detection)]
[compiled bytecode: site-packages/pip/_vendor/requests/cookies.pyo — optimized bytecode for the same requests.cookies module as cookies.pyc above]
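A short usage sketch based on the requests.api docstrings preserved above; the httpbin URLs follow the examples quoted in those docstrings, and the vendored import path is an assumption taken from the archive layout (for the standalone library it would simply be `import requests`).

from pip._vendor import requests

r = requests.get('http://httpbin.org/get', params={'key1': 'value1'}, timeout=5)
r.status_code               # e.g. 200
r.headers['content-type']   # response headers, case-insensitive lookup
r.json()                    # decoded body when the server returns JSON

payload = {'key1': 'value1', 'key2': 'value2'}
r = requests.post('http://httpbin.org/post', data=payload)  # form-encoded body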
site-packages/pip/_vendor/requests/status_codes.py:

# -*- coding: utf-8 -*-

from .structures import LookupDict

_codes = {

    # Informational.
    100: ('continue',),
    101: ('switching_protocols',),
    102: ('processing',),
    103: ('checkpoint',),
    122: ('uri_too_long', 'request_uri_too_long'),
    200: ('ok', 'okay', 'all_ok', 'all_okay', 'all_good', '\\o/', '✓'),
    201: ('created',),
    202: ('accepted',),
    203: ('non_authoritative_info', 'non_authoritative_information'),
    204: ('no_content',),
    205: ('reset_content', 'reset'),
    206: ('partial_content', 'partial'),
    207: ('multi_status', 'multiple_status', 'multi_stati', 'multiple_stati'),
    208: ('already_reported',),
    226: ('im_used',),

    # Redirection.
    300: ('multiple_choices',),
    301: ('moved_permanently', 'moved', '\\o-'),
    302: ('found',),
    303: ('see_other', 'other'),
    304: ('not_modified',),
    305: ('use_proxy',),
    306: ('switch_proxy',),
    307: ('temporary_redirect', 'temporary_moved', 'temporary'),
    308: ('permanent_redirect', 'resume_incomplete', 'resume',),  # These 2 to be removed in 3.0

    # Client Error.
    400: ('bad_request', 'bad'),
    401: ('unauthorized',),
    402: ('payment_required', 'payment'),
    403: ('forbidden',),
    404: ('not_found', '-o-'),
    405: ('method_not_allowed', 'not_allowed'),
    406: ('not_acceptable',),
    407: ('proxy_authentication_required', 'proxy_auth', 'proxy_authentication'),
    408: ('request_timeout', 'timeout'),
    409: ('conflict',),
    410: ('gone',),
    411: ('length_required',),
    412: ('precondition_failed', 'precondition'),
    413: ('request_entity_too_large',),
    414: ('request_uri_too_large',),
    415: ('unsupported_media_type', 'unsupported_media', 'media_type'),
    416: ('requested_range_not_satisfiable', 'requested_range', 'range_not_satisfiable'),
    417: ('expectation_failed',),
    418: ('im_a_teapot', 'teapot', 'i_am_a_teapot'),
    421: ('misdirected_request',),
    422: ('unprocessable_entity', 'unprocessable'),
    423: ('locked',),
    424: ('failed_dependency', 'dependency'),
    425: ('unordered_collection', 'unordered'),
    426: ('upgrade_required', 'upgrade'),
    428: ('precondition_required', 'precondition'),
    429: ('too_many_requests', 'too_many'),
    431: ('header_fields_too_large', 'fields_too_large'),
    444: ('no_response', 'none'),
    449: ('retry_with', 'retry'),
    450: ('blocked_by_windows_parental_controls', 'parental_controls'),
    451: ('unavailable_for_legal_reasons', 'legal_reasons'),
    499: ('client_closed_request',),

    # Server Error.
    500: ('internal_server_error', 'server_error', '/o\\', '✗'),
    501: ('not_implemented',),
    502: ('bad_gateway',),
    503: ('service_unavailable', 'unavailable'),
    504: ('gateway_timeout',),
    505: ('http_version_not_supported', 'http_version'),
    506: ('variant_also_negotiates',),
    507: ('insufficient_storage',),
    509: ('bandwidth_limit_exceeded', 'bandwidth'),
    510: ('not_extended',),
    511: ('network_authentication_required', 'network_auth', 'network_authentication'),
}

codes = LookupDict(name='status_codes')

for code, titles in _codes.items():
    for title in titles:
        setattr(codes, title, code)
        if not title.startswith(('\\', '/')):
            setattr(codes, title.upper(), code)

[compiled bytecode: site-packages/pip/_vendor/requests/structures.pyo — optimized bytecode for requests.structures (CaseInsensitiveDict, LookupDict); the plain source of the same module appears below]
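A minimal sketch of how the codes object assembled above is used; the import path is assumed from the archive layout, and the values follow directly from the _codes table.

from pip._vendor.requests.status_codes import codes

codes.ok                   # 200
codes.OK                   # 200 -- upper-case alias, added for titles not starting with '\' or '/'
codes.temporary_redirect   # 307
codes['not_found']         # 404 -- LookupDict item access reads the same attribute dict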
site-packages/pip/_vendor/requests/structures.py:

# -*- coding: utf-8 -*-

"""
requests.structures
~~~~~~~~~~~~~~~~~~~

Data structures that power Requests.
"""

import collections

from .compat import OrderedDict


class CaseInsensitiveDict(collections.MutableMapping):
    """A case-insensitive ``dict``-like object.

    Implements all methods and operations of
    ``collections.MutableMapping`` as well as dict's ``copy``. Also
    provides ``lower_items``.

    All keys are expected to be strings. The structure remembers the
    case of the last key to be set, and ``iter(instance)``,
    ``keys()``, ``items()``, ``iterkeys()``, and ``iteritems()``
    will contain case-sensitive keys. However, querying and contains
    testing is case insensitive::

        cid = CaseInsensitiveDict()
        cid['Accept'] = 'application/json'
        cid['aCCEPT'] == 'application/json'  # True
        list(cid) == ['Accept']  # True

    For example, ``headers['content-encoding']`` will return the
    value of a ``'Content-Encoding'`` response header, regardless
    of how the header name was originally stored.

    If the constructor, ``.update``, or equality comparison
    operations are given keys that have equal ``.lower()``s, the
    behavior is undefined.
    """

    def __init__(self, data=None, **kwargs):
        self._store = OrderedDict()
        if data is None:
            data = {}
        self.update(data, **kwargs)

    def __setitem__(self, key, value):
        # Use the lowercased key for lookups, but store the actual
        # key alongside the value.
        self._store[key.lower()] = (key, value)

    def __getitem__(self, key):
        return self._store[key.lower()][1]

    def __delitem__(self, key):
        del self._store[key.lower()]

    def __iter__(self):
        return (casedkey for casedkey, mappedvalue in self._store.values())

    def __len__(self):
        return len(self._store)

    def lower_items(self):
        """Like iteritems(), but with all lowercase keys."""
        return (
            (lowerkey, keyval[1])
            for (lowerkey, keyval)
            in self._store.items()
        )

    def __eq__(self, other):
        if isinstance(other, collections.Mapping):
            other = CaseInsensitiveDict(other)
        else:
            return NotImplemented
        # Compare insensitively
        return dict(self.lower_items()) == dict(other.lower_items())

    # Copy is required
    def copy(self):
        return CaseInsensitiveDict(self._store.values())

    def __repr__(self):
        return str(dict(self.items()))


class LookupDict(dict):
    """Dictionary lookup object."""

    def __init__(self, name=None):
        self.name = name
        super(LookupDict, self).__init__()

    def __repr__(self):
        return '<lookup \'%s\'>' % (self.name)

    def __getitem__(self, key):
        # We allow fall-through here, so values default to None
        return self.__dict__.get(key, None)

    def get(self, key, default=None):
        return self.__dict__.get(key, default)

[compiled bytecode: site-packages/pip/_vendor/requests/models.pyc — requests.models, the primary objects that power Requests (RequestEncodingMixin, RequestHooksMixin, Request, PreparedRequest, Response)]
[compiled bytecode: site-packages/pip/_vendor/requests/__version__.pyo — package metadata: requests 2.18.4, "Python HTTP for Humans.", http://python-requests.org, by Kenneth Reitz, Apache 2.0, Copyright 2017 Kenneth Reitz]

site-packages/pip/_vendor/requests/exceptions.py:

# -*- coding: utf-8 -*-

"""
requests.exceptions
~~~~~~~~~~~~~~~~~~~

This module contains the set of Requests' exceptions.
"""
from pip._vendor.urllib3.exceptions import HTTPError as BaseHTTPError


class RequestException(IOError):
    """There was an ambiguous exception that occurred while handling your
    request.
    """

    def __init__(self, *args, **kwargs):
        """Initialize RequestException with `request` and `response` objects."""
        response = kwargs.pop('response', None)
        self.response = response
        self.request = kwargs.pop('request', None)
        if (response is not None and not self.request and
                hasattr(response, 'request')):
            self.request = self.response.request
        super(RequestException, self).__init__(*args, **kwargs)


class HTTPError(RequestException):
    """An HTTP error occurred."""


class ConnectionError(RequestException):
    """A Connection error occurred."""


class ProxyError(ConnectionError):
    """A proxy error occurred."""


class SSLError(ConnectionError):
    """An SSL error occurred."""


class Timeout(RequestException):
    """The request timed out.

    Catching this error will catch both
    :exc:`~requests.exceptions.ConnectTimeout` and
    :exc:`~requests.exceptions.ReadTimeout` errors.
    """


class ConnectTimeout(ConnectionError, Timeout):
    """The request timed out while trying to connect to the remote server.

    Requests that produced this error are safe to retry.
    """


class ReadTimeout(Timeout):
    """The server did not send any data in the allotted amount of time."""


class URLRequired(RequestException):
    """A valid URL is required to make a request."""


class TooManyRedirects(RequestException):
    """Too many redirects."""


class MissingSchema(RequestException, ValueError):
    """The URL schema (e.g.
http or https) is missing.""" class InvalidSchema(RequestException, ValueError): """See defaults.py for valid schemas.""" class InvalidURL(RequestException, ValueError): """The URL provided was somehow invalid.""" class InvalidHeader(RequestException, ValueError): """The header value provided was somehow invalid.""" class ChunkedEncodingError(RequestException): """The server declared chunked encoding but sent an invalid chunk.""" class ContentDecodingError(RequestException, BaseHTTPError): """Failed to decode response content""" class StreamConsumedError(RequestException, TypeError): """The content for this response was already consumed""" class RetryError(RequestException): """Custom retries logic failed""" class UnrewindableBodyError(RequestException): """Requests encountered an error when trying to rewind a body""" # Warnings class RequestsWarning(Warning): """Base warning for Requests.""" pass class FileModeWarning(RequestsWarning, DeprecationWarning): """A file was opened in text mode, but Requests determined its binary length.""" pass class RequestsDependencyWarning(RequestsWarning): """An imported dependency doesn't match the expected version range.""" pass PKZc/]])site-packages/pip/_vendor/requests/api.pynu[# -*- coding: utf-8 -*- """ requests.api ~~~~~~~~~~~~ This module implements the Requests API. :copyright: (c) 2012 by Kenneth Reitz. :license: Apache2, see LICENSE for more details. """ from . import sessions def request(method, url, **kwargs): """Constructs and sends a :class:`Request `. :param method: method for the new :class:`Request` object. :param url: URL for the new :class:`Request` object. :param params: (optional) Dictionary or bytes to be sent in the query string for the :class:`Request`. :param data: (optional) Dictionary or list of tuples ``[(key, value)]`` (will be form-encoded), bytes, or file-like object to send in the body of the :class:`Request`. :param json: (optional) json data to send in the body of the :class:`Request`. :param headers: (optional) Dictionary of HTTP Headers to send with the :class:`Request`. :param cookies: (optional) Dict or CookieJar object to send with the :class:`Request`. :param files: (optional) Dictionary of ``'name': file-like-objects`` (or ``{'name': file-tuple}``) for multipart encoding upload. ``file-tuple`` can be a 2-tuple ``('filename', fileobj)``, 3-tuple ``('filename', fileobj, 'content_type')`` or a 4-tuple ``('filename', fileobj, 'content_type', custom_headers)``, where ``'content-type'`` is a string defining the content type of the given file and ``custom_headers`` a dict-like object containing additional headers to add for the file. :param auth: (optional) Auth tuple to enable Basic/Digest/Custom HTTP Auth. :param timeout: (optional) How many seconds to wait for the server to send data before giving up, as a float, or a :ref:`(connect timeout, read timeout) ` tuple. :type timeout: float or tuple :param allow_redirects: (optional) Boolean. Enable/disable GET/OPTIONS/POST/PUT/PATCH/DELETE/HEAD redirection. Defaults to ``True``. :type allow_redirects: bool :param proxies: (optional) Dictionary mapping protocol to the URL of the proxy. :param verify: (optional) Either a boolean, in which case it controls whether we verify the server's TLS certificate, or a string, in which case it must be a path to a CA bundle to use. Defaults to ``True``. :param stream: (optional) if ``False``, the response content will be immediately downloaded. :param cert: (optional) if String, path to ssl client cert file (.pem). 
If Tuple, ('cert', 'key') pair. :return: :class:`Response ` object :rtype: requests.Response Usage:: >>> import requests >>> req = requests.request('GET', 'http://httpbin.org/get') """ # By using the 'with' statement we are sure the session is closed, thus we # avoid leaving sockets open which can trigger a ResourceWarning in some # cases, and look like a memory leak in others. with sessions.Session() as session: return session.request(method=method, url=url, **kwargs) def get(url, params=None, **kwargs): r"""Sends a GET request. :param url: URL for the new :class:`Request` object. :param params: (optional) Dictionary or bytes to be sent in the query string for the :class:`Request`. :param \*\*kwargs: Optional arguments that ``request`` takes. :return: :class:`Response ` object :rtype: requests.Response """ kwargs.setdefault('allow_redirects', True) return request('get', url, params=params, **kwargs) def options(url, **kwargs): r"""Sends an OPTIONS request. :param url: URL for the new :class:`Request` object. :param \*\*kwargs: Optional arguments that ``request`` takes. :return: :class:`Response ` object :rtype: requests.Response """ kwargs.setdefault('allow_redirects', True) return request('options', url, **kwargs) def head(url, **kwargs): r"""Sends a HEAD request. :param url: URL for the new :class:`Request` object. :param \*\*kwargs: Optional arguments that ``request`` takes. :return: :class:`Response ` object :rtype: requests.Response """ kwargs.setdefault('allow_redirects', False) return request('head', url, **kwargs) def post(url, data=None, json=None, **kwargs): r"""Sends a POST request. :param url: URL for the new :class:`Request` object. :param data: (optional) Dictionary (will be form-encoded), bytes, or file-like object to send in the body of the :class:`Request`. :param json: (optional) json data to send in the body of the :class:`Request`. :param \*\*kwargs: Optional arguments that ``request`` takes. :return: :class:`Response ` object :rtype: requests.Response """ return request('post', url, data=data, json=json, **kwargs) def put(url, data=None, **kwargs): r"""Sends a PUT request. :param url: URL for the new :class:`Request` object. :param data: (optional) Dictionary (will be form-encoded), bytes, or file-like object to send in the body of the :class:`Request`. :param json: (optional) json data to send in the body of the :class:`Request`. :param \*\*kwargs: Optional arguments that ``request`` takes. :return: :class:`Response ` object :rtype: requests.Response """ return request('put', url, data=data, **kwargs) def patch(url, data=None, **kwargs): r"""Sends a PATCH request. :param url: URL for the new :class:`Request` object. :param data: (optional) Dictionary (will be form-encoded), bytes, or file-like object to send in the body of the :class:`Request`. :param json: (optional) json data to send in the body of the :class:`Request`. :param \*\*kwargs: Optional arguments that ``request`` takes. :return: :class:`Response ` object :rtype: requests.Response """ return request('patch', url, data=data, **kwargs) def delete(url, **kwargs): r"""Sends a DELETE request. :param url: URL for the new :class:`Request` object. :param \*\*kwargs: Optional arguments that ``request`` takes. :return: :class:`Response ` object :rtype: requests.Response """ return request('delete', url, **kwargs) PKZe}Gt,site-packages/pip/_vendor/requests/models.pynu[# -*- coding: utf-8 -*- """ requests.models ~~~~~~~~~~~~~~~ This module contains the primary objects that power Requests. 
""" import collections import datetime import sys # Import encoding now, to avoid implicit import later. # Implicit import within threads may cause LookupError when standard library is in a ZIP, # such as in Embedded Python. See https://github.com/requests/requests/issues/3578. import encodings.idna from pip._vendor.urllib3.fields import RequestField from pip._vendor.urllib3.filepost import encode_multipart_formdata from pip._vendor.urllib3.util import parse_url from pip._vendor.urllib3.exceptions import ( DecodeError, ReadTimeoutError, ProtocolError, LocationParseError) from io import UnsupportedOperation from .hooks import default_hooks from .structures import CaseInsensitiveDict from .auth import HTTPBasicAuth from .cookies import cookiejar_from_dict, get_cookie_header, _copy_cookie_jar from .exceptions import ( HTTPError, MissingSchema, InvalidURL, ChunkedEncodingError, ContentDecodingError, ConnectionError, StreamConsumedError) from ._internal_utils import to_native_string, unicode_is_ascii from .utils import ( guess_filename, get_auth_from_url, requote_uri, stream_decode_response_unicode, to_key_val_list, parse_header_links, iter_slices, guess_json_utf, super_len, check_header_validity) from .compat import ( cookielib, urlunparse, urlsplit, urlencode, str, bytes, is_py2, chardet, builtin_str, basestring) from .compat import json as complexjson from .status_codes import codes #: The set of HTTP status codes that indicate an automatically #: processable redirect. REDIRECT_STATI = ( codes.moved, # 301 codes.found, # 302 codes.other, # 303 codes.temporary_redirect, # 307 codes.permanent_redirect, # 308 ) DEFAULT_REDIRECT_LIMIT = 30 CONTENT_CHUNK_SIZE = 10 * 1024 ITER_CHUNK_SIZE = 512 class RequestEncodingMixin(object): @property def path_url(self): """Build the path URL to use.""" url = [] p = urlsplit(self.url) path = p.path if not path: path = '/' url.append(path) query = p.query if query: url.append('?') url.append(query) return ''.join(url) @staticmethod def _encode_params(data): """Encode parameters in a piece of data. Will successfully encode parameters when passed as a dict or a list of 2-tuples. Order is retained if data is a list of 2-tuples but arbitrary if parameters are supplied as a dict. """ if isinstance(data, (str, bytes)): return data elif hasattr(data, 'read'): return data elif hasattr(data, '__iter__'): result = [] for k, vs in to_key_val_list(data): if isinstance(vs, basestring) or not hasattr(vs, '__iter__'): vs = [vs] for v in vs: if v is not None: result.append( (k.encode('utf-8') if isinstance(k, str) else k, v.encode('utf-8') if isinstance(v, str) else v)) return urlencode(result, doseq=True) else: return data @staticmethod def _encode_files(files, data): """Build the body for a multipart/form-data request. Will successfully encode files when passed as a dict or a list of tuples. Order is retained if data is a list of tuples but arbitrary if parameters are supplied as a dict. The tuples may be 2-tuples (filename, fileobj), 3-tuples (filename, fileobj, contentype) or 4-tuples (filename, fileobj, contentype, custom_headers). """ if (not files): raise ValueError("Files must be provided.") elif isinstance(data, basestring): raise ValueError("Data must not be a string.") new_fields = [] fields = to_key_val_list(data or {}) files = to_key_val_list(files or {}) for field, val in fields: if isinstance(val, basestring) or not hasattr(val, '__iter__'): val = [val] for v in val: if v is not None: # Don't call str() on bytestrings: in Py3 it all goes wrong. 
if not isinstance(v, bytes): v = str(v) new_fields.append( (field.decode('utf-8') if isinstance(field, bytes) else field, v.encode('utf-8') if isinstance(v, str) else v)) for (k, v) in files: # support for explicit filename ft = None fh = None if isinstance(v, (tuple, list)): if len(v) == 2: fn, fp = v elif len(v) == 3: fn, fp, ft = v else: fn, fp, ft, fh = v else: fn = guess_filename(v) or k fp = v if isinstance(fp, (str, bytes, bytearray)): fdata = fp else: fdata = fp.read() rf = RequestField(name=k, data=fdata, filename=fn, headers=fh) rf.make_multipart(content_type=ft) new_fields.append(rf) body, content_type = encode_multipart_formdata(new_fields) return body, content_type class RequestHooksMixin(object): def register_hook(self, event, hook): """Properly register a hook.""" if event not in self.hooks: raise ValueError('Unsupported event specified, with event name "%s"' % (event)) if isinstance(hook, collections.Callable): self.hooks[event].append(hook) elif hasattr(hook, '__iter__'): self.hooks[event].extend(h for h in hook if isinstance(h, collections.Callable)) def deregister_hook(self, event, hook): """Deregister a previously registered hook. Returns True if the hook existed, False if not. """ try: self.hooks[event].remove(hook) return True except ValueError: return False class Request(RequestHooksMixin): """A user-created :class:`Request ` object. Used to prepare a :class:`PreparedRequest `, which is sent to the server. :param method: HTTP method to use. :param url: URL to send. :param headers: dictionary of headers to send. :param files: dictionary of {filename: fileobject} files to multipart upload. :param data: the body to attach to the request. If a dictionary is provided, form-encoding will take place. :param json: json for the body to attach to the request (if files or data is not specified). :param params: dictionary of URL parameters to append to the URL. :param auth: Auth handler or (user, pass) tuple. :param cookies: dictionary or CookieJar of cookies to attach to this request. :param hooks: dictionary of callback hooks, for internal usage. Usage:: >>> import requests >>> req = requests.Request('GET', 'http://httpbin.org/get') >>> req.prepare() """ def __init__(self, method=None, url=None, headers=None, files=None, data=None, params=None, auth=None, cookies=None, hooks=None, json=None): # Default empty dicts for dict params. data = [] if data is None else data files = [] if files is None else files headers = {} if headers is None else headers params = {} if params is None else params hooks = {} if hooks is None else hooks self.hooks = default_hooks() for (k, v) in list(hooks.items()): self.register_hook(event=k, hook=v) self.method = method self.url = url self.headers = headers self.files = files self.data = data self.json = json self.params = params self.auth = auth self.cookies = cookies def __repr__(self): return '' % (self.method) def prepare(self): """Constructs a :class:`PreparedRequest ` for transmission and returns it.""" p = PreparedRequest() p.prepare( method=self.method, url=self.url, headers=self.headers, files=self.files, data=self.data, json=self.json, params=self.params, auth=self.auth, cookies=self.cookies, hooks=self.hooks, ) return p class PreparedRequest(RequestEncodingMixin, RequestHooksMixin): """The fully mutable :class:`PreparedRequest ` object, containing the exact bytes that will be sent to the server. Generated from either a :class:`Request ` object or manually. 
Usage:: >>> import requests >>> req = requests.Request('GET', 'http://httpbin.org/get') >>> r = req.prepare() >>> s = requests.Session() >>> s.send(r) """ def __init__(self): #: HTTP verb to send to the server. self.method = None #: HTTP URL to send the request to. self.url = None #: dictionary of HTTP headers. self.headers = None # The `CookieJar` used to create the Cookie header will be stored here # after prepare_cookies is called self._cookies = None #: request body to send to the server. self.body = None #: dictionary of callback hooks, for internal usage. self.hooks = default_hooks() #: integer denoting starting position of a readable file-like body. self._body_position = None def prepare(self, method=None, url=None, headers=None, files=None, data=None, params=None, auth=None, cookies=None, hooks=None, json=None): """Prepares the entire request with the given parameters.""" self.prepare_method(method) self.prepare_url(url, params) self.prepare_headers(headers) self.prepare_cookies(cookies) self.prepare_body(data, files, json) self.prepare_auth(auth, url) # Note that prepare_auth must be last to enable authentication schemes # such as OAuth to work on a fully prepared request. # This MUST go after prepare_auth. Authenticators could add a hook self.prepare_hooks(hooks) def __repr__(self): return '' % (self.method) def copy(self): p = PreparedRequest() p.method = self.method p.url = self.url p.headers = self.headers.copy() if self.headers is not None else None p._cookies = _copy_cookie_jar(self._cookies) p.body = self.body p.hooks = self.hooks p._body_position = self._body_position return p def prepare_method(self, method): """Prepares the given HTTP method.""" self.method = method if self.method is not None: self.method = to_native_string(self.method.upper()) @staticmethod def _get_idna_encoded_host(host): import idna try: host = idna.encode(host, uts46=True).decode('utf-8') except idna.IDNAError: raise UnicodeError return host def prepare_url(self, url, params): """Prepares the given HTTP URL.""" #: Accept objects that have string representations. #: We're unable to blindly call unicode/str functions #: as this will include the bytestring indicator (b'') #: on python 3.x. #: https://github.com/requests/requests/pull/2238 if isinstance(url, bytes): url = url.decode('utf8') else: url = unicode(url) if is_py2 else str(url) # Remove leading whitespaces from url url = url.lstrip() # Don't do any URL preparation for non-HTTP schemes like `mailto`, # `data` etc to work around exceptions from `url_parse`, which # handles RFC 3986 only. if ':' in url and not url.lower().startswith('http'): self.url = url return # Support for unicode domain names and paths. try: scheme, auth, host, port, path, query, fragment = parse_url(url) except LocationParseError as e: raise InvalidURL(*e.args) if not scheme: error = ("Invalid URL {0!r}: No schema supplied. Perhaps you meant http://{0}?") error = error.format(to_native_string(url, 'utf8')) raise MissingSchema(error) if not host: raise InvalidURL("Invalid URL %r: No host supplied" % url) # In general, we want to try IDNA encoding the hostname if the string contains # non-ASCII characters. This allows users to automatically get the correct IDNA # behaviour. For strings containing only ASCII characters, we need to also verify # it doesn't start with a wildcard (*), before allowing the unencoded hostname. 
if not unicode_is_ascii(host): try: host = self._get_idna_encoded_host(host) except UnicodeError: raise InvalidURL('URL has an invalid label.') elif host.startswith(u'*'): raise InvalidURL('URL has an invalid label.') # Carefully reconstruct the network location netloc = auth or '' if netloc: netloc += '@' netloc += host if port: netloc += ':' + str(port) # Bare domains aren't valid URLs. if not path: path = '/' if is_py2: if isinstance(scheme, str): scheme = scheme.encode('utf-8') if isinstance(netloc, str): netloc = netloc.encode('utf-8') if isinstance(path, str): path = path.encode('utf-8') if isinstance(query, str): query = query.encode('utf-8') if isinstance(fragment, str): fragment = fragment.encode('utf-8') if isinstance(params, (str, bytes)): params = to_native_string(params) enc_params = self._encode_params(params) if enc_params: if query: query = '%s&%s' % (query, enc_params) else: query = enc_params url = requote_uri(urlunparse([scheme, netloc, path, None, query, fragment])) self.url = url def prepare_headers(self, headers): """Prepares the given HTTP headers.""" self.headers = CaseInsensitiveDict() if headers: for header in headers.items(): # Raise exception on invalid header value. check_header_validity(header) name, value = header self.headers[to_native_string(name)] = value def prepare_body(self, data, files, json=None): """Prepares the given HTTP body data.""" # Check if file, fo, generator, iterator. # If not, run through normal process. # Nottin' on you. body = None content_type = None if not data and json is not None: # urllib3 requires a bytes-like body. Python 2's json.dumps # provides this natively, but Python 3 gives a Unicode string. content_type = 'application/json' body = complexjson.dumps(json) if not isinstance(body, bytes): body = body.encode('utf-8') is_stream = all([ hasattr(data, '__iter__'), not isinstance(data, (basestring, list, tuple, collections.Mapping)) ]) try: length = super_len(data) except (TypeError, AttributeError, UnsupportedOperation): length = None if is_stream: body = data if getattr(body, 'tell', None) is not None: # Record the current file position before reading. # This will allow us to rewind a file in the event # of a redirect. try: self._body_position = body.tell() except (IOError, OSError): # This differentiates from None, allowing us to catch # a failed `tell()` later when trying to rewind the body self._body_position = object() if files: raise NotImplementedError('Streamed bodies and files are mutually exclusive.') if length: self.headers['Content-Length'] = builtin_str(length) else: self.headers['Transfer-Encoding'] = 'chunked' else: # Multi-part file uploads. if files: (body, content_type) = self._encode_files(files, data) else: if data: body = self._encode_params(data) if isinstance(data, basestring) or hasattr(data, 'read'): content_type = None else: content_type = 'application/x-www-form-urlencoded' self.prepare_content_length(body) # Add content-type if it wasn't explicitly provided. if content_type and ('content-type' not in self.headers): self.headers['Content-Type'] = content_type self.body = body def prepare_content_length(self, body): """Prepare Content-Length header based on request method and body""" if body is not None: length = super_len(body) if length: # If length exists, set it. Otherwise, we fallback # to Transfer-Encoding: chunked. 
self.headers['Content-Length'] = builtin_str(length) elif self.method not in ('GET', 'HEAD') and self.headers.get('Content-Length') is None: # Set Content-Length to 0 for methods that can have a body # but don't provide one. (i.e. not GET or HEAD) self.headers['Content-Length'] = '0' def prepare_auth(self, auth, url=''): """Prepares the given HTTP auth data.""" # If no Auth is explicitly provided, extract it from the URL first. if auth is None: url_auth = get_auth_from_url(self.url) auth = url_auth if any(url_auth) else None if auth: if isinstance(auth, tuple) and len(auth) == 2: # special-case basic HTTP auth auth = HTTPBasicAuth(*auth) # Allow auth to make its changes. r = auth(self) # Update self to reflect the auth changes. self.__dict__.update(r.__dict__) # Recompute Content-Length self.prepare_content_length(self.body) def prepare_cookies(self, cookies): """Prepares the given HTTP cookie data. This function eventually generates a ``Cookie`` header from the given cookies using cookielib. Due to cookielib's design, the header will not be regenerated if it already exists, meaning this function can only be called once for the life of the :class:`PreparedRequest ` object. Any subsequent calls to ``prepare_cookies`` will have no actual effect, unless the "Cookie" header is removed beforehand. """ if isinstance(cookies, cookielib.CookieJar): self._cookies = cookies else: self._cookies = cookiejar_from_dict(cookies) cookie_header = get_cookie_header(self._cookies, self) if cookie_header is not None: self.headers['Cookie'] = cookie_header def prepare_hooks(self, hooks): """Prepares the given hooks.""" # hooks can be passed as None to the prepare method and to this # method. To prevent iterating over None, simply use an empty list # if hooks is False-y hooks = hooks or [] for event in hooks: self.register_hook(event, hooks[event]) class Response(object): """The :class:`Response ` object, which contains a server's response to an HTTP request. """ __attrs__ = [ '_content', 'status_code', 'headers', 'url', 'history', 'encoding', 'reason', 'cookies', 'elapsed', 'request' ] def __init__(self): self._content = False self._content_consumed = False self._next = None #: Integer Code of responded HTTP Status, e.g. 404 or 200. self.status_code = None #: Case-insensitive Dictionary of Response Headers. #: For example, ``headers['content-encoding']`` will return the #: value of a ``'Content-Encoding'`` response header. self.headers = CaseInsensitiveDict() #: File-like object representation of response (for advanced usage). #: Use of ``raw`` requires that ``stream=True`` be set on the request. # This requirement does not apply for use internally to Requests. self.raw = None #: Final URL location of Response. self.url = None #: Encoding to decode with when accessing r.text. self.encoding = None #: A list of :class:`Response ` objects from #: the history of the Request. Any redirect responses will end #: up here. The list is sorted from the oldest to the most recent request. self.history = [] #: Textual reason of responded HTTP Status, e.g. "Not Found" or "OK". self.reason = None #: A CookieJar of Cookies the server sent back. self.cookies = cookiejar_from_dict({}) #: The amount of time elapsed between sending the request #: and the arrival of the response (as a timedelta). #: This property specifically measures the time taken between sending #: the first byte of the request and finishing parsing the headers. 
It #: is therefore unaffected by consuming the response content or the #: value of the ``stream`` keyword argument. self.elapsed = datetime.timedelta(0) #: The :class:`PreparedRequest ` object to which this #: is a response. self.request = None def __enter__(self): return self def __exit__(self, *args): self.close() def __getstate__(self): # Consume everything; accessing the content attribute makes # sure the content has been fully read. if not self._content_consumed: self.content return dict( (attr, getattr(self, attr, None)) for attr in self.__attrs__ ) def __setstate__(self, state): for name, value in state.items(): setattr(self, name, value) # pickled objects do not have .raw setattr(self, '_content_consumed', True) setattr(self, 'raw', None) def __repr__(self): return '' % (self.status_code) def __bool__(self): """Returns True if :attr:`status_code` is less than 400. This attribute checks if the status code of the response is between 400 and 600 to see if there was a client error or a server error. If the status code, is between 200 and 400, this will return True. This is **not** a check to see if the response code is ``200 OK``. """ return self.ok def __nonzero__(self): """Returns True if :attr:`status_code` is less than 400. This attribute checks if the status code of the response is between 400 and 600 to see if there was a client error or a server error. If the status code, is between 200 and 400, this will return True. This is **not** a check to see if the response code is ``200 OK``. """ return self.ok def __iter__(self): """Allows you to use a response as an iterator.""" return self.iter_content(128) @property def ok(self): """Returns True if :attr:`status_code` is less than 400. This attribute checks if the status code of the response is between 400 and 600 to see if there was a client error or a server error. If the status code, is between 200 and 400, this will return True. This is **not** a check to see if the response code is ``200 OK``. """ try: self.raise_for_status() except HTTPError: return False return True @property def is_redirect(self): """True if this Response is a well-formed HTTP redirect that could have been processed automatically (by :meth:`Session.resolve_redirects`). """ return ('location' in self.headers and self.status_code in REDIRECT_STATI) @property def is_permanent_redirect(self): """True if this Response one of the permanent versions of redirect.""" return ('location' in self.headers and self.status_code in (codes.moved_permanently, codes.permanent_redirect)) @property def next(self): """Returns a PreparedRequest for the next request in a redirect chain, if there is one.""" return self._next @property def apparent_encoding(self): """The apparent encoding, provided by the chardet library.""" return chardet.detect(self.content)['encoding'] def iter_content(self, chunk_size=1, decode_unicode=False): """Iterates over the response data. When stream=True is set on the request, this avoids reading the content at once into memory for large responses. The chunk size is the number of bytes it should read into memory. This is not necessarily the length of each item returned as decoding can take place. chunk_size must be of type int or None. A value of None will function differently depending on the value of `stream`. stream=True will read data as it arrives in whatever size the chunks are received. If stream=False, data is returned as a single chunk. If decode_unicode is True, content will be decoded using the best available encoding based on the response. 
""" def generate(): # Special case for urllib3. if hasattr(self.raw, 'stream'): try: for chunk in self.raw.stream(chunk_size, decode_content=True): yield chunk except ProtocolError as e: raise ChunkedEncodingError(e) except DecodeError as e: raise ContentDecodingError(e) except ReadTimeoutError as e: raise ConnectionError(e) else: # Standard file-like object. while True: chunk = self.raw.read(chunk_size) if not chunk: break yield chunk self._content_consumed = True if self._content_consumed and isinstance(self._content, bool): raise StreamConsumedError() elif chunk_size is not None and not isinstance(chunk_size, int): raise TypeError("chunk_size must be an int, it is instead a %s." % type(chunk_size)) # simulate reading small chunks of the content reused_chunks = iter_slices(self._content, chunk_size) stream_chunks = generate() chunks = reused_chunks if self._content_consumed else stream_chunks if decode_unicode: chunks = stream_decode_response_unicode(chunks, self) return chunks def iter_lines(self, chunk_size=ITER_CHUNK_SIZE, decode_unicode=None, delimiter=None): """Iterates over the response data, one line at a time. When stream=True is set on the request, this avoids reading the content at once into memory for large responses. .. note:: This method is not reentrant safe. """ pending = None for chunk in self.iter_content(chunk_size=chunk_size, decode_unicode=decode_unicode): if pending is not None: chunk = pending + chunk if delimiter: lines = chunk.split(delimiter) else: lines = chunk.splitlines() if lines and lines[-1] and chunk and lines[-1][-1] == chunk[-1]: pending = lines.pop() else: pending = None for line in lines: yield line if pending is not None: yield pending @property def content(self): """Content of the response, in bytes.""" if self._content is False: # Read the contents. if self._content_consumed: raise RuntimeError( 'The content for this response was already consumed') if self.status_code == 0 or self.raw is None: self._content = None else: self._content = bytes().join(self.iter_content(CONTENT_CHUNK_SIZE)) or bytes() self._content_consumed = True # don't need to release the connection; that's been handled by urllib3 # since we exhausted the data. return self._content @property def text(self): """Content of the response, in unicode. If Response.encoding is None, encoding will be guessed using ``chardet``. The encoding of the response content is determined based solely on HTTP headers, following RFC 2616 to the letter. If you can take advantage of non-HTTP knowledge to make a better guess at the encoding, you should set ``r.encoding`` appropriately before accessing this property. """ # Try charset from content-type content = None encoding = self.encoding if not self.content: return str('') # Fallback to auto-detected encoding. if self.encoding is None: encoding = self.apparent_encoding # Decode unicode from given encoding. try: content = str(self.content, encoding, errors='replace') except (LookupError, TypeError): # A LookupError is raised if the encoding was not found which could # indicate a misspelling or similar mistake. # # A TypeError can be raised if encoding is None # # So we try blindly encoding. content = str(self.content, errors='replace') return content def json(self, **kwargs): r"""Returns the json-encoded content of a response, if any. :param \*\*kwargs: Optional arguments that ``json.loads`` takes. :raises ValueError: If the response body does not contain valid json. """ if not self.encoding and self.content and len(self.content) > 3: # No encoding set. 
JSON RFC 4627 section 3 states we should expect # UTF-8, -16 or -32. Detect which one to use; If the detection or # decoding fails, fall back to `self.text` (using chardet to make # a best guess). encoding = guess_json_utf(self.content) if encoding is not None: try: return complexjson.loads( self.content.decode(encoding), **kwargs ) except UnicodeDecodeError: # Wrong UTF codec detected; usually because it's not UTF-8 # but some other 8-bit codec. This is an RFC violation, # and the server didn't bother to tell us what codec *was* # used. pass return complexjson.loads(self.text, **kwargs) @property def links(self): """Returns the parsed header links of the response, if any.""" header = self.headers.get('link') # l = MultiDict() l = {} if header: links = parse_header_links(header) for link in links: key = link.get('rel') or link.get('url') l[key] = link return l def raise_for_status(self): """Raises stored :class:`HTTPError`, if one occurred.""" http_error_msg = '' if isinstance(self.reason, bytes): # We attempt to decode utf-8 first because some servers # choose to localize their reason strings. If the string # isn't utf-8, we fall back to iso-8859-1 for all other # encodings. (See PR #3538) try: reason = self.reason.decode('utf-8') except UnicodeDecodeError: reason = self.reason.decode('iso-8859-1') else: reason = self.reason if 400 <= self.status_code < 500: http_error_msg = u'%s Client Error: %s for url: %s' % (self.status_code, reason, self.url) elif 500 <= self.status_code < 600: http_error_msg = u'%s Server Error: %s for url: %s' % (self.status_code, reason, self.url) if http_error_msg: raise HTTPError(http_error_msg, response=self) def close(self): """Releases the connection back to the pool. Once this method has been called the underlying ``raw`` object must not be accessed again. *Note: Should not normally need to be called explicitly.* """ if not self._content_consumed: self.raw.close() release_conn = getattr(self.raw, 'release_conn', None) if release_conn is not None: release_conn() PKZ6dWdW/site-packages/pip/_vendor/requests/sessions.pycnu[ abc@s+dZddlZddlZddlZddlmZddlmZddlm Z ddl m Z m Z m Z mZmZddlmZmZmZmZdd lmZmZmZdd lmZmZdd lmZdd lmZm Z dd l!m"Z"m#Z#m$Z$m%Z%ddl&m'Z'ddl(m)Z)ddlm*Z*m+Z+m,Z,m-Z-m.Z.m/Z/m0Z0ddl1m2Z2ddlm3Z3ej4dkry ej5Z6Wne7k rej8Z6nXn ejZ6e dZ9e dZ:de;fdYZ<de<fdYZ=dZ>dS(s requests.session ~~~~~~~~~~~~~~~~ This module provides a Session object to manage and persist settings across requests (cookies, auth, proxies). iN(tMapping(t timedeltai(t_basic_auth_str(t cookielibtis_py3t OrderedDictturljointurlparse(tcookiejar_from_dicttextract_cookies_to_jartRequestsCookieJart merge_cookies(tRequesttPreparedRequesttDEFAULT_REDIRECT_LIMIT(t default_hookst dispatch_hook(tto_native_string(tto_key_val_listtdefault_headers(tTooManyRedirectst InvalidSchematChunkedEncodingErrortContentDecodingError(tCaseInsensitiveDict(t HTTPAdapter(t requote_uritget_environ_proxiestget_netrc_authtshould_bypass_proxiestget_auth_from_urlt rewind_bodyt DEFAULT_PORTS(tcodes(tREDIRECT_STATItWindowscCs|dkr|S|dkr |St|to;t|tsB|S|t|}|jt|g|jD]\}}|dkrt|^qt}x|D] }||=qW|S(sDetermines appropriate setting for a given request, taking into account the explicit setting on that request, and the setting in the session. 
If a setting is a dictionary, they will be merged together using `dict_class` N(tNonet isinstanceRRtupdatetitems(trequest_settingtsession_settingt dict_classtmerged_settingtktvt none_keystkey((sA/usr/lib/python2.7/site-packages/pip/_vendor/requests/sessions.pyt merge_setting2s  1  cCsZ|dks!|jdgkr%|S|dksF|jdgkrJ|St|||S(sProperly merges both requests and session hooks. This is necessary because when request_hooks == {'response': []}, the merge breaks Session hooks entirely. tresponseN(R$tgetR0(t request_hookst session_hooksR*((sA/usr/lib/python2.7/site-packages/pip/_vendor/requests/sessions.pyt merge_hooksQs !!tSessionRedirectMixincBsPeZdZdZededdedZdZdZ dZ RS(cCs?|jr;|jd}tr.|jd}nt|dSdS(s7Receives a Response. Returns a redirect URI or ``None``tlocationtlatin1tutf8N(t is_redirecttheadersRtencodeRR$(tselftrespR7((sA/usr/lib/python2.7/site-packages/pip/_vendor/requests/sessions.pytget_redirect_targetbs    cCst|}t|}|j|jkr.tS|jdkrn|jdkrn|jdkrn|jdkrntS|j|jk}|j|jk}tj|jddf}| r|j|kr|j|krtS|p|S(sFDecide whether Authorization header should be removed when redirectingthttpiPthttpsiN(iPN(iN( RthostnametTruetschemetportR$tFalseR R2(R=told_urltnew_urlt old_parsedt new_parsedt changed_porttchanged_schemet default_port((sA/usr/lib/python2.7/site-packages/pip/_vendor/requests/sessions.pytshould_strip_authxs  c ksg} |j|} x| r|j} | j|| d|_y |jWn-tttfk r~|jj dt nXt |j|j krt d|j d|n|j| jdrt|j} dt| j| f} nt| }|j} |js3t|jt| } n t| } t| | _|j| ||jtjtjfkrd}x!|D]}| jj|dqWd| _ n| j}y |d =Wnt!k rnXt"| j#||jt$| j#|j%| j&| j#|j'| |}|j(| || j)dk oVd|kpVd |k}|rlt*| n| }|r|Vq|j+|d |d |d |d|d|dt | }t"|j%| |j|j|} |VqWdS(sBReceives a Response. Returns a generator of Responses or Requests.itdecode_contentsExceeded %s redirects.R1s//s%s:%ssContent-Lengths Content-TypesTransfer-EncodingtCookietstreamttimeouttverifytcerttproxiestallow_redirectsN(sContent-Lengths Content-TypesTransfer-Encoding(,R?tcopytappendthistorytcontentRRt RuntimeErrortrawtreadRFtlent max_redirectsRtcloset startswithRturlRRDtgeturltnetlocRRtrebuild_methodt status_codeR!ttemporary_redirecttpermanent_redirectR;tpopR$tbodytKeyErrorR t_cookiesR tcookiestprepare_cookiestrebuild_proxiest rebuild_autht_body_positionRtsend(R=R>treqRQRRRSRTRUtyield_requeststadapter_kwargsthistRbtprepared_requestt parsed_rurltparsedtpurged_headerstheaderR;t rewindable((sA/usr/lib/python2.7/site-packages/pip/_vendor/requests/sessions.pytresolve_redirectssr                 cCs{|j}|j}d|kr@|j|jj|r@|d=n|jrUt|nd}|dk rw|j|ndS(sWhen being redirected we may want to strip authentication from the request to avoid leaking credentials. This method intelligently removes and reapplies authentication where possible to avoid credential loss. 
t AuthorizationN(R;RbRNtrequestt trust_envRR$t prepare_auth(R=RwR1R;Rbtnew_auth((sA/usr/lib/python2.7/site-packages/pip/_vendor/requests/sessions.pyRps  $  c Cs5|dk r|ni}|j}|j}t|j}|j}|jd}t|d|}|jr| rt |d|} | j|| jd} | r|j || qnd|kr|d=nyt ||\} } Wnt k rd\} } nX| r1| r1t | | |d>> import requests >>> s = requests.Session() >>> s.get('http://httpbin.org/get') Or as a context manager:: >>> with requests.Session() as s: >>> s.get('http://httpbin.org/get') R;RmtauthRUthookstparamsRSRTtprefetchtadaptersRQRR_cCst|_d|_i|_t|_i|_t|_ t |_ d|_ t |_t |_ti|_t|_|jdt|jdtdS(Nshttps://shttp://(RR;R$RRURRRRFRQRCRSRTRR_RRRmRRtmountR(R=((sA/usr/lib/python2.7/site-packages/pip/_vendor/requests/sessions.pyt__init__js           cCs|S(N((R=((sA/usr/lib/python2.7/site-packages/pip/_vendor/requests/sessions.pyt __enter__scGs|jdS(N(R`(R=targs((sA/usr/lib/python2.7/site-packages/pip/_vendor/requests/sessions.pyt__exit__scCs*|jp i}t|tjs0t|}nttt|j|}|j}|jr| r|j rt |j }nt }|j d|j jd|j d|jd|jd|jdt|j|jdtdt|j|jd t||jd |d t|j|j |S( sConstructs a :class:`PreparedRequest ` for transmission and returns it. The :class:`PreparedRequest` has settings merged from the :class:`Request ` instance and those of the :class:`Session`. :param request: :class:`Request` instance to prepare with this session's settings. :rtype: requests.PreparedRequest RRbtfilestdatatjsonR;R*RRRmR(RmR%Rt CookieJarRR R RRRRbR tprepareRtupperRRRR0R;RRR5R(R=RRmtmerged_cookiesRtp((sA/usr/lib/python2.7/site-packages/pip/_vendor/requests/sessions.pytprepare_requests*        cCstd|jd|d|d|d|p-id|d|p?id|d |d | }|j|}| poi} |j|j| | ||}i| d 6| d 6}|j||j||}|S( sConstructs a :class:`Request `, prepares it and sends it. Returns :class:`Response ` object. :param method: method for the new :class:`Request` object. :param url: URL for the new :class:`Request` object. :param params: (optional) Dictionary or bytes to be sent in the query string for the :class:`Request`. :param data: (optional) Dictionary, bytes, or file-like object to send in the body of the :class:`Request`. :param json: (optional) json to send in the body of the :class:`Request`. :param headers: (optional) Dictionary of HTTP Headers to send with the :class:`Request`. :param cookies: (optional) Dict or CookieJar object to send with the :class:`Request`. :param files: (optional) Dictionary of ``'filename': file-like-objects`` for multipart encoding upload. :param auth: (optional) Auth tuple or callable to enable Basic/Digest/Custom HTTP Auth. :param timeout: (optional) How long to wait for the server to send data before giving up, as a float, or a :ref:`(connect timeout, read timeout) ` tuple. :type timeout: float or tuple :param allow_redirects: (optional) Set to True by default. :type allow_redirects: bool :param proxies: (optional) Dictionary mapping protocol or protocol and hostname to the URL of the proxy. :param stream: (optional) whether to immediately download the response content. Defaults to ``False``. :param verify: (optional) Either a boolean, in which case it controls whether we verify the server's TLS certificate, or a string, in which case it must be a path to a CA bundle to use. Defaults to ``True``. :param cert: (optional) if String, path to ssl client cert file (.pem). If Tuple, ('cert', 'key') pair. :rtype: requests.Response RRbR;RRRRRRmRRRRV(R RRtmerge_environment_settingsRbR&Rr(R=RRbRRR;RmRRRRRVRURRQRSRTRRstpreptsettingst send_kwargsR>((sA/usr/lib/python2.7/site-packages/pip/_vendor/requests/sessions.pyRs*)       cKs#|jdt|jd||S(sSends a GET request. Returns :class:`Response` object. 
:param url: URL for the new :class:`Request` object. :param \*\*kwargs: Optional arguments that ``request`` takes. :rtype: requests.Response RVR(RRCR(R=Rbtkwargs((sA/usr/lib/python2.7/site-packages/pip/_vendor/requests/sessions.pyR2scKs#|jdt|jd||S(sSends a OPTIONS request. Returns :class:`Response` object. :param url: URL for the new :class:`Request` object. :param \*\*kwargs: Optional arguments that ``request`` takes. :rtype: requests.Response RVtOPTIONS(RRCR(R=RbR((sA/usr/lib/python2.7/site-packages/pip/_vendor/requests/sessions.pytoptions!scKs#|jdt|jd||S(sSends a HEAD request. Returns :class:`Response` object. :param url: URL for the new :class:`Request` object. :param \*\*kwargs: Optional arguments that ``request`` takes. :rtype: requests.Response RVR(RRFR(R=RbR((sA/usr/lib/python2.7/site-packages/pip/_vendor/requests/sessions.pythead,scKs|jd|d|d||S(sSends a POST request. Returns :class:`Response` object. :param url: URL for the new :class:`Request` object. :param data: (optional) Dictionary, bytes, or file-like object to send in the body of the :class:`Request`. :param json: (optional) json to send in the body of the :class:`Request`. :param \*\*kwargs: Optional arguments that ``request`` takes. :rtype: requests.Response RRR(R(R=RbRRR((sA/usr/lib/python2.7/site-packages/pip/_vendor/requests/sessions.pytpost7s cKs|jd|d||S(sYSends a PUT request. Returns :class:`Response` object. :param url: URL for the new :class:`Request` object. :param data: (optional) Dictionary, bytes, or file-like object to send in the body of the :class:`Request`. :param \*\*kwargs: Optional arguments that ``request`` takes. :rtype: requests.Response tPUTR(R(R=RbRR((sA/usr/lib/python2.7/site-packages/pip/_vendor/requests/sessions.pytputCs cKs|jd|d||S(s[Sends a PATCH request. Returns :class:`Response` object. :param url: URL for the new :class:`Request` object. :param data: (optional) Dictionary, bytes, or file-like object to send in the body of the :class:`Request`. :param \*\*kwargs: Optional arguments that ``request`` takes. :rtype: requests.Response tPATCHR(R(R=RbRR((sA/usr/lib/python2.7/site-packages/pip/_vendor/requests/sessions.pytpatchNs cKs|jd||S(sSends a DELETE request. Returns :class:`Response` object. :param url: URL for the new :class:`Request` object. :param \*\*kwargs: Optional arguments that ``request`` takes. :rtype: requests.Response tDELETE(R(R=RbR((sA/usr/lib/python2.7/site-packages/pip/_vendor/requests/sessions.pytdeleteYsc Ks|jd|j|jd|j|jd|j|jd|jt|trjtdn|jdt }|j d}|j }|j d|j }t}|j||}t|} td| |_td |||}|jr1x-|jD]} t|j| j| jq Wnt|j||j|j|||} |r{g| D]} | ^qing} | r| jd || j}| |_n|sy(t|j||d t ||_Wqtk rqXn|s|jn|S( sISend a given PreparedRequest. :rtype: requests.Response RQRSRTRUs#You can only send PreparedRequests.RVRbtsecondsR1iRt(RRQRSRTRUR%R t ValueErrorRiRCR2Rt get_adapterRbtpreferred_clockRrRtelapsedRRYR RmRR\R}tinserttnextt_nextt StopIterationRZ( R=RRRVRQRtadaptertstarttrRR>tgenRY((sA/usr/lib/python2.7/site-packages/pip/_vendor/requests/sessions.pyRrcsB     %  (  c Cs|jr|dk r$|jdnd}t|d|}x*|jD]\}} |j|| qIW|tks|dkrtjjdptjjd}qnt ||j }t ||j }t ||j }t ||j }i|d6|d6|d6|d6S( s^ Check the environment and merge it with some settings. :rtype: dict RtREQUESTS_CA_BUNDLEtCURL_CA_BUNDLERSRURQRTN(RR$R2RR'RRCtostenvironR0RURQRSRT( R=RbRURQRSRTRt env_proxiesR,R-((sA/usr/lib/python2.7/site-packages/pip/_vendor/requests/sessions.pyRs !cCsMx6|jjD]%\}}|jj|r|SqWtd|dS(s~ Returns the appropriate connection adapter for the given URL. 
:rtype: requests.adapters.BaseAdapter s*No connection adapters were found for '%s'N(RR'tlowerRaR(R=RbtprefixR((sA/usr/lib/python2.7/site-packages/pip/_vendor/requests/sessions.pyRscCs(x!|jjD]}|jqWdS(s+Closes all adapters and as such the sessionN(RtvaluesR`(R=R-((sA/usr/lib/python2.7/site-packages/pip/_vendor/requests/sessions.pyR`scCso||j|s(tdictt __attrs__(R=tstate((R=sA/usr/lib/python2.7/site-packages/pip/_vendor/requests/sessions.pyt __getstate__scCs1x*|jD]\}}t|||q WdS(N(R'tsetattr(R=RRtvalue((sA/usr/lib/python2.7/site-packages/pip/_vendor/requests/sessions.pyt __setstate__sN(RRt__doc__RRRRRR$RCRR2RRRRRRRrRRR`RRR(((sA/usr/lib/python2.7/site-packages/pip/_vendor/requests/sessions.pyRQs2  7   ) D  I    cCstS(sQ Returns a :class:`Session` for context-management. :rtype: Session (R(((sA/usr/lib/python2.7/site-packages/pip/_vendor/requests/sessions.pytsessions(?RRtplatformttimet collectionsRtdatetimeRRRtcompatRRRRRRmRR R R tmodelsR R RRRRt_internal_utilsRtutilsRRt exceptionsRRRRt structuresRRRRRRRRRR t status_codesR!R"tsystemt perf_counterRtAttributeErrortclockR0R5tobjectR6RR(((sA/usr/lib/python2.7/site-packages/pip/_vendor/requests/sessions.pyt s<   (""4     PKZ\++1site-packages/pip/_vendor/requests/structures.pycnu[ abc@sUdZddlZddlmZdejfdYZdefdYZdS( sO requests.structures ~~~~~~~~~~~~~~~~~~~ Data structures that power Requests. iNi(t OrderedDicttCaseInsensitiveDictcBskeZdZd dZdZdZdZdZdZ dZ dZ d Z d Z RS( sA case-insensitive ``dict``-like object. Implements all methods and operations of ``collections.MutableMapping`` as well as dict's ``copy``. Also provides ``lower_items``. All keys are expected to be strings. The structure remembers the case of the last key to be set, and ``iter(instance)``, ``keys()``, ``items()``, ``iterkeys()``, and ``iteritems()`` will contain case-sensitive keys. However, querying and contains testing is case insensitive:: cid = CaseInsensitiveDict() cid['Accept'] = 'application/json' cid['aCCEPT'] == 'application/json' # True list(cid) == ['Accept'] # True For example, ``headers['content-encoding']`` will return the value of a ``'Content-Encoding'`` response header, regardless of how the header name was originally stored. If the constructor, ``.update``, or equality comparison operations are given keys that have equal ``.lower()``s, the behavior is undefined. 
[binary .pyc/.pyo bytecode omitted -- the marshalled code objects do not survive as text; only the file paths and embedded docstrings are recoverable]

site-packages/pip/_vendor/requests/structures.pyc      -- CaseInsensitiveDict and LookupDict, the data structures that power Requests
site-packages/pip/_vendor/requests/__init__.pyo        -- package init; its docstring shows basic GET/POST usage, and check_compatibility() warns when the bundled urllib3/chardet versions are unsupported
site-packages/pip/_vendor/requests/status_codes.pyc    -- a LookupDict named 'status_codes' mapping symbolic names ('ok', 'created', 'not_found', ...) to numeric HTTP codes
site-packages/pip/_vendor/requests/_internal_utils.pyo -- compiled form of _internal_utils.py (source appears below)
site-packages/pip/_vendor/requests/compat.pyc          -- compiled form of compat.py (source appears below)
site-packages/pip/_vendor/requests/help.pyc            -- bug-report helper: _implementation(), info() and main() collect platform, urllib3, chardet and OpenSSL details and print them as JSON
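For orientation, the status_codes table summarized above is what backs the ``requests.codes`` lookup object used throughout the package; a minimal consumption sketch (attribute and key access are equivalent):

    import requests

    # LookupDict supports both attribute-style and key-style access.
    assert requests.codes.ok == 200
    assert requests.codes['not_found'] == 404
    assert requests.codes.temporary_redirect == 307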
tCPythontPyPys%s.%s.%stfinalttJythont IronPythontUnknowntnametversion( tplatformtpython_implementationtpython_versiontsystpypy_version_infotmajortminortmicrot releaseleveltjoin(timplementationtimplementation_version((s=/usr/lib/python2.7/site-packages/pip/_vendor/requests/help.pyt_implementations       c Csqy$itjd6tjd6}Wn%tk rKidd6dd6}nXt}itjd6}itjd6}idd6dd6}t rit jd6dt j j d6}nit t ddd6}it tddd6}t td d}i|dk rd|ndd6}i |d 6|d 6|d 6tdk d 6|d6|d6|d6|d6|d6itd6d6S(s&Generate information for a bug report.tsystemtreleaseR RR topenssl_versions%xRtOPENSSL_VERSION_NUMBERRRt system_ssltusing_pyopensslt pyOpenSSLRRt cryptographyRtrequestsN(RRRtIOErrorRRRRtNonetOpenSSLtSSLRtgetattrR#RtsslRtrequests_version( t platform_infotimplementation_infot urllib3_infot chardet_infotpyopenssl_infotcryptography_infot idna_infoR tsystem_ssl_info((s=/usr/lib/python2.7/site-packages/pip/_vendor/requests/help.pytinfo;sJ       cCs&ttjtdtdddS(s)Pretty-print the bug information as JSON.t sort_keystindentiN(tprinttjsontdumpsR4tTrue(((s=/usr/lib/python2.7/site-packages/pip/_vendor/requests/help.pytmainrst__main__(t__doc__t __future__RR8RRR*t pip._vendorRRRR RR+tpackages.urllib3.contribRt ImportErrorR&R'R#RR4R;t__name__(((s=/usr/lib/python2.7/site-packages/pip/_vendor/requests/help.pyts,         ! 7  PKZwaMZZ,site-packages/pip/_vendor/requests/compat.pynu[# -*- coding: utf-8 -*- """ requests.compat ~~~~~~~~~~~~~~~ This module handles import compatibility issues between Python 2 and Python 3. """ from pip._vendor import chardet import sys # ------- # Pythons # ------- # Syntax sugar. _ver = sys.version_info #: Python 2.x? is_py2 = (_ver[0] == 2) #: Python 3.x? is_py3 = (_ver[0] == 3) # try: # import simplejson as json # except ImportError: import json # --------- # Specifics # --------- if is_py2: from urllib import ( quote, unquote, quote_plus, unquote_plus, urlencode, getproxies, proxy_bypass, proxy_bypass_environment, getproxies_environment) from urlparse import urlparse, urlunparse, urljoin, urlsplit, urldefrag from urllib2 import parse_http_list import cookielib from Cookie import Morsel from StringIO import StringIO from pip._vendor.urllib3.packages.ordered_dict import OrderedDict builtin_str = str bytes = str str = unicode basestring = basestring numeric_types = (int, long, float) integer_types = (int, long) elif is_py3: from urllib.parse import urlparse, urlunparse, urljoin, urlsplit, urlencode, quote, unquote, quote_plus, unquote_plus, urldefrag from urllib.request import parse_http_list, getproxies, proxy_bypass, proxy_bypass_environment, getproxies_environment from http import cookiejar as cookielib from http.cookies import Morsel from io import StringIO from collections import OrderedDict builtin_str = str str = str bytes = bytes basestring = (str, bytes) numeric_types = (int, float) integer_types = (int,) PKZ9HH5site-packages/pip/_vendor/requests/_internal_utils.pynu[# -*- coding: utf-8 -*- """ requests._internal_utils ~~~~~~~~~~~~~~ Provides utility functions that are consumed internally by Requests which depend on extremely few external helpers (such as compat) """ from .compat import is_py2, builtin_str, str def to_native_string(string, encoding='ascii'): """Given a string object, regardless of type, returns a representation of that string in the native string type, encoding and decoding where necessary. This assumes ASCII unless told otherwise. 
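
    A rough illustration (the literals are arbitrary ASCII examples)::

        >>> to_native_string(u'abc')
        'abc'
        >>> to_native_string(b'abc') == 'abc'   # bytes are decoded on Python 3, returned as-is on Python 2
        True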
""" if isinstance(string, builtin_str): out = string else: if is_py2: out = string.encode(encoding) else: out = string.decode(encoding) return out def unicode_is_ascii(u_string): """Determine if unicode string only contains ASCII characters. :param str u_string: unicode string to check. Must be unicode and not Python 2 `str`. :rtype: bool """ assert isinstance(u_string, str) try: u_string.encode('ascii') return True except UnicodeEncodeError: return False PKZ ;,site-packages/pip/_vendor/requests/hooks.pyonu[ abc@s%dZdgZdZdZdS(s requests.hooks ~~~~~~~~~~~~~~ This module provides the capabilities for the Requests hooks system. Available hooks: ``response``: The response generated from a Request. tresponsecCstdtDS(Ncss|]}|gfVqdS(N((t.0tevent((s>/usr/lib/python2.7/site-packages/pip/_vendor/requests/hooks.pys s(tdicttHOOKS(((s>/usr/lib/python2.7/site-packages/pip/_vendor/requests/hooks.pyt default_hooksscKs{|p t}|j|}|rwt|dr?|g}nx5|D]*}|||}|dk rF|}qFqFWn|S(s6Dispatches a hook dictionary on a given piece of data.t__call__N(RtgetthasattrtNone(tkeythookst hook_datatkwargsthookt _hook_data((s>/usr/lib/python2.7/site-packages/pip/_vendor/requests/hooks.pyt dispatch_hooks   N(t__doc__RRR(((s>/usr/lib/python2.7/site-packages/pip/_vendor/requests/hooks.pyt s  PKZC3 G G-site-packages/pip/_vendor/requests/cookies.pynu[# -*- coding: utf-8 -*- """ requests.cookies ~~~~~~~~~~~~~~~~ Compatibility code to be able to use `cookielib.CookieJar` with requests. requests.utils imports from here, so be careful with imports. """ import copy import time import calendar import collections from ._internal_utils import to_native_string from .compat import cookielib, urlparse, urlunparse, Morsel try: import threading except ImportError: import dummy_threading as threading class MockRequest(object): """Wraps a `requests.Request` to mimic a `urllib2.Request`. The code in `cookielib.CookieJar` expects this interface in order to correctly manage cookie policies, i.e., determine whether a cookie can be set, given the domains of the request and the cookie. The original request object is read-only. The client is responsible for collecting the new headers via `get_new_headers()` and interpreting them appropriately. You probably want `get_cookie_header`, defined below. 
""" def __init__(self, request): self._r = request self._new_headers = {} self.type = urlparse(self._r.url).scheme def get_type(self): return self.type def get_host(self): return urlparse(self._r.url).netloc def get_origin_req_host(self): return self.get_host() def get_full_url(self): # Only return the response's URL if the user hadn't set the Host # header if not self._r.headers.get('Host'): return self._r.url # If they did set it, retrieve it and reconstruct the expected domain host = to_native_string(self._r.headers['Host'], encoding='utf-8') parsed = urlparse(self._r.url) # Reconstruct the URL as we expect it return urlunparse([ parsed.scheme, host, parsed.path, parsed.params, parsed.query, parsed.fragment ]) def is_unverifiable(self): return True def has_header(self, name): return name in self._r.headers or name in self._new_headers def get_header(self, name, default=None): return self._r.headers.get(name, self._new_headers.get(name, default)) def add_header(self, key, val): """cookielib has no legitimate use for this method; add it back if you find one.""" raise NotImplementedError("Cookie headers should be added with add_unredirected_header()") def add_unredirected_header(self, name, value): self._new_headers[name] = value def get_new_headers(self): return self._new_headers @property def unverifiable(self): return self.is_unverifiable() @property def origin_req_host(self): return self.get_origin_req_host() @property def host(self): return self.get_host() class MockResponse(object): """Wraps a `httplib.HTTPMessage` to mimic a `urllib.addinfourl`. ...what? Basically, expose the parsed HTTP headers from the server response the way `cookielib` expects to see them. """ def __init__(self, headers): """Make a MockResponse for `cookielib` to read. :param headers: a httplib.HTTPMessage or analogous carrying the headers """ self._headers = headers def info(self): return self._headers def getheaders(self, name): self._headers.getheaders(name) def extract_cookies_to_jar(jar, request, response): """Extract the cookies from the response into a CookieJar. :param jar: cookielib.CookieJar (not necessarily a RequestsCookieJar) :param request: our own requests.Request object :param response: urllib3.HTTPResponse object """ if not (hasattr(response, '_original_response') and response._original_response): return # the _original_response field is the wrapped httplib.HTTPResponse object, req = MockRequest(request) # pull out the HTTPMessage with the headers and put it in the mock: res = MockResponse(response._original_response.msg) jar.extract_cookies(res, req) def get_cookie_header(jar, request): """ Produce an appropriate Cookie header string to be sent with `request`, or None. :rtype: str """ r = MockRequest(request) jar.add_cookie_header(r) return r.get_new_headers().get('Cookie') def remove_cookie_by_name(cookiejar, name, domain=None, path=None): """Unsets a cookie by name, by default over all domains and paths. Wraps CookieJar.clear(), is O(n). """ clearables = [] for cookie in cookiejar: if cookie.name != name: continue if domain is not None and domain != cookie.domain: continue if path is not None and path != cookie.path: continue clearables.append((cookie.domain, cookie.path, cookie.name)) for domain, path, name in clearables: cookiejar.clear(domain, path, name) class CookieConflictError(RuntimeError): """There are two cookies that meet the criteria specified in the cookie jar. Use .get and .set and include domain and path args in order to be more specific. 
""" class RequestsCookieJar(cookielib.CookieJar, collections.MutableMapping): """Compatibility class; is a cookielib.CookieJar, but exposes a dict interface. This is the CookieJar we create by default for requests and sessions that don't specify one, since some clients may expect response.cookies and session.cookies to support dict operations. Requests does not use the dict interface internally; it's just for compatibility with external client code. All requests code should work out of the box with externally provided instances of ``CookieJar``, e.g. ``LWPCookieJar`` and ``FileCookieJar``. Unlike a regular CookieJar, this class is pickleable. .. warning:: dictionary operations that are normally O(1) may be O(n). """ def get(self, name, default=None, domain=None, path=None): """Dict-like get() that also supports optional domain and path args in order to resolve naming collisions from using one cookie jar over multiple domains. .. warning:: operation is O(n), not O(1). """ try: return self._find_no_duplicates(name, domain, path) except KeyError: return default def set(self, name, value, **kwargs): """Dict-like set() that also supports optional domain and path args in order to resolve naming collisions from using one cookie jar over multiple domains. """ # support client code that unsets cookies by assignment of a None value: if value is None: remove_cookie_by_name(self, name, domain=kwargs.get('domain'), path=kwargs.get('path')) return if isinstance(value, Morsel): c = morsel_to_cookie(value) else: c = create_cookie(name, value, **kwargs) self.set_cookie(c) return c def iterkeys(self): """Dict-like iterkeys() that returns an iterator of names of cookies from the jar. .. seealso:: itervalues() and iteritems(). """ for cookie in iter(self): yield cookie.name def keys(self): """Dict-like keys() that returns a list of names of cookies from the jar. .. seealso:: values() and items(). """ return list(self.iterkeys()) def itervalues(self): """Dict-like itervalues() that returns an iterator of values of cookies from the jar. .. seealso:: iterkeys() and iteritems(). """ for cookie in iter(self): yield cookie.value def values(self): """Dict-like values() that returns a list of values of cookies from the jar. .. seealso:: keys() and items(). """ return list(self.itervalues()) def iteritems(self): """Dict-like iteritems() that returns an iterator of name-value tuples from the jar. .. seealso:: iterkeys() and itervalues(). """ for cookie in iter(self): yield cookie.name, cookie.value def items(self): """Dict-like items() that returns a list of name-value tuples from the jar. Allows client-code to call ``dict(RequestsCookieJar)`` and get a vanilla python dict of key value pairs. .. seealso:: keys() and values(). """ return list(self.iteritems()) def list_domains(self): """Utility method to list all the domains in the jar.""" domains = [] for cookie in iter(self): if cookie.domain not in domains: domains.append(cookie.domain) return domains def list_paths(self): """Utility method to list all the paths in the jar.""" paths = [] for cookie in iter(self): if cookie.path not in paths: paths.append(cookie.path) return paths def multiple_domains(self): """Returns True if there are multiple domains in the jar. Returns False otherwise. 
:rtype: bool """ domains = [] for cookie in iter(self): if cookie.domain is not None and cookie.domain in domains: return True domains.append(cookie.domain) return False # there is only one domain in jar def get_dict(self, domain=None, path=None): """Takes as an argument an optional domain and path and returns a plain old Python dict of name-value pairs of cookies that meet the requirements. :rtype: dict """ dictionary = {} for cookie in iter(self): if ( (domain is None or cookie.domain == domain) and (path is None or cookie.path == path) ): dictionary[cookie.name] = cookie.value return dictionary def __contains__(self, name): try: return super(RequestsCookieJar, self).__contains__(name) except CookieConflictError: return True def __getitem__(self, name): """Dict-like __getitem__() for compatibility with client code. Throws exception if there are more than one cookie with name. In that case, use the more explicit get() method instead. .. warning:: operation is O(n), not O(1). """ return self._find_no_duplicates(name) def __setitem__(self, name, value): """Dict-like __setitem__ for compatibility with client code. Throws exception if there is already a cookie of that name in the jar. In that case, use the more explicit set() method instead. """ self.set(name, value) def __delitem__(self, name): """Deletes a cookie given a name. Wraps ``cookielib.CookieJar``'s ``remove_cookie_by_name()``. """ remove_cookie_by_name(self, name) def set_cookie(self, cookie, *args, **kwargs): if hasattr(cookie.value, 'startswith') and cookie.value.startswith('"') and cookie.value.endswith('"'): cookie.value = cookie.value.replace('\\"', '') return super(RequestsCookieJar, self).set_cookie(cookie, *args, **kwargs) def update(self, other): """Updates this jar with cookies from another CookieJar or dict-like""" if isinstance(other, cookielib.CookieJar): for cookie in other: self.set_cookie(copy.copy(cookie)) else: super(RequestsCookieJar, self).update(other) def _find(self, name, domain=None, path=None): """Requests uses this method internally to get cookie values. If there are conflicting cookies, _find arbitrarily chooses one. See _find_no_duplicates if you want an exception thrown if there are conflicting cookies. :param name: a string containing name of cookie :param domain: (optional) string containing domain of cookie :param path: (optional) string containing path of cookie :return: cookie.value """ for cookie in iter(self): if cookie.name == name: if domain is None or cookie.domain == domain: if path is None or cookie.path == path: return cookie.value raise KeyError('name=%r, domain=%r, path=%r' % (name, domain, path)) def _find_no_duplicates(self, name, domain=None, path=None): """Both ``__get_item__`` and ``get`` call this function: it's never used elsewhere in Requests. 
:param name: a string containing name of cookie :param domain: (optional) string containing domain of cookie :param path: (optional) string containing path of cookie :raises KeyError: if cookie is not found :raises CookieConflictError: if there are multiple cookies that match name and optionally domain and path :return: cookie.value """ toReturn = None for cookie in iter(self): if cookie.name == name: if domain is None or cookie.domain == domain: if path is None or cookie.path == path: if toReturn is not None: # if there are multiple cookies that meet passed in criteria raise CookieConflictError('There are multiple cookies with name, %r' % (name)) toReturn = cookie.value # we will eventually return this as long as no cookie conflict if toReturn: return toReturn raise KeyError('name=%r, domain=%r, path=%r' % (name, domain, path)) def __getstate__(self): """Unlike a normal CookieJar, this class is pickleable.""" state = self.__dict__.copy() # remove the unpickleable RLock object state.pop('_cookies_lock') return state def __setstate__(self, state): """Unlike a normal CookieJar, this class is pickleable.""" self.__dict__.update(state) if '_cookies_lock' not in self.__dict__: self._cookies_lock = threading.RLock() def copy(self): """Return a copy of this RequestsCookieJar.""" new_cj = RequestsCookieJar() new_cj.update(self) return new_cj def _copy_cookie_jar(jar): if jar is None: return None if hasattr(jar, 'copy'): # We're dealing with an instance of RequestsCookieJar return jar.copy() # We're dealing with a generic CookieJar instance new_jar = copy.copy(jar) new_jar.clear() for cookie in jar: new_jar.set_cookie(copy.copy(cookie)) return new_jar def create_cookie(name, value, **kwargs): """Make a cookie from underspecified parameters. By default, the pair of `name` and `value` will be set for the domain '' and sent on every request (this is sometimes called a "supercookie"). """ result = dict( version=0, name=name, value=value, port=None, domain='', path='/', secure=False, expires=None, discard=True, comment=None, comment_url=None, rest={'HttpOnly': None}, rfc2109=False,) badargs = set(kwargs) - set(result) if badargs: err = 'create_cookie() got unexpected keyword arguments: %s' raise TypeError(err % list(badargs)) result.update(kwargs) result['port_specified'] = bool(result['port']) result['domain_specified'] = bool(result['domain']) result['domain_initial_dot'] = result['domain'].startswith('.') result['path_specified'] = bool(result['path']) return cookielib.Cookie(**result) def morsel_to_cookie(morsel): """Convert a Morsel object into a Cookie containing the one k/v pair.""" expires = None if morsel['max-age']: try: expires = int(time.time() + int(morsel['max-age'])) except ValueError: raise TypeError('max-age: %s must be integer' % morsel['max-age']) elif morsel['expires']: time_template = '%a, %d-%b-%Y %H:%M:%S GMT' expires = calendar.timegm( time.strptime(morsel['expires'], time_template) ) return create_cookie( comment=morsel['comment'], comment_url=bool(morsel['comment']), discard=False, domain=morsel['domain'], expires=expires, name=morsel.key, path=morsel['path'], port=None, rest={'HttpOnly': morsel['httponly']}, rfc2109=False, secure=bool(morsel['secure']), value=morsel.value, version=morsel['version'] or 0, ) def cookiejar_from_dict(cookie_dict, cookiejar=None, overwrite=True): """Returns a CookieJar from a key/value dictionary. :param cookie_dict: Dict of key/values to insert into CookieJar. :param cookiejar: (optional) A cookiejar to add the cookies to. 
:param overwrite: (optional) If False, will not replace cookies already in the jar with new ones. """ if cookiejar is None: cookiejar = RequestsCookieJar() if cookie_dict is not None: names_from_jar = [cookie.name for cookie in cookiejar] for name in cookie_dict: if overwrite or (name not in names_from_jar): cookiejar.set_cookie(create_cookie(name, cookie_dict[name])) return cookiejar def merge_cookies(cookiejar, cookies): """Add cookies to cookiejar and returns a merged CookieJar. :param cookiejar: CookieJar object to add the cookies to. :param cookies: Dictionary or CookieJar object to be added. """ if not isinstance(cookiejar, cookielib.CookieJar): raise ValueError('You can only merge into CookieJar') if isinstance(cookies, dict): cookiejar = cookiejar_from_dict( cookies, cookiejar=cookiejar, overwrite=False) elif isinstance(cookies, cookielib.CookieJar): try: cookiejar.update(cookies) except AttributeError: for cookie_in_jar in cookies: cookiejar.set_cookie(cookie_in_jar) return cookiejar PKZ7$Ypp.site-packages/pip/_vendor/requests/sessions.pynu[# -*- coding: utf-8 -*- """ requests.session ~~~~~~~~~~~~~~~~ This module provides a Session object to manage and persist settings across requests (cookies, auth, proxies). """ import os import platform import time from collections import Mapping from datetime import timedelta from .auth import _basic_auth_str from .compat import cookielib, is_py3, OrderedDict, urljoin, urlparse from .cookies import ( cookiejar_from_dict, extract_cookies_to_jar, RequestsCookieJar, merge_cookies) from .models import Request, PreparedRequest, DEFAULT_REDIRECT_LIMIT from .hooks import default_hooks, dispatch_hook from ._internal_utils import to_native_string from .utils import to_key_val_list, default_headers from .exceptions import ( TooManyRedirects, InvalidSchema, ChunkedEncodingError, ContentDecodingError) from .structures import CaseInsensitiveDict from .adapters import HTTPAdapter from .utils import ( requote_uri, get_environ_proxies, get_netrc_auth, should_bypass_proxies, get_auth_from_url, rewind_body, DEFAULT_PORTS ) from .status_codes import codes # formerly defined here, reexposed here for backward compatibility from .models import REDIRECT_STATI # Preferred clock, based on which one is more accurate on a given system. if platform.system() == 'Windows': try: # Python 3.3+ preferred_clock = time.perf_counter except AttributeError: # Earlier than Python 3. preferred_clock = time.clock else: preferred_clock = time.time def merge_setting(request_setting, session_setting, dict_class=OrderedDict): """Determines appropriate setting for a given request, taking into account the explicit setting on that request, and the setting in the session. If a setting is a dictionary, they will be merged together using `dict_class` """ if session_setting is None: return request_setting if request_setting is None: return session_setting # Bypass if not a dictionary (e.g. verify) if not ( isinstance(session_setting, Mapping) and isinstance(request_setting, Mapping) ): return request_setting merged_setting = dict_class(to_key_val_list(session_setting)) merged_setting.update(to_key_val_list(request_setting)) # Remove keys that are set to None. Extract keys first to avoid altering # the dictionary during iteration. none_keys = [k for (k, v) in merged_setting.items() if v is None] for key in none_keys: del merged_setting[key] return merged_setting def merge_hooks(request_hooks, session_hooks, dict_class=OrderedDict): """Properly merges both requests and session hooks. 
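
    Hooks follow the same precedence rule as :func:`merge_setting` above,
    where request-level values win and ``None`` removes a session value; a
    rough sketch with made-up headers::

        >>> merge_setting({'Accept': 'application/json', 'X-Tmp': None},
        ...               {'Accept': '*/*', 'X-Tmp': 'on'})
        OrderedDict([('Accept', 'application/json')])
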
This is necessary because when request_hooks == {'response': []}, the merge breaks Session hooks entirely. """ if session_hooks is None or session_hooks.get('response') == []: return request_hooks if request_hooks is None or request_hooks.get('response') == []: return session_hooks return merge_setting(request_hooks, session_hooks, dict_class) class SessionRedirectMixin(object): def get_redirect_target(self, resp): """Receives a Response. Returns a redirect URI or ``None``""" # Due to the nature of how requests processes redirects this method will # be called at least once upon the original response and at least twice # on each subsequent redirect response (if any). # If a custom mixin is used to handle this logic, it may be advantageous # to cache the redirect location onto the response object as a private # attribute. if resp.is_redirect: location = resp.headers['location'] # Currently the underlying http module on py3 decode headers # in latin1, but empirical evidence suggests that latin1 is very # rarely used with non-ASCII characters in HTTP headers. # It is more likely to get UTF8 header rather than latin1. # This causes incorrect handling of UTF8 encoded location headers. # To solve this, we re-encode the location in latin1. if is_py3: location = location.encode('latin1') return to_native_string(location, 'utf8') return None def should_strip_auth(self, old_url, new_url): """Decide whether Authorization header should be removed when redirecting""" old_parsed = urlparse(old_url) new_parsed = urlparse(new_url) if old_parsed.hostname != new_parsed.hostname: return True # Special case: allow http -> https redirect when using the standard # ports. This isn't specified by RFC 7235, but is kept to avoid # breaking backwards compatibility with older versions of requests # that allowed any redirects on the same host. if (old_parsed.scheme == 'http' and old_parsed.port in (80, None) and new_parsed.scheme == 'https' and new_parsed.port in (443, None)): return False # Handle default port usage corresponding to scheme. changed_port = old_parsed.port != new_parsed.port changed_scheme = old_parsed.scheme != new_parsed.scheme default_port = (DEFAULT_PORTS.get(old_parsed.scheme, None), None) if (not changed_scheme and old_parsed.port in default_port and new_parsed.port in default_port): return False # Standard case: root URI must match return changed_port or changed_scheme def resolve_redirects(self, resp, req, stream=False, timeout=None, verify=True, cert=None, proxies=None, yield_requests=False, **adapter_kwargs): """Receives a Response. Returns a generator of Responses or Requests.""" hist = [] # keep track of history url = self.get_redirect_target(resp) while url: prepared_request = req.copy() # Update history and keep track of redirects. # resp.history must ignore the original request in this loop hist.append(resp) resp.history = hist[1:] try: resp.content # Consume socket so it can be released except (ChunkedEncodingError, ContentDecodingError, RuntimeError): resp.raw.read(decode_content=False) if len(resp.history) >= self.max_redirects: raise TooManyRedirects('Exceeded %s redirects.' % self.max_redirects, response=resp) # Release the connection back into the pool. resp.close() # Handle redirection without scheme (see: RFC 1808 Section 4) if url.startswith('//'): parsed_rurl = urlparse(resp.url) url = '%s:%s' % (to_native_string(parsed_rurl.scheme), url) # The scheme should be lower case... 
parsed = urlparse(url) url = parsed.geturl() # Facilitate relative 'location' headers, as allowed by RFC 7231. # (e.g. '/path/to/resource' instead of 'http://domain.tld/path/to/resource') # Compliant with RFC3986, we percent encode the url. if not parsed.netloc: url = urljoin(resp.url, requote_uri(url)) else: url = requote_uri(url) prepared_request.url = to_native_string(url) self.rebuild_method(prepared_request, resp) # https://github.com/requests/requests/issues/1084 if resp.status_code not in (codes.temporary_redirect, codes.permanent_redirect): # https://github.com/requests/requests/issues/3490 purged_headers = ('Content-Length', 'Content-Type', 'Transfer-Encoding') for header in purged_headers: prepared_request.headers.pop(header, None) prepared_request.body = None headers = prepared_request.headers try: del headers['Cookie'] except KeyError: pass # Extract any cookies sent on the response to the cookiejar # in the new request. Because we've mutated our copied prepared # request, use the old one that we haven't yet touched. extract_cookies_to_jar(prepared_request._cookies, req, resp.raw) merge_cookies(prepared_request._cookies, self.cookies) prepared_request.prepare_cookies(prepared_request._cookies) # Rebuild auth and proxy information. proxies = self.rebuild_proxies(prepared_request, proxies) self.rebuild_auth(prepared_request, resp) # A failed tell() sets `_body_position` to `object()`. This non-None # value ensures `rewindable` will be True, allowing us to raise an # UnrewindableBodyError, instead of hanging the connection. rewindable = ( prepared_request._body_position is not None and ('Content-Length' in headers or 'Transfer-Encoding' in headers) ) # Attempt to rewind consumed file-like object. if rewindable: rewind_body(prepared_request) # Override the original request. req = prepared_request if yield_requests: yield req else: resp = self.send( req, stream=stream, timeout=timeout, verify=verify, cert=cert, proxies=proxies, allow_redirects=False, **adapter_kwargs ) extract_cookies_to_jar(self.cookies, prepared_request, resp.raw) # extract redirect url, if any, for the next loop url = self.get_redirect_target(resp) yield resp def rebuild_auth(self, prepared_request, response): """When being redirected we may want to strip authentication from the request to avoid leaking credentials. This method intelligently removes and reapplies authentication where possible to avoid credential loss. """ headers = prepared_request.headers url = prepared_request.url if 'Authorization' in headers and self.should_strip_auth(response.request.url, url): # If we get redirected to a new host, we should strip out any # authentication headers. del headers['Authorization'] # .netrc might have more auth for us on our new host. new_auth = get_netrc_auth(url) if self.trust_env else None if new_auth is not None: prepared_request.prepare_auth(new_auth) return def rebuild_proxies(self, prepared_request, proxies): """This method re-evaluates the proxy configuration by considering the environment variables. If we are redirected to a URL covered by NO_PROXY, we strip the proxy configuration. Otherwise, we set missing proxy keys for this URL (in case they were stripped by a previous redirect). This method also replaces the Proxy-Authorization header where necessary. 
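
        A rough sketch of the effect, assuming ``trust_env`` is on, the URL is
        not covered by ``NO_PROXY``, and a hypothetical proxy is configured::

            # environment:       HTTP_PROXY=http://user:pw@proxy.local:3128
            # incoming proxies:  {}
            # returned proxies:  {'http': 'http://user:pw@proxy.local:3128'}
            # side effect:       headers['Proxy-Authorization'] is rebuilt from user:pw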
:rtype: dict """ proxies = proxies if proxies is not None else {} headers = prepared_request.headers url = prepared_request.url scheme = urlparse(url).scheme new_proxies = proxies.copy() no_proxy = proxies.get('no_proxy') bypass_proxy = should_bypass_proxies(url, no_proxy=no_proxy) if self.trust_env and not bypass_proxy: environ_proxies = get_environ_proxies(url, no_proxy=no_proxy) proxy = environ_proxies.get(scheme, environ_proxies.get('all')) if proxy: new_proxies.setdefault(scheme, proxy) if 'Proxy-Authorization' in headers: del headers['Proxy-Authorization'] try: username, password = get_auth_from_url(new_proxies[scheme]) except KeyError: username, password = None, None if username and password: headers['Proxy-Authorization'] = _basic_auth_str(username, password) return new_proxies def rebuild_method(self, prepared_request, response): """When being redirected we may want to change the method of the request based on certain specs or browser behavior. """ method = prepared_request.method # http://tools.ietf.org/html/rfc7231#section-6.4.4 if response.status_code == codes.see_other and method != 'HEAD': method = 'GET' # Do what the browsers do, despite standards... # First, turn 302s into GETs. if response.status_code == codes.found and method != 'HEAD': method = 'GET' # Second, if a POST is responded to with a 301, turn it into a GET. # This bizarre behaviour is explained in Issue 1704. if response.status_code == codes.moved and method == 'POST': method = 'GET' prepared_request.method = method class Session(SessionRedirectMixin): """A Requests session. Provides cookie persistence, connection-pooling, and configuration. Basic Usage:: >>> import requests >>> s = requests.Session() >>> s.get('http://httpbin.org/get') Or as a context manager:: >>> with requests.Session() as s: >>> s.get('http://httpbin.org/get') """ __attrs__ = [ 'headers', 'cookies', 'auth', 'proxies', 'hooks', 'params', 'verify', 'cert', 'prefetch', 'adapters', 'stream', 'trust_env', 'max_redirects', ] def __init__(self): #: A case-insensitive dictionary of headers to be sent on each #: :class:`Request ` sent from this #: :class:`Session `. self.headers = default_headers() #: Default Authentication tuple or object to attach to #: :class:`Request `. self.auth = None #: Dictionary mapping protocol or protocol and host to the URL of the proxy #: (e.g. {'http': 'foo.bar:3128', 'http://host.name': 'foo.bar:4012'}) to #: be used on each :class:`Request `. self.proxies = {} #: Event-handling hooks. self.hooks = default_hooks() #: Dictionary of querystring data to attach to each #: :class:`Request `. The dictionary values may be lists for #: representing multivalued query parameters. self.params = {} #: Stream response content default. self.stream = False #: SSL Verification default. self.verify = True #: SSL client certificate default, if String, path to ssl client #: cert file (.pem). If Tuple, ('cert', 'key') pair. self.cert = None #: Maximum number of redirects allowed. If the request exceeds this #: limit, a :class:`TooManyRedirects` exception is raised. #: This defaults to requests.models.DEFAULT_REDIRECT_LIMIT, which is #: 30. self.max_redirects = DEFAULT_REDIRECT_LIMIT #: Trust environment settings for proxy configuration, default #: authentication and similar. self.trust_env = True #: A CookieJar containing all currently outstanding cookies set on this #: session. By default it is a #: :class:`RequestsCookieJar `, but #: may be any other ``cookielib.CookieJar`` compatible object. 
self.cookies = cookiejar_from_dict({}) # Default connection adapters. self.adapters = OrderedDict() self.mount('https://', HTTPAdapter()) self.mount('http://', HTTPAdapter()) def __enter__(self): return self def __exit__(self, *args): self.close() def prepare_request(self, request): """Constructs a :class:`PreparedRequest ` for transmission and returns it. The :class:`PreparedRequest` has settings merged from the :class:`Request ` instance and those of the :class:`Session`. :param request: :class:`Request` instance to prepare with this session's settings. :rtype: requests.PreparedRequest """ cookies = request.cookies or {} # Bootstrap CookieJar. if not isinstance(cookies, cookielib.CookieJar): cookies = cookiejar_from_dict(cookies) # Merge with session cookies merged_cookies = merge_cookies( merge_cookies(RequestsCookieJar(), self.cookies), cookies) # Set environment's basic authentication if not explicitly set. auth = request.auth if self.trust_env and not auth and not self.auth: auth = get_netrc_auth(request.url) p = PreparedRequest() p.prepare( method=request.method.upper(), url=request.url, files=request.files, data=request.data, json=request.json, headers=merge_setting(request.headers, self.headers, dict_class=CaseInsensitiveDict), params=merge_setting(request.params, self.params), auth=merge_setting(auth, self.auth), cookies=merged_cookies, hooks=merge_hooks(request.hooks, self.hooks), ) return p def request(self, method, url, params=None, data=None, headers=None, cookies=None, files=None, auth=None, timeout=None, allow_redirects=True, proxies=None, hooks=None, stream=None, verify=None, cert=None, json=None): """Constructs a :class:`Request `, prepares it and sends it. Returns :class:`Response ` object. :param method: method for the new :class:`Request` object. :param url: URL for the new :class:`Request` object. :param params: (optional) Dictionary or bytes to be sent in the query string for the :class:`Request`. :param data: (optional) Dictionary, bytes, or file-like object to send in the body of the :class:`Request`. :param json: (optional) json to send in the body of the :class:`Request`. :param headers: (optional) Dictionary of HTTP Headers to send with the :class:`Request`. :param cookies: (optional) Dict or CookieJar object to send with the :class:`Request`. :param files: (optional) Dictionary of ``'filename': file-like-objects`` for multipart encoding upload. :param auth: (optional) Auth tuple or callable to enable Basic/Digest/Custom HTTP Auth. :param timeout: (optional) How long to wait for the server to send data before giving up, as a float, or a :ref:`(connect timeout, read timeout) ` tuple. :type timeout: float or tuple :param allow_redirects: (optional) Set to True by default. :type allow_redirects: bool :param proxies: (optional) Dictionary mapping protocol or protocol and hostname to the URL of the proxy. :param stream: (optional) whether to immediately download the response content. Defaults to ``False``. :param verify: (optional) Either a boolean, in which case it controls whether we verify the server's TLS certificate, or a string, in which case it must be a path to a CA bundle to use. Defaults to ``True``. :param cert: (optional) if String, path to ssl client cert file (.pem). If Tuple, ('cert', 'key') pair. :rtype: requests.Response """ # Create the Request. 
req = Request( method=method.upper(), url=url, headers=headers, files=files, data=data or {}, json=json, params=params or {}, auth=auth, cookies=cookies, hooks=hooks, ) prep = self.prepare_request(req) proxies = proxies or {} settings = self.merge_environment_settings( prep.url, proxies, stream, verify, cert ) # Send the request. send_kwargs = { 'timeout': timeout, 'allow_redirects': allow_redirects, } send_kwargs.update(settings) resp = self.send(prep, **send_kwargs) return resp def get(self, url, **kwargs): r"""Sends a GET request. Returns :class:`Response` object. :param url: URL for the new :class:`Request` object. :param \*\*kwargs: Optional arguments that ``request`` takes. :rtype: requests.Response """ kwargs.setdefault('allow_redirects', True) return self.request('GET', url, **kwargs) def options(self, url, **kwargs): r"""Sends a OPTIONS request. Returns :class:`Response` object. :param url: URL for the new :class:`Request` object. :param \*\*kwargs: Optional arguments that ``request`` takes. :rtype: requests.Response """ kwargs.setdefault('allow_redirects', True) return self.request('OPTIONS', url, **kwargs) def head(self, url, **kwargs): r"""Sends a HEAD request. Returns :class:`Response` object. :param url: URL for the new :class:`Request` object. :param \*\*kwargs: Optional arguments that ``request`` takes. :rtype: requests.Response """ kwargs.setdefault('allow_redirects', False) return self.request('HEAD', url, **kwargs) def post(self, url, data=None, json=None, **kwargs): r"""Sends a POST request. Returns :class:`Response` object. :param url: URL for the new :class:`Request` object. :param data: (optional) Dictionary, bytes, or file-like object to send in the body of the :class:`Request`. :param json: (optional) json to send in the body of the :class:`Request`. :param \*\*kwargs: Optional arguments that ``request`` takes. :rtype: requests.Response """ return self.request('POST', url, data=data, json=json, **kwargs) def put(self, url, data=None, **kwargs): r"""Sends a PUT request. Returns :class:`Response` object. :param url: URL for the new :class:`Request` object. :param data: (optional) Dictionary, bytes, or file-like object to send in the body of the :class:`Request`. :param \*\*kwargs: Optional arguments that ``request`` takes. :rtype: requests.Response """ return self.request('PUT', url, data=data, **kwargs) def patch(self, url, data=None, **kwargs): r"""Sends a PATCH request. Returns :class:`Response` object. :param url: URL for the new :class:`Request` object. :param data: (optional) Dictionary, bytes, or file-like object to send in the body of the :class:`Request`. :param \*\*kwargs: Optional arguments that ``request`` takes. :rtype: requests.Response """ return self.request('PATCH', url, data=data, **kwargs) def delete(self, url, **kwargs): r"""Sends a DELETE request. Returns :class:`Response` object. :param url: URL for the new :class:`Request` object. :param \*\*kwargs: Optional arguments that ``request`` takes. :rtype: requests.Response """ return self.request('DELETE', url, **kwargs) def send(self, request, **kwargs): """Send a given PreparedRequest. :rtype: requests.Response """ # Set defaults that the hooks can utilize to ensure they always have # the correct parameters to reproduce the previous request. kwargs.setdefault('stream', self.stream) kwargs.setdefault('verify', self.verify) kwargs.setdefault('cert', self.cert) kwargs.setdefault('proxies', self.proxies) # It's possible that users might accidentally send a Request object. 
# Guard against that specific failure case. if isinstance(request, Request): raise ValueError('You can only send PreparedRequests.') # Set up variables needed for resolve_redirects and dispatching of hooks allow_redirects = kwargs.pop('allow_redirects', True) stream = kwargs.get('stream') hooks = request.hooks # Get the appropriate adapter to use adapter = self.get_adapter(url=request.url) # Start time (approximately) of the request start = preferred_clock() # Send the request r = adapter.send(request, **kwargs) # Total elapsed time of the request (approximately) elapsed = preferred_clock() - start r.elapsed = timedelta(seconds=elapsed) # Response manipulation hooks r = dispatch_hook('response', hooks, r, **kwargs) # Persist cookies if r.history: # If the hooks create history then we want those cookies too for resp in r.history: extract_cookies_to_jar(self.cookies, resp.request, resp.raw) extract_cookies_to_jar(self.cookies, request, r.raw) # Redirect resolving generator. gen = self.resolve_redirects(r, request, **kwargs) # Resolve redirects if allowed. history = [resp for resp in gen] if allow_redirects else [] # Shuffle things around if there's history. if history: # Insert the first (original) request at the start history.insert(0, r) # Get the last request made r = history.pop() r.history = history # If redirects aren't being followed, store the response on the Request for Response.next(). if not allow_redirects: try: r._next = next(self.resolve_redirects(r, request, yield_requests=True, **kwargs)) except StopIteration: pass if not stream: r.content return r def merge_environment_settings(self, url, proxies, stream, verify, cert): """ Check the environment and merge it with some settings. :rtype: dict """ # Gather clues from the surrounding environment. if self.trust_env: # Set environment's proxies. no_proxy = proxies.get('no_proxy') if proxies is not None else None env_proxies = get_environ_proxies(url, no_proxy=no_proxy) for (k, v) in env_proxies.items(): proxies.setdefault(k, v) # Look for requests environment configuration and be compatible # with cURL. if verify is True or verify is None: verify = (os.environ.get('REQUESTS_CA_BUNDLE') or os.environ.get('CURL_CA_BUNDLE')) # Merge all the kwargs. proxies = merge_setting(proxies, self.proxies) stream = merge_setting(stream, self.stream) verify = merge_setting(verify, self.verify) cert = merge_setting(cert, self.cert) return {'verify': verify, 'proxies': proxies, 'stream': stream, 'cert': cert} def get_adapter(self, url): """ Returns the appropriate connection adapter for the given URL. :rtype: requests.adapters.BaseAdapter """ for (prefix, adapter) in self.adapters.items(): if url.lower().startswith(prefix): return adapter # Nothing matches :-/ raise InvalidSchema("No connection adapters were found for '%s'" % url) def close(self): """Closes all adapters and as such the session""" for v in self.adapters.values(): v.close() def mount(self, prefix, adapter): """Registers a connection adapter to a prefix. Adapters are sorted in descending order by prefix length. """ self.adapters[prefix] = adapter keys_to_move = [k for k in self.adapters if len(k) < len(prefix)] for key in keys_to_move: self.adapters[key] = self.adapters.pop(key) def __getstate__(self): state = dict((attr, getattr(self, attr, None)) for attr in self.__attrs__) return state def __setstate__(self, state): for attr, value in state.items(): setattr(self, attr, value) def session(): """ Returns a :class:`Session` for context-management. 
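
    Usage (a short sketch; the URL is illustrative)::

        >>> import requests
        >>> with requests.session() as s:
        ...     r = s.get('http://httpbin.org/get')
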
    :rtype: Session
    """
    return Session()


[binary .pyo bytecode omitted -- the marshalled code objects do not survive as text; only the file paths and embedded docstrings are recoverable]

site-packages/pip/_vendor/requests/models.pyo -- the primary objects that power Requests: Request, PreparedRequest and Response, including body/URL/header preparation, iter_content()/iter_lines(), text/json() decoding and raise_for_status()
site-packages/pip/_vendor/requests/certs.pyo  -- returns the preferred default CA certificate bundle, i.e. certifi's where(); repackagers can change where() to point at a separately packaged bundle
site-packages/pip/_vendor/requests/auth.pyo   -- authentication handlers: _basic_auth_str(), AuthBase, HTTPBasicAuth, HTTPProxyAuth and HTTPDigestAuth
hexdigest(tx((s=/usr/lib/python2.7/site-packages/pip/_vendor/requests/auth.pytmd5_utf8stSHAcSs4t|tr!|jd}ntj|jS(Nsutf-8(R RRR=tsha1R?(R@((s=/usr/lib/python2.7/site-packages/pip/_vendor/requests/auth.pytsha_utf8scsd||fS(Ns%s:%s((tstd(t hash_utf8(s=/usr/lib/python2.7/site-packages/pip/_vendor/requests/auth.pytR.t/t?s%s:%s:%ss%s:%sis%08xsutf-8iitautht,s%s:%s:%s:%s:%ss>username="%s", realm="%s", nonce="%s", uri="%s", response="%s"s , opaque="%s"s, algorithm="%s"s , digest="%s"s , qop="auth", nc=%s, cnonce="%s"s Digest %sN(R,R3tgetR"tupperRtpathtqueryRRR1R2RRttimetctimetosturandomR=RCR?tsplit(RtmethodturlR7R8R9R:R;t _algorithmRARDtKDtentdigtp_parsedROtA1tA2tHA1tHA2tncvalueREtcnoncetrespdigtnoncebittbase((RGs=/usr/lib/python2.7/site-packages/pip/_vendor/requests/auth.pytbuild_digest_headersr               ! cKs|jrd|j_ndS(s)Reset num_401_calls counter on redirects.iN(t is_redirectR,R5(RRtkwargs((s=/usr/lib/python2.7/site-packages/pip/_vendor/requests/auth.pythandle_redirects cKsd|jkodkns/d|j_|S|jjd k r]|jjj|jjn|jj dd}d|j kr~|jjdkr~|jjd7_t j dd t j }t|jd|d d|j_|j|j|jj}t|j|j|j|j|j|j|j|j|jd <|jj||}|jj|||_|Sd|j_|S( so Takes the given response and tries digest-auth, if needed. :rtype: requests.Response iiiswww-authenticateR.tdigestisdigest tflagstcountR&N(t status_codeR,R5R4R"trequesttbodytseekR'RMtlowertretcompilet IGNORECASERtsubR3tcontenttclosetcopyRt_cookiestrawtprepare_cookiesReRVRWt connectiontsendthistorytappend(RRRgts_authtpattprept_r((s=/usr/lib/python2.7/site-packages/pip/_vendor/requests/auth.pyt handle_401s.  $$   cCs|j|jjr8|j|j|j|jds$       ,PKZwL&d&d,site-packages/pip/_vendor/requests/utils.pycnu[ abc@s\dZddlZddlZddlZddlZddlZddlZddlZddlZddl Z ddl Z ddl Z ddl m Z ddl mZddlmZddlmZddlmZmZmZmZmZmZmZmZmZmZmZmZm Z m!Z!dd l"m#Z#dd l$m%Z%dd l&m'Z'm(Z(m)Z)m*Z*d@Z+ej,Z-idd6dd6Z.ej/dkrdZ0dZndZ1dZ2e3dZ4dZ5dZ6dZ7dZ8dZ9e3dZ:dZ;dZ<d Z=d!Z>d"Z?d#Z@d$ZAeBd%d&ZCd'ZDd(ZEd)ZFd*ZGd+ZHd,ZIejJd-ZKd.ZLdd/ZNd0ZOd1d2ZPd3ZQd4ZRd5jSd6ZTeTd7ZUeTd8ZVd9ZWd:ZXd;ZYejZd<Z[ejZd<Z\d=Z]d>Z^d?Z_dS(As requests.utils ~~~~~~~~~~~~~~ This module provides utility functions that are used within Requests that are also useful for external consumption. iNi(t __version__(tcerts(tto_native_string(tparse_http_list(tquoteturlparsetbyteststrt OrderedDicttunquotet getproxiest proxy_bypasst urlunparset basestringt integer_typestis_py3tproxy_bypass_environmenttgetproxies_environment(tcookiejar_from_dict(tCaseInsensitiveDict(t InvalidURLt InvalidHeadertFileModeWarningtUnrewindableBodyErrors.netrct_netrciPthttpithttpstWindowscCs"trddl}n ddl}yE|j|jd}|j|dd}|j|dd}Wntk rztSX| s| rtS|jd}x|D]w}|dkrd|krt Sn|j dd }|j d d }|j d d}t j ||t j rt SqWtS( Nis;Software\Microsoft\Windows\CurrentVersion\Internet Settingst ProxyEnableit ProxyOverridet;st.s\.t*s.*t?(Rtwinregt_winregtOpenKeytHKEY_CURRENT_USERt QueryValueExtOSErrortFalsetsplittTruetreplacetretmatchtI(thostR"tinternetSettingst proxyEnablet proxyOverridettest((s>/usr/lib/python2.7/site-packages/pip/_vendor/requests/utils.pytproxy_bypass_registry.s2          cCs!trt|St|SdS(sReturn True, if the host should be bypassed. Checks proxy settings gathered from the environment, if specified, or the registry. 
N(RRR4(R/((s>/usr/lib/python2.7/site-packages/pip/_vendor/requests/utils.pyR Os  cCs"t|dr|j}n|S(s/Returns an internal sequence dictionary update.titems(thasattrR5(td((s>/usr/lib/python2.7/site-packages/pip/_vendor/requests/utils.pytdict_to_sequence[scCsd}d}t|dr*t|}nt|drE|j}nmt|dry|j}Wntjk rzqXtj|j}d|j krt j dt qnt|drty|j }Wn,ttfk r|dk rq|}qqqtXt|drt|dkrty3|jdd |j }|j|pIdWqqttfk rmd}qqXqtn|dkrd}ntd||S( Nit__len__tlentfilenotbs%Requests has determined the content-length for this request using the binary size of the file: however, the file has been opened in text mode (i.e. without the 'b' flag in the mode). This may lead to an incorrect content-length. In Requests 3.0, support will be removed for files in text mode.ttelltseeki(tNoneR6R:R;tiotUnsupportedOperationtostfstattst_sizetmodetwarningstwarnRR=R'tIOErrorR>tmax(tot total_lengthtcurrent_positionR;((s>/usr/lib/python2.7/site-packages/pip/_vendor/requests/utils.pyt super_lends@       c CseyGddlm}m}d}x^tD]V}ytjjdj|}Wntk r_dSXtjj |r&|}Pq&q&W|dkrdSt |}d}t |t r|j d}n|jj|d} yG||j| } | r| drdnd} | | | d fSWn#|tfk rE|rFqFnXWnttfk r`nXdS( s;Returns the Requests tuple auth for a given url from netrc.i(tnetrctNetrcParseErrors~/{0}Nt:tasciiiii(RNROR?t NETRC_FILESRBtpatht expandusertformattKeyErrortexistsRt isinstanceRtdecodetnetlocR)tauthenticatorsRHt ImportErrortAttributeError( turlt raise_errorsRNROt netrc_pathtftloctritsplitstrR/Rtlogin_i((s>/usr/lib/python2.7/site-packages/pip/_vendor/requests/utils.pytget_netrc_auths8    cCs[t|dd}|rWt|trW|ddkrW|ddkrWtjj|SdS(s0Tries to guess the filename of the given object.tnameitN(tgetattrR?RXR RBRStbasename(tobjRg((s>/usr/lib/python2.7/site-packages/pip/_vendor/requests/utils.pytguess_filenames%cCsD|dkrdSt|ttttfr:tdnt|S(sTake an object and test to see if it can be represented as a dictionary. Unless it can not be represented as such, return an OrderedDict, e.g., :: >>> from_key_val_list([('key', 'val')]) OrderedDict([('key', 'val')]) >>> from_key_val_list('string') ValueError: need more than 1 value to unpack >>> from_key_val_list({'key': 'val'}) OrderedDict([('key', 'val')]) :rtype: OrderedDict s+cannot encode objects that are not 2-tuplesN(R?RXRRtbooltintt ValueErrorR(tvalue((s>/usr/lib/python2.7/site-packages/pip/_vendor/requests/utils.pytfrom_key_val_lists  cCse|dkrdSt|ttttfr:tdnt|tjr[|j }nt |S(sTake an object and test to see if it can be represented as a dictionary. If it can be, return a list of tuples, e.g., :: >>> to_key_val_list([('key', 'val')]) [('key', 'val')] >>> to_key_val_list({'key': 'val'}) [('key', 'val')] >>> to_key_val_list('string') ValueError: cannot encode objects that are not 2-tuples. :rtype: list s+cannot encode objects that are not 2-tuplesN( R?RXRRRnRoRpt collectionstMappingR5tlist(Rq((s>/usr/lib/python2.7/site-packages/pip/_vendor/requests/utils.pytto_key_val_lists cCshg}x[t|D]M}|d |dko8dknrSt|dd!}n|j|qW|S(sParse lists as described by RFC 2068 Section 2. In particular, parse comma-separated lists where the elements of the list may include quoted-strings. A quoted-string could contain a comma. A non-quoted string could have quotes in the middle. Quotes are removed automatically after parsing. It basically works like :func:`parse_set_header` just that items may appear multiple times and case sensitivity is preserved. The return value is a standard :class:`list`: >>> parse_list_header('token, "quoted value"') ['token', 'quoted value'] To create a header from the :class:`list` again, use the :func:`dump_header` function. :param value: a string with a list header. 
:return: :class:`list` :rtype: list iit"(t_parse_list_headertunquote_header_valuetappend(Rqtresulttitem((s>/usr/lib/python2.7/site-packages/pip/_vendor/requests/utils.pytparse_list_headers $cCsi}xt|D]~}d|kr5d||>> d = parse_dict_header('foo="is a fish", bar="as well"') >>> type(d) is dict True >>> sorted(d.items()) [('bar', 'as well'), ('foo', 'is a fish')] If there is no value for a key it will be `None`: >>> parse_dict_header('key_without_value') {'key_without_value': None} To create a header from the :class:`dict` again, use the :func:`dump_header` function. :param value: a string with a dict header. :return: :class:`dict` :rtype: dict t=iiRwN(RxR?R)Ry(RqR{R|Rg((s>/usr/lib/python2.7/site-packages/pip/_vendor/requests/utils.pytparse_dict_header1s  $cCsq|rm|d|dko%dknrm|dd!}| sN|d dkrm|jddjddSn|S( sUnquotes a header value. (Reversal of :func:`quote_header_value`). This does not use the real unquoting but what browsers are actually using for quoting. :param value: the header value to unquote. :rtype: str iiRwiis\\s\s\"(R+(Rqt is_filename((s>/usr/lib/python2.7/site-packages/pip/_vendor/requests/utils.pyRyTs * cCs+i}x|D]}|j||j/usr/lib/python2.7/site-packages/pip/_vendor/requests/utils.pytdict_from_cookiejarms cCs t||S(sReturns a CookieJar from a key/value dictionary. :param cj: CookieJar to insert cookies into. :param cookie_dict: Dict of key/values to insert into CookieJar. :rtype: CookieJar (R(RR((s>/usr/lib/python2.7/site-packages/pip/_vendor/requests/utils.pytadd_dict_to_cookiejar|scCsvtjdttjddtj}tjddtj}tjd}|j||j||j|S(slReturns encodings from given content string. :param content: bytestring to extract encodings from. sIn requests 3.0, get_encodings_from_content will be removed. For more information, please see the discussion on issue #2266. (This warning should only appear once.)s!]tflagss+]s$^<\?xml.*?encoding=["\']*(.+?)["\'>](RFRGtDeprecationWarningR,tcompileR.tfindall(tcontentt charset_ret pragma_retxml_re((s>/usr/lib/python2.7/site-packages/pip/_vendor/requests/utils.pytget_encodings_from_contentscCs_|jd}|sdStj|\}}d|krK|djdSd|kr[dSdS(s}Returns encodings from given HTTP Header Dict. :param headers: dictionary to extract encoding from. :rtype: str s content-typetcharsets'"ttexts ISO-8859-1N(tgetR?tcgit parse_headertstrip(theaderst content_typetparams((s>/usr/lib/python2.7/site-packages/pip/_vendor/requests/utils.pytget_encoding_from_headerss  ccs|jdkr)x|D] }|VqWdStj|jdd}x+|D]#}|j|}|rK|VqKqKW|jddt}|r|VndS(sStream decodes a iterator.NterrorsR+ttfinal(tencodingR?tcodecstgetincrementaldecoderRYR*(titeratortrR|tdecodertchunktrv((s>/usr/lib/python2.7/site-packages/pip/_vendor/requests/utils.pytstream_decode_response_unicodes    ccsdd}|dks|dkr-t|}nx0|t|kr_||||!V||7}q0WdS(s Iterate over slices of a string.iN(R?R:(tstringt slice_lengthtpos((s>/usr/lib/python2.7/site-packages/pip/_vendor/requests/utils.pyt iter_slicess cCstjdtg}t|j}|rcyt|j|SWqctk r_|j|qcXnyt|j|ddSWnt k r|jSXdS(sReturns the requested content back in unicode. :param r: Response object to get unicode content from. Tried: 1. charset from content-type 2. fall back and replace all unicode characters :rtype: str sIn requests 3.0, get_unicode_from_response will be removed. For more information, please see the discussion on issue #2266. 
(This warning should only appear once.)RR+N( RFRGRRRRRt UnicodeErrorRzt TypeError(Rttried_encodingsR((s>/usr/lib/python2.7/site-packages/pip/_vendor/requests/utils.pytget_unicode_from_responses   t4ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyzs0123456789-._~cCs|jd}xtdt|D]}||dd!}t|dkr|jrytt|d}Wn!tk rtd|nX|tkr|||d||/usr/lib/python2.7/site-packages/pip/_vendor/requests/utils.pytunquote_unreserveds  cCsKd}d}ytt|d|SWntk rFt|d|SXdS(sRe-quote the given URI. This function passes the given URI through an unquote/quote cycle to ensure that it is fully and consistently quoted. :rtype: str s!#$%&'()*+,/:;=?@[]~s!#$&'()*+,/:;=?@[]~tsafeN(RRR(Rtsafe_with_percenttsafe_without_percent((s>/usr/lib/python2.7/site-packages/pip/_vendor/requests/utils.pyt requote_uri s  cCstjdtj|d}|jd\}}tjdtjtt|d}tjdtj|d|@}||@||@kS(sThis function allows you to check if an IP belongs to a network subnet Example: returns True if ip = 192.168.1.1 and net = 192.168.1.0/24 returns False if ip = 192.168.1.1 and net = 192.168.100.0/24 :rtype: bool s=Lit/(tstructtunpacktsockett inet_atonR)tdotted_netmaskRo(tiptnettipaddrtnetaddrtbitstnetmasktnetwork((s>/usr/lib/python2.7/site-packages/pip/_vendor/requests/utils.pytaddress_in_network#s +#cCs/ddd|>dA}tjtjd|S(sConverts mask from /xx format to xxx.xxx.xxx.xxx Example: if mask is 24 function returns 255.255.255.0 :rtype: str Iii s>I(Rt inet_ntoaRtpack(tmaskR((s>/usr/lib/python2.7/site-packages/pip/_vendor/requests/utils.pyR2scCs-ytj|Wntjk r(tSXtS(s :rtype: bool (RRterrorR(R*(t string_ip((s>/usr/lib/python2.7/site-packages/pip/_vendor/requests/utils.pytis_ipv4_address=s cCs|jddkryt|jdd}Wntk rFtSX|dks_|dkrctSytj|jddWqtjk rtSXntStS(sV Very simple check of the cidr format in no_proxy variable. :rtype: bool Rii i( tcountRoR)RpR(RRRR*(tstring_networkR((s>/usr/lib/python2.7/site-packages/pip/_vendor/requests/utils.pyt is_valid_cidrHs ccst|dk }|r4tjj|}|tj|/usr/lib/python2.7/site-packages/pip/_vendor/requests/utils.pyt set_environ`s    c Cscd}|}|d kr*|d}nt|j}|r d|jddjdD}|jdd}t|rx|D]8}t|rt||rtSq||krtSqWq x@|D]5}|j |s|jddj |rtSqWnt d|8yt |}Wn t t jfk rNt}nXWd QX|r_tStS( sL Returns whether we should bypass proxies or not. :rtype: bool cSs(tjj|p'tjj|jS(N(RBRRtupper(tk((s>/usr/lib/python2.7/site-packages/pip/_vendor/requests/utils.pyt|Rtno_proxycss|]}|r|VqdS(N((t.0R/((s>/usr/lib/python2.7/site-packages/pip/_vendor/requests/utils.pys st Rt,RPiN(R?RRZR+R)RRRR*tendswithRR RRtgaierrorR(( R^Rt get_proxyt no_proxy_argRZRtproxy_ipR/tbypass((s>/usr/lib/python2.7/site-packages/pip/_vendor/requests/utils.pytshould_bypass_proxiesvs4  %      + cCs!t|d|riStSdS(sA Return a dict of environment proxies. :rtype: dict RN(RR (R^R((s>/usr/lib/python2.7/site-packages/pip/_vendor/requests/utils.pytget_environ_proxiesscCs|p i}t|}|jdkrC|j|j|jdS|jd|j|jd|jdg}d}x(|D] }||krz||}PqzqzW|S(sSelect a proxy for the url, if applicable. :param url: The url being for the request :param proxies: A dictionary of schemes or schemes and hosts to proxy URLs talls://sall://N(RthostnameR?Rtscheme(R^tproxiesturlpartst proxy_keystproxyt proxy_key((s>/usr/lib/python2.7/site-packages/pip/_vendor/requests/utils.pyt select_proxys       spython-requestscCsd|tfS(sO Return a string representing the default user agent. 
:rtype: str s%s/%s(R(Rg((s>/usr/lib/python2.7/site-packages/pip/_vendor/requests/utils.pytdefault_user_agentscCs2titd6djd d6dd6dd 6S( s9 :rtype: requests.structures.CaseInsensitiveDict s User-Agents, tgziptdeflatesAccept-Encodings*/*tAccepts keep-alivet Connection(RR(RRR(((s>/usr/lib/python2.7/site-packages/pip/_vendor/requests/utils.pytdefault_headerss  c Csg}d}xtjd|D]}y|jdd\}}Wntk ra|d}}nXi|jdd6}xa|jdD]P}y|jd\}}Wntk rPnX|j|||j|; rel=front; type="image/jpeg",; rel=back;type="image/jpeg" :rtype: list s '"s, * '"R^R~(R,R)RpRRz( Rqtlinkst replace_charstvalR^Rtlinktparamtkey((s>/usr/lib/python2.7/site-packages/pip/_vendor/requests/utils.pytparse_header_linkss    sRQiicCs|d }|tjtjfkr&dS|d tjkr=dS|d tjtjfkr]dS|jt}|dkr|dS|dkr|d d dtkrd S|d d dtkrd Sn|dkr|d t krd S|d t krdSnd S(s :rtype: str isutf-32is utf-8-sigisutf-16isutf-8Ns utf-16-beis utf-16-les utf-32-bes utf-32-le( Rt BOM_UTF32_LEt BOM_UTF32_BEtBOM_UTF8t BOM_UTF16_LEt BOM_UTF16_BERt_nullt_null2t_null3R?(tdatatsamplet nullcount((s>/usr/lib/python2.7/site-packages/pip/_vendor/requests/utils.pytguess_json_utfs*    cCsSt||\}}}}}}|s7||}}nt||||||fS(sGiven a URL that may or may not have a scheme, prepend the given scheme. Does not replace a present scheme with the one provided as an argument. :rtype: str (RR (R^t new_schemeRRZRSRtquerytfragment((s>/usr/lib/python2.7/site-packages/pip/_vendor/requests/utils.pytprepend_scheme_if_needed1s!cCsRt|}y"t|jt|jf}Wnttfk rMd}nX|S(s{Given a url with authentication components, extract them into a tuple of username,password. :rtype: (str,str) R(RR(RR tusernametpasswordR]R(R^tparsedtauth((s>/usr/lib/python2.7/site-packages/pip/_vendor/requests/utils.pytget_auth_from_urlBs  " s^\S[^\r\n]*$|^$cCs|\}}t|tr$t}nt}y&|j|sOtd|nWn0tk rtd||t|fnXdS(sVerifies that header value is a string which doesn't contain leading whitespace or return characters. This prevents unintended header injection. :param header: tuple, in the format (name, value). s7Invalid return character or leading space in header: %ss>Value for header {%s: %s} must be of type str or bytes, not %sN(RXRt_CLEAN_HEADER_REGEX_BYTEt_CLEAN_HEADER_REGEX_STRR-RRttype(theaderRgRqtpat((s>/usr/lib/python2.7/site-packages/pip/_vendor/requests/utils.pytcheck_header_validityWs   cCsft|\}}}}}}|s4||}}n|jddd}t|||||dfS(sW Given a url remove the fragment and the authentication part. :rtype: str t@iiR(RtrsplitR (R^RRZRSRR R ((s>/usr/lib/python2.7/site-packages/pip/_vendor/requests/utils.pyt urldefragauthls cCs}t|jdd}|dk rmt|jtrmy||jWqyttfk ritdqyXn tddS(sfMove file pointer back to its recorded starting position so it can be read again on redirect. R>s;An error occurred when rewinding request body for redirect.s+Unable to rewind request body for redirect.N( RjtbodyR?RXt_body_positionRRHR'R(tprepared_requestt body_seek((s>/usr/lib/python2.7/site-packages/pip/_vendor/requests/utils.pyt rewind_body}s(s.netrcR(`t__doc__RRRst contextlibR@RBtplatformR,RRRFRRRt_internal_utilsRtcompatRRxRRRRRR R R R R RRRRtcookiesRt structuresRt exceptionsRRRRRRtwheretDEFAULT_CA_BUNDLE_PATHt DEFAULT_PORTStsystemR4R8RMR(RfRmRrRvR}RRyRRRRRRRt frozensetRRRRRRRtcontextmanagerRRR?RRRRRtencodeRRRR RRRRRRRR!(((s>/usr/lib/python2.7/site-packages/pip/_vendor/requests/utils.pyt s           ^"  ! = 3    #      %      9  "      PKZ0 .site-packages/pip/_vendor/requests/__init__.pynu[# -*- coding: utf-8 -*- # __ # /__) _ _ _ _ _/ _ # / ( (- (/ (/ (- _) / _) # / """ Requests HTTP Library ~~~~~~~~~~~~~~~~~~~~~ Requests is an HTTP library, written in Python, for human beings. 

Basic GET usage:

   >>> import requests
   >>> r = requests.get('https://www.python.org')
   >>> r.status_code
   200
   >>> 'Python is a programming language' in r.content
   True

... or POST:

   >>> payload = dict(key1='value1', key2='value2')
   >>> r = requests.post('http://httpbin.org/post', data=payload)
   >>> print(r.text)
   {
     ...
     "form": {
       "key2": "value2",
       "key1": "value1"
     },
     ...
   }

The other HTTP methods are supported - see `requests.api`. Full documentation
is at <http://python-requests.org>.

:copyright: (c) 2017 by Kenneth Reitz.
:license: Apache 2.0, see LICENSE for more details.
"""

from pip._vendor import urllib3
from pip._vendor import chardet
import warnings
from .exceptions import RequestsDependencyWarning


def check_compatibility(urllib3_version, chardet_version):
    urllib3_version = urllib3_version.split('.')
    assert urllib3_version != ['dev']  # Verify urllib3 isn't installed from git.

    # Sometimes, urllib3 only reports its version as 16.1.
    if len(urllib3_version) == 2:
        urllib3_version.append('0')

    # Check urllib3 for compatibility.
    major, minor, patch = urllib3_version  # noqa: F811
    major, minor, patch = int(major), int(minor), int(patch)
    # urllib3 >= 1.21.1, <= 1.22
    assert major == 1
    assert minor >= 21
    assert minor <= 22

    # Check chardet for compatibility.
    major, minor, patch = chardet_version.split('.')[:3]
    major, minor, patch = int(major), int(minor), int(patch)
    # chardet >= 3.0.2, < 3.1.0
    assert major == 3
    assert minor < 1
    assert patch >= 2


# Check imported dependencies for compatibility.
try:
    check_compatibility(urllib3.__version__, chardet.__version__)
except (AssertionError, ValueError):
    warnings.warn("urllib3 ({0}) or chardet ({1}) doesn't match a supported "
                  "version!".format(urllib3.__version__, chardet.__version__),
                  RequestsDependencyWarning)

# Attempt to enable urllib3's SNI support, if possible
# try:
#     from pip._vendor.urllib3.contrib import pyopenssl
#     pyopenssl.inject_into_urllib3()
# except ImportError:
#     pass

# urllib3's DependencyWarnings should be silenced.
from pip._vendor.urllib3.exceptions import DependencyWarning
warnings.simplefilter('ignore', DependencyWarning)

from .__version__ import __title__, __description__, __url__, __version__
from .__version__ import __build__, __author__, __author_email__, __license__
from .__version__ import __copyright__, __cake__

from . import utils
from . import packages
from .models import Request, Response, PreparedRequest
from .api import request, get, head, post, patch, put, delete, options
from .sessions import session, Session
from .status_codes import codes
from .exceptions import (
    RequestException, Timeout, URLRequired,
    TooManyRedirects, HTTPError, ConnectionError,
    FileModeWarning, ConnectTimeout, ReadTimeout
)

# Set default logging handler to avoid "No handler found" warnings.
import logging
try:  # Python 2.7+
    from logging import NullHandler
except ImportError:
    class NullHandler(logging.Handler):
        def emit(self, record):
            pass

logging.getLogger(__name__).addHandler(NullHandler())

# FileModeWarnings go off per the default.
warnings.simplefilter('default', FileModeWarning, append=True)
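A minimal sketch (not part of the vendored package) exercising the top-level API and the exception names that __init__.py above re-exports; the httpbin.org URL is illustrative only:

import requests
from requests.exceptions import ConnectionError, HTTPError, Timeout

try:
    r = requests.get('http://httpbin.org/get', timeout=5)
    r.raise_for_status()                 # raises HTTPError on 4xx/5xx responses
    print(r.status_code)
except (ConnectionError, Timeout) as exc:
    print('network problem: %s' % exc)
except HTTPError as exc:
    print('bad status: %s' % exc)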
[binary zip member (compiled bytecode): site-packages/pip/_vendor/requests/api.pyo, requests.api: the thin wrappers request(), get(), options(), head(), post(), put(), patch() and delete(); their docstrings document the params, data, json, headers, cookies, files, auth, timeout, allow_redirects, proxies, verify, stream and cert keyword arguments.]

site-packages/pip/_vendor/requests/help.py (plain source):

"""Module containing bug report helper(s)."""
from __future__ import print_function

import json
import platform
import sys
import ssl

from pip._vendor import idna
from pip._vendor import urllib3
from pip._vendor import chardet

from . import __version__ as requests_version

try:
    from .packages.urllib3.contrib import pyopenssl
except ImportError:
    pyopenssl = None
    OpenSSL = None
    cryptography = None
else:
    import OpenSSL
    import cryptography


def _implementation():
    """Return a dict with the Python implementation and version.

    Provide both the name and the version of the Python implementation
    currently running. For example, on CPython 2.7.5 it will return
    {'name': 'CPython', 'version': '2.7.5'}.

    This function works best on CPython and PyPy: in particular, it probably
    doesn't work for Jython or IronPython. Future investigation should be done
    to work out the correct shape of the code for those platforms.
    """
    implementation = platform.python_implementation()

    if implementation == 'CPython':
        implementation_version = platform.python_version()
    elif implementation == 'PyPy':
        implementation_version = '%s.%s.%s' % (sys.pypy_version_info.major,
                                               sys.pypy_version_info.minor,
                                               sys.pypy_version_info.micro)
        if sys.pypy_version_info.releaselevel != 'final':
            implementation_version = ''.join([
                implementation_version, sys.pypy_version_info.releaselevel
            ])
    elif implementation == 'Jython':
        implementation_version = platform.python_version()  # Complete Guess
    elif implementation == 'IronPython':
        implementation_version = platform.python_version()  # Complete Guess
    else:
        implementation_version = 'Unknown'

    return {'name': implementation, 'version': implementation_version}


def info():
    """Generate information for a bug report."""
    try:
        platform_info = {
            'system': platform.system(),
            'release': platform.release(),
        }
    except IOError:
        platform_info = {
            'system': 'Unknown',
            'release': 'Unknown',
        }

    implementation_info = _implementation()
    urllib3_info = {'version': urllib3.__version__}
    chardet_info = {'version': chardet.__version__}

    pyopenssl_info = {
        'version': None,
        'openssl_version': '',
    }
    if OpenSSL:
        pyopenssl_info = {
            'version': OpenSSL.__version__,
            'openssl_version': '%x' % OpenSSL.SSL.OPENSSL_VERSION_NUMBER,
        }
    cryptography_info = {
        'version': getattr(cryptography, '__version__', ''),
    }
    idna_info = {
        'version': getattr(idna, '__version__', ''),
    }

    # OPENSSL_VERSION_NUMBER doesn't exist in the Python 2.6 ssl module.
    system_ssl = getattr(ssl, 'OPENSSL_VERSION_NUMBER', None)
    system_ssl_info = {
        'version': '%x' % system_ssl if system_ssl is not None else ''
    }

    return {
        'platform': platform_info,
        'implementation': implementation_info,
        'system_ssl': system_ssl_info,
        'using_pyopenssl': pyopenssl is not None,
        'pyOpenSSL': pyopenssl_info,
        'urllib3': urllib3_info,
        'chardet': chardet_info,
        'cryptography': cryptography_info,
        'idna': idna_info,
        'requests': {
            'version': requests_version,
        },
    }


def main():
    """Pretty-print the bug information as JSON."""
    print(json.dumps(info(), sort_keys=True, indent=2))


if __name__ == '__main__':
    main()

[binary zip members (compiled bytecode): site-packages/pip/_vendor/requests/packages.pyo and packages.pyc, the requests.packages import shim for the vendored urllib3, idna and chardet, and site-packages/pip/_vendor/requests/hooks.pyc, compiled requests.hooks (default_hooks, dispatch_hook).]
[binary zip members (compiled bytecode): site-packages/pip/_vendor/requests/auth.pyc and a further compiled copy of requests/utils, duplicating the auth and utils bytecode listed earlier, plus site-packages/pip/_vendor/requests/_internal_utils.pyc, compiled requests._internal_utils (to_native_string, unicode_is_ascii).]
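A minimal sketch of the authentication classes compiled above, using the public requests API; the httpbin.org endpoints are illustrative only:

import requests
from requests.auth import HTTPBasicAuth, HTTPDigestAuth

# Basic auth; a plain ('user', 'pass') tuple is the usual shorthand.
requests.get('http://httpbin.org/basic-auth/user/pass',
             auth=HTTPBasicAuth('user', 'pass'))

# Digest auth; HTTPDigestAuth answers the server's 401 challenge and re-sends
# the request with a Digest Authorization header.
requests.get('http://httpbin.org/digest-auth/auth/user/pass',
             auth=HTTPDigestAuth('user', 'pass'))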
(t isinstanceRRtencodetdecode(tstringtencodingtout((sH/usr/lib/python2.7/site-packages/pip/_vendor/requests/_internal_utils.pytto_native_strings  cCsCt|tsty|jdtSWntk r>tSXdS(sDetermine if unicode string only contains ASCII characters. :param str u_string: unicode string to check. Must be unicode and not Python 2 `str`. :rtype: bool RN(RRtAssertionErrorRtTruetUnicodeEncodeErrortFalse(tu_string((sH/usr/lib/python2.7/site-packages/pip/_vendor/requests/_internal_utils.pytunicode_is_asciis   N(t__doc__tcompatRRRR R(((sH/usr/lib/python2.7/site-packages/pip/_vendor/requests/_internal_utils.pyt s PKZ|y%1site-packages/pip/_vendor/requests/__version__.pynu[# .-. .-. .-. . . .-. .-. .-. .-. # |( |- |.| | | |- `-. | `-. # ' ' `-' `-`.`-' `-' `-' ' `-' __title__ = 'requests' __description__ = 'Python HTTP for Humans.' __url__ = 'http://python-requests.org' __version__ = '2.18.4' __build__ = 0x021804 __author__ = 'Kenneth Reitz' __author_email__ = 'me@kennethreitz.org' __license__ = 'Apache 2.0' __copyright__ = 'Copyright 2017 Kenneth Reitz' __cake__ = u'\u2728 \U0001f370 \u2728' PKZ6dWdW/site-packages/pip/_vendor/requests/sessions.pyonu[ abc@s+dZddlZddlZddlZddlmZddlmZddlm Z ddl m Z m Z m Z mZmZddlmZmZmZmZdd lmZmZmZdd lmZmZdd lmZdd lmZm Z dd l!m"Z"m#Z#m$Z$m%Z%ddl&m'Z'ddl(m)Z)ddlm*Z*m+Z+m,Z,m-Z-m.Z.m/Z/m0Z0ddl1m2Z2ddlm3Z3ej4dkry ej5Z6Wne7k rej8Z6nXn ejZ6e dZ9e dZ:de;fdYZ<de<fdYZ=dZ>dS(s requests.session ~~~~~~~~~~~~~~~~ This module provides a Session object to manage and persist settings across requests (cookies, auth, proxies). iN(tMapping(t timedeltai(t_basic_auth_str(t cookielibtis_py3t OrderedDictturljointurlparse(tcookiejar_from_dicttextract_cookies_to_jartRequestsCookieJart merge_cookies(tRequesttPreparedRequesttDEFAULT_REDIRECT_LIMIT(t default_hookst dispatch_hook(tto_native_string(tto_key_val_listtdefault_headers(tTooManyRedirectst InvalidSchematChunkedEncodingErrortContentDecodingError(tCaseInsensitiveDict(t HTTPAdapter(t requote_uritget_environ_proxiestget_netrc_authtshould_bypass_proxiestget_auth_from_urlt rewind_bodyt DEFAULT_PORTS(tcodes(tREDIRECT_STATItWindowscCs|dkr|S|dkr |St|to;t|tsB|S|t|}|jt|g|jD]\}}|dkrt|^qt}x|D] }||=qW|S(sDetermines appropriate setting for a given request, taking into account the explicit setting on that request, and the setting in the session. If a setting is a dictionary, they will be merged together using `dict_class` N(tNonet isinstanceRRtupdatetitems(trequest_settingtsession_settingt dict_classtmerged_settingtktvt none_keystkey((sA/usr/lib/python2.7/site-packages/pip/_vendor/requests/sessions.pyt merge_setting2s  1  cCsZ|dks!|jdgkr%|S|dksF|jdgkrJ|St|||S(sProperly merges both requests and session hooks. This is necessary because when request_hooks == {'response': []}, the merge breaks Session hooks entirely. tresponseN(R$tgetR0(t request_hookst session_hooksR*((sA/usr/lib/python2.7/site-packages/pip/_vendor/requests/sessions.pyt merge_hooksQs !!tSessionRedirectMixincBsPeZdZdZededdedZdZdZ dZ RS(cCs?|jr;|jd}tr.|jd}nt|dSdS(s7Receives a Response. 
[binary zip member (compiled bytecode): site-packages/pip/_vendor/requests/sessions.pyo, requests.sessions: the merge_setting and merge_hooks helpers, SessionRedirectMixin (redirect target resolution, auth stripping, proxy and method rebuilding across redirects), and the Session class, which persists cookies, headers, auth and proxy settings, pools connections through mounted adapters, exposes request/get/options/head/post/put/patch/delete, prepare_request, send, merge_environment_settings, get_adapter and mount, and can be used as a context manager.]
site-packages/pip/_vendor/requests/auth.py
# -*- coding: utf-8 -*-

"""
requests.auth
~~~~~~~~~~~~~

This module contains the authentication handlers for Requests.
""" import os import re import time import hashlib import threading import warnings from base64 import b64encode from .compat import urlparse, str, basestring from .cookies import extract_cookies_to_jar from ._internal_utils import to_native_string from .utils import parse_dict_header CONTENT_TYPE_FORM_URLENCODED = 'application/x-www-form-urlencoded' CONTENT_TYPE_MULTI_PART = 'multipart/form-data' def _basic_auth_str(username, password): """Returns a Basic Auth string.""" # "I want us to put a big-ol' comment on top of it that # says that this behaviour is dumb but we need to preserve # it because people are relying on it." # - Lukasa # # These are here solely to maintain backwards compatibility # for things like ints. This will be removed in 3.0.0. if not isinstance(username, basestring): warnings.warn( "Non-string usernames will no longer be supported in Requests " "3.0.0. Please convert the object you've passed in ({0!r}) to " "a string or bytes object in the near future to avoid " "problems.".format(username), category=DeprecationWarning, ) username = str(username) if not isinstance(password, basestring): warnings.warn( "Non-string passwords will no longer be supported in Requests " "3.0.0. Please convert the object you've passed in ({0!r}) to " "a string or bytes object in the near future to avoid " "problems.".format(password), category=DeprecationWarning, ) password = str(password) # -- End Removal -- if isinstance(username, str): username = username.encode('latin1') if isinstance(password, str): password = password.encode('latin1') authstr = 'Basic ' + to_native_string( b64encode(b':'.join((username, password))).strip() ) return authstr class AuthBase(object): """Base class that all auth implementations derive from""" def __call__(self, r): raise NotImplementedError('Auth hooks must be callable.') class HTTPBasicAuth(AuthBase): """Attaches HTTP Basic Authentication to the given Request object.""" def __init__(self, username, password): self.username = username self.password = password def __eq__(self, other): return all([ self.username == getattr(other, 'username', None), self.password == getattr(other, 'password', None) ]) def __ne__(self, other): return not self == other def __call__(self, r): r.headers['Authorization'] = _basic_auth_str(self.username, self.password) return r class HTTPProxyAuth(HTTPBasicAuth): """Attaches HTTP Proxy Authentication to a given Request object.""" def __call__(self, r): r.headers['Proxy-Authorization'] = _basic_auth_str(self.username, self.password) return r class HTTPDigestAuth(AuthBase): """Attaches HTTP Digest Authentication to the given Request object.""" def __init__(self, username, password): self.username = username self.password = password # Keep state in per-thread local storage self._thread_local = threading.local() def init_per_thread_state(self): # Ensure state is initialized just once per-thread if not hasattr(self._thread_local, 'init'): self._thread_local.init = True self._thread_local.last_nonce = '' self._thread_local.nonce_count = 0 self._thread_local.chal = {} self._thread_local.pos = None self._thread_local.num_401_calls = None def build_digest_header(self, method, url): """ :rtype: str """ realm = self._thread_local.chal['realm'] nonce = self._thread_local.chal['nonce'] qop = self._thread_local.chal.get('qop') algorithm = self._thread_local.chal.get('algorithm') opaque = self._thread_local.chal.get('opaque') hash_utf8 = None if algorithm is None: _algorithm = 'MD5' else: _algorithm = algorithm.upper() # lambdas assume digest 
modules are imported at the top level if _algorithm == 'MD5' or _algorithm == 'MD5-SESS': def md5_utf8(x): if isinstance(x, str): x = x.encode('utf-8') return hashlib.md5(x).hexdigest() hash_utf8 = md5_utf8 elif _algorithm == 'SHA': def sha_utf8(x): if isinstance(x, str): x = x.encode('utf-8') return hashlib.sha1(x).hexdigest() hash_utf8 = sha_utf8 KD = lambda s, d: hash_utf8("%s:%s" % (s, d)) if hash_utf8 is None: return None # XXX not implemented yet entdig = None p_parsed = urlparse(url) #: path is request-uri defined in RFC 2616 which should not be empty path = p_parsed.path or "/" if p_parsed.query: path += '?' + p_parsed.query A1 = '%s:%s:%s' % (self.username, realm, self.password) A2 = '%s:%s' % (method, path) HA1 = hash_utf8(A1) HA2 = hash_utf8(A2) if nonce == self._thread_local.last_nonce: self._thread_local.nonce_count += 1 else: self._thread_local.nonce_count = 1 ncvalue = '%08x' % self._thread_local.nonce_count s = str(self._thread_local.nonce_count).encode('utf-8') s += nonce.encode('utf-8') s += time.ctime().encode('utf-8') s += os.urandom(8) cnonce = (hashlib.sha1(s).hexdigest()[:16]) if _algorithm == 'MD5-SESS': HA1 = hash_utf8('%s:%s:%s' % (HA1, nonce, cnonce)) if not qop: respdig = KD(HA1, "%s:%s" % (nonce, HA2)) elif qop == 'auth' or 'auth' in qop.split(','): noncebit = "%s:%s:%s:%s:%s" % ( nonce, ncvalue, cnonce, 'auth', HA2 ) respdig = KD(HA1, noncebit) else: # XXX handle auth-int. return None self._thread_local.last_nonce = nonce # XXX should the partial digests be encoded too? base = 'username="%s", realm="%s", nonce="%s", uri="%s", ' \ 'response="%s"' % (self.username, realm, nonce, path, respdig) if opaque: base += ', opaque="%s"' % opaque if algorithm: base += ', algorithm="%s"' % algorithm if entdig: base += ', digest="%s"' % entdig if qop: base += ', qop="auth", nc=%s, cnonce="%s"' % (ncvalue, cnonce) return 'Digest %s' % (base) def handle_redirect(self, r, **kwargs): """Reset num_401_calls counter on redirects.""" if r.is_redirect: self._thread_local.num_401_calls = 1 def handle_401(self, r, **kwargs): """ Takes the given response and tries digest-auth, if needed. :rtype: requests.Response """ # If response is not 4xx, do not auth # See https://github.com/requests/requests/issues/3772 if not 400 <= r.status_code < 500: self._thread_local.num_401_calls = 1 return r if self._thread_local.pos is not None: # Rewind the file position indicator of the body to where # it was to resend the request. r.request.body.seek(self._thread_local.pos) s_auth = r.headers.get('www-authenticate', '') if 'digest' in s_auth.lower() and self._thread_local.num_401_calls < 2: self._thread_local.num_401_calls += 1 pat = re.compile(r'digest ', flags=re.IGNORECASE) self._thread_local.chal = parse_dict_header(pat.sub('', s_auth, count=1)) # Consume content and release the original connection # to allow our new request to reuse the same one. 
r.content r.close() prep = r.request.copy() extract_cookies_to_jar(prep._cookies, r.request, r.raw) prep.prepare_cookies(prep._cookies) prep.headers['Authorization'] = self.build_digest_header( prep.method, prep.url) _r = r.connection.send(prep, **kwargs) _r.history.append(r) _r.request = prep return _r self._thread_local.num_401_calls = 1 return r def __call__(self, r): # Initialize per-thread state, if needed self.init_per_thread_state() # If we have a saved nonce, skip the 401 if self._thread_local.last_nonce: r.headers['Authorization'] = self.build_digest_header(r.method, r.url) try: self._thread_local.pos = r.body.tell() except AttributeError: # In the case of HTTPDigestAuth being reused and the body of # the previous request was a file-like object, pos has the # file position of the previous body. Ensure it's set to # None. self._thread_local.pos = None r.register_hook('response', self.handle_401) r.register_hook('response', self.handle_redirect) self._thread_local.num_401_calls = 1 return r def __eq__(self, other): return all([ self.username == getattr(other, 'username', None), self.password == getattr(other, 'password', None) ]) def __ne__(self, other): return not self == other PKZ+site-packages/pip/_vendor/requests/certs.pynu[#!/usr/bin/env python # -*- coding: utf-8 -*- """ requests.certs ~~~~~~~~~~~~~~ This module returns the preferred default CA certificate bundle. There is only one — the one from the certifi package. If you are packaging Requests, e.g., for a Linux distribution or a managed environment, you can change the definition of where() to return a separately packaged CA bundle. """ from pip._vendor.certifi import where if __name__ == '__main__': print(where()) PKZjFKK2site-packages/pip/_vendor/requests/__version__.pycnu[ abc@s@dZdZdZdZdZdZdZdZdZd Z d S( trequestssPython HTTP for Humans.shttp://python-requests.orgs2.18.4is Kenneth Reitzsme@kennethreitz.orgs Apache 2.0sCopyright 2017 Kenneth Reitzu ✨ 🍰 ✨N( t __title__t__description__t__url__t __version__t __build__t __author__t__author_email__t __license__t __copyright__t__cake__(((sD/usr/lib/python2.7/site-packages/pip/_vendor/requests/__version__.pytsPKZ#PRRR.site-packages/pip/_vendor/requests/adapters.pynu[# -*- coding: utf-8 -*- """ requests.adapters ~~~~~~~~~~~~~~~~~ This module contains the transport adapters that Requests uses to define and maintain connections. 
""" import os.path import socket from pip._vendor.urllib3.poolmanager import PoolManager, proxy_from_url from pip._vendor.urllib3.response import HTTPResponse from pip._vendor.urllib3.util import Timeout as TimeoutSauce from pip._vendor.urllib3.util.retry import Retry from pip._vendor.urllib3.exceptions import ClosedPoolError from pip._vendor.urllib3.exceptions import ConnectTimeoutError from pip._vendor.urllib3.exceptions import HTTPError as _HTTPError from pip._vendor.urllib3.exceptions import MaxRetryError from pip._vendor.urllib3.exceptions import NewConnectionError from pip._vendor.urllib3.exceptions import ProxyError as _ProxyError from pip._vendor.urllib3.exceptions import ProtocolError from pip._vendor.urllib3.exceptions import ReadTimeoutError from pip._vendor.urllib3.exceptions import SSLError as _SSLError from pip._vendor.urllib3.exceptions import ResponseError from .models import Response from .compat import urlparse, basestring from .utils import (DEFAULT_CA_BUNDLE_PATH, get_encoding_from_headers, prepend_scheme_if_needed, get_auth_from_url, urldefragauth, select_proxy) from .structures import CaseInsensitiveDict from .cookies import extract_cookies_to_jar from .exceptions import (ConnectionError, ConnectTimeout, ReadTimeout, SSLError, ProxyError, RetryError, InvalidSchema) from .auth import _basic_auth_str try: from pip._vendor.urllib3.contrib.socks import SOCKSProxyManager except ImportError: def SOCKSProxyManager(*args, **kwargs): raise InvalidSchema("Missing dependencies for SOCKS support.") DEFAULT_POOLBLOCK = False DEFAULT_POOLSIZE = 10 DEFAULT_RETRIES = 0 DEFAULT_POOL_TIMEOUT = None class BaseAdapter(object): """The Base Transport Adapter""" def __init__(self): super(BaseAdapter, self).__init__() def send(self, request, stream=False, timeout=None, verify=True, cert=None, proxies=None): """Sends PreparedRequest object. Returns Response object. :param request: The :class:`PreparedRequest ` being sent. :param stream: (optional) Whether to stream the request content. :param timeout: (optional) How long to wait for the server to send data before giving up, as a float, or a :ref:`(connect timeout, read timeout) ` tuple. :type timeout: float or tuple :param verify: (optional) Either a boolean, in which case it controls whether we verify the server's TLS certificate, or a string, in which case it must be a path to a CA bundle to use :param cert: (optional) Any user-provided SSL certificate to be trusted. :param proxies: (optional) The proxies dictionary to apply to the request. """ raise NotImplementedError def close(self): """Cleans up adapter specific items.""" raise NotImplementedError class HTTPAdapter(BaseAdapter): """The built-in HTTP Adapter for urllib3. Provides a general-case interface for Requests sessions to contact HTTP and HTTPS urls by implementing the Transport Adapter interface. This class will usually be created by the :class:`Session ` class under the covers. :param pool_connections: The number of urllib3 connection pools to cache. :param pool_maxsize: The maximum number of connections to save in the pool. :param max_retries: The maximum number of retries each connection should attempt. Note, this applies only to failed DNS lookups, socket connections and connection timeouts, never to requests where data has made it to the server. By default, Requests does not retry failed connections. If you need granular control over the conditions under which we retry a request, import urllib3's ``Retry`` class and pass that instead. 
:param pool_block: Whether the connection pool should block for connections. Usage:: >>> import requests >>> s = requests.Session() >>> a = requests.adapters.HTTPAdapter(max_retries=3) >>> s.mount('http://', a) """ __attrs__ = ['max_retries', 'config', '_pool_connections', '_pool_maxsize', '_pool_block'] def __init__(self, pool_connections=DEFAULT_POOLSIZE, pool_maxsize=DEFAULT_POOLSIZE, max_retries=DEFAULT_RETRIES, pool_block=DEFAULT_POOLBLOCK): if max_retries == DEFAULT_RETRIES: self.max_retries = Retry(0, read=False) else: self.max_retries = Retry.from_int(max_retries) self.config = {} self.proxy_manager = {} super(HTTPAdapter, self).__init__() self._pool_connections = pool_connections self._pool_maxsize = pool_maxsize self._pool_block = pool_block self.init_poolmanager(pool_connections, pool_maxsize, block=pool_block) def __getstate__(self): return dict((attr, getattr(self, attr, None)) for attr in self.__attrs__) def __setstate__(self, state): # Can't handle by adding 'proxy_manager' to self.__attrs__ because # self.poolmanager uses a lambda function, which isn't pickleable. self.proxy_manager = {} self.config = {} for attr, value in state.items(): setattr(self, attr, value) self.init_poolmanager(self._pool_connections, self._pool_maxsize, block=self._pool_block) def init_poolmanager(self, connections, maxsize, block=DEFAULT_POOLBLOCK, **pool_kwargs): """Initializes a urllib3 PoolManager. This method should not be called from user code, and is only exposed for use when subclassing the :class:`HTTPAdapter `. :param connections: The number of urllib3 connection pools to cache. :param maxsize: The maximum number of connections to save in the pool. :param block: Block when no free connections are available. :param pool_kwargs: Extra keyword arguments used to initialize the Pool Manager. """ # save these values for pickling self._pool_connections = connections self._pool_maxsize = maxsize self._pool_block = block self.poolmanager = PoolManager(num_pools=connections, maxsize=maxsize, block=block, strict=True, **pool_kwargs) def proxy_manager_for(self, proxy, **proxy_kwargs): """Return urllib3 ProxyManager for the given proxy. This method should not be called from user code, and is only exposed for use when subclassing the :class:`HTTPAdapter `. :param proxy: The proxy to return a urllib3 ProxyManager for. :param proxy_kwargs: Extra keyword arguments used to configure the Proxy Manager. :returns: ProxyManager :rtype: urllib3.ProxyManager """ if proxy in self.proxy_manager: manager = self.proxy_manager[proxy] elif proxy.lower().startswith('socks'): username, password = get_auth_from_url(proxy) manager = self.proxy_manager[proxy] = SOCKSProxyManager( proxy, username=username, password=password, num_pools=self._pool_connections, maxsize=self._pool_maxsize, block=self._pool_block, **proxy_kwargs ) else: proxy_headers = self.proxy_headers(proxy) manager = self.proxy_manager[proxy] = proxy_from_url( proxy, proxy_headers=proxy_headers, num_pools=self._pool_connections, maxsize=self._pool_maxsize, block=self._pool_block, **proxy_kwargs) return manager def cert_verify(self, conn, url, verify, cert): """Verify a SSL certificate. This method should not be called from user code, and is only exposed for use when subclassing the :class:`HTTPAdapter `. :param conn: The urllib3 connection object associated with the cert. :param url: The requested URL. 
:param verify: Either a boolean, in which case it controls whether we verify the server's TLS certificate, or a string, in which case it must be a path to a CA bundle to use :param cert: The SSL certificate to verify. """ if url.lower().startswith('https') and verify: cert_loc = None # Allow self-specified cert location. if verify is not True: cert_loc = verify if not cert_loc: cert_loc = DEFAULT_CA_BUNDLE_PATH if not cert_loc or not os.path.exists(cert_loc): raise IOError("Could not find a suitable TLS CA certificate bundle, " "invalid path: {0}".format(cert_loc)) conn.cert_reqs = 'CERT_REQUIRED' if not os.path.isdir(cert_loc): conn.ca_certs = cert_loc else: conn.ca_cert_dir = cert_loc else: conn.cert_reqs = 'CERT_NONE' conn.ca_certs = None conn.ca_cert_dir = None if cert: if not isinstance(cert, basestring): conn.cert_file = cert[0] conn.key_file = cert[1] else: conn.cert_file = cert conn.key_file = None if conn.cert_file and not os.path.exists(conn.cert_file): raise IOError("Could not find the TLS certificate file, " "invalid path: {0}".format(conn.cert_file)) if conn.key_file and not os.path.exists(conn.key_file): raise IOError("Could not find the TLS key file, " "invalid path: {0}".format(conn.key_file)) def build_response(self, req, resp): """Builds a :class:`Response ` object from a urllib3 response. This should not be called from user code, and is only exposed for use when subclassing the :class:`HTTPAdapter ` :param req: The :class:`PreparedRequest ` used to generate the response. :param resp: The urllib3 response object. :rtype: requests.Response """ response = Response() # Fallback to None if there's no status_code, for whatever reason. response.status_code = getattr(resp, 'status', None) # Make headers case-insensitive. response.headers = CaseInsensitiveDict(getattr(resp, 'headers', {})) # Set encoding. response.encoding = get_encoding_from_headers(response.headers) response.raw = resp response.reason = response.raw.reason if isinstance(req.url, bytes): response.url = req.url.decode('utf-8') else: response.url = req.url # Add new cookies from the server. extract_cookies_to_jar(response.cookies, req, resp) # Give the Response some context. response.request = req response.connection = self return response def get_connection(self, url, proxies=None): """Returns a urllib3 connection for the given URL. This should not be called from user code, and is only exposed for use when subclassing the :class:`HTTPAdapter `. :param url: The URL to connect to. :param proxies: (optional) A Requests-style dictionary of proxies used on this request. :rtype: urllib3.ConnectionPool """ proxy = select_proxy(url, proxies) if proxy: proxy = prepend_scheme_if_needed(proxy, 'http') proxy_manager = self.proxy_manager_for(proxy) conn = proxy_manager.connection_from_url(url) else: # Only scheme should be lower case parsed = urlparse(url) url = parsed.geturl() conn = self.poolmanager.connection_from_url(url) return conn def close(self): """Disposes of any internal state. Currently, this closes the PoolManager and any active ProxyManager, which closes any pooled connections. """ self.poolmanager.clear() for proxy in self.proxy_manager.values(): proxy.clear() def request_url(self, request, proxies): """Obtain the url to use when making the final request. If the message is being sent through a HTTP proxy, the full URL has to be used. Otherwise, we should only use the path portion of the URL. This should not be called from user code, and is only exposed for use when subclassing the :class:`HTTPAdapter `. 
:param request: The :class:`PreparedRequest ` being sent. :param proxies: A dictionary of schemes or schemes and hosts to proxy URLs. :rtype: str """ proxy = select_proxy(request.url, proxies) scheme = urlparse(request.url).scheme is_proxied_http_request = (proxy and scheme != 'https') using_socks_proxy = False if proxy: proxy_scheme = urlparse(proxy).scheme.lower() using_socks_proxy = proxy_scheme.startswith('socks') url = request.path_url if is_proxied_http_request and not using_socks_proxy: url = urldefragauth(request.url) return url def add_headers(self, request, **kwargs): """Add any headers needed by the connection. As of v2.0 this does nothing by default, but is left for overriding by users that subclass the :class:`HTTPAdapter `. This should not be called from user code, and is only exposed for use when subclassing the :class:`HTTPAdapter `. :param request: The :class:`PreparedRequest ` to add headers to. :param kwargs: The keyword arguments from the call to send(). """ pass def proxy_headers(self, proxy): """Returns a dictionary of the headers to add to any request sent through a proxy. This works with urllib3 magic to ensure that they are correctly sent to the proxy, rather than in a tunnelled request if CONNECT is being used. This should not be called from user code, and is only exposed for use when subclassing the :class:`HTTPAdapter `. :param proxies: The url of the proxy being used for this request. :rtype: dict """ headers = {} username, password = get_auth_from_url(proxy) if username: headers['Proxy-Authorization'] = _basic_auth_str(username, password) return headers def send(self, request, stream=False, timeout=None, verify=True, cert=None, proxies=None): """Sends PreparedRequest object. Returns Response object. :param request: The :class:`PreparedRequest ` being sent. :param stream: (optional) Whether to stream the request content. :param timeout: (optional) How long to wait for the server to send data before giving up, as a float, or a :ref:`(connect timeout, read timeout) ` tuple. :type timeout: float or tuple or urllib3 Timeout object :param verify: (optional) Either a boolean, in which case it controls whether we verify the server's TLS certificate, or a string, in which case it must be a path to a CA bundle to use :param cert: (optional) Any user-provided SSL certificate to be trusted. :param proxies: (optional) The proxies dictionary to apply to the request. :rtype: requests.Response """ conn = self.get_connection(request.url, proxies) self.cert_verify(conn, request.url, verify, cert) url = self.request_url(request, proxies) self.add_headers(request) chunked = not (request.body is None or 'Content-Length' in request.headers) if isinstance(timeout, tuple): try: connect, read = timeout timeout = TimeoutSauce(connect=connect, read=read) except ValueError as e: # this may raise a string formatting error. err = ("Invalid timeout {0}. Pass a (connect, read) " "timeout tuple, or a single float to set " "both timeouts to the same value".format(timeout)) raise ValueError(err) elif isinstance(timeout, TimeoutSauce): pass else: timeout = TimeoutSauce(connect=timeout, read=timeout) try: if not chunked: resp = conn.urlopen( method=request.method, url=url, body=request.body, headers=request.headers, redirect=False, assert_same_host=False, preload_content=False, decode_content=False, retries=self.max_retries, timeout=timeout ) # Send the request. 
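            # The else-branch below is the chunked path: a body is present but
            # no Content-Length header was set, so the body is streamed over a
            # low-level connection using chunked transfer encoding.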
else: if hasattr(conn, 'proxy_pool'): conn = conn.proxy_pool low_conn = conn._get_conn(timeout=DEFAULT_POOL_TIMEOUT) try: low_conn.putrequest(request.method, url, skip_accept_encoding=True) for header, value in request.headers.items(): low_conn.putheader(header, value) low_conn.endheaders() for i in request.body: low_conn.send(hex(len(i))[2:].encode('utf-8')) low_conn.send(b'\r\n') low_conn.send(i) low_conn.send(b'\r\n') low_conn.send(b'0\r\n\r\n') # Receive the response from the server try: # For Python 2.7+ versions, use buffering of HTTP # responses r = low_conn.getresponse(buffering=True) except TypeError: # For compatibility with Python 2.6 versions and back r = low_conn.getresponse() resp = HTTPResponse.from_httplib( r, pool=conn, connection=low_conn, preload_content=False, decode_content=False ) except: # If we hit any problems here, clean up the connection. # Then, reraise so that we can handle the actual exception. low_conn.close() raise except (ProtocolError, socket.error) as err: raise ConnectionError(err, request=request) except MaxRetryError as e: if isinstance(e.reason, ConnectTimeoutError): # TODO: Remove this in 3.0.0: see #2811 if not isinstance(e.reason, NewConnectionError): raise ConnectTimeout(e, request=request) if isinstance(e.reason, ResponseError): raise RetryError(e, request=request) if isinstance(e.reason, _ProxyError): raise ProxyError(e, request=request) if isinstance(e.reason, _SSLError): # This branch is for urllib3 v1.22 and later. raise SSLError(e, request=request) raise ConnectionError(e, request=request) except ClosedPoolError as e: raise ConnectionError(e, request=request) except _ProxyError as e: raise ProxyError(e) except (_SSLError, _HTTPError) as e: if isinstance(e, _SSLError): # This branch is for urllib3 versions earlier than v1.22 raise SSLError(e, request=request) elif isinstance(e, ReadTimeoutError): raise ReadTimeout(e, request=request) else: raise return self.build_response(request, resp) PKZ)jj,site-packages/pip/_vendor/requests/certs.pycnu[ abc@s1dZddlmZedkr-eGHndS(sF requests.certs ~~~~~~~~~~~~~~ This module returns the preferred default CA certificate bundle. There is only one — the one from the certifi package. If you are packaging Requests, e.g., for a Linux distribution or a managed environment, you can change the definition of where() to return a separately packaged CA bundle. i(twheret__main__N(t__doc__tpip._vendor.certifiRt__name__(((s>/usr/lib/python2.7/site-packages/pip/_vendor/requests/certs.pyts PKZfƴ^/l/l+site-packages/pip/_vendor/requests/utils.pynu[# -*- coding: utf-8 -*- """ requests.utils ~~~~~~~~~~~~~~ This module provides utility functions that are used within Requests that are also useful for external consumption. """ import cgi import codecs import collections import contextlib import io import os import platform import re import socket import struct import warnings from .__version__ import __version__ from . 
import certs # to_native_string is unused here, but imported here for backwards compatibility from ._internal_utils import to_native_string from .compat import parse_http_list as _parse_list_header from .compat import ( quote, urlparse, bytes, str, OrderedDict, unquote, getproxies, proxy_bypass, urlunparse, basestring, integer_types, is_py3, proxy_bypass_environment, getproxies_environment) from .cookies import cookiejar_from_dict from .structures import CaseInsensitiveDict from .exceptions import ( InvalidURL, InvalidHeader, FileModeWarning, UnrewindableBodyError) NETRC_FILES = ('.netrc', '_netrc') DEFAULT_CA_BUNDLE_PATH = certs.where() DEFAULT_PORTS = {'http': 80, 'https': 443} if platform.system() == 'Windows': # provide a proxy_bypass version on Windows without DNS lookups def proxy_bypass_registry(host): if is_py3: import winreg else: import _winreg as winreg try: internetSettings = winreg.OpenKey(winreg.HKEY_CURRENT_USER, r'Software\Microsoft\Windows\CurrentVersion\Internet Settings') proxyEnable = winreg.QueryValueEx(internetSettings, 'ProxyEnable')[0] proxyOverride = winreg.QueryValueEx(internetSettings, 'ProxyOverride')[0] except OSError: return False if not proxyEnable or not proxyOverride: return False # make a check value list from the registry entry: replace the # '' string by the localhost entry and the corresponding # canonical entry. proxyOverride = proxyOverride.split(';') # now check if we match one of the registry values. for test in proxyOverride: if test == '': if '.' not in host: return True test = test.replace(".", r"\.") # mask dots test = test.replace("*", r".*") # change glob sequence test = test.replace("?", r".") # change glob char if re.match(test, host, re.I): return True return False def proxy_bypass(host): # noqa """Return True, if the host should be bypassed. Checks proxy settings gathered from the environment, if specified, or the registry. """ if getproxies_environment(): return proxy_bypass_environment(host) else: return proxy_bypass_registry(host) def dict_to_sequence(d): """Returns an internal sequence dictionary update.""" if hasattr(d, 'items'): d = d.items() return d def super_len(o): total_length = None current_position = 0 if hasattr(o, '__len__'): total_length = len(o) elif hasattr(o, 'len'): total_length = o.len elif hasattr(o, 'fileno'): try: fileno = o.fileno() except io.UnsupportedOperation: pass else: total_length = os.fstat(fileno).st_size # Having used fstat to determine the file length, we need to # confirm that this file was opened up in binary mode. if 'b' not in o.mode: warnings.warn(( "Requests has determined the content-length for this " "request using the binary size of the file: however, the " "file has been opened in text mode (i.e. without the 'b' " "flag in the mode). This may lead to an incorrect " "content-length. In Requests 3.0, support will be removed " "for files in text mode."), FileModeWarning ) if hasattr(o, 'tell'): try: current_position = o.tell() except (OSError, IOError): # This can happen in some weird situations, such as when the file # is actually a special file descriptor like stdin. In this # instance, we don't know what the length is, so set it to zero and # let requests chunk it instead. 
if total_length is not None: current_position = total_length else: if hasattr(o, 'seek') and total_length is None: # StringIO and BytesIO have seek but no useable fileno try: # seek to end of file o.seek(0, 2) total_length = o.tell() # seek back to current position to support # partially read file-like objects o.seek(current_position or 0) except (OSError, IOError): total_length = 0 if total_length is None: total_length = 0 return max(0, total_length - current_position) def get_netrc_auth(url, raise_errors=False): """Returns the Requests tuple auth for a given url from netrc.""" try: from netrc import netrc, NetrcParseError netrc_path = None for f in NETRC_FILES: try: loc = os.path.expanduser('~/{0}'.format(f)) except KeyError: # os.path.expanduser can fail when $HOME is undefined and # getpwuid fails. See http://bugs.python.org/issue20164 & # https://github.com/requests/requests/issues/1846 return if os.path.exists(loc): netrc_path = loc break # Abort early if there isn't one. if netrc_path is None: return ri = urlparse(url) # Strip port numbers from netloc. This weird `if...encode`` dance is # used for Python 3.2, which doesn't support unicode literals. splitstr = b':' if isinstance(url, str): splitstr = splitstr.decode('ascii') host = ri.netloc.split(splitstr)[0] try: _netrc = netrc(netrc_path).authenticators(host) if _netrc: # Return with login / password login_i = (0 if _netrc[0] else 1) return (_netrc[login_i], _netrc[2]) except (NetrcParseError, IOError): # If there was a parsing error or a permissions issue reading the file, # we'll just skip netrc auth unless explicitly asked to raise errors. if raise_errors: raise # AppEngine hackiness. except (ImportError, AttributeError): pass def guess_filename(obj): """Tries to guess the filename of the given object.""" name = getattr(obj, 'name', None) if (name and isinstance(name, basestring) and name[0] != '<' and name[-1] != '>'): return os.path.basename(name) def from_key_val_list(value): """Take an object and test to see if it can be represented as a dictionary. Unless it can not be represented as such, return an OrderedDict, e.g., :: >>> from_key_val_list([('key', 'val')]) OrderedDict([('key', 'val')]) >>> from_key_val_list('string') ValueError: need more than 1 value to unpack >>> from_key_val_list({'key': 'val'}) OrderedDict([('key', 'val')]) :rtype: OrderedDict """ if value is None: return None if isinstance(value, (str, bytes, bool, int)): raise ValueError('cannot encode objects that are not 2-tuples') return OrderedDict(value) def to_key_val_list(value): """Take an object and test to see if it can be represented as a dictionary. If it can be, return a list of tuples, e.g., :: >>> to_key_val_list([('key', 'val')]) [('key', 'val')] >>> to_key_val_list({'key': 'val'}) [('key', 'val')] >>> to_key_val_list('string') ValueError: cannot encode objects that are not 2-tuples. :rtype: list """ if value is None: return None if isinstance(value, (str, bytes, bool, int)): raise ValueError('cannot encode objects that are not 2-tuples') if isinstance(value, collections.Mapping): value = value.items() return list(value) # From mitsuhiko/werkzeug (used with permission). def parse_list_header(value): """Parse lists as described by RFC 2068 Section 2. In particular, parse comma-separated lists where the elements of the list may include quoted-strings. A quoted-string could contain a comma. A non-quoted string could have quotes in the middle. Quotes are removed automatically after parsing. 
It basically works like :func:`parse_set_header` just that items may appear multiple times and case sensitivity is preserved. The return value is a standard :class:`list`: >>> parse_list_header('token, "quoted value"') ['token', 'quoted value'] To create a header from the :class:`list` again, use the :func:`dump_header` function. :param value: a string with a list header. :return: :class:`list` :rtype: list """ result = [] for item in _parse_list_header(value): if item[:1] == item[-1:] == '"': item = unquote_header_value(item[1:-1]) result.append(item) return result # From mitsuhiko/werkzeug (used with permission). def parse_dict_header(value): """Parse lists of key, value pairs as described by RFC 2068 Section 2 and convert them into a python dict: >>> d = parse_dict_header('foo="is a fish", bar="as well"') >>> type(d) is dict True >>> sorted(d.items()) [('bar', 'as well'), ('foo', 'is a fish')] If there is no value for a key it will be `None`: >>> parse_dict_header('key_without_value') {'key_without_value': None} To create a header from the :class:`dict` again, use the :func:`dump_header` function. :param value: a string with a dict header. :return: :class:`dict` :rtype: dict """ result = {} for item in _parse_list_header(value): if '=' not in item: result[item] = None continue name, value = item.split('=', 1) if value[:1] == value[-1:] == '"': value = unquote_header_value(value[1:-1]) result[name] = value return result # From mitsuhiko/werkzeug (used with permission). def unquote_header_value(value, is_filename=False): r"""Unquotes a header value. (Reversal of :func:`quote_header_value`). This does not use the real unquoting but what browsers are actually using for quoting. :param value: the header value to unquote. :rtype: str """ if value and value[0] == value[-1] == '"': # this is not the real unquoting, but fixing this so that the # RFC is met will result in bugs with internet explorer and # probably some other browsers as well. IE for example is # uploading files with "C:\foo\bar.txt" as filename value = value[1:-1] # if this is a filename and the starting characters look like # a UNC path, then just return the value without quotes. Using the # replace sequence below on a UNC path has the effect of turning # the leading double slash into a single slash and then # _fix_ie_filename() doesn't work correctly. See #458. if not is_filename or value[:2] != '\\\\': return value.replace('\\\\', '\\').replace('\\"', '"') return value def dict_from_cookiejar(cj): """Returns a key/value dictionary from a CookieJar. :param cj: CookieJar object to extract cookies from. :rtype: dict """ cookie_dict = {} for cookie in cj: cookie_dict[cookie.name] = cookie.value return cookie_dict def add_dict_to_cookiejar(cj, cookie_dict): """Returns a CookieJar from a key/value dictionary. :param cj: CookieJar to insert cookies into. :param cookie_dict: Dict of key/values to insert into CookieJar. :rtype: CookieJar """ return cookiejar_from_dict(cookie_dict, cj) def get_encodings_from_content(content): """Returns encodings from given content string. :param content: bytestring to extract encodings from. """ warnings.warn(( 'In requests 3.0, get_encodings_from_content will be removed. For ' 'more information, please see the discussion on issue #2266. 
(This' ' warning should only appear once.)'), DeprecationWarning) charset_re = re.compile(r']', flags=re.I) pragma_re = re.compile(r']', flags=re.I) xml_re = re.compile(r'^<\?xml.*?encoding=["\']*(.+?)["\'>]') return (charset_re.findall(content) + pragma_re.findall(content) + xml_re.findall(content)) def get_encoding_from_headers(headers): """Returns encodings from given HTTP Header Dict. :param headers: dictionary to extract encoding from. :rtype: str """ content_type = headers.get('content-type') if not content_type: return None content_type, params = cgi.parse_header(content_type) if 'charset' in params: return params['charset'].strip("'\"") if 'text' in content_type: return 'ISO-8859-1' def stream_decode_response_unicode(iterator, r): """Stream decodes a iterator.""" if r.encoding is None: for item in iterator: yield item return decoder = codecs.getincrementaldecoder(r.encoding)(errors='replace') for chunk in iterator: rv = decoder.decode(chunk) if rv: yield rv rv = decoder.decode(b'', final=True) if rv: yield rv def iter_slices(string, slice_length): """Iterate over slices of a string.""" pos = 0 if slice_length is None or slice_length <= 0: slice_length = len(string) while pos < len(string): yield string[pos:pos + slice_length] pos += slice_length def get_unicode_from_response(r): """Returns the requested content back in unicode. :param r: Response object to get unicode content from. Tried: 1. charset from content-type 2. fall back and replace all unicode characters :rtype: str """ warnings.warn(( 'In requests 3.0, get_unicode_from_response will be removed. For ' 'more information, please see the discussion on issue #2266. (This' ' warning should only appear once.)'), DeprecationWarning) tried_encodings = [] # Try charset from content-type encoding = get_encoding_from_headers(r.headers) if encoding: try: return str(r.content, encoding) except UnicodeError: tried_encodings.append(encoding) # Fall back: try: return str(r.content, encoding, errors='replace') except TypeError: return r.content # The unreserved URI characters (RFC 3986) UNRESERVED_SET = frozenset( "ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz" + "0123456789-._~") def unquote_unreserved(uri): """Un-escape any percent-escape sequences in a URI that are unreserved characters. This leaves all reserved, illegal and non-ASCII bytes encoded. :rtype: str """ parts = uri.split('%') for i in range(1, len(parts)): h = parts[i][0:2] if len(h) == 2 and h.isalnum(): try: c = chr(int(h, 16)) except ValueError: raise InvalidURL("Invalid percent-escape sequence: '%s'" % h) if c in UNRESERVED_SET: parts[i] = c + parts[i][2:] else: parts[i] = '%' + parts[i] else: parts[i] = '%' + parts[i] return ''.join(parts) def requote_uri(uri): """Re-quote the given URI. This function passes the given URI through an unquote/quote cycle to ensure that it is fully and consistently quoted. :rtype: str """ safe_with_percent = "!#$%&'()*+,/:;=?@[]~" safe_without_percent = "!#$&'()*+,/:;=?@[]~" try: # Unquote only the unreserved characters # Then quote only illegal characters (do not quote reserved, # unreserved, or '%') return quote(unquote_unreserved(uri), safe=safe_with_percent) except InvalidURL: # We couldn't unquote the given URI, so let's try quoting it, but # there may be unquoted '%'s in the URI. We need to make sure they're # properly quoted so they do not cause issues elsewhere. 
return quote(uri, safe=safe_without_percent) def address_in_network(ip, net): """This function allows you to check if an IP belongs to a network subnet Example: returns True if ip = 192.168.1.1 and net = 192.168.1.0/24 returns False if ip = 192.168.1.1 and net = 192.168.100.0/24 :rtype: bool """ ipaddr = struct.unpack('=L', socket.inet_aton(ip))[0] netaddr, bits = net.split('/') netmask = struct.unpack('=L', socket.inet_aton(dotted_netmask(int(bits))))[0] network = struct.unpack('=L', socket.inet_aton(netaddr))[0] & netmask return (ipaddr & netmask) == (network & netmask) def dotted_netmask(mask): """Converts mask from /xx format to xxx.xxx.xxx.xxx Example: if mask is 24 function returns 255.255.255.0 :rtype: str """ bits = 0xffffffff ^ (1 << 32 - mask) - 1 return socket.inet_ntoa(struct.pack('>I', bits)) def is_ipv4_address(string_ip): """ :rtype: bool """ try: socket.inet_aton(string_ip) except socket.error: return False return True def is_valid_cidr(string_network): """ Very simple check of the cidr format in no_proxy variable. :rtype: bool """ if string_network.count('/') == 1: try: mask = int(string_network.split('/')[1]) except ValueError: return False if mask < 1 or mask > 32: return False try: socket.inet_aton(string_network.split('/')[0]) except socket.error: return False else: return False return True @contextlib.contextmanager def set_environ(env_name, value): """Set the environment variable 'env_name' to 'value' Save previous value, yield, and then restore the previous value stored in the environment variable 'env_name'. If 'value' is None, do nothing""" value_changed = value is not None if value_changed: old_value = os.environ.get(env_name) os.environ[env_name] = value try: yield finally: if value_changed: if old_value is None: del os.environ[env_name] else: os.environ[env_name] = old_value def should_bypass_proxies(url, no_proxy): """ Returns whether we should bypass proxies or not. :rtype: bool """ get_proxy = lambda k: os.environ.get(k) or os.environ.get(k.upper()) # First check whether no_proxy is defined. If it is, check that the URL # we're getting isn't in the no_proxy list. no_proxy_arg = no_proxy if no_proxy is None: no_proxy = get_proxy('no_proxy') netloc = urlparse(url).netloc if no_proxy: # We need to check whether we match here. We need to see if we match # the end of the netloc, both with and without the port. no_proxy = ( host for host in no_proxy.replace(' ', '').split(',') if host ) ip = netloc.split(':')[0] if is_ipv4_address(ip): for proxy_ip in no_proxy: if is_valid_cidr(proxy_ip): if address_in_network(ip, proxy_ip): return True elif ip == proxy_ip: # If no_proxy ip was defined in plain IP notation instead of cidr notation & # matches the IP of the index return True else: for host in no_proxy: if netloc.endswith(host) or netloc.split(':')[0].endswith(host): # The URL does match something in no_proxy, so we don't want # to apply the proxies on this URL. return True # If the system proxy settings indicate that this URL should be bypassed, # don't proxy. # The proxy_bypass function is incredibly buggy on OS X in early versions # of Python 2.6, so allow this call to fail. Only catch the specific # exceptions we've seen, though: this call failing in other ways can reveal # legitimate problems. with set_environ('no_proxy', no_proxy_arg): try: bypass = proxy_bypass(netloc) except (TypeError, socket.gaierror): bypass = False if bypass: return True return False def get_environ_proxies(url, no_proxy=None): """ Return a dict of environment proxies. 
:rtype: dict """ if should_bypass_proxies(url, no_proxy=no_proxy): return {} else: return getproxies() def select_proxy(url, proxies): """Select a proxy for the url, if applicable. :param url: The url being for the request :param proxies: A dictionary of schemes or schemes and hosts to proxy URLs """ proxies = proxies or {} urlparts = urlparse(url) if urlparts.hostname is None: return proxies.get(urlparts.scheme, proxies.get('all')) proxy_keys = [ urlparts.scheme + '://' + urlparts.hostname, urlparts.scheme, 'all://' + urlparts.hostname, 'all', ] proxy = None for proxy_key in proxy_keys: if proxy_key in proxies: proxy = proxies[proxy_key] break return proxy def default_user_agent(name="python-requests"): """ Return a string representing the default user agent. :rtype: str """ return '%s/%s' % (name, __version__) def default_headers(): """ :rtype: requests.structures.CaseInsensitiveDict """ return CaseInsensitiveDict({ 'User-Agent': default_user_agent(), 'Accept-Encoding': ', '.join(('gzip', 'deflate')), 'Accept': '*/*', 'Connection': 'keep-alive', }) def parse_header_links(value): """Return a dict of parsed link headers proxies. i.e. Link: ; rel=front; type="image/jpeg",; rel=back;type="image/jpeg" :rtype: list """ links = [] replace_chars = ' \'"' for val in re.split(', *<', value): try: url, params = val.split(';', 1) except ValueError: url, params = val, '' link = {'url': url.strip('<> \'"')} for param in params.split(';'): try: key, value = param.split('=') except ValueError: break link[key.strip(replace_chars)] = value.strip(replace_chars) links.append(link) return links # Null bytes; no need to recreate these on each call to guess_json_utf _null = '\x00'.encode('ascii') # encoding to ASCII for Python 3 _null2 = _null * 2 _null3 = _null * 3 def guess_json_utf(data): """ :rtype: str """ # JSON always starts with two ASCII characters, so detection is as # easy as counting the nulls and from their location and count # determine the encoding. Also detect a BOM, if present. sample = data[:4] if sample in (codecs.BOM_UTF32_LE, codecs.BOM_UTF32_BE): return 'utf-32' # BOM included if sample[:3] == codecs.BOM_UTF8: return 'utf-8-sig' # BOM included, MS style (discouraged) if sample[:2] in (codecs.BOM_UTF16_LE, codecs.BOM_UTF16_BE): return 'utf-16' # BOM included nullcount = sample.count(_null) if nullcount == 0: return 'utf-8' if nullcount == 2: if sample[::2] == _null2: # 1st and 3rd are null return 'utf-16-be' if sample[1::2] == _null2: # 2nd and 4th are null return 'utf-16-le' # Did not detect 2 valid UTF-16 ascii-range characters if nullcount == 3: if sample[:3] == _null3: return 'utf-32-be' if sample[1:] == _null3: return 'utf-32-le' # Did not detect a valid UTF-32 ascii-range character return None def prepend_scheme_if_needed(url, new_scheme): """Given a URL that may or may not have a scheme, prepend the given scheme. Does not replace a present scheme with the one provided as an argument. :rtype: str """ scheme, netloc, path, params, query, fragment = urlparse(url, new_scheme) # urlparse is a finicky beast, and sometimes decides that there isn't a # netloc present. Assume that it's being over-cautious, and switch netloc # and path if urlparse decided there was no netloc. if not netloc: netloc, path = path, netloc return urlunparse((scheme, netloc, path, params, query, fragment)) def get_auth_from_url(url): """Given a url with authentication components, extract them into a tuple of username,password. 
:rtype: (str,str) """ parsed = urlparse(url) try: auth = (unquote(parsed.username), unquote(parsed.password)) except (AttributeError, TypeError): auth = ('', '') return auth # Moved outside of function to avoid recompile every call _CLEAN_HEADER_REGEX_BYTE = re.compile(b'^\\S[^\\r\\n]*$|^$') _CLEAN_HEADER_REGEX_STR = re.compile(r'^\S[^\r\n]*$|^$') def check_header_validity(header): """Verifies that header value is a string which doesn't contain leading whitespace or return characters. This prevents unintended header injection. :param header: tuple, in the format (name, value). """ name, value = header if isinstance(value, bytes): pat = _CLEAN_HEADER_REGEX_BYTE else: pat = _CLEAN_HEADER_REGEX_STR try: if not pat.match(value): raise InvalidHeader("Invalid return character or leading space in header: %s" % name) except TypeError: raise InvalidHeader("Value for header {%s: %s} must be of type str or " "bytes, not %s" % (name, value, type(value))) def urldefragauth(url): """ Given a url remove the fragment and the authentication part. :rtype: str """ scheme, netloc, path, params, query, fragment = urlparse(url) # see func:`prepend_scheme_if_needed` if not netloc: netloc, path = path, netloc netloc = netloc.rsplit('@', 1)[-1] return urlunparse((scheme, netloc, path, params, query, '')) def rewind_body(prepared_request): """Move file pointer back to its recorded starting position so it can be read again on redirect. """ body_seek = getattr(prepared_request.body, 'seek', None) if body_seek is not None and isinstance(prepared_request._body_position, integer_types): try: body_seek(prepared_request._body_position) except (IOError, OSError): raise UnrewindableBodyError("An error occurred when rewinding request " "body for redirect.") else: raise UnrewindableBodyError("Unable to rewind request body for redirect.") PKZ!{ &&%site-packages/pip/_vendor/retrying.pynu[## Copyright 2013-2014 Ray Holder ## ## Licensed under the Apache License, Version 2.0 (the "License"); ## you may not use this file except in compliance with the License. ## You may obtain a copy of the License at ## ## http://www.apache.org/licenses/LICENSE-2.0 ## ## Unless required by applicable law or agreed to in writing, software ## distributed under the License is distributed on an "AS IS" BASIS, ## WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. ## See the License for the specific language governing permissions and ## limitations under the License. import random from pip._vendor import six import sys import time import traceback # sys.maxint / 2, since Python 3.2 doesn't have a sys.maxint... 
MAX_WAIT = 1073741823 def retry(*dargs, **dkw): """ Decorator function that instantiates the Retrying object @param *dargs: positional arguments passed to Retrying object @param **dkw: keyword arguments passed to the Retrying object """ # support both @retry and @retry() as valid syntax if len(dargs) == 1 and callable(dargs[0]): def wrap_simple(f): @six.wraps(f) def wrapped_f(*args, **kw): return Retrying().call(f, *args, **kw) return wrapped_f return wrap_simple(dargs[0]) else: def wrap(f): @six.wraps(f) def wrapped_f(*args, **kw): return Retrying(*dargs, **dkw).call(f, *args, **kw) return wrapped_f return wrap class Retrying(object): def __init__(self, stop=None, wait=None, stop_max_attempt_number=None, stop_max_delay=None, wait_fixed=None, wait_random_min=None, wait_random_max=None, wait_incrementing_start=None, wait_incrementing_increment=None, wait_exponential_multiplier=None, wait_exponential_max=None, retry_on_exception=None, retry_on_result=None, wrap_exception=False, stop_func=None, wait_func=None, wait_jitter_max=None): self._stop_max_attempt_number = 5 if stop_max_attempt_number is None else stop_max_attempt_number self._stop_max_delay = 100 if stop_max_delay is None else stop_max_delay self._wait_fixed = 1000 if wait_fixed is None else wait_fixed self._wait_random_min = 0 if wait_random_min is None else wait_random_min self._wait_random_max = 1000 if wait_random_max is None else wait_random_max self._wait_incrementing_start = 0 if wait_incrementing_start is None else wait_incrementing_start self._wait_incrementing_increment = 100 if wait_incrementing_increment is None else wait_incrementing_increment self._wait_exponential_multiplier = 1 if wait_exponential_multiplier is None else wait_exponential_multiplier self._wait_exponential_max = MAX_WAIT if wait_exponential_max is None else wait_exponential_max self._wait_jitter_max = 0 if wait_jitter_max is None else wait_jitter_max # TODO add chaining of stop behaviors # stop behavior stop_funcs = [] if stop_max_attempt_number is not None: stop_funcs.append(self.stop_after_attempt) if stop_max_delay is not None: stop_funcs.append(self.stop_after_delay) if stop_func is not None: self.stop = stop_func elif stop is None: self.stop = lambda attempts, delay: any(f(attempts, delay) for f in stop_funcs) else: self.stop = getattr(self, stop) # TODO add chaining of wait behaviors # wait behavior wait_funcs = [lambda *args, **kwargs: 0] if wait_fixed is not None: wait_funcs.append(self.fixed_sleep) if wait_random_min is not None or wait_random_max is not None: wait_funcs.append(self.random_sleep) if wait_incrementing_start is not None or wait_incrementing_increment is not None: wait_funcs.append(self.incrementing_sleep) if wait_exponential_multiplier is not None or wait_exponential_max is not None: wait_funcs.append(self.exponential_sleep) if wait_func is not None: self.wait = wait_func elif wait is None: self.wait = lambda attempts, delay: max(f(attempts, delay) for f in wait_funcs) else: self.wait = getattr(self, wait) # retry on exception filter if retry_on_exception is None: self._retry_on_exception = self.always_reject else: self._retry_on_exception = retry_on_exception # TODO simplify retrying by Exception types # retry on result filter if retry_on_result is None: self._retry_on_result = self.never_reject else: self._retry_on_result = retry_on_result self._wrap_exception = wrap_exception def stop_after_attempt(self, previous_attempt_number, delay_since_first_attempt_ms): """Stop after the previous attempt >= stop_max_attempt_number.""" 
return previous_attempt_number >= self._stop_max_attempt_number def stop_after_delay(self, previous_attempt_number, delay_since_first_attempt_ms): """Stop after the time from the first attempt >= stop_max_delay.""" return delay_since_first_attempt_ms >= self._stop_max_delay def no_sleep(self, previous_attempt_number, delay_since_first_attempt_ms): """Don't sleep at all before retrying.""" return 0 def fixed_sleep(self, previous_attempt_number, delay_since_first_attempt_ms): """Sleep a fixed amount of time between each retry.""" return self._wait_fixed def random_sleep(self, previous_attempt_number, delay_since_first_attempt_ms): """Sleep a random amount of time between wait_random_min and wait_random_max""" return random.randint(self._wait_random_min, self._wait_random_max) def incrementing_sleep(self, previous_attempt_number, delay_since_first_attempt_ms): """ Sleep an incremental amount of time after each attempt, starting at wait_incrementing_start and incrementing by wait_incrementing_increment """ result = self._wait_incrementing_start + (self._wait_incrementing_increment * (previous_attempt_number - 1)) if result < 0: result = 0 return result def exponential_sleep(self, previous_attempt_number, delay_since_first_attempt_ms): exp = 2 ** previous_attempt_number result = self._wait_exponential_multiplier * exp if result > self._wait_exponential_max: result = self._wait_exponential_max if result < 0: result = 0 return result def never_reject(self, result): return False def always_reject(self, result): return True def should_reject(self, attempt): reject = False if attempt.has_exception: reject |= self._retry_on_exception(attempt.value[1]) else: reject |= self._retry_on_result(attempt.value) return reject def call(self, fn, *args, **kwargs): start_time = int(round(time.time() * 1000)) attempt_number = 1 while True: try: attempt = Attempt(fn(*args, **kwargs), attempt_number, False) except: tb = sys.exc_info() attempt = Attempt(tb, attempt_number, True) if not self.should_reject(attempt): return attempt.get(self._wrap_exception) delay_since_first_attempt_ms = int(round(time.time() * 1000)) - start_time if self.stop(attempt_number, delay_since_first_attempt_ms): if not self._wrap_exception and attempt.has_exception: # get() on an attempt with an exception should cause it to be raised, but raise just in case raise attempt.get() else: raise RetryError(attempt) else: sleep = self.wait(attempt_number, delay_since_first_attempt_ms) if self._wait_jitter_max: jitter = random.random() * self._wait_jitter_max sleep = sleep + max(0, jitter) time.sleep(sleep / 1000.0) attempt_number += 1 class Attempt(object): """ An Attempt encapsulates a call to a target function that may end as a normal return value from the function or an Exception depending on what occurred during the execution. """ def __init__(self, value, attempt_number, has_exception): self.value = value self.attempt_number = attempt_number self.has_exception = has_exception def get(self, wrap_exception=False): """ Return the return value of this Attempt instance or raise an Exception. If wrap_exception is true, this Attempt is wrapped inside of a RetryError before being raised. 
""" if self.has_exception: if wrap_exception: raise RetryError(self) else: six.reraise(self.value[0], self.value[1], self.value[2]) else: return self.value def __repr__(self): if self.has_exception: return "Attempts: {0}, Error:\n{1}".format(self.attempt_number, "".join(traceback.format_tb(self.value[2]))) else: return "Attempts: {0}, Value: {1}".format(self.attempt_number, self.value) class RetryError(Exception): """ A RetryError encapsulates the last Attempt instance right before giving up. """ def __init__(self, last_attempt): self.last_attempt = last_attempt def __str__(self): return "RetryError[{0}]".format(self.last_attempt) PKZ6/6/'site-packages/pip/_vendor/ipaddress.pyonu[ abc@s dZddlmZddlZddlZdZefZyeefZWne k r`nXy e Z Wne k re Z nXdddkrdZ n dZ y ejZWnek rd ZnXd Zeed rd Zn d ZddZdefdYZdZdZdefdYZdefdYZdZedZdZdZ dZ!dZ"dZ#dZ$d Z%d!Z&d"Z'd#Z(d$efd%YZ)d&e)fd'YZ*d(e)fd)YZ+d*efd+YZ,d,e,e*fd-YZ-d.e-fd/YZ.d0e,e+fd1YZ/d2efd3YZ0e0e-_1d4efd5YZ2d6e2e*fd7YZ3d8e3fd9YZ4d:e2e+fd;YZ5d<efd=YZ6e6e3_1dS(>uA fast, lightweight IPv4/IPv6 manipulation library in Python. This library is used to create/poke/manipulate IPv4 and IPv6 addresses and networks. i(tunicode_literalsNu1.0.17sicCs|S(N((tbyt((s9/usr/lib/python2.7/site-packages/pip/_vendor/ipaddress.pyt_compat_bytes_to_byte_valsscCs'g|D]}tjd|d^qS(Ns!Bi(tstructtunpack(Rtb((s9/usr/lib/python2.7/site-packages/pip/_vendor/ipaddress.pyR"scCs)d}x|D]}|d>|}q W|S(Nii((tbytvalst endianesstrestbv((s9/usr/lib/python2.7/site-packages/pip/_vendor/ipaddress.pyt_compat_int_from_byte_vals's cCs|dkrF|dks$|d kr6tjdntjd|S|dkr|dksj|dkr|tjd ntjd |d ?|d @StdS(Niiii u(integer out of range for 'I' format codes!Iiiu)integer out of range for 'QQ' format codes!QQi@lIl (RterrortpacktNotImplementedError(tintvaltlengthR((s9/usr/lib/python2.7/site-packages/pip/_vendor/ipaddress.pyt_compat_to_bytes0s  u bit_lengthcCs |jS(N(t bit_length(ti((s9/usr/lib/python2.7/site-packages/pip/_vendor/ipaddress.pyt_compat_bit_length?scCs/x(tjD]}||?dkr |Sq WdS(Ni(t itertoolstcount(RR((s9/usr/lib/python2.7/site-packages/pip/_vendor/ipaddress.pyRBsiccs,|}x||kr'|V||7}q WdS(N((tstarttendtstepR((s9/usr/lib/python2.7/site-packages/pip/_vendor/ipaddress.pyt _compat_rangeHst_TotalOrderingMixincBsDeZdZdZdZdZdZdZdZRS(cCs tdS(N(R (tselftother((s9/usr/lib/python2.7/site-packages/pip/_vendor/ipaddress.pyt__eq__WscCs$|j|}|tkrtS| S(N(RtNotImplemented(RRtequal((s9/usr/lib/python2.7/site-packages/pip/_vendor/ipaddress.pyt__ne__Zs cCs tdS(N(R (RR((s9/usr/lib/python2.7/site-packages/pip/_vendor/ipaddress.pyt__lt__`scCs3|j|}|tks"| r/|j|S|S(N(R!RR(RRtless((s9/usr/lib/python2.7/site-packages/pip/_vendor/ipaddress.pyt__le__cs cCsI|j|}|tkrtS|j|}|tkr>tS|pG| S(N(R!RR(RRR"R((s9/usr/lib/python2.7/site-packages/pip/_vendor/ipaddress.pyt__gt__is  cCs$|j|}|tkrtS| S(N(R!R(RRR"((s9/usr/lib/python2.7/site-packages/pip/_vendor/ipaddress.pyt__ge__rs (( t__name__t __module__t __slots__RR R!R#R$R%(((s9/usr/lib/python2.7/site-packages/pip/_vendor/ipaddress.pyRPs     i itAddressValueErrorcBseZdZRS(u%A Value Error related to the address.(R&R't__doc__(((s9/usr/lib/python2.7/site-packages/pip/_vendor/ipaddress.pyR)}stNetmaskValueErrorcBseZdZRS(u%A Value Error related to the netmask.(R&R'R*(((s9/usr/lib/python2.7/site-packages/pip/_vendor/ipaddress.pyR+scCsyt|SWnttfk r'nXyt|SWnttfk rOnXt|trrtd|ntd|dS(uTake an IP string/int and return an object of the correct type. Args: address: A string or integer, the IP address. Either IPv4 or IPv6 addresses may be supplied; integers less than 2**32 will be considered to be IPv4 by default. 
PK site-packages/pip/_vendor/ipaddress.pyo: compiled bytecode member omitted; only docstrings survive the dump. It is the vendored ipaddress backport providing ip_address(), ip_network(), ip_interface(), the IPv4Address/IPv6Address, IPv4Network/IPv6Network and IPv4Interface/IPv6Interface classes, plus helpers such as summarize_address_range() and collapse_addresses().
PK site-packages/pip/_vendor/chardet/version.pyo: compiled bytecode member omitted; it only records __version__ = "3.0.4" and VERSION = __version__.split('.').
PK site-packages/pip/_vendor/chardet/hebrewprober.pyo: compiled bytecode member omitted; it holds the HebrewProber class, which decides between logical Hebrew ("windows-1255") and visual Hebrew ("ISO-8859-8") using final-letter scoring.
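# Illustrative sketch of the ipaddress API summarized in the placeholder above, assuming
# the vendored backport mirrors the standard-library module; note that on Python 2 it
# expects unicode text (u'...') rather than byte strings for string inputs.
from pip._vendor import ipaddress

net = ipaddress.ip_network(u'192.0.2.0/24')
addr = ipaddress.ip_address(u'192.0.2.1')
assert addr in net                                    # address/network containment test
assert net.netmask == ipaddress.ip_address(u'255.255.255.0')
quarters = list(net.subnets(new_prefix=26))           # split the /24 into /26 subnets
assert len(quarters) == 4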
;N(t charsetproberRtenumsRR(((sD/usr/lib/python2.7/site-packages/pip/_vendor/chardet/hebrewprober.pytscPKZ~1site-packages/pip/_vendor/chardet/gb2312prober.pynu[######################## BEGIN LICENSE BLOCK ######################## # The Original Code is mozilla.org code. # # The Initial Developer of the Original Code is # Netscape Communications Corporation. # Portions created by the Initial Developer are Copyright (C) 1998 # the Initial Developer. All Rights Reserved. # # Contributor(s): # Mark Pilgrim - port to Python # # This library is free software; you can redistribute it and/or # modify it under the terms of the GNU Lesser General Public # License as published by the Free Software Foundation; either # version 2.1 of the License, or (at your option) any later version. # # This library is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU # Lesser General Public License for more details. # # You should have received a copy of the GNU Lesser General Public # License along with this library; if not, write to the Free Software # Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA # 02110-1301 USA ######################### END LICENSE BLOCK ######################### from .mbcharsetprober import MultiByteCharSetProber from .codingstatemachine import CodingStateMachine from .chardistribution import GB2312DistributionAnalysis from .mbcssm import GB2312_SM_MODEL class GB2312Prober(MultiByteCharSetProber): def __init__(self): super(GB2312Prober, self).__init__() self.coding_sm = CodingStateMachine(GB2312_SM_MODEL) self.distribution_analyzer = GB2312DistributionAnalysis() self.reset() @property def charset_name(self): return "GB2312" @property def language(self): return "Chinese" PKZo銯-site-packages/pip/_vendor/chardet/version.pycnu[ abc@sdZdZejdZdS(s This module exists only to simplify retrieving the version number of chardet from within setup.py and from chardet subpackages. 
:author: Dan Blanchard (dan.blanchard@gmail.com) s3.0.4t.N(t__doc__t __version__tsplittVERSION(((s?/usr/lib/python2.7/site-packages/pip/_vendor/chardet/version.pytsPKZ{7bb,site-packages/pip/_vendor/chardet/jpcntx.pycnu[ abc@sRdddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddfSdddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddfSdddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddfSdddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddfSdddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddfSdddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddfSdddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddfSdddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddfSdddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddfSdddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddfSdddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddfSdddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddfSdddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddfSdddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddfSdddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddfSdddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddfSdddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddfSdddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddfSdddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddfSdddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddfSdddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddfSdddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddfSdddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddfSdddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddfSdddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddfSdddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddfSdddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddfSdddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddfSdddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddfSdddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddfSdddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddfSdddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddfSdddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddfSdddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddfSdddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddfSdddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddfSdddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddfSdddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddfSdddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddfSdddddddddddddd
dddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddfSdddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddfSdddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddfSdddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddfSdddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddfSdddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddfSdddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddfSdddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddfSdddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddfSdddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddfSdddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddfSdddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddfSdddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddfSdddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddfSdddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddfSdddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddfSdddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddfSdddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddfSdddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddfSdddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddfSdddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddfSdddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddfSdddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddfSdddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddfSdddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddfSdddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddfSdddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddfSdddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddfSdddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddfSdddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddfSdddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddfSdddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddfSdddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddfSdddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddfSdddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddfSdddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddfSdddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddfSdddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddfSdddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddfSdddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddfSdddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddfSdddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddf
SdddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddfSdddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddfSfSZdefdYZdefd YZd efd YZd S( iiiiiitJapaneseContextAnalysiscBs\eZdZdZdZdZdZdZdZdZ dZ d Z d Z RS( iiidiicCs;d|_d|_d|_d|_d|_|jdS(N(tNonet _total_relt _rel_samplet_need_to_skip_char_numt_last_char_ordert_donetreset(tself((s>/usr/lib/python2.7/site-packages/pip/_vendor/chardet/jpcntx.pyt__init__{s      cCs;d|_dg|j|_d|_d|_t|_dS(Nii(RtNUM_OF_CATEGORYRRRtFalseR(R((s>/usr/lib/python2.7/site-packages/pip/_vendor/chardet/jpcntx.pyRs    cCs|jr dS|j}x||kr|j|||d!\}}||7}||krt|||_d|_q|dkr|jdkr|jd7_|j|jkrt|_Pn|jt|j|cd7/usr/lib/python2.7/site-packages/pip/_vendor/chardet/jpcntx.pytfeeds         !cCs|j|jkS(N(RtENOUGH_REL_THRESHOLD(R((s>/usr/lib/python2.7/site-packages/pip/_vendor/chardet/jpcntx.pytgot_enough_datascCs6|j|jkr+|j|jd|jS|jSdS(Ni(RtMINIMUM_DATA_THRESHOLDRt DONT_KNOW(R((s>/usr/lib/python2.7/site-packages/pip/_vendor/chardet/jpcntx.pytget_confidencescCsdS(Nii(ii((RR((s>/usr/lib/python2.7/site-packages/pip/_vendor/chardet/jpcntx.pyR s( t__name__t __module__R RRR RR RRRRR (((s>/usr/lib/python2.7/site-packages/pip/_vendor/chardet/jpcntx.pyRts    tSJISContextAnalysiscBs)eZdZedZdZRS(cCs tt|jd|_dS(Nt SHIFT_JIS(tsuperRR t _charset_name(R((s>/usr/lib/python2.7/site-packages/pip/_vendor/chardet/jpcntx.pyR scCs|jS(N(R (R((s>/usr/lib/python2.7/site-packages/pip/_vendor/chardet/jpcntx.pyt charset_namescCs|s dS|d}d|ko+dknsLd|koGdknrd}|d kszd |koudknrd |_qnd}t|dkr|d}|d krd|kod knr|d|fSnd|fS(NiiiiiiiiiitCP932ii(ii(R tlen(RRt first_charRt second_char((s>/usr/lib/python2.7/site-packages/pip/_vendor/chardet/jpcntx.pyR s 8( ((RRR tpropertyR!R (((s>/usr/lib/python2.7/site-packages/pip/_vendor/chardet/jpcntx.pyRs tEUCJPContextAnalysiscBseZdZRS(cCs|s d S|d}|dks<d|ko7dknrEd}n|dkrZd }nd}t|dkr|d}|d krd|kod knr|d|fSnd|fS( Niiiiiiiiiii(ii(R#(RRR$RR%((s>/usr/lib/python2.7/site-packages/pip/_vendor/chardet/jpcntx.pyR s (    ((RRR (((s>/usr/lib/python2.7/site-packages/pip/_vendor/chardet/jpcntx.pyR'sN(RtobjectRRR'(((s>/usr/lib/python2.7/site-packages/pip/_vendor/chardet/jpcntx.pytsCPKZ/U|?`?`4site-packages/pip/_vendor/chardet/langgreekmodel.pyonu[ abc@svdZdZdZied6ed6dd6ed6dd6dd6Zied6ed6dd6ed6dd6dd6ZdS(iiiiiRidihi^ibieitifioiiui\iXiqiUiOiviiiSiCiriwi_icimiiHiFiPiQi<i`i]iYiDixiaiMiViEi7iNisiAiBi:iLijigiWikipiiZiJiii=i$i.iGiIi6ili{inii3i+i)i"i[i(i4i/i,i5i&i1i;i'i#i0ii%i!i-i8i2iTi9iyiiiii|iiiiii i iii ii iiii iiiii iii*ii@iKiiiitchar_to_order_maptprecedence_matrixgs?ttypical_positive_ratiotkeep_english_letters ISO-8859-7t charset_nametGreektlanguages windows-1253N(iiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiRidihi^ibieitifioiiui\iXiqiUiOiviiiSiCiriwi_icimiiiiiiiiHiFiPiQi<i`i]iYiDixiaiMiViEi7iNisiAiBi:iLijigiWikipiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiZiiiiiiiiiiiJiiiiiiiii=i$i.iGiIii6iili{inii3i+i)i"i[i(i4i/i,i5i&i1i;i'i#i0ii%i!i-i8i2iTi9ixiyiiiii|iiiiii i iii ii iiii iiiii iii*ii@iKiiii(iiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiRidihi^ibieitifioiiui\iXiqiUiOiviiiSiCiriwi_icimiiiiiiiiHiFiPiQi<i`i]iYiDixiaiMiViEi7iNisiAiBi:iLijigiWikipiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiii=iiiiiiiiiiiJiiiiiiiiii$i.iGiIii6iili{inii3i+i)i"i[i(i4i/i,i5i&i1i;i'i#i0ii%i!i-i8i2iTi9ixiyiiiii|iiiiii i iii ii iiii iiiii 
iii*ii@iKiiii(iiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiii
iiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiii(tLatin7_char_to_order_maptwin1253_char_to_order_maptGreekLangModeltFalsetLatin7GreekModeltWin1253GreekModel(((sF/usr/lib/python2.7/site-packages/pip/_vendor/chardet/langgreekmodel.pyt#sZ  PKZ"?,,2site-packages/pip/_vendor/chardet/langthaimodel.pynu[######################## BEGIN LICENSE BLOCK ######################## # The Original Code is Mozilla Communicator client code. # # The Initial Developer of the Original Code is # Netscape Communications Corporation. # Portions created by the Initial Developer are Copyright (C) 1998 # the Initial Developer. All Rights Reserved. # # Contributor(s): # Mark Pilgrim - port to Python # # This library is free software; you can redistribute it and/or # modify it under the terms of the GNU Lesser General Public # License as published by the Free Software Foundation; either # version 2.1 of the License, or (at your option) any later version. # # This library is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU # Lesser General Public License for more details. # # You should have received a copy of the GNU Lesser General Public # License along with this library; if not, write to the Free Software # Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA # 02110-1301 USA ######################### END LICENSE BLOCK ######################### # 255: Control characters that usually does not exist in any text # 254: Carriage/Return # 253: symbol (punctuation) that does not belong to word # 252: 0 - 9 # The following result for thai was collected from a limited sample (1M). 
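# ---------------------------------------------------------------------------
# How a model like this is used (an added sketch for illustration, not part of
# the original file): each byte is first translated through char_to_order_map
# to a frequency "order", and the flattened 64x64 precedence_matrix then
# buckets every ordered pair of frequent characters.  The reading "bucket 3 =
# the most frequent sequences" is inferred from the model-table comments
# below; the real consumer of these dicts is chardet's SingleByteCharSetProber,
# which this sketch only approximates.

SAMPLE_SIZE = 64   # only the 64 most frequent characters take part in 2-grams

def _score_against_model(byte_str, model):
    order_map = model['char_to_order_map']
    matrix = model['precedence_matrix']        # flattened SAMPLE_SIZE x SAMPLE_SIZE
    seqs = likely = 0
    last_order = 255
    for byte in bytearray(byte_str):
        order = order_map[byte]                # 255/254/253/252 = control/CR/symbol/digit
        if order < SAMPLE_SIZE and last_order < SAMPLE_SIZE:
            seqs += 1
            if matrix[last_order * SAMPLE_SIZE + order] == 3:
                likely += 1                    # pair falls in the most frequent bucket
        last_order = order
    return likely / float(seqs) if seqs else 0.0

# Genuine TIS-620 Thai text should score near the model's typical_positive_ratio
# (0.926386 below); bytes in an unrelated encoding score far lower.
# ---------------------------------------------------------------------------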
# Character Mapping Table: TIS620CharToOrderMap = ( 255,255,255,255,255,255,255,255,255,255,254,255,255,254,255,255, # 00 255,255,255,255,255,255,255,255,255,255,255,255,255,255,255,255, # 10 253,253,253,253,253,253,253,253,253,253,253,253,253,253,253,253, # 20 252,252,252,252,252,252,252,252,252,252,253,253,253,253,253,253, # 30 253,182,106,107,100,183,184,185,101, 94,186,187,108,109,110,111, # 40 188,189,190, 89, 95,112,113,191,192,193,194,253,253,253,253,253, # 50 253, 64, 72, 73,114, 74,115,116,102, 81,201,117, 90,103, 78, 82, # 60 96,202, 91, 79, 84,104,105, 97, 98, 92,203,253,253,253,253,253, # 70 209,210,211,212,213, 88,214,215,216,217,218,219,220,118,221,222, 223,224, 99, 85, 83,225,226,227,228,229,230,231,232,233,234,235, 236, 5, 30,237, 24,238, 75, 8, 26, 52, 34, 51,119, 47, 58, 57, 49, 53, 55, 43, 20, 19, 44, 14, 48, 3, 17, 25, 39, 62, 31, 54, 45, 9, 16, 2, 61, 15,239, 12, 42, 46, 18, 21, 76, 4, 66, 63, 22, 10, 1, 36, 23, 13, 40, 27, 32, 35, 86,240,241,242,243,244, 11, 28, 41, 29, 33,245, 50, 37, 6, 7, 67, 77, 38, 93,246,247, 68, 56, 59, 65, 69, 60, 70, 80, 71, 87,248,249,250,251,252,253, ) # Model Table: # total sequences: 100% # first 512 sequences: 92.6386% # first 1024 sequences:7.3177% # rest sequences: 1.0230% # negative sequences: 0.0436% ThaiLangModel = ( 0,1,3,3,3,3,0,0,3,3,0,3,3,0,3,3,3,3,3,3,3,3,0,0,3,3,3,0,3,3,3,3, 0,3,3,0,0,0,1,3,0,3,3,2,3,3,0,1,2,3,3,3,3,0,2,0,2,0,0,3,2,1,2,2, 3,0,3,3,2,3,0,0,3,3,0,3,3,0,3,3,3,3,3,3,3,3,3,0,3,2,3,0,2,2,2,3, 0,2,3,0,0,0,0,1,0,1,2,3,1,1,3,2,2,0,1,1,0,0,1,0,0,0,0,0,0,0,1,1, 3,3,3,2,3,3,3,3,3,3,3,3,3,3,3,2,2,2,2,2,2,2,3,3,2,3,2,3,3,2,2,2, 3,1,2,3,0,3,3,2,2,1,2,3,3,1,2,0,1,3,0,1,0,0,1,0,0,0,0,0,0,0,1,1, 3,3,2,2,3,3,3,3,1,2,3,3,3,3,3,2,2,2,2,3,3,2,2,3,3,2,2,3,2,3,2,2, 3,3,1,2,3,1,2,2,3,3,1,0,2,1,0,0,3,1,2,1,0,0,1,0,0,0,0,0,0,1,0,1, 3,3,3,3,3,3,2,2,3,3,3,3,2,3,2,2,3,3,2,2,3,2,2,2,2,1,1,3,1,2,1,1, 3,2,1,0,2,1,0,1,0,1,1,0,1,1,0,0,1,0,1,0,0,0,1,0,0,0,0,0,0,0,0,0, 3,3,3,2,3,2,3,3,2,2,3,2,3,3,2,3,1,1,2,3,2,2,2,3,2,2,2,2,2,1,2,1, 2,2,1,1,3,3,2,1,0,1,2,2,0,1,3,0,0,0,1,1,0,0,0,0,0,2,3,0,0,2,1,1, 3,3,2,3,3,2,0,0,3,3,0,3,3,0,2,2,3,1,2,2,1,1,1,0,2,2,2,0,2,2,1,1, 0,2,1,0,2,0,0,2,0,1,0,0,1,0,0,0,1,1,1,1,0,0,0,0,0,0,0,0,0,0,1,0, 3,3,2,3,3,2,0,0,3,3,0,2,3,0,2,1,2,2,2,2,1,2,0,0,2,2,2,0,2,2,1,1, 0,2,1,0,2,0,0,2,0,1,1,0,1,0,0,0,0,0,0,1,0,0,1,0,0,0,0,0,0,0,0,0, 3,3,2,3,2,3,2,0,2,2,1,3,2,1,3,2,1,2,3,2,2,3,0,2,3,2,2,1,2,2,2,2, 1,2,2,0,0,0,0,2,0,1,2,0,1,1,1,0,1,0,3,1,1,0,0,0,0,0,0,0,0,0,1,0, 3,3,2,3,3,2,3,2,2,2,3,2,2,3,2,2,1,2,3,2,2,3,1,3,2,2,2,3,2,2,2,3, 3,2,1,3,0,1,1,1,0,2,1,1,1,1,1,0,1,0,1,1,0,0,0,0,0,0,0,0,0,2,0,0, 1,0,0,3,0,3,3,3,3,3,0,0,3,0,2,2,3,3,3,3,3,0,0,0,1,1,3,0,0,0,0,2, 0,0,1,0,0,0,0,0,0,0,2,3,0,0,0,3,0,2,0,0,0,0,0,3,0,0,0,0,0,0,0,0, 2,0,3,3,3,3,0,0,2,3,0,0,3,0,3,3,2,3,3,3,3,3,0,0,3,3,3,0,0,0,3,3, 0,0,3,0,0,0,0,2,0,0,2,1,1,3,0,0,1,0,0,2,3,0,1,0,0,0,0,0,0,0,1,0, 3,3,3,3,2,3,3,3,3,3,3,3,1,2,1,3,3,2,2,1,2,2,2,3,1,1,2,0,2,1,2,1, 2,2,1,0,0,0,1,1,0,1,0,1,1,0,0,0,0,0,1,1,0,0,1,0,0,0,0,0,0,0,0,0, 3,0,2,1,2,3,3,3,0,2,0,2,2,0,2,1,3,2,2,1,2,1,0,0,2,2,1,0,2,1,2,2, 0,1,1,0,0,0,0,1,0,1,1,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0, 3,3,3,3,2,1,3,3,1,1,3,0,2,3,1,1,3,2,1,1,2,0,2,2,3,2,1,1,1,1,1,2, 3,0,0,1,3,1,2,1,2,0,3,0,0,0,1,0,3,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0, 3,3,1,1,3,2,3,3,3,1,3,2,1,3,2,1,3,2,2,2,2,1,3,3,1,2,1,3,1,2,3,0, 2,1,1,3,2,2,2,1,2,1,0,0,1,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2, 3,3,2,3,2,3,3,2,3,2,3,2,3,3,2,1,0,3,2,2,2,1,2,2,2,1,2,2,1,2,1,1, 2,2,2,3,0,1,3,1,1,1,1,0,1,1,0,2,1,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0, 
3,3,3,3,2,3,2,2,1,1,3,2,3,2,3,2,0,3,2,2,1,2,0,2,2,2,1,2,2,2,2,1, 3,2,1,2,2,1,0,2,0,1,0,0,1,1,0,0,0,0,0,1,1,0,1,0,0,0,0,0,0,0,0,1, 3,3,3,3,3,2,3,1,2,3,3,2,2,3,0,1,1,2,0,3,3,2,2,3,0,1,1,3,0,0,0,0, 3,1,0,3,3,0,2,0,2,1,0,0,3,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 3,3,3,2,3,2,3,3,0,1,3,1,1,2,1,2,1,1,3,1,1,0,2,3,1,1,1,1,1,1,1,1, 3,1,1,2,2,2,2,1,1,1,0,0,2,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1, 3,2,2,1,1,2,1,3,3,2,3,2,2,3,2,2,3,1,2,2,1,2,0,3,2,1,2,2,2,2,2,1, 3,2,1,2,2,2,1,1,1,1,0,0,1,1,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0, 3,3,3,3,3,3,3,3,1,3,3,0,2,1,0,3,2,0,0,3,1,0,1,1,0,1,0,0,0,0,0,1, 1,0,0,1,0,3,2,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 3,0,2,2,2,3,0,0,1,3,0,3,2,0,3,2,2,3,3,3,3,3,1,0,2,2,2,0,2,2,1,2, 0,2,3,0,0,0,0,1,0,1,0,0,1,1,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1, 3,0,2,3,1,3,3,2,3,3,0,3,3,0,3,2,2,3,2,3,3,3,0,0,2,2,3,0,1,1,1,3, 0,0,3,0,0,0,2,2,0,1,3,0,1,2,2,2,3,0,0,0,0,0,1,0,0,0,0,0,0,0,0,1, 3,2,3,3,2,0,3,3,2,2,3,1,3,2,1,3,2,0,1,2,2,0,2,3,2,1,0,3,0,0,0,0, 3,0,0,2,3,1,3,0,0,3,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 3,1,3,2,2,2,1,2,0,1,3,1,1,3,1,3,0,0,2,1,1,1,1,2,1,1,1,0,2,1,0,1, 1,2,0,0,0,3,1,1,0,0,0,0,1,0,1,0,0,1,0,1,0,0,0,0,0,3,1,0,0,0,1,0, 3,3,3,3,2,2,2,2,2,1,3,1,1,1,2,0,1,1,2,1,2,1,3,2,0,0,3,1,1,1,1,1, 3,1,0,2,3,0,0,0,3,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,2,3,0,3,3,0,2,0,0,0,0,0,0,0,3,0,0,1,0,0,0,0,0,0,0,0,0,0,0, 0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,2,3,1,3,0,0,1,2,0,0,2,0,3,3,2,3,3,3,2,3,0,0,2,2,2,0,0,0,2,2, 0,0,1,0,0,0,0,3,0,0,0,0,2,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0, 0,0,0,3,0,2,0,0,0,0,0,0,0,0,0,0,1,2,3,1,3,3,0,0,1,0,3,0,0,0,0,0, 0,0,3,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 3,3,1,2,3,1,2,3,1,0,3,0,2,2,1,0,2,1,1,2,0,1,0,0,1,1,1,1,0,1,0,0, 1,0,0,0,0,1,1,0,3,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 3,3,3,3,2,1,0,1,1,1,3,1,2,2,2,2,2,2,1,1,1,1,0,3,1,0,1,3,1,1,1,1, 1,1,0,2,0,1,3,1,1,0,0,1,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,2,0,1, 3,0,2,2,1,3,3,2,3,3,0,1,1,0,2,2,1,2,1,3,3,1,0,0,3,2,0,0,0,0,2,1, 0,1,0,0,0,0,1,2,0,1,1,3,1,1,2,2,1,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0, 0,0,3,0,0,1,0,0,0,3,0,0,3,0,3,1,0,1,1,1,3,2,0,0,0,3,0,0,0,0,2,0, 0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,2,0,0,0,0,0,0,0,0,0, 3,3,1,3,2,1,3,3,1,2,2,0,1,2,1,0,1,2,0,0,0,0,0,3,0,0,0,3,0,0,0,0, 3,0,0,1,1,1,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 3,0,1,2,0,3,3,3,2,2,0,1,1,0,1,3,0,0,0,2,2,0,0,0,0,3,1,0,1,0,0,0, 0,0,0,0,0,0,0,0,0,1,0,1,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 3,0,2,3,1,2,0,0,2,1,0,3,1,0,1,2,0,1,1,1,1,3,0,0,3,1,1,0,2,2,1,1, 0,2,0,0,0,0,0,1,0,1,0,0,1,1,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 3,0,0,3,1,2,0,0,2,2,0,1,2,0,1,0,1,3,1,2,1,0,0,0,2,0,3,0,0,0,1,0, 0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 3,0,1,1,2,2,0,0,0,2,0,2,1,0,1,1,0,1,1,1,2,1,0,0,1,1,1,0,2,1,1,1, 0,1,1,0,0,0,0,0,0,1,0,0,1,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,1,0,1, 0,0,0,2,0,1,3,1,1,1,1,0,0,0,0,3,2,0,1,0,0,0,1,2,0,0,0,1,0,0,0,0, 0,0,0,3,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,3,3,3,3,1,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 1,0,2,3,2,2,0,0,0,1,0,0,0,0,2,3,2,1,2,2,3,0,0,0,2,3,1,0,0,0,1,1, 0,0,1,0,0,0,0,0,0,0,1,0,0,1,0,0,0,0,0,1,1,0,1,0,0,0,0,0,0,0,0,0, 3,3,2,2,0,1,0,0,0,0,2,0,2,0,1,0,0,0,1,1,0,0,0,2,1,0,1,0,1,1,0,0, 0,1,0,2,0,0,1,0,3,0,1,0,0,0,2,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 3,3,1,0,0,1,0,0,0,0,0,1,1,2,0,0,0,0,1,0,0,1,3,1,0,0,0,0,1,1,0,0, 0,1,0,0,0,0,3,0,0,0,0,0,0,3,0,0,0,0,0,0,0,3,0,0,0,0,0,0,0,0,0,0, 
3,3,1,1,1,1,2,3,0,0,2,1,1,1,1,1,0,2,1,1,0,0,0,2,1,0,1,2,1,1,0,1, 2,1,0,3,0,0,0,0,3,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 1,3,1,0,0,0,0,0,0,0,3,0,0,0,3,0,0,0,0,0,0,0,0,1,1,0,0,0,0,0,0,1, 0,0,0,2,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 3,3,2,0,0,0,0,0,0,1,2,1,0,1,1,0,2,0,0,1,0,0,2,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,2,0,0,0,1,3,0,1,0,0,0,2,0,0,0,0,0,0,0,1,2,0,0,0,0,0, 3,3,0,0,1,1,2,0,0,1,2,1,0,1,1,1,0,1,1,0,0,2,1,1,0,1,0,0,1,1,1,0, 0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,3,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0, 2,2,2,1,0,0,0,0,1,0,0,0,0,3,0,0,0,0,0,0,0,0,0,3,0,0,0,0,0,0,0,0, 2,0,0,0,0,0,3,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 2,3,0,0,1,1,0,0,0,2,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,1,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 3,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 1,1,0,1,2,0,1,2,0,0,1,1,0,2,0,1,0,0,1,0,0,0,0,1,0,0,0,2,0,0,0,0, 1,0,0,1,0,1,1,0,3,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,1,0,0,0,0,0,0,0,1,1,0,1,1,0,2,1,3,0,0,0,0,1,1,0,0,0,0,0,0,0,3, 1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,3,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 2,0,1,0,1,0,0,2,0,0,2,0,0,1,1,2,0,0,1,1,0,0,0,1,0,0,0,1,1,0,0,0, 1,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0, 1,0,0,3,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,1, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 3,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2,0,0,1,1,0,0,0, 2,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,3,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 2,0,0,0,0,2,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,1,0,1,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,1,3,0,0,0, 2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2,0,0,1,0,0,0,0, 1,0,0,0,0,0,0,0,0,1,0,0,0,0,2,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,1,1,0,0,2,1,0,0,1,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, ) TIS620ThaiModel = { 'char_to_order_map': TIS620CharToOrderMap, 'precedence_matrix': ThaiLangModel, 'typical_positive_ratio': 0.926386, 'keep_english_letter': False, 'charset_name': "TIS-620", 'language': 'Thai', } PKZc?fafa8site-packages/pip/_vendor/chardet/langhungarianmodel.pyonu[ abc@svdZdZdZied6ed6dd6ed6dd6dd6Zied6ed6dd6ed6dd6dd6ZdS(iiiiii(i6i-i i2i1i&i'i5i$i)i"i#i/i.iGi+i!i%i9i0i@iDi7i4iiiiiii ii iiii iiiiCi iiiiiAi>ii iiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiKiiiiiiiiiOiiiiiiiiiiiiiiiii3iQiiNiiiii,iiii=iiiiii:iiBi;iiii<iEi?iiiiRiiJiiFiPiiHiiiSiMiTiiLiUiiiiiIi*iiiiii8iiiViWitchar_to_order_maptprecedence_matrixg(P?ttypical_positive_ratiotkeep_english_letters ISO-8859-2t charset_namet Hungariantlanguages windows-1250N(iiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiii(i6i-i i2i1i&i'i5i$i)i"i#i/i.iGi+i!i%i9i0i@iDi7i4iiiiiiiiiiiii ii iiii iiiiCi iiiiiAi>ii 
[... remainder of langhungarianmodel.pyo -- compiled bytecode, binary; its string table names Latin2HungarianModel (ISO-8859-2) and Win1250HungarianModel (windows-1250); a plain-text langhungarianmodel.py appears later in this archive ...]
PK  site-packages/pip/_vendor/chardet/euctwprober.pyo  [compiled bytecode -- binary, not recoverable as plain text; defines EUCTWProber (charset "EUC-TW", language "Taiwan")]
PK  site-packages/pip/_vendor/chardet/charsetgroupprober.py
######################## BEGIN LICENSE BLOCK ########################
# The Original Code is Mozilla Communicator client code.
#
# The Initial Developer of the Original Code is
# Netscape Communications Corporation.
# Portions created by the Initial Developer are Copyright (C) 1998
# the Initial Developer. All Rights Reserved.
#
# Contributor(s):
#   Mark Pilgrim - port to Python
#
# This library is free software; you can redistribute it and/or
# modify it under the terms of the GNU Lesser General Public
# License as published by the Free Software Foundation; either
# version 2.1 of the License, or (at your option) any later version.
#
# This library is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
# Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this library; if not, write to the Free Software
# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA
# 02110-1301  USA
######################### END LICENSE BLOCK #########################

from .enums import ProbingState
from .charsetprober import CharSetProber


class CharSetGroupProber(CharSetProber):
    def __init__(self, lang_filter=None):
        super(CharSetGroupProber, self).__init__(lang_filter=lang_filter)
        self._active_num = 0
        self.probers = []
        self._best_guess_prober = None

    def reset(self):
        super(CharSetGroupProber, self).reset()
        self._active_num = 0
        for prober in self.probers:
            if prober:
                prober.reset()
                prober.active = True
                self._active_num += 1
        self._best_guess_prober = None

    @property
    def charset_name(self):
        if not self._best_guess_prober:
            self.get_confidence()
            if not self._best_guess_prober:
                return None
        return self._best_guess_prober.charset_name

    @property
    def language(self):
        if not self._best_guess_prober:
            self.get_confidence()
            if not self._best_guess_prober:
                return None
        return self._best_guess_prober.language

    def feed(self, byte_str):
        for prober in self.probers:
            if not prober:
                continue
            if not prober.active:
                continue
            state = prober.feed(byte_str)
            if not state:
                continue
            if state == ProbingState.FOUND_IT:
                self._best_guess_prober = prober
                return self.state
            elif state == ProbingState.NOT_ME:
                prober.active = False
                self._active_num -= 1
                if self._active_num <= 0:
                    self._state = ProbingState.NOT_ME
                    return self.state
        return self.state

    def get_confidence(self):
        state = self.state
        if state == ProbingState.FOUND_IT:
            return 0.99
        elif state == ProbingState.NOT_ME:
            return 0.01
        best_conf = 0.0
        self._best_guess_prober = None
        for prober in self.probers:
            if not prober:
                continue
            if not prober.active:
                self.logger.debug('%s not active', prober.charset_name)
                continue
            conf = prober.get_confidence()
            self.logger.debug('%s %s confidence = %s',
                              prober.charset_name, prober.language, conf)
            if best_conf < conf:
                best_conf = conf
                self._best_guess_prober = prober
        if not self._best_guess_prober:
            return 0.0
        return best_conf
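# ---------------------------------------------------------------------------
# Usage sketch (an addition for illustration, not part of the original file):
# CharSetGroupProber never inspects bytes itself; it only fans feed() out to
# its children and keeps whichever child reports the best confidence.  Real
# callers normally go through UniversalDetector or chardet.detect() rather
# than wiring a group by hand; the imports below assume a standalone chardet
# install rather than the pip._vendor.chardet path used in this archive, and
# 'sample.txt' is only a hypothetical input file.
if __name__ == '__main__':
    from chardet.latin1prober import Latin1Prober
    from chardet.sbcsgroupprober import SBCSGroupProber

    group = CharSetGroupProber()
    group.probers = [SBCSGroupProber(), Latin1Prober()]  # children to delegate to
    group.reset()                                        # marks every child active

    with open('sample.txt', 'rb') as handle:             # hypothetical input file
        group.feed(handle.read())

    # charset_name / language come from the child with the best confidence.
    print(group.charset_name, group.language, group.get_confidence())
# ---------------------------------------------------------------------------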
PK  site-packages/pip/_vendor/chardet/gb2312prober.pyc       [compiled bytecode -- binary, not recoverable as plain text; defines GB2312Prober (charset "GB2312", language "Chinese")]
PK  site-packages/pip/_vendor/chardet/universaldetector.pyc  [compiled bytecode -- binary, not recoverable as plain text; defines UniversalDetector.  Recoverable docstring: "Module containing the UniversalDetector detector class, which is the primary class a user of ``chardet`` should use." -- authors Mark Pilgrim (initial port to Python), Shy Shalom (original C code), Dan Blanchard (major refactoring for 3.0), Ian Cordasco.  The class docstring describes the basic workflow: create a UniversalDetector, feed() it bytes, close() it, then read the ``result`` dict ('encoding', 'confidence', 'language').]
PK  site-packages/pip/_vendor/chardet/escprober.pyc          [compiled bytecode -- binary, not recoverable as plain text; the plain-text source of the same module follows next]
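The recoverable docstring above already names the whole public workflow: construct a UniversalDetector, feed() it bytes, close() it, then read ``result``. A slightly fuller sketch of that loop, stopping early once the detector reports ``done`` -- the import path assumes a standalone chardet install (here the package is vendored under pip._vendor.chardet), and the file name is only a placeholder:

from chardet.universaldetector import UniversalDetector

detector = UniversalDetector()
with open('unknown-encoding.txt', 'rb') as handle:      # hypothetical input file
    for chunk in iter(lambda: handle.read(4096), b''):  # feed in 4 KiB pieces
        detector.feed(chunk)
        if detector.done:                               # confident guess reached early
            break
detector.close()

# result is a dict with 'encoding', 'confidence' and 'language' keys.
print(detector.result)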
PK  site-packages/pip/_vendor/chardet/escprober.py
######################## BEGIN LICENSE BLOCK ########################
# The Original Code is mozilla.org code.
#
# The Initial Developer of the Original Code is
# Netscape Communications Corporation.
# Portions created by the Initial Developer are Copyright (C) 1998
# the Initial Developer. All Rights Reserved.
#
# Contributor(s):
#   Mark Pilgrim - port to Python
#
# This library is free software; you can redistribute it and/or
# modify it under the terms of the GNU Lesser General Public
# License as published by the Free Software Foundation; either
# version 2.1 of the License, or (at your option) any later version.
#
# This library is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
# Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this library; if not, write to the Free Software
# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA
# 02110-1301  USA
######################### END LICENSE BLOCK #########################

from .charsetprober import CharSetProber
from .codingstatemachine import CodingStateMachine
from .enums import LanguageFilter, ProbingState, MachineState
from .escsm import (HZ_SM_MODEL, ISO2022CN_SM_MODEL, ISO2022JP_SM_MODEL,
                    ISO2022KR_SM_MODEL)


class EscCharSetProber(CharSetProber):
    """
    This CharSetProber uses a "code scheme" approach for detecting encodings,
    whereby easily recognizable escape or shift sequences are relied on to
    identify these encodings.
    """

    def __init__(self, lang_filter=None):
        super(EscCharSetProber, self).__init__(lang_filter=lang_filter)
        self.coding_sm = []
        if self.lang_filter & LanguageFilter.CHINESE_SIMPLIFIED:
            self.coding_sm.append(CodingStateMachine(HZ_SM_MODEL))
            self.coding_sm.append(CodingStateMachine(ISO2022CN_SM_MODEL))
        if self.lang_filter & LanguageFilter.JAPANESE:
            self.coding_sm.append(CodingStateMachine(ISO2022JP_SM_MODEL))
        if self.lang_filter & LanguageFilter.KOREAN:
            self.coding_sm.append(CodingStateMachine(ISO2022KR_SM_MODEL))
        self.active_sm_count = None
        self._detected_charset = None
        self._detected_language = None
        self._state = None
        self.reset()

    def reset(self):
        super(EscCharSetProber, self).reset()
        for coding_sm in self.coding_sm:
            if not coding_sm:
                continue
            coding_sm.active = True
            coding_sm.reset()
        self.active_sm_count = len(self.coding_sm)
        self._detected_charset = None
        self._detected_language = None

    @property
    def charset_name(self):
        return self._detected_charset

    @property
    def language(self):
        return self._detected_language

    def get_confidence(self):
        if self._detected_charset:
            return 0.99
        else:
            return 0.00

    def feed(self, byte_str):
        for c in byte_str:
            for coding_sm in self.coding_sm:
                if not coding_sm or not coding_sm.active:
                    continue
                coding_state = coding_sm.next_state(c)
                if coding_state == MachineState.ERROR:
                    coding_sm.active = False
                    self.active_sm_count -= 1
                    if self.active_sm_count <= 0:
                        self._state = ProbingState.NOT_ME
                        return self.state
                elif coding_state == MachineState.ITS_ME:
                    self._state = ProbingState.FOUND_IT
                    self._detected_charset = coding_sm.get_coding_state_machine()
                    self._detected_language = coding_sm.language
                    return self.state
        return self.state
PK  site-packages/pip/_vendor/chardet/jisfreq.pyc  [compiled bytecode -- binary, not recoverable as plain text; contains the large JIS_CHAR_TO_FREQ_ORDER frequency table plus JIS_TABLE_SIZE and JIS_TYPICAL_DISTRIBUTION_RATIO]
PK  site-packages/pip/_vendor/chardet/langhungarianmodel.py
######################## BEGIN LICENSE BLOCK ########################
# The Original Code is Mozilla Communicator client code.
#
# The Initial Developer of the Original Code is
# Netscape Communications Corporation.
# Portions created by the Initial Developer are Copyright (C) 1998
# the Initial Developer. All Rights Reserved.
#
# Contributor(s):
#   Mark Pilgrim - port to Python
#
# This library is free software; you can redistribute it and/or
# modify it under the terms of the GNU Lesser General Public
# License as published by the Free Software Foundation; either
# version 2.1 of the License, or (at your option) any later version.
#
# This library is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
# Lesser General Public License for more details.
# # You should have received a copy of the GNU Lesser General Public # License along with this library; if not, write to the Free Software # Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA # 02110-1301 USA ######################### END LICENSE BLOCK ######################### # 255: Control characters that usually does not exist in any text # 254: Carriage/Return # 253: symbol (punctuation) that does not belong to word # 252: 0 - 9 # Character Mapping Table: Latin2_HungarianCharToOrderMap = ( 255,255,255,255,255,255,255,255,255,255,254,255,255,254,255,255, # 00 255,255,255,255,255,255,255,255,255,255,255,255,255,255,255,255, # 10 253,253,253,253,253,253,253,253,253,253,253,253,253,253,253,253, # 20 252,252,252,252,252,252,252,252,252,252,253,253,253,253,253,253, # 30 253, 28, 40, 54, 45, 32, 50, 49, 38, 39, 53, 36, 41, 34, 35, 47, 46, 71, 43, 33, 37, 57, 48, 64, 68, 55, 52,253,253,253,253,253, 253, 2, 18, 26, 17, 1, 27, 12, 20, 9, 22, 7, 6, 13, 4, 8, 23, 67, 10, 5, 3, 21, 19, 65, 62, 16, 11,253,253,253,253,253, 159,160,161,162,163,164,165,166,167,168,169,170,171,172,173,174, 175,176,177,178,179,180,181,182,183,184,185,186,187,188,189,190, 191,192,193,194,195,196,197, 75,198,199,200,201,202,203,204,205, 79,206,207,208,209,210,211,212,213,214,215,216,217,218,219,220, 221, 51, 81,222, 78,223,224,225,226, 44,227,228,229, 61,230,231, 232,233,234, 58,235, 66, 59,236,237,238, 60, 69, 63,239,240,241, 82, 14, 74,242, 70, 80,243, 72,244, 15, 83, 77, 84, 30, 76, 85, 245,246,247, 25, 73, 42, 24,248,249,250, 31, 56, 29,251,252,253, ) win1250HungarianCharToOrderMap = ( 255,255,255,255,255,255,255,255,255,255,254,255,255,254,255,255, # 00 255,255,255,255,255,255,255,255,255,255,255,255,255,255,255,255, # 10 253,253,253,253,253,253,253,253,253,253,253,253,253,253,253,253, # 20 252,252,252,252,252,252,252,252,252,252,253,253,253,253,253,253, # 30 253, 28, 40, 54, 45, 32, 50, 49, 38, 39, 53, 36, 41, 34, 35, 47, 46, 72, 43, 33, 37, 57, 48, 64, 68, 55, 52,253,253,253,253,253, 253, 2, 18, 26, 17, 1, 27, 12, 20, 9, 22, 7, 6, 13, 4, 8, 23, 67, 10, 5, 3, 21, 19, 65, 62, 16, 11,253,253,253,253,253, 161,162,163,164,165,166,167,168,169,170,171,172,173,174,175,176, 177,178,179,180, 78,181, 69,182,183,184,185,186,187,188,189,190, 191,192,193,194,195,196,197, 76,198,199,200,201,202,203,204,205, 81,206,207,208,209,210,211,212,213,214,215,216,217,218,219,220, 221, 51, 83,222, 80,223,224,225,226, 44,227,228,229, 61,230,231, 232,233,234, 58,235, 66, 59,236,237,238, 60, 70, 63,239,240,241, 84, 14, 75,242, 71, 82,243, 73,244, 15, 85, 79, 86, 30, 77, 87, 245,246,247, 25, 74, 42, 24,248,249,250, 31, 56, 29,251,252,253, ) # Model Table: # total sequences: 100% # first 512 sequences: 94.7368% # first 1024 sequences:5.2623% # rest sequences: 0.8894% # negative sequences: 0.0009% HungarianLangModel = ( 0,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,1,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3, 3,3,3,3,3,3,3,3,3,3,2,3,3,3,3,3,3,3,3,2,2,3,3,1,1,2,2,2,2,2,1,2, 3,2,2,3,3,3,3,3,2,3,3,3,3,3,3,1,2,3,3,3,3,2,3,3,1,1,3,3,0,1,1,1, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2,0, 3,2,1,3,3,3,3,3,2,3,3,3,3,3,1,1,2,3,3,3,3,3,3,3,1,1,3,2,0,1,1,1, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0, 3,3,3,3,3,3,3,3,3,3,3,1,1,2,3,3,3,1,3,3,3,3,3,1,3,3,2,2,0,3,2,3, 0,0,0,0,0,0,0,0,0,0,3,0,0,0,0,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0, 3,3,3,3,3,3,2,3,3,3,2,3,3,2,3,3,3,3,3,2,3,3,2,2,3,2,3,2,0,3,2,2, 0,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,1,0, 3,3,3,3,3,3,2,3,3,3,3,3,2,3,3,3,1,2,3,2,2,3,1,2,3,3,2,2,0,3,3,3, 
0,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0, 3,3,3,3,3,3,3,3,3,3,2,2,3,3,3,3,3,3,2,3,3,3,3,2,3,3,3,3,0,2,3,2, 0,0,0,1,1,0,0,0,0,0,3,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0, 3,3,3,3,3,3,3,3,3,3,3,1,1,1,3,3,2,1,3,2,2,3,2,1,3,2,2,1,0,3,3,1, 0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0, 3,2,2,3,3,3,3,3,1,2,3,3,3,3,1,2,1,3,3,3,3,2,2,3,1,1,3,2,0,1,1,1, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0, 3,3,3,3,3,3,3,3,2,2,3,3,3,3,3,2,1,3,3,3,3,3,2,2,1,3,3,3,0,1,1,2, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,1,0, 3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,2,3,3,3,2,3,3,2,3,3,3,2,0,3,2,3, 0,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,1,0, 3,3,3,3,3,3,2,3,3,3,2,3,2,3,3,3,1,3,2,2,2,3,1,1,3,3,1,1,0,3,3,2, 0,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0, 3,3,3,3,3,3,3,2,3,3,3,2,3,2,3,3,3,2,3,3,3,3,3,1,2,3,2,2,0,2,2,2, 0,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0, 3,3,3,2,2,2,3,1,3,3,2,2,1,3,3,3,1,1,3,1,2,3,2,3,2,2,2,1,0,2,2,2, 0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0, 3,1,1,3,3,3,3,3,1,2,3,3,3,3,1,2,1,3,3,3,2,2,3,2,1,0,3,2,0,1,1,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 3,1,1,3,3,3,3,3,1,2,3,3,3,3,1,1,0,3,3,3,3,0,2,3,0,0,2,1,0,1,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 3,3,3,3,3,3,2,2,3,3,2,2,2,2,3,3,0,1,2,3,2,3,2,2,3,2,1,2,0,2,2,2, 0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0, 3,3,3,3,3,3,1,2,3,3,3,2,1,2,3,3,2,2,2,3,2,3,3,1,3,3,1,1,0,2,3,2, 0,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0, 3,3,3,1,2,2,2,2,3,3,3,1,1,1,3,3,1,1,3,1,1,3,2,1,2,3,1,1,0,2,2,2, 0,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0, 3,3,3,2,1,2,1,1,3,3,1,1,1,1,3,3,1,1,2,2,1,2,1,1,2,2,1,1,0,2,2,1, 0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0, 3,3,3,1,1,2,1,1,3,3,1,0,1,1,3,3,2,0,1,1,2,3,1,0,2,2,1,0,0,1,3,2, 0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0, 3,2,1,3,3,3,3,3,1,2,3,2,3,3,2,1,1,3,2,3,2,1,2,2,0,1,2,1,0,0,1,1, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0, 3,3,3,3,2,2,2,2,3,1,2,2,1,1,3,3,0,3,2,1,2,3,2,1,3,3,1,1,0,2,1,3, 0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0, 3,3,3,2,2,2,3,2,3,3,3,2,1,1,3,3,1,1,1,2,2,3,2,3,2,2,2,1,0,2,2,1, 0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0, 1,0,0,3,3,3,3,3,0,0,3,3,2,3,0,0,0,2,3,3,1,0,1,2,0,0,1,1,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 3,1,2,3,3,3,3,3,1,2,3,3,2,2,1,1,0,3,3,2,2,1,2,2,1,0,2,2,0,1,1,1, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 3,3,2,2,1,3,1,2,3,3,2,2,1,1,2,2,1,1,1,1,3,2,1,1,1,1,2,1,0,1,2,1, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,1,0,0,0,0,0,0,0,0,0, 2,3,3,1,1,1,1,1,3,3,3,0,1,1,3,3,1,1,1,1,1,2,2,0,3,1,1,2,0,2,1,1, 0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0, 3,1,0,1,2,1,2,2,0,1,2,3,1,2,0,0,0,2,1,1,1,1,1,2,0,0,1,1,0,0,0,0, 1,2,1,2,2,2,1,2,1,2,0,2,0,2,2,1,1,2,1,1,2,1,1,1,0,1,0,0,0,1,1,0, 1,1,1,2,3,2,3,3,0,1,2,2,3,1,0,1,0,2,1,2,2,0,1,1,0,0,1,1,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 1,0,0,3,3,2,2,1,0,0,3,2,3,2,0,0,0,1,1,3,0,0,1,1,0,0,2,1,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 3,1,1,2,2,3,3,1,0,1,3,2,3,1,1,1,0,1,1,1,1,1,3,1,0,0,2,2,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 3,1,1,1,2,2,2,1,0,1,2,3,3,2,0,0,0,2,1,1,1,2,1,1,1,0,1,1,1,0,0,0, 
1,2,2,2,2,2,1,1,1,2,0,2,1,1,1,1,1,2,1,1,1,1,1,1,0,1,1,1,0,0,1,1, 3,2,2,1,0,0,1,1,2,2,0,3,0,1,2,1,1,0,0,1,1,1,0,1,1,1,1,0,2,1,1,1, 2,2,1,1,1,2,1,2,1,1,1,1,1,1,1,2,1,1,1,2,3,1,1,1,1,1,1,1,1,1,0,1, 2,3,3,0,1,0,0,0,3,3,1,0,0,1,2,2,1,0,0,0,0,2,0,0,1,1,1,0,2,1,1,1, 2,1,1,1,1,1,1,2,1,1,0,1,1,0,1,1,1,0,1,2,1,1,0,1,1,1,1,1,1,1,0,1, 2,3,3,0,1,0,0,0,2,2,0,0,0,0,1,2,2,0,0,0,0,1,0,0,1,1,0,0,2,0,1,0, 2,1,1,1,1,2,1,1,1,1,1,1,1,2,1,1,1,1,1,1,1,1,1,2,0,1,1,1,1,1,0,1, 3,2,2,0,1,0,1,0,2,3,2,0,0,1,2,2,1,0,0,1,1,1,0,0,2,1,0,1,2,2,1,1, 2,1,1,1,1,1,1,2,1,1,1,1,1,1,0,2,1,0,1,1,0,1,1,1,0,1,1,2,1,1,0,1, 2,2,2,0,0,1,0,0,2,2,1,1,0,0,2,1,1,0,0,0,1,2,0,0,2,1,0,0,2,1,1,1, 2,1,1,1,1,2,1,2,1,1,1,2,2,1,1,2,1,1,1,2,1,1,1,1,1,1,1,1,1,1,0,1, 1,2,3,0,0,0,1,0,3,2,1,0,0,1,2,1,1,0,0,0,0,2,1,0,1,1,0,0,2,1,2,1, 1,1,0,0,0,1,0,1,1,1,1,1,2,0,0,1,0,0,0,2,0,0,1,1,1,1,1,1,1,1,0,1, 3,0,0,2,1,2,2,1,0,0,2,1,2,2,0,0,0,2,1,1,1,0,1,1,0,0,1,1,2,0,0,0, 1,2,1,2,2,1,1,2,1,2,0,1,1,1,1,1,1,1,1,1,2,1,1,0,0,1,1,1,1,0,0,1, 1,3,2,0,0,0,1,0,2,2,2,0,0,0,2,2,1,0,0,0,0,3,1,1,1,1,0,0,2,1,1,1, 2,1,0,1,1,1,0,1,1,1,1,1,1,1,0,2,1,0,0,1,0,1,1,0,1,1,1,1,1,1,0,1, 2,3,2,0,0,0,1,0,2,2,0,0,0,0,2,1,1,0,0,0,0,2,1,0,1,1,0,0,2,1,1,0, 2,1,1,1,1,2,1,2,1,2,0,1,1,1,0,2,1,1,1,2,1,1,1,1,0,1,1,1,1,1,0,1, 3,1,1,2,2,2,3,2,1,1,2,2,1,1,0,1,0,2,2,1,1,1,1,1,0,0,1,1,0,1,1,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 2,2,2,0,0,0,0,0,2,2,0,0,0,0,2,2,1,0,0,0,1,1,0,0,1,2,0,0,2,1,1,1, 2,2,1,1,1,2,1,2,1,1,0,1,1,1,1,2,1,1,1,2,1,1,1,1,0,1,2,1,1,1,0,1, 1,0,0,1,2,3,2,1,0,0,2,0,1,1,0,0,0,1,1,1,1,0,1,1,0,0,1,0,0,0,0,0, 1,2,1,2,1,2,1,1,1,2,0,2,1,1,1,0,1,2,0,0,1,1,1,0,0,0,0,0,0,0,0,0, 2,3,2,0,0,0,0,0,1,1,2,1,0,0,1,1,1,0,0,0,0,2,0,0,1,1,0,0,2,1,1,1, 2,1,1,1,1,1,1,2,1,0,1,1,1,1,0,2,1,1,1,1,1,1,0,1,0,1,1,1,1,1,0,1, 1,2,2,0,1,1,1,0,2,2,2,0,0,0,3,2,1,0,0,0,1,1,0,0,1,1,0,1,1,1,0,0, 1,1,0,1,1,1,1,1,1,1,1,2,1,1,1,1,1,1,1,2,1,1,1,0,0,1,1,1,0,1,0,1, 2,1,0,2,1,1,2,2,1,1,2,1,1,1,0,0,0,1,1,0,1,1,1,1,0,0,1,1,1,0,0,0, 1,2,2,2,2,2,1,1,1,2,0,2,1,1,1,1,1,1,1,1,1,1,1,1,0,1,1,0,0,0,1,0, 1,2,3,0,0,0,1,0,2,2,0,0,0,0,2,2,0,0,0,0,0,1,0,0,1,0,0,0,2,0,1,0, 2,1,1,1,1,1,0,2,0,0,0,1,2,1,1,1,1,0,1,2,0,1,0,1,0,1,1,1,0,1,0,1, 2,2,2,0,0,0,1,0,2,1,2,0,0,0,1,1,2,0,0,0,0,1,0,0,1,1,0,0,2,1,0,1, 2,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,2,0,1,1,1,1,1,0,1, 1,2,2,0,0,0,1,0,2,2,2,0,0,0,1,1,0,0,0,0,0,1,1,0,2,0,0,1,1,1,0,1, 1,0,1,1,1,1,1,1,0,1,1,1,1,0,0,1,0,0,1,1,0,1,0,1,1,1,1,1,0,0,0,1, 1,0,0,1,0,1,2,1,0,0,1,1,1,2,0,0,0,1,1,0,1,0,1,1,0,0,1,0,0,0,0,0, 0,2,1,2,1,1,1,1,1,2,0,2,0,1,1,0,1,2,1,0,1,1,1,0,0,0,0,0,0,1,0,0, 2,1,1,0,1,2,0,0,1,1,1,0,0,0,1,1,0,0,0,0,0,1,0,0,1,0,0,0,2,1,0,1, 2,2,1,1,1,1,1,2,1,1,0,1,1,1,1,2,1,1,1,2,1,1,0,1,0,1,1,1,1,1,0,1, 1,2,2,0,0,0,0,0,1,1,0,0,0,0,2,1,0,0,0,0,0,2,0,0,2,2,0,0,2,0,0,1, 2,1,1,1,1,1,1,1,0,1,1,0,1,1,0,1,0,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1, 1,1,2,0,0,3,1,0,2,1,1,1,0,0,1,1,1,0,0,0,1,1,0,0,0,1,0,0,1,0,1,0, 1,2,1,0,1,1,1,2,1,1,0,1,1,1,1,1,0,0,0,1,1,1,1,1,0,1,0,0,0,1,0,0, 2,1,1,0,0,0,0,0,1,0,0,0,0,0,0,0,0,1,0,1,0,0,0,1,0,0,0,0,2,0,0,0, 2,1,1,1,1,1,1,1,1,1,0,1,1,1,1,1,1,1,1,1,2,1,1,0,0,1,1,1,1,1,0,1, 2,1,1,1,2,1,1,1,0,1,1,2,1,0,0,0,0,1,1,1,1,0,1,0,0,0,0,1,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 1,1,0,1,1,1,1,1,0,0,1,1,2,1,0,0,0,1,1,0,0,0,1,1,0,0,1,0,1,0,0,0, 1,2,1,1,1,1,1,1,1,1,0,1,0,1,1,1,1,1,1,0,1,1,1,0,0,0,0,0,0,1,0,0, 2,0,0,0,1,1,1,1,0,0,1,1,0,0,0,0,0,1,1,1,2,0,0,1,0,0,1,0,1,0,0,0, 0,1,1,1,1,1,1,1,1,2,0,1,1,1,1,0,1,1,1,0,1,1,1,0,0,0,0,0,0,0,0,0, 1,0,0,1,1,1,1,1,0,0,2,1,0,1,0,0,0,1,0,1,0,0,0,0,0,0,1,0,0,0,0,0, 
0,1,1,1,1,1,1,0,1,1,0,1,0,1,1,0,1,1,0,0,1,1,1,0,0,0,0,0,0,0,0,0, 1,0,0,1,1,1,0,0,0,0,1,0,2,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0, 0,1,1,1,1,1,0,0,1,1,0,1,0,1,0,0,1,1,1,0,1,1,1,0,0,0,0,0,0,0,0,0, 0,0,0,1,0,0,0,0,0,0,1,1,2,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,1,1,1,0,1,0,0,1,1,0,1,0,1,1,0,1,1,1,0,1,1,1,0,0,0,0,0,0,0,0,0, 2,1,1,1,1,1,1,1,1,1,1,0,0,1,1,1,0,0,1,0,0,1,0,1,0,1,1,1,0,0,1,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 1,0,0,1,1,1,1,0,0,0,1,1,1,0,0,0,0,1,1,1,0,0,0,0,0,0,0,0,0,0,0,0, 0,1,1,1,1,1,1,0,1,1,0,1,0,1,0,0,1,1,0,0,1,1,0,0,0,0,0,0,0,0,0,0, ) Latin2HungarianModel = { 'char_to_order_map': Latin2_HungarianCharToOrderMap, 'precedence_matrix': HungarianLangModel, 'typical_positive_ratio': 0.947368, 'keep_english_letter': True, 'charset_name': "ISO-8859-2", 'language': 'Hungarian', } Win1250HungarianModel = { 'char_to_order_map': win1250HungarianCharToOrderMap, 'precedence_matrix': HungarianLangModel, 'typical_positive_ratio': 0.947368, 'keep_english_letter': True, 'charset_name': "windows-1250", 'language': 'Hungarian', } PKZY(=$$5site-packages/pip/_vendor/chardet/chardistribution.pynu[######################## BEGIN LICENSE BLOCK ######################## # The Original Code is Mozilla Communicator client code. # # The Initial Developer of the Original Code is # Netscape Communications Corporation. # Portions created by the Initial Developer are Copyright (C) 1998 # the Initial Developer. All Rights Reserved. # # Contributor(s): # Mark Pilgrim - port to Python # # This library is free software; you can redistribute it and/or # modify it under the terms of the GNU Lesser General Public # License as published by the Free Software Foundation; either # version 2.1 of the License, or (at your option) any later version. # # This library is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU # Lesser General Public License for more details. # # You should have received a copy of the GNU Lesser General Public # License along with this library; if not, write to the Free Software # Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA # 02110-1301 USA ######################### END LICENSE BLOCK ######################### from .euctwfreq import (EUCTW_CHAR_TO_FREQ_ORDER, EUCTW_TABLE_SIZE, EUCTW_TYPICAL_DISTRIBUTION_RATIO) from .euckrfreq import (EUCKR_CHAR_TO_FREQ_ORDER, EUCKR_TABLE_SIZE, EUCKR_TYPICAL_DISTRIBUTION_RATIO) from .gb2312freq import (GB2312_CHAR_TO_FREQ_ORDER, GB2312_TABLE_SIZE, GB2312_TYPICAL_DISTRIBUTION_RATIO) from .big5freq import (BIG5_CHAR_TO_FREQ_ORDER, BIG5_TABLE_SIZE, BIG5_TYPICAL_DISTRIBUTION_RATIO) from .jisfreq import (JIS_CHAR_TO_FREQ_ORDER, JIS_TABLE_SIZE, JIS_TYPICAL_DISTRIBUTION_RATIO) class CharDistributionAnalysis(object): ENOUGH_DATA_THRESHOLD = 1024 SURE_YES = 0.99 SURE_NO = 0.01 MINIMUM_DATA_THRESHOLD = 3 def __init__(self): # Mapping table to get frequency order from char order (get from # GetOrder()) self._char_to_freq_order = None self._table_size = None # Size of above table # This is a constant value which varies from language to language, # used in calculating confidence. See # http://www.mozilla.org/projects/intl/UniversalCharsetDetection.html # for further detail. 
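# In get_confidence() below this ratio enters the formula
#     confidence = freq_chars / ((total_chars - freq_chars)
#                                * typical_distribution_ratio)
# so the larger the share of input characters that fall among the 512
# most frequent characters of the language, the closer the result gets
# to SURE_YES.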
self.typical_distribution_ratio = None self._done = None self._total_chars = None self._freq_chars = None self.reset() def reset(self): """reset analyser, clear any state""" # If this flag is set to True, detection is done and conclusion has # been made self._done = False self._total_chars = 0 # Total characters encountered # The number of characters whose frequency order is less than 512 self._freq_chars = 0 def feed(self, char, char_len): """feed a character with known length""" if char_len == 2: # we only care about 2-bytes character in our distribution analysis order = self.get_order(char) else: order = -1 if order >= 0: self._total_chars += 1 # order is valid if order < self._table_size: if 512 > self._char_to_freq_order[order]: self._freq_chars += 1 def get_confidence(self): """return confidence based on existing data""" # if we didn't receive any character in our consideration range, # return negative answer if self._total_chars <= 0 or self._freq_chars <= self.MINIMUM_DATA_THRESHOLD: return self.SURE_NO if self._total_chars != self._freq_chars: r = (self._freq_chars / ((self._total_chars - self._freq_chars) * self.typical_distribution_ratio)) if r < self.SURE_YES: return r # normalize confidence (we don't want to be 100% sure) return self.SURE_YES def got_enough_data(self): # It is not necessary to receive all data to draw conclusion. # For charset detection, certain amount of data is enough return self._total_chars > self.ENOUGH_DATA_THRESHOLD def get_order(self, byte_str): # We do not handle characters based on the original encoding string, # but convert this encoding string to a number, here called order. # This allows multiple encodings of a language to share one frequency # table. return -1 class EUCTWDistributionAnalysis(CharDistributionAnalysis): def __init__(self): super(EUCTWDistributionAnalysis, self).__init__() self._char_to_freq_order = EUCTW_CHAR_TO_FREQ_ORDER self._table_size = EUCTW_TABLE_SIZE self.typical_distribution_ratio = EUCTW_TYPICAL_DISTRIBUTION_RATIO def get_order(self, byte_str): # for euc-TW encoding, we are interested # first byte range: 0xc4 -- 0xfe # second byte range: 0xa1 -- 0xfe # no validation needed here. State machine has done that first_char = byte_str[0] if first_char >= 0xC4: return 94 * (first_char - 0xC4) + byte_str[1] - 0xA1 else: return -1 class EUCKRDistributionAnalysis(CharDistributionAnalysis): def __init__(self): super(EUCKRDistributionAnalysis, self).__init__() self._char_to_freq_order = EUCKR_CHAR_TO_FREQ_ORDER self._table_size = EUCKR_TABLE_SIZE self.typical_distribution_ratio = EUCKR_TYPICAL_DISTRIBUTION_RATIO def get_order(self, byte_str): # for euc-KR encoding, we are interested # first byte range: 0xb0 -- 0xfe # second byte range: 0xa1 -- 0xfe # no validation needed here. State machine has done that first_char = byte_str[0] if first_char >= 0xB0: return 94 * (first_char - 0xB0) + byte_str[1] - 0xA1 else: return -1 class GB2312DistributionAnalysis(CharDistributionAnalysis): def __init__(self): super(GB2312DistributionAnalysis, self).__init__() self._char_to_freq_order = GB2312_CHAR_TO_FREQ_ORDER self._table_size = GB2312_TABLE_SIZE self.typical_distribution_ratio = GB2312_TYPICAL_DISTRIBUTION_RATIO def get_order(self, byte_str): # for GB2312 encoding, we are interested # first byte range: 0xb0 -- 0xfe # second byte range: 0xa1 -- 0xfe # no validation needed here. 
State machine has done that first_char, second_char = byte_str[0], byte_str[1] if (first_char >= 0xB0) and (second_char >= 0xA1): return 94 * (first_char - 0xB0) + second_char - 0xA1 else: return -1 class Big5DistributionAnalysis(CharDistributionAnalysis): def __init__(self): super(Big5DistributionAnalysis, self).__init__() self._char_to_freq_order = BIG5_CHAR_TO_FREQ_ORDER self._table_size = BIG5_TABLE_SIZE self.typical_distribution_ratio = BIG5_TYPICAL_DISTRIBUTION_RATIO def get_order(self, byte_str): # for big5 encoding, we are interested # first byte range: 0xa4 -- 0xfe # second byte range: 0x40 -- 0x7e , 0xa1 -- 0xfe # no validation needed here. State machine has done that first_char, second_char = byte_str[0], byte_str[1] if first_char >= 0xA4: if second_char >= 0xA1: return 157 * (first_char - 0xA4) + second_char - 0xA1 + 63 else: return 157 * (first_char - 0xA4) + second_char - 0x40 else: return -1 class SJISDistributionAnalysis(CharDistributionAnalysis): def __init__(self): super(SJISDistributionAnalysis, self).__init__() self._char_to_freq_order = JIS_CHAR_TO_FREQ_ORDER self._table_size = JIS_TABLE_SIZE self.typical_distribution_ratio = JIS_TYPICAL_DISTRIBUTION_RATIO def get_order(self, byte_str): # for sjis encoding, we are interested # first byte range: 0x81 -- 0x9f , 0xe0 -- 0xfe # second byte range: 0x40 -- 0x7e, 0x81 -- oxfe # no validation needed here. State machine has done that first_char, second_char = byte_str[0], byte_str[1] if (first_char >= 0x81) and (first_char <= 0x9F): order = 188 * (first_char - 0x81) elif (first_char >= 0xE0) and (first_char <= 0xEF): order = 188 * (first_char - 0xE0 + 31) else: return -1 order = order + second_char - 0x40 if second_char > 0x7F: order = -1 return order class EUCJPDistributionAnalysis(CharDistributionAnalysis): def __init__(self): super(EUCJPDistributionAnalysis, self).__init__() self._char_to_freq_order = JIS_CHAR_TO_FREQ_ORDER self._table_size = JIS_TABLE_SIZE self.typical_distribution_ratio = JIS_TYPICAL_DISTRIBUTION_RATIO def get_order(self, byte_str): # for euc-JP encoding, we are interested # first byte range: 0xa0 -- 0xfe # second byte range: 0xa1 -- 0xfe # no validation needed here. State machine has done that char = byte_str[0] if char >= 0xA0: return 94 * (char - 0xA1) + byte_str[1] - 0xa1 else: return -1 PKZZZ5site-packages/pip/_vendor/chardet/sbcharsetprober.pycnu[ abc@sFddlmZddlmZmZmZdefdYZdS(i(t CharSetProber(tCharacterCategoryt ProbingStatetSequenceLikelihoodtSingleByteCharSetProbercBsheZdZdZdZdZed dZdZ e dZ e dZ dZ d ZRS( i@igffffff?g?cCsitt|j||_||_||_d|_d|_d|_ d|_ d|_ |j dS(N( tsuperRt__init__t_modelt _reversedt _name_probertNonet _last_ordert _seq_counterst _total_seqst _total_chart _freq_chartreset(tselftmodeltreversedt name_prober((sG/usr/lib/python2.7/site-packages/pip/_vendor/chardet/sbcharsetprober.pyR's        cCsQtt|jd|_dgtj|_d|_d|_d|_ dS(Nii( RRRR Rtget_num_categoriesR R RR(R((sG/usr/lib/python2.7/site-packages/pip/_vendor/chardet/sbcharsetprober.pyR5s    cCs"|jr|jjS|jdSdS(Nt charset_name(R RR(R((sG/usr/lib/python2.7/site-packages/pip/_vendor/chardet/sbcharsetprober.pyR?s  cCs'|jr|jjS|jjdSdS(Ntlanguage(R RRtget(R((sG/usr/lib/python2.7/site-packages/pip/_vendor/chardet/sbcharsetprober.pyRFs  c Cs|jds|j|}n|s,|jS|jd}xt|D]\}}||}|tjkr}|jd7_n||jkr+|jd7_|j |jkr+|j d7_ |j s|j |j|}|jd|}n%||j|j }|jd|}|j |cd7sPKZEPP/site-packages/pip/_vendor/chardet/gb2312freq.pynu[######################## BEGIN LICENSE BLOCK ######################## # The Original Code is Mozilla Communicator client code. 
# # The Initial Developer of the Original Code is # Netscape Communications Corporation. # Portions created by the Initial Developer are Copyright (C) 1998 # the Initial Developer. All Rights Reserved. # # Contributor(s): # Mark Pilgrim - port to Python # # This library is free software; you can redistribute it and/or # modify it under the terms of the GNU Lesser General Public # License as published by the Free Software Foundation; either # version 2.1 of the License, or (at your option) any later version. # # This library is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU # Lesser General Public License for more details. # # You should have received a copy of the GNU Lesser General Public # License along with this library; if not, write to the Free Software # Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA # 02110-1301 USA ######################### END LICENSE BLOCK ######################### # GB2312 most frequently used character table # # Char to FreqOrder table , from hz6763 # 512 --> 0.79 -- 0.79 # 1024 --> 0.92 -- 0.13 # 2048 --> 0.98 -- 0.06 # 6768 --> 1.00 -- 0.02 # # Ideal Distribution Ratio = 0.79135/(1-0.79135) = 3.79 # Random Distribution Ration = 512 / (3755 - 512) = 0.157 # # Typical Distribution Ratio about 25% of Ideal one, still much higher that RDR GB2312_TYPICAL_DISTRIBUTION_RATIO = 0.9 GB2312_TABLE_SIZE = 3760 GB2312_CHAR_TO_FREQ_ORDER = ( 1671, 749,1443,2364,3924,3807,2330,3921,1704,3463,2691,1511,1515, 572,3191,2205, 2361, 224,2558, 479,1711, 963,3162, 440,4060,1905,2966,2947,3580,2647,3961,3842, 2204, 869,4207, 970,2678,5626,2944,2956,1479,4048, 514,3595, 588,1346,2820,3409, 249,4088,1746,1873,2047,1774, 581,1813, 358,1174,3590,1014,1561,4844,2245, 670, 1636,3112, 889,1286, 953, 556,2327,3060,1290,3141, 613, 185,3477,1367, 850,3820, 1715,2428,2642,2303,2732,3041,2562,2648,3566,3946,1349, 388,3098,2091,1360,3585, 152,1687,1539, 738,1559, 59,1232,2925,2267,1388,1249,1741,1679,2960, 151,1566, 1125,1352,4271, 924,4296, 385,3166,4459, 310,1245,2850, 70,3285,2729,3534,3575, 2398,3298,3466,1960,2265, 217,3647, 864,1909,2084,4401,2773,1010,3269,5152, 853, 3051,3121,1244,4251,1895, 364,1499,1540,2313,1180,3655,2268, 562, 715,2417,3061, 544, 336,3768,2380,1752,4075, 950, 280,2425,4382, 183,2759,3272, 333,4297,2155, 1688,2356,1444,1039,4540, 736,1177,3349,2443,2368,2144,2225, 565, 196,1482,3406, 927,1335,4147, 692, 878,1311,1653,3911,3622,1378,4200,1840,2969,3149,2126,1816, 2534,1546,2393,2760, 737,2494, 13, 447, 245,2747, 38,2765,2129,2589,1079, 606, 360, 471,3755,2890, 404, 848, 699,1785,1236, 370,2221,1023,3746,2074,2026,2023, 2388,1581,2119, 812,1141,3091,2536,1519, 804,2053, 406,1596,1090, 784, 548,4414, 1806,2264,2936,1100, 343,4114,5096, 622,3358, 743,3668,1510,1626,5020,3567,2513, 3195,4115,5627,2489,2991, 24,2065,2697,1087,2719, 48,1634, 315, 68, 985,2052, 198,2239,1347,1107,1439, 597,2366,2172, 871,3307, 919,2487,2790,1867, 236,2570, 1413,3794, 906,3365,3381,1701,1982,1818,1524,2924,1205, 616,2586,2072,2004, 575, 253,3099, 32,1365,1182, 197,1714,2454,1201, 554,3388,3224,2748, 756,2587, 250, 2567,1507,1517,3529,1922,2761,2337,3416,1961,1677,2452,2238,3153, 615, 911,1506, 1474,2495,1265,1906,2749,3756,3280,2161, 898,2714,1759,3450,2243,2444, 563, 26, 3286,2266,3769,3344,2707,3677, 611,1402, 531,1028,2871,4548,1375, 261,2948, 835, 1190,4134, 353, 840,2684,1900,3082,1435,2109,1207,1674, 329,1872,2781,4055,2686, 2104, 
608,3318,2423,2957,2768,1108,3739,3512,3271,3985,2203,1771,3520,1418,2054, 1681,1153, 225,1627,2929, 162,2050,2511,3687,1954, 124,1859,2431,1684,3032,2894, 585,4805,3969,2869,2704,2088,2032,2095,3656,2635,4362,2209, 256, 518,2042,2105, 3777,3657, 643,2298,1148,1779, 190, 989,3544, 414, 11,2135,2063,2979,1471, 403, 3678, 126, 770,1563, 671,2499,3216,2877, 600,1179, 307,2805,4937,1268,1297,2694, 252,4032,1448,1494,1331,1394, 127,2256, 222,1647,1035,1481,3056,1915,1048, 873, 3651, 210, 33,1608,2516, 200,1520, 415, 102, 0,3389,1287, 817, 91,3299,2940, 836,1814, 549,2197,1396,1669,2987,3582,2297,2848,4528,1070, 687, 20,1819, 121, 1552,1364,1461,1968,2617,3540,2824,2083, 177, 948,4938,2291, 110,4549,2066, 648, 3359,1755,2110,2114,4642,4845,1693,3937,3308,1257,1869,2123, 208,1804,3159,2992, 2531,2549,3361,2418,1350,2347,2800,2568,1291,2036,2680, 72, 842,1990, 212,1233, 1154,1586, 75,2027,3410,4900,1823,1337,2710,2676, 728,2810,1522,3026,4995, 157, 755,1050,4022, 710, 785,1936,2194,2085,1406,2777,2400, 150,1250,4049,1206, 807, 1910, 534, 529,3309,1721,1660, 274, 39,2827, 661,2670,1578, 925,3248,3815,1094, 4278,4901,4252, 41,1150,3747,2572,2227,4501,3658,4902,3813,3357,3617,2884,2258, 887, 538,4187,3199,1294,2439,3042,2329,2343,2497,1255, 107, 543,1527, 521,3478, 3568, 194,5062, 15, 961,3870,1241,1192,2664, 66,5215,3260,2111,1295,1127,2152, 3805,4135, 901,1164,1976, 398,1278, 530,1460, 748, 904,1054,1966,1426, 53,2909, 509, 523,2279,1534, 536,1019, 239,1685, 460,2353, 673,1065,2401,3600,4298,2272, 1272,2363, 284,1753,3679,4064,1695, 81, 815,2677,2757,2731,1386, 859, 500,4221, 2190,2566, 757,1006,2519,2068,1166,1455, 337,2654,3203,1863,1682,1914,3025,1252, 1409,1366, 847, 714,2834,2038,3209, 964,2970,1901, 885,2553,1078,1756,3049, 301, 1572,3326, 688,2130,1996,2429,1805,1648,2930,3421,2750,3652,3088, 262,1158,1254, 389,1641,1812, 526,1719, 923,2073,1073,1902, 468, 489,4625,1140, 857,2375,3070, 3319,2863, 380, 116,1328,2693,1161,2244, 273,1212,1884,2769,3011,1775,1142, 461, 3066,1200,2147,2212, 790, 702,2695,4222,1601,1058, 434,2338,5153,3640, 67,2360, 4099,2502, 618,3472,1329, 416,1132, 830,2782,1807,2653,3211,3510,1662, 192,2124, 296,3979,1739,1611,3684, 23, 118, 324, 446,1239,1225, 293,2520,3814,3795,2535, 3116, 17,1074, 467,2692,2201, 387,2922, 45,1326,3055,1645,3659,2817, 958, 243, 1903,2320,1339,2825,1784,3289, 356, 576, 865,2315,2381,3377,3916,1088,3122,1713, 1655, 935, 628,4689,1034,1327, 441, 800, 720, 894,1979,2183,1528,5289,2702,1071, 4046,3572,2399,1571,3281, 79, 761,1103, 327, 134, 758,1899,1371,1615, 879, 442, 215,2605,2579, 173,2048,2485,1057,2975,3317,1097,2253,3801,4263,1403,1650,2946, 814,4968,3487,1548,2644,1567,1285, 2, 295,2636, 97, 946,3576, 832, 141,4257, 3273, 760,3821,3521,3156,2607, 949,1024,1733,1516,1803,1920,2125,2283,2665,3180, 1501,2064,3560,2171,1592, 803,3518,1416, 732,3897,4258,1363,1362,2458, 119,1427, 602,1525,2608,1605,1639,3175, 694,3064, 10, 465, 76,2000,4846,4208, 444,3781, 1619,3353,2206,1273,3796, 740,2483, 320,1723,2377,3660,2619,1359,1137,1762,1724, 2345,2842,1850,1862, 912, 821,1866, 612,2625,1735,2573,3369,1093, 844, 89, 937, 930,1424,3564,2413,2972,1004,3046,3019,2011, 711,3171,1452,4178, 428, 801,1943, 432, 445,2811, 206,4136,1472, 730, 349, 73, 397,2802,2547, 998,1637,1167, 789, 396,3217, 154,1218, 716,1120,1780,2819,4826,1931,3334,3762,2139,1215,2627, 552, 3664,3628,3232,1405,2383,3111,1356,2652,3577,3320,3101,1703, 640,1045,1370,1246, 4996, 371,1575,2436,1621,2210, 984,4033,1734,2638, 16,4529, 663,2755,3255,1451, 3917,2257,1253,1955,2234,1263,2951, 
214,1229, 617, 485, 359,1831,1969, 473,2310, 750,2058, 165, 80,2864,2419, 361,4344,2416,2479,1134, 796,3726,1266,2943, 860, 2715, 938, 390,2734,1313,1384, 248, 202, 877,1064,2854, 522,3907, 279,1602, 297, 2357, 395,3740, 137,2075, 944,4089,2584,1267,3802, 62,1533,2285, 178, 176, 780, 2440, 201,3707, 590, 478,1560,4354,2117,1075, 30, 74,4643,4004,1635,1441,2745, 776,2596, 238,1077,1692,1912,2844, 605, 499,1742,3947, 241,3053, 980,1749, 936, 2640,4511,2582, 515,1543,2162,5322,2892,2993, 890,2148,1924, 665,1827,3581,1032, 968,3163, 339,1044,1896, 270, 583,1791,1720,4367,1194,3488,3669, 43,2523,1657, 163,2167, 290,1209,1622,3378, 550, 634,2508,2510, 695,2634,2384,2512,1476,1414, 220,1469,2341,2138,2852,3183,2900,4939,2865,3502,1211,3680, 854,3227,1299,2976, 3172, 186,2998,1459, 443,1067,3251,1495, 321,1932,3054, 909, 753,1410,1828, 436, 2441,1119,1587,3164,2186,1258, 227, 231,1425,1890,3200,3942, 247, 959, 725,5254, 2741, 577,2158,2079, 929, 120, 174, 838,2813, 591,1115, 417,2024, 40,3240,1536, 1037, 291,4151,2354, 632,1298,2406,2500,3535,1825,1846,3451, 205,1171, 345,4238, 18,1163, 811, 685,2208,1217, 425,1312,1508,1175,4308,2552,1033, 587,1381,3059, 2984,3482, 340,1316,4023,3972, 792,3176, 519, 777,4690, 918, 933,4130,2981,3741, 90,3360,2911,2200,5184,4550, 609,3079,2030, 272,3379,2736, 363,3881,1130,1447, 286, 779, 357,1169,3350,3137,1630,1220,2687,2391, 747,1277,3688,2618,2682,2601, 1156,3196,5290,4034,3102,1689,3596,3128, 874, 219,2783, 798, 508,1843,2461, 269, 1658,1776,1392,1913,2983,3287,2866,2159,2372, 829,4076, 46,4253,2873,1889,1894, 915,1834,1631,2181,2318, 298, 664,2818,3555,2735, 954,3228,3117, 527,3511,2173, 681,2712,3033,2247,2346,3467,1652, 155,2164,3382, 113,1994, 450, 899, 494, 994, 1237,2958,1875,2336,1926,3727, 545,1577,1550, 633,3473, 204,1305,3072,2410,1956, 2471, 707,2134, 841,2195,2196,2663,3843,1026,4940, 990,3252,4997, 368,1092, 437, 3212,3258,1933,1829, 675,2977,2893, 412, 943,3723,4644,3294,3283,2230,2373,5154, 2389,2241,2661,2323,1404,2524, 593, 787, 677,3008,1275,2059, 438,2709,2609,2240, 2269,2246,1446, 36,1568,1373,3892,1574,2301,1456,3962, 693,2276,5216,2035,1143, 2720,1919,1797,1811,2763,4137,2597,1830,1699,1488,1198,2090, 424,1694, 312,3634, 3390,4179,3335,2252,1214, 561,1059,3243,2295,2561, 975,5155,2321,2751,3772, 472, 1537,3282,3398,1047,2077,2348,2878,1323,3340,3076, 690,2906, 51, 369, 170,3541, 1060,2187,2688,3670,2541,1083,1683, 928,3918, 459, 109,4427, 599,3744,4286, 143, 2101,2730,2490, 82,1588,3036,2121, 281,1860, 477,4035,1238,2812,3020,2716,3312, 1530,2188,2055,1317, 843, 636,1808,1173,3495, 649, 181,1002, 147,3641,1159,2414, 3750,2289,2795, 813,3123,2610,1136,4368, 5,3391,4541,2174, 420, 429,1728, 754, 1228,2115,2219, 347,2223,2733, 735,1518,3003,2355,3134,1764,3948,3329,1888,2424, 1001,1234,1972,3321,3363,1672,1021,1450,1584, 226, 765, 655,2526,3404,3244,2302, 3665, 731, 594,2184, 319,1576, 621, 658,2656,4299,2099,3864,1279,2071,2598,2739, 795,3086,3699,3908,1707,2352,2402,1382,3136,2475,1465,4847,3496,3865,1085,3004, 2591,1084, 213,2287,1963,3565,2250, 822, 793,4574,3187,1772,1789,3050, 595,1484, 1959,2770,1080,2650, 456, 422,2996, 940,3322,4328,4345,3092,2742, 965,2784, 739, 4124, 952,1358,2498,2949,2565, 332,2698,2378, 660,2260,2473,4194,3856,2919, 535, 1260,2651,1208,1428,1300,1949,1303,2942, 433,2455,2450,1251,1946, 614,1269, 641, 1306,1810,2737,3078,2912, 564,2365,1419,1415,1497,4460,2367,2185,1379,3005,1307, 3218,2175,1897,3063, 682,1157,4040,4005,1712,1160,1941,1399, 394, 402,2952,1573, 1151,2986,2404, 862, 299,2033,1489,3006, 346, 
171,2886,3401,1726,2932, 168,2533, 47,2507,1030,3735,1145,3370,1395,1318,1579,3609,4560,2857,4116,1457,2529,1965, 504,1036,2690,2988,2405, 745,5871, 849,2397,2056,3081, 863,2359,3857,2096, 99, 1397,1769,2300,4428,1643,3455,1978,1757,3718,1440, 35,4879,3742,1296,4228,2280, 160,5063,1599,2013, 166, 520,3479,1646,3345,3012, 490,1937,1545,1264,2182,2505, 1096,1188,1369,1436,2421,1667,2792,2460,1270,2122, 727,3167,2143, 806,1706,1012, 1800,3037, 960,2218,1882, 805, 139,2456,1139,1521, 851,1052,3093,3089, 342,2039, 744,5097,1468,1502,1585,2087, 223, 939, 326,2140,2577, 892,2481,1623,4077, 982, 3708, 135,2131, 87,2503,3114,2326,1106, 876,1616, 547,2997,2831,2093,3441,4530, 4314, 9,3256,4229,4148, 659,1462,1986,1710,2046,2913,2231,4090,4880,5255,3392, 3274,1368,3689,4645,1477, 705,3384,3635,1068,1529,2941,1458,3782,1509, 100,1656, 2548, 718,2339, 408,1590,2780,3548,1838,4117,3719,1345,3530, 717,3442,2778,3220, 2898,1892,4590,3614,3371,2043,1998,1224,3483, 891, 635, 584,2559,3355, 733,1766, 1729,1172,3789,1891,2307, 781,2982,2271,1957,1580,5773,2633,2005,4195,3097,1535, 3213,1189,1934,5693,3262, 586,3118,1324,1598, 517,1564,2217,1868,1893,4445,3728, 2703,3139,1526,1787,1992,3882,2875,1549,1199,1056,2224,1904,2711,5098,4287, 338, 1993,3129,3489,2689,1809,2815,1997, 957,1855,3898,2550,3275,3057,1105,1319, 627, 1505,1911,1883,3526, 698,3629,3456,1833,1431, 746, 77,1261,2017,2296,1977,1885, 125,1334,1600, 525,1798,1109,2222,1470,1945, 559,2236,1186,3443,2476,1929,1411, 2411,3135,1777,3372,2621,1841,1613,3229, 668,1430,1839,2643,2916, 195,1989,2671, 2358,1387, 629,3205,2293,5256,4439, 123,1310, 888,1879,4300,3021,3605,1003,1162, 3192,2910,2010, 140,2395,2859, 55,1082,2012,2901, 662, 419,2081,1438, 680,2774, 4654,3912,1620,1731,1625,5035,4065,2328, 512,1344, 802,5443,2163,2311,2537, 524, 3399, 98,1155,2103,1918,2606,3925,2816,1393,2465,1504,3773,2177,3963,1478,4346, 180,1113,4655,3461,2028,1698, 833,2696,1235,1322,1594,4408,3623,3013,3225,2040, 3022, 541,2881, 607,3632,2029,1665,1219, 639,1385,1686,1099,2803,3231,1938,3188, 2858, 427, 676,2772,1168,2025, 454,3253,2486,3556, 230,1950, 580, 791,1991,1280, 1086,1974,2034, 630, 257,3338,2788,4903,1017, 86,4790, 966,2789,1995,1696,1131, 259,3095,4188,1308, 179,1463,5257, 289,4107,1248, 42,3413,1725,2288, 896,1947, 774,4474,4254, 604,3430,4264, 392,2514,2588, 452, 237,1408,3018, 988,4531,1970, 3034,3310, 540,2370,1562,1288,2990, 502,4765,1147, 4,1853,2708, 207, 294,2814, 4078,2902,2509, 684, 34,3105,3532,2551, 644, 709,2801,2344, 573,1727,3573,3557, 2021,1081,3100,4315,2100,3681, 199,2263,1837,2385, 146,3484,1195,2776,3949, 997, 1939,3973,1008,1091,1202,1962,1847,1149,4209,5444,1076, 493, 117,5400,2521, 972, 1490,2934,1796,4542,2374,1512,2933,2657, 413,2888,1135,2762,2314,2156,1355,2369, 766,2007,2527,2170,3124,2491,2593,2632,4757,2437, 234,3125,3591,1898,1750,1376, 1942,3468,3138, 570,2127,2145,3276,4131, 962, 132,1445,4196, 19, 941,3624,3480, 3366,1973,1374,4461,3431,2629, 283,2415,2275, 808,2887,3620,2112,2563,1353,3610, 955,1089,3103,1053, 96, 88,4097, 823,3808,1583, 399, 292,4091,3313, 421,1128, 642,4006, 903,2539,1877,2082, 596, 29,4066,1790, 722,2157, 130, 995,1569, 769, 1485, 464, 513,2213, 288,1923,1101,2453,4316, 133, 486,2445, 50, 625, 487,2207, 57, 423, 481,2962, 159,3729,1558, 491, 303, 482, 501, 240,2837, 112,3648,2392, 1783, 362, 8,3433,3422, 610,2793,3277,1390,1284,1654, 21,3823, 734, 367, 623, 193, 287, 374,1009,1483, 816, 476, 313,2255,2340,1262,2150,2899,1146,2581, 782, 2116,1659,2018,1880, 255,3586,3314,1110,2867,2137,2564, 
986,2767,5185,2006, 650, 158, 926, 762, 881,3157,2717,2362,3587, 306,3690,3245,1542,3077,2427,1691,2478, 2118,2985,3490,2438, 539,2305, 983, 129,1754, 355,4201,2386, 827,2923, 104,1773, 2838,2771, 411,2905,3919, 376, 767, 122,1114, 828,2422,1817,3506, 266,3460,1007, 1609,4998, 945,2612,4429,2274, 726,1247,1964,2914,2199,2070,4002,4108, 657,3323, 1422, 579, 455,2764,4737,1222,2895,1670, 824,1223,1487,2525, 558, 861,3080, 598, 2659,2515,1967, 752,2583,2376,2214,4180, 977, 704,2464,4999,2622,4109,1210,2961, 819,1541, 142,2284, 44, 418, 457,1126,3730,4347,4626,1644,1876,3671,1864, 302, 1063,5694, 624, 723,1984,3745,1314,1676,2488,1610,1449,3558,3569,2166,2098, 409, 1011,2325,3704,2306, 818,1732,1383,1824,1844,3757, 999,2705,3497,1216,1423,2683, 2426,2954,2501,2726,2229,1475,2554,5064,1971,1794,1666,2014,1343, 783, 724, 191, 2434,1354,2220,5065,1763,2752,2472,4152, 131, 175,2885,3434, 92,1466,4920,2616, 3871,3872,3866, 128,1551,1632, 669,1854,3682,4691,4125,1230, 188,2973,3290,1302, 1213, 560,3266, 917, 763,3909,3249,1760, 868,1958, 764,1782,2097, 145,2277,3774, 4462, 64,1491,3062, 971,2132,3606,2442, 221,1226,1617, 218, 323,1185,3207,3147, 571, 619,1473,1005,1744,2281, 449,1887,2396,3685, 275, 375,3816,1743,3844,3731, 845,1983,2350,4210,1377, 773, 967,3499,3052,3743,2725,4007,1697,1022,3943,1464, 3264,2855,2722,1952,1029,2839,2467, 84,4383,2215, 820,1391,2015,2448,3672, 377, 1948,2168, 797,2545,3536,2578,2645, 94,2874,1678, 405,1259,3071, 771, 546,1315, 470,1243,3083, 895,2468, 981, 969,2037, 846,4181, 653,1276,2928, 14,2594, 557, 3007,2474, 156, 902,1338,1740,2574, 537,2518, 973,2282,2216,2433,1928, 138,2903, 1293,2631,1612, 646,3457, 839,2935, 111, 496,2191,2847, 589,3186, 149,3994,2060, 4031,2641,4067,3145,1870, 37,3597,2136,1025,2051,3009,3383,3549,1121,1016,3261, 1301, 251,2446,2599,2153, 872,3246, 637, 334,3705, 831, 884, 921,3065,3140,4092, 2198,1944, 246,2964, 108,2045,1152,1921,2308,1031, 203,3173,4170,1907,3890, 810, 1401,2003,1690, 506, 647,1242,2828,1761,1649,3208,2249,1589,3709,2931,5156,1708, 498, 666,2613, 834,3817,1231, 184,2851,1124, 883,3197,2261,3710,1765,1553,2658, 1178,2639,2351, 93,1193, 942,2538,2141,4402, 235,1821, 870,1591,2192,1709,1871, 3341,1618,4126,2595,2334, 603, 651, 69, 701, 268,2662,3411,2555,1380,1606, 503, 448, 254,2371,2646, 574,1187,2309,1770, 322,2235,1292,1801, 305, 566,1133, 229, 2067,2057, 706, 167, 483,2002,2672,3295,1820,3561,3067, 316, 378,2746,3452,1112, 136,1981, 507,1651,2917,1117, 285,4591, 182,2580,3522,1304, 335,3303,1835,2504, 1795,1792,2248, 674,1018,2106,2449,1857,2292,2845, 976,3047,1781,2600,2727,1389, 1281, 52,3152, 153, 265,3950, 672,3485,3951,4463, 430,1183, 365, 278,2169, 27, 1407,1336,2304, 209,1340,1730,2202,1852,2403,2883, 979,1737,1062, 631,2829,2542, 3876,2592, 825,2086,2226,3048,3625, 352,1417,3724, 542, 991, 431,1351,3938,1861, 2294, 826,1361,2927,3142,3503,1738, 463,2462,2723, 582,1916,1595,2808, 400,3845, 3891,2868,3621,2254, 58,2492,1123, 910,2160,2614,1372,1603,1196,1072,3385,1700, 3267,1980, 696, 480,2430, 920, 799,1570,2920,1951,2041,4047,2540,1321,4223,2469, 3562,2228,1271,2602, 401,2833,3351,2575,5157, 907,2312,1256, 410, 263,3507,1582, 996, 678,1849,2316,1480, 908,3545,2237, 703,2322, 667,1826,2849,1531,2604,2999, 2407,3146,2151,2630,1786,3711, 469,3542, 497,3899,2409, 858, 837,4446,3393,1274, 786, 620,1845,2001,3311, 484, 308,3367,1204,1815,3691,2332,1532,2557,1842,2020, 2724,1927,2333,4440, 567, 22,1673,2728,4475,1987,1858,1144,1597, 101,1832,3601, 12, 974,3783,4391, 951,1412, 1,3720, 453,4608,4041, 
528,1041,1027,3230,2628, 1129, 875,1051,3291,1203,2262,1069,2860,2799,2149,2615,3278, 144,1758,3040, 31, 475,1680, 366,2685,3184, 311,1642,4008,2466,5036,1593,1493,2809, 216,1420,1668, 233, 304,2128,3284, 232,1429,1768,1040,2008,3407,2740,2967,2543, 242,2133, 778, 1565,2022,2620, 505,2189,2756,1098,2273, 372,1614, 708, 553,2846,2094,2278, 169, 3626,2835,4161, 228,2674,3165, 809,1454,1309, 466,1705,1095, 900,3423, 880,2667, 3751,5258,2317,3109,2571,4317,2766,1503,1342, 866,4447,1118, 63,2076, 314,1881, 1348,1061, 172, 978,3515,1747, 532, 511,3970, 6, 601, 905,2699,3300,1751, 276, 1467,3725,2668, 65,4239,2544,2779,2556,1604, 578,2451,1802, 992,2331,2624,1320, 3446, 713,1513,1013, 103,2786,2447,1661, 886,1702, 916, 654,3574,2031,1556, 751, 2178,2821,2179,1498,1538,2176, 271, 914,2251,2080,1325, 638,1953,2937,3877,2432, 2754, 95,3265,1716, 260,1227,4083, 775, 106,1357,3254, 426,1607, 555,2480, 772, 1985, 244,2546, 474, 495,1046,2611,1851,2061, 71,2089,1675,2590, 742,3758,2843, 3222,1433, 267,2180,2576,2826,2233,2092,3913,2435, 956,1745,3075, 856,2113,1116, 451, 3,1988,2896,1398, 993,2463,1878,2049,1341,2718,2721,2870,2108, 712,2904, 4363,2753,2324, 277,2872,2349,2649, 384, 987, 435, 691,3000, 922, 164,3939, 652, 1500,1184,4153,2482,3373,2165,4848,2335,3775,3508,3154,2806,2830,1554,2102,1664, 2530,1434,2408, 893,1547,2623,3447,2832,2242,2532,3169,2856,3223,2078, 49,3770, 3469, 462, 318, 656,2259,3250,3069, 679,1629,2758, 344,1138,1104,3120,1836,1283, 3115,2154,1437,4448, 934, 759,1999, 794,2862,1038, 533,2560,1722,2342, 855,2626, 1197,1663,4476,3127, 85,4240,2528, 25,1111,1181,3673, 407,3470,4561,2679,2713, 768,1925,2841,3986,1544,1165, 932, 373,1240,2146,1930,2673, 721,4766, 354,4333, 391,2963, 187, 61,3364,1442,1102, 330,1940,1767, 341,3809,4118, 393,2496,2062, 2211, 105, 331, 300, 439, 913,1332, 626, 379,3304,1557, 328, 689,3952, 309,1555, 931, 317,2517,3027, 325, 569, 686,2107,3084, 60,1042,1333,2794, 264,3177,4014, 1628, 258,3712, 7,4464,1176,1043,1778, 683, 114,1975, 78,1492, 383,1886, 510, 386, 645,5291,2891,2069,3305,4138,3867,2939,2603,2493,1935,1066,1848,3588,1015, 1282,1289,4609, 697,1453,3044,2666,3611,1856,2412, 54, 719,1330, 568,3778,2459, 1748, 788, 492, 551,1191,1000, 488,3394,3763, 282,1799, 348,2016,1523,3155,2390, 1049, 382,2019,1788,1170, 729,2968,3523, 897,3926,2785,2938,3292, 350,2319,3238, 1718,1717,2655,3453,3143,4465, 161,2889,2980,2009,1421, 56,1908,1640,2387,2232, 1917,1874,2477,4921, 148, 83,3438, 592,4245,2882,1822,1055, 741, 115,1496,1624, 381,1638,4592,1020, 516,3214, 458, 947,4575,1432, 211,1514,2926,1865,2142, 189, 852,1221,1400,1486, 882,2299,4036, 351, 28,1122, 700,6479,6480,6481,6482,6483, #last 512 ) PKZicc+site-packages/pip/_vendor/chardet/mbcssm.pynu[######################## BEGIN LICENSE BLOCK ######################## # The Original Code is mozilla.org code. # # The Initial Developer of the Original Code is # Netscape Communications Corporation. # Portions created by the Initial Developer are Copyright (C) 1998 # the Initial Developer. All Rights Reserved. # # Contributor(s): # Mark Pilgrim - port to Python # # This library is free software; you can redistribute it and/or # modify it under the terms of the GNU Lesser General Public # License as published by the Free Software Foundation; either # version 2.1 of the License, or (at your option) any later version. # # This library is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. 
See the GNU # Lesser General Public License for more details. # # You should have received a copy of the GNU Lesser General Public # License along with this library; if not, write to the Free Software # Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA # 02110-1301 USA ######################### END LICENSE BLOCK ######################### from .enums import MachineState # BIG5 BIG5_CLS = ( 1,1,1,1,1,1,1,1, # 00 - 07 #allow 0x00 as legal value 1,1,1,1,1,1,0,0, # 08 - 0f 1,1,1,1,1,1,1,1, # 10 - 17 1,1,1,0,1,1,1,1, # 18 - 1f 1,1,1,1,1,1,1,1, # 20 - 27 1,1,1,1,1,1,1,1, # 28 - 2f 1,1,1,1,1,1,1,1, # 30 - 37 1,1,1,1,1,1,1,1, # 38 - 3f 2,2,2,2,2,2,2,2, # 40 - 47 2,2,2,2,2,2,2,2, # 48 - 4f 2,2,2,2,2,2,2,2, # 50 - 57 2,2,2,2,2,2,2,2, # 58 - 5f 2,2,2,2,2,2,2,2, # 60 - 67 2,2,2,2,2,2,2,2, # 68 - 6f 2,2,2,2,2,2,2,2, # 70 - 77 2,2,2,2,2,2,2,1, # 78 - 7f 4,4,4,4,4,4,4,4, # 80 - 87 4,4,4,4,4,4,4,4, # 88 - 8f 4,4,4,4,4,4,4,4, # 90 - 97 4,4,4,4,4,4,4,4, # 98 - 9f 4,3,3,3,3,3,3,3, # a0 - a7 3,3,3,3,3,3,3,3, # a8 - af 3,3,3,3,3,3,3,3, # b0 - b7 3,3,3,3,3,3,3,3, # b8 - bf 3,3,3,3,3,3,3,3, # c0 - c7 3,3,3,3,3,3,3,3, # c8 - cf 3,3,3,3,3,3,3,3, # d0 - d7 3,3,3,3,3,3,3,3, # d8 - df 3,3,3,3,3,3,3,3, # e0 - e7 3,3,3,3,3,3,3,3, # e8 - ef 3,3,3,3,3,3,3,3, # f0 - f7 3,3,3,3,3,3,3,0 # f8 - ff ) BIG5_ST = ( MachineState.ERROR,MachineState.START,MachineState.START, 3,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,#00-07 MachineState.ERROR,MachineState.ERROR,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ERROR,#08-0f MachineState.ERROR,MachineState.START,MachineState.START,MachineState.START,MachineState.START,MachineState.START,MachineState.START,MachineState.START#10-17 ) BIG5_CHAR_LEN_TABLE = (0, 1, 1, 2, 0) BIG5_SM_MODEL = {'class_table': BIG5_CLS, 'class_factor': 5, 'state_table': BIG5_ST, 'char_len_table': BIG5_CHAR_LEN_TABLE, 'name': 'Big5'} # CP949 CP949_CLS = ( 1,1,1,1,1,1,1,1, 1,1,1,1,1,1,0,0, # 00 - 0f 1,1,1,1,1,1,1,1, 1,1,1,0,1,1,1,1, # 10 - 1f 1,1,1,1,1,1,1,1, 1,1,1,1,1,1,1,1, # 20 - 2f 1,1,1,1,1,1,1,1, 1,1,1,1,1,1,1,1, # 30 - 3f 1,4,4,4,4,4,4,4, 4,4,4,4,4,4,4,4, # 40 - 4f 4,4,5,5,5,5,5,5, 5,5,5,1,1,1,1,1, # 50 - 5f 1,5,5,5,5,5,5,5, 5,5,5,5,5,5,5,5, # 60 - 6f 5,5,5,5,5,5,5,5, 5,5,5,1,1,1,1,1, # 70 - 7f 0,6,6,6,6,6,6,6, 6,6,6,6,6,6,6,6, # 80 - 8f 6,6,6,6,6,6,6,6, 6,6,6,6,6,6,6,6, # 90 - 9f 6,7,7,7,7,7,7,7, 7,7,7,7,7,8,8,8, # a0 - af 7,7,7,7,7,7,7,7, 7,7,7,7,7,7,7,7, # b0 - bf 7,7,7,7,7,7,9,2, 2,3,2,2,2,2,2,2, # c0 - cf 2,2,2,2,2,2,2,2, 2,2,2,2,2,2,2,2, # d0 - df 2,2,2,2,2,2,2,2, 2,2,2,2,2,2,2,2, # e0 - ef 2,2,2,2,2,2,2,2, 2,2,2,2,2,2,2,0, # f0 - ff ) CP949_ST = ( #cls= 0 1 2 3 4 5 6 7 8 9 # previous state = MachineState.ERROR,MachineState.START, 3,MachineState.ERROR,MachineState.START,MachineState.START, 4, 5,MachineState.ERROR, 6, # MachineState.START MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR, # MachineState.ERROR MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME, # MachineState.ITS_ME MachineState.ERROR,MachineState.ERROR,MachineState.START,MachineState.START,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.START,MachineState.START,MachineState.START, # 3 
MachineState.ERROR,MachineState.ERROR,MachineState.START,MachineState.START,MachineState.START,MachineState.START,MachineState.START,MachineState.START,MachineState.START,MachineState.START, # 4 MachineState.ERROR,MachineState.START,MachineState.START,MachineState.START,MachineState.START,MachineState.START,MachineState.START,MachineState.START,MachineState.START,MachineState.START, # 5 MachineState.ERROR,MachineState.START,MachineState.START,MachineState.START,MachineState.START,MachineState.ERROR,MachineState.ERROR,MachineState.START,MachineState.START,MachineState.START, # 6 ) CP949_CHAR_LEN_TABLE = (0, 1, 2, 0, 1, 1, 2, 2, 0, 2) CP949_SM_MODEL = {'class_table': CP949_CLS, 'class_factor': 10, 'state_table': CP949_ST, 'char_len_table': CP949_CHAR_LEN_TABLE, 'name': 'CP949'} # EUC-JP EUCJP_CLS = ( 4,4,4,4,4,4,4,4, # 00 - 07 4,4,4,4,4,4,5,5, # 08 - 0f 4,4,4,4,4,4,4,4, # 10 - 17 4,4,4,5,4,4,4,4, # 18 - 1f 4,4,4,4,4,4,4,4, # 20 - 27 4,4,4,4,4,4,4,4, # 28 - 2f 4,4,4,4,4,4,4,4, # 30 - 37 4,4,4,4,4,4,4,4, # 38 - 3f 4,4,4,4,4,4,4,4, # 40 - 47 4,4,4,4,4,4,4,4, # 48 - 4f 4,4,4,4,4,4,4,4, # 50 - 57 4,4,4,4,4,4,4,4, # 58 - 5f 4,4,4,4,4,4,4,4, # 60 - 67 4,4,4,4,4,4,4,4, # 68 - 6f 4,4,4,4,4,4,4,4, # 70 - 77 4,4,4,4,4,4,4,4, # 78 - 7f 5,5,5,5,5,5,5,5, # 80 - 87 5,5,5,5,5,5,1,3, # 88 - 8f 5,5,5,5,5,5,5,5, # 90 - 97 5,5,5,5,5,5,5,5, # 98 - 9f 5,2,2,2,2,2,2,2, # a0 - a7 2,2,2,2,2,2,2,2, # a8 - af 2,2,2,2,2,2,2,2, # b0 - b7 2,2,2,2,2,2,2,2, # b8 - bf 2,2,2,2,2,2,2,2, # c0 - c7 2,2,2,2,2,2,2,2, # c8 - cf 2,2,2,2,2,2,2,2, # d0 - d7 2,2,2,2,2,2,2,2, # d8 - df 0,0,0,0,0,0,0,0, # e0 - e7 0,0,0,0,0,0,0,0, # e8 - ef 0,0,0,0,0,0,0,0, # f0 - f7 0,0,0,0,0,0,0,5 # f8 - ff ) EUCJP_ST = ( 3, 4, 3, 5,MachineState.START,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,#00-07 MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,#08-0f MachineState.ITS_ME,MachineState.ITS_ME,MachineState.START,MachineState.ERROR,MachineState.START,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,#10-17 MachineState.ERROR,MachineState.ERROR,MachineState.START,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR, 3,MachineState.ERROR,#18-1f 3,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.START,MachineState.START,MachineState.START,MachineState.START#20-27 ) EUCJP_CHAR_LEN_TABLE = (2, 2, 2, 3, 1, 0) EUCJP_SM_MODEL = {'class_table': EUCJP_CLS, 'class_factor': 6, 'state_table': EUCJP_ST, 'char_len_table': EUCJP_CHAR_LEN_TABLE, 'name': 'EUC-JP'} # EUC-KR EUCKR_CLS = ( 1,1,1,1,1,1,1,1, # 00 - 07 1,1,1,1,1,1,0,0, # 08 - 0f 1,1,1,1,1,1,1,1, # 10 - 17 1,1,1,0,1,1,1,1, # 18 - 1f 1,1,1,1,1,1,1,1, # 20 - 27 1,1,1,1,1,1,1,1, # 28 - 2f 1,1,1,1,1,1,1,1, # 30 - 37 1,1,1,1,1,1,1,1, # 38 - 3f 1,1,1,1,1,1,1,1, # 40 - 47 1,1,1,1,1,1,1,1, # 48 - 4f 1,1,1,1,1,1,1,1, # 50 - 57 1,1,1,1,1,1,1,1, # 58 - 5f 1,1,1,1,1,1,1,1, # 60 - 67 1,1,1,1,1,1,1,1, # 68 - 6f 1,1,1,1,1,1,1,1, # 70 - 77 1,1,1,1,1,1,1,1, # 78 - 7f 0,0,0,0,0,0,0,0, # 80 - 87 0,0,0,0,0,0,0,0, # 88 - 8f 0,0,0,0,0,0,0,0, # 90 - 97 0,0,0,0,0,0,0,0, # 98 - 9f 0,2,2,2,2,2,2,2, # a0 - a7 2,2,2,2,2,3,3,3, # a8 - af 2,2,2,2,2,2,2,2, # b0 - b7 2,2,2,2,2,2,2,2, # b8 - bf 2,2,2,2,2,2,2,2, # c0 - c7 2,3,2,2,2,2,2,2, # c8 - cf 2,2,2,2,2,2,2,2, # d0 - d7 2,2,2,2,2,2,2,2, # d8 - df 2,2,2,2,2,2,2,2, # e0 - e7 2,2,2,2,2,2,2,2, # e8 - ef 2,2,2,2,2,2,2,2, # f0 - f7 2,2,2,2,2,2,2,0 # f8 - ff ) EUCKR_ST = ( MachineState.ERROR,MachineState.START, 
3,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,#00-07 MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ERROR,MachineState.ERROR,MachineState.START,MachineState.START #08-0f ) EUCKR_CHAR_LEN_TABLE = (0, 1, 2, 0) EUCKR_SM_MODEL = {'class_table': EUCKR_CLS, 'class_factor': 4, 'state_table': EUCKR_ST, 'char_len_table': EUCKR_CHAR_LEN_TABLE, 'name': 'EUC-KR'} # EUC-TW EUCTW_CLS = ( 2,2,2,2,2,2,2,2, # 00 - 07 2,2,2,2,2,2,0,0, # 08 - 0f 2,2,2,2,2,2,2,2, # 10 - 17 2,2,2,0,2,2,2,2, # 18 - 1f 2,2,2,2,2,2,2,2, # 20 - 27 2,2,2,2,2,2,2,2, # 28 - 2f 2,2,2,2,2,2,2,2, # 30 - 37 2,2,2,2,2,2,2,2, # 38 - 3f 2,2,2,2,2,2,2,2, # 40 - 47 2,2,2,2,2,2,2,2, # 48 - 4f 2,2,2,2,2,2,2,2, # 50 - 57 2,2,2,2,2,2,2,2, # 58 - 5f 2,2,2,2,2,2,2,2, # 60 - 67 2,2,2,2,2,2,2,2, # 68 - 6f 2,2,2,2,2,2,2,2, # 70 - 77 2,2,2,2,2,2,2,2, # 78 - 7f 0,0,0,0,0,0,0,0, # 80 - 87 0,0,0,0,0,0,6,0, # 88 - 8f 0,0,0,0,0,0,0,0, # 90 - 97 0,0,0,0,0,0,0,0, # 98 - 9f 0,3,4,4,4,4,4,4, # a0 - a7 5,5,1,1,1,1,1,1, # a8 - af 1,1,1,1,1,1,1,1, # b0 - b7 1,1,1,1,1,1,1,1, # b8 - bf 1,1,3,1,3,3,3,3, # c0 - c7 3,3,3,3,3,3,3,3, # c8 - cf 3,3,3,3,3,3,3,3, # d0 - d7 3,3,3,3,3,3,3,3, # d8 - df 3,3,3,3,3,3,3,3, # e0 - e7 3,3,3,3,3,3,3,3, # e8 - ef 3,3,3,3,3,3,3,3, # f0 - f7 3,3,3,3,3,3,3,0 # f8 - ff ) EUCTW_ST = ( MachineState.ERROR,MachineState.ERROR,MachineState.START, 3, 3, 3, 4,MachineState.ERROR,#00-07 MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ITS_ME,MachineState.ITS_ME,#08-0f MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ERROR,MachineState.START,MachineState.ERROR,#10-17 MachineState.START,MachineState.START,MachineState.START,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,#18-1f 5,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.START,MachineState.ERROR,MachineState.START,MachineState.START,#20-27 MachineState.START,MachineState.ERROR,MachineState.START,MachineState.START,MachineState.START,MachineState.START,MachineState.START,MachineState.START #28-2f ) EUCTW_CHAR_LEN_TABLE = (0, 0, 1, 2, 2, 2, 3) EUCTW_SM_MODEL = {'class_table': EUCTW_CLS, 'class_factor': 7, 'state_table': EUCTW_ST, 'char_len_table': EUCTW_CHAR_LEN_TABLE, 'name': 'x-euc-tw'} # GB2312 GB2312_CLS = ( 1,1,1,1,1,1,1,1, # 00 - 07 1,1,1,1,1,1,0,0, # 08 - 0f 1,1,1,1,1,1,1,1, # 10 - 17 1,1,1,0,1,1,1,1, # 18 - 1f 1,1,1,1,1,1,1,1, # 20 - 27 1,1,1,1,1,1,1,1, # 28 - 2f 3,3,3,3,3,3,3,3, # 30 - 37 3,3,1,1,1,1,1,1, # 38 - 3f 2,2,2,2,2,2,2,2, # 40 - 47 2,2,2,2,2,2,2,2, # 48 - 4f 2,2,2,2,2,2,2,2, # 50 - 57 2,2,2,2,2,2,2,2, # 58 - 5f 2,2,2,2,2,2,2,2, # 60 - 67 2,2,2,2,2,2,2,2, # 68 - 6f 2,2,2,2,2,2,2,2, # 70 - 77 2,2,2,2,2,2,2,4, # 78 - 7f 5,6,6,6,6,6,6,6, # 80 - 87 6,6,6,6,6,6,6,6, # 88 - 8f 6,6,6,6,6,6,6,6, # 90 - 97 6,6,6,6,6,6,6,6, # 98 - 9f 6,6,6,6,6,6,6,6, # a0 - a7 6,6,6,6,6,6,6,6, # a8 - af 6,6,6,6,6,6,6,6, # b0 - b7 6,6,6,6,6,6,6,6, # b8 - bf 6,6,6,6,6,6,6,6, # c0 - c7 6,6,6,6,6,6,6,6, # c8 - cf 6,6,6,6,6,6,6,6, # d0 - d7 6,6,6,6,6,6,6,6, # d8 - df 6,6,6,6,6,6,6,6, # e0 - e7 6,6,6,6,6,6,6,6, # e8 - ef 6,6,6,6,6,6,6,6, # f0 - f7 6,6,6,6,6,6,6,0 # f8 - ff ) GB2312_ST = ( MachineState.ERROR,MachineState.START,MachineState.START,MachineState.START,MachineState.START,MachineState.START, 3,MachineState.ERROR,#00-07 
MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ITS_ME,MachineState.ITS_ME,#08-0f MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ERROR,MachineState.ERROR,MachineState.START,#10-17 4,MachineState.ERROR,MachineState.START,MachineState.START,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,#18-1f MachineState.ERROR,MachineState.ERROR, 5,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ITS_ME,MachineState.ERROR,#20-27 MachineState.ERROR,MachineState.ERROR,MachineState.START,MachineState.START,MachineState.START,MachineState.START,MachineState.START,MachineState.START #28-2f ) # To be accurate, the length of class 6 can be either 2 or 4. # But it is not necessary to discriminate between the two since # it is used for frequency analysis only, and we are validating # each code range there as well. So it is safe to set it to be # 2 here. GB2312_CHAR_LEN_TABLE = (0, 1, 1, 1, 1, 1, 2) GB2312_SM_MODEL = {'class_table': GB2312_CLS, 'class_factor': 7, 'state_table': GB2312_ST, 'char_len_table': GB2312_CHAR_LEN_TABLE, 'name': 'GB2312'} # Shift_JIS SJIS_CLS = ( 1,1,1,1,1,1,1,1, # 00 - 07 1,1,1,1,1,1,0,0, # 08 - 0f 1,1,1,1,1,1,1,1, # 10 - 17 1,1,1,0,1,1,1,1, # 18 - 1f 1,1,1,1,1,1,1,1, # 20 - 27 1,1,1,1,1,1,1,1, # 28 - 2f 1,1,1,1,1,1,1,1, # 30 - 37 1,1,1,1,1,1,1,1, # 38 - 3f 2,2,2,2,2,2,2,2, # 40 - 47 2,2,2,2,2,2,2,2, # 48 - 4f 2,2,2,2,2,2,2,2, # 50 - 57 2,2,2,2,2,2,2,2, # 58 - 5f 2,2,2,2,2,2,2,2, # 60 - 67 2,2,2,2,2,2,2,2, # 68 - 6f 2,2,2,2,2,2,2,2, # 70 - 77 2,2,2,2,2,2,2,1, # 78 - 7f 3,3,3,3,3,2,2,3, # 80 - 87 3,3,3,3,3,3,3,3, # 88 - 8f 3,3,3,3,3,3,3,3, # 90 - 97 3,3,3,3,3,3,3,3, # 98 - 9f #0xa0 is illegal in sjis encoding, but some pages does #contain such byte. We need to be more error forgiven. 
2,2,2,2,2,2,2,2, # a0 - a7 2,2,2,2,2,2,2,2, # a8 - af 2,2,2,2,2,2,2,2, # b0 - b7 2,2,2,2,2,2,2,2, # b8 - bf 2,2,2,2,2,2,2,2, # c0 - c7 2,2,2,2,2,2,2,2, # c8 - cf 2,2,2,2,2,2,2,2, # d0 - d7 2,2,2,2,2,2,2,2, # d8 - df 3,3,3,3,3,3,3,3, # e0 - e7 3,3,3,3,3,4,4,4, # e8 - ef 3,3,3,3,3,3,3,3, # f0 - f7 3,3,3,3,3,0,0,0) # f8 - ff SJIS_ST = ( MachineState.ERROR,MachineState.START,MachineState.START, 3,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,#00-07 MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,#08-0f MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ERROR,MachineState.ERROR,MachineState.START,MachineState.START,MachineState.START,MachineState.START #10-17 ) SJIS_CHAR_LEN_TABLE = (0, 1, 1, 2, 0, 0) SJIS_SM_MODEL = {'class_table': SJIS_CLS, 'class_factor': 6, 'state_table': SJIS_ST, 'char_len_table': SJIS_CHAR_LEN_TABLE, 'name': 'Shift_JIS'} # UCS2-BE UCS2BE_CLS = ( 0,0,0,0,0,0,0,0, # 00 - 07 0,0,1,0,0,2,0,0, # 08 - 0f 0,0,0,0,0,0,0,0, # 10 - 17 0,0,0,3,0,0,0,0, # 18 - 1f 0,0,0,0,0,0,0,0, # 20 - 27 0,3,3,3,3,3,0,0, # 28 - 2f 0,0,0,0,0,0,0,0, # 30 - 37 0,0,0,0,0,0,0,0, # 38 - 3f 0,0,0,0,0,0,0,0, # 40 - 47 0,0,0,0,0,0,0,0, # 48 - 4f 0,0,0,0,0,0,0,0, # 50 - 57 0,0,0,0,0,0,0,0, # 58 - 5f 0,0,0,0,0,0,0,0, # 60 - 67 0,0,0,0,0,0,0,0, # 68 - 6f 0,0,0,0,0,0,0,0, # 70 - 77 0,0,0,0,0,0,0,0, # 78 - 7f 0,0,0,0,0,0,0,0, # 80 - 87 0,0,0,0,0,0,0,0, # 88 - 8f 0,0,0,0,0,0,0,0, # 90 - 97 0,0,0,0,0,0,0,0, # 98 - 9f 0,0,0,0,0,0,0,0, # a0 - a7 0,0,0,0,0,0,0,0, # a8 - af 0,0,0,0,0,0,0,0, # b0 - b7 0,0,0,0,0,0,0,0, # b8 - bf 0,0,0,0,0,0,0,0, # c0 - c7 0,0,0,0,0,0,0,0, # c8 - cf 0,0,0,0,0,0,0,0, # d0 - d7 0,0,0,0,0,0,0,0, # d8 - df 0,0,0,0,0,0,0,0, # e0 - e7 0,0,0,0,0,0,0,0, # e8 - ef 0,0,0,0,0,0,0,0, # f0 - f7 0,0,0,0,0,0,4,5 # f8 - ff ) UCS2BE_ST = ( 5, 7, 7,MachineState.ERROR, 4, 3,MachineState.ERROR,MachineState.ERROR,#00-07 MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,#08-0f MachineState.ITS_ME,MachineState.ITS_ME, 6, 6, 6, 6,MachineState.ERROR,MachineState.ERROR,#10-17 6, 6, 6, 6, 6,MachineState.ITS_ME, 6, 6,#18-1f 6, 6, 6, 6, 5, 7, 7,MachineState.ERROR,#20-27 5, 8, 6, 6,MachineState.ERROR, 6, 6, 6,#28-2f 6, 6, 6, 6,MachineState.ERROR,MachineState.ERROR,MachineState.START,MachineState.START #30-37 ) UCS2BE_CHAR_LEN_TABLE = (2, 2, 2, 0, 2, 2) UCS2BE_SM_MODEL = {'class_table': UCS2BE_CLS, 'class_factor': 6, 'state_table': UCS2BE_ST, 'char_len_table': UCS2BE_CHAR_LEN_TABLE, 'name': 'UTF-16BE'} # UCS2-LE UCS2LE_CLS = ( 0,0,0,0,0,0,0,0, # 00 - 07 0,0,1,0,0,2,0,0, # 08 - 0f 0,0,0,0,0,0,0,0, # 10 - 17 0,0,0,3,0,0,0,0, # 18 - 1f 0,0,0,0,0,0,0,0, # 20 - 27 0,3,3,3,3,3,0,0, # 28 - 2f 0,0,0,0,0,0,0,0, # 30 - 37 0,0,0,0,0,0,0,0, # 38 - 3f 0,0,0,0,0,0,0,0, # 40 - 47 0,0,0,0,0,0,0,0, # 48 - 4f 0,0,0,0,0,0,0,0, # 50 - 57 0,0,0,0,0,0,0,0, # 58 - 5f 0,0,0,0,0,0,0,0, # 60 - 67 0,0,0,0,0,0,0,0, # 68 - 6f 0,0,0,0,0,0,0,0, # 70 - 77 0,0,0,0,0,0,0,0, # 78 - 7f 0,0,0,0,0,0,0,0, # 80 - 87 0,0,0,0,0,0,0,0, # 88 - 8f 0,0,0,0,0,0,0,0, # 90 - 97 0,0,0,0,0,0,0,0, # 98 - 9f 0,0,0,0,0,0,0,0, # a0 - a7 0,0,0,0,0,0,0,0, # a8 - af 0,0,0,0,0,0,0,0, # b0 - b7 0,0,0,0,0,0,0,0, # b8 - bf 0,0,0,0,0,0,0,0, # c0 - c7 0,0,0,0,0,0,0,0, # c8 - cf 0,0,0,0,0,0,0,0, # d0 - d7 0,0,0,0,0,0,0,0, # d8 - df 0,0,0,0,0,0,0,0, # e0 - e7 0,0,0,0,0,0,0,0, # e8 - ef 0,0,0,0,0,0,0,0, # f0 - f7 0,0,0,0,0,0,4,5 # f8 - ff ) 
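# ---------------------------------------------------------------------------
# Illustrative sketch (not part of the original module): each *_SM_MODEL
# dict in this file is consumed by chardet's CodingStateMachine, which
# classifies every input byte through 'class_table' and then indexes the
# flattened 'state_table' (one row of 'class_factor' entries per state).
# The helper name below is hypothetical; it only mirrors that lookup step.
def _demo_next_state(sm_model, curr_state, byte_value):
    # Map the raw byte to its character class, then look up the transition.
    byte_class = sm_model['class_table'][byte_value]
    return sm_model['state_table'][curr_state * sm_model['class_factor'] + byte_class]
# Example: _demo_next_state(BIG5_SM_MODEL, MachineState.START, 0xA4) moves
# the Big5 machine from START into the state that expects a trail byte.
# ---------------------------------------------------------------------------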
UCS2LE_ST = ( 6, 6, 7, 6, 4, 3,MachineState.ERROR,MachineState.ERROR,#00-07 MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,#08-0f MachineState.ITS_ME,MachineState.ITS_ME, 5, 5, 5,MachineState.ERROR,MachineState.ITS_ME,MachineState.ERROR,#10-17 5, 5, 5,MachineState.ERROR, 5,MachineState.ERROR, 6, 6,#18-1f 7, 6, 8, 8, 5, 5, 5,MachineState.ERROR,#20-27 5, 5, 5,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR, 5, 5,#28-2f 5, 5, 5,MachineState.ERROR, 5,MachineState.ERROR,MachineState.START,MachineState.START #30-37 ) UCS2LE_CHAR_LEN_TABLE = (2, 2, 2, 2, 2, 2) UCS2LE_SM_MODEL = {'class_table': UCS2LE_CLS, 'class_factor': 6, 'state_table': UCS2LE_ST, 'char_len_table': UCS2LE_CHAR_LEN_TABLE, 'name': 'UTF-16LE'} # UTF-8 UTF8_CLS = ( 1,1,1,1,1,1,1,1, # 00 - 07 #allow 0x00 as a legal value 1,1,1,1,1,1,0,0, # 08 - 0f 1,1,1,1,1,1,1,1, # 10 - 17 1,1,1,0,1,1,1,1, # 18 - 1f 1,1,1,1,1,1,1,1, # 20 - 27 1,1,1,1,1,1,1,1, # 28 - 2f 1,1,1,1,1,1,1,1, # 30 - 37 1,1,1,1,1,1,1,1, # 38 - 3f 1,1,1,1,1,1,1,1, # 40 - 47 1,1,1,1,1,1,1,1, # 48 - 4f 1,1,1,1,1,1,1,1, # 50 - 57 1,1,1,1,1,1,1,1, # 58 - 5f 1,1,1,1,1,1,1,1, # 60 - 67 1,1,1,1,1,1,1,1, # 68 - 6f 1,1,1,1,1,1,1,1, # 70 - 77 1,1,1,1,1,1,1,1, # 78 - 7f 2,2,2,2,3,3,3,3, # 80 - 87 4,4,4,4,4,4,4,4, # 88 - 8f 4,4,4,4,4,4,4,4, # 90 - 97 4,4,4,4,4,4,4,4, # 98 - 9f 5,5,5,5,5,5,5,5, # a0 - a7 5,5,5,5,5,5,5,5, # a8 - af 5,5,5,5,5,5,5,5, # b0 - b7 5,5,5,5,5,5,5,5, # b8 - bf 0,0,6,6,6,6,6,6, # c0 - c7 6,6,6,6,6,6,6,6, # c8 - cf 6,6,6,6,6,6,6,6, # d0 - d7 6,6,6,6,6,6,6,6, # d8 - df 7,8,8,8,8,8,8,8, # e0 - e7 8,8,8,8,8,9,8,8, # e8 - ef 10,11,11,11,11,11,11,11, # f0 - f7 12,13,13,13,14,15,0,0 # f8 - ff ) UTF8_ST = ( MachineState.ERROR,MachineState.START,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR, 12, 10,#00-07 9, 11, 8, 7, 6, 5, 4, 3,#08-0f MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,#10-17 MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,#18-1f MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,#20-27 MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,#28-2f MachineState.ERROR,MachineState.ERROR, 5, 5, 5, 5,MachineState.ERROR,MachineState.ERROR,#30-37 MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,#38-3f MachineState.ERROR,MachineState.ERROR,MachineState.ERROR, 5, 5, 5,MachineState.ERROR,MachineState.ERROR,#40-47 MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,#48-4f MachineState.ERROR,MachineState.ERROR, 7, 7, 7, 7,MachineState.ERROR,MachineState.ERROR,#50-57 MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,#58-5f MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR, 7, 7,MachineState.ERROR,MachineState.ERROR,#60-67 
MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,#68-6f MachineState.ERROR,MachineState.ERROR, 9, 9, 9, 9,MachineState.ERROR,MachineState.ERROR,#70-77 MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,#78-7f MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR, 9,MachineState.ERROR,MachineState.ERROR,#80-87 MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,#88-8f MachineState.ERROR,MachineState.ERROR, 12, 12, 12, 12,MachineState.ERROR,MachineState.ERROR,#90-97 MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,#98-9f MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR, 12,MachineState.ERROR,MachineState.ERROR,#a0-a7 MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,#a8-af MachineState.ERROR,MachineState.ERROR, 12, 12, 12,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,#b0-b7 MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,#b8-bf MachineState.ERROR,MachineState.ERROR,MachineState.START,MachineState.START,MachineState.START,MachineState.START,MachineState.ERROR,MachineState.ERROR,#c0-c7 MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR #c8-cf ) UTF8_CHAR_LEN_TABLE = (0, 1, 0, 0, 0, 0, 2, 3, 3, 3, 4, 4, 5, 5, 6, 6) UTF8_SM_MODEL = {'class_table': UTF8_CLS, 'class_factor': 16, 'state_table': UTF8_ST, 'char_len_table': UTF8_CHAR_LEN_TABLE, 'name': 'UTF-8'} PKZU885site-packages/pip/_vendor/chardet/mbcsgroupprober.pycnu[ abc@sddlmZddlmZddlmZddlmZddlm Z ddl m Z ddl m Z ddlmZdd lmZd efd YZd S( i(tCharSetGroupProber(t UTF8Prober(t SJISProber(t EUCJPProber(t GB2312Prober(t EUCKRProber(t CP949Prober(t Big5Prober(t EUCTWProbertMBCSGroupProbercBseZddZRS(cCs`tt|jd|ttttttt t g|_ |j dS(Nt lang_filter( tsuperR t__init__RRRRRRRRtproberstreset(tselfR ((sG/usr/lib/python2.7/site-packages/pip/_vendor/chardet/mbcsgroupprober.pyR *sN(t__name__t __module__tNoneR (((sG/usr/lib/python2.7/site-packages/pip/_vendor/chardet/mbcsgroupprober.pyR )sN(tcharsetgroupproberRt utf8proberRt sjisproberRt eucjpproberRt gb2312proberRt euckrproberRt cp949proberRt big5proberRt euctwproberRR (((sG/usr/lib/python2.7/site-packages/pip/_vendor/chardet/mbcsgroupprober.pytsPKZ '.site-packages/pip/_vendor/chardet/__init__.pycnu[ abc@sIddlmZmZddlmZddlmZmZdZdS(i(tPY2tPY3(tUniversalDetector(t __version__tVERSIONcCskt|tsKt|ts<tdjt|qKt|}nt}|j||jS(s Detect the encoding of the given byte string. :param byte_str: The byte sequence to examine. 
site-packages/pip/_vendor/chardet/cli/chardetect.py
#!/usr/bin/env python
"""
Script which takes one or more file paths and reports on their detected
encodings

Example::

    % chardetect somefile someotherfile
    somefile: windows-1252 with confidence 0.5
    someotherfile: ascii with confidence 1.0

If no paths are provided, it takes its input from stdin.
""" from __future__ import absolute_import, print_function, unicode_literals import argparse import sys from chardet import __version__ from chardet.compat import PY2 from chardet.universaldetector import UniversalDetector def description_of(lines, name='stdin'): """ Return a string describing the probable encoding of a file or list of strings. :param lines: The lines to get the encoding of. :type lines: Iterable of bytes :param name: Name of file or collection of lines :type name: str """ u = UniversalDetector() for line in lines: line = bytearray(line) u.feed(line) # shortcut out of the loop to save reading further - particularly useful if we read a BOM. if u.done: break u.close() result = u.result if PY2: name = name.decode(sys.getfilesystemencoding(), 'ignore') if result['encoding']: return '{0}: {1} with confidence {2}'.format(name, result['encoding'], result['confidence']) else: return '{0}: no result'.format(name) def main(argv=None): """ Handles command line arguments and gets things started. :param argv: List of arguments, as if specified on the command-line. If None, ``sys.argv[1:]`` is used instead. :type argv: list of str """ # Get command line arguments parser = argparse.ArgumentParser( description="Takes one or more file paths and reports their detected \ encodings") parser.add_argument('input', help='File whose encoding we would like to determine. \ (default: stdin)', type=argparse.FileType('rb'), nargs='*', default=[sys.stdin if PY2 else sys.stdin.buffer]) parser.add_argument('--version', action='version', version='%(prog)s {0}'.format(__version__)) args = parser.parse_args(argv) for f in args.input: if f.isatty(): print("You are running chardetect interactively. Press " + "CTRL-D twice at the start of a blank line to signal the " + "end of your input. If you want help, run chardetect " + "--help\n", file=sys.stderr) print(description_of(f, f.name)) if __name__ == '__main__': main() PKZ21site-packages/pip/_vendor/chardet/cli/__init__.pynu[ PKZ AR R 4site-packages/pip/_vendor/chardet/cli/chardetect.pyonu[ abc@@sdZddlmZmZmZddlZddlZddlmZddl m Z ddl m Z ddZ dd Zed krendS( u Script which takes one or more file paths and reports on their detected encodings Example:: % chardetect somefile someotherfile somefile: windows-1252 with confidence 0.5 someotherfile: ascii with confidence 1.0 If no paths are provided, it takes its input from stdin. i(tabsolute_importtprint_functiontunicode_literalsN(t __version__(tPY2(tUniversalDetectorustdincC@st}x4|D],}t|}|j||jrPqqW|j|j}trt|jtj d}n|drdj ||d|dSdj |SdS(u Return a string describing the probable encoding of a file or list of strings. :param lines: The lines to get the encoding of. :type lines: Iterable of bytes :param name: Name of file or collection of lines :type name: str uignoreuencodingu{0}: {1} with confidence {2}u confidenceu{0}: no resultN( Rt bytearraytfeedtdonetclosetresultRtdecodetsystgetfilesystemencodingtformat(tlinestnametutlineR ((sF/usr/lib/python2.7/site-packages/pip/_vendor/chardet/cli/chardetect.pytdescription_ofs         c C@stjdd}|jddddtjddd d trHtjn tjjg|jd d d ddjt |j |}xU|j D]J}|j rt dddddtjnt t||jqWdS(u Handles command line arguments and gets things started. :param argv: List of arguments, as if specified on the command-line. If None, ``sys.argv[1:]`` is used instead. :type argv: list of str t descriptionuVTakes one or more file paths and reports their detected encodingsuinputthelpu^File whose encoding we would like to determine. 
MachineStateiiiit class_tableit class_factort state_tabletchar_len_tabletBig5tnameiiii i tCP949sEUC-JPsEUC-KRsx-euc-twtGB2312t Shift_JISsUTF-16BEsUTF-16LEi i i iiisUTF-8N(iiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiii(iiiii(iiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiii iiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiii( iiiiiiiiii(iiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiii(iiiiii(iiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiii(iiii(iiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiii(iiiiiii(iiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiii(iiiiiii(iiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiii(iiiiii(iiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiii(iiiiii(iiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiii(iiiiii(iiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiii iii i i i i i i i i i i i iiii(iiiiiiiiiiiiiiii(-tenumsRtBIG5_CLStERRORtSTARTtITS_MEtBIG5_STtBIG5_CHAR_LEN_TABLEt BIG5_SM_MODELt CP949_CLStCP949_STtCP949_CHAR_LEN_TABLEtCP949_SM_MODELt EUCJP_CLStEUCJP_STtEUCJP_CHAR_LEN_TABLEtEUCJP_SM_MODELt EUCKR_CLStEUCKR_STtEUCKR_CHAR_LEN_TABLEtEUCKR_SM_MODELt EUCTW_CLStEUCTW_STtEUCTW_CHAR_LEN_TABLEtEUCTW_SM_MODELt GB2312_CLSt GB2312_STtGB2312_CHAR_LEN_TABLEtGB2312_SM_MODELtSJIS_CLStSJIS_STtSJIS_CHAR_LEN_TABLEt SJIS_SM_MODELt UCS2BE_CLSt UCS2BE_STtUCS2BE_CHAR_LEN_TABLEtUCS2BE_SM_MODELt UCS2LE_CLSt UCS2LE_STtUCS2LE_CHAR_LEN_TABLEtUCS2LE_SM_MODELtUTF8_CLStUTF8_STtUTF8_CHAR_LEN_TABLEt UTF8_SM_MODEL(((s>/usr/lib/python2.7/site-packages/pip/_vendor/chardet/mbcssm.pytsh-06  0<<<<<B  $00-3  -6  $000-6  -00--6  -06  !0$*  
0'!*  *0000$0'0$0*0$0-0$0-0'006 PKZ2d^+^+5site-packages/pip/_vendor/chardet/langturkishmodel.pynu[# -*- coding: utf-8 -*- ######################## BEGIN LICENSE BLOCK ######################## # The Original Code is Mozilla Communicator client code. # # The Initial Developer of the Original Code is # Netscape Communications Corporation. # Portions created by the Initial Developer are Copyright (C) 1998 # the Initial Developer. All Rights Reserved. # # Contributor(s): # Mark Pilgrim - port to Python # Özgür Baskın - Turkish Language Model # # This library is free software; you can redistribute it and/or # modify it under the terms of the GNU Lesser General Public # License as published by the Free Software Foundation; either # version 2.1 of the License, or (at your option) any later version. # # This library is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU # Lesser General Public License for more details. # # You should have received a copy of the GNU Lesser General Public # License along with this library; if not, write to the Free Software # Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA # 02110-1301 USA ######################### END LICENSE BLOCK ######################### # 255: Control characters that usually does not exist in any text # 254: Carriage/Return # 253: symbol (punctuation) that does not belong to word # 252: 0 - 9 # Character Mapping Table: Latin5_TurkishCharToOrderMap = ( 255,255,255,255,255,255,255,255,255,255,255,255,255,255,255,255, 255,255,255,255,255,255,255,255,255,255,255,255,255,255,255,255, 255,255,255,255,255,255,255,255,255,255,255,255,255,255,255,255, 255,255,255,255,255,255,255,255,255,255,255,255,255,255,255,255, 255, 23, 37, 47, 39, 29, 52, 36, 45, 53, 60, 16, 49, 20, 46, 42, 48, 69, 44, 35, 31, 51, 38, 62, 65, 43, 56,255,255,255,255,255, 255, 1, 21, 28, 12, 2, 18, 27, 25, 3, 24, 10, 5, 13, 4, 15, 26, 64, 7, 8, 9, 14, 32, 57, 58, 11, 22,255,255,255,255,255, 180,179,178,177,176,175,174,173,172,171,170,169,168,167,166,165, 164,163,162,161,160,159,101,158,157,156,155,154,153,152,151,106, 150,149,148,147,146,145,144,100,143,142,141,140,139,138,137,136, 94, 80, 93,135,105,134,133, 63,132,131,130,129,128,127,126,125, 124,104, 73, 99, 79, 85,123, 54,122, 98, 92,121,120, 91,103,119, 68,118,117, 97,116,115, 50, 90,114,113,112,111, 55, 41, 40, 86, 89, 70, 59, 78, 71, 82, 88, 33, 77, 66, 84, 83,110, 75, 61, 96, 30, 67,109, 74, 87,102, 34, 95, 81,108, 76, 72, 17, 6, 19,107, ) TurkishLangModel = ( 3,2,3,3,3,1,3,3,3,3,3,3,3,3,2,1,1,3,3,1,3,3,0,3,3,3,3,3,0,3,1,3, 3,2,1,0,0,1,1,0,0,0,1,0,0,1,1,1,1,0,0,0,0,0,0,0,2,2,0,0,1,0,0,1, 3,2,2,3,3,0,3,3,3,3,3,3,3,2,3,1,0,3,3,1,3,3,0,3,3,3,3,3,0,3,0,3, 3,1,1,0,1,0,1,0,0,0,0,0,0,1,1,1,1,0,0,0,0,0,0,0,2,2,0,0,0,1,0,1, 3,3,2,3,3,0,3,3,3,3,3,3,3,2,3,1,1,3,3,0,3,3,1,2,3,3,3,3,0,3,0,3, 3,1,1,0,0,0,1,0,0,0,0,1,1,0,1,2,1,0,0,0,1,0,0,0,0,2,0,0,0,0,0,1, 3,3,3,3,3,3,2,3,3,3,3,3,3,3,3,1,3,3,2,0,3,2,1,2,2,1,3,3,0,0,0,2, 2,2,0,1,0,0,1,0,0,1,1,0,0,0,0,0,0,0,0,0,0,0,0,1,0,1,1,0,1,0,0,1, 3,3,3,2,3,3,1,2,3,3,3,3,3,3,3,1,3,2,1,0,3,2,0,1,2,3,3,2,1,0,0,2, 2,1,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,2,0,2,0,0,0, 1,0,1,3,3,1,3,3,3,3,3,3,3,1,2,0,0,2,3,0,2,3,0,0,2,2,2,3,0,3,0,1, 2,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,0,3,3,3,0,3,2,0,2,3,2,3,3,1,0,0,2, 3,2,0,0,1,0,0,0,0,0,0,2,0,0,1,0,0,0,0,0,0,0,0,0,1,1,1,0,2,0,0,1, 
3,3,3,2,3,3,2,3,3,3,3,2,3,3,3,0,3,3,0,0,2,1,0,0,2,3,2,2,0,0,0,2, 2,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,1,0,0,0,1,0,1,0,2,0,0,1, 3,3,3,2,3,3,3,3,3,3,3,2,3,3,3,0,3,2,0,1,3,2,1,1,3,2,3,2,1,0,0,2, 2,2,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,1,0,0,0,0,0, 3,3,3,2,3,3,3,3,3,3,3,2,3,3,3,0,3,2,2,0,2,3,0,0,2,2,2,2,0,0,0,2, 3,3,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,2,0,1,0,0,0, 3,3,3,3,3,3,3,2,2,2,2,3,2,3,3,0,3,3,1,1,2,2,0,0,2,2,3,2,0,0,1,3, 0,3,1,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,1,0,0,0,0,1, 3,3,3,2,3,3,3,2,1,2,2,3,2,3,3,0,3,2,0,0,1,1,0,1,1,2,1,2,0,0,0,1, 0,3,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,1,0,1,0,1,0,0,0, 3,3,3,2,3,3,2,3,2,2,2,3,3,3,3,1,3,1,1,0,3,2,1,1,3,3,2,3,1,0,0,1, 1,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,2,0,0,1, 3,2,2,3,3,0,3,3,3,3,3,3,3,2,2,1,0,3,3,1,3,3,0,1,3,3,2,3,0,3,0,3, 2,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0, 2,2,2,3,3,0,3,3,3,3,3,3,3,3,3,0,0,3,2,0,3,3,0,3,2,3,3,3,0,3,1,3, 2,0,0,0,0,0,0,0,0,0,0,1,0,1,2,0,1,0,0,0,0,0,0,0,2,2,0,0,1,0,0,1, 3,3,3,1,2,3,3,1,0,0,1,0,0,3,3,2,3,0,0,2,0,0,2,0,2,0,0,0,2,0,2,0, 0,3,1,0,1,0,0,0,2,2,1,0,1,1,2,1,2,2,2,0,2,1,1,0,0,0,2,0,0,0,0,0, 1,2,1,3,3,0,3,3,3,3,3,2,3,0,0,0,0,2,3,0,2,3,1,0,2,3,1,3,0,3,0,2, 3,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 3,3,3,1,3,3,2,2,3,2,2,0,1,2,3,0,1,2,1,0,1,0,0,0,1,0,2,2,0,0,0,1, 1,1,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,1,1,0,0,1,0,0,0, 3,3,3,1,3,3,1,1,3,3,1,1,3,3,1,0,2,1,2,0,2,1,0,0,1,1,2,1,0,0,0,2, 2,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 3,3,3,1,0,2,1,3,0,0,2,0,0,3,3,0,3,0,0,1,0,1,2,0,0,1,1,2,2,0,1,0, 0,1,2,1,1,0,1,0,1,1,1,1,1,0,1,1,1,2,2,1,2,0,1,0,0,0,0,0,0,1,0,0, 3,3,3,2,3,2,3,3,0,2,2,2,3,3,3,0,3,0,0,0,2,2,0,1,2,1,1,1,0,0,0,1, 0,3,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0, 3,3,3,3,3,3,2,1,2,2,3,3,3,3,2,0,2,0,0,0,2,2,0,0,2,1,3,3,0,0,1,1, 1,1,0,0,1,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,1,0,0,0, 1,1,2,3,3,0,3,3,3,3,3,3,2,2,0,2,0,2,3,2,3,2,2,2,2,2,2,2,1,3,2,3, 2,0,2,1,2,2,2,2,1,1,2,2,1,2,2,1,2,0,0,2,1,1,0,2,1,0,0,1,0,0,0,1, 2,3,3,1,1,1,0,1,1,1,2,3,2,1,1,0,0,0,0,0,0,0,0,0,0,1,0,1,0,0,0,0, 0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 3,3,3,2,2,2,3,2,3,2,2,1,3,3,3,0,2,1,2,0,2,1,0,0,1,1,1,1,1,0,0,1, 2,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,1,0,2,0,1,0,0,0, 3,3,3,2,3,3,3,3,3,2,3,1,2,3,3,1,2,0,0,0,0,0,0,0,3,2,1,1,0,0,0,0, 2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0, 3,3,3,2,2,3,3,2,1,1,1,1,1,3,3,0,3,1,0,0,1,1,0,0,3,1,2,1,0,0,0,0, 0,3,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0, 3,3,3,2,2,3,2,2,2,3,2,1,1,3,3,0,3,0,0,0,0,1,0,0,3,1,1,2,0,0,0,1, 1,0,0,1,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1, 1,1,1,3,3,0,3,3,3,3,3,2,2,2,1,2,0,2,1,2,2,1,1,0,1,2,2,2,2,2,2,2, 0,0,2,1,2,1,2,1,0,1,1,3,1,2,1,1,2,0,0,2,0,1,0,1,0,1,0,0,0,1,0,1, 3,3,3,1,3,3,3,0,1,1,0,2,2,3,1,0,3,0,0,0,1,0,0,0,1,0,0,1,0,1,0,0, 1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 3,3,2,0,0,2,2,1,0,0,1,0,0,3,3,1,3,0,0,1,1,0,2,0,3,0,0,0,2,0,1,1, 0,1,2,0,1,2,2,0,2,2,2,2,1,0,2,1,1,0,2,0,2,1,2,0,0,0,0,0,0,0,0,0, 3,3,3,1,3,2,3,2,0,2,2,2,1,3,2,0,2,1,2,0,1,2,0,0,1,0,2,2,0,0,0,2, 1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,1,0,1,0,0,0, 3,3,3,0,3,3,1,1,2,3,1,0,3,2,3,0,3,0,0,0,1,0,0,0,1,0,1,0,0,0,0,0, 1,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,3,3,0,3,3,2,3,3,2,2,0,0,0,0,1,2,0,1,3,0,0,0,3,1,1,0,3,0,2, 2,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 
3,3,3,1,2,2,1,0,3,1,1,1,1,3,3,2,3,0,0,1,0,1,2,0,2,2,0,2,2,0,2,1, 0,2,2,1,1,1,1,0,2,1,1,0,1,1,1,1,2,1,2,1,2,0,1,0,1,0,0,0,0,0,0,0, 3,3,3,0,1,1,3,0,0,1,1,0,0,2,2,0,3,0,0,1,1,0,1,0,0,0,0,0,2,0,0,0, 0,3,1,0,1,0,1,0,2,0,0,1,0,1,0,1,1,1,2,1,1,0,2,0,0,0,0,0,0,0,0,0, 3,3,3,0,2,0,2,0,1,1,1,0,0,3,3,0,2,0,0,1,0,0,2,1,1,0,1,0,1,0,1,0, 0,2,0,1,2,0,2,0,2,1,1,0,1,0,2,1,1,0,2,1,1,0,1,0,0,0,1,1,0,0,0,0, 3,2,3,0,1,0,0,0,0,0,0,0,0,1,2,0,1,0,0,1,0,0,1,0,0,0,0,0,2,0,0,0, 0,0,1,1,0,0,1,0,1,0,0,1,0,0,0,2,1,0,1,0,2,0,0,0,0,0,0,0,0,0,0,0, 3,3,3,0,0,2,3,0,0,1,0,1,0,2,3,2,3,0,0,1,3,0,2,1,0,0,0,0,2,0,1,0, 0,2,1,0,0,1,1,0,2,1,0,0,1,0,0,1,1,0,1,1,2,0,1,0,0,0,0,1,0,0,0,0, 3,2,2,0,0,1,1,0,0,0,0,0,0,3,1,1,1,0,0,0,0,0,1,0,0,0,0,0,2,0,1,0, 0,1,0,0,0,0,0,0,1,0,0,0,0,0,0,0,1,0,0,0,1,0,1,0,0,0,0,0,0,0,0,0, 0,0,0,3,3,0,2,3,2,2,1,2,2,1,1,2,0,1,3,2,2,2,0,0,2,2,0,0,0,1,2,1, 3,0,2,1,1,0,1,1,1,0,1,2,2,2,1,1,2,0,0,0,0,1,0,1,1,0,0,0,0,0,0,0, 0,1,1,2,3,0,3,3,3,2,2,2,2,1,0,1,0,1,0,1,2,2,0,0,2,2,1,3,1,1,2,1, 0,0,1,1,2,0,1,1,0,0,1,2,0,2,1,1,2,0,0,1,0,0,0,1,0,1,0,1,0,0,0,0, 3,3,2,0,0,3,1,0,0,0,0,0,0,3,2,1,2,0,0,1,0,0,2,0,0,0,0,0,2,0,1,0, 0,2,1,1,0,0,1,0,1,2,0,0,1,1,0,0,2,1,1,1,1,0,2,0,0,0,0,0,0,0,0,0, 3,3,2,0,0,1,0,0,0,0,1,0,0,3,3,2,2,0,0,1,0,0,2,0,1,0,0,0,2,0,1,0, 0,0,1,1,0,0,2,0,2,1,0,0,1,1,2,1,2,0,2,1,2,1,1,1,0,0,1,1,0,0,0,0, 3,3,2,0,0,2,2,0,0,0,1,1,0,2,2,1,3,1,0,1,0,1,2,0,0,0,0,0,1,0,1,0, 0,1,1,0,0,0,0,0,1,0,0,1,0,0,0,1,1,0,1,0,1,0,0,0,0,0,0,0,0,0,0,0, 3,3,3,2,0,0,0,1,0,0,1,0,0,2,3,1,2,0,0,1,0,0,2,0,0,0,1,0,2,0,2,0, 0,1,1,2,2,1,2,0,2,1,1,0,0,1,1,0,1,1,1,1,2,1,1,0,0,0,0,0,0,0,0,0, 3,3,3,0,2,1,2,1,0,0,1,1,0,3,3,1,2,0,0,1,0,0,2,0,2,0,1,1,2,0,0,0, 0,0,1,1,1,1,2,0,1,1,0,1,1,1,1,0,0,0,1,1,1,0,1,0,0,0,1,0,0,0,0,0, 3,3,3,0,2,2,3,2,0,0,1,0,0,2,3,1,0,0,0,0,0,0,2,0,2,0,0,0,2,0,0,0, 0,1,1,0,0,0,1,0,0,1,0,1,1,0,1,0,1,1,1,0,1,0,0,0,0,0,0,0,0,0,0,0, 3,2,3,0,0,0,0,0,0,0,1,0,0,2,2,2,2,0,0,1,0,0,2,0,0,0,0,0,2,0,1,0, 0,0,2,1,1,0,1,0,2,1,1,0,0,1,1,2,1,0,2,0,2,0,1,0,0,0,2,0,0,0,0,0, 0,0,0,2,2,0,2,1,1,1,1,2,2,0,0,1,0,1,0,0,1,3,0,0,0,0,1,0,0,2,1,0, 0,0,1,0,1,0,0,0,0,0,2,1,0,1,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0, 2,0,0,2,3,0,2,3,1,2,2,0,2,0,0,2,0,2,1,1,1,2,1,0,0,1,2,1,1,2,1,0, 1,0,2,0,1,0,1,1,0,0,2,2,1,2,1,1,2,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0, 3,3,3,0,2,1,2,0,0,0,1,0,0,3,2,0,1,0,0,1,0,0,2,0,0,0,1,2,1,0,1,0, 0,0,0,0,1,0,1,0,0,1,0,0,0,0,1,0,1,0,1,1,1,0,1,0,0,0,0,0,0,0,0,0, 0,0,0,2,2,0,2,2,1,1,0,1,1,1,1,1,0,0,1,2,1,1,1,0,1,0,0,0,1,1,1,1, 0,0,2,1,0,1,1,1,0,1,1,2,1,2,1,1,2,0,1,1,2,1,0,2,0,0,0,0,0,0,0,0, 3,2,2,0,0,2,0,0,0,0,0,0,0,2,2,0,2,0,0,1,0,0,2,0,0,0,0,0,2,0,0,0, 0,2,1,0,0,0,0,0,1,0,0,0,0,0,0,0,1,0,0,0,1,0,1,0,0,0,0,0,0,0,0,0, 0,0,0,3,2,0,2,2,0,1,1,0,1,0,0,1,0,0,0,1,0,1,0,0,0,0,0,1,0,0,0,0, 2,0,1,0,1,0,1,1,0,0,1,2,0,1,0,1,1,0,0,1,0,1,0,2,0,0,0,0,0,0,0,0, 2,2,2,0,1,1,0,0,0,1,0,0,0,1,2,0,1,0,0,1,0,0,1,0,0,0,0,1,2,0,1,0, 0,0,1,0,0,0,1,0,0,1,0,0,0,0,0,0,1,0,1,0,2,0,0,0,0,0,0,0,0,0,0,0, 2,2,2,2,1,0,1,1,1,0,0,0,0,1,2,0,0,1,0,0,0,1,0,0,1,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0, 1,1,2,0,1,0,0,0,1,0,1,0,0,0,1,0,0,1,0,0,0,0,0,0,0,1,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,2,0,0,0,0,0,1, 0,0,1,2,2,0,2,1,2,1,1,2,2,0,0,0,0,1,0,0,1,1,0,0,2,0,0,0,0,1,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0, 2,2,2,0,0,0,1,0,0,0,0,0,0,2,2,1,1,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0, 0,0,0,0,1,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,1,1,0,0,0,1,0,0,0,1,0,0,0,0,0,0,0,1,1,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 
2,2,2,0,1,0,1,0,0,0,0,0,0,1,1,0,0,0,0,0,0,0,1,0,1,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,1,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,1,0,0,0,0,0,0,0,0,0,0,2,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, ) Latin5TurkishModel = { 'char_to_order_map': Latin5_TurkishCharToOrderMap, 'precedence_matrix': TurkishLangModel, 'typical_positive_ratio': 0.970290, 'keep_english_letter': True, 'charset_name': "ISO-8859-9", 'language': 'Turkish', } PKZaa8site-packages/pip/_vendor/chardet/langbulgarianmodel.pycnu[ abc@svdZdZdZied6ed6dd6ed6dd6dd6Zied6ed6dd6ed6dd6dd6ZdS(iiiiiMiZicidiHimikieiOiiQifiLi^iRiniili[iJiwiTi`ioiisiAiEiFiBi?iDipigi\iihi_iViWiGitiiUi]iaiqiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiii-iiii i#i+i%i,i7i/i(i;i!i.i&i$i)ii'ii"i3i0i1i5i2i6i9i=iiCii<i8iii ii iiiiii i iiii iiiiiiiiiiiiKi4ii*ii>iiii:iibiiiiiiixiNi@iSiyiuiXiziYijiIiPiviritchar_to_order_maptprecedence_matrixg! _B?ttypical_positive_ratiotkeep_english_letters ISO-8859-5t charset_namet Bulgairantlanguages windows-1251t BulgarianN(iiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiMiZicidiHimikieiOiiQifiLi^iRiniili[iJiwiTi`ioiisiiiiiiiAiEiFiBi?iDipigi\iihi_iViWiGitiiUi]iaiqiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiQiiiiiiiiiiiiii-iiii i#i+i%i,i7i/i(i;i!i.i&i$i)ii'ii"i3i0i1i5i2i6i9i=iiCii<i8iii ii iiiiii i iiii iiiiiiiiiiiiKi4ii*ii>iiii:iibiiiiiii[ii(iiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiMiZicidiHimikieiOiiQifiLi^iRiniili[iJiwiTi`ioiisiiiiiiiAiEiFiBi?iDipigi\iihi_iViWiGitiiUi]iaiqiiiiiiiiiiiiiiiiiiixiiiiiiiiiNi@iSiyibiuiiiiiiiiiiiXiiiiiziYijiiiiii-iiiIiPiviriiiiii>i:iiiiiii i#i+i%i,i7i/i(i;i!i.i&i$i)ii'ii"i3i0i1i5i2i6i9i=iiCii<i8iii ii iiiiii i iiii 
iiiiiiiiiiiiKi4ii*i(iiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiii
iiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiii(tLatin5_BulgarianCharToOrderMaptwin1251BulgarianCharToOrderMaptBulgarianLangModeltFalsetLatin5BulgarianModeltWin1251BulgarianModel(((sJ/usr/lib/python2.7/site-packages/pip/_vendor/chardet/langbulgarianmodel.pyt&sZ  PKZ&0site-packages/pip/_vendor/chardet/euckrprober.pynu[######################## BEGIN LICENSE BLOCK ######################## # The Original Code is mozilla.org code. # # The Initial Developer of the Original Code is # Netscape Communications Corporation. # Portions created by the Initial Developer are Copyright (C) 1998 # the Initial Developer. All Rights Reserved. # # Contributor(s): # Mark Pilgrim - port to Python # # This library is free software; you can redistribute it and/or # modify it under the terms of the GNU Lesser General Public # License as published by the Free Software Foundation; either # version 2.1 of the License, or (at your option) any later version. # # This library is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU # Lesser General Public License for more details. # # You should have received a copy of the GNU Lesser General Public # License along with this library; if not, write to the Free Software # Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA # 02110-1301 USA ######################### END LICENSE BLOCK ######################### from .mbcharsetprober import MultiByteCharSetProber from .codingstatemachine import CodingStateMachine from .chardistribution import EUCKRDistributionAnalysis from .mbcssm import EUCKR_SM_MODEL class EUCKRProber(MultiByteCharSetProber): def __init__(self): super(EUCKRProber, self).__init__() self.coding_sm = CodingStateMachine(EUCKR_SM_MODEL) self.distribution_analyzer = EUCKRDistributionAnalysis() self.reset() @property def charset_name(self): return "EUC-KR" @property def language(self): return "Korean" PKZ\/site-packages/pip/_vendor/chardet/euctwfreq.pycnu[ abc@sdZdZdZdS(g?iiiiiiii iRiiiiiii inii!i iiLi,iAiis iiiiiiL iS iii iiii.iNiiiiii:iiii?iii=iNiKiiiil iii i ii i ii iiii ioi$ iiiii ici8iiiiiiiiizi|iit i"i ie i@i\ii iiiiiiiFiiM iQiHiiiPiviif iiiiDiT iiiFiN iiEi iOii/iisii3i<i2i ii&iLiiiO iiiGiiiM iii?ii i`iiF i*iigi iZi i:ii iiKi ii iiiiii`iiiig ii i iqii~iiiP i ii iii!iiuiii*iii i ii~iiiiieiiGi^iiii iUiCiiBiiiiiii ijioi/iiiOi2 i[iii i& iiiiiSi(ii iipi]ii6iii i' iiiiii8ii+i% i[iii\iiiiiiX i( iii i0ii iHi i i"i!ii+i i1i"iii iOiG iiifi1iiiiiiii2i9iili,ii iiiiii}ih i#iq iQiMi&i iXiii#iii iiiijiiiMiii i%ii ii$iii'i iiN ii ii7iiJii!iiiiiiMii) iPiU iiii ii%i i ii ii i iIii3 ii iir ii i iiiimiii$iiixii iii ii ii%i&ii iiiiiiii&ii'iii'iii.ii iiiH iiiii$i#iiDi&iAi iiiiiiUiGiiii iQ iPiSi'iiidi i0iFiii* iiiiiiiJii iUiiiiI iR iii<iiS i:i7 i ii i i9ii}ii iiiViPiT ii)iCiiii& i i i)iiim iiiiiii4 iiiin i*iiiiiiiO iiiii i+ i(i i iiiU i(ii5iYiji iliiuii)i i*i+iV i i=i iiiiiii4ii!iiiTi, ixiiieii iiJ iPiis i5iAiV i/iki i iili!i iiAii`ii i iiAii iiii 
ii i iiiiiiMi iiiiiiW iii it iii+i}iij i8 iiiiiiiii- i)imi iiW ii ii iiaii iP iK i i,ii ii7i' iu ii{ik iiiii i9 iiiiiiiiiii1iibii i iiio iiX i,iQ iiiiii i i iX iiii5iDiiiliii[i iiiY ii%ii. iY ii*iii iiR iii ipiiinicigi+iii'i2iiii{iil iim iii: iifi ii|i5 iiiiiZ i i i i ii i,ii iii;iiIiYiii[ iXi"ii iiii~iii,iikiii-iiiiiiDiiii>i]ii,iiiiviLiBiii&iii iiiiGiiiiBii i! iiiu iiiii i iaiiiiv iS ii}i; iiDii=ii0i ii\ i( iiiivi iiiiii.iii9iHiiiii] iEiiiiii!ii/ iii i i-iiiQi*ii.i/iiiii0 i3ii" ii*iRiiiiWiii/iibi.ii iiiiiiiRii ii) ii2ii iiiiiiiZ iiT iiw iiii iiiiiii[ iOii i i`i7ix i^ iiiiiii\ iiiiiiigiiin ii i_ii~iiii0ii iaiiy i] iiiii iii i;ii i-i iiL iiii/iiiii`ii#i"iiiii0i isiiiidisi iiY ii ikiwio ii iiii1ii2iigip iQiiUi iv iiiiiCiSii^ iii5iiiBii_i ibiNiiiXiiii iiLici i i iKiw i* iaiGi3ii i iii1i6i i2i6 iNiii i i3ii:ii\iqiiii-iii i3i iiiix i ii irii4i i_ iiU iiii@ii5i ii ii ii/i+ i6i iiiiiii i7 iZ iiii8i4iiCiii iilii` iiiiiiYiiiii5i i{iiiii$i7iiMiV i0irii iigiiVii8ii9i, iZiii{i#iW i0iii$i i i ii4ii[iiviiiciiViM i$i- iiiiX iii idiiiWi[ iii iiiq i ii1 iiiiitiiii ii i i2 ii2i:i3 i i.iii2ii iiiiii%i iii i i iiiiiii i i6i i4 ii|iiiSiiii_ i iiiii`iiiii i3iii;ir iiii7iYiLi;ii i iiiiiTi\iiii<iiVi5 iii iii ii"ixi%ii;i8 i iii2iEi=i!iiii^iiwi i"iiiis ii#i<iQiiiri$ii]ii%ii. i&i i3i5iBiiy i'iii ii i i>iY ia ib i(i i i)i*i iiiii:ii6ii^ii iii(i i+i+i i ixi,i i.ihi/ i-iiii iVi ii ihiiz i.iiiii<iii;i/i?iii i i0i8i i ii5i6i i_i)i1i2i i3iyi iii i i^i4i i i?i$i+i\ i i5i_iWii iz i6i0 ii i] i iip i7i ii8i^ i< iiVi9i i6 iiIiwi}ic i^ii i9i@ii iAi iii i:iei1iiiBiiiiii i i ii7 i;i<ii=i{ ii(i`iRi1 iYiiIi ii i i6i id i>iiie i?iioiiqiYiniiiliiCiDi{ ii i_ iEiPiUii i iiiiEimii@ii:i ii iAiiiiii# i iNi i iBiiCiWiTiTiii iif i;iDii&i i| iiiiii iii<i= i iii iVii iioii iEi iFiii iiii iiiDi-i8 iiGidi i i iHit iiiIizi iiJi i iKii i| iiiii> iiiiiiiLi:i i=iiMii? i iiiHiiiiiNiii iOiPiiFikiQi9 i iii i iu iiv iiiiiiRiSiii} i i>iiiii i: iTi iiN iiCiiiiUi~ii iWiiGiiii,iiiiiOii|iiiiOi?iVi` ii ii~ i iw iiii@i!iii; iiiiiWii"i iXiiYiiRig iuii ih i ix i=i iiZii< iii[i i#ii= i i\i$ii]iHiIi^i7iO i$ i i_i%iviJii i!iwitii`iaibi9 i&ii"i{ii i i'iciidi8iiiii[i9iiidiZ i ieiii i@ i!i(i"ifiiii i#i$iTigiKii i%iiWiMihiiiiiiii%iiiiii i` iiyi7imiLijiki i: i> i!iiii iBi)ili|i ii iziiii iiMi ii iiimiiniuiq ivii i i*iNioipiiiqi7i.iriiy i iii iifii i isi! iyi iOi i iii+iPij i i>iiii iiYi iAitiiui% i i ivi iiz i2 iwi,i i iii3iQiii ixi i6i9ik iii&i6i ii-i iiRiyiiQi9ici i'i.i ioi(iBiFi? 
iiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiii(tKOI8R_char_to_order_maptwin1251_char_to_order_maptlatin5_char_to_order_maptmacCyrillic_char_to_order_maptIBM855_char_to_order_maptIBM866_char_to_order_maptRussianLangModeltFalset Koi8rModeltWin1251CyrillicModeltLatin5CyrillicModeltMacCyrillicModelt Ibm866Modelt Ibm855Model(((sI/usr/lib/python2.7/site-packages/pip/_vendor/chardet/langcyrillicmodel.pyts      PKZU885site-packages/pip/_vendor/chardet/mbcsgroupprober.pyonu[ abc@sddlmZddlmZddlmZddlmZddlm Z ddl m Z ddl m Z ddlmZdd lmZd efd YZd S( i(tCharSetGroupProber(t UTF8Prober(t SJISProber(t EUCJPProber(t GB2312Prober(t EUCKRProber(t CP949Prober(t Big5Prober(t EUCTWProbertMBCSGroupProbercBseZddZRS(cCs`tt|jd|ttttttt t g|_ |j dS(Nt lang_filter( tsuperR t__init__RRRRRRRRtproberstreset(tselfR ((sG/usr/lib/python2.7/site-packages/pip/_vendor/chardet/mbcsgroupprober.pyR *sN(t__name__t __module__tNoneR (((sG/usr/lib/python2.7/site-packages/pip/_vendor/chardet/mbcsgroupprober.pyR )sN(tcharsetgroupproberRt utf8proberRt sjisproberRt eucjpproberRt gb2312proberRt euckrproberRt cp949proberRt big5proberRt euctwproberRR (((sG/usr/lib/python2.7/site-packages/pip/_vendor/chardet/mbcsgroupprober.pytsPKZlH2site-packages/pip/_vendor/chardet/latin1prober.pycnu[ abc@sddlmZddlmZdZdZdZdZdZdZ dZ dZ d Z d Z eeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeee eeeeeee ee ee eeeeeeeeeeee ee ee e eeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeee e e e e e e e e e e e e e e e e e e e e e e ee e e e e e e e e e e e e e e e e e e e e e e e e e e e e e e ee e e e e e e e fZdZd efd YZd S(i(t CharSetProber(t ProbingStateiiiiiiiit Latin1ProbercBsJeZdZdZedZedZdZdZRS(cCs3tt|jd|_d|_|jdS(N(tsuperRt__init__tNonet_last_char_classt _freq_countertreset(tself((sD/usr/lib/python2.7/site-packages/pip/_vendor/chardet/latin1prober.pyRas  cCs*t|_dgt|_tj|dS(Ni(tOTHRt FREQ_CAT_NUMRRR(R ((sD/usr/lib/python2.7/site-packages/pip/_vendor/chardet/latin1prober.pyRgs cCsdS(Ns ISO-8859-1((R ((sD/usr/lib/python2.7/site-packages/pip/_vendor/chardet/latin1prober.pyt charset_namelscCsdS(Nt((R ((sD/usr/lib/python2.7/site-packages/pip/_vendor/chardet/latin1prober.pytlanguagepscCs~|j|}xe|D]]}t|}t|jt|}|dkrWtj|_Pn|j|cd7<||_qW|j S(Nii( tfilter_with_english_letterstLatin1_CharToClasstLatin1ClassModelRt CLASS_NUMRtNOT_MEt_stateRtstate(R tbyte_strtct char_classtfreq((sD/usr/lib/python2.7/site-packages/pip/_vendor/chardet/latin1prober.pytfeedts      cCs}|jtjkrdSt|j}|dkr:d}n |jd|jdd|}|dkrod}n|d}|S(Ng{Gz?giig4@g\(\?(RRRtsumR(R ttotalt confidence((sD/usr/lib/python2.7/site-packages/pip/_vendor/chardet/latin1prober.pytget_confidences     ( t__name__t __module__RRtpropertyR RRR(((sD/usr/lib/python2.7/site-packages/pip/_vendor/chardet/latin1prober.pyR`s    N(@iiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiii(t charsetproberRtenumsRR tUDFR tASCtASStACVtACOtASVtASORRRR(((sD/usr/lib/python2.7/site-packages/pip/_vendor/chardet/latin1prober.pytsh PKZ' 
[compiled bytecode not reproduced: site-packages/pip/_vendor/chardet/euckrfreq.pyc (EUCKR_TYPICAL_DISTRIBUTION_RATIO, EUCKR_TABLE_SIZE, EUCKR_CHAR_TO_FREQ_ORDER)]
i/ iJi+i0 i1 i2 iiTii3 i4 i5 i6 ii7 iAi.iiii8 i9 ii"i: i; ii< i= i> iii0i? ii@ iiA iB iiC iD iE iF iG iii_i[iH iI iii`iaiJ iiiiiK iL iM iN iiO iiiiiiiP iQ iR (t EUCKR_TYPICAL_DISTRIBUTION_RATIOtEUCKR_TABLE_SIZEtEUCKR_CHAR_TO_FREQ_ORDER(((sA/usr/lib/python2.7/site-packages/pip/_vendor/chardet/euckrfreq.pyt)s(PKZ9zz-site-packages/pip/_vendor/chardet/big5freq.pynu[######################## BEGIN LICENSE BLOCK ######################## # The Original Code is Mozilla Communicator client code. # # The Initial Developer of the Original Code is # Netscape Communications Corporation. # Portions created by the Initial Developer are Copyright (C) 1998 # the Initial Developer. All Rights Reserved. # # Contributor(s): # Mark Pilgrim - port to Python # # This library is free software; you can redistribute it and/or # modify it under the terms of the GNU Lesser General Public # License as published by the Free Software Foundation; either # version 2.1 of the License, or (at your option) any later version. # # This library is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU # Lesser General Public License for more details. # # You should have received a copy of the GNU Lesser General Public # License along with this library; if not, write to the Free Software # Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA # 02110-1301 USA ######################### END LICENSE BLOCK ######################### # Big5 frequency table # by Taiwan's Mandarin Promotion Council # # # 128 --> 0.42261 # 256 --> 0.57851 # 512 --> 0.74851 # 1024 --> 0.89384 # 2048 --> 0.97583 # # Ideal Distribution Ratio = 0.74851/(1-0.74851) =2.98 # Random Distribution Ration = 512/(5401-512)=0.105 # # Typical Distribution Ratio about 25% of Ideal one, still much higher than RDR BIG5_TYPICAL_DISTRIBUTION_RATIO = 0.75 #Char to FreqOrder table BIG5_TABLE_SIZE = 5376 BIG5_CHAR_TO_FREQ_ORDER = ( 1,1801,1506, 255,1431, 198, 9, 82, 6,5008, 177, 202,3681,1256,2821, 110, # 16 3814, 33,3274, 261, 76, 44,2114, 16,2946,2187,1176, 659,3971, 26,3451,2653, # 32 1198,3972,3350,4202, 410,2215, 302, 590, 361,1964, 8, 204, 58,4510,5009,1932, # 48 63,5010,5011, 317,1614, 75, 222, 159,4203,2417,1480,5012,3555,3091, 224,2822, # 64 3682, 3, 10,3973,1471, 29,2787,1135,2866,1940, 873, 130,3275,1123, 312,5013, # 80 4511,2052, 507, 252, 682,5014, 142,1915, 124, 206,2947, 34,3556,3204, 64, 604, # 96 5015,2501,1977,1978, 155,1991, 645, 641,1606,5016,3452, 337, 72, 406,5017, 80, # 112 630, 238,3205,1509, 263, 939,1092,2654, 756,1440,1094,3453, 449, 69,2987, 591, # 128 179,2096, 471, 115,2035,1844, 60, 50,2988, 134, 806,1869, 734,2036,3454, 180, # 144 995,1607, 156, 537,2907, 688,5018, 319,1305, 779,2145, 514,2379, 298,4512, 359, # 160 2502, 90,2716,1338, 663, 11, 906,1099,2553, 20,2441, 182, 532,1716,5019, 732, # 176 1376,4204,1311,1420,3206, 25,2317,1056, 113, 399, 382,1950, 242,3455,2474, 529, # 192 3276, 475,1447,3683,5020, 117, 21, 656, 810,1297,2300,2334,3557,5021, 126,4205, # 208 706, 456, 150, 613,4513, 71,1118,2037,4206, 145,3092, 85, 835, 486,2115,1246, # 224 1426, 428, 727,1285,1015, 800, 106, 623, 303,1281,5022,2128,2359, 347,3815, 221, # 240 3558,3135,5023,1956,1153,4207, 83, 296,1199,3093, 192, 624, 93,5024, 822,1898, # 256 2823,3136, 795,2065, 991,1554,1542,1592, 27, 43,2867, 859, 139,1456, 860,4514, # 272 437, 712,3974, 164,2397,3137, 695, 211,3037,2097, 195,3975,1608,3559,3560,3684, # 288 3976, 234, 
811,2989,2098,3977,2233,1441,3561,1615,2380, 668,2077,1638, 305, 228, # 304 1664,4515, 467, 415,5025, 262,2099,1593, 239, 108, 300, 200,1033, 512,1247,2078, # 320 5026,5027,2176,3207,3685,2682, 593, 845,1062,3277, 88,1723,2038,3978,1951, 212, # 336 266, 152, 149, 468,1899,4208,4516, 77, 187,5028,3038, 37, 5,2990,5029,3979, # 352 5030,5031, 39,2524,4517,2908,3208,2079, 55, 148, 74,4518, 545, 483,1474,1029, # 368 1665, 217,1870,1531,3138,1104,2655,4209, 24, 172,3562, 900,3980,3563,3564,4519, # 384 32,1408,2824,1312, 329, 487,2360,2251,2717, 784,2683, 4,3039,3351,1427,1789, # 400 188, 109, 499,5032,3686,1717,1790, 888,1217,3040,4520,5033,3565,5034,3352,1520, # 416 3687,3981, 196,1034, 775,5035,5036, 929,1816, 249, 439, 38,5037,1063,5038, 794, # 432 3982,1435,2301, 46, 178,3278,2066,5039,2381,5040, 214,1709,4521, 804, 35, 707, # 448 324,3688,1601,2554, 140, 459,4210,5041,5042,1365, 839, 272, 978,2262,2580,3456, # 464 2129,1363,3689,1423, 697, 100,3094, 48, 70,1231, 495,3139,2196,5043,1294,5044, # 480 2080, 462, 586,1042,3279, 853, 256, 988, 185,2382,3457,1698, 434,1084,5045,3458, # 496 314,2625,2788,4522,2335,2336, 569,2285, 637,1817,2525, 757,1162,1879,1616,3459, # 512 287,1577,2116, 768,4523,1671,2868,3566,2526,1321,3816, 909,2418,5046,4211, 933, # 528 3817,4212,2053,2361,1222,4524, 765,2419,1322, 786,4525,5047,1920,1462,1677,2909, # 544 1699,5048,4526,1424,2442,3140,3690,2600,3353,1775,1941,3460,3983,4213, 309,1369, # 560 1130,2825, 364,2234,1653,1299,3984,3567,3985,3986,2656, 525,1085,3041, 902,2001, # 576 1475, 964,4527, 421,1845,1415,1057,2286, 940,1364,3141, 376,4528,4529,1381, 7, # 592 2527, 983,2383, 336,1710,2684,1846, 321,3461, 559,1131,3042,2752,1809,1132,1313, # 608 265,1481,1858,5049, 352,1203,2826,3280, 167,1089, 420,2827, 776, 792,1724,3568, # 624 4214,2443,3281,5050,4215,5051, 446, 229, 333,2753, 901,3818,1200,1557,4530,2657, # 640 1921, 395,2754,2685,3819,4216,1836, 125, 916,3209,2626,4531,5052,5053,3820,5054, # 656 5055,5056,4532,3142,3691,1133,2555,1757,3462,1510,2318,1409,3569,5057,2146, 438, # 672 2601,2910,2384,3354,1068, 958,3043, 461, 311,2869,2686,4217,1916,3210,4218,1979, # 688 383, 750,2755,2627,4219, 274, 539, 385,1278,1442,5058,1154,1965, 384, 561, 210, # 704 98,1295,2556,3570,5059,1711,2420,1482,3463,3987,2911,1257, 129,5060,3821, 642, # 720 523,2789,2790,2658,5061, 141,2235,1333, 68, 176, 441, 876, 907,4220, 603,2602, # 736 710, 171,3464, 404, 549, 18,3143,2398,1410,3692,1666,5062,3571,4533,2912,4534, # 752 5063,2991, 368,5064, 146, 366, 99, 871,3693,1543, 748, 807,1586,1185, 22,2263, # 768 379,3822,3211,5065,3212, 505,1942,2628,1992,1382,2319,5066, 380,2362, 218, 702, # 784 1818,1248,3465,3044,3572,3355,3282,5067,2992,3694, 930,3283,3823,5068, 59,5069, # 800 585, 601,4221, 497,3466,1112,1314,4535,1802,5070,1223,1472,2177,5071, 749,1837, # 816 690,1900,3824,1773,3988,1476, 429,1043,1791,2236,2117, 917,4222, 447,1086,1629, # 832 5072, 556,5073,5074,2021,1654, 844,1090, 105, 550, 966,1758,2828,1008,1783, 686, # 848 1095,5075,2287, 793,1602,5076,3573,2603,4536,4223,2948,2302,4537,3825, 980,2503, # 864 544, 353, 527,4538, 908,2687,2913,5077, 381,2629,1943,1348,5078,1341,1252, 560, # 880 3095,5079,3467,2870,5080,2054, 973, 886,2081, 143,4539,5081,5082, 157,3989, 496, # 896 4224, 57, 840, 540,2039,4540,4541,3468,2118,1445, 970,2264,1748,1966,2082,4225, # 912 3144,1234,1776,3284,2829,3695, 773,1206,2130,1066,2040,1326,3990,1738,1725,4226, # 928 279,3145, 51,1544,2604, 423,1578,2131,2067, 173,4542,1880,5083,5084,1583, 264, # 944 610,3696,4543,2444, 280, 
154,5085,5086,5087,1739, 338,1282,3096, 693,2871,1411, # 960 1074,3826,2445,5088,4544,5089,5090,1240, 952,2399,5091,2914,1538,2688, 685,1483, # 976 4227,2475,1436, 953,4228,2055,4545, 671,2400, 79,4229,2446,3285, 608, 567,2689, # 992 3469,4230,4231,1691, 393,1261,1792,2401,5092,4546,5093,5094,5095,5096,1383,1672, # 1008 3827,3213,1464, 522,1119, 661,1150, 216, 675,4547,3991,1432,3574, 609,4548,2690, # 1024 2402,5097,5098,5099,4232,3045, 0,5100,2476, 315, 231,2447, 301,3356,4549,2385, # 1040 5101, 233,4233,3697,1819,4550,4551,5102, 96,1777,1315,2083,5103, 257,5104,1810, # 1056 3698,2718,1139,1820,4234,2022,1124,2164,2791,1778,2659,5105,3097, 363,1655,3214, # 1072 5106,2993,5107,5108,5109,3992,1567,3993, 718, 103,3215, 849,1443, 341,3357,2949, # 1088 1484,5110,1712, 127, 67, 339,4235,2403, 679,1412, 821,5111,5112, 834, 738, 351, # 1104 2994,2147, 846, 235,1497,1881, 418,1993,3828,2719, 186,1100,2148,2756,3575,1545, # 1120 1355,2950,2872,1377, 583,3994,4236,2581,2995,5113,1298,3699,1078,2557,3700,2363, # 1136 78,3829,3830, 267,1289,2100,2002,1594,4237, 348, 369,1274,2197,2178,1838,4552, # 1152 1821,2830,3701,2757,2288,2003,4553,2951,2758, 144,3358, 882,4554,3995,2759,3470, # 1168 4555,2915,5114,4238,1726, 320,5115,3996,3046, 788,2996,5116,2831,1774,1327,2873, # 1184 3997,2832,5117,1306,4556,2004,1700,3831,3576,2364,2660, 787,2023, 506, 824,3702, # 1200 534, 323,4557,1044,3359,2024,1901, 946,3471,5118,1779,1500,1678,5119,1882,4558, # 1216 165, 243,4559,3703,2528, 123, 683,4239, 764,4560, 36,3998,1793, 589,2916, 816, # 1232 626,1667,3047,2237,1639,1555,1622,3832,3999,5120,4000,2874,1370,1228,1933, 891, # 1248 2084,2917, 304,4240,5121, 292,2997,2720,3577, 691,2101,4241,1115,4561, 118, 662, # 1264 5122, 611,1156, 854,2386,1316,2875, 2, 386, 515,2918,5123,5124,3286, 868,2238, # 1280 1486, 855,2661, 785,2216,3048,5125,1040,3216,3578,5126,3146, 448,5127,1525,5128, # 1296 2165,4562,5129,3833,5130,4242,2833,3579,3147, 503, 818,4001,3148,1568, 814, 676, # 1312 1444, 306,1749,5131,3834,1416,1030, 197,1428, 805,2834,1501,4563,5132,5133,5134, # 1328 1994,5135,4564,5136,5137,2198, 13,2792,3704,2998,3149,1229,1917,5138,3835,2132, # 1344 5139,4243,4565,2404,3580,5140,2217,1511,1727,1120,5141,5142, 646,3836,2448, 307, # 1360 5143,5144,1595,3217,5145,5146,5147,3705,1113,1356,4002,1465,2529,2530,5148, 519, # 1376 5149, 128,2133, 92,2289,1980,5150,4003,1512, 342,3150,2199,5151,2793,2218,1981, # 1392 3360,4244, 290,1656,1317, 789, 827,2365,5152,3837,4566, 562, 581,4004,5153, 401, # 1408 4567,2252, 94,4568,5154,1399,2794,5155,1463,2025,4569,3218,1944,5156, 828,1105, # 1424 4245,1262,1394,5157,4246, 605,4570,5158,1784,2876,5159,2835, 819,2102, 578,2200, # 1440 2952,5160,1502, 436,3287,4247,3288,2836,4005,2919,3472,3473,5161,2721,2320,5162, # 1456 5163,2337,2068, 23,4571, 193, 826,3838,2103, 699,1630,4248,3098, 390,1794,1064, # 1472 3581,5164,1579,3099,3100,1400,5165,4249,1839,1640,2877,5166,4572,4573, 137,4250, # 1488 598,3101,1967, 780, 104, 974,2953,5167, 278, 899, 253, 402, 572, 504, 493,1339, # 1504 5168,4006,1275,4574,2582,2558,5169,3706,3049,3102,2253, 565,1334,2722, 863, 41, # 1520 5170,5171,4575,5172,1657,2338, 19, 463,2760,4251, 606,5173,2999,3289,1087,2085, # 1536 1323,2662,3000,5174,1631,1623,1750,4252,2691,5175,2878, 791,2723,2663,2339, 232, # 1552 2421,5176,3001,1498,5177,2664,2630, 755,1366,3707,3290,3151,2026,1609, 119,1918, # 1568 3474, 862,1026,4253,5178,4007,3839,4576,4008,4577,2265,1952,2477,5179,1125, 817, # 1584 4254,4255,4009,1513,1766,2041,1487,4256,3050,3291,2837,3840,3152,5180,5181,1507, # 
1600 5182,2692, 733, 40,1632,1106,2879, 345,4257, 841,2531, 230,4578,3002,1847,3292, # 1616 3475,5183,1263, 986,3476,5184, 735, 879, 254,1137, 857, 622,1300,1180,1388,1562, # 1632 4010,4011,2954, 967,2761,2665,1349, 592,2134,1692,3361,3003,1995,4258,1679,4012, # 1648 1902,2188,5185, 739,3708,2724,1296,1290,5186,4259,2201,2202,1922,1563,2605,2559, # 1664 1871,2762,3004,5187, 435,5188, 343,1108, 596, 17,1751,4579,2239,3477,3709,5189, # 1680 4580, 294,3582,2955,1693, 477, 979, 281,2042,3583, 643,2043,3710,2631,2795,2266, # 1696 1031,2340,2135,2303,3584,4581, 367,1249,2560,5190,3585,5191,4582,1283,3362,2005, # 1712 240,1762,3363,4583,4584, 836,1069,3153, 474,5192,2149,2532, 268,3586,5193,3219, # 1728 1521,1284,5194,1658,1546,4260,5195,3587,3588,5196,4261,3364,2693,1685,4262, 961, # 1744 1673,2632, 190,2006,2203,3841,4585,4586,5197, 570,2504,3711,1490,5198,4587,2633, # 1760 3293,1957,4588, 584,1514, 396,1045,1945,5199,4589,1968,2449,5200,5201,4590,4013, # 1776 619,5202,3154,3294, 215,2007,2796,2561,3220,4591,3221,4592, 763,4263,3842,4593, # 1792 5203,5204,1958,1767,2956,3365,3712,1174, 452,1477,4594,3366,3155,5205,2838,1253, # 1808 2387,2189,1091,2290,4264, 492,5206, 638,1169,1825,2136,1752,4014, 648, 926,1021, # 1824 1324,4595, 520,4596, 997, 847,1007, 892,4597,3843,2267,1872,3713,2405,1785,4598, # 1840 1953,2957,3103,3222,1728,4265,2044,3714,4599,2008,1701,3156,1551, 30,2268,4266, # 1856 5207,2027,4600,3589,5208, 501,5209,4267, 594,3478,2166,1822,3590,3479,3591,3223, # 1872 829,2839,4268,5210,1680,3157,1225,4269,5211,3295,4601,4270,3158,2341,5212,4602, # 1888 4271,5213,4015,4016,5214,1848,2388,2606,3367,5215,4603, 374,4017, 652,4272,4273, # 1904 375,1140, 798,5216,5217,5218,2366,4604,2269, 546,1659, 138,3051,2450,4605,5219, # 1920 2254, 612,1849, 910, 796,3844,1740,1371, 825,3845,3846,5220,2920,2562,5221, 692, # 1936 444,3052,2634, 801,4606,4274,5222,1491, 244,1053,3053,4275,4276, 340,5223,4018, # 1952 1041,3005, 293,1168, 87,1357,5224,1539, 959,5225,2240, 721, 694,4277,3847, 219, # 1968 1478, 644,1417,3368,2666,1413,1401,1335,1389,4019,5226,5227,3006,2367,3159,1826, # 1984 730,1515, 184,2840, 66,4607,5228,1660,2958, 246,3369, 378,1457, 226,3480, 975, # 2000 4020,2959,1264,3592, 674, 696,5229, 163,5230,1141,2422,2167, 713,3593,3370,4608, # 2016 4021,5231,5232,1186, 15,5233,1079,1070,5234,1522,3224,3594, 276,1050,2725, 758, # 2032 1126, 653,2960,3296,5235,2342, 889,3595,4022,3104,3007, 903,1250,4609,4023,3481, # 2048 3596,1342,1681,1718, 766,3297, 286, 89,2961,3715,5236,1713,5237,2607,3371,3008, # 2064 5238,2962,2219,3225,2880,5239,4610,2505,2533, 181, 387,1075,4024, 731,2190,3372, # 2080 5240,3298, 310, 313,3482,2304, 770,4278, 54,3054, 189,4611,3105,3848,4025,5241, # 2096 1230,1617,1850, 355,3597,4279,4612,3373, 111,4280,3716,1350,3160,3483,3055,4281, # 2112 2150,3299,3598,5242,2797,4026,4027,3009, 722,2009,5243,1071, 247,1207,2343,2478, # 2128 1378,4613,2010, 864,1437,1214,4614, 373,3849,1142,2220, 667,4615, 442,2763,2563, # 2144 3850,4028,1969,4282,3300,1840, 837, 170,1107, 934,1336,1883,5244,5245,2119,4283, # 2160 2841, 743,1569,5246,4616,4284, 582,2389,1418,3484,5247,1803,5248, 357,1395,1729, # 2176 3717,3301,2423,1564,2241,5249,3106,3851,1633,4617,1114,2086,4285,1532,5250, 482, # 2192 2451,4618,5251,5252,1492, 833,1466,5253,2726,3599,1641,2842,5254,1526,1272,3718, # 2208 4286,1686,1795, 416,2564,1903,1954,1804,5255,3852,2798,3853,1159,2321,5256,2881, # 2224 4619,1610,1584,3056,2424,2764, 443,3302,1163,3161,5257,5258,4029,5259,4287,2506, # 2240 
3057,4620,4030,3162,2104,1647,3600,2011,1873,4288,5260,4289, 431,3485,5261, 250, # 2256 97, 81,4290,5262,1648,1851,1558, 160, 848,5263, 866, 740,1694,5264,2204,2843, # 2272 3226,4291,4621,3719,1687, 950,2479, 426, 469,3227,3720,3721,4031,5265,5266,1188, # 2288 424,1996, 861,3601,4292,3854,2205,2694, 168,1235,3602,4293,5267,2087,1674,4622, # 2304 3374,3303, 220,2565,1009,5268,3855, 670,3010, 332,1208, 717,5269,5270,3603,2452, # 2320 4032,3375,5271, 513,5272,1209,2882,3376,3163,4623,1080,5273,5274,5275,5276,2534, # 2336 3722,3604, 815,1587,4033,4034,5277,3605,3486,3856,1254,4624,1328,3058,1390,4035, # 2352 1741,4036,3857,4037,5278, 236,3858,2453,3304,5279,5280,3723,3859,1273,3860,4625, # 2368 5281, 308,5282,4626, 245,4627,1852,2480,1307,2583, 430, 715,2137,2454,5283, 270, # 2384 199,2883,4038,5284,3606,2727,1753, 761,1754, 725,1661,1841,4628,3487,3724,5285, # 2400 5286, 587, 14,3305, 227,2608, 326, 480,2270, 943,2765,3607, 291, 650,1884,5287, # 2416 1702,1226, 102,1547, 62,3488, 904,4629,3489,1164,4294,5288,5289,1224,1548,2766, # 2432 391, 498,1493,5290,1386,1419,5291,2056,1177,4630, 813, 880,1081,2368, 566,1145, # 2448 4631,2291,1001,1035,2566,2609,2242, 394,1286,5292,5293,2069,5294, 86,1494,1730, # 2464 4039, 491,1588, 745, 897,2963, 843,3377,4040,2767,2884,3306,1768, 998,2221,2070, # 2480 397,1827,1195,1970,3725,3011,3378, 284,5295,3861,2507,2138,2120,1904,5296,4041, # 2496 2151,4042,4295,1036,3490,1905, 114,2567,4296, 209,1527,5297,5298,2964,2844,2635, # 2512 2390,2728,3164, 812,2568,5299,3307,5300,1559, 737,1885,3726,1210, 885, 28,2695, # 2528 3608,3862,5301,4297,1004,1780,4632,5302, 346,1982,2222,2696,4633,3863,1742, 797, # 2544 1642,4043,1934,1072,1384,2152, 896,4044,3308,3727,3228,2885,3609,5303,2569,1959, # 2560 4634,2455,1786,5304,5305,5306,4045,4298,1005,1308,3728,4299,2729,4635,4636,1528, # 2576 2610, 161,1178,4300,1983, 987,4637,1101,4301, 631,4046,1157,3229,2425,1343,1241, # 2592 1016,2243,2570, 372, 877,2344,2508,1160, 555,1935, 911,4047,5307, 466,1170, 169, # 2608 1051,2921,2697,3729,2481,3012,1182,2012,2571,1251,2636,5308, 992,2345,3491,1540, # 2624 2730,1201,2071,2406,1997,2482,5309,4638, 528,1923,2191,1503,1874,1570,2369,3379, # 2640 3309,5310, 557,1073,5311,1828,3492,2088,2271,3165,3059,3107, 767,3108,2799,4639, # 2656 1006,4302,4640,2346,1267,2179,3730,3230, 778,4048,3231,2731,1597,2667,5312,4641, # 2672 5313,3493,5314,5315,5316,3310,2698,1433,3311, 131, 95,1504,4049, 723,4303,3166, # 2688 1842,3610,2768,2192,4050,2028,2105,3731,5317,3013,4051,1218,5318,3380,3232,4052, # 2704 4304,2584, 248,1634,3864, 912,5319,2845,3732,3060,3865, 654, 53,5320,3014,5321, # 2720 1688,4642, 777,3494,1032,4053,1425,5322, 191, 820,2121,2846, 971,4643, 931,3233, # 2736 135, 664, 783,3866,1998, 772,2922,1936,4054,3867,4644,2923,3234, 282,2732, 640, # 2752 1372,3495,1127, 922, 325,3381,5323,5324, 711,2045,5325,5326,4055,2223,2800,1937, # 2768 4056,3382,2224,2255,3868,2305,5327,4645,3869,1258,3312,4057,3235,2139,2965,4058, # 2784 4059,5328,2225, 258,3236,4646, 101,1227,5329,3313,1755,5330,1391,3314,5331,2924, # 2800 2057, 893,5332,5333,5334,1402,4305,2347,5335,5336,3237,3611,5337,5338, 878,1325, # 2816 1781,2801,4647, 259,1385,2585, 744,1183,2272,4648,5339,4060,2509,5340, 684,1024, # 2832 4306,5341, 472,3612,3496,1165,3315,4061,4062, 322,2153, 881, 455,1695,1152,1340, # 2848 660, 554,2154,4649,1058,4650,4307, 830,1065,3383,4063,4651,1924,5342,1703,1919, # 2864 5343, 932,2273, 122,5344,4652, 947, 677,5345,3870,2637, 297,1906,1925,2274,4653, # 2880 2322,3316,5346,5347,4308,5348,4309, 84,4310, 112, 
989,5349, 547,1059,4064, 701, # 2896 3613,1019,5350,4311,5351,3497, 942, 639, 457,2306,2456, 993,2966, 407, 851, 494, # 2912 4654,3384, 927,5352,1237,5353,2426,3385, 573,4312, 680, 921,2925,1279,1875, 285, # 2928 790,1448,1984, 719,2168,5354,5355,4655,4065,4066,1649,5356,1541, 563,5357,1077, # 2944 5358,3386,3061,3498, 511,3015,4067,4068,3733,4069,1268,2572,3387,3238,4656,4657, # 2960 5359, 535,1048,1276,1189,2926,2029,3167,1438,1373,2847,2967,1134,2013,5360,4313, # 2976 1238,2586,3109,1259,5361, 700,5362,2968,3168,3734,4314,5363,4315,1146,1876,1907, # 2992 4658,2611,4070, 781,2427, 132,1589, 203, 147, 273,2802,2407, 898,1787,2155,4071, # 3008 4072,5364,3871,2803,5365,5366,4659,4660,5367,3239,5368,1635,3872, 965,5369,1805, # 3024 2699,1516,3614,1121,1082,1329,3317,4073,1449,3873, 65,1128,2848,2927,2769,1590, # 3040 3874,5370,5371, 12,2668, 45, 976,2587,3169,4661, 517,2535,1013,1037,3240,5372, # 3056 3875,2849,5373,3876,5374,3499,5375,2612, 614,1999,2323,3877,3110,2733,2638,5376, # 3072 2588,4316, 599,1269,5377,1811,3735,5378,2700,3111, 759,1060, 489,1806,3388,3318, # 3088 1358,5379,5380,2391,1387,1215,2639,2256, 490,5381,5382,4317,1759,2392,2348,5383, # 3104 4662,3878,1908,4074,2640,1807,3241,4663,3500,3319,2770,2349, 874,5384,5385,3501, # 3120 3736,1859, 91,2928,3737,3062,3879,4664,5386,3170,4075,2669,5387,3502,1202,1403, # 3136 3880,2969,2536,1517,2510,4665,3503,2511,5388,4666,5389,2701,1886,1495,1731,4076, # 3152 2370,4667,5390,2030,5391,5392,4077,2702,1216, 237,2589,4318,2324,4078,3881,4668, # 3168 4669,2703,3615,3504, 445,4670,5393,5394,5395,5396,2771, 61,4079,3738,1823,4080, # 3184 5397, 687,2046, 935, 925, 405,2670, 703,1096,1860,2734,4671,4081,1877,1367,2704, # 3200 3389, 918,2106,1782,2483, 334,3320,1611,1093,4672, 564,3171,3505,3739,3390, 945, # 3216 2641,2058,4673,5398,1926, 872,4319,5399,3506,2705,3112, 349,4320,3740,4082,4674, # 3232 3882,4321,3741,2156,4083,4675,4676,4322,4677,2408,2047, 782,4084, 400, 251,4323, # 3248 1624,5400,5401, 277,3742, 299,1265, 476,1191,3883,2122,4324,4325,1109, 205,5402, # 3264 2590,1000,2157,3616,1861,5403,5404,5405,4678,5406,4679,2573, 107,2484,2158,4085, # 3280 3507,3172,5407,1533, 541,1301, 158, 753,4326,2886,3617,5408,1696, 370,1088,4327, # 3296 4680,3618, 579, 327, 440, 162,2244, 269,1938,1374,3508, 968,3063, 56,1396,3113, # 3312 2107,3321,3391,5409,1927,2159,4681,3016,5410,3619,5411,5412,3743,4682,2485,5413, # 3328 2804,5414,1650,4683,5415,2613,5416,5417,4086,2671,3392,1149,3393,4087,3884,4088, # 3344 5418,1076, 49,5419, 951,3242,3322,3323, 450,2850, 920,5420,1812,2805,2371,4328, # 3360 1909,1138,2372,3885,3509,5421,3243,4684,1910,1147,1518,2428,4685,3886,5422,4686, # 3376 2393,2614, 260,1796,3244,5423,5424,3887,3324, 708,5425,3620,1704,5426,3621,1351, # 3392 1618,3394,3017,1887, 944,4329,3395,4330,3064,3396,4331,5427,3744, 422, 413,1714, # 3408 3325, 500,2059,2350,4332,2486,5428,1344,1911, 954,5429,1668,5430,5431,4089,2409, # 3424 4333,3622,3888,4334,5432,2307,1318,2512,3114, 133,3115,2887,4687, 629, 31,2851, # 3440 2706,3889,4688, 850, 949,4689,4090,2970,1732,2089,4335,1496,1853,5433,4091, 620, # 3456 3245, 981,1242,3745,3397,1619,3746,1643,3326,2140,2457,1971,1719,3510,2169,5434, # 3472 3246,5435,5436,3398,1829,5437,1277,4690,1565,2048,5438,1636,3623,3116,5439, 869, # 3488 2852, 655,3890,3891,3117,4092,3018,3892,1310,3624,4691,5440,5441,5442,1733, 558, # 3504 4692,3747, 335,1549,3065,1756,4336,3748,1946,3511,1830,1291,1192, 470,2735,2108, # 3520 2806, 913,1054,4093,5443,1027,5444,3066,4094,4693, 982,2672,3399,3173,3512,3247, # 3536 
3248,1947,2807,5445, 571,4694,5446,1831,5447,3625,2591,1523,2429,5448,2090, 984, # 3552 4695,3749,1960,5449,3750, 852, 923,2808,3513,3751, 969,1519, 999,2049,2325,1705, # 3568 5450,3118, 615,1662, 151, 597,4095,2410,2326,1049, 275,4696,3752,4337, 568,3753, # 3584 3626,2487,4338,3754,5451,2430,2275, 409,3249,5452,1566,2888,3514,1002, 769,2853, # 3600 194,2091,3174,3755,2226,3327,4339, 628,1505,5453,5454,1763,2180,3019,4096, 521, # 3616 1161,2592,1788,2206,2411,4697,4097,1625,4340,4341, 412, 42,3119, 464,5455,2642, # 3632 4698,3400,1760,1571,2889,3515,2537,1219,2207,3893,2643,2141,2373,4699,4700,3328, # 3648 1651,3401,3627,5456,5457,3628,2488,3516,5458,3756,5459,5460,2276,2092, 460,5461, # 3664 4701,5462,3020, 962, 588,3629, 289,3250,2644,1116, 52,5463,3067,1797,5464,5465, # 3680 5466,1467,5467,1598,1143,3757,4342,1985,1734,1067,4702,1280,3402, 465,4703,1572, # 3696 510,5468,1928,2245,1813,1644,3630,5469,4704,3758,5470,5471,2673,1573,1534,5472, # 3712 5473, 536,1808,1761,3517,3894,3175,2645,5474,5475,5476,4705,3518,2929,1912,2809, # 3728 5477,3329,1122, 377,3251,5478, 360,5479,5480,4343,1529, 551,5481,2060,3759,1769, # 3744 2431,5482,2930,4344,3330,3120,2327,2109,2031,4706,1404, 136,1468,1479, 672,1171, # 3760 3252,2308, 271,3176,5483,2772,5484,2050, 678,2736, 865,1948,4707,5485,2014,4098, # 3776 2971,5486,2737,2227,1397,3068,3760,4708,4709,1735,2931,3403,3631,5487,3895, 509, # 3792 2854,2458,2890,3896,5488,5489,3177,3178,4710,4345,2538,4711,2309,1166,1010, 552, # 3808 681,1888,5490,5491,2972,2973,4099,1287,1596,1862,3179, 358, 453, 736, 175, 478, # 3824 1117, 905,1167,1097,5492,1854,1530,5493,1706,5494,2181,3519,2292,3761,3520,3632, # 3840 4346,2093,4347,5495,3404,1193,2489,4348,1458,2193,2208,1863,1889,1421,3331,2932, # 3856 3069,2182,3521, 595,2123,5496,4100,5497,5498,4349,1707,2646, 223,3762,1359, 751, # 3872 3121, 183,3522,5499,2810,3021, 419,2374, 633, 704,3897,2394, 241,5500,5501,5502, # 3888 838,3022,3763,2277,2773,2459,3898,1939,2051,4101,1309,3122,2246,1181,5503,1136, # 3904 2209,3899,2375,1446,4350,2310,4712,5504,5505,4351,1055,2615, 484,3764,5506,4102, # 3920 625,4352,2278,3405,1499,4353,4103,5507,4104,4354,3253,2279,2280,3523,5508,5509, # 3936 2774, 808,2616,3765,3406,4105,4355,3123,2539, 526,3407,3900,4356, 955,5510,1620, # 3952 4357,2647,2432,5511,1429,3766,1669,1832, 994, 928,5512,3633,1260,5513,5514,5515, # 3968 1949,2293, 741,2933,1626,4358,2738,2460, 867,1184, 362,3408,1392,5516,5517,4106, # 3984 4359,1770,1736,3254,2934,4713,4714,1929,2707,1459,1158,5518,3070,3409,2891,1292, # 4000 1930,2513,2855,3767,1986,1187,2072,2015,2617,4360,5519,2574,2514,2170,3768,2490, # 4016 3332,5520,3769,4715,5521,5522, 666,1003,3023,1022,3634,4361,5523,4716,1814,2257, # 4032 574,3901,1603, 295,1535, 705,3902,4362, 283, 858, 417,5524,5525,3255,4717,4718, # 4048 3071,1220,1890,1046,2281,2461,4107,1393,1599, 689,2575, 388,4363,5526,2491, 802, # 4064 5527,2811,3903,2061,1405,2258,5528,4719,3904,2110,1052,1345,3256,1585,5529, 809, # 4080 5530,5531,5532, 575,2739,3524, 956,1552,1469,1144,2328,5533,2329,1560,2462,3635, # 4096 3257,4108, 616,2210,4364,3180,2183,2294,5534,1833,5535,3525,4720,5536,1319,3770, # 4112 3771,1211,3636,1023,3258,1293,2812,5537,5538,5539,3905, 607,2311,3906, 762,2892, # 4128 1439,4365,1360,4721,1485,3072,5540,4722,1038,4366,1450,2062,2648,4367,1379,4723, # 4144 2593,5541,5542,4368,1352,1414,2330,2935,1172,5543,5544,3907,3908,4724,1798,1451, # 4160 5545,5546,5547,5548,2936,4109,4110,2492,2351, 411,4111,4112,3637,3333,3124,4725, # 4176 1561,2674,1452,4113,1375,5549,5550, 
47,2974, 316,5551,1406,1591,2937,3181,5552, # 4192 1025,2142,3125,3182, 354,2740, 884,2228,4369,2412, 508,3772, 726,3638, 996,2433, # 4208 3639, 729,5553, 392,2194,1453,4114,4726,3773,5554,5555,2463,3640,2618,1675,2813, # 4224 919,2352,2975,2353,1270,4727,4115, 73,5556,5557, 647,5558,3259,2856,2259,1550, # 4240 1346,3024,5559,1332, 883,3526,5560,5561,5562,5563,3334,2775,5564,1212, 831,1347, # 4256 4370,4728,2331,3909,1864,3073, 720,3910,4729,4730,3911,5565,4371,5566,5567,4731, # 4272 5568,5569,1799,4732,3774,2619,4733,3641,1645,2376,4734,5570,2938, 669,2211,2675, # 4288 2434,5571,2893,5572,5573,1028,3260,5574,4372,2413,5575,2260,1353,5576,5577,4735, # 4304 3183, 518,5578,4116,5579,4373,1961,5580,2143,4374,5581,5582,3025,2354,2355,3912, # 4320 516,1834,1454,4117,2708,4375,4736,2229,2620,1972,1129,3642,5583,2776,5584,2976, # 4336 1422, 577,1470,3026,1524,3410,5585,5586, 432,4376,3074,3527,5587,2594,1455,2515, # 4352 2230,1973,1175,5588,1020,2741,4118,3528,4737,5589,2742,5590,1743,1361,3075,3529, # 4368 2649,4119,4377,4738,2295, 895, 924,4378,2171, 331,2247,3076, 166,1627,3077,1098, # 4384 5591,1232,2894,2231,3411,4739, 657, 403,1196,2377, 542,3775,3412,1600,4379,3530, # 4400 5592,4740,2777,3261, 576, 530,1362,4741,4742,2540,2676,3776,4120,5593, 842,3913, # 4416 5594,2814,2032,1014,4121, 213,2709,3413, 665, 621,4380,5595,3777,2939,2435,5596, # 4432 2436,3335,3643,3414,4743,4381,2541,4382,4744,3644,1682,4383,3531,1380,5597, 724, # 4448 2282, 600,1670,5598,1337,1233,4745,3126,2248,5599,1621,4746,5600, 651,4384,5601, # 4464 1612,4385,2621,5602,2857,5603,2743,2312,3078,5604, 716,2464,3079, 174,1255,2710, # 4480 4122,3645, 548,1320,1398, 728,4123,1574,5605,1891,1197,3080,4124,5606,3081,3082, # 4496 3778,3646,3779, 747,5607, 635,4386,4747,5608,5609,5610,4387,5611,5612,4748,5613, # 4512 3415,4749,2437, 451,5614,3780,2542,2073,4388,2744,4389,4125,5615,1764,4750,5616, # 4528 4390, 350,4751,2283,2395,2493,5617,4391,4126,2249,1434,4127, 488,4752, 458,4392, # 4544 4128,3781, 771,1330,2396,3914,2576,3184,2160,2414,1553,2677,3185,4393,5618,2494, # 4560 2895,2622,1720,2711,4394,3416,4753,5619,2543,4395,5620,3262,4396,2778,5621,2016, # 4576 2745,5622,1155,1017,3782,3915,5623,3336,2313, 201,1865,4397,1430,5624,4129,5625, # 4592 5626,5627,5628,5629,4398,1604,5630, 414,1866, 371,2595,4754,4755,3532,2017,3127, # 4608 4756,1708, 960,4399, 887, 389,2172,1536,1663,1721,5631,2232,4130,2356,2940,1580, # 4624 5632,5633,1744,4757,2544,4758,4759,5634,4760,5635,2074,5636,4761,3647,3417,2896, # 4640 4400,5637,4401,2650,3418,2815, 673,2712,2465, 709,3533,4131,3648,4402,5638,1148, # 4656 502, 634,5639,5640,1204,4762,3649,1575,4763,2623,3783,5641,3784,3128, 948,3263, # 4672 121,1745,3916,1110,5642,4403,3083,2516,3027,4132,3785,1151,1771,3917,1488,4133, # 4688 1987,5643,2438,3534,5644,5645,2094,5646,4404,3918,1213,1407,2816, 531,2746,2545, # 4704 3264,1011,1537,4764,2779,4405,3129,1061,5647,3786,3787,1867,2897,5648,2018, 120, # 4720 4406,4407,2063,3650,3265,2314,3919,2678,3419,1955,4765,4134,5649,3535,1047,2713, # 4736 1266,5650,1368,4766,2858, 649,3420,3920,2546,2747,1102,2859,2679,5651,5652,2000, # 4752 5653,1111,3651,2977,5654,2495,3921,3652,2817,1855,3421,3788,5655,5656,3422,2415, # 4768 2898,3337,3266,3653,5657,2577,5658,3654,2818,4135,1460, 856,5659,3655,5660,2899, # 4784 2978,5661,2900,3922,5662,4408, 632,2517, 875,3923,1697,3924,2296,5663,5664,4767, # 4800 3028,1239, 580,4768,4409,5665, 914, 936,2075,1190,4136,1039,2124,5666,5667,5668, # 4816 5669,3423,1473,5670,1354,4410,3925,4769,2173,3084,4137, 
915,3338,4411,4412,3339, # 4832 1605,1835,5671,2748, 398,3656,4413,3926,4138, 328,1913,2860,4139,3927,1331,4414, # 4848 3029, 937,4415,5672,3657,4140,4141,3424,2161,4770,3425, 524, 742, 538,3085,1012, # 4864 5673,5674,3928,2466,5675, 658,1103, 225,3929,5676,5677,4771,5678,4772,5679,3267, # 4880 1243,5680,4142, 963,2250,4773,5681,2714,3658,3186,5682,5683,2596,2332,5684,4774, # 4896 5685,5686,5687,3536, 957,3426,2547,2033,1931,2941,2467, 870,2019,3659,1746,2780, # 4912 2781,2439,2468,5688,3930,5689,3789,3130,3790,3537,3427,3791,5690,1179,3086,5691, # 4928 3187,2378,4416,3792,2548,3188,3131,2749,4143,5692,3428,1556,2549,2297, 977,2901, # 4944 2034,4144,1205,3429,5693,1765,3430,3189,2125,1271, 714,1689,4775,3538,5694,2333, # 4960 3931, 533,4417,3660,2184, 617,5695,2469,3340,3539,2315,5696,5697,3190,5698,5699, # 4976 3932,1988, 618, 427,2651,3540,3431,5700,5701,1244,1690,5702,2819,4418,4776,5703, # 4992 3541,4777,5704,2284,1576, 473,3661,4419,3432, 972,5705,3662,5706,3087,5707,5708, # 5008 4778,4779,5709,3793,4145,4146,5710, 153,4780, 356,5711,1892,2902,4420,2144, 408, # 5024 803,2357,5712,3933,5713,4421,1646,2578,2518,4781,4782,3934,5714,3935,4422,5715, # 5040 2416,3433, 752,5716,5717,1962,3341,2979,5718, 746,3030,2470,4783,4423,3794, 698, # 5056 4784,1893,4424,3663,2550,4785,3664,3936,5719,3191,3434,5720,1824,1302,4147,2715, # 5072 3937,1974,4425,5721,4426,3192, 823,1303,1288,1236,2861,3542,4148,3435, 774,3938, # 5088 5722,1581,4786,1304,2862,3939,4787,5723,2440,2162,1083,3268,4427,4149,4428, 344, # 5104 1173, 288,2316, 454,1683,5724,5725,1461,4788,4150,2597,5726,5727,4789, 985, 894, # 5120 5728,3436,3193,5729,1914,2942,3795,1989,5730,2111,1975,5731,4151,5732,2579,1194, # 5136 425,5733,4790,3194,1245,3796,4429,5734,5735,2863,5736, 636,4791,1856,3940, 760, # 5152 1800,5737,4430,2212,1508,4792,4152,1894,1684,2298,5738,5739,4793,4431,4432,2213, # 5168 479,5740,5741, 832,5742,4153,2496,5743,2980,2497,3797, 990,3132, 627,1815,2652, # 5184 4433,1582,4434,2126,2112,3543,4794,5744, 799,4435,3195,5745,4795,2113,1737,3031, # 5200 1018, 543, 754,4436,3342,1676,4796,4797,4154,4798,1489,5746,3544,5747,2624,2903, # 5216 4155,5748,5749,2981,5750,5751,5752,5753,3196,4799,4800,2185,1722,5754,3269,3270, # 5232 1843,3665,1715, 481, 365,1976,1857,5755,5756,1963,2498,4801,5757,2127,3666,3271, # 5248 433,1895,2064,2076,5758, 602,2750,5759,5760,5761,5762,5763,3032,1628,3437,5764, # 5264 3197,4802,4156,2904,4803,2519,5765,2551,2782,5766,5767,5768,3343,4804,2905,5769, # 5280 4805,5770,2864,4806,4807,1221,2982,4157,2520,5771,5772,5773,1868,1990,5774,5775, # 5296 5776,1896,5777,5778,4808,1897,4158, 318,5779,2095,4159,4437,5780,5781, 485,5782, # 5312 938,3941, 553,2680, 116,5783,3942,3667,5784,3545,2681,2783,3438,3344,2820,5785, # 5328 3668,2943,4160,1747,2944,2983,5786,5787, 207,5788,4809,5789,4810,2521,5790,3033, # 5344 890,3669,3943,5791,1878,3798,3439,5792,2186,2358,3440,1652,5793,5794,5795, 941, # 5360 2299, 208,3546,4161,2020, 330,4438,3944,2906,2499,3799,4439,4811,5796,5797,5798, # 5376 ) PKZO 4site-packages/pip/_vendor/chardet/sbcsgroupprober.pynu[######################## BEGIN LICENSE BLOCK ######################## # The Original Code is Mozilla Universal charset detector code. # # The Initial Developer of the Original Code is # Netscape Communications Corporation. # Portions created by the Initial Developer are Copyright (C) 2001 # the Initial Developer. All Rights Reserved. 
#
# Contributor(s):
#   Mark Pilgrim - port to Python
#   Shy Shalom - original C code
#
# This library is free software; you can redistribute it and/or
# modify it under the terms of the GNU Lesser General Public
# License as published by the Free Software Foundation; either
# version 2.1 of the License, or (at your option) any later version.
#
# This library is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
# Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this library; if not, write to the Free Software
# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA
# 02110-1301  USA
######################### END LICENSE BLOCK #########################

from .charsetgroupprober import CharSetGroupProber
from .sbcharsetprober import SingleByteCharSetProber
from .langcyrillicmodel import (Win1251CyrillicModel, Koi8rModel,
                                Latin5CyrillicModel, MacCyrillicModel,
                                Ibm866Model, Ibm855Model)
from .langgreekmodel import Latin7GreekModel, Win1253GreekModel
from .langbulgarianmodel import Latin5BulgarianModel, Win1251BulgarianModel
# from .langhungarianmodel import Latin2HungarianModel, Win1250HungarianModel
from .langthaimodel import TIS620ThaiModel
from .langhebrewmodel import Win1255HebrewModel
from .hebrewprober import HebrewProber
from .langturkishmodel import Latin5TurkishModel


class SBCSGroupProber(CharSetGroupProber):
    def __init__(self):
        super(SBCSGroupProber, self).__init__()
        self.probers = [
            SingleByteCharSetProber(Win1251CyrillicModel),
            SingleByteCharSetProber(Koi8rModel),
            SingleByteCharSetProber(Latin5CyrillicModel),
            SingleByteCharSetProber(MacCyrillicModel),
            SingleByteCharSetProber(Ibm866Model),
            SingleByteCharSetProber(Ibm855Model),
            SingleByteCharSetProber(Latin7GreekModel),
            SingleByteCharSetProber(Win1253GreekModel),
            SingleByteCharSetProber(Latin5BulgarianModel),
            SingleByteCharSetProber(Win1251BulgarianModel),
            # TODO: Restore Hungarian encodings (iso-8859-2 and windows-1250)
            #       after we retrain model.
            # SingleByteCharSetProber(Latin2HungarianModel),
            # SingleByteCharSetProber(Win1250HungarianModel),
            SingleByteCharSetProber(TIS620ThaiModel),
            SingleByteCharSetProber(Latin5TurkishModel),
        ]
        hebrew_prober = HebrewProber()
        logical_hebrew_prober = SingleByteCharSetProber(Win1255HebrewModel,
                                                        False, hebrew_prober)
        visual_hebrew_prober = SingleByteCharSetProber(Win1255HebrewModel, True,
                                                       hebrew_prober)
        hebrew_prober.set_model_probers(logical_hebrew_prober,
                                        visual_hebrew_prober)
        self.probers.extend([hebrew_prober, logical_hebrew_prober,
                             visual_hebrew_prober])
        self.reset()

[compiled bytecode not reproduced: site-packages/pip/_vendor/chardet/escsm.pyo (HZ_SM_MODEL, ISO2022CN_SM_MODEL, ISO2022JP_SM_MODEL and ISO2022KR_SM_MODEL escape-sequence state machines) and the start of site-packages/pip/_vendor/chardet/mbcharsetprober.pyc]
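A short usage sketch for the SBCSGroupProber source above. It is not part of the packaged files: the pip._vendor import path and the tiny windows-1251 sample are illustrative assumptions, while feed(), charset_name and get_confidence() are the prober interface the classes in this dump implement. Real callers would feed considerably more text before trusting the result.

# Illustrative sketch only (assumed), exercising the group prober defined above.
from pip._vendor.chardet.sbcsgroupprober import SBCSGroupProber

prober = SBCSGroupProber()
# The group prober fans the bytes out to every single-byte charset prober
# it was constructed with (Cyrillic, Greek, Bulgarian, Thai, Hebrew, Turkish).
prober.feed(b"\xcf\xf0\xe8\xe2\xe5\xf2, \xec\xe8\xf0!")  # "Privet, mir!" encoded as windows-1251
print(prober.charset_name)      # name of the best-scoring charset so far
print(prober.get_confidence())  # its confidence, between 0.0 and 1.0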
[compiled bytecode not reproduced: remainder of site-packages/pip/_vendor/chardet/mbcharsetprober.pyc]

site-packages/pip/_vendor/chardet/mbcharsetprober.py:
######################## BEGIN LICENSE BLOCK ########################
# The Original Code is Mozilla Universal charset detector code.
#
# The Initial Developer of the Original Code is
# Netscape Communications Corporation.
# Portions created by the Initial Developer are Copyright (C) 2001
# the Initial Developer. All Rights Reserved.
#
# Contributor(s):
#   Mark Pilgrim - port to Python
#   Shy Shalom - original C code
#   Proofpoint, Inc.
#
# This library is free software; you can redistribute it and/or
# modify it under the terms of the GNU Lesser General Public
# License as published by the Free Software Foundation; either
# version 2.1 of the License, or (at your option) any later version.
#
# This library is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
# Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this library; if not, write to the Free Software
# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA
# 02110-1301  USA
######################### END LICENSE BLOCK #########################

from .charsetprober import CharSetProber
from .enums import ProbingState, MachineState


class MultiByteCharSetProber(CharSetProber):
    """
    MultiByteCharSetProber
    """

    def __init__(self, lang_filter=None):
        super(MultiByteCharSetProber, self).__init__(lang_filter=lang_filter)
        self.distribution_analyzer = None
        self.coding_sm = None
        self._last_char = [0, 0]

    def reset(self):
        super(MultiByteCharSetProber, self).reset()
        if self.coding_sm:
            self.coding_sm.reset()
        if self.distribution_analyzer:
            self.distribution_analyzer.reset()
        self._last_char = [0, 0]

    @property
    def charset_name(self):
        raise NotImplementedError

    @property
    def language(self):
        raise NotImplementedError

    def feed(self, byte_str):
        for i in range(len(byte_str)):
            coding_state = self.coding_sm.next_state(byte_str[i])
            if coding_state == MachineState.ERROR:
                self.logger.debug('%s %s prober hit error at byte %s',
                                  self.charset_name, self.language, i)
                self._state = ProbingState.NOT_ME
                break
            elif coding_state == MachineState.ITS_ME:
                self._state = ProbingState.FOUND_IT
                break
            elif coding_state == MachineState.START:
                char_len = self.coding_sm.get_current_charlen()
                if i == 0:
                    self._last_char[1] = byte_str[0]
                    self.distribution_analyzer.feed(self._last_char, char_len)
                else:
                    self.distribution_analyzer.feed(byte_str[i - 1:i + 1],
                                                    char_len)

        self._last_char[0] = byte_str[-1]

        if self.state == ProbingState.DETECTING:
            if (self.distribution_analyzer.got_enough_data() and
                    (self.get_confidence() > self.SHORTCUT_THRESHOLD)):
                self._state = ProbingState.FOUND_IT

        return self.state

    def get_confidence(self):
        return self.distribution_analyzer.get_confidence()

[compiled bytecode not reproduced: site-packages/pip/_vendor/chardet/big5freq.pyo, the compiled form of the big5freq.py source shown above]
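MultiByteCharSetProber above is abstract: a concrete prober supplies the coding state machine and the distribution analyzer that feed() drives, plus its charset and language names. Below is a minimal sketch of that wiring, modeled on the big5prober.py shipped in this same vendored chardet; the class name Big5LikeProber is illustrative, while the imported helper modules are the package's own.

# Illustrative sketch (assumed): how a concrete multi-byte prober fills in
# the pieces that MultiByteCharSetProber.feed() relies on.
from pip._vendor.chardet.mbcharsetprober import MultiByteCharSetProber
from pip._vendor.chardet.codingstatemachine import CodingStateMachine
from pip._vendor.chardet.chardistribution import Big5DistributionAnalysis
from pip._vendor.chardet.mbcssm import BIG5_SM_MODEL


class Big5LikeProber(MultiByteCharSetProber):
    def __init__(self):
        super(Big5LikeProber, self).__init__()
        # Byte-level legality is tracked by the coding state machine ...
        self.coding_sm = CodingStateMachine(BIG5_SM_MODEL)
        # ... and character frequency is scored by the distribution analyzer,
        # which uses frequency tables like BIG5_CHAR_TO_FREQ_ORDER above.
        self.distribution_analyzer = Big5DistributionAnalysis()
        self.reset()

    @property
    def charset_name(self):
        return "Big5"

    @property
    def language(self):
        return "Chinese"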
[binary content omitted: remainder of the compiled pip/_vendor/chardet/big5freq.pyc data (BIG5_CHAR_TO_FREQ_ORDER, BIG5_TABLE_SIZE, BIG5_TYPICAL_DISTRIBUTION_RATIO) and the compiled pip/_vendor/chardet/sbcharsetprober.pyo module (SingleByteCharSetProber)]
site-packages/pip/_vendor/chardet/langbulgarianmodel.py
######################## BEGIN LICENSE BLOCK ######################## # The Original Code is Mozilla Communicator client code. # # The Initial Developer of the Original Code is # Netscape Communications Corporation.
# Portions created by the Initial Developer are Copyright (C) 1998 # the Initial Developer. All Rights Reserved. # # Contributor(s): # Mark Pilgrim - port to Python # # This library is free software; you can redistribute it and/or # modify it under the terms of the GNU Lesser General Public # License as published by the Free Software Foundation; either # version 2.1 of the License, or (at your option) any later version. # # This library is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU # Lesser General Public License for more details. # # You should have received a copy of the GNU Lesser General Public # License along with this library; if not, write to the Free Software # Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA # 02110-1301 USA ######################### END LICENSE BLOCK ######################### # 255: Control characters that usually does not exist in any text # 254: Carriage/Return # 253: symbol (punctuation) that does not belong to word # 252: 0 - 9 # Character Mapping Table: # this table is modified base on win1251BulgarianCharToOrderMap, so # only number <64 is sure valid Latin5_BulgarianCharToOrderMap = ( 255,255,255,255,255,255,255,255,255,255,254,255,255,254,255,255, # 00 255,255,255,255,255,255,255,255,255,255,255,255,255,255,255,255, # 10 253,253,253,253,253,253,253,253,253,253,253,253,253,253,253,253, # 20 252,252,252,252,252,252,252,252,252,252,253,253,253,253,253,253, # 30 253, 77, 90, 99,100, 72,109,107,101, 79,185, 81,102, 76, 94, 82, # 40 110,186,108, 91, 74,119, 84, 96,111,187,115,253,253,253,253,253, # 50 253, 65, 69, 70, 66, 63, 68,112,103, 92,194,104, 95, 86, 87, 71, # 60 116,195, 85, 93, 97,113,196,197,198,199,200,253,253,253,253,253, # 70 194,195,196,197,198,199,200,201,202,203,204,205,206,207,208,209, # 80 210,211,212,213,214,215,216,217,218,219,220,221,222,223,224,225, # 90 81,226,227,228,229,230,105,231,232,233,234,235,236, 45,237,238, # a0 31, 32, 35, 43, 37, 44, 55, 47, 40, 59, 33, 46, 38, 36, 41, 30, # b0 39, 28, 34, 51, 48, 49, 53, 50, 54, 57, 61,239, 67,240, 60, 56, # c0 1, 18, 9, 20, 11, 3, 23, 15, 2, 26, 12, 10, 14, 6, 4, 13, # d0 7, 8, 5, 19, 29, 25, 22, 21, 27, 24, 17, 75, 52,241, 42, 16, # e0 62,242,243,244, 58,245, 98,246,247,248,249,250,251, 91,252,253, # f0 ) win1251BulgarianCharToOrderMap = ( 255,255,255,255,255,255,255,255,255,255,254,255,255,254,255,255, # 00 255,255,255,255,255,255,255,255,255,255,255,255,255,255,255,255, # 10 253,253,253,253,253,253,253,253,253,253,253,253,253,253,253,253, # 20 252,252,252,252,252,252,252,252,252,252,253,253,253,253,253,253, # 30 253, 77, 90, 99,100, 72,109,107,101, 79,185, 81,102, 76, 94, 82, # 40 110,186,108, 91, 74,119, 84, 96,111,187,115,253,253,253,253,253, # 50 253, 65, 69, 70, 66, 63, 68,112,103, 92,194,104, 95, 86, 87, 71, # 60 116,195, 85, 93, 97,113,196,197,198,199,200,253,253,253,253,253, # 70 206,207,208,209,210,211,212,213,120,214,215,216,217,218,219,220, # 80 221, 78, 64, 83,121, 98,117,105,222,223,224,225,226,227,228,229, # 90 88,230,231,232,233,122, 89,106,234,235,236,237,238, 45,239,240, # a0 73, 80,118,114,241,242,243,244,245, 62, 58,246,247,248,249,250, # b0 31, 32, 35, 43, 37, 44, 55, 47, 40, 59, 33, 46, 38, 36, 41, 30, # c0 39, 28, 34, 51, 48, 49, 53, 50, 54, 57, 61,251, 67,252, 60, 56, # d0 1, 18, 9, 20, 11, 3, 23, 15, 2, 26, 12, 10, 14, 6, 4, 13, # e0 7, 8, 5, 19, 29, 25, 22, 21, 27, 24, 17, 75, 52,253, 42, 16, # f0 ) # Model Table: # total 
sequences: 100% # first 512 sequences: 96.9392% # first 1024 sequences:3.0618% # rest sequences: 0.2992% # negative sequences: 0.0020% BulgarianLangModel = ( 0,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,2,3,3,3,3,3,3,3,3,2,3,3,3,3,3, 3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,0,3,3,3,2,2,3,2,2,1,2,2, 3,1,3,3,2,3,3,3,3,3,3,3,3,3,3,3,3,0,3,3,3,3,3,3,3,3,3,3,0,3,0,1, 0,0,0,0,0,0,0,0,0,0,1,0,1,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1, 3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,2,3,2,3,3,3,3,3,3,3,3,0,3,1,0, 0,1,0,0,0,0,0,0,0,0,1,1,0,1,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1, 3,2,2,2,3,3,3,3,3,3,3,3,3,3,3,3,3,1,3,2,3,3,3,3,3,3,3,3,0,3,0,0, 0,0,0,0,0,0,0,0,0,0,1,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 3,2,3,3,2,3,3,3,3,3,3,3,3,3,3,3,3,1,3,2,3,3,3,3,3,3,3,3,0,3,0,0, 0,0,0,0,0,0,0,0,0,0,1,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 3,3,3,3,3,3,3,3,3,3,3,2,3,2,2,1,3,3,3,3,2,2,2,1,1,2,0,1,0,1,0,0, 0,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,1, 3,3,3,3,3,3,3,2,3,2,2,3,3,1,1,2,3,3,2,3,3,3,3,2,1,2,0,2,0,3,0,0, 0,0,0,0,0,0,0,1,0,0,2,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,1, 3,3,3,3,3,3,3,1,3,3,3,3,3,2,3,2,3,3,3,3,3,2,3,3,1,3,0,3,0,2,0,0, 0,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,1, 3,3,3,3,3,3,3,3,1,3,3,2,3,3,3,1,3,3,2,3,2,2,2,0,0,2,0,2,0,2,0,0, 0,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,1, 3,3,3,3,3,3,3,3,3,0,3,3,3,2,2,3,3,3,1,2,2,3,2,1,1,2,0,2,0,0,0,0, 1,0,0,0,0,0,0,0,0,0,2,0,0,1,0,0,1,0,0,0,1,0,0,0,0,0,0,0,0,0,0,1, 3,3,3,3,3,3,3,2,3,3,1,2,3,2,2,2,3,3,3,3,3,2,2,3,1,2,0,2,1,2,0,0, 0,0,0,0,0,0,0,0,0,0,3,0,0,1,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,1, 3,3,3,3,3,1,3,3,3,3,3,2,3,3,3,2,3,3,2,3,2,2,2,3,1,2,0,1,0,1,0,0, 0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,1, 3,3,3,3,3,3,3,3,3,3,3,1,1,1,2,2,1,3,1,3,2,2,3,0,0,1,0,1,0,1,0,0, 0,0,0,1,0,0,0,0,1,0,2,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,1, 3,3,3,3,3,2,2,3,2,2,3,1,2,1,1,1,2,3,1,3,1,2,2,0,1,1,1,1,0,1,0,0, 0,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,1, 3,3,3,3,3,1,3,2,2,3,3,1,2,3,1,1,3,3,3,3,1,2,2,1,1,1,0,2,0,2,0,1, 0,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,1, 3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,1,2,2,3,3,3,2,2,1,1,2,0,2,0,1,0,0, 0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,1, 3,0,1,2,1,3,3,2,3,3,3,3,3,2,3,2,1,0,3,1,2,1,2,1,2,3,2,1,0,1,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 1,1,1,2,3,3,3,3,3,3,3,3,3,3,3,3,0,0,3,1,3,3,2,3,3,2,2,2,0,1,0,0, 0,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 2,3,3,3,3,0,3,3,3,3,3,2,1,1,2,1,3,3,0,3,1,1,1,1,3,2,0,1,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,1, 3,3,2,2,2,3,3,3,3,3,3,3,3,3,3,3,1,1,3,1,3,3,2,3,2,2,2,3,0,2,0,0, 0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 3,3,3,3,3,2,3,3,2,2,3,2,1,1,1,1,1,3,1,3,1,1,0,0,0,1,0,0,0,1,0,0, 0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0, 3,3,3,3,3,2,3,2,0,3,2,0,3,0,2,0,0,2,1,3,1,0,0,1,0,0,0,1,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1, 3,3,3,3,2,1,1,1,1,2,1,1,2,1,1,1,2,2,1,2,1,1,1,0,1,1,0,1,0,1,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,1, 3,3,3,3,2,1,3,1,1,2,1,3,2,1,1,0,1,2,3,2,1,1,1,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 2,3,3,3,3,2,2,1,0,1,0,0,1,0,0,0,2,1,0,3,0,0,1,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1, 3,3,3,2,3,2,3,3,1,3,2,1,1,1,2,1,1,2,1,3,0,1,0,0,0,1,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 
3,1,1,2,2,3,3,2,3,2,2,2,3,1,2,2,1,1,2,1,1,2,2,0,1,1,0,1,0,2,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 3,3,3,3,2,1,3,1,0,2,2,1,3,2,1,0,0,2,0,2,0,1,0,0,0,0,0,0,0,1,0,0, 0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,1, 3,3,3,3,3,3,1,2,0,2,3,1,2,3,2,0,1,3,1,2,1,1,1,0,0,1,0,0,2,2,2,3, 2,2,2,2,1,2,1,1,2,2,1,1,2,0,1,1,1,0,0,1,1,0,0,1,1,0,0,0,1,1,0,1, 3,3,3,3,3,2,1,2,2,1,2,0,2,0,1,0,1,2,1,2,1,1,0,0,0,1,0,1,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,1, 3,3,2,3,3,1,1,3,1,0,3,2,1,0,0,0,1,2,0,2,0,1,0,0,0,1,0,1,2,1,2,2, 1,1,1,1,1,1,1,2,2,2,1,1,1,1,1,1,1,0,1,2,1,1,1,0,0,0,0,0,1,1,0,0, 3,1,0,1,0,2,3,2,2,2,3,2,2,2,2,2,1,0,2,1,2,1,1,1,0,1,2,1,2,2,2,1, 1,1,2,2,2,2,1,2,1,1,0,1,2,1,2,2,2,1,1,1,0,1,1,1,1,2,0,1,0,0,0,0, 2,3,2,3,3,0,0,2,1,0,2,1,0,0,0,0,2,3,0,2,0,0,0,0,0,1,0,0,2,0,1,2, 2,1,2,1,2,2,1,1,1,2,1,1,1,0,1,2,2,1,1,1,1,1,0,1,1,1,0,0,1,2,0,0, 3,3,2,2,3,0,2,3,1,1,2,0,0,0,1,0,0,2,0,2,0,0,0,1,0,1,0,1,2,0,2,2, 1,1,1,1,2,1,0,1,2,2,2,1,1,1,1,1,1,1,0,1,1,1,0,0,0,0,0,0,1,1,0,0, 2,3,2,3,3,0,0,3,0,1,1,0,1,0,0,0,2,2,1,2,0,0,0,0,0,0,0,0,2,0,1,2, 2,2,1,1,1,1,1,2,2,2,1,0,2,0,1,0,1,0,0,1,0,1,0,0,1,0,0,0,0,1,0,0, 3,3,3,3,2,2,2,2,2,0,2,1,1,1,1,2,1,2,1,1,0,2,0,1,0,1,0,0,2,0,1,2, 1,1,1,1,1,1,1,2,2,1,1,0,2,0,1,0,2,0,0,1,1,1,0,0,2,0,0,0,1,1,0,0, 2,3,3,3,3,1,0,0,0,0,0,0,0,0,0,0,2,0,0,1,1,0,0,0,0,0,0,1,2,0,1,2, 2,2,2,1,1,2,1,1,2,2,2,1,2,0,1,1,1,1,1,1,0,1,1,1,1,0,0,1,1,1,0,0, 2,3,3,3,3,0,2,2,0,2,1,0,0,0,1,1,1,2,0,2,0,0,0,3,0,0,0,0,2,0,2,2, 1,1,1,2,1,2,1,1,2,2,2,1,2,0,1,1,1,0,1,1,1,1,0,2,1,0,0,0,1,1,0,0, 2,3,3,3,3,0,2,1,0,0,2,0,0,0,0,0,1,2,0,2,0,0,0,0,0,0,0,0,2,0,1,2, 1,1,1,2,1,1,1,1,2,2,2,0,1,0,1,1,1,0,0,1,1,1,0,0,1,0,0,0,0,1,0,0, 3,3,2,2,3,0,1,0,1,0,0,0,0,0,0,0,1,1,0,3,0,0,0,0,0,0,0,0,1,0,2,2, 1,1,1,1,1,2,1,1,2,2,1,2,2,1,0,1,1,1,1,1,0,1,0,0,1,0,0,0,1,1,0,0, 3,1,0,1,0,2,2,2,2,3,2,1,1,1,2,3,0,0,1,0,2,1,1,0,1,1,1,1,2,1,1,1, 1,2,2,1,2,1,2,2,1,1,0,1,2,1,2,2,1,1,1,0,0,1,1,1,2,1,0,1,0,0,0,0, 2,1,0,1,0,3,1,2,2,2,2,1,2,2,1,1,1,0,2,1,2,2,1,1,2,1,1,0,2,1,1,1, 1,2,2,2,2,2,2,2,1,2,0,1,1,0,2,1,1,1,1,1,0,0,1,1,1,1,0,1,0,0,0,0, 2,1,1,1,1,2,2,2,2,1,2,2,2,1,2,2,1,1,2,1,2,3,2,2,1,1,1,1,0,1,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 2,2,2,3,2,0,1,2,0,1,2,1,1,0,1,0,1,2,1,2,0,0,0,1,1,0,0,0,1,0,0,2, 1,1,0,0,1,1,0,1,1,1,1,0,2,0,1,1,1,0,0,1,1,0,0,0,0,1,0,0,0,1,0,0, 2,0,0,0,0,1,2,2,2,2,2,2,2,1,2,1,1,1,1,1,1,1,0,1,1,1,1,1,2,1,1,1, 1,2,2,2,2,1,1,2,1,2,1,1,1,0,2,1,2,1,1,1,0,2,1,1,1,1,0,1,0,0,0,0, 3,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,1,0, 1,1,0,1,0,1,1,1,1,1,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 2,2,2,3,2,0,0,0,0,1,0,0,0,0,0,0,1,1,0,2,0,0,0,0,0,0,0,0,1,0,1,2, 1,1,1,1,1,1,0,0,2,2,2,2,2,0,1,1,0,1,1,1,1,1,0,0,1,0,0,0,1,1,0,1, 2,3,1,2,1,0,1,1,0,2,2,2,0,0,1,0,0,1,1,1,1,0,0,0,0,0,0,0,1,0,1,2, 1,1,1,1,2,1,1,1,1,1,1,1,1,0,1,1,0,1,0,1,0,1,0,0,1,0,0,0,0,1,0,0, 2,2,2,2,2,0,0,2,0,0,2,0,0,0,0,0,0,1,0,1,0,0,0,0,0,0,0,0,2,0,2,2, 1,1,1,1,1,0,0,1,2,1,1,0,1,0,1,0,0,0,0,1,1,0,0,0,0,0,0,0,0,0,0,0, 1,2,2,2,2,0,0,2,0,1,1,0,0,0,1,0,0,2,0,2,0,0,0,0,0,0,0,0,0,0,1,1, 0,0,0,1,1,1,1,1,1,1,1,1,1,0,1,0,0,1,0,0,1,0,0,0,0,0,0,0,0,0,0,0, 1,2,2,3,2,0,0,1,0,0,1,0,0,0,0,0,0,1,0,2,0,0,0,1,0,0,0,0,0,0,0,2, 1,1,0,0,1,0,0,0,1,1,0,0,1,0,1,1,0,0,0,1,1,0,0,0,0,0,0,0,0,0,0,0, 2,1,2,2,2,1,2,1,2,2,1,1,2,1,1,1,0,1,1,1,1,2,0,1,0,1,1,1,1,0,1,1, 1,1,2,1,1,1,1,1,1,0,0,1,2,1,1,1,1,1,1,0,0,1,1,1,0,0,0,0,0,0,0,0, 1,0,0,1,3,1,1,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 
2,2,2,2,1,0,0,1,0,2,0,0,0,0,0,1,1,1,0,1,0,0,0,0,0,0,0,0,2,0,0,1, 0,2,0,1,0,0,1,1,2,0,1,0,1,0,1,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0, 1,2,2,2,2,0,1,1,0,2,1,0,1,1,1,0,0,1,0,2,0,1,0,0,0,0,0,0,0,0,0,1, 0,1,0,0,1,0,0,0,1,1,0,0,1,0,0,1,0,0,0,1,1,0,0,0,0,0,0,0,0,0,0,0, 2,2,2,2,2,0,0,1,0,0,0,1,0,1,0,0,0,1,0,1,0,0,0,0,0,0,0,0,0,0,0,1, 0,1,0,1,1,1,0,0,1,1,1,0,1,0,0,0,0,0,0,1,1,0,0,0,0,0,0,0,0,0,0,0, 2,0,1,0,0,1,2,1,1,1,1,1,1,2,2,1,0,0,1,0,1,0,0,0,0,1,1,1,1,0,0,0, 1,1,2,1,1,1,1,0,0,0,1,1,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 2,2,1,2,1,0,0,1,0,0,0,0,0,0,0,0,1,1,0,1,0,0,0,0,0,0,0,0,0,0,0,1, 0,0,0,0,0,0,0,0,1,1,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 3,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 1,0,0,1,2,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,1,0,0,0, 0,1,1,0,1,1,1,0,0,1,0,0,1,0,1,0,0,0,1,0,0,0,0,0,1,0,0,0,0,0,0,0, 1,0,1,0,0,1,1,1,1,1,1,1,1,1,1,1,0,0,1,0,2,0,0,2,0,1,0,0,1,0,0,1, 1,1,0,0,1,1,0,1,0,0,0,1,0,0,1,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,1,1,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,1,0, 1,1,1,1,1,1,1,2,0,0,0,0,0,0,2,1,0,1,1,0,0,1,1,1,0,1,0,0,0,0,0,0, 2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 1,0,0,1,1,1,1,1,1,1,1,1,1,1,1,1,1,0,1,0,1,1,0,1,1,1,1,1,0,1,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1, ) Latin5BulgarianModel = { 'char_to_order_map': Latin5_BulgarianCharToOrderMap, 'precedence_matrix': BulgarianLangModel, 'typical_positive_ratio': 0.969392, 'keep_english_letter': False, 'charset_name': "ISO-8859-5", 'language': 'Bulgairan', } Win1251BulgarianModel = { 'char_to_order_map': win1251BulgarianCharToOrderMap, 'precedence_matrix': BulgarianLangModel, 'typical_positive_ratio': 0.969392, 'keep_english_letter': False, 'charset_name': "windows-1251", 'language': 'Bulgarian', } PKZfQ,Q,4site-packages/pip/_vendor/chardet/langhebrewmodel.pynu[######################## BEGIN LICENSE BLOCK ######################## # The Original Code is Mozilla Universal charset detector code. # # The Initial Developer of the Original Code is # Simon Montagu # Portions created by the Initial Developer are Copyright (C) 2005 # the Initial Developer. All Rights Reserved. # # Contributor(s): # Mark Pilgrim - port to Python # Shy Shalom - original C code # Shoshannah Forbes - original C code (?) # # This library is free software; you can redistribute it and/or # modify it under the terms of the GNU Lesser General Public # License as published by the Free Software Foundation; either # version 2.1 of the License, or (at your option) any later version. # # This library is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU # Lesser General Public License for more details. 
# # You should have received a copy of the GNU Lesser General Public # License along with this library; if not, write to the Free Software # Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA # 02110-1301 USA ######################### END LICENSE BLOCK ######################### # 255: Control characters that usually does not exist in any text # 254: Carriage/Return # 253: symbol (punctuation) that does not belong to word # 252: 0 - 9 # Windows-1255 language model # Character Mapping Table: WIN1255_CHAR_TO_ORDER_MAP = ( 255,255,255,255,255,255,255,255,255,255,254,255,255,254,255,255, # 00 255,255,255,255,255,255,255,255,255,255,255,255,255,255,255,255, # 10 253,253,253,253,253,253,253,253,253,253,253,253,253,253,253,253, # 20 252,252,252,252,252,252,252,252,252,252,253,253,253,253,253,253, # 30 253, 69, 91, 79, 80, 92, 89, 97, 90, 68,111,112, 82, 73, 95, 85, # 40 78,121, 86, 71, 67,102,107, 84,114,103,115,253,253,253,253,253, # 50 253, 50, 74, 60, 61, 42, 76, 70, 64, 53,105, 93, 56, 65, 54, 49, # 60 66,110, 51, 43, 44, 63, 81, 77, 98, 75,108,253,253,253,253,253, # 70 124,202,203,204,205, 40, 58,206,207,208,209,210,211,212,213,214, 215, 83, 52, 47, 46, 72, 32, 94,216,113,217,109,218,219,220,221, 34,116,222,118,100,223,224,117,119,104,125,225,226, 87, 99,227, 106,122,123,228, 55,229,230,101,231,232,120,233, 48, 39, 57,234, 30, 59, 41, 88, 33, 37, 36, 31, 29, 35,235, 62, 28,236,126,237, 238, 38, 45,239,240,241,242,243,127,244,245,246,247,248,249,250, 9, 8, 20, 16, 3, 2, 24, 14, 22, 1, 25, 15, 4, 11, 6, 23, 12, 19, 13, 26, 18, 27, 21, 17, 7, 10, 5,251,252,128, 96,253, ) # Model Table: # total sequences: 100% # first 512 sequences: 98.4004% # first 1024 sequences: 1.5981% # rest sequences: 0.087% # negative sequences: 0.0015% HEBREW_LANG_MODEL = ( 0,3,3,3,3,3,3,3,3,3,3,2,3,3,3,3,3,3,3,3,3,3,3,2,3,2,1,2,0,1,0,0, 3,0,3,1,0,0,1,3,2,0,1,1,2,0,2,2,2,1,1,1,1,2,1,1,1,2,0,0,2,2,0,1, 3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,2,2,2,2, 1,2,1,2,1,2,0,0,2,0,0,0,0,0,1,0,1,0,0,0,0,0,0,1,0,0,0,0,0,0,1,0, 3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,2,2,2, 1,2,1,3,1,1,0,0,2,0,0,0,1,0,1,0,1,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0, 3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,1,0,1,2,2,1,3, 1,2,1,1,2,2,0,0,2,2,0,0,0,0,1,0,1,0,0,0,1,0,0,0,0,0,0,1,0,1,1,0, 3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,2,3,3,2,2,2,2,3,2, 1,2,1,2,2,2,0,0,1,0,0,0,0,0,1,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,1,0, 3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,2,3,3,2,3,2,2,3,2,2,2,1,2,2,2,2, 1,2,1,1,2,2,0,1,2,0,0,0,0,0,0,0,1,0,0,0,1,0,0,0,0,0,0,0,0,0,1,0, 3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,2,0,2,2,2,2,2, 0,2,0,2,2,2,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,1,0, 3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,2,3,0,2,2,2, 0,2,1,2,2,2,0,0,2,1,0,0,0,0,1,0,1,0,0,0,0,0,0,2,0,0,0,0,0,0,1,0, 3,3,3,3,3,3,3,3,3,3,3,2,3,3,3,3,3,3,3,3,3,3,3,3,3,2,1,2,3,2,2,2, 1,2,1,2,2,2,0,0,1,0,0,0,0,0,1,0,0,0,0,0,0,0,0,1,0,0,0,0,0,1,1,0, 3,3,3,3,3,3,3,3,3,2,3,3,3,2,3,3,3,3,3,3,3,3,3,3,3,3,3,1,0,2,0,2, 0,2,1,2,2,2,0,0,1,2,0,0,0,0,1,0,1,0,0,0,0,0,0,1,0,0,0,2,0,0,1,0, 3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,2,3,2,3,2,2,3,2,1,2,1,1,1, 0,1,1,1,1,1,3,0,1,0,0,0,0,2,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0, 3,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,0,1,1,0,1,1,0,0,1,0,0,1,0,0,0,0, 0,0,1,0,0,0,0,0,2,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,2,2,2,2,2,2,2, 0,2,0,1,2,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0, 3,3,3,3,3,3,3,3,3,2,3,3,3,2,1,2,3,3,2,3,3,3,3,2,3,2,1,2,0,2,1,2, 
0,2,0,2,2,2,0,0,1,2,0,0,0,0,1,0,1,0,0,0,0,0,0,0,0,0,0,1,0,0,1,0, 3,3,3,3,3,3,3,3,3,2,3,3,3,1,2,2,3,3,2,3,2,3,2,2,3,1,2,2,0,2,2,2, 0,2,1,2,2,2,0,0,1,2,0,0,0,0,1,0,0,0,0,0,1,0,0,1,0,0,0,1,0,0,1,0, 3,3,3,3,3,3,3,3,3,3,3,3,3,2,3,3,3,2,3,3,2,2,2,3,3,3,3,1,3,2,2,2, 0,2,0,1,2,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0, 3,3,3,3,3,3,3,3,3,3,3,3,3,3,2,2,3,3,3,2,3,2,2,2,1,2,2,0,2,2,2,2, 0,2,0,2,2,2,0,0,1,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0, 3,3,3,3,3,3,3,3,3,3,3,2,3,3,3,1,3,2,3,3,2,3,3,2,2,1,2,2,2,2,2,2, 0,2,1,2,1,2,0,0,1,0,0,0,0,0,1,0,0,0,0,0,1,0,0,1,0,0,0,0,0,0,1,0, 3,3,3,3,3,3,2,3,2,3,3,2,3,3,3,3,2,3,2,3,3,3,3,3,2,2,2,2,2,2,2,1, 0,2,0,1,2,1,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,1,0, 3,3,3,3,3,3,3,3,3,2,1,2,3,3,3,3,3,3,3,2,3,2,3,2,1,2,3,0,2,1,2,2, 0,2,1,1,2,1,0,0,1,0,0,0,0,0,1,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,2,0, 3,3,3,3,3,3,3,3,3,2,3,3,3,3,2,1,3,1,2,2,2,1,2,3,3,1,2,1,2,2,2,2, 0,1,1,1,1,1,0,0,0,0,0,0,0,0,1,0,0,0,0,0,1,0,0,2,0,0,0,0,0,0,0,0, 3,3,3,3,3,3,3,3,3,3,0,2,3,3,3,1,3,3,3,1,2,2,2,2,1,1,2,2,2,2,2,2, 0,2,0,1,1,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,1,0, 3,3,3,3,3,3,2,3,3,3,2,2,3,3,3,2,1,2,3,2,3,2,2,2,2,1,2,1,1,1,2,2, 0,2,1,1,1,1,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0, 3,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,0,1,0,0,0,1,0,0,0,0,0, 1,0,1,0,0,0,0,0,2,0,0,0,0,0,1,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 3,3,3,3,3,2,3,3,2,3,1,2,2,2,2,3,2,3,1,1,2,2,1,2,2,1,1,0,2,2,2,2, 0,1,0,1,2,2,0,0,1,1,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,1,0, 3,0,0,1,1,0,1,0,0,1,1,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,1,2,2,0, 0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 3,0,1,0,1,0,1,1,0,1,1,0,0,0,1,1,0,1,1,1,0,0,0,0,0,0,1,0,0,0,0,0, 0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 3,0,0,0,1,1,0,1,0,1,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0, 3,2,2,1,2,2,2,2,2,2,2,1,2,2,1,2,2,1,1,1,1,1,1,1,1,2,1,1,0,3,3,3, 0,3,0,2,2,2,2,0,0,1,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0, 2,2,2,3,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,1,2,2,1,2,2,2,1,1,1,2,0,1, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 2,2,2,2,2,2,2,2,2,2,2,1,2,2,2,2,2,2,2,2,2,2,2,0,2,2,0,0,0,0,0,0, 0,0,0,1,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 2,3,1,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,1,2,1,0,2,1,0, 0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 3,1,1,1,1,1,1,1,1,1,1,0,0,1,1,1,1,0,1,1,1,1,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0, 0,3,1,1,2,2,2,2,2,1,2,2,2,1,1,2,2,2,2,2,2,2,1,2,2,1,0,1,1,1,1,0, 0,1,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 3,2,1,1,1,1,2,1,1,2,1,0,1,1,1,1,1,1,1,1,1,1,1,0,1,0,0,0,0,0,0,0, 0,0,2,0,0,0,0,0,0,0,0,1,1,0,0,0,0,1,1,0,0,1,1,0,0,0,0,0,0,1,0,0, 2,1,1,2,2,2,2,2,2,2,2,2,2,2,1,2,2,2,2,2,1,2,1,2,1,1,1,1,0,0,0,0, 0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 1,2,1,2,2,2,2,2,2,2,2,2,2,1,2,1,2,1,1,2,1,1,1,2,1,2,1,2,0,1,0,1, 0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,3,1,2,2,2,1,2,2,2,2,2,2,2,2,1,2,1,1,1,1,1,1,2,1,2,1,1,0,1,0,1, 0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 2,1,2,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2,2,2, 0,2,0,1,2,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0, 3,0,0,0,1,0,0,0,0,0,0,0,0,0,0,1,0,1,0,0,0,1,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 2,1,1,1,1,1,1,1,0,1,1,0,1,0,0,1,0,0,1,0,0,0,0,0,1,0,0,0,0,0,0,0, 
0,0,0,0,0,0,0,0,2,0,1,1,1,0,1,0,0,0,1,1,0,1,1,0,0,0,0,0,1,1,0,0, 0,1,1,1,2,1,2,2,2,0,2,0,2,0,1,1,2,1,1,1,1,2,1,0,1,1,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0, 1,0,1,0,0,0,0,0,1,0,1,2,2,0,1,0,0,1,1,2,2,1,2,0,2,0,0,0,1,2,0,1, 2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,2,0,2,1,2,0,2,0,0,1,1,1,1,1,1,0,1,0,0,0,1,0,0,1, 2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,1,0,0,0,0,0,1,0,2,1,1,0,1,0,0,1,1,1,2,2,0,0,1,0,0,0,1,0,0,1, 1,1,2,1,0,1,1,1,0,1,0,1,1,1,1,0,0,0,1,0,1,0,0,0,0,0,0,0,0,2,2,1, 0,2,0,1,2,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 2,1,0,0,1,0,1,1,1,1,0,0,0,0,0,1,0,0,0,0,1,1,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 1,1,1,1,1,1,1,1,1,2,1,0,1,1,1,1,1,1,1,1,1,1,1,0,1,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,1,1,1,0,0,0,0,1,1,1,0,1,1,0,1,0,0,0,1,1,0,1, 2,0,1,0,1,0,1,0,0,1,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,1,0,1,1,1,0,1,0,0,1,1,2,1,1,2,0,1,0,0,0,1,1,0,1, 1,0,0,1,0,0,1,0,0,0,1,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,1,0,1,1,2,0,1,0,0,0,0,2,1,1,2,0,2,0,0,0,1,1,0,1, 1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,1,0,2,1,1,0,1,0,0,2,2,1,2,1,1,0,1,0,0,0,1,1,0,1, 2,0,1,0,0,1,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,1,2,2,0,0,0,0,0,1,1,0,1,0,0,1,0,0,0,0,1,0,1, 1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,1,2,2,0,0,0,0,2,1,1,1,0,2,1,1,0,0,0,2,1,0,1, 1,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,1,0,1,1,2,0,1,0,0,1,1,0,2,1,1,0,1,0,0,0,1,1,0,1, 2,2,1,1,1,0,1,1,0,1,1,0,1,0,0,0,0,0,0,1,0,0,0,1,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,1,0,2,1,1,0,1,0,0,1,1,0,1,2,1,0,2,0,0,0,1,1,0,1, 2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0, 0,1,0,0,2,0,2,1,1,0,1,0,1,0,0,1,0,0,0,0,1,0,0,0,1,0,0,0,0,0,1,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,1,0,1,1,2,0,1,0,0,1,1,1,0,1,0,0,1,0,0,0,1,0,0,1, 1,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 1,0,0,0,0,0,0,0,1,0,1,1,0,0,1,0,0,2,1,1,1,1,1,0,1,0,0,0,0,1,0,1, 0,1,1,1,2,1,1,1,1,0,1,1,1,1,1,1,1,1,1,1,1,1,0,1,1,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,1,2,1,0,0,0,0,0,1,1,1,1,1,0,1,0,0,0,1,1,0,0, ) Win1255HebrewModel = { 'char_to_order_map': WIN1255_CHAR_TO_ORDER_MAP, 'precedence_matrix': HEBREW_LANG_MODEL, 'typical_positive_ratio': 0.984004, 'keep_english_letter': False, 'charset_name': "windows-1255", 'language': 'Hebrew', } PKZ1site-packages/pip/_vendor/chardet/latin1prober.pynu[######################## BEGIN LICENSE BLOCK ######################## # The Original Code is Mozilla Universal charset detector code. 
# # The Initial Developer of the Original Code is # Netscape Communications Corporation. # Portions created by the Initial Developer are Copyright (C) 2001 # the Initial Developer. All Rights Reserved. # # Contributor(s): # Mark Pilgrim - port to Python # Shy Shalom - original C code # # This library is free software; you can redistribute it and/or # modify it under the terms of the GNU Lesser General Public # License as published by the Free Software Foundation; either # version 2.1 of the License, or (at your option) any later version. # # This library is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU # Lesser General Public License for more details. # # You should have received a copy of the GNU Lesser General Public # License along with this library; if not, write to the Free Software # Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA # 02110-1301 USA ######################### END LICENSE BLOCK ######################### from .charsetprober import CharSetProber from .enums import ProbingState FREQ_CAT_NUM = 4 UDF = 0 # undefined OTH = 1 # other ASC = 2 # ascii capital letter ASS = 3 # ascii small letter ACV = 4 # accent capital vowel ACO = 5 # accent capital other ASV = 6 # accent small vowel ASO = 7 # accent small other CLASS_NUM = 8 # total classes Latin1_CharToClass = ( OTH, OTH, OTH, OTH, OTH, OTH, OTH, OTH, # 00 - 07 OTH, OTH, OTH, OTH, OTH, OTH, OTH, OTH, # 08 - 0F OTH, OTH, OTH, OTH, OTH, OTH, OTH, OTH, # 10 - 17 OTH, OTH, OTH, OTH, OTH, OTH, OTH, OTH, # 18 - 1F OTH, OTH, OTH, OTH, OTH, OTH, OTH, OTH, # 20 - 27 OTH, OTH, OTH, OTH, OTH, OTH, OTH, OTH, # 28 - 2F OTH, OTH, OTH, OTH, OTH, OTH, OTH, OTH, # 30 - 37 OTH, OTH, OTH, OTH, OTH, OTH, OTH, OTH, # 38 - 3F OTH, ASC, ASC, ASC, ASC, ASC, ASC, ASC, # 40 - 47 ASC, ASC, ASC, ASC, ASC, ASC, ASC, ASC, # 48 - 4F ASC, ASC, ASC, ASC, ASC, ASC, ASC, ASC, # 50 - 57 ASC, ASC, ASC, OTH, OTH, OTH, OTH, OTH, # 58 - 5F OTH, ASS, ASS, ASS, ASS, ASS, ASS, ASS, # 60 - 67 ASS, ASS, ASS, ASS, ASS, ASS, ASS, ASS, # 68 - 6F ASS, ASS, ASS, ASS, ASS, ASS, ASS, ASS, # 70 - 77 ASS, ASS, ASS, OTH, OTH, OTH, OTH, OTH, # 78 - 7F OTH, UDF, OTH, ASO, OTH, OTH, OTH, OTH, # 80 - 87 OTH, OTH, ACO, OTH, ACO, UDF, ACO, UDF, # 88 - 8F UDF, OTH, OTH, OTH, OTH, OTH, OTH, OTH, # 90 - 97 OTH, OTH, ASO, OTH, ASO, UDF, ASO, ACO, # 98 - 9F OTH, OTH, OTH, OTH, OTH, OTH, OTH, OTH, # A0 - A7 OTH, OTH, OTH, OTH, OTH, OTH, OTH, OTH, # A8 - AF OTH, OTH, OTH, OTH, OTH, OTH, OTH, OTH, # B0 - B7 OTH, OTH, OTH, OTH, OTH, OTH, OTH, OTH, # B8 - BF ACV, ACV, ACV, ACV, ACV, ACV, ACO, ACO, # C0 - C7 ACV, ACV, ACV, ACV, ACV, ACV, ACV, ACV, # C8 - CF ACO, ACO, ACV, ACV, ACV, ACV, ACV, OTH, # D0 - D7 ACV, ACV, ACV, ACV, ACV, ACO, ACO, ACO, # D8 - DF ASV, ASV, ASV, ASV, ASV, ASV, ASO, ASO, # E0 - E7 ASV, ASV, ASV, ASV, ASV, ASV, ASV, ASV, # E8 - EF ASO, ASO, ASV, ASV, ASV, ASV, ASV, OTH, # F0 - F7 ASV, ASV, ASV, ASV, ASV, ASO, ASO, ASO, # F8 - FF ) # 0 : illegal # 1 : very unlikely # 2 : normal # 3 : very likely Latin1ClassModel = ( # UDF OTH ASC ASS ACV ACO ASV ASO 0, 0, 0, 0, 0, 0, 0, 0, # UDF 0, 3, 3, 3, 3, 3, 3, 3, # OTH 0, 3, 3, 3, 3, 3, 3, 3, # ASC 0, 3, 3, 3, 1, 1, 3, 3, # ASS 0, 3, 3, 3, 1, 2, 1, 2, # ACV 0, 3, 3, 3, 3, 3, 3, 3, # ACO 0, 3, 1, 3, 1, 1, 1, 3, # ASV 0, 3, 1, 3, 1, 1, 3, 3, # ASO ) class Latin1Prober(CharSetProber): def __init__(self): super(Latin1Prober, self).__init__() self._last_char_class = None self._freq_counter = None 
self.reset() def reset(self): self._last_char_class = OTH self._freq_counter = [0] * FREQ_CAT_NUM CharSetProber.reset(self) @property def charset_name(self): return "ISO-8859-1" @property def language(self): return "" def feed(self, byte_str): byte_str = self.filter_with_english_letters(byte_str) for c in byte_str: char_class = Latin1_CharToClass[c] freq = Latin1ClassModel[(self._last_char_class * CLASS_NUM) + char_class] if freq == 0: self._state = ProbingState.NOT_ME break self._freq_counter[freq] += 1 self._last_char_class = char_class return self.state def get_confidence(self): if self.state == ProbingState.NOT_ME: return 0.01 total = sum(self._freq_counter) if total < 0.01: confidence = 0.0 else: confidence = ((self._freq_counter[3] - self._freq_counter[1] * 20.0) / total) if confidence < 0.0: confidence = 0.0 # lower the confidence of latin1 so that other more accurate # detector can take priority. confidence = confidence * 0.73 return confidence
[binary content omitted: compiled pip/_vendor/chardet/mbcssm.pyo state-machine tables (BIG5_SM_MODEL, CP949_SM_MODEL, EUCJP_SM_MODEL, EUCKR_SM_MODEL, EUCTW_SM_MODEL, GB2312_SM_MODEL, SJIS_SM_MODEL, UCS2BE_SM_MODEL, UCS2LE_SM_MODEL, UTF8_SM_MODEL)]
site-packages/pip/_vendor/chardet/jisfreq.py
######################## BEGIN LICENSE BLOCK ######################## # The Original Code is Mozilla Communicator client code. # # The Initial Developer of the Original Code is # Netscape Communications Corporation. # Portions created by the Initial Developer are Copyright (C) 1998
# # Contributor(s): # Mark Pilgrim - port to Python # # This library is free software; you can redistribute it and/or # modify it under the terms of the GNU Lesser General Public # License as published by the Free Software Foundation; either # version 2.1 of the License, or (at your option) any later version. # # This library is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU # Lesser General Public License for more details. # # You should have received a copy of the GNU Lesser General Public # License along with this library; if not, write to the Free Software # Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA # 02110-1301 USA ######################### END LICENSE BLOCK ######################### # Sampling from about 20M text materials include literature and computer technology # # Japanese frequency table, applied to both S-JIS and EUC-JP # They are sorted in order. # 128 --> 0.77094 # 256 --> 0.85710 # 512 --> 0.92635 # 1024 --> 0.97130 # 2048 --> 0.99431 # # Ideal Distribution Ratio = 0.92635 / (1-0.92635) = 12.58 # Random Distribution Ration = 512 / (2965+62+83+86-512) = 0.191 # # Typical Distribution Ratio, 25% of IDR JIS_TYPICAL_DISTRIBUTION_RATIO = 3.0 # Char to FreqOrder table , JIS_TABLE_SIZE = 4368 JIS_CHAR_TO_FREQ_ORDER = ( 40, 1, 6, 182, 152, 180, 295,2127, 285, 381,3295,4304,3068,4606,3165,3510, # 16 3511,1822,2785,4607,1193,2226,5070,4608, 171,2996,1247, 18, 179,5071, 856,1661, # 32 1262,5072, 619, 127,3431,3512,3230,1899,1700, 232, 228,1294,1298, 284, 283,2041, # 48 2042,1061,1062, 48, 49, 44, 45, 433, 434,1040,1041, 996, 787,2997,1255,4305, # 64 2108,4609,1684,1648,5073,5074,5075,5076,5077,5078,3687,5079,4610,5080,3927,3928, # 80 5081,3296,3432, 290,2285,1471,2187,5082,2580,2825,1303,2140,1739,1445,2691,3375, # 96 1691,3297,4306,4307,4611, 452,3376,1182,2713,3688,3069,4308,5083,5084,5085,5086, # 112 5087,5088,5089,5090,5091,5092,5093,5094,5095,5096,5097,5098,5099,5100,5101,5102, # 128 5103,5104,5105,5106,5107,5108,5109,5110,5111,5112,4097,5113,5114,5115,5116,5117, # 144 5118,5119,5120,5121,5122,5123,5124,5125,5126,5127,5128,5129,5130,5131,5132,5133, # 160 5134,5135,5136,5137,5138,5139,5140,5141,5142,5143,5144,5145,5146,5147,5148,5149, # 176 5150,5151,5152,4612,5153,5154,5155,5156,5157,5158,5159,5160,5161,5162,5163,5164, # 192 5165,5166,5167,5168,5169,5170,5171,5172,5173,5174,5175,1472, 598, 618, 820,1205, # 208 1309,1412,1858,1307,1692,5176,5177,5178,5179,5180,5181,5182,1142,1452,1234,1172, # 224 1875,2043,2149,1793,1382,2973, 925,2404,1067,1241, 960,1377,2935,1491, 919,1217, # 240 1865,2030,1406,1499,2749,4098,5183,5184,5185,5186,5187,5188,2561,4099,3117,1804, # 256 2049,3689,4309,3513,1663,5189,3166,3118,3298,1587,1561,3433,5190,3119,1625,2998, # 272 3299,4613,1766,3690,2786,4614,5191,5192,5193,5194,2161, 26,3377, 2,3929, 20, # 288 3691, 47,4100, 50, 17, 16, 35, 268, 27, 243, 42, 155, 24, 154, 29, 184, # 304 4, 91, 14, 92, 53, 396, 33, 289, 9, 37, 64, 620, 21, 39, 321, 5, # 320 12, 11, 52, 13, 3, 208, 138, 0, 7, 60, 526, 141, 151,1069, 181, 275, # 336 1591, 83, 132,1475, 126, 331, 829, 15, 69, 160, 59, 22, 157, 55,1079, 312, # 352 109, 38, 23, 25, 10, 19, 79,5195, 61, 382,1124, 8, 30,5196,5197,5198, # 368 5199,5200,5201,5202,5203,5204,5205,5206, 89, 62, 74, 34,2416, 112, 139, 196, # 384 271, 149, 84, 607, 131, 765, 46, 88, 153, 683, 76, 874, 101, 258, 57, 80, # 400 32, 364, 121,1508, 169,1547, 68, 235, 145,2999, 41, 360,3027, 70, 
63, 31, # 416 43, 259, 262,1383, 99, 533, 194, 66, 93, 846, 217, 192, 56, 106, 58, 565, # 432 280, 272, 311, 256, 146, 82, 308, 71, 100, 128, 214, 655, 110, 261, 104,1140, # 448 54, 51, 36, 87, 67,3070, 185,2618,2936,2020, 28,1066,2390,2059,5207,5208, # 464 5209,5210,5211,5212,5213,5214,5215,5216,4615,5217,5218,5219,5220,5221,5222,5223, # 480 5224,5225,5226,5227,5228,5229,5230,5231,5232,5233,5234,5235,5236,3514,5237,5238, # 496 5239,5240,5241,5242,5243,5244,2297,2031,4616,4310,3692,5245,3071,5246,3598,5247, # 512 4617,3231,3515,5248,4101,4311,4618,3808,4312,4102,5249,4103,4104,3599,5250,5251, # 528 5252,5253,5254,5255,5256,5257,5258,5259,5260,5261,5262,5263,5264,5265,5266,5267, # 544 5268,5269,5270,5271,5272,5273,5274,5275,5276,5277,5278,5279,5280,5281,5282,5283, # 560 5284,5285,5286,5287,5288,5289,5290,5291,5292,5293,5294,5295,5296,5297,5298,5299, # 576 5300,5301,5302,5303,5304,5305,5306,5307,5308,5309,5310,5311,5312,5313,5314,5315, # 592 5316,5317,5318,5319,5320,5321,5322,5323,5324,5325,5326,5327,5328,5329,5330,5331, # 608 5332,5333,5334,5335,5336,5337,5338,5339,5340,5341,5342,5343,5344,5345,5346,5347, # 624 5348,5349,5350,5351,5352,5353,5354,5355,5356,5357,5358,5359,5360,5361,5362,5363, # 640 5364,5365,5366,5367,5368,5369,5370,5371,5372,5373,5374,5375,5376,5377,5378,5379, # 656 5380,5381, 363, 642,2787,2878,2788,2789,2316,3232,2317,3434,2011, 165,1942,3930, # 672 3931,3932,3933,5382,4619,5383,4620,5384,5385,5386,5387,5388,5389,5390,5391,5392, # 688 5393,5394,5395,5396,5397,5398,5399,5400,5401,5402,5403,5404,5405,5406,5407,5408, # 704 5409,5410,5411,5412,5413,5414,5415,5416,5417,5418,5419,5420,5421,5422,5423,5424, # 720 5425,5426,5427,5428,5429,5430,5431,5432,5433,5434,5435,5436,5437,5438,5439,5440, # 736 5441,5442,5443,5444,5445,5446,5447,5448,5449,5450,5451,5452,5453,5454,5455,5456, # 752 5457,5458,5459,5460,5461,5462,5463,5464,5465,5466,5467,5468,5469,5470,5471,5472, # 768 5473,5474,5475,5476,5477,5478,5479,5480,5481,5482,5483,5484,5485,5486,5487,5488, # 784 5489,5490,5491,5492,5493,5494,5495,5496,5497,5498,5499,5500,5501,5502,5503,5504, # 800 5505,5506,5507,5508,5509,5510,5511,5512,5513,5514,5515,5516,5517,5518,5519,5520, # 816 5521,5522,5523,5524,5525,5526,5527,5528,5529,5530,5531,5532,5533,5534,5535,5536, # 832 5537,5538,5539,5540,5541,5542,5543,5544,5545,5546,5547,5548,5549,5550,5551,5552, # 848 5553,5554,5555,5556,5557,5558,5559,5560,5561,5562,5563,5564,5565,5566,5567,5568, # 864 5569,5570,5571,5572,5573,5574,5575,5576,5577,5578,5579,5580,5581,5582,5583,5584, # 880 5585,5586,5587,5588,5589,5590,5591,5592,5593,5594,5595,5596,5597,5598,5599,5600, # 896 5601,5602,5603,5604,5605,5606,5607,5608,5609,5610,5611,5612,5613,5614,5615,5616, # 912 5617,5618,5619,5620,5621,5622,5623,5624,5625,5626,5627,5628,5629,5630,5631,5632, # 928 5633,5634,5635,5636,5637,5638,5639,5640,5641,5642,5643,5644,5645,5646,5647,5648, # 944 5649,5650,5651,5652,5653,5654,5655,5656,5657,5658,5659,5660,5661,5662,5663,5664, # 960 5665,5666,5667,5668,5669,5670,5671,5672,5673,5674,5675,5676,5677,5678,5679,5680, # 976 5681,5682,5683,5684,5685,5686,5687,5688,5689,5690,5691,5692,5693,5694,5695,5696, # 992 5697,5698,5699,5700,5701,5702,5703,5704,5705,5706,5707,5708,5709,5710,5711,5712, # 1008 5713,5714,5715,5716,5717,5718,5719,5720,5721,5722,5723,5724,5725,5726,5727,5728, # 1024 5729,5730,5731,5732,5733,5734,5735,5736,5737,5738,5739,5740,5741,5742,5743,5744, # 1040 5745,5746,5747,5748,5749,5750,5751,5752,5753,5754,5755,5756,5757,5758,5759,5760, # 1056 
5761,5762,5763,5764,5765,5766,5767,5768,5769,5770,5771,5772,5773,5774,5775,5776, # 1072 5777,5778,5779,5780,5781,5782,5783,5784,5785,5786,5787,5788,5789,5790,5791,5792, # 1088 5793,5794,5795,5796,5797,5798,5799,5800,5801,5802,5803,5804,5805,5806,5807,5808, # 1104 5809,5810,5811,5812,5813,5814,5815,5816,5817,5818,5819,5820,5821,5822,5823,5824, # 1120 5825,5826,5827,5828,5829,5830,5831,5832,5833,5834,5835,5836,5837,5838,5839,5840, # 1136 5841,5842,5843,5844,5845,5846,5847,5848,5849,5850,5851,5852,5853,5854,5855,5856, # 1152 5857,5858,5859,5860,5861,5862,5863,5864,5865,5866,5867,5868,5869,5870,5871,5872, # 1168 5873,5874,5875,5876,5877,5878,5879,5880,5881,5882,5883,5884,5885,5886,5887,5888, # 1184 5889,5890,5891,5892,5893,5894,5895,5896,5897,5898,5899,5900,5901,5902,5903,5904, # 1200 5905,5906,5907,5908,5909,5910,5911,5912,5913,5914,5915,5916,5917,5918,5919,5920, # 1216 5921,5922,5923,5924,5925,5926,5927,5928,5929,5930,5931,5932,5933,5934,5935,5936, # 1232 5937,5938,5939,5940,5941,5942,5943,5944,5945,5946,5947,5948,5949,5950,5951,5952, # 1248 5953,5954,5955,5956,5957,5958,5959,5960,5961,5962,5963,5964,5965,5966,5967,5968, # 1264 5969,5970,5971,5972,5973,5974,5975,5976,5977,5978,5979,5980,5981,5982,5983,5984, # 1280 5985,5986,5987,5988,5989,5990,5991,5992,5993,5994,5995,5996,5997,5998,5999,6000, # 1296 6001,6002,6003,6004,6005,6006,6007,6008,6009,6010,6011,6012,6013,6014,6015,6016, # 1312 6017,6018,6019,6020,6021,6022,6023,6024,6025,6026,6027,6028,6029,6030,6031,6032, # 1328 6033,6034,6035,6036,6037,6038,6039,6040,6041,6042,6043,6044,6045,6046,6047,6048, # 1344 6049,6050,6051,6052,6053,6054,6055,6056,6057,6058,6059,6060,6061,6062,6063,6064, # 1360 6065,6066,6067,6068,6069,6070,6071,6072,6073,6074,6075,6076,6077,6078,6079,6080, # 1376 6081,6082,6083,6084,6085,6086,6087,6088,6089,6090,6091,6092,6093,6094,6095,6096, # 1392 6097,6098,6099,6100,6101,6102,6103,6104,6105,6106,6107,6108,6109,6110,6111,6112, # 1408 6113,6114,2044,2060,4621, 997,1235, 473,1186,4622, 920,3378,6115,6116, 379,1108, # 1424 4313,2657,2735,3934,6117,3809, 636,3233, 573,1026,3693,3435,2974,3300,2298,4105, # 1440 854,2937,2463, 393,2581,2417, 539, 752,1280,2750,2480, 140,1161, 440, 708,1569, # 1456 665,2497,1746,1291,1523,3000, 164,1603, 847,1331, 537,1997, 486, 508,1693,2418, # 1472 1970,2227, 878,1220, 299,1030, 969, 652,2751, 624,1137,3301,2619, 65,3302,2045, # 1488 1761,1859,3120,1930,3694,3516, 663,1767, 852, 835,3695, 269, 767,2826,2339,1305, # 1504 896,1150, 770,1616,6118, 506,1502,2075,1012,2519, 775,2520,2975,2340,2938,4314, # 1520 3028,2086,1224,1943,2286,6119,3072,4315,2240,1273,1987,3935,1557, 175, 597, 985, # 1536 3517,2419,2521,1416,3029, 585, 938,1931,1007,1052,1932,1685,6120,3379,4316,4623, # 1552 804, 599,3121,1333,2128,2539,1159,1554,2032,3810, 687,2033,2904, 952, 675,1467, # 1568 3436,6121,2241,1096,1786,2440,1543,1924, 980,1813,2228, 781,2692,1879, 728,1918, # 1584 3696,4624, 548,1950,4625,1809,1088,1356,3303,2522,1944, 502, 972, 373, 513,2827, # 1600 586,2377,2391,1003,1976,1631,6122,2464,1084, 648,1776,4626,2141, 324, 962,2012, # 1616 2177,2076,1384, 742,2178,1448,1173,1810, 222, 102, 301, 445, 125,2420, 662,2498, # 1632 277, 200,1476,1165,1068, 224,2562,1378,1446, 450,1880, 659, 791, 582,4627,2939, # 1648 3936,1516,1274, 555,2099,3697,1020,1389,1526,3380,1762,1723,1787,2229, 412,2114, # 1664 1900,2392,3518, 512,2597, 427,1925,2341,3122,1653,1686,2465,2499, 697, 330, 273, # 1680 380,2162, 951, 832, 780, 991,1301,3073, 965,2270,3519, 668,2523,2636,1286, 535, # 1696 1407, 518, 671, 957,2658,2378, 267, 
611,2197,3030,6123, 248,2299, 967,1799,2356, # 1712 850,1418,3437,1876,1256,1480,2828,1718,6124,6125,1755,1664,2405,6126,4628,2879, # 1728 2829, 499,2179, 676,4629, 557,2329,2214,2090, 325,3234, 464, 811,3001, 992,2342, # 1744 2481,1232,1469, 303,2242, 466,1070,2163, 603,1777,2091,4630,2752,4631,2714, 322, # 1760 2659,1964,1768, 481,2188,1463,2330,2857,3600,2092,3031,2421,4632,2318,2070,1849, # 1776 2598,4633,1302,2254,1668,1701,2422,3811,2905,3032,3123,2046,4106,1763,1694,4634, # 1792 1604, 943,1724,1454, 917, 868,2215,1169,2940, 552,1145,1800,1228,1823,1955, 316, # 1808 1080,2510, 361,1807,2830,4107,2660,3381,1346,1423,1134,4108,6127, 541,1263,1229, # 1824 1148,2540, 545, 465,1833,2880,3438,1901,3074,2482, 816,3937, 713,1788,2500, 122, # 1840 1575, 195,1451,2501,1111,6128, 859, 374,1225,2243,2483,4317, 390,1033,3439,3075, # 1856 2524,1687, 266, 793,1440,2599, 946, 779, 802, 507, 897,1081, 528,2189,1292, 711, # 1872 1866,1725,1167,1640, 753, 398,2661,1053, 246, 348,4318, 137,1024,3440,1600,2077, # 1888 2129, 825,4319, 698, 238, 521, 187,2300,1157,2423,1641,1605,1464,1610,1097,2541, # 1904 1260,1436, 759,2255,1814,2150, 705,3235, 409,2563,3304, 561,3033,2005,2564, 726, # 1920 1956,2343,3698,4109, 949,3812,3813,3520,1669, 653,1379,2525, 881,2198, 632,2256, # 1936 1027, 778,1074, 733,1957, 514,1481,2466, 554,2180, 702,3938,1606,1017,1398,6129, # 1952 1380,3521, 921, 993,1313, 594, 449,1489,1617,1166, 768,1426,1360, 495,1794,3601, # 1968 1177,3602,1170,4320,2344, 476, 425,3167,4635,3168,1424, 401,2662,1171,3382,1998, # 1984 1089,4110, 477,3169, 474,6130,1909, 596,2831,1842, 494, 693,1051,1028,1207,3076, # 2000 606,2115, 727,2790,1473,1115, 743,3522, 630, 805,1532,4321,2021, 366,1057, 838, # 2016 684,1114,2142,4322,2050,1492,1892,1808,2271,3814,2424,1971,1447,1373,3305,1090, # 2032 1536,3939,3523,3306,1455,2199, 336, 369,2331,1035, 584,2393, 902, 718,2600,6131, # 2048 2753, 463,2151,1149,1611,2467, 715,1308,3124,1268, 343,1413,3236,1517,1347,2663, # 2064 2093,3940,2022,1131,1553,2100,2941,1427,3441,2942,1323,2484,6132,1980, 872,2368, # 2080 2441,2943, 320,2369,2116,1082, 679,1933,3941,2791,3815, 625,1143,2023, 422,2200, # 2096 3816,6133, 730,1695, 356,2257,1626,2301,2858,2637,1627,1778, 937, 883,2906,2693, # 2112 3002,1769,1086, 400,1063,1325,3307,2792,4111,3077, 456,2345,1046, 747,6134,1524, # 2128 884,1094,3383,1474,2164,1059, 974,1688,2181,2258,1047, 345,1665,1187, 358, 875, # 2144 3170, 305, 660,3524,2190,1334,1135,3171,1540,1649,2542,1527, 927, 968,2793, 885, # 2160 1972,1850, 482, 500,2638,1218,1109,1085,2543,1654,2034, 876, 78,2287,1482,1277, # 2176 861,1675,1083,1779, 724,2754, 454, 397,1132,1612,2332, 893, 672,1237, 257,2259, # 2192 2370, 135,3384, 337,2244, 547, 352, 340, 709,2485,1400, 788,1138,2511, 540, 772, # 2208 1682,2260,2272,2544,2013,1843,1902,4636,1999,1562,2288,4637,2201,1403,1533, 407, # 2224 576,3308,1254,2071, 978,3385, 170, 136,1201,3125,2664,3172,2394, 213, 912, 873, # 2240 3603,1713,2202, 699,3604,3699, 813,3442, 493, 531,1054, 468,2907,1483, 304, 281, # 2256 4112,1726,1252,2094, 339,2319,2130,2639, 756,1563,2944, 748, 571,2976,1588,2425, # 2272 2715,1851,1460,2426,1528,1392,1973,3237, 288,3309, 685,3386, 296, 892,2716,2216, # 2288 1570,2245, 722,1747,2217, 905,3238,1103,6135,1893,1441,1965, 251,1805,2371,3700, # 2304 2601,1919,1078, 75,2182,1509,1592,1270,2640,4638,2152,6136,3310,3817, 524, 706, # 2320 1075, 292,3818,1756,2602, 317, 98,3173,3605,3525,1844,2218,3819,2502, 814, 567, # 2336 385,2908,1534,6137, 534,1642,3239, 797,6138,1670,1529, 953,4323, 188,1071, 538, 
# 2352 178, 729,3240,2109,1226,1374,2000,2357,2977, 731,2468,1116,2014,2051,6139,1261, # 2368 1593, 803,2859,2736,3443, 556, 682, 823,1541,6140,1369,2289,1706,2794, 845, 462, # 2384 2603,2665,1361, 387, 162,2358,1740, 739,1770,1720,1304,1401,3241,1049, 627,1571, # 2400 2427,3526,1877,3942,1852,1500, 431,1910,1503, 677, 297,2795, 286,1433,1038,1198, # 2416 2290,1133,1596,4113,4639,2469,1510,1484,3943,6141,2442, 108, 712,4640,2372, 866, # 2432 3701,2755,3242,1348, 834,1945,1408,3527,2395,3243,1811, 824, 994,1179,2110,1548, # 2448 1453, 790,3003, 690,4324,4325,2832,2909,3820,1860,3821, 225,1748, 310, 346,1780, # 2464 2470, 821,1993,2717,2796, 828, 877,3528,2860,2471,1702,2165,2910,2486,1789, 453, # 2480 359,2291,1676, 73,1164,1461,1127,3311, 421, 604, 314,1037, 589, 116,2487, 737, # 2496 837,1180, 111, 244, 735,6142,2261,1861,1362, 986, 523, 418, 581,2666,3822, 103, # 2512 855, 503,1414,1867,2488,1091, 657,1597, 979, 605,1316,4641,1021,2443,2078,2001, # 2528 1209, 96, 587,2166,1032, 260,1072,2153, 173, 94, 226,3244, 819,2006,4642,4114, # 2544 2203, 231,1744, 782, 97,2667, 786,3387, 887, 391, 442,2219,4326,1425,6143,2694, # 2560 633,1544,1202, 483,2015, 592,2052,1958,2472,1655, 419, 129,4327,3444,3312,1714, # 2576 1257,3078,4328,1518,1098, 865,1310,1019,1885,1512,1734, 469,2444, 148, 773, 436, # 2592 1815,1868,1128,1055,4329,1245,2756,3445,2154,1934,1039,4643, 579,1238, 932,2320, # 2608 353, 205, 801, 115,2428, 944,2321,1881, 399,2565,1211, 678, 766,3944, 335,2101, # 2624 1459,1781,1402,3945,2737,2131,1010, 844, 981,1326,1013, 550,1816,1545,2620,1335, # 2640 1008, 371,2881, 936,1419,1613,3529,1456,1395,2273,1834,2604,1317,2738,2503, 416, # 2656 1643,4330, 806,1126, 229, 591,3946,1314,1981,1576,1837,1666, 347,1790, 977,3313, # 2672 764,2861,1853, 688,2429,1920,1462, 77, 595, 415,2002,3034, 798,1192,4115,6144, # 2688 2978,4331,3035,2695,2582,2072,2566, 430,2430,1727, 842,1396,3947,3702, 613, 377, # 2704 278, 236,1417,3388,3314,3174, 757,1869, 107,3530,6145,1194, 623,2262, 207,1253, # 2720 2167,3446,3948, 492,1117,1935, 536,1838,2757,1246,4332, 696,2095,2406,1393,1572, # 2736 3175,1782, 583, 190, 253,1390,2230, 830,3126,3389, 934,3245,1703,1749,2979,1870, # 2752 2545,1656,2204, 869,2346,4116,3176,1817, 496,1764,4644, 942,1504, 404,1903,1122, # 2768 1580,3606,2945,1022, 515, 372,1735, 955,2431,3036,6146,2797,1110,2302,2798, 617, # 2784 6147, 441, 762,1771,3447,3607,3608,1904, 840,3037, 86, 939,1385, 572,1370,2445, # 2800 1336, 114,3703, 898, 294, 203,3315, 703,1583,2274, 429, 961,4333,1854,1951,3390, # 2816 2373,3704,4334,1318,1381, 966,1911,2322,1006,1155, 309, 989, 458,2718,1795,1372, # 2832 1203, 252,1689,1363,3177, 517,1936, 168,1490, 562, 193,3823,1042,4117,1835, 551, # 2848 470,4645, 395, 489,3448,1871,1465,2583,2641, 417,1493, 279,1295, 511,1236,1119, # 2864 72,1231,1982,1812,3004, 871,1564, 984,3449,1667,2696,2096,4646,2347,2833,1673, # 2880 3609, 695,3246,2668, 807,1183,4647, 890, 388,2333,1801,1457,2911,1765,1477,1031, # 2896 3316,3317,1278,3391,2799,2292,2526, 163,3450,4335,2669,1404,1802,6148,2323,2407, # 2912 1584,1728,1494,1824,1269, 298, 909,3318,1034,1632, 375, 776,1683,2061, 291, 210, # 2928 1123, 809,1249,1002,2642,3038, 206,1011,2132, 144, 975, 882,1565, 342, 667, 754, # 2944 1442,2143,1299,2303,2062, 447, 626,2205,1221,2739,2912,1144,1214,2206,2584, 760, # 2960 1715, 614, 950,1281,2670,2621, 810, 577,1287,2546,4648, 242,2168, 250,2643, 691, # 2976 123,2644, 647, 313,1029, 689,1357,2946,1650, 216, 771,1339,1306, 808,2063, 549, # 2992 
913,1371,2913,2914,6149,1466,1092,1174,1196,1311,2605,2396,1783,1796,3079, 406, # 3008 2671,2117,3949,4649, 487,1825,2220,6150,2915, 448,2348,1073,6151,2397,1707, 130, # 3024 900,1598, 329, 176,1959,2527,1620,6152,2275,4336,3319,1983,2191,3705,3610,2155, # 3040 3706,1912,1513,1614,6153,1988, 646, 392,2304,1589,3320,3039,1826,1239,1352,1340, # 3056 2916, 505,2567,1709,1437,2408,2547, 906,6154,2672, 384,1458,1594,1100,1329, 710, # 3072 423,3531,2064,2231,2622,1989,2673,1087,1882, 333, 841,3005,1296,2882,2379, 580, # 3088 1937,1827,1293,2585, 601, 574, 249,1772,4118,2079,1120, 645, 901,1176,1690, 795, # 3104 2207, 478,1434, 516,1190,1530, 761,2080, 930,1264, 355, 435,1552, 644,1791, 987, # 3120 220,1364,1163,1121,1538, 306,2169,1327,1222, 546,2645, 218, 241, 610,1704,3321, # 3136 1984,1839,1966,2528, 451,6155,2586,3707,2568, 907,3178, 254,2947, 186,1845,4650, # 3152 745, 432,1757, 428,1633, 888,2246,2221,2489,3611,2118,1258,1265, 956,3127,1784, # 3168 4337,2490, 319, 510, 119, 457,3612, 274,2035,2007,4651,1409,3128, 970,2758, 590, # 3184 2800, 661,2247,4652,2008,3950,1420,1549,3080,3322,3951,1651,1375,2111, 485,2491, # 3200 1429,1156,6156,2548,2183,1495, 831,1840,2529,2446, 501,1657, 307,1894,3247,1341, # 3216 666, 899,2156,1539,2549,1559, 886, 349,2208,3081,2305,1736,3824,2170,2759,1014, # 3232 1913,1386, 542,1397,2948, 490, 368, 716, 362, 159, 282,2569,1129,1658,1288,1750, # 3248 2674, 276, 649,2016, 751,1496, 658,1818,1284,1862,2209,2087,2512,3451, 622,2834, # 3264 376, 117,1060,2053,1208,1721,1101,1443, 247,1250,3179,1792,3952,2760,2398,3953, # 3280 6157,2144,3708, 446,2432,1151,2570,3452,2447,2761,2835,1210,2448,3082, 424,2222, # 3296 1251,2449,2119,2836, 504,1581,4338, 602, 817, 857,3825,2349,2306, 357,3826,1470, # 3312 1883,2883, 255, 958, 929,2917,3248, 302,4653,1050,1271,1751,2307,1952,1430,2697, # 3328 2719,2359, 354,3180, 777, 158,2036,4339,1659,4340,4654,2308,2949,2248,1146,2232, # 3344 3532,2720,1696,2623,3827,6158,3129,1550,2698,1485,1297,1428, 637, 931,2721,2145, # 3360 914,2550,2587, 81,2450, 612, 827,2646,1242,4655,1118,2884, 472,1855,3181,3533, # 3376 3534, 569,1353,2699,1244,1758,2588,4119,2009,2762,2171,3709,1312,1531,6159,1152, # 3392 1938, 134,1830, 471,3710,2276,1112,1535,3323,3453,3535, 982,1337,2950, 488, 826, # 3408 674,1058,1628,4120,2017, 522,2399, 211, 568,1367,3454, 350, 293,1872,1139,3249, # 3424 1399,1946,3006,1300,2360,3324, 588, 736,6160,2606, 744, 669,3536,3828,6161,1358, # 3440 199, 723, 848, 933, 851,1939,1505,1514,1338,1618,1831,4656,1634,3613, 443,2740, # 3456 3829, 717,1947, 491,1914,6162,2551,1542,4121,1025,6163,1099,1223, 198,3040,2722, # 3472 370, 410,1905,2589, 998,1248,3182,2380, 519,1449,4122,1710, 947, 928,1153,4341, # 3488 2277, 344,2624,1511, 615, 105, 161,1212,1076,1960,3130,2054,1926,1175,1906,2473, # 3504 414,1873,2801,6164,2309, 315,1319,3325, 318,2018,2146,2157, 963, 631, 223,4342, # 3520 4343,2675, 479,3711,1197,2625,3712,2676,2361,6165,4344,4123,6166,2451,3183,1886, # 3536 2184,1674,1330,1711,1635,1506, 799, 219,3250,3083,3954,1677,3713,3326,2081,3614, # 3552 1652,2073,4657,1147,3041,1752, 643,1961, 147,1974,3955,6167,1716,2037, 918,3007, # 3568 1994, 120,1537, 118, 609,3184,4345, 740,3455,1219, 332,1615,3830,6168,1621,2980, # 3584 1582, 783, 212, 553,2350,3714,1349,2433,2082,4124, 889,6169,2310,1275,1410, 973, # 3600 166,1320,3456,1797,1215,3185,2885,1846,2590,2763,4658, 629, 822,3008, 763, 940, # 3616 1990,2862, 439,2409,1566,1240,1622, 926,1282,1907,2764, 654,2210,1607, 327,1130, # 3632 3956,1678,1623,6170,2434,2192, 686, 
608,3831,3715, 903,3957,3042,6171,2741,1522, # 3648 1915,1105,1555,2552,1359, 323,3251,4346,3457, 738,1354,2553,2311,2334,1828,2003, # 3664 3832,1753,2351,1227,6172,1887,4125,1478,6173,2410,1874,1712,1847, 520,1204,2607, # 3680 264,4659, 836,2677,2102, 600,4660,3833,2278,3084,6174,4347,3615,1342, 640, 532, # 3696 543,2608,1888,2400,2591,1009,4348,1497, 341,1737,3616,2723,1394, 529,3252,1321, # 3712 983,4661,1515,2120, 971,2592, 924, 287,1662,3186,4349,2700,4350,1519, 908,1948, # 3728 2452, 156, 796,1629,1486,2223,2055, 694,4126,1259,1036,3392,1213,2249,2742,1889, # 3744 1230,3958,1015, 910, 408, 559,3617,4662, 746, 725, 935,4663,3959,3009,1289, 563, # 3760 867,4664,3960,1567,2981,2038,2626, 988,2263,2381,4351, 143,2374, 704,1895,6175, # 3776 1188,3716,2088, 673,3085,2362,4352, 484,1608,1921,2765,2918, 215, 904,3618,3537, # 3792 894, 509, 976,3043,2701,3961,4353,2837,2982, 498,6176,6177,1102,3538,1332,3393, # 3808 1487,1636,1637, 233, 245,3962, 383, 650, 995,3044, 460,1520,1206,2352, 749,3327, # 3824 530, 700, 389,1438,1560,1773,3963,2264, 719,2951,2724,3834, 870,1832,1644,1000, # 3840 839,2474,3717, 197,1630,3394, 365,2886,3964,1285,2133, 734, 922, 818,1106, 732, # 3856 480,2083,1774,3458, 923,2279,1350, 221,3086, 85,2233,2234,3835,1585,3010,2147, # 3872 1387,1705,2382,1619,2475, 133, 239,2802,1991,1016,2084,2383, 411,2838,1113, 651, # 3888 1985,1160,3328, 990,1863,3087,1048,1276,2647, 265,2627,1599,3253,2056, 150, 638, # 3904 2019, 656, 853, 326,1479, 680,1439,4354,1001,1759, 413,3459,3395,2492,1431, 459, # 3920 4355,1125,3329,2265,1953,1450,2065,2863, 849, 351,2678,3131,3254,3255,1104,1577, # 3936 227,1351,1645,2453,2193,1421,2887, 812,2121, 634, 95,2435, 201,2312,4665,1646, # 3952 1671,2743,1601,2554,2702,2648,2280,1315,1366,2089,3132,1573,3718,3965,1729,1189, # 3968 328,2679,1077,1940,1136, 558,1283, 964,1195, 621,2074,1199,1743,3460,3619,1896, # 3984 1916,1890,3836,2952,1154,2112,1064, 862, 378,3011,2066,2113,2803,1568,2839,6178, # 4000 3088,2919,1941,1660,2004,1992,2194, 142, 707,1590,1708,1624,1922,1023,1836,1233, # 4016 1004,2313, 789, 741,3620,6179,1609,2411,1200,4127,3719,3720,4666,2057,3721, 593, # 4032 2840, 367,2920,1878,6180,3461,1521, 628,1168, 692,2211,2649, 300, 720,2067,2571, # 4048 2953,3396, 959,2504,3966,3539,3462,1977, 701,6181, 954,1043, 800, 681, 183,3722, # 4064 1803,1730,3540,4128,2103, 815,2314, 174, 467, 230,2454,1093,2134, 755,3541,3397, # 4080 1141,1162,6182,1738,2039, 270,3256,2513,1005,1647,2185,3837, 858,1679,1897,1719, # 4096 2954,2324,1806, 402, 670, 167,4129,1498,2158,2104, 750,6183, 915, 189,1680,1551, # 4112 455,4356,1501,2455, 405,1095,2955, 338,1586,1266,1819, 570, 641,1324, 237,1556, # 4128 2650,1388,3723,6184,1368,2384,1343,1978,3089,2436, 879,3724, 792,1191, 758,3012, # 4144 1411,2135,1322,4357, 240,4667,1848,3725,1574,6185, 420,3045,1546,1391, 714,4358, # 4160 1967, 941,1864, 863, 664, 426, 560,1731,2680,1785,2864,1949,2363, 403,3330,1415, # 4176 1279,2136,1697,2335, 204, 721,2097,3838, 90,6186,2085,2505, 191,3967, 124,2148, # 4192 1376,1798,1178,1107,1898,1405, 860,4359,1243,1272,2375,2983,1558,2456,1638, 113, # 4208 3621, 578,1923,2609, 880, 386,4130, 784,2186,2266,1422,2956,2172,1722, 497, 263, # 4224 2514,1267,2412,2610, 177,2703,3542, 774,1927,1344, 616,1432,1595,1018, 172,4360, # 4240 2325, 911,4361, 438,1468,3622, 794,3968,2024,2173,1681,1829,2957, 945, 895,3090, # 4256 575,2212,2476, 475,2401,2681, 785,2744,1745,2293,2555,1975,3133,2865, 394,4668, # 4272 3839, 635,4131, 639, 202,1507,2195,2766,1345,1435,2572,3726,1908,1184,1181,2457, # 
4288
3727,3134,4362, 843,2611, 437, 916,4669, 234, 769,1884,3046,3047,3623, 833,6187, # 4304
1639,2250,2402,1355,1185,2010,2047, 999, 525,1732,1290,1488,2612, 948,1578,3728, # 4320
2413,2477,1216,2725,2159, 334,3840,1328,3624,2921,1525,4132, 564,1056, 891,4363, # 4336
1444,1698,2385,2251,3729,1365,2281,2235,1717,6188, 864,3841,2515, 444, 527,2767, # 4352
2922,3625, 544, 461,6189, 566, 209,2437,3398,2098,1065,2068,3331,3626,3257,2137, # 4368
#last 512
)
PK site-packages/pip/_vendor/chardet/__init__.pyo [compiled bytecode omitted; recoverable docstring of detect(): "Detect the encoding of the given byte string"; byte_str must be bytes or bytearray]
PK site-packages/pip/_vendor/chardet/escsm.pyc [compiled bytecode omitted; the readable escsm.py source for the same module appears further down in this archive]
PK site-packages/pip/_vendor/chardet/codingstatemachine.pyc [compiled bytecode omitted; the module docstring survives and is restored below]
A state machine to verify a byte sequence for a particular encoding. For
each byte the detector receives, it will feed that byte to every active
state machine available, one byte at a time. The state machine changes its
state based on its previous state and the byte it receives.

There are 3 states in a state machine that are of interest to an
auto-detector:

START state: This is the state to start with, or a legal byte sequence
    (i.e. a valid code point) for a character has been identified.

ME state: This indicates that the state machine identified a byte sequence
    that is specific to the charset it is designed for and that there is no
    other possible encoding which can contain this byte sequence. This will
    lead to an immediate positive answer for the detector.

ERROR state: This indicates the state machine identified an illegal byte
    sequence for that encoding. This will lead to an immediate negative
    answer for this encoding. The detector will exclude this encoding from
    consideration from here on.
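To make the three states concrete, here is a minimal, self-contained sketch of how such a machine is driven one byte at a time. It is an illustration only: the state names mirror MachineState.START/ERROR/ITS_ME from this package, but the transition rules are an invented toy model for an EUC-style double-byte encoding, not any table shipped in chardet.

```python
# Toy coding state machine: legal = ASCII bytes, or a lead byte 0xA1-0xFE
# followed by a trail byte 0xA1-0xFE. Everything else is an illegal sequence.
START, ERROR, ITS_ME = 0, 1, 2   # mirrors MachineState.START/ERROR/ITS_ME
NEED_TRAIL = 3                   # toy-specific intermediate state

def next_state(state, byte):
    """Advance the toy state machine by one byte."""
    if state == START:
        if byte < 0x80:
            return START              # plain ASCII, still a legal sequence
        if 0xA1 <= byte <= 0xFE:
            return NEED_TRAIL         # lead byte of a double-byte character
        return ERROR                  # 0x80-0xA0 never starts a character here
    if state == NEED_TRAIL:
        return START if 0xA1 <= byte <= 0xFE else ERROR
    return state                      # ERROR and ITS_ME are terminal

def probe(data):
    """Feed every byte, the way a detector drives each active state machine."""
    state = START
    for byte in bytearray(data):      # bytearray yields ints on Python 2 and 3
        state = next_state(state, byte)
        if state == ERROR:
            return "rejected"         # detector would drop this encoding
        if state == ITS_ME:
            return "positive"         # detector would answer immediately
    return "still possible"

print(probe(b"plain ascii"))           # still possible
print(probe(b"\xb0\xa1\xb0\xa2"))      # still possible (valid double-byte pairs)
print(probe(b"\xb0\x20"))              # rejected (bad trail byte)
```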
PK site-packages/pip/_vendor/chardet/mbcsgroupprober.py
######################## BEGIN LICENSE BLOCK ########################
# The Original Code is Mozilla Universal charset detector code.
#
# The Initial Developer of the Original Code is
# Netscape Communications Corporation.
# Portions created by the Initial Developer are Copyright (C) 2001
# the Initial Developer. All Rights Reserved.
#
# Contributor(s):
#   Mark Pilgrim - port to Python
#   Shy Shalom - original C code
#   Proofpoint, Inc.
#
# This library is free software; you can redistribute it and/or
# modify it under the terms of the GNU Lesser General Public
# License as published by the Free Software Foundation; either
# version 2.1 of the License, or (at your option) any later version.
#
# This library is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
# Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this library; if not, write to the Free Software
# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA
# 02110-1301  USA
######################### END LICENSE BLOCK #########################

from .charsetgroupprober import CharSetGroupProber
from .utf8prober import UTF8Prober
from .sjisprober import SJISProber
from .eucjpprober import EUCJPProber
from .gb2312prober import GB2312Prober
from .euckrprober import EUCKRProber
from .cp949prober import CP949Prober
from .big5prober import Big5Prober
from .euctwprober import EUCTWProber


class MBCSGroupProber(CharSetGroupProber):
    def __init__(self, lang_filter=None):
        super(MBCSGroupProber, self).__init__(lang_filter=lang_filter)
        self.probers = [
            UTF8Prober(),
            SJISProber(),
            EUCJPProber(),
            GB2312Prober(),
            EUCKRProber(),
            CP949Prober(),
            Big5Prober(),
            EUCTWProber()
        ]
        self.reset()

PK site-packages/pip/_vendor/chardet/langturkishmodel.pyo [compiled bytecode; the remaining character-order and language-model data continues below and is not recoverable as text]
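MBCSGroupProber above is not called directly; it is one of the probers the package-level API drives internally. As a usage illustration (not part of the archive), here is a small sketch of the two public entry points that the detect() docstring noted earlier refers to. Module paths assume the standalone chardet package and its 3.x attribute names (feed/close/done/result); inside pip the same modules sit under pip._vendor.chardet.

```python
import chardet
from chardet.universaldetector import UniversalDetector

data = u"\u6587\u5b57\u30b3\u30fc\u30c9".encode("euc-jp")

# One-shot detection: returns a dict with at least 'encoding' and 'confidence'.
print(chardet.detect(data))

# Incremental detection, useful for large streams: feed chunks until done.
detector = UniversalDetector()
for offset in range(0, len(data), 4):      # pretend 4-byte network chunks
    detector.feed(data[offset:offset + 4])
    if detector.done:
        break
detector.close()
print(detector.result)
```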
iiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiii(tLatin5_TurkishCharToOrderMaptTurkishLangModeltTruetLatin5TurkishModel(((sH/usr/lib/python2.7/site-packages/pip/_vendor/chardet/langturkishmodel.pyt%s,PKZERMM0site-packages/pip/_vendor/chardet/big5prober.pycnu[ abc@sZddlmZddlmZddlmZddlmZdefdYZdS(i(tMultiByteCharSetProber(tCodingStateMachine(tBig5DistributionAnalysis(t BIG5_SM_MODELt Big5ProbercBs/eZdZedZedZRS(cCs<tt|jtt|_t|_|jdS(N( tsuperRt__init__RRt coding_smRtdistribution_analyzertreset(tself((sB/usr/lib/python2.7/site-packages/pip/_vendor/chardet/big5prober.pyR#s cCsdS(NtBig5((R ((sB/usr/lib/python2.7/site-packages/pip/_vendor/chardet/big5prober.pyt charset_name)scCsdS(NtChinese((R ((sB/usr/lib/python2.7/site-packages/pip/_vendor/chardet/big5prober.pytlanguage-s(t__name__t __module__RtpropertyR R(((sB/usr/lib/python2.7/site-packages/pip/_vendor/chardet/big5prober.pyR"s N( tmbcharsetproberRtcodingstatemachineRtchardistributionRtmbcssmRR(((sB/usr/lib/python2.7/site-packages/pip/_vendor/chardet/big5prober.pytsPKZ8ЕЕ0site-packages/pip/_vendor/chardet/gb2312freq.pycnu[ abc@sdZdZdZdS(g?iiiii< iTii iQii i iii<iw ii9 ii iiiiZ iiiqi i i iW iyiiieioiiv ii i iiii iLiBi iQ iiiiQiiiEiifiiiiiiiidi( iyiii,i i i iE ieii iWiRiii| iR ii i i iX i ijiEii i+iPiiiiiii;iim 
iiliiii iiieiHiiiii^ iki6ii" iFi i i i i^ i i iiii?i`iui$i1i ii i iUi i1 iiigiliii iiGii2iiq i i iPiiL iiiiiy iii i iMiikii4 iiiiii i i@ i`ii5iiiN ii7i3iiniiuiGi&ibihi0i iM iNii i iY i ii i iii i&i iQi i7i^ihiiiJ iiPiiiiriiiiiiiT i-iGi,iui i ii$iii<iBii$i>iiix iLiWiiini iiTiiZii i i{ iii i iii i?i i0ibi;iDiiiiiCiSiiUi> i|igi ii i iKii iiii% i5 iiiiil iihi iii?ii i iUiiii ii*i< i i ii ii iii ii i! iX iii iiQ igiiii iiri ii iqii iiz ii i3ii iii i i]iciziii7 ii_ii iCii&iaiHi| ili ii=iiiIiPi ii~ i8i`i iw i i iTii i iiii iiiiii[iq iii igii|iCi ii iN iIiii5 i i(ii/iHiK i iiiii9iiIiii|iiii ii iWii iii^i~iiii i i= iXii3i iIiii iiiii3iriiiioi ii i{iiiiCii!iHi iiiifii= ii1i[i i| iDii%iitii i ii ii.iiiiyiiTiii9 i i i#iiiJiiniiii ii>iBi"iiiai iiMiKii iW i i i i! ir iFi+ i i i iix iHiJiiiii2iKiiR i$ii9i it ii ii iiiiiiiiii%i~i i` iiiii'iviii ii|ii'i iin i*ii iiFii%ii)i~ii iiiJi&ii i!iD iiwii[i ii i i i' i iikiii i i iiiiiiiih iBi_i i?iigihii'iiiiiiiiiiiii5i] ii iiiiiiii1 ii)ia iiiii; iii_iiiQi/iu i i iji[ii}ii iii iiiiQi^ i iGiizi iiiViOii ii ii imiui i6ii i-i$i iiRii} i ipir i] i iDi iiiiiiiiiiii1iniiiitiYiG i i i/ i|iti0i iiiii\i i iivii iiciiii i~iAi"ii" i!i8iCi8 ii iji i1iili>i ii] i i i~iiLi(iiiKidiiviDiiii%i iii i, ii2ii iiij i-i.i imiKi iiioi i;i ii idi@iai iM i1 iLi@i2 iiwiitiQi i/ii ii~iiiii i/ii i_ i#i iOiiOiGiiiki[iOioiii- i iii i!i i iIiiii{iri i.ihi i iT iiii'iL iaii i@iii iii iT i/ iiiii iiMiii il iii i{i8i#i iii9iiSiRi iwiiZii0 iEigig ii i iiLiiipiiiSi iiiii i@iiI iLi; iOiqiii) i i:iFii5iJidiA ii i) iEiLiYiiii im i ii i iiic iiRii!iiii ii(iii]iIii i iieiiii iiii`ii iii ii[iiC i(iPi,i i}iO i' iLi\ i i i iiiiZiiisi'i iUiiiiiN iiii i iiMiiiiii iiiiiigi'iii ii iiPi0 is iiiip i iniiii i\i iii i!ihiiimi(i& i iCiiBi)i5 iiiiiii iii>iiiii i ii{iNiiiiEi3iiJi#iicii ii$ ii5iixi i]iiikii iiiiP ii iiiriiL i izidiii#i iii[ iSiihiiGiiiii iUi+i iyiiwi"iiVi2 i&izi i iiJ iP i iiiii% iZi$ io iT iKi1 i ii`iVi ii id ii iii+i iiAii iiii$ii i_i3i\ iiiiiibi ifiiiii iAiniiixiiFi iOi[iii(i ii i#i7i2 ixiif i i i!i6i{ iiiYiiii+iiiii iiii i iKiei i i iTi$iiiih ii iRiii"i iiZi i_ ii@iiai iii3 i iki)ijiii ieii iA i^ii iW iiihi: iz i) ii| iii ii i8 ijii iii3i i iziipiyi i i2 ioiD i=ii.ii9 iaifii*i_ii i*ii i i ii i- ii i}ii i ii* i itiiti6 iqiiiiiii iSi iii!i)iiyi iii ij ii iiViIiiig iiiLii iipiDii i ii%ii iM iiii$i i iiE i"iU iie i i|i iQiii ii ii i1 iiiii$i i]i4i&iiiziii`iiwi iiii i)i% i&iiii*iii8i2i> iSi iii1i#i ii ii#i i iiii iF iii, i> i+i i iiZ i3iqii i$ii iVi i;iiiNiimiKiWiiii5i i iRi4i iIiiDiiii i i i iiii%iKi|iii iiiii9iin iii i-i3 i2 ipiii? ii~iiiiiiCii[ii iii i3 i> iili i`ix iiii i# iiii0iiii iL i iiQiiRii?i(imii` ii3iiii& i ii isiDii0 ib ifi@ i iii ii=i i i<iiii ii6iiis iii iSiii i8iZ iii ii iii i ii iiiiNi i i iLi iJ iii ibiig iii[ iiiiii~ ii i iiifiiiii i i` i4i= iiiili? iici ii iiii iiiiiiiiwiii i%ii id i^i+iii iZiiF iI iit ii i/i iiiyi* isi&i+iii) iii iii i i ie iiiQi] ii i_i7 ii0iciuiiiLiki iiiii#iiiiiiii?iiii ini i iii iii iHiiYiiu ii i iiJii_ i_i&iiii iiiZi%ii isiiSii i iViiiiii1i'iiiFi\i i|i iWiii|iiSiWi i* i iRiliPi#i i i-iq iii i ii4iiiiiia iiiii@ i iXiii%iii8 i3i,ii} iiiidixi ii# ii6i i i.iiiAi iir i i iR idiii+ iiii i{i{iHi i iiiiiici i i iii,iiI iici ii iii=i iJi. i,i>iiiiLiei]ii iC iiii*i; i ii iipi iiiRii9 i i ii iii?i:i i i iQi'isiiwi[i ii-i i)iiiMiiiii]i}i6i@i iiUiiii/iiis i iiik i? ii, i= i1iMi iii/iS id iiio i6 ikiui iiiWi{iixiWii iiiix i^ iii[ i+ i7i:iiU iii!iii i.iHiTiiYiii ii@i"iCisi i i iG ibii7i~i. 
iUi iqi iiii{iiiiYi/i iiiAi ii*i:i8i'i i ii iiA i_i0iiiiiiiiKi i iit i* iii iiii i i iiiDiiii>iiivii i i'iiViii iiikii i\iiiii!i ii*iU iiiiiizii\if iii i iiii iiii i iiB iii iii{ii=i ii&i iiV i ii"i! i i iii i( i=ii i ii9i ii4iaiii-iQ ii ii imiiiiiCiii7i}iqiDi4iiuii iiiv iiiF iiu ia iiH ioi i iliKiA iii izi4 i i! iH ii ii5 iijii`ii iB i:iOiai i#iiiidiii(i i& ii^imig iE iio ii(iG i$i@i iIiiiAi ii`iXii7ii/ii$ii iihiiii iUi"iTiiiiimiii!iiiiii iiMi iiii i2iqiii9iii iiiii/iiii ipi@iX iijiii i^ ibi i iniiviiiioioiiiviii0ii9ii$ iifiS izi iiDi{iiXiii iVi3 iYi ii iAiiiiiiqiU i i: ii2iji ii i{ ii iFi i i ii iiiiciiiR i;ik ihii i iiY iOixiiziZi<iv ii i i iiIiii4 iMiiiiib iiii ii iiCii iiiO ii8iii i.i]i iVic i iii iH iiTiii ii> i ii i3iiii,iiifiiiiliTiWiHi.i'i>ipiiii"ii iJii i ivi2iii ixi i2iigi i4iii i iii{ iz i i i iii iiiiii?iiii iJiiii i i8iiiE ij i\ii8i8 ii iiii`ii>ibiSiiii i iii0i iiiEi iidiiii1iiiini@ii iiTii iiiQiiCii iK i;ikiiiiii_i\ ieiiwiiiiiMii. iriaiii i ii iiiigii i' i iii i iTiii4ioii iXiyiixii i i iU i^i: iiii ii"i#iii ii iiiiNiUiiip ii" i-i i iii:ii ii iiii iiiW i iG iLii iGiw ioiii iMir iii iiQ iiI iNi%i iXiii i7 i iaii iii i' iiihi i}iNiyi?itii iD iiiii iliiii iiie iJisi2i*iyiiiiii iiqi ii5i}is i$iiii5 iBiiii# idisi} ii~iiib iiO i/ i]iii i]i2iiifi7iiiOi iRii# i i[iiEii if iS i idiFiiiiC iV i>ii iiBii i i1i6imiii iiiiip i ii i i<izi i| iXiiiisie i]iiii i iiOi i+i iiiiii:i iAii ii ii( i imii4iP ii inii ioioiiimiiyiii8i ii<iii<ic iC iii&iwi i i$i i9i&ii i)i`iiiiiiGibiEii:iQio iF i iii i iFi|i;i iii3i4 i%ii:i iciipi6 i\iCii0i9 ii iiii~ iii"ih iiii i)ii i iii* ii i i i%ii iiii i.iii9i iii iii ii"i! ii, i ig iJ igiF iiii ii;ii iZiEi^iA iiili5ii ii4i' iiiki ii i2ii ii iXi7iii i{iiBixi=iei(ii iii'iiiiiiiiiii iD iiikii iii-i, i iei7 i iii iiiini} ip i7ijii ii9ii iiiii0iPi iiiiiiO i i i iiUi iii< iii iJiitiNii)i i.iii*i iAiir i] i)iiiiiGii_ ipik iii i% i ii ii>ibi_i^i?ii:iYiDi%iii iiiiiiYii i iiiiil iAii i i iDiBi i ii i@ i(iv iiiigi i i}iviiii iiiii iiiiiiii i-i~iiy i%i i i_i iiiiiijiMi iiGi+i iiii iiii3 i;i iGi)ii iii i ii ii i ii,iIi iii iXiAi\iiiiP ivii iVii=i i i6 i<iiX i i i ii8 i- iY iiiii iiiciiii9i i- iuii ii iR i i ii6ii iih i}i i? iw i ii ia i( i ii1ii ii>iii i ii]i iXiriPi0 i,ii+ ijii`iiiii. iii ii& iWiB iii|i7 iUii iiWiiYii iiw i iii iiiiiuiibiiq iiibiii ii=i$ iiNiJiiiUiiii iiiiiKi,iii4iri{i iiHiipi5iii=i i iEi9ii;i i<ii5i iii ii\iiiipiiiiiriiNiii^iiiiiK ii i*ii{ i+ i ii*i8iiii iiii ij ii@il i6ii2i8ii iiii'iiiiB iiii\iiiS iV ii~iiiii i iiVi iz i i^i i iii_ i} iG iqiiI i iii8itihiS ii}iRi i9iiSin iPiiB iiiisiiXi}ifiiii iiiiiiin iIi^iiTiixiiriii_iibiiOiPiQiRiSN(iiii< iTii iQii i iii<iw ii9 ii iiiiZ iiiqi i i iW iyiiieioiiv ii i iiii iLiBi iQ iiiiQiiiEiifiiiiiiiidi( iyiii,i i i iE ieii iWiRiii| iR ii i i iX i ijiEii i+iPiiiiiii;iim iiliiii iiieiHiiiii^ iki6ii" iFi i i i i^ i i iiii?i`iui$i1i ii i iUi i1 iiigiliii iiGii2iiq i i iPiiL iiiiiy iii i iMiikii4 iiiiii i i@ i`ii5iiiN ii7i3iiniiuiGi&ibihi0i iM iNii i iY i ii i iii i&i iQi i7i^ihiiiJ iiPiiiiriiiiiiiT i-iGi,iui i ii$iii<iBii$i>iiix iLiWiiini iiTiiZii i i{ iii i iii i?i i0ibi;iDiiiiiCiSiiUi> i|igi ii i iKii iiii% i5 iiiiil iihi iii?ii i iUiiii ii*i< i i ii ii iii ii i! iX iii iiQ igiiii iiri ii iqii iiz ii i3ii iii i i]iciziii7 ii_ii iCii&iaiHi| ili ii=iiiIiPi ii~ i8i`i iw i i iTii i iiii iiiiii[iq iii igii|iCi ii iN iIiii5 i i(ii/iHiK i iiiii9iiIiii|iiii ii iWii iii^i~iiii i i= iXii3i iIiii iiiii3iriiiioi ii i{iiiiCii!iHi iiiifii= ii1i[i i| iDii%iitii i ii ii.iiiiyiiTiii9 i i i#iiiJiiniiii ii>iBi"iiiai iiMiKii iW i i i i! 
ir iFi+ i i i iix iHiJiiiii2iKiiR i$ii9i it ii ii iiiiiiiiii%i~i i` iiiii'iviii ii|ii'i iin i*ii iiFii%ii)i~ii iiiJi&ii i!iD iiwii[i ii i i i' i iikiii i i iiiiiiiih iBi_i i?iigihii'iiiiiiiiiiiii5i] ii iiiiiiii1 ii)ia iiiii; iii_iiiQi/iu i i iji[ii}ii iii iiiiQi^ i iGiizi iiiViOii ii ii imiui i6ii i-i$i iiRii} i ipir i] i iDi iiiiiiiiiiii1iniiiitiYiG i i i/ i|iti0i iiiii\i i iivii iiciiii i~iAi"ii" i!i8iCi8 ii iji i1iili>i ii] i i i~iiLi(iiiKidiiviDiiii%i iii i, ii2ii iiij i-i.i imiKi iiioi i;i ii idi@iai iM i1 iLi@i2 iiwiitiQi i/ii ii~iiiii i/ii i_ i#i iOiiOiGiiiki[iOioiii- i iii i!i i iIiiii{iri i.ihi i iT iiii'iL iaii i@iii iii iT i/ iiiii iiMiii il iii i{i8i#i iii9iiSiRi iwiiZii0 iEigig ii i iiLiiipiiiSi iiiii i@iiI iLi; iOiqiii) i i:iFii5iJidiA ii i) iEiLiYiiii im i ii i iiic iiRii!iiii ii(iii]iIii i iieiiii iiii`ii iii ii[iiC i(iPi,i i}iO i' iLi\ i i i iiiiZiiisi'i iUiiiiiN iiii i iiMiiiiii iiiiiigi'iii ii iiPi0 is iiiip i iniiii i\i iii i!ihiiimi(i& i iCiiBi)i5 iiiiiii iii>iiiii i ii{iNiiiiEi3iiJi#iicii ii$ ii5iixi i]iiikii iiiiP ii iiiriiL i izidiii#i iii[ iSiihiiGiiiii iUi+i iyiiwi"iiVi2 i&izi i iiJ iP i iiiii% iZi$ io iT iKi1 i ii`iVi ii id ii iii+i iiAii iiii$ii i_i3i\ iiiiiibi ifiiiii iAiniiixiiFi iOi[iii(i ii i#i7i2 ixiif i i i!i6i{ iiiYiiii+iiiii iiii i iKiei i i iTi$iiiih ii iRiii"i iiZi i_ ii@iiai iii3 i iki)ijiii ieii iA i^ii iW iiihi: iz i) ii| iii ii i8 ijii iii3i i iziipiyi i i2 ioiD i=ii.ii9 iaifii*i_ii i*ii i i ii i- ii i}ii i ii* i itiiti6 iqiiiiiii iSi iii!i)iiyi iii ij ii iiViIiiig iiiLii iipiDii i ii%ii iM iiii$i i iiE i"iU iie i i|i iQiii ii ii i1 iiiii$i i]i4i&iiiziii`iiwi iiii i)i% i&iiii*iii8i2i> iSi iii1i#i ii ii#i i iiii iF iii, i> i+i i iiZ i3iqii i$ii iVi i;iiiNiimiKiWiiii5i i iRi4i iIiiDiiii i i i iiii%iKi|iii iiiii9iin iii i-i3 i2 ipiii? ii~iiiiiiCii[ii iii i3 i> iili i`ix iiii i# iiii0iiii iL i iiQiiRii?i(imii` ii3iiii& i ii isiDii0 ib ifi@ i iii ii=i i i<iiii ii6iiis iii iSiii i8iZ iii ii iii i ii iiiiNi i i iLi iJ iii ibiig iii[ iiiiii~ ii i iiifiiiii i i` i4i= iiiili? iici ii iiii iiiiiiiiwiii i%ii id i^i+iii iZiiF iI iit ii i/i iiiyi* isi&i+iii) iii iii i i ie iiiQi] ii i_i7 ii0iciuiiiLiki iiiii#iiiiiiii?iiii ini i iii iii iHiiYiiu ii i iiJii_ i_i&iiii iiiZi%ii isiiSii i iViiiiii1i'iiiFi\i i|i iWiii|iiSiWi i* i iRiliPi#i i i-iq iii i ii4iiiiiia iiiii@ i iXiii%iii8 i3i,ii} iiiidixi ii# ii6i i i.iiiAi iir i i iR idiii+ iiii i{i{iHi i iiiiiici i i iii,iiI iici ii iii=i iJi. i,i>iiiiLiei]ii iC iiii*i; i ii iipi iiiRii9 i i ii iii?i:i i i iQi'isiiwi[i ii-i i)iiiMiiiii]i}i6i@i iiUiiii/iiis i iiik i? ii, i= i1iMi iii/iS id iiio i6 ikiui iiiWi{iixiWii iiiix i^ iii[ i+ i7i:iiU iii!iii i.iHiTiiYiii ii@i"iCisi i i iG ibii7i~i. iUi iqi iiii{iiiiYi/i iiiAi ii*i:i8i'i i ii iiA i_i0iiiiiiiiKi i iit i* iii iiii i i iiiDiiii>iiivii i i'iiViii iiikii i\iiiii!i ii*iU iiiiiizii\if iii i iiii iiii i iiB iii iii{ii=i ii&i iiV i ii"i! i i iii i( i=ii i ii9i ii4iaiii-iQ ii ii imiiiiiCiii7i}iqiDi4iiuii iiiv iiiF iiu ia iiH ioi i iliKiA iii izi4 i i! 
iH ii ii5 iijii`ii iB i:iOiai i#iiiidiii(i i& ii^imig iE iio ii(iG i$i@i iIiiiAi ii`iXii7ii/ii$ii iihiiii iUi"iTiiiiimiii!iiiiii iiMi iiii i2iqiii9iii iiiii/iiii ipi@iX iijiii i^ ibi i iniiviiiioioiiiviii0ii9ii$ iifiS izi iiDi{iiXiii iVi3 iYi ii iAiiiiiiqiU i i: ii2iji ii i{ ii iFi i i ii iiiiciiiR i;ik ihii i iiY iOixiiziZi<iv ii i i iiIiii4 iMiiiiib iiii ii iiCii iiiO ii8iii i.i]i iVic i iii iH iiTiii ii> i ii i3iiii,iiifiiiiliTiWiHi.i'i>ipiiii"ii iJii i ivi2iii ixi i2iigi i4iii i iii{ iz i i i iii iiiiii?iiii iJiiii i i8iiiE ij i\ii8i8 ii iiii`ii>ibiSiiii i iii0i iiiEi iidiiii1iiiini@ii iiTii iiiQiiCii iK i;ikiiiiii_i\ ieiiwiiiiiMii. iriaiii i ii iiiigii i' i iii i iTiii4ioii iXiyiixii i i iU i^i: iiii ii"i#iii ii iiiiNiUiiip ii" i-i i iii:ii ii iiii iiiW i iG iLii iGiw ioiii iMir iii iiQ iiI iNi%i iXiii i7 i iaii iii i' iiihi i}iNiyi?itii iD iiiii iliiii iiie iJisi2i*iyiiiiii iiqi ii5i}is i$iiii5 iBiiii# idisi} ii~iiib iiO i/ i]iii i]i2iiifi7iiiOi iRii# i i[iiEii if iS i idiFiiiiC iV i>ii iiBii i i1i6imiii iiiiip i ii i i<izi i| iXiiiisie i]iiii i iiOi i+i iiiiii:i iAii ii ii( i imii4iP ii inii ioioiiimiiyiii8i ii<iii<ic iC iii&iwi i i$i i9i&ii i)i`iiiiiiGibiEii:iQio iF i iii i iFi|i;i iii3i4 i%ii:i iciipi6 i\iCii0i9 ii iiii~ iii"ih iiii i)ii i iii* ii i i i%ii iiii i.iii9i iii iii ii"i! ii, i ig iJ igiF iiii ii;ii iZiEi^iA iiili5ii ii4i' iiiki ii i2ii ii iXi7iii i{iiBixi=iei(ii iii'iiiiiiiiiii iD iiikii iii-i, i iei7 i iii iiiini} ip i7ijii ii9ii iiiii0iPi iiiiiiO i i i iiUi iii< iii iJiitiNii)i i.iii*i iAiir i] i)iiiiiGii_ ipik iii i% i ii ii>ibi_i^i?ii:iYiDi%iii iiiiiiYii i iiiiil iAii i i iDiBi i ii i@ i(iv iiiigi i i}iviiii iiiii iiiiiiii i-i~iiy i%i i i_i iiiiiijiMi iiGi+i iiii iiii3 i;i iGi)ii iii i ii ii i ii,iIi iii iXiAi\iiiiP ivii iVii=i i i6 i<iiX i i i ii8 i- iY iiiii iiiciiii9i i- iuii ii iR i i ii6ii iih i}i i? iw i ii ia i( i ii1ii ii>iii i ii]i iXiriPi0 i,ii+ ijii`iiiii. iii ii& iWiB iii|i7 iUii iiWiiYii iiw i iii iiiiiuiibiiq iiibiii ii=i$ iiNiJiiiUiiii iiiiiKi,iii4iri{i iiHiipi5iii=i i iEi9ii;i i<ii5i iii ii\iiiipiiiiiriiNiii^iiiiiK ii i*ii{ i+ i ii*i8iiii iiii ij ii@il i6ii2i8ii iiii'iiiiB iiii\iiiS iV ii~iiiii i iiVi iz i i^i i iii_ i} iG iqiiI i iii8itihiS ii}iRi i9iiSin iPiiB iiiisiiXi}ifiiii iiiiiiin iIi^iiTiixiiriii_iibiiOiPiQiRiS(t!GB2312_TYPICAL_DISTRIBUTION_RATIOtGB2312_TABLE_SIZEtGB2312_CHAR_TO_FREQ_ORDER(((sB/usr/lib/python2.7/site-packages/pip/_vendor/chardet/gb2312freq.pyt*sPKZ\>,site-packages/pip/_vendor/chardet/compat.pycnu[ abc@s^ddlZejdkr<eZeZeefZeZ neZeZe efZeZ dS(iNii(ii( tsyst version_infotTruetPY2tFalsetPY3tstrtunicodetbase_strt text_typetbytes(((s>/usr/lib/python2.7/site-packages/pip/_vendor/chardet/compat.pyts    PKZO /site-packages/pip/_vendor/chardet/escprober.pyonu[ abc@sxddlmZddlmZddlmZmZmZddlm Z m Z m Z m Z defdYZ dS(i(t CharSetProber(tCodingStateMachine(tLanguageFiltert ProbingStatet MachineState(t HZ_SM_MODELtISO2022CN_SM_MODELtISO2022JP_SM_MODELtISO2022KR_SM_MODELtEscCharSetProbercBsSeZdZddZdZedZedZdZ dZ RS(s This CharSetProber uses a "code scheme" approach for detecting encodings, whereby easily recognizable escape or shift sequences are relied on to identify these encodings. 
cCstt|jd|g|_|jtj@ra|jjtt |jjtt n|jtj @r|jjtt n|jtj @r|jjttnd|_d|_d|_d|_|jdS(Nt lang_filter(tsuperR t__init__t coding_smR RtCHINESE_SIMPLIFIEDtappendRRRtJAPANESERtKOREANRtNonetactive_sm_countt_detected_charsett_detected_languaget_statetreset(tselfR ((sA/usr/lib/python2.7/site-packages/pip/_vendor/chardet/escprober.pyR *s     cCsntt|jx0|jD]%}|s/qnt|_|jqWt|j|_d|_ d|_ dS(N( R R RR tTruetactivetlenRRRR(RR ((sA/usr/lib/python2.7/site-packages/pip/_vendor/chardet/escprober.pyR:s  cCs|jS(N(R(R((sA/usr/lib/python2.7/site-packages/pip/_vendor/chardet/escprober.pyt charset_nameEscCs|jS(N(R(R((sA/usr/lib/python2.7/site-packages/pip/_vendor/chardet/escprober.pytlanguageIscCs|jr dSdSdS(NgGz?g(R(R((sA/usr/lib/python2.7/site-packages/pip/_vendor/chardet/escprober.pytget_confidenceMs cCsx|D]}x|jD]}| s|j r4qn|j|}|tjkrt|_|jd8_|jdkrtj|_ |j Sq|tj krtj |_ |j |_|j|_|j SqWqW|j S(Nii(R Rt next_stateRtERRORtFalseRRtNOT_MERtstatetITS_MEtFOUND_ITtget_coding_state_machineRRR(Rtbyte_strtcR t coding_state((sA/usr/lib/python2.7/site-packages/pip/_vendor/chardet/escprober.pytfeedSs"      N( t__name__t __module__t__doc__RR RtpropertyRRRR*(((sA/usr/lib/python2.7/site-packages/pip/_vendor/chardet/escprober.pyR #s   N(t charsetproberRtcodingstatemachineRtenumsRRRtescsmRRRRR (((sA/usr/lib/python2.7/site-packages/pip/_vendor/chardet/escprober.pyts"PKZdF]ww7site-packages/pip/_vendor/chardet/langcyrillicmodel.pyonu[ abc@sNdZdZdZdZdZdZdZied6ed6dd6ed6dd6dd6Zied6ed6dd6ed6dd6dd6Z ied6ed6dd6ed6dd6dd6Z ied6ed6dd6ed6dd6dd6Z ied6ed6dd6ed6dd6dd6Z ied6ed6dd6ed6dd6dd6Z dS(iiiiiiiiiiiiiiiiJiiKiiiiiiiiiiiiiGiiBiiAiiLii@iiiMiHiiEiCiiNiIiiiOiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiDiiiiiiiiiiiiiiiiiiiiiiiiiiiiiii ii'iiiii ii iiiii iiiii iiiiiiii6i;i%i,i:i)i0i5i.i7i*i<i$i1i&ii"i#i+i-i i(i4i8i!i=i>i3i9i/i?i2iFitchar_to_order_maptprecedence_matrixglP@?ttypical_positive_ratiotkeep_english_lettersKOI8-Rt charset_nametRussiantlanguages windows-1251s ISO-8859-5t MacCyrillictIBM866tIBM855N(iiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiJiiKiiiiiiiiiiiiiiiiiiiGiiBiiAiiLii@iiiMiHiiEiCiiNiIiiiOiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiDiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiii ii'iiiii ii iiiii iiiii iiiiiiii6i;i%i,i:i)i0i5i.i7i*i<i$i1i&ii"i#i+i-i i(i4i8i!i=i>i3i9i/i?i2iF(iiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiJiiKiiiiiiiiiiiiiiiiiiiGiiBiiAiiLii@iiiMiHiiEiCiiNiIiiiOiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiDiiiiiiii%i,i!i.i)i0i8i3i*i<i$i1i&ii"i#i-i i(i4i5i7i:i2i9i?iFi>i=i/i;i+iii ii iiiiii ii iiii iiii'iiiiii6iiiii(iiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiJiiKiiiiiiiiiiiiiiiiiiiGiiBiiAiiLii@iiiMiHiiEiCiiNiIiiiOiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiii%i,i!i.i)i0i8i3i*i<i$i1i&ii"i#i-i i(i4i5i7i:i2i9i?iFi>i=i/i;i+iii ii iiiiii ii iiii iiii'iiiiii6iiiiiiiDiiiiiiiiiiiiii(iiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiJiiKiiiiiiiiiiiiiiiiiiiGiiBiiAiiLii@iiiMiHiiEiCiiNiIiiiOiiiiiiiiii%i,i!i.i)i0i8i3i*i<i$i1i&ii"i#i-i i(i4i5i7i:i2i9i?iFi>i=i/i;i+iiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiDiiii ii iiiiii ii iiii iiii'iiiiii6iiiii(iiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiJiiKiiiiiiiiiiiiiiiiiiiGiiBiiAiiLii@iiiMiHiiEiCiiNiIiiiOiiiiiiiiiiiiiiDiiiiiiiiiiiiiiiiiiiiiiiii;i6iFii%ii,ii:i i)ii0i'i5ii.iiiiiiiii7ii*iiiiii<iiiiiiii i$iiiiiiiiii1i i&iiii"iiiiii#iii+i i-ii ii(ii4ii8i 
i!ii=iiii>ii3ii9ii/ii?ii2iii(iiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiJiiKiiiiiiiiiiiiiiiiiiiGiiBiiAiiLii@iiiMiHiiEiCiiNiIiiiOiiiiiiiiii%i,i!i.i)i0i8i3i*i<i$i1i&ii"i#i-i i(i4i5i7i:i2i9i?iFi>i=i/i;i+iii ii iiiiii ii iiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiii iiii'iiiiii6iiiiiiiDiiiiiiiiiiiiii(iiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiii
iiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiii(tKOI8R_char_to_order_maptwin1251_char_to_order_maptlatin5_char_to_order_maptmacCyrillic_char_to_order_maptIBM855_char_to_order_maptIBM866_char_to_order_maptRussianLangModeltFalset Koi8rModeltWin1251CyrillicModeltLatin5CyrillicModeltMacCyrillicModelt Ibm866Modelt Ibm855Model(((sI/usr/lib/python2.7/site-packages/pip/_vendor/chardet/langcyrillicmodel.pyts      PKZݾuN9 9 +site-packages/pip/_vendor/chardet/enums.pyonu[ abc@sdZdefdYZdefdYZdefdYZdefdYZd efd YZd efd YZd S(sr All of the Enums that are used throughout the chardet package. :author: Dan Blanchard (dan.blanchard@gmail.com) t InputStatecBs eZdZdZdZdZRS(sS This enum represents the different states a universal detector can be in. iii(t__name__t __module__t__doc__t PURE_ASCIIt ESC_ASCIIt HIGH_BYTE(((s=/usr/lib/python2.7/site-packages/pip/_vendor/chardet/enums.pyRstLanguageFiltercBsJeZdZdZdZdZdZdZdZeeBZ e eBeBZ RS(sj This enum represents the different language filters we can apply to a ``UniversalDetector``. iiiiii( RRRtCHINESE_SIMPLIFIEDtCHINESE_TRADITIONALtJAPANESEtKOREANtNON_CJKtALLtCHINESEtCJK(((s=/usr/lib/python2.7/site-packages/pip/_vendor/chardet/enums.pyRs t ProbingStatecBs eZdZdZdZdZRS(sG This enum represents the different states a prober can be in. iii(RRRt DETECTINGtFOUND_ITtNOT_ME(((s=/usr/lib/python2.7/site-packages/pip/_vendor/chardet/enums.pyR st MachineStatecBs eZdZdZdZdZRS(sN This enum represents the different states a state machine can be in. iii(RRRtSTARTtERRORtITS_ME(((s=/usr/lib/python2.7/site-packages/pip/_vendor/chardet/enums.pyR)stSequenceLikelihoodcBs5eZdZdZdZdZdZedZRS(sX This enum represents the likelihood of a character following the previous one. iiiicCsdS(s::returns: The number of likelihood categories in the enum.i((tcls((s=/usr/lib/python2.7/site-packages/pip/_vendor/chardet/enums.pytget_num_categories;s( RRRtNEGATIVEtUNLIKELYtLIKELYtPOSITIVEt classmethodR(((s=/usr/lib/python2.7/site-packages/pip/_vendor/chardet/enums.pyR2s tCharacterCategorycBs,eZdZdZdZdZdZdZRS(s This enum represents the different categories language models for ``SingleByteCharsetProber`` put characters into. Anything less than CONTROL is considered a letter. iiiii(RRRt UNDEFINEDt LINE_BREAKtSYMBOLtDIGITtCONTROL(((s=/usr/lib/python2.7/site-packages/pip/_vendor/chardet/enums.pyR As N(RtobjectRRRRRR (((s=/usr/lib/python2.7/site-packages/pip/_vendor/chardet/enums.pyts    PKZ `))*site-packages/pip/_vendor/chardet/escsm.pynu[######################## BEGIN LICENSE BLOCK ######################## # The Original Code is mozilla.org code. # # The Initial Developer of the Original Code is # Netscape Communications Corporation. 
# Portions created by the Initial Developer are Copyright (C) 1998 # the Initial Developer. All Rights Reserved. # # Contributor(s): # Mark Pilgrim - port to Python # # This library is free software; you can redistribute it and/or # modify it under the terms of the GNU Lesser General Public # License as published by the Free Software Foundation; either # version 2.1 of the License, or (at your option) any later version. # # This library is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU # Lesser General Public License for more details. # # You should have received a copy of the GNU Lesser General Public # License along with this library; if not, write to the Free Software # Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA # 02110-1301 USA ######################### END LICENSE BLOCK ######################### from .enums import MachineState HZ_CLS = ( 1,0,0,0,0,0,0,0, # 00 - 07 0,0,0,0,0,0,0,0, # 08 - 0f 0,0,0,0,0,0,0,0, # 10 - 17 0,0,0,1,0,0,0,0, # 18 - 1f 0,0,0,0,0,0,0,0, # 20 - 27 0,0,0,0,0,0,0,0, # 28 - 2f 0,0,0,0,0,0,0,0, # 30 - 37 0,0,0,0,0,0,0,0, # 38 - 3f 0,0,0,0,0,0,0,0, # 40 - 47 0,0,0,0,0,0,0,0, # 48 - 4f 0,0,0,0,0,0,0,0, # 50 - 57 0,0,0,0,0,0,0,0, # 58 - 5f 0,0,0,0,0,0,0,0, # 60 - 67 0,0,0,0,0,0,0,0, # 68 - 6f 0,0,0,0,0,0,0,0, # 70 - 77 0,0,0,4,0,5,2,0, # 78 - 7f 1,1,1,1,1,1,1,1, # 80 - 87 1,1,1,1,1,1,1,1, # 88 - 8f 1,1,1,1,1,1,1,1, # 90 - 97 1,1,1,1,1,1,1,1, # 98 - 9f 1,1,1,1,1,1,1,1, # a0 - a7 1,1,1,1,1,1,1,1, # a8 - af 1,1,1,1,1,1,1,1, # b0 - b7 1,1,1,1,1,1,1,1, # b8 - bf 1,1,1,1,1,1,1,1, # c0 - c7 1,1,1,1,1,1,1,1, # c8 - cf 1,1,1,1,1,1,1,1, # d0 - d7 1,1,1,1,1,1,1,1, # d8 - df 1,1,1,1,1,1,1,1, # e0 - e7 1,1,1,1,1,1,1,1, # e8 - ef 1,1,1,1,1,1,1,1, # f0 - f7 1,1,1,1,1,1,1,1, # f8 - ff ) HZ_ST = ( MachineState.START,MachineState.ERROR, 3,MachineState.START,MachineState.START,MachineState.START,MachineState.ERROR,MachineState.ERROR,# 00-07 MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,# 08-0f MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ERROR,MachineState.ERROR,MachineState.START,MachineState.START, 4,MachineState.ERROR,# 10-17 5,MachineState.ERROR, 6,MachineState.ERROR, 5, 5, 4,MachineState.ERROR,# 18-1f 4,MachineState.ERROR, 4, 4, 4,MachineState.ERROR, 4,MachineState.ERROR,# 20-27 4,MachineState.ITS_ME,MachineState.START,MachineState.START,MachineState.START,MachineState.START,MachineState.START,MachineState.START,# 28-2f ) HZ_CHAR_LEN_TABLE = (0, 0, 0, 0, 0, 0) HZ_SM_MODEL = {'class_table': HZ_CLS, 'class_factor': 6, 'state_table': HZ_ST, 'char_len_table': HZ_CHAR_LEN_TABLE, 'name': "HZ-GB-2312", 'language': 'Chinese'} ISO2022CN_CLS = ( 2,0,0,0,0,0,0,0, # 00 - 07 0,0,0,0,0,0,0,0, # 08 - 0f 0,0,0,0,0,0,0,0, # 10 - 17 0,0,0,1,0,0,0,0, # 18 - 1f 0,0,0,0,0,0,0,0, # 20 - 27 0,3,0,0,0,0,0,0, # 28 - 2f 0,0,0,0,0,0,0,0, # 30 - 37 0,0,0,0,0,0,0,0, # 38 - 3f 0,0,0,4,0,0,0,0, # 40 - 47 0,0,0,0,0,0,0,0, # 48 - 4f 0,0,0,0,0,0,0,0, # 50 - 57 0,0,0,0,0,0,0,0, # 58 - 5f 0,0,0,0,0,0,0,0, # 60 - 67 0,0,0,0,0,0,0,0, # 68 - 6f 0,0,0,0,0,0,0,0, # 70 - 77 0,0,0,0,0,0,0,0, # 78 - 7f 2,2,2,2,2,2,2,2, # 80 - 87 2,2,2,2,2,2,2,2, # 88 - 8f 2,2,2,2,2,2,2,2, # 90 - 97 2,2,2,2,2,2,2,2, # 98 - 9f 2,2,2,2,2,2,2,2, # a0 - a7 2,2,2,2,2,2,2,2, # a8 - af 2,2,2,2,2,2,2,2, # b0 - b7 2,2,2,2,2,2,2,2, # b8 - bf 2,2,2,2,2,2,2,2, # c0 - c7 2,2,2,2,2,2,2,2, # c8 - 
cf 2,2,2,2,2,2,2,2, # d0 - d7 2,2,2,2,2,2,2,2, # d8 - df 2,2,2,2,2,2,2,2, # e0 - e7 2,2,2,2,2,2,2,2, # e8 - ef 2,2,2,2,2,2,2,2, # f0 - f7 2,2,2,2,2,2,2,2, # f8 - ff ) ISO2022CN_ST = ( MachineState.START, 3,MachineState.ERROR,MachineState.START,MachineState.START,MachineState.START,MachineState.START,MachineState.START,# 00-07 MachineState.START,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,# 08-0f MachineState.ERROR,MachineState.ERROR,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,# 10-17 MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR, 4,MachineState.ERROR,# 18-1f MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ITS_ME,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,# 20-27 5, 6,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,# 28-2f MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ITS_ME,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,# 30-37 MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ITS_ME,MachineState.ERROR,MachineState.START,# 38-3f ) ISO2022CN_CHAR_LEN_TABLE = (0, 0, 0, 0, 0, 0, 0, 0, 0) ISO2022CN_SM_MODEL = {'class_table': ISO2022CN_CLS, 'class_factor': 9, 'state_table': ISO2022CN_ST, 'char_len_table': ISO2022CN_CHAR_LEN_TABLE, 'name': "ISO-2022-CN", 'language': 'Chinese'} ISO2022JP_CLS = ( 2,0,0,0,0,0,0,0, # 00 - 07 0,0,0,0,0,0,2,2, # 08 - 0f 0,0,0,0,0,0,0,0, # 10 - 17 0,0,0,1,0,0,0,0, # 18 - 1f 0,0,0,0,7,0,0,0, # 20 - 27 3,0,0,0,0,0,0,0, # 28 - 2f 0,0,0,0,0,0,0,0, # 30 - 37 0,0,0,0,0,0,0,0, # 38 - 3f 6,0,4,0,8,0,0,0, # 40 - 47 0,9,5,0,0,0,0,0, # 48 - 4f 0,0,0,0,0,0,0,0, # 50 - 57 0,0,0,0,0,0,0,0, # 58 - 5f 0,0,0,0,0,0,0,0, # 60 - 67 0,0,0,0,0,0,0,0, # 68 - 6f 0,0,0,0,0,0,0,0, # 70 - 77 0,0,0,0,0,0,0,0, # 78 - 7f 2,2,2,2,2,2,2,2, # 80 - 87 2,2,2,2,2,2,2,2, # 88 - 8f 2,2,2,2,2,2,2,2, # 90 - 97 2,2,2,2,2,2,2,2, # 98 - 9f 2,2,2,2,2,2,2,2, # a0 - a7 2,2,2,2,2,2,2,2, # a8 - af 2,2,2,2,2,2,2,2, # b0 - b7 2,2,2,2,2,2,2,2, # b8 - bf 2,2,2,2,2,2,2,2, # c0 - c7 2,2,2,2,2,2,2,2, # c8 - cf 2,2,2,2,2,2,2,2, # d0 - d7 2,2,2,2,2,2,2,2, # d8 - df 2,2,2,2,2,2,2,2, # e0 - e7 2,2,2,2,2,2,2,2, # e8 - ef 2,2,2,2,2,2,2,2, # f0 - f7 2,2,2,2,2,2,2,2, # f8 - ff ) ISO2022JP_ST = ( MachineState.START, 3,MachineState.ERROR,MachineState.START,MachineState.START,MachineState.START,MachineState.START,MachineState.START,# 00-07 MachineState.START,MachineState.START,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,# 08-0f MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,# 10-17 MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ERROR,MachineState.ERROR,# 18-1f MachineState.ERROR, 5,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR, 4,MachineState.ERROR,MachineState.ERROR,# 20-27 MachineState.ERROR,MachineState.ERROR,MachineState.ERROR, 6,MachineState.ITS_ME,MachineState.ERROR,MachineState.ITS_ME,MachineState.ERROR,# 28-2f 
MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ITS_ME,MachineState.ITS_ME,# 30-37 MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ITS_ME,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,# 38-3f MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ITS_ME,MachineState.ERROR,MachineState.START,MachineState.START,# 40-47 ) ISO2022JP_CHAR_LEN_TABLE = (0, 0, 0, 0, 0, 0, 0, 0, 0, 0) ISO2022JP_SM_MODEL = {'class_table': ISO2022JP_CLS, 'class_factor': 10, 'state_table': ISO2022JP_ST, 'char_len_table': ISO2022JP_CHAR_LEN_TABLE, 'name': "ISO-2022-JP", 'language': 'Japanese'} ISO2022KR_CLS = ( 2,0,0,0,0,0,0,0, # 00 - 07 0,0,0,0,0,0,0,0, # 08 - 0f 0,0,0,0,0,0,0,0, # 10 - 17 0,0,0,1,0,0,0,0, # 18 - 1f 0,0,0,0,3,0,0,0, # 20 - 27 0,4,0,0,0,0,0,0, # 28 - 2f 0,0,0,0,0,0,0,0, # 30 - 37 0,0,0,0,0,0,0,0, # 38 - 3f 0,0,0,5,0,0,0,0, # 40 - 47 0,0,0,0,0,0,0,0, # 48 - 4f 0,0,0,0,0,0,0,0, # 50 - 57 0,0,0,0,0,0,0,0, # 58 - 5f 0,0,0,0,0,0,0,0, # 60 - 67 0,0,0,0,0,0,0,0, # 68 - 6f 0,0,0,0,0,0,0,0, # 70 - 77 0,0,0,0,0,0,0,0, # 78 - 7f 2,2,2,2,2,2,2,2, # 80 - 87 2,2,2,2,2,2,2,2, # 88 - 8f 2,2,2,2,2,2,2,2, # 90 - 97 2,2,2,2,2,2,2,2, # 98 - 9f 2,2,2,2,2,2,2,2, # a0 - a7 2,2,2,2,2,2,2,2, # a8 - af 2,2,2,2,2,2,2,2, # b0 - b7 2,2,2,2,2,2,2,2, # b8 - bf 2,2,2,2,2,2,2,2, # c0 - c7 2,2,2,2,2,2,2,2, # c8 - cf 2,2,2,2,2,2,2,2, # d0 - d7 2,2,2,2,2,2,2,2, # d8 - df 2,2,2,2,2,2,2,2, # e0 - e7 2,2,2,2,2,2,2,2, # e8 - ef 2,2,2,2,2,2,2,2, # f0 - f7 2,2,2,2,2,2,2,2, # f8 - ff ) ISO2022KR_ST = ( MachineState.START, 3,MachineState.ERROR,MachineState.START,MachineState.START,MachineState.START,MachineState.ERROR,MachineState.ERROR,# 00-07 MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,# 08-0f MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR, 4,MachineState.ERROR,MachineState.ERROR,# 10-17 MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR, 5,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,# 18-1f MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ITS_ME,MachineState.START,MachineState.START,MachineState.START,MachineState.START,# 20-27 ) ISO2022KR_CHAR_LEN_TABLE = (0, 0, 0, 0, 0, 0) ISO2022KR_SM_MODEL = {'class_table': ISO2022KR_CLS, 'class_factor': 6, 'state_table': ISO2022KR_ST, 'char_len_table': ISO2022KR_CHAR_LEN_TABLE, 'name': "ISO-2022-KR", 'language': 'Korean'} PKZA0site-packages/pip/_vendor/chardet/euctwprober.pynu[######################## BEGIN LICENSE BLOCK ######################## # The Original Code is mozilla.org code. # # The Initial Developer of the Original Code is # Netscape Communications Corporation. # Portions created by the Initial Developer are Copyright (C) 1998 # the Initial Developer. All Rights Reserved. # # Contributor(s): # Mark Pilgrim - port to Python # # This library is free software; you can redistribute it and/or # modify it under the terms of the GNU Lesser General Public # License as published by the Free Software Foundation; either # version 2.1 of the License, or (at your option) any later version. # # This library is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. 
# Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this library; if not, write to the Free Software
# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA
# 02110-1301  USA
######################### END LICENSE BLOCK #########################

from .mbcharsetprober import MultiByteCharSetProber
from .codingstatemachine import CodingStateMachine
from .chardistribution import EUCTWDistributionAnalysis
from .mbcssm import EUCTW_SM_MODEL


class EUCTWProber(MultiByteCharSetProber):
    def __init__(self):
        super(EUCTWProber, self).__init__()
        self.coding_sm = CodingStateMachine(EUCTW_SM_MODEL)
        self.distribution_analyzer = EUCTWDistributionAnalysis()
        self.reset()

    @property
    def charset_name(self):
        return "EUC-TW"

    @property
    def language(self):
        return "Taiwan"

PKZERMM0site-packages/pip/_vendor/chardet/big5prober.pyonu[
PKZ' ^^/site-packages/pip/_vendor/chardet/euckrfreq.pyonu[
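Editorial aside, not part of the packaged files: a minimal sketch of how a prober such as the EUCTWProber source shown above is typically driven. The import paths assume the pip-vendored chardet layout used throughout this archive, and the sample bytes are invented for illustration.

from pip._vendor.chardet.euctwprober import EUCTWProber
from pip._vendor.chardet.enums import ProbingState

prober = EUCTWProber()
state = prober.feed(b'\xc4\xe3\xa6\xbe')   # hypothetical EUC-TW style byte pairs
if state == ProbingState.FOUND_IT:
    print(prober.charset_name)             # "EUC-TW"
else:
    print(prober.get_confidence())         # interim confidence while still detecting

In normal use a UniversalDetector instance runs many such probers in parallel and reports the most confident one.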
PKZc?fafa8site-packages/pip/_vendor/chardet/langhungarianmodel.pycnu[
PKZu4site-packages/pip/_vendor/chardet/sbcharsetprober.pynu[
######################## BEGIN LICENSE BLOCK ########################
# The Original Code is Mozilla Universal charset detector code.
#
# The Initial Developer of the Original Code is
# Netscape Communications Corporation.
# Portions created by the Initial Developer are Copyright (C) 2001
# the Initial Developer. All Rights Reserved.
#
# Contributor(s):
#   Mark Pilgrim - port to Python
#   Shy Shalom - original C code
#
# This library is free software; you can redistribute it and/or
# modify it under the terms of the GNU Lesser General Public
# License as published by the Free Software Foundation; either
# version 2.1 of the License, or (at your option) any later version.
#
# This library is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
# Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this library; if not, write to the Free Software
# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA
# 02110-1301  USA
######################### END LICENSE BLOCK #########################

from .charsetprober import CharSetProber
from .enums import CharacterCategory, ProbingState, SequenceLikelihood


class SingleByteCharSetProber(CharSetProber):
    SAMPLE_SIZE = 64
    SB_ENOUGH_REL_THRESHOLD = 1024  # 0.25 * SAMPLE_SIZE^2
    POSITIVE_SHORTCUT_THRESHOLD = 0.95
    NEGATIVE_SHORTCUT_THRESHOLD = 0.05

    def __init__(self, model, reversed=False, name_prober=None):
        super(SingleByteCharSetProber, self).__init__()
        self._model = model
        # TRUE if we need to reverse every pair in the model lookup
        self._reversed = reversed
        # Optional auxiliary prober for name decision
        self._name_prober = name_prober
        self._last_order = None
        self._seq_counters = None
        self._total_seqs = None
        self._total_char = None
        self._freq_char = None
        self.reset()

    def reset(self):
        super(SingleByteCharSetProber, self).reset()
        # char order of last character
        self._last_order = 255
        self._seq_counters = [0] * SequenceLikelihood.get_num_categories()
        self._total_seqs = 0
        self._total_char = 0
        # characters that fall in our sampling range
        self._freq_char = 0

    @property
    def charset_name(self):
        if self._name_prober:
            return self._name_prober.charset_name
        else:
            return self._model['charset_name']

    @property
    def language(self):
        if self._name_prober:
            return self._name_prober.language
        else:
            return self._model.get('language')

    def feed(self, byte_str):
        if not self._model['keep_english_letter']:
            byte_str = self.filter_international_words(byte_str)
        if not byte_str:
            return self.state
        char_to_order_map = self._model['char_to_order_map']
        for i, c in enumerate(byte_str):
            # XXX: Order is in range 1-64, so one would think we want 0-63 here,
            #      but that leads to 27 more test failures than before.
            order = char_to_order_map[c]
            # XXX: This was SYMBOL_CAT_ORDER before, with a value of 250, but
            #      CharacterCategory.SYMBOL is actually 253, so we use CONTROL
            #      to make it closer to the original intent. The only difference
            #      is whether or not we count digits and control characters for
            #      _total_char purposes.
            if order < CharacterCategory.CONTROL:
                self._total_char += 1
            if order < self.SAMPLE_SIZE:
                self._freq_char += 1
                if self._last_order < self.SAMPLE_SIZE:
                    self._total_seqs += 1
                    if not self._reversed:
                        i = (self._last_order * self.SAMPLE_SIZE) + order
                        model = self._model['precedence_matrix'][i]
                    else:  # reverse the order of the letters in the lookup
                        i = (order * self.SAMPLE_SIZE) + self._last_order
                        model = self._model['precedence_matrix'][i]
                    self._seq_counters[model] += 1
            self._last_order = order

        charset_name = self._model['charset_name']
        if self.state == ProbingState.DETECTING:
            if self._total_seqs > self.SB_ENOUGH_REL_THRESHOLD:
                confidence = self.get_confidence()
                if confidence > self.POSITIVE_SHORTCUT_THRESHOLD:
                    self.logger.debug('%s confidence = %s, we have a winner',
                                      charset_name, confidence)
                    self._state = ProbingState.FOUND_IT
                elif confidence < self.NEGATIVE_SHORTCUT_THRESHOLD:
                    self.logger.debug('%s confidence = %s, below negative '
                                      'shortcut threshhold %s', charset_name,
                                      confidence,
                                      self.NEGATIVE_SHORTCUT_THRESHOLD)
                    self._state = ProbingState.NOT_ME

        return self.state

    def get_confidence(self):
        r = 0.01
        if self._total_seqs > 0:
            r = ((1.0 * self._seq_counters[SequenceLikelihood.POSITIVE]) /
                 self._total_seqs / self._model['typical_positive_ratio'])
            r = r * self._freq_char / self._total_char
            if r >= 1.0:
                r = 0.99
        return r

PKZ|enn+site-packages/pip/_vendor/chardet/compat.pynu[
######################## BEGIN LICENSE BLOCK ########################
# Contributor(s):
#   Dan Blanchard
#   Ian Cordasco
#
# This library is free software; you can redistribute it and/or
# modify it under the terms of the GNU Lesser General Public
# License as published by the Free Software Foundation; either
# version 2.1 of the License, or (at your option) any later version.
#
# This library is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
# Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this library; if not, write to the Free Software
# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA
# 02110-1301  USA
######################### END LICENSE BLOCK #########################

import sys


if sys.version_info < (3, 0):
    PY2 = True
    PY3 = False
    base_str = (str, unicode)
    text_type = unicode
else:
    PY2 = False
    PY3 = True
    base_str = (bytes, str)
    text_type = str

PKZW6 .site-packages/pip/_vendor/chardet/big5freq.pycnu[
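Editorial aside: a small worked example, with all numbers invented, of the confidence formula in SingleByteCharSetProber.get_confidence above; r is the POSITIVE-sequence ratio normalised by the model's typical_positive_ratio and scaled by the fraction of characters that fell inside the 64-entry sample range.

positive_seqs = 4200            # hypothetical count of POSITIVE-rated sequences
total_seqs = 5000               # hypothetical total scored sequences
typical_positive_ratio = 0.95   # hypothetical value from a language-model dict
freq_char = 4800                # hypothetical chars inside the sample range
total_char = 5200               # hypothetical non-control characters seen

r = (1.0 * positive_seqs) / total_seqs / typical_positive_ratio
r = r * freq_char / total_char
if r >= 1.0:                    # the prober caps the result at 0.99
    r = 0.99
print(round(r, 3))              # -> 0.816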
PKZA0ЭЭ-site-packages/pip/_vendor/chardet/jisfreq.pyonu[
PKZOT 1site-packages/pip/_vendor/chardet/eucjpprober.pycnu[
PKZ/553site-packages/pip/_vendor/chardet/charsetprober.pyonu[
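Editorial aside: the bytecode entries listed just above (eucjpprober, charsetprober) pair a coding state machine with a character-distribution analyser fed by the frequency tables (big5freq, jisfreq, euckrfreq) earlier in this archive. A hedged sketch of the distribution side; the import path and sample bytes are assumptions, not vendored source.

from pip._vendor.chardet.chardistribution import SJISDistributionAnalysis

analyser = SJISDistributionAnalysis()
analyser.feed(b'\x88\xa4', 2)       # one hypothetical two-byte Shift_JIS character
print(analyser.get_confidence())    # stays at 0.01 until enough characters are seen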
PKZ/U|?`?`4site-packages/pip/_vendor/chardet/langgreekmodel.pycnu[
PKZ<'6site-packages/pip/_vendor/chardet/chardistribution.pycnu[
cCs|j|jkS(N(RtENOUGH_DATA_THRESHOLD(R((sH/usr/lib/python2.7/site-packages/pip/_vendor/chardet/chardistribution.pytgot_enough_datadscCsdS(Ni((Rtbyte_str((sH/usr/lib/python2.7/site-packages/pip/_vendor/chardet/chardistribution.pyRis( t__name__t __module__R%R"R!R RRRR$R&R(((sH/usr/lib/python2.7/site-packages/pip/_vendor/chardet/chardistribution.pyR(s    tEUCTWDistributionAnalysiscBseZdZdZRS(cCs2tt|jt|_t|_t|_dS(N( tsuperR*RRRRRRR(R((sH/usr/lib/python2.7/site-packages/pip/_vendor/chardet/chardistribution.pyRrs  cCs6|d}|dkr.d|d|ddSdSdS(Niii^iii((RR't first_char((sH/usr/lib/python2.7/site-packages/pip/_vendor/chardet/chardistribution.pyRxs  (R(R)RR(((sH/usr/lib/python2.7/site-packages/pip/_vendor/chardet/chardistribution.pyR*qs tEUCKRDistributionAnalysiscBseZdZdZRS(cCs2tt|jt|_t|_t|_dS(N( R+R-RRRRRRR(R((sH/usr/lib/python2.7/site-packages/pip/_vendor/chardet/chardistribution.pyRs  cCs6|d}|dkr.d|d|ddSdSdS(Niii^iii((RR'R,((sH/usr/lib/python2.7/site-packages/pip/_vendor/chardet/chardistribution.pyRs  (R(R)RR(((sH/usr/lib/python2.7/site-packages/pip/_vendor/chardet/chardistribution.pyR-s tGB2312DistributionAnalysiscBseZdZdZRS(cCs2tt|jt|_t|_t|_dS(N( R+R.RRRRRRR(R((sH/usr/lib/python2.7/site-packages/pip/_vendor/chardet/chardistribution.pyRs  cCsI|d|d}}|dkrA|dkrAd|d|dSdSdS(Niiiii^i((RR'R,t second_char((sH/usr/lib/python2.7/site-packages/pip/_vendor/chardet/chardistribution.pyRs(R(R)RR(((sH/usr/lib/python2.7/site-packages/pip/_vendor/chardet/chardistribution.pyR.s tBig5DistributionAnalysiscBseZdZdZRS(cCs2tt|jt|_t|_t|_dS(N( R+R0RR RR RR R(R((sH/usr/lib/python2.7/site-packages/pip/_vendor/chardet/chardistribution.pyRs  cCsd|d|d}}|dkr\|dkrEd|d|ddSd|d|dSndSdS( Niiiiii?i@i((RR'R,R/((sH/usr/lib/python2.7/site-packages/pip/_vendor/chardet/chardistribution.pyRs   (R(R)RR(((sH/usr/lib/python2.7/site-packages/pip/_vendor/chardet/chardistribution.pyR0s tSJISDistributionAnalysiscBseZdZdZRS(cCs2tt|jt|_t|_t|_dS(N( R+R1RR RR RRR(R((sH/usr/lib/python2.7/site-packages/pip/_vendor/chardet/chardistribution.pyRs  cCs|d|d}}|dkr>|dkr>d|d}n1|dkrk|dkrkd|dd}nd S||d }|d krd }n|S( Niiiiiiiiii@i((RR'R,R/R((sH/usr/lib/python2.7/site-packages/pip/_vendor/chardet/chardistribution.pyRs  (R(R)RR(((sH/usr/lib/python2.7/site-packages/pip/_vendor/chardet/chardistribution.pyR1s tEUCJPDistributionAnalysiscBseZdZdZRS(cCs2tt|jt|_t|_t|_dS(N( R+R2RR RR RRR(R((sH/usr/lib/python2.7/site-packages/pip/_vendor/chardet/chardistribution.pyRs  cCs6|d}|dkr.d|d|ddSdSdS(Niii^iii((RR'R((sH/usr/lib/python2.7/site-packages/pip/_vendor/chardet/chardistribution.pyRs  (R(R)RR(((sH/usr/lib/python2.7/site-packages/pip/_vendor/chardet/chardistribution.pyR2s N(t euctwfreqRRRt euckrfreqRRRt gb2312freqRRRtbig5freqR R R tjisfreqR R RtobjectRR*R-R.R0R1R2(((sH/usr/lib/python2.7/site-packages/pip/_vendor/chardet/chardistribution.pytsIPKZQ[[5site-packages/pip/_vendor/chardet/langhebrewmodel.pycnu[ abc@s@dZdZied6ed6dd6ed6dd6dd6ZdS(iiiiiEi[iOiPi\iYiaiZiDioipiRiIi_iUiNiyiViGiCifikiTirigisi2iJi<i=i*iLiFi@i5iii]i8iAi6i1iBini3i+i,i?iQiMibiKili|iiiii(i:iiiiiiiiiiiSi4i/i.iHi i^iiqiimiiiii"itiividiiiuiwihi}iiiWiciijizi{ii7iiieiiixii0i'i9iii;i)iXi!i%i$iii#ii>iii~iii&i-iiiiiiiiiiiiii iiiiiiiiiiiii iii ii iiiiiii iiii`itchar_to_order_maptprecedence_matrixg C|?ttypical_positive_ratiotkeep_english_letters windows-1255t charset_nametHebrewtlanguageN(iiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiEi[iOiPi\iYiaiZiDioipiRiIi_iUiNiyiViGiCifikiTirigisiiiiiii2iJi<i=i*iLiFi@i5iii]i8iAi6i1iBini3i+i,i?iQiMibiKiliiiiii|iiiii(i:iiiiiiiiiiiSi4i/i.iHi 
i^iiqiimiiiii"itiividiiiuiwihi}iiiWiciijizi{ii7iiieiiixii0i'i9iii;i)iXi!i%i$iii#ii>iii~iii&i-iiiiiiiiiiiiii iiiiiiiiiiiii iii ii iiiiiii iiiii`i(iiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiii
iiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiii(tWIN1255_CHAR_TO_ORDER_MAPtHEBREW_LANG_MODELtFalsetWin1255HebrewModel(((sG/usr/lib/python2.7/site-packages/pip/_vendor/chardet/langhebrewmodel.pyt&s, PKZ 8__2site-packages/pip/_vendor/chardet/gb2312prober.pyonu[ abc@sZddlmZddlmZddlmZddlmZdefdYZdS(i(tMultiByteCharSetProber(tCodingStateMachine(tGB2312DistributionAnalysis(tGB2312_SM_MODELt GB2312ProbercBs/eZdZedZedZRS(cCs<tt|jtt|_t|_|jdS(N( tsuperRt__init__RRt coding_smRtdistribution_analyzertreset(tself((sD/usr/lib/python2.7/site-packages/pip/_vendor/chardet/gb2312prober.pyR"s cCsdS(NtGB2312((R ((sD/usr/lib/python2.7/site-packages/pip/_vendor/chardet/gb2312prober.pyt charset_name(scCsdS(NtChinese((R ((sD/usr/lib/python2.7/site-packages/pip/_vendor/chardet/gb2312prober.pytlanguage,s(t__name__t __module__RtpropertyR R(((sD/usr/lib/python2.7/site-packages/pip/_vendor/chardet/gb2312prober.pyR!s N( tmbcharsetproberRtcodingstatemachineRtchardistributionRtmbcssmRR(((sD/usr/lib/python2.7/site-packages/pip/_vendor/chardet/gb2312prober.pytsPKZtVV1site-packages/pip/_vendor/chardet/euctwprober.pycnu[ abc@sZddlmZddlmZddlmZddlmZdefdYZdS(i(tMultiByteCharSetProber(tCodingStateMachine(tEUCTWDistributionAnalysis(tEUCTW_SM_MODELt EUCTWProbercBs/eZdZedZedZRS(cCs<tt|jtt|_t|_|jdS(N( tsuperRt__init__RRt coding_smRtdistribution_analyzertreset(tself((sC/usr/lib/python2.7/site-packages/pip/_vendor/chardet/euctwprober.pyR"s cCsdS(NsEUC-TW((R ((sC/usr/lib/python2.7/site-packages/pip/_vendor/chardet/euctwprober.pyt charset_name(scCsdS(NtTaiwan((R ((sC/usr/lib/python2.7/site-packages/pip/_vendor/chardet/euctwprober.pytlanguage,s(t__name__t __module__RtpropertyR R (((sC/usr/lib/python2.7/site-packages/pip/_vendor/chardet/euctwprober.pyR!s N( tmbcharsetproberRtcodingstatemachineRtchardistributionRtmbcssmRR(((sC/usr/lib/python2.7/site-packages/pip/_vendor/chardet/euctwprober.pytsPKZ\7H7site-packages/pip/_vendor/chardet/codingstatemachine.pynu[######################## BEGIN LICENSE BLOCK ######################## # The Original Code is mozilla.org code. # # The Initial Developer of the Original Code is # Netscape Communications Corporation. # Portions created by the Initial Developer are Copyright (C) 1998 # the Initial Developer. All Rights Reserved. # # Contributor(s): # Mark Pilgrim - port to Python # # This library is free software; you can redistribute it and/or # modify it under the terms of the GNU Lesser General Public # License as published by the Free Software Foundation; either # version 2.1 of the License, or (at your option) any later version. # # This library is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU # Lesser General Public License for more details. 
# # You should have received a copy of the GNU Lesser General Public # License along with this library; if not, write to the Free Software # Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA # 02110-1301 USA ######################### END LICENSE BLOCK ######################### import logging from .enums import MachineState class CodingStateMachine(object): """ A state machine to verify a byte sequence for a particular encoding. For each byte the detector receives, it will feed that byte to every active state machine available, one byte at a time. The state machine changes its state based on its previous state and the byte it receives. There are 3 states in a state machine that are of interest to an auto-detector: START state: This is the state to start with, or a legal byte sequence (i.e. a valid code point) for character has been identified. ME state: This indicates that the state machine identified a byte sequence that is specific to the charset it is designed for and that there is no other possible encoding which can contain this byte sequence. This will to lead to an immediate positive answer for the detector. ERROR state: This indicates the state machine identified an illegal byte sequence for that encoding. This will lead to an immediate negative answer for this encoding. Detector will exclude this encoding from consideration from here on. """ def __init__(self, sm): self._model = sm self._curr_byte_pos = 0 self._curr_char_len = 0 self._curr_state = None self.logger = logging.getLogger(__name__) self.reset() def reset(self): self._curr_state = MachineState.START def next_state(self, c): # for each byte we get its class # if it is first byte, we also get byte length byte_class = self._model['class_table'][c] if self._curr_state == MachineState.START: self._curr_byte_pos = 0 self._curr_char_len = self._model['char_len_table'][byte_class] # from byte's class and state_table, we get its next state curr_state = (self._curr_state * self._model['class_factor'] + byte_class) self._curr_state = self._model['state_table'][curr_state] self._curr_byte_pos += 1 return self._curr_state def get_current_charlen(self): return self._curr_char_len def get_coding_state_machine(self): return self._model['name'] @property def language(self): return self._model['language'] PKZޏ7site-packages/pip/_vendor/chardet/universaldetector.pyonu[ abc@sdZddlZddlZddlZddlmZddlmZmZm Z ddl m Z ddl m Z ddlmZdd lmZd efd YZdS( s Module containing the UniversalDetector detector class, which is the primary class a user of ``chardet`` should use. :author: Mark Pilgrim (initial port to Python) :author: Shy Shalom (original C code) :author: Dan Blanchard (major refactoring for 3.0) :author: Ian Cordasco iNi(tCharSetGroupProber(t InputStatetLanguageFiltert ProbingState(tEscCharSetProber(t Latin1Prober(tMBCSGroupProber(tSBCSGroupProbertUniversalDetectorcBseZdZdZejdZejdZejdZidd6dd6d d 6d d 6d d6dd6dd6dd6Z e j dZ dZ dZdZRS(sq The ``UniversalDetector`` class underlies the ``chardet.detect`` function and coordinates all of the different charset probers. To get a ``dict`` containing an encoding and its confidence, you can simply run: .. 
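The CodingStateMachine defined above is driven one byte at a time by the multi-byte probers in this package. A minimal sketch of that loop, assuming the vendored import paths (pip._vendor.chardet; a standalone chardet install imports from chardet directly) and the GB2312_SM_MODEL that gb2312prober relies on:

from pip._vendor.chardet.codingstatemachine import CodingStateMachine
from pip._vendor.chardet.enums import MachineState
from pip._vendor.chardet.mbcssm import GB2312_SM_MODEL


def verify_byte_sequence(byte_str):
    """Classify byte_str against a single state machine, as a prober would."""
    sm = CodingStateMachine(GB2312_SM_MODEL)
    for byte in bytearray(byte_str):
        state = sm.next_state(byte)
        if state == MachineState.ERROR:
            # illegal byte sequence for this encoding: immediate negative answer
            return 'not ' + sm.get_coding_state_machine()
        if state == MachineState.ITS_ME:
            # sequence only legal in this encoding: immediate positive answer
            return sm.get_coding_state_machine()
    # ran out of input without hitting ERROR or ITS_ME
    return 'undecided'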
sno data received!tasciiRg?RRRgsiso-8859s no probers hit minimum thresholds%s %s confidence = %sN(R R R+RRtdebugRRRR.R R R4tMINIMUM_THRESHOLDR3tlowerR$Rt ISO_WIN_MAPtgetRtgetEffectiveLevelRtDEBUGR"Rtprobers( Rtprober_confidencetmax_prober_confidencet max_proberR R3tlower_charset_nameRt group_prober((sI/usr/lib/python2.7/site-packages/pip/_vendor/chardet/universaldetector.pytcloses`              (Rt __module__t__doc__R;tretcompileR,R/R7R=RtALLRRR1RG(((sI/usr/lib/python2.7/site-packages/pip/_vendor/chardet/universaldetector.pyR3s"    m(RIR%RRJtcharsetgroupproberRtenumsRRRt escproberRt latin1proberRtmbcsgroupproberRtsbcsgroupproberRtobjectR(((sI/usr/lib/python2.7/site-packages/pip/_vendor/chardet/universaldetector.pyt$s   PKZ cFUU1site-packages/pip/_vendor/chardet/cp949prober.pyonu[ abc@sZddlmZddlmZddlmZddlmZdefdYZdS(i(tEUCKRDistributionAnalysis(tCodingStateMachine(tMultiByteCharSetProber(tCP949_SM_MODELt CP949ProbercBs/eZdZedZedZRS(cCs<tt|jtt|_t|_|jdS(N( tsuperRt__init__RRt coding_smRtdistribution_analyzertreset(tself((sC/usr/lib/python2.7/site-packages/pip/_vendor/chardet/cp949prober.pyR#s cCsdS(NtCP949((R ((sC/usr/lib/python2.7/site-packages/pip/_vendor/chardet/cp949prober.pyt charset_name+scCsdS(NtKorean((R ((sC/usr/lib/python2.7/site-packages/pip/_vendor/chardet/cp949prober.pytlanguage/s(t__name__t __module__RtpropertyR R(((sC/usr/lib/python2.7/site-packages/pip/_vendor/chardet/cp949prober.pyR"s N( tchardistributionRtcodingstatemachineRtmbcharsetproberRtmbcssmRR(((sC/usr/lib/python2.7/site-packages/pip/_vendor/chardet/cp949prober.pytsPKZy%jLL+site-packages/pip/_vendor/chardet/jpcntx.pynu[######################## BEGIN LICENSE BLOCK ######################## # The Original Code is Mozilla Communicator client code. # # The Initial Developer of the Original Code is # Netscape Communications Corporation. # Portions created by the Initial Developer are Copyright (C) 1998 # the Initial Developer. All Rights Reserved. # # Contributor(s): # Mark Pilgrim - port to Python # # This library is free software; you can redistribute it and/or # modify it under the terms of the GNU Lesser General Public # License as published by the Free Software Foundation; either # version 2.1 of the License, or (at your option) any later version. # # This library is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU # Lesser General Public License for more details. 
# # You should have received a copy of the GNU Lesser General Public # License along with this library; if not, write to the Free Software # Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA # 02110-1301 USA ######################### END LICENSE BLOCK ######################### # This is hiragana 2-char sequence table, the number in each cell represents its frequency category jp2CharContext = ( (0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,1,0,0,0,0,0,0,0,0,0,0,1), (2,4,0,4,0,3,0,4,0,3,4,4,4,2,4,3,3,4,3,2,3,3,4,2,3,3,3,2,4,1,4,3,3,1,5,4,3,4,3,4,3,5,3,0,3,5,4,2,0,3,1,0,3,3,0,3,3,0,1,1,0,4,3,0,3,3,0,4,0,2,0,3,5,5,5,5,4,0,4,1,0,3,4), (0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2), (0,4,0,5,0,5,0,4,0,4,5,4,4,3,5,3,5,1,5,3,4,3,4,4,3,4,3,3,4,3,5,4,4,3,5,5,3,5,5,5,3,5,5,3,4,5,5,3,1,3,2,0,3,4,0,4,2,0,4,2,1,5,3,2,3,5,0,4,0,2,0,5,4,4,5,4,5,0,4,0,0,4,4), (0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0), (0,3,0,4,0,3,0,3,0,4,5,4,3,3,3,3,4,3,5,4,4,3,5,4,4,3,4,3,4,4,4,4,5,3,4,4,3,4,5,5,4,5,5,1,4,5,4,3,0,3,3,1,3,3,0,4,4,0,3,3,1,5,3,3,3,5,0,4,0,3,0,4,4,3,4,3,3,0,4,1,1,3,4), (0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0), (0,4,0,3,0,3,0,4,0,3,4,4,3,2,2,1,2,1,3,1,3,3,3,3,3,4,3,1,3,3,5,3,3,0,4,3,0,5,4,3,3,5,4,4,3,4,4,5,0,1,2,0,1,2,0,2,2,0,1,0,0,5,2,2,1,4,0,3,0,1,0,4,4,3,5,4,3,0,2,1,0,4,3), (0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0), (0,3,0,5,0,4,0,2,1,4,4,2,4,1,4,2,4,2,4,3,3,3,4,3,3,3,3,1,4,2,3,3,3,1,4,4,1,1,1,4,3,3,2,0,2,4,3,2,0,3,3,0,3,1,1,0,0,0,3,3,0,4,2,2,3,4,0,4,0,3,0,4,4,5,3,4,4,0,3,0,0,1,4), (1,4,0,4,0,4,0,4,0,3,5,4,4,3,4,3,5,4,3,3,4,3,5,4,4,4,4,3,4,2,4,3,3,1,5,4,3,2,4,5,4,5,5,4,4,5,4,4,0,3,2,2,3,3,0,4,3,1,3,2,1,4,3,3,4,5,0,3,0,2,0,4,5,5,4,5,4,0,4,0,0,5,4), (0,5,0,5,0,4,0,3,0,4,4,3,4,3,3,3,4,0,4,4,4,3,4,3,4,3,3,1,4,2,4,3,4,0,5,4,1,4,5,4,4,5,3,2,4,3,4,3,2,4,1,3,3,3,2,3,2,0,4,3,3,4,3,3,3,4,0,4,0,3,0,4,5,4,4,4,3,0,4,1,0,1,3), (0,3,1,4,0,3,0,2,0,3,4,4,3,1,4,2,3,3,4,3,4,3,4,3,4,4,3,2,3,1,5,4,4,1,4,4,3,5,4,4,3,5,5,4,3,4,4,3,1,2,3,1,2,2,0,3,2,0,3,1,0,5,3,3,3,4,3,3,3,3,4,4,4,4,5,4,2,0,3,3,2,4,3), (0,2,0,3,0,1,0,1,0,0,3,2,0,0,2,0,1,0,2,1,3,3,3,1,2,3,1,0,1,0,4,2,1,1,3,3,0,4,3,3,1,4,3,3,0,3,3,2,0,0,0,0,1,0,0,2,0,0,0,0,0,4,1,0,2,3,2,2,2,1,3,3,3,4,4,3,2,0,3,1,0,3,3), (0,4,0,4,0,3,0,3,0,4,4,4,3,3,3,3,3,3,4,3,4,2,4,3,4,3,3,2,4,3,4,5,4,1,4,5,3,5,4,5,3,5,4,0,3,5,5,3,1,3,3,2,2,3,0,3,4,1,3,3,2,4,3,3,3,4,0,4,0,3,0,4,5,4,4,5,3,0,4,1,0,3,4), (0,2,0,3,0,3,0,0,0,2,2,2,1,0,1,0,0,0,3,0,3,0,3,0,1,3,1,0,3,1,3,3,3,1,3,3,3,0,1,3,1,3,4,0,0,3,1,1,0,3,2,0,0,0,0,1,3,0,1,0,0,3,3,2,0,3,0,0,0,0,0,3,4,3,4,3,3,0,3,0,0,2,3), (2,3,0,3,0,2,0,1,0,3,3,4,3,1,3,1,1,1,3,1,4,3,4,3,3,3,0,0,3,1,5,4,3,1,4,3,2,5,5,4,4,4,4,3,3,4,4,4,0,2,1,1,3,2,0,1,2,0,0,1,0,4,1,3,3,3,0,3,0,1,0,4,4,4,5,5,3,0,2,0,0,4,4), (0,2,0,1,0,3,1,3,0,2,3,3,3,0,3,1,0,0,3,0,3,2,3,1,3,2,1,1,0,0,4,2,1,0,2,3,1,4,3,2,0,4,4,3,1,3,1,3,0,1,0,0,1,0,0,0,1,0,0,0,0,4,1,1,1,2,0,3,0,0,0,3,4,2,4,3,2,0,1,0,0,3,3), 
(0,1,0,4,0,5,0,4,0,2,4,4,2,3,3,2,3,3,5,3,3,3,4,3,4,2,3,0,4,3,3,3,4,1,4,3,2,1,5,5,3,4,5,1,3,5,4,2,0,3,3,0,1,3,0,4,2,0,1,3,1,4,3,3,3,3,0,3,0,1,0,3,4,4,4,5,5,0,3,0,1,4,5), (0,2,0,3,0,3,0,0,0,2,3,1,3,0,4,0,1,1,3,0,3,4,3,2,3,1,0,3,3,2,3,1,3,0,2,3,0,2,1,4,1,2,2,0,0,3,3,0,0,2,0,0,0,1,0,0,0,0,2,2,0,3,2,1,3,3,0,2,0,2,0,0,3,3,1,2,4,0,3,0,2,2,3), (2,4,0,5,0,4,0,4,0,2,4,4,4,3,4,3,3,3,1,2,4,3,4,3,4,4,5,0,3,3,3,3,2,0,4,3,1,4,3,4,1,4,4,3,3,4,4,3,1,2,3,0,4,2,0,4,1,0,3,3,0,4,3,3,3,4,0,4,0,2,0,3,5,3,4,5,2,0,3,0,0,4,5), (0,3,0,4,0,1,0,1,0,1,3,2,2,1,3,0,3,0,2,0,2,0,3,0,2,0,0,0,1,0,1,1,0,0,3,1,0,0,0,4,0,3,1,0,2,1,3,0,0,0,0,0,0,3,0,0,0,0,0,0,0,4,2,2,3,1,0,3,0,0,0,1,4,4,4,3,0,0,4,0,0,1,4), (1,4,1,5,0,3,0,3,0,4,5,4,4,3,5,3,3,4,4,3,4,1,3,3,3,3,2,1,4,1,5,4,3,1,4,4,3,5,4,4,3,5,4,3,3,4,4,4,0,3,3,1,2,3,0,3,1,0,3,3,0,5,4,4,4,4,4,4,3,3,5,4,4,3,3,5,4,0,3,2,0,4,4), (0,2,0,3,0,1,0,0,0,1,3,3,3,2,4,1,3,0,3,1,3,0,2,2,1,1,0,0,2,0,4,3,1,0,4,3,0,4,4,4,1,4,3,1,1,3,3,1,0,2,0,0,1,3,0,0,0,0,2,0,0,4,3,2,4,3,5,4,3,3,3,4,3,3,4,3,3,0,2,1,0,3,3), (0,2,0,4,0,3,0,2,0,2,5,5,3,4,4,4,4,1,4,3,3,0,4,3,4,3,1,3,3,2,4,3,0,3,4,3,0,3,4,4,2,4,4,0,4,5,3,3,2,2,1,1,1,2,0,1,5,0,3,3,2,4,3,3,3,4,0,3,0,2,0,4,4,3,5,5,0,0,3,0,2,3,3), (0,3,0,4,0,3,0,1,0,3,4,3,3,1,3,3,3,0,3,1,3,0,4,3,3,1,1,0,3,0,3,3,0,0,4,4,0,1,5,4,3,3,5,0,3,3,4,3,0,2,0,1,1,1,0,1,3,0,1,2,1,3,3,2,3,3,0,3,0,1,0,1,3,3,4,4,1,0,1,2,2,1,3), (0,1,0,4,0,4,0,3,0,1,3,3,3,2,3,1,1,0,3,0,3,3,4,3,2,4,2,0,1,0,4,3,2,0,4,3,0,5,3,3,2,4,4,4,3,3,3,4,0,1,3,0,0,1,0,0,1,0,0,0,0,4,2,3,3,3,0,3,0,0,0,4,4,4,5,3,2,0,3,3,0,3,5), (0,2,0,3,0,0,0,3,0,1,3,0,2,0,0,0,1,0,3,1,1,3,3,0,0,3,0,0,3,0,2,3,1,0,3,1,0,3,3,2,0,4,2,2,0,2,0,0,0,4,0,0,0,0,0,0,0,0,0,0,0,2,1,2,0,1,0,1,0,0,0,1,3,1,2,0,0,0,1,0,0,1,4), (0,3,0,3,0,5,0,1,0,2,4,3,1,3,3,2,1,1,5,2,1,0,5,1,2,0,0,0,3,3,2,2,3,2,4,3,0,0,3,3,1,3,3,0,2,5,3,4,0,3,3,0,1,2,0,2,2,0,3,2,0,2,2,3,3,3,0,2,0,1,0,3,4,4,2,5,4,0,3,0,0,3,5), (0,3,0,3,0,3,0,1,0,3,3,3,3,0,3,0,2,0,2,1,1,0,2,0,1,0,0,0,2,1,0,0,1,0,3,2,0,0,3,3,1,2,3,1,0,3,3,0,0,1,0,0,0,0,0,2,0,0,0,0,0,2,3,1,2,3,0,3,0,1,0,3,2,1,0,4,3,0,1,1,0,3,3), (0,4,0,5,0,3,0,3,0,4,5,5,4,3,5,3,4,3,5,3,3,2,5,3,4,4,4,3,4,3,4,5,5,3,4,4,3,4,4,5,4,4,4,3,4,5,5,4,2,3,4,2,3,4,0,3,3,1,4,3,2,4,3,3,5,5,0,3,0,3,0,5,5,5,5,4,4,0,4,0,1,4,4), (0,4,0,4,0,3,0,3,0,3,5,4,4,2,3,2,5,1,3,2,5,1,4,2,3,2,3,3,4,3,3,3,3,2,5,4,1,3,3,5,3,4,4,0,4,4,3,1,1,3,1,0,2,3,0,2,3,0,3,0,0,4,3,1,3,4,0,3,0,2,0,4,4,4,3,4,5,0,4,0,0,3,4), (0,3,0,3,0,3,1,2,0,3,4,4,3,3,3,0,2,2,4,3,3,1,3,3,3,1,1,0,3,1,4,3,2,3,4,4,2,4,4,4,3,4,4,3,2,4,4,3,1,3,3,1,3,3,0,4,1,0,2,2,1,4,3,2,3,3,5,4,3,3,5,4,4,3,3,0,4,0,3,2,2,4,4), (0,2,0,1,0,0,0,0,0,1,2,1,3,0,0,0,0,0,2,0,1,2,1,0,0,1,0,0,0,0,3,0,0,1,0,1,1,3,1,0,0,0,1,1,0,1,1,0,0,0,0,0,2,0,0,0,0,0,0,0,0,1,1,2,2,0,3,4,0,0,0,1,1,0,0,1,0,0,0,0,0,1,1), (0,1,0,0,0,1,0,0,0,0,4,0,4,1,4,0,3,0,4,0,3,0,4,0,3,0,3,0,4,1,5,1,4,0,0,3,0,5,0,5,2,0,1,0,0,0,2,1,4,0,1,3,0,0,3,0,0,3,1,1,4,1,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0), (1,4,0,5,0,3,0,2,0,3,5,4,4,3,4,3,5,3,4,3,3,0,4,3,3,3,3,3,3,2,4,4,3,1,3,4,4,5,4,4,3,4,4,1,3,5,4,3,3,3,1,2,2,3,3,1,3,1,3,3,3,5,3,3,4,5,0,3,0,3,0,3,4,3,4,4,3,0,3,0,2,4,3), (0,1,0,4,0,0,0,0,0,1,4,0,4,1,4,2,4,0,3,0,1,0,1,0,0,0,0,0,2,0,3,1,1,1,0,3,0,0,0,1,2,1,0,0,1,1,1,1,0,1,0,0,0,1,0,0,3,0,0,0,0,3,2,0,2,2,0,1,0,0,0,2,3,2,3,3,0,0,0,0,2,1,0), (0,5,1,5,0,3,0,3,0,5,4,4,5,1,5,3,3,0,4,3,4,3,5,3,4,3,3,2,4,3,4,3,3,0,3,3,1,4,4,3,4,4,4,3,4,5,5,3,2,3,1,1,3,3,1,3,1,1,3,3,2,4,5,3,3,5,0,4,0,3,0,4,4,3,5,3,3,0,3,4,0,4,3), (0,5,0,5,0,3,0,2,0,4,4,3,5,2,4,3,3,3,4,4,4,3,5,3,5,3,3,1,4,0,4,3,3,0,3,3,0,4,4,4,4,5,4,3,3,5,5,3,2,3,1,2,3,2,0,1,0,0,3,2,2,4,4,3,1,5,0,4,0,3,0,4,3,1,3,2,1,0,3,3,0,3,3), 
(0,4,0,5,0,5,0,4,0,4,5,5,5,3,4,3,3,2,5,4,4,3,5,3,5,3,4,0,4,3,4,4,3,2,4,4,3,4,5,4,4,5,5,0,3,5,5,4,1,3,3,2,3,3,1,3,1,0,4,3,1,4,4,3,4,5,0,4,0,2,0,4,3,4,4,3,3,0,4,0,0,5,5), (0,4,0,4,0,5,0,1,1,3,3,4,4,3,4,1,3,0,5,1,3,0,3,1,3,1,1,0,3,0,3,3,4,0,4,3,0,4,4,4,3,4,4,0,3,5,4,1,0,3,0,0,2,3,0,3,1,0,3,1,0,3,2,1,3,5,0,3,0,1,0,3,2,3,3,4,4,0,2,2,0,4,4), (2,4,0,5,0,4,0,3,0,4,5,5,4,3,5,3,5,3,5,3,5,2,5,3,4,3,3,4,3,4,5,3,2,1,5,4,3,2,3,4,5,3,4,1,2,5,4,3,0,3,3,0,3,2,0,2,3,0,4,1,0,3,4,3,3,5,0,3,0,1,0,4,5,5,5,4,3,0,4,2,0,3,5), (0,5,0,4,0,4,0,2,0,5,4,3,4,3,4,3,3,3,4,3,4,2,5,3,5,3,4,1,4,3,4,4,4,0,3,5,0,4,4,4,4,5,3,1,3,4,5,3,3,3,3,3,3,3,0,2,2,0,3,3,2,4,3,3,3,5,3,4,1,3,3,5,3,2,0,0,0,0,4,3,1,3,3), (0,1,0,3,0,3,0,1,0,1,3,3,3,2,3,3,3,0,3,0,0,0,3,1,3,0,0,0,2,2,2,3,0,0,3,2,0,1,2,4,1,3,3,0,0,3,3,3,0,1,0,0,2,1,0,0,3,0,3,1,0,3,0,0,1,3,0,2,0,1,0,3,3,1,3,3,0,0,1,1,0,3,3), (0,2,0,3,0,2,1,4,0,2,2,3,1,1,3,1,1,0,2,0,3,1,2,3,1,3,0,0,1,0,4,3,2,3,3,3,1,4,2,3,3,3,3,1,0,3,1,4,0,1,1,0,1,2,0,1,1,0,1,1,0,3,1,3,2,2,0,1,0,0,0,2,3,3,3,1,0,0,0,0,0,2,3), (0,5,0,4,0,5,0,2,0,4,5,5,3,3,4,3,3,1,5,4,4,2,4,4,4,3,4,2,4,3,5,5,4,3,3,4,3,3,5,5,4,5,5,1,3,4,5,3,1,4,3,1,3,3,0,3,3,1,4,3,1,4,5,3,3,5,0,4,0,3,0,5,3,3,1,4,3,0,4,0,1,5,3), (0,5,0,5,0,4,0,2,0,4,4,3,4,3,3,3,3,3,5,4,4,4,4,4,4,5,3,3,5,2,4,4,4,3,4,4,3,3,4,4,5,5,3,3,4,3,4,3,3,4,3,3,3,3,1,2,2,1,4,3,3,5,4,4,3,4,0,4,0,3,0,4,4,4,4,4,1,0,4,2,0,2,4), (0,4,0,4,0,3,0,1,0,3,5,2,3,0,3,0,2,1,4,2,3,3,4,1,4,3,3,2,4,1,3,3,3,0,3,3,0,0,3,3,3,5,3,3,3,3,3,2,0,2,0,0,2,0,0,2,0,0,1,0,0,3,1,2,2,3,0,3,0,2,0,4,4,3,3,4,1,0,3,0,0,2,4), (0,0,0,4,0,0,0,0,0,0,1,0,1,0,2,0,0,0,0,0,1,0,2,0,1,0,0,0,0,0,3,1,3,0,3,2,0,0,0,1,0,3,2,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,3,4,0,2,0,0,0,0,0,0,2), (0,2,1,3,0,2,0,2,0,3,3,3,3,1,3,1,3,3,3,3,3,3,4,2,2,1,2,1,4,0,4,3,1,3,3,3,2,4,3,5,4,3,3,3,3,3,3,3,0,1,3,0,2,0,0,1,0,0,1,0,0,4,2,0,2,3,0,3,3,0,3,3,4,2,3,1,4,0,1,2,0,2,3), (0,3,0,3,0,1,0,3,0,2,3,3,3,0,3,1,2,0,3,3,2,3,3,2,3,2,3,1,3,0,4,3,2,0,3,3,1,4,3,3,2,3,4,3,1,3,3,1,1,0,1,1,0,1,0,1,0,1,0,0,0,4,1,1,0,3,0,3,1,0,2,3,3,3,3,3,1,0,0,2,0,3,3), (0,0,0,0,0,0,0,0,0,0,3,0,2,0,3,0,0,0,0,0,0,0,3,0,0,0,0,0,0,0,3,0,3,0,3,1,0,1,0,1,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,3,0,2,0,2,3,0,0,0,0,0,0,0,0,3), (0,2,0,3,1,3,0,3,0,2,3,3,3,1,3,1,3,1,3,1,3,3,3,1,3,0,2,3,1,1,4,3,3,2,3,3,1,2,2,4,1,3,3,0,1,4,2,3,0,1,3,0,3,0,0,1,3,0,2,0,0,3,3,2,1,3,0,3,0,2,0,3,4,4,4,3,1,0,3,0,0,3,3), (0,2,0,1,0,2,0,0,0,1,3,2,2,1,3,0,1,1,3,0,3,2,3,1,2,0,2,0,1,1,3,3,3,0,3,3,1,1,2,3,2,3,3,1,2,3,2,0,0,1,0,0,0,0,0,0,3,0,1,0,0,2,1,2,1,3,0,3,0,0,0,3,4,4,4,3,2,0,2,0,0,2,4), (0,0,0,1,0,1,0,0,0,0,1,0,0,0,1,0,0,0,0,0,0,0,1,1,1,0,0,0,0,0,0,0,0,0,2,2,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,1,3,1,0,0,0,0,0,0,0,3), (0,3,0,3,0,2,0,3,0,3,3,3,2,3,2,2,2,0,3,1,3,3,3,2,3,3,0,0,3,0,3,2,2,0,2,3,1,4,3,4,3,3,2,3,1,5,4,4,0,3,1,2,1,3,0,3,1,1,2,0,2,3,1,3,1,3,0,3,0,1,0,3,3,4,4,2,1,0,2,1,0,2,4), (0,1,0,3,0,1,0,2,0,1,4,2,5,1,4,0,2,0,2,1,3,1,4,0,2,1,0,0,2,1,4,1,1,0,3,3,0,5,1,3,2,3,3,1,0,3,2,3,0,1,0,0,0,0,0,0,1,0,0,0,0,4,0,1,0,3,0,2,0,1,0,3,3,3,4,3,3,0,0,0,0,2,3), (0,0,0,1,0,0,0,0,0,0,2,0,1,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,3,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2,1,0,0,1,0,0,0,0,0,3), (0,1,0,3,0,4,0,3,0,2,4,3,1,0,3,2,2,1,3,1,2,2,3,1,1,1,2,1,3,0,1,2,0,1,3,2,1,3,0,5,5,1,0,0,1,3,2,1,0,3,0,0,1,0,0,0,0,0,3,4,0,1,1,1,3,2,0,2,0,1,0,2,3,3,1,2,3,0,1,0,1,0,4), (0,0,0,1,0,3,0,3,0,2,2,1,0,0,4,0,3,0,3,1,3,0,3,0,3,0,1,0,3,0,3,1,3,0,3,3,0,0,1,2,1,1,1,0,1,2,0,0,0,1,0,0,1,0,0,0,0,0,0,0,0,2,2,1,2,0,0,2,0,0,0,0,2,3,3,3,3,0,0,0,0,1,4), 
(0,0,0,3,0,3,0,0,0,0,3,1,1,0,3,0,1,0,2,0,1,0,0,0,0,0,0,0,1,0,3,0,2,0,2,3,0,0,2,2,3,1,2,0,0,1,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,3,0,0,2,0,0,0,0,2,3), (2,4,0,5,0,5,0,4,0,3,4,3,3,3,4,3,3,3,4,3,4,4,5,4,5,5,5,2,3,0,5,5,4,1,5,4,3,1,5,4,3,4,4,3,3,4,3,3,0,3,2,0,2,3,0,3,0,0,3,3,0,5,3,2,3,3,0,3,0,3,0,3,4,5,4,5,3,0,4,3,0,3,4), (0,3,0,3,0,3,0,3,0,3,3,4,3,2,3,2,3,0,4,3,3,3,3,3,3,3,3,0,3,2,4,3,3,1,3,4,3,4,4,4,3,4,4,3,2,4,4,1,0,2,0,0,1,1,0,2,0,0,3,1,0,5,3,2,1,3,0,3,0,1,2,4,3,2,4,3,3,0,3,2,0,4,4), (0,3,0,3,0,1,0,0,0,1,4,3,3,2,3,1,3,1,4,2,3,2,4,2,3,4,3,0,2,2,3,3,3,0,3,3,3,0,3,4,1,3,3,0,3,4,3,3,0,1,1,0,1,0,0,0,4,0,3,0,0,3,1,2,1,3,0,4,0,1,0,4,3,3,4,3,3,0,2,0,0,3,3), (0,3,0,4,0,1,0,3,0,3,4,3,3,0,3,3,3,1,3,1,3,3,4,3,3,3,0,0,3,1,5,3,3,1,3,3,2,5,4,3,3,4,5,3,2,5,3,4,0,1,0,0,0,0,0,2,0,0,1,1,0,4,2,2,1,3,0,3,0,2,0,4,4,3,5,3,2,0,1,1,0,3,4), (0,5,0,4,0,5,0,2,0,4,4,3,3,2,3,3,3,1,4,3,4,1,5,3,4,3,4,0,4,2,4,3,4,1,5,4,0,4,4,4,4,5,4,1,3,5,4,2,1,4,1,1,3,2,0,3,1,0,3,2,1,4,3,3,3,4,0,4,0,3,0,4,4,4,3,3,3,0,4,2,0,3,4), (1,4,0,4,0,3,0,1,0,3,3,3,1,1,3,3,2,2,3,3,1,0,3,2,2,1,2,0,3,1,2,1,2,0,3,2,0,2,2,3,3,4,3,0,3,3,1,2,0,1,1,3,1,2,0,0,3,0,1,1,0,3,2,2,3,3,0,3,0,0,0,2,3,3,4,3,3,0,1,0,0,1,4), (0,4,0,4,0,4,0,0,0,3,4,4,3,1,4,2,3,2,3,3,3,1,4,3,4,0,3,0,4,2,3,3,2,2,5,4,2,1,3,4,3,4,3,1,3,3,4,2,0,2,1,0,3,3,0,0,2,0,3,1,0,4,4,3,4,3,0,4,0,1,0,2,4,4,4,4,4,0,3,2,0,3,3), (0,0,0,1,0,4,0,0,0,0,0,0,1,1,1,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,1,0,3,2,0,0,1,0,0,0,1,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,2), (0,2,0,3,0,4,0,4,0,1,3,3,3,0,4,0,2,1,2,1,1,1,2,0,3,1,1,0,1,0,3,1,0,0,3,3,2,0,1,1,0,0,0,0,0,1,0,2,0,2,2,0,3,1,0,0,1,0,1,1,0,1,2,0,3,0,0,0,0,1,0,0,3,3,4,3,1,0,1,0,3,0,2), (0,0,0,3,0,5,0,0,0,0,1,0,2,0,3,1,0,1,3,0,0,0,2,0,0,0,1,0,0,0,1,1,0,0,4,0,0,0,2,3,0,1,4,1,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,3,0,0,0,0,0,1,0,0,0,0,0,0,0,2,0,0,3,0,0,0,0,0,3), (0,2,0,5,0,5,0,1,0,2,4,3,3,2,5,1,3,2,3,3,3,0,4,1,2,0,3,0,4,0,2,2,1,1,5,3,0,0,1,4,2,3,2,0,3,3,3,2,0,2,4,1,1,2,0,1,1,0,3,1,0,1,3,1,2,3,0,2,0,0,0,1,3,5,4,4,4,0,3,0,0,1,3), (0,4,0,5,0,4,0,4,0,4,5,4,3,3,4,3,3,3,4,3,4,4,5,3,4,5,4,2,4,2,3,4,3,1,4,4,1,3,5,4,4,5,5,4,4,5,5,5,2,3,3,1,4,3,1,3,3,0,3,3,1,4,3,4,4,4,0,3,0,4,0,3,3,4,4,5,0,0,4,3,0,4,5), (0,4,0,4,0,3,0,3,0,3,4,4,4,3,3,2,4,3,4,3,4,3,5,3,4,3,2,1,4,2,4,4,3,1,3,4,2,4,5,5,3,4,5,4,1,5,4,3,0,3,2,2,3,2,1,3,1,0,3,3,3,5,3,3,3,5,4,4,2,3,3,4,3,3,3,2,1,0,3,2,1,4,3), (0,4,0,5,0,4,0,3,0,3,5,5,3,2,4,3,4,0,5,4,4,1,4,4,4,3,3,3,4,3,5,5,2,3,3,4,1,2,5,5,3,5,5,2,3,5,5,4,0,3,2,0,3,3,1,1,5,1,4,1,0,4,3,2,3,5,0,4,0,3,0,5,4,3,4,3,0,0,4,1,0,4,4), (1,3,0,4,0,2,0,2,0,2,5,5,3,3,3,3,3,0,4,2,3,4,4,4,3,4,0,0,3,4,5,4,3,3,3,3,2,5,5,4,5,5,5,4,3,5,5,5,1,3,1,0,1,0,0,3,2,0,4,2,0,5,2,3,2,4,1,3,0,3,0,4,5,4,5,4,3,0,4,2,0,5,4), (0,3,0,4,0,5,0,3,0,3,4,4,3,2,3,2,3,3,3,3,3,2,4,3,3,2,2,0,3,3,3,3,3,1,3,3,3,0,4,4,3,4,4,1,1,4,4,2,0,3,1,0,1,1,0,4,1,0,2,3,1,3,3,1,3,4,0,3,0,1,0,3,1,3,0,0,1,0,2,0,0,4,4), (0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0), (0,3,0,3,0,2,0,3,0,1,5,4,3,3,3,1,4,2,1,2,3,4,4,2,4,4,5,0,3,1,4,3,4,0,4,3,3,3,2,3,2,5,3,4,3,2,2,3,0,0,3,0,2,1,0,1,2,0,0,0,0,2,1,1,3,1,0,2,0,4,0,3,4,4,4,5,2,0,2,0,0,1,3), (0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,1,1,1,0,0,1,1,0,0,0,4,2,1,1,0,1,0,3,2,0,0,3,1,1,1,2,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,3,0,1,0,0,0,2,0,0,0,1,4,0,4,2,1,0,0,0,0,0,1), (0,0,0,0,0,0,0,0,0,1,0,1,0,0,0,0,1,0,0,0,0,0,0,1,0,1,0,0,0,0,3,1,0,0,0,2,0,2,1,0,0,1,2,1,0,1,1,0,0,3,0,0,0,0,0,0,0,0,0,0,0,1,3,1,0,0,0,0,0,1,0,0,2,1,0,0,0,0,0,0,0,0,2), 
(0,4,0,4,0,4,0,3,0,4,4,3,4,2,4,3,2,0,4,4,4,3,5,3,5,3,3,2,4,2,4,3,4,3,1,4,0,2,3,4,4,4,3,3,3,4,4,4,3,4,1,3,4,3,2,1,2,1,3,3,3,4,4,3,3,5,0,4,0,3,0,4,3,3,3,2,1,0,3,0,0,3,3), (0,4,0,3,0,3,0,3,0,3,5,5,3,3,3,3,4,3,4,3,3,3,4,4,4,3,3,3,3,4,3,5,3,3,1,3,2,4,5,5,5,5,4,3,4,5,5,3,2,2,3,3,3,3,2,3,3,1,2,3,2,4,3,3,3,4,0,4,0,2,0,4,3,2,2,1,2,0,3,0,0,4,1), ) class JapaneseContextAnalysis(object): NUM_OF_CATEGORY = 6 DONT_KNOW = -1 ENOUGH_REL_THRESHOLD = 100 MAX_REL_THRESHOLD = 1000 MINIMUM_DATA_THRESHOLD = 4 def __init__(self): self._total_rel = None self._rel_sample = None self._need_to_skip_char_num = None self._last_char_order = None self._done = None self.reset() def reset(self): self._total_rel = 0 # total sequence received # category counters, each integer counts sequence in its category self._rel_sample = [0] * self.NUM_OF_CATEGORY # if last byte in current buffer is not the last byte of a character, # we need to know how many bytes to skip in next buffer self._need_to_skip_char_num = 0 self._last_char_order = -1 # The order of previous char # If this flag is set to True, detection is done and conclusion has # been made self._done = False def feed(self, byte_str, num_bytes): if self._done: return # The buffer we got is byte oriented, and a character may span in more than one # buffers. In case the last one or two byte in last buffer is not # complete, we record how many byte needed to complete that character # and skip these bytes here. We can choose to record those bytes as # well and analyse the character once it is complete, but since a # character will not make much difference, by simply skipping # this character will simply our logic and improve performance. i = self._need_to_skip_char_num while i < num_bytes: order, char_len = self.get_order(byte_str[i:i + 2]) i += char_len if i > num_bytes: self._need_to_skip_char_num = i - num_bytes self._last_char_order = -1 else: if (order != -1) and (self._last_char_order != -1): self._total_rel += 1 if self._total_rel > self.MAX_REL_THRESHOLD: self._done = True break self._rel_sample[jp2CharContext[self._last_char_order][order]] += 1 self._last_char_order = order def got_enough_data(self): return self._total_rel > self.ENOUGH_REL_THRESHOLD def get_confidence(self): # This is just one way to calculate confidence. It works well for me. 
if self._total_rel > self.MINIMUM_DATA_THRESHOLD: return (self._total_rel - self._rel_sample[0]) / self._total_rel else: return self.DONT_KNOW def get_order(self, byte_str): return -1, 1 class SJISContextAnalysis(JapaneseContextAnalysis): def __init__(self): super(SJISContextAnalysis, self).__init__() self._charset_name = "SHIFT_JIS" @property def charset_name(self): return self._charset_name def get_order(self, byte_str): if not byte_str: return -1, 1 # find out current char's byte length first_char = byte_str[0] if (0x81 <= first_char <= 0x9F) or (0xE0 <= first_char <= 0xFC): char_len = 2 if (first_char == 0x87) or (0xFA <= first_char <= 0xFC): self._charset_name = "CP932" else: char_len = 1 # return its order if it is hiragana if len(byte_str) > 1: second_char = byte_str[1] if (first_char == 202) and (0x9F <= second_char <= 0xF1): return second_char - 0x9F, char_len return -1, char_len class EUCJPContextAnalysis(JapaneseContextAnalysis): def get_order(self, byte_str): if not byte_str: return -1, 1 # find out current char's byte length first_char = byte_str[0] if (first_char == 0x8E) or (0xA1 <= first_char <= 0xFE): char_len = 2 elif first_char == 0x8F: char_len = 3 else: char_len = 1 # return its order if it is hiragana if len(byte_str) > 1: second_char = byte_str[1] if (first_char == 0xA4) and (0xA1 <= second_char <= 0xF3): return second_char - 0xA1, char_len return -1, char_len PKZo[[3site-packages/pip/_vendor/chardet/langthaimodel.pycnu[ abc@s@dZdZied6ed6dd6ed6dd6dd6ZdS(iiiiiijikidiiiiei^iiiliminioiiiiYi_ipiqiiiii@iHiIiriJisitifiQiiuiZigiNiRi`ii[iOiTihiiiaibi\iiiiiiiXiiiiiiiiviiiiiciUiSiiiiiiiiiiiiiiiiiiKiii4i"i3iwi/i:i9i1i5i7i+iii,ii0iiii'i>ii6i-i iii=iii i*i.iiiLiiBi?ii ii$ii i(ii i#iViiiiii ii)ii!ii2i%iiiCiMi&i]iiiDi8i;iAiEi<iFiPiGiWiiiiitchar_to_order_maptprecedence_matrixg@?ttypical_positive_ratiotkeep_english_lettersTIS-620t charset_nametThaitlanguageN(iiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiijikidiiiiei^iiiliminioiiiiYi_ipiqiiiiiiiiiii@iHiIiriJisitifiQiiuiZigiNiRi`ii[iOiTihiiiaibi\iiiiiiiiiiiiXiiiiiiiiviiiiiciUiSiiiiiiiiiiiiiiiiiiKiii4i"i3iwi/i:i9i1i5i7i+iii,ii0iiii'i>ii6i-i iii=iii i*i.iiiLiiBi?ii ii$ii i(ii i#iViiiiii 
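The jpcntx.py source above scores hiragana bigram context; the EUC-JP and Shift_JIS probers feed it alongside their state machines. A hedged sketch of that use with an illustrative EUC-JP sample string (import path again assumes the vendored copy):

from pip._vendor.chardet.jpcntx import EUCJPContextAnalysis

context = EUCJPContextAnalysis()
sample = u'これはテストです。ひらがなのならびで文脈らしさを数えます。'.encode('euc-jp')
data = bytearray(sample)

# feed() takes the raw bytes plus how many of them to analyse
context.feed(data, len(data))

if context.got_enough_data():
    print('hiragana bigram confidence: %.3f' % context.get_confidence())
else:
    # short samples stay below ENOUGH_REL_THRESHOLD; get_confidence() may return DONT_KNOW (-1)
    print('sample too short, raw value: %s' % context.get_confidence())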
ii)ii!ii2i%iiiCiMi&i]iiiDi8i;iAiEi<iFiPiGiWiiiiii(iiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiii
iiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiii(tTIS620CharToOrderMapt ThaiLangModeltFalsetTIS620ThaiModel(((sE/usr/lib/python2.7/site-packages/pip/_vendor/chardet/langthaimodel.pyt%s, PKZuG=006site-packages/pip/_vendor/chardet/universaldetector.pynu[######################## BEGIN LICENSE BLOCK ######################## # The Original Code is Mozilla Universal charset detector code. # # The Initial Developer of the Original Code is # Netscape Communications Corporation. # Portions created by the Initial Developer are Copyright (C) 2001 # the Initial Developer. All Rights Reserved. # # Contributor(s): # Mark Pilgrim - port to Python # Shy Shalom - original C code # # This library is free software; you can redistribute it and/or # modify it under the terms of the GNU Lesser General Public # License as published by the Free Software Foundation; either # version 2.1 of the License, or (at your option) any later version. # # This library is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU # Lesser General Public License for more details. # # You should have received a copy of the GNU Lesser General Public # License along with this library; if not, write to the Free Software # Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA # 02110-1301 USA ######################### END LICENSE BLOCK ######################### """ Module containing the UniversalDetector detector class, which is the primary class a user of ``chardet`` should use. :author: Mark Pilgrim (initial port to Python) :author: Shy Shalom (original C code) :author: Dan Blanchard (major refactoring for 3.0) :author: Ian Cordasco """ import codecs import logging import re from .charsetgroupprober import CharSetGroupProber from .enums import InputState, LanguageFilter, ProbingState from .escprober import EscCharSetProber from .latin1prober import Latin1Prober from .mbcsgroupprober import MBCSGroupProber from .sbcsgroupprober import SBCSGroupProber class UniversalDetector(object): """ The ``UniversalDetector`` class underlies the ``chardet.detect`` function and coordinates all of the different charset probers. To get a ``dict`` containing an encoding and its confidence, you can simply run: .. 
code:: u = UniversalDetector() u.feed(some_bytes) u.close() detected = u.result """ MINIMUM_THRESHOLD = 0.20 HIGH_BYTE_DETECTOR = re.compile(b'[\x80-\xFF]') ESC_DETECTOR = re.compile(b'(\033|~{)') WIN_BYTE_DETECTOR = re.compile(b'[\x80-\x9F]') ISO_WIN_MAP = {'iso-8859-1': 'Windows-1252', 'iso-8859-2': 'Windows-1250', 'iso-8859-5': 'Windows-1251', 'iso-8859-6': 'Windows-1256', 'iso-8859-7': 'Windows-1253', 'iso-8859-8': 'Windows-1255', 'iso-8859-9': 'Windows-1254', 'iso-8859-13': 'Windows-1257'} def __init__(self, lang_filter=LanguageFilter.ALL): self._esc_charset_prober = None self._charset_probers = [] self.result = None self.done = None self._got_data = None self._input_state = None self._last_char = None self.lang_filter = lang_filter self.logger = logging.getLogger(__name__) self._has_win_bytes = None self.reset() def reset(self): """ Reset the UniversalDetector and all of its probers back to their initial states. This is called by ``__init__``, so you only need to call this directly in between analyses of different documents. """ self.result = {'encoding': None, 'confidence': 0.0, 'language': None} self.done = False self._got_data = False self._has_win_bytes = False self._input_state = InputState.PURE_ASCII self._last_char = b'' if self._esc_charset_prober: self._esc_charset_prober.reset() for prober in self._charset_probers: prober.reset() def feed(self, byte_str): """ Takes a chunk of a document and feeds it through all of the relevant charset probers. After calling ``feed``, you can check the value of the ``done`` attribute to see if you need to continue feeding the ``UniversalDetector`` more data, or if it has made a prediction (in the ``result`` attribute). .. note:: You should always call ``close`` when you're done feeding in your document if ``done`` is not already ``True``. 
""" if self.done: return if not len(byte_str): return if not isinstance(byte_str, bytearray): byte_str = bytearray(byte_str) # First check for known BOMs, since these are guaranteed to be correct if not self._got_data: # If the data starts with BOM, we know it is UTF if byte_str.startswith(codecs.BOM_UTF8): # EF BB BF UTF-8 with BOM self.result = {'encoding': "UTF-8-SIG", 'confidence': 1.0, 'language': ''} elif byte_str.startswith((codecs.BOM_UTF32_LE, codecs.BOM_UTF32_BE)): # FF FE 00 00 UTF-32, little-endian BOM # 00 00 FE FF UTF-32, big-endian BOM self.result = {'encoding': "UTF-32", 'confidence': 1.0, 'language': ''} elif byte_str.startswith(b'\xFE\xFF\x00\x00'): # FE FF 00 00 UCS-4, unusual octet order BOM (3412) self.result = {'encoding': "X-ISO-10646-UCS-4-3412", 'confidence': 1.0, 'language': ''} elif byte_str.startswith(b'\x00\x00\xFF\xFE'): # 00 00 FF FE UCS-4, unusual octet order BOM (2143) self.result = {'encoding': "X-ISO-10646-UCS-4-2143", 'confidence': 1.0, 'language': ''} elif byte_str.startswith((codecs.BOM_LE, codecs.BOM_BE)): # FF FE UTF-16, little endian BOM # FE FF UTF-16, big endian BOM self.result = {'encoding': "UTF-16", 'confidence': 1.0, 'language': ''} self._got_data = True if self.result['encoding'] is not None: self.done = True return # If none of those matched and we've only see ASCII so far, check # for high bytes and escape sequences if self._input_state == InputState.PURE_ASCII: if self.HIGH_BYTE_DETECTOR.search(byte_str): self._input_state = InputState.HIGH_BYTE elif self._input_state == InputState.PURE_ASCII and \ self.ESC_DETECTOR.search(self._last_char + byte_str): self._input_state = InputState.ESC_ASCII self._last_char = byte_str[-1:] # If we've seen escape sequences, use the EscCharSetProber, which # uses a simple state machine to check for known escape sequences in # HZ and ISO-2022 encodings, since those are the only encodings that # use such sequences. if self._input_state == InputState.ESC_ASCII: if not self._esc_charset_prober: self._esc_charset_prober = EscCharSetProber(self.lang_filter) if self._esc_charset_prober.feed(byte_str) == ProbingState.FOUND_IT: self.result = {'encoding': self._esc_charset_prober.charset_name, 'confidence': self._esc_charset_prober.get_confidence(), 'language': self._esc_charset_prober.language} self.done = True # If we've seen high bytes (i.e., those with values greater than 127), # we need to do more complicated checks using all our multi-byte and # single-byte probers that are left. The single-byte probers # use character bigram distributions to determine the encoding, whereas # the multi-byte probers use a combination of character unigram and # bigram distributions. elif self._input_state == InputState.HIGH_BYTE: if not self._charset_probers: self._charset_probers = [MBCSGroupProber(self.lang_filter)] # If we're checking non-CJK encodings, use single-byte prober if self.lang_filter & LanguageFilter.NON_CJK: self._charset_probers.append(SBCSGroupProber()) self._charset_probers.append(Latin1Prober()) for prober in self._charset_probers: if prober.feed(byte_str) == ProbingState.FOUND_IT: self.result = {'encoding': prober.charset_name, 'confidence': prober.get_confidence(), 'language': prober.language} self.done = True break if self.WIN_BYTE_DETECTOR.search(byte_str): self._has_win_bytes = True def close(self): """ Stop analyzing the current document and come up with a final prediction. :returns: The ``result`` attribute, a ``dict`` with the keys `encoding`, `confidence`, and `language`. 
""" # Don't bother with checks if we're already done if self.done: return self.result self.done = True if not self._got_data: self.logger.debug('no data received!') # Default to ASCII if it is all we've seen so far elif self._input_state == InputState.PURE_ASCII: self.result = {'encoding': 'ascii', 'confidence': 1.0, 'language': ''} # If we have seen non-ASCII, return the best that met MINIMUM_THRESHOLD elif self._input_state == InputState.HIGH_BYTE: prober_confidence = None max_prober_confidence = 0.0 max_prober = None for prober in self._charset_probers: if not prober: continue prober_confidence = prober.get_confidence() if prober_confidence > max_prober_confidence: max_prober_confidence = prober_confidence max_prober = prober if max_prober and (max_prober_confidence > self.MINIMUM_THRESHOLD): charset_name = max_prober.charset_name lower_charset_name = max_prober.charset_name.lower() confidence = max_prober.get_confidence() # Use Windows encoding name instead of ISO-8859 if we saw any # extra Windows-specific bytes if lower_charset_name.startswith('iso-8859'): if self._has_win_bytes: charset_name = self.ISO_WIN_MAP.get(lower_charset_name, charset_name) self.result = {'encoding': charset_name, 'confidence': confidence, 'language': max_prober.language} # Log all prober confidences if none met MINIMUM_THRESHOLD if self.logger.getEffectiveLevel() == logging.DEBUG: if self.result['encoding'] is None: self.logger.debug('no probers hit minimum threshold') for group_prober in self._charset_probers: if not group_prober: continue if isinstance(group_prober, CharSetGroupProber): for prober in group_prober.probers: self.logger.debug('%s %s confidence = %s', prober.charset_name, prober.language, prober.get_confidence()) else: self.logger.debug('%s %s confidence = %s', prober.charset_name, prober.language, prober.get_confidence()) return self.result PKZS-site-packages/pip/_vendor/chardet/__init__.pynu[######################## BEGIN LICENSE BLOCK ######################## # This library is free software; you can redistribute it and/or # modify it under the terms of the GNU Lesser General Public # License as published by the Free Software Foundation; either # version 2.1 of the License, or (at your option) any later version. # # This library is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU # Lesser General Public License for more details. # # You should have received a copy of the GNU Lesser General Public # License along with this library; if not, write to the Free Software # Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA # 02110-1301 USA ######################### END LICENSE BLOCK ######################### from .compat import PY2, PY3 from .universaldetector import UniversalDetector from .version import __version__, VERSION def detect(byte_str): """ Detect the encoding of the given byte string. :param byte_str: The byte sequence to examine. :type byte_str: ``bytes`` or ``bytearray`` """ if not isinstance(byte_str, bytearray): if not isinstance(byte_str, bytes): raise TypeError('Expected object of type bytes or bytearray, got: ' '{0}'.format(type(byte_str))) else: byte_str = bytearray(byte_str) detector = UniversalDetector() detector.feed(byte_str) return detector.close() PKZIՅ{{.site-packages/pip/_vendor/chardet/euctwfreq.pynu[######################## BEGIN LICENSE BLOCK ######################## # The Original Code is Mozilla Communicator client code. 
# # The Initial Developer of the Original Code is # Netscape Communications Corporation. # Portions created by the Initial Developer are Copyright (C) 1998 # the Initial Developer. All Rights Reserved. # # Contributor(s): # Mark Pilgrim - port to Python # # This library is free software; you can redistribute it and/or # modify it under the terms of the GNU Lesser General Public # License as published by the Free Software Foundation; either # version 2.1 of the License, or (at your option) any later version. # # This library is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU # Lesser General Public License for more details. # # You should have received a copy of the GNU Lesser General Public # License along with this library; if not, write to the Free Software # Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA # 02110-1301 USA ######################### END LICENSE BLOCK ######################### # EUCTW frequency table # Converted from big5 work # by Taiwan's Mandarin Promotion Council # # 128 --> 0.42261 # 256 --> 0.57851 # 512 --> 0.74851 # 1024 --> 0.89384 # 2048 --> 0.97583 # # Idea Distribution Ratio = 0.74851/(1-0.74851) =2.98 # Random Distribution Ration = 512/(5401-512)=0.105 # # Typical Distribution Ratio about 25% of Ideal one, still much higher than RDR EUCTW_TYPICAL_DISTRIBUTION_RATIO = 0.75 # Char to FreqOrder table , EUCTW_TABLE_SIZE = 5376 EUCTW_CHAR_TO_FREQ_ORDER = ( 1,1800,1506, 255,1431, 198, 9, 82, 6,7310, 177, 202,3615,1256,2808, 110, # 2742 3735, 33,3241, 261, 76, 44,2113, 16,2931,2184,1176, 659,3868, 26,3404,2643, # 2758 1198,3869,3313,4060, 410,2211, 302, 590, 361,1963, 8, 204, 58,4296,7311,1931, # 2774 63,7312,7313, 317,1614, 75, 222, 159,4061,2412,1480,7314,3500,3068, 224,2809, # 2790 3616, 3, 10,3870,1471, 29,2774,1135,2852,1939, 873, 130,3242,1123, 312,7315, # 2806 4297,2051, 507, 252, 682,7316, 142,1914, 124, 206,2932, 34,3501,3173, 64, 604, # 2822 7317,2494,1976,1977, 155,1990, 645, 641,1606,7318,3405, 337, 72, 406,7319, 80, # 2838 630, 238,3174,1509, 263, 939,1092,2644, 756,1440,1094,3406, 449, 69,2969, 591, # 2854 179,2095, 471, 115,2034,1843, 60, 50,2970, 134, 806,1868, 734,2035,3407, 180, # 2870 995,1607, 156, 537,2893, 688,7320, 319,1305, 779,2144, 514,2374, 298,4298, 359, # 2886 2495, 90,2707,1338, 663, 11, 906,1099,2545, 20,2436, 182, 532,1716,7321, 732, # 2902 1376,4062,1311,1420,3175, 25,2312,1056, 113, 399, 382,1949, 242,3408,2467, 529, # 2918 3243, 475,1447,3617,7322, 117, 21, 656, 810,1297,2295,2329,3502,7323, 126,4063, # 2934 706, 456, 150, 613,4299, 71,1118,2036,4064, 145,3069, 85, 835, 486,2114,1246, # 2950 1426, 428, 727,1285,1015, 800, 106, 623, 303,1281,7324,2127,2354, 347,3736, 221, # 2966 3503,3110,7325,1955,1153,4065, 83, 296,1199,3070, 192, 624, 93,7326, 822,1897, # 2982 2810,3111, 795,2064, 991,1554,1542,1592, 27, 43,2853, 859, 139,1456, 860,4300, # 2998 437, 712,3871, 164,2392,3112, 695, 211,3017,2096, 195,3872,1608,3504,3505,3618, # 3014 3873, 234, 811,2971,2097,3874,2229,1441,3506,1615,2375, 668,2076,1638, 305, 228, # 3030 1664,4301, 467, 415,7327, 262,2098,1593, 239, 108, 300, 200,1033, 512,1247,2077, # 3046 7328,7329,2173,3176,3619,2673, 593, 845,1062,3244, 88,1723,2037,3875,1950, 212, # 3062 266, 152, 149, 468,1898,4066,4302, 77, 187,7330,3018, 37, 5,2972,7331,3876, # 3078 7332,7333, 39,2517,4303,2894,3177,2078, 55, 148, 74,4304, 545, 483,1474,1029, # 3094 1665, 
217,1869,1531,3113,1104,2645,4067, 24, 172,3507, 900,3877,3508,3509,4305, # 3110 32,1408,2811,1312, 329, 487,2355,2247,2708, 784,2674, 4,3019,3314,1427,1788, # 3126 188, 109, 499,7334,3620,1717,1789, 888,1217,3020,4306,7335,3510,7336,3315,1520, # 3142 3621,3878, 196,1034, 775,7337,7338, 929,1815, 249, 439, 38,7339,1063,7340, 794, # 3158 3879,1435,2296, 46, 178,3245,2065,7341,2376,7342, 214,1709,4307, 804, 35, 707, # 3174 324,3622,1601,2546, 140, 459,4068,7343,7344,1365, 839, 272, 978,2257,2572,3409, # 3190 2128,1363,3623,1423, 697, 100,3071, 48, 70,1231, 495,3114,2193,7345,1294,7346, # 3206 2079, 462, 586,1042,3246, 853, 256, 988, 185,2377,3410,1698, 434,1084,7347,3411, # 3222 314,2615,2775,4308,2330,2331, 569,2280, 637,1816,2518, 757,1162,1878,1616,3412, # 3238 287,1577,2115, 768,4309,1671,2854,3511,2519,1321,3737, 909,2413,7348,4069, 933, # 3254 3738,7349,2052,2356,1222,4310, 765,2414,1322, 786,4311,7350,1919,1462,1677,2895, # 3270 1699,7351,4312,1424,2437,3115,3624,2590,3316,1774,1940,3413,3880,4070, 309,1369, # 3286 1130,2812, 364,2230,1653,1299,3881,3512,3882,3883,2646, 525,1085,3021, 902,2000, # 3302 1475, 964,4313, 421,1844,1415,1057,2281, 940,1364,3116, 376,4314,4315,1381, 7, # 3318 2520, 983,2378, 336,1710,2675,1845, 321,3414, 559,1131,3022,2742,1808,1132,1313, # 3334 265,1481,1857,7352, 352,1203,2813,3247, 167,1089, 420,2814, 776, 792,1724,3513, # 3350 4071,2438,3248,7353,4072,7354, 446, 229, 333,2743, 901,3739,1200,1557,4316,2647, # 3366 1920, 395,2744,2676,3740,4073,1835, 125, 916,3178,2616,4317,7355,7356,3741,7357, # 3382 7358,7359,4318,3117,3625,1133,2547,1757,3415,1510,2313,1409,3514,7360,2145, 438, # 3398 2591,2896,2379,3317,1068, 958,3023, 461, 311,2855,2677,4074,1915,3179,4075,1978, # 3414 383, 750,2745,2617,4076, 274, 539, 385,1278,1442,7361,1154,1964, 384, 561, 210, # 3430 98,1295,2548,3515,7362,1711,2415,1482,3416,3884,2897,1257, 129,7363,3742, 642, # 3446 523,2776,2777,2648,7364, 141,2231,1333, 68, 176, 441, 876, 907,4077, 603,2592, # 3462 710, 171,3417, 404, 549, 18,3118,2393,1410,3626,1666,7365,3516,4319,2898,4320, # 3478 7366,2973, 368,7367, 146, 366, 99, 871,3627,1543, 748, 807,1586,1185, 22,2258, # 3494 379,3743,3180,7368,3181, 505,1941,2618,1991,1382,2314,7369, 380,2357, 218, 702, # 3510 1817,1248,3418,3024,3517,3318,3249,7370,2974,3628, 930,3250,3744,7371, 59,7372, # 3526 585, 601,4078, 497,3419,1112,1314,4321,1801,7373,1223,1472,2174,7374, 749,1836, # 3542 690,1899,3745,1772,3885,1476, 429,1043,1790,2232,2116, 917,4079, 447,1086,1629, # 3558 7375, 556,7376,7377,2020,1654, 844,1090, 105, 550, 966,1758,2815,1008,1782, 686, # 3574 1095,7378,2282, 793,1602,7379,3518,2593,4322,4080,2933,2297,4323,3746, 980,2496, # 3590 544, 353, 527,4324, 908,2678,2899,7380, 381,2619,1942,1348,7381,1341,1252, 560, # 3606 3072,7382,3420,2856,7383,2053, 973, 886,2080, 143,4325,7384,7385, 157,3886, 496, # 3622 4081, 57, 840, 540,2038,4326,4327,3421,2117,1445, 970,2259,1748,1965,2081,4082, # 3638 3119,1234,1775,3251,2816,3629, 773,1206,2129,1066,2039,1326,3887,1738,1725,4083, # 3654 279,3120, 51,1544,2594, 423,1578,2130,2066, 173,4328,1879,7386,7387,1583, 264, # 3670 610,3630,4329,2439, 280, 154,7388,7389,7390,1739, 338,1282,3073, 693,2857,1411, # 3686 1074,3747,2440,7391,4330,7392,7393,1240, 952,2394,7394,2900,1538,2679, 685,1483, # 3702 4084,2468,1436, 953,4085,2054,4331, 671,2395, 79,4086,2441,3252, 608, 567,2680, # 3718 3422,4087,4088,1691, 393,1261,1791,2396,7395,4332,7396,7397,7398,7399,1383,1672, # 3734 3748,3182,1464, 522,1119, 661,1150, 216, 675,4333,3888,1432,3519, 
609,4334,2681, # 3750 2397,7400,7401,7402,4089,3025, 0,7403,2469, 315, 231,2442, 301,3319,4335,2380, # 3766 7404, 233,4090,3631,1818,4336,4337,7405, 96,1776,1315,2082,7406, 257,7407,1809, # 3782 3632,2709,1139,1819,4091,2021,1124,2163,2778,1777,2649,7408,3074, 363,1655,3183, # 3798 7409,2975,7410,7411,7412,3889,1567,3890, 718, 103,3184, 849,1443, 341,3320,2934, # 3814 1484,7413,1712, 127, 67, 339,4092,2398, 679,1412, 821,7414,7415, 834, 738, 351, # 3830 2976,2146, 846, 235,1497,1880, 418,1992,3749,2710, 186,1100,2147,2746,3520,1545, # 3846 1355,2935,2858,1377, 583,3891,4093,2573,2977,7416,1298,3633,1078,2549,3634,2358, # 3862 78,3750,3751, 267,1289,2099,2001,1594,4094, 348, 369,1274,2194,2175,1837,4338, # 3878 1820,2817,3635,2747,2283,2002,4339,2936,2748, 144,3321, 882,4340,3892,2749,3423, # 3894 4341,2901,7417,4095,1726, 320,7418,3893,3026, 788,2978,7419,2818,1773,1327,2859, # 3910 3894,2819,7420,1306,4342,2003,1700,3752,3521,2359,2650, 787,2022, 506, 824,3636, # 3926 534, 323,4343,1044,3322,2023,1900, 946,3424,7421,1778,1500,1678,7422,1881,4344, # 3942 165, 243,4345,3637,2521, 123, 683,4096, 764,4346, 36,3895,1792, 589,2902, 816, # 3958 626,1667,3027,2233,1639,1555,1622,3753,3896,7423,3897,2860,1370,1228,1932, 891, # 3974 2083,2903, 304,4097,7424, 292,2979,2711,3522, 691,2100,4098,1115,4347, 118, 662, # 3990 7425, 611,1156, 854,2381,1316,2861, 2, 386, 515,2904,7426,7427,3253, 868,2234, # 4006 1486, 855,2651, 785,2212,3028,7428,1040,3185,3523,7429,3121, 448,7430,1525,7431, # 4022 2164,4348,7432,3754,7433,4099,2820,3524,3122, 503, 818,3898,3123,1568, 814, 676, # 4038 1444, 306,1749,7434,3755,1416,1030, 197,1428, 805,2821,1501,4349,7435,7436,7437, # 4054 1993,7438,4350,7439,7440,2195, 13,2779,3638,2980,3124,1229,1916,7441,3756,2131, # 4070 7442,4100,4351,2399,3525,7443,2213,1511,1727,1120,7444,7445, 646,3757,2443, 307, # 4086 7446,7447,1595,3186,7448,7449,7450,3639,1113,1356,3899,1465,2522,2523,7451, 519, # 4102 7452, 128,2132, 92,2284,1979,7453,3900,1512, 342,3125,2196,7454,2780,2214,1980, # 4118 3323,7455, 290,1656,1317, 789, 827,2360,7456,3758,4352, 562, 581,3901,7457, 401, # 4134 4353,2248, 94,4354,1399,2781,7458,1463,2024,4355,3187,1943,7459, 828,1105,4101, # 4150 1262,1394,7460,4102, 605,4356,7461,1783,2862,7462,2822, 819,2101, 578,2197,2937, # 4166 7463,1502, 436,3254,4103,3255,2823,3902,2905,3425,3426,7464,2712,2315,7465,7466, # 4182 2332,2067, 23,4357, 193, 826,3759,2102, 699,1630,4104,3075, 390,1793,1064,3526, # 4198 7467,1579,3076,3077,1400,7468,4105,1838,1640,2863,7469,4358,4359, 137,4106, 598, # 4214 3078,1966, 780, 104, 974,2938,7470, 278, 899, 253, 402, 572, 504, 493,1339,7471, # 4230 3903,1275,4360,2574,2550,7472,3640,3029,3079,2249, 565,1334,2713, 863, 41,7473, # 4246 7474,4361,7475,1657,2333, 19, 463,2750,4107, 606,7476,2981,3256,1087,2084,1323, # 4262 2652,2982,7477,1631,1623,1750,4108,2682,7478,2864, 791,2714,2653,2334, 232,2416, # 4278 7479,2983,1498,7480,2654,2620, 755,1366,3641,3257,3126,2025,1609, 119,1917,3427, # 4294 862,1026,4109,7481,3904,3760,4362,3905,4363,2260,1951,2470,7482,1125, 817,4110, # 4310 4111,3906,1513,1766,2040,1487,4112,3030,3258,2824,3761,3127,7483,7484,1507,7485, # 4326 2683, 733, 40,1632,1106,2865, 345,4113, 841,2524, 230,4364,2984,1846,3259,3428, # 4342 7486,1263, 986,3429,7487, 735, 879, 254,1137, 857, 622,1300,1180,1388,1562,3907, # 4358 3908,2939, 967,2751,2655,1349, 592,2133,1692,3324,2985,1994,4114,1679,3909,1901, # 4374 2185,7488, 739,3642,2715,1296,1290,7489,4115,2198,2199,1921,1563,2595,2551,1870, # 4390 2752,2986,7490, 435,7491, 
343,1108, 596, 17,1751,4365,2235,3430,3643,7492,4366, # 4406 294,3527,2940,1693, 477, 979, 281,2041,3528, 643,2042,3644,2621,2782,2261,1031, # 4422 2335,2134,2298,3529,4367, 367,1249,2552,7493,3530,7494,4368,1283,3325,2004, 240, # 4438 1762,3326,4369,4370, 836,1069,3128, 474,7495,2148,2525, 268,3531,7496,3188,1521, # 4454 1284,7497,1658,1546,4116,7498,3532,3533,7499,4117,3327,2684,1685,4118, 961,1673, # 4470 2622, 190,2005,2200,3762,4371,4372,7500, 570,2497,3645,1490,7501,4373,2623,3260, # 4486 1956,4374, 584,1514, 396,1045,1944,7502,4375,1967,2444,7503,7504,4376,3910, 619, # 4502 7505,3129,3261, 215,2006,2783,2553,3189,4377,3190,4378, 763,4119,3763,4379,7506, # 4518 7507,1957,1767,2941,3328,3646,1174, 452,1477,4380,3329,3130,7508,2825,1253,2382, # 4534 2186,1091,2285,4120, 492,7509, 638,1169,1824,2135,1752,3911, 648, 926,1021,1324, # 4550 4381, 520,4382, 997, 847,1007, 892,4383,3764,2262,1871,3647,7510,2400,1784,4384, # 4566 1952,2942,3080,3191,1728,4121,2043,3648,4385,2007,1701,3131,1551, 30,2263,4122, # 4582 7511,2026,4386,3534,7512, 501,7513,4123, 594,3431,2165,1821,3535,3432,3536,3192, # 4598 829,2826,4124,7514,1680,3132,1225,4125,7515,3262,4387,4126,3133,2336,7516,4388, # 4614 4127,7517,3912,3913,7518,1847,2383,2596,3330,7519,4389, 374,3914, 652,4128,4129, # 4630 375,1140, 798,7520,7521,7522,2361,4390,2264, 546,1659, 138,3031,2445,4391,7523, # 4646 2250, 612,1848, 910, 796,3765,1740,1371, 825,3766,3767,7524,2906,2554,7525, 692, # 4662 444,3032,2624, 801,4392,4130,7526,1491, 244,1053,3033,4131,4132, 340,7527,3915, # 4678 1041,2987, 293,1168, 87,1357,7528,1539, 959,7529,2236, 721, 694,4133,3768, 219, # 4694 1478, 644,1417,3331,2656,1413,1401,1335,1389,3916,7530,7531,2988,2362,3134,1825, # 4710 730,1515, 184,2827, 66,4393,7532,1660,2943, 246,3332, 378,1457, 226,3433, 975, # 4726 3917,2944,1264,3537, 674, 696,7533, 163,7534,1141,2417,2166, 713,3538,3333,4394, # 4742 3918,7535,7536,1186, 15,7537,1079,1070,7538,1522,3193,3539, 276,1050,2716, 758, # 4758 1126, 653,2945,3263,7539,2337, 889,3540,3919,3081,2989, 903,1250,4395,3920,3434, # 4774 3541,1342,1681,1718, 766,3264, 286, 89,2946,3649,7540,1713,7541,2597,3334,2990, # 4790 7542,2947,2215,3194,2866,7543,4396,2498,2526, 181, 387,1075,3921, 731,2187,3335, # 4806 7544,3265, 310, 313,3435,2299, 770,4134, 54,3034, 189,4397,3082,3769,3922,7545, # 4822 1230,1617,1849, 355,3542,4135,4398,3336, 111,4136,3650,1350,3135,3436,3035,4137, # 4838 2149,3266,3543,7546,2784,3923,3924,2991, 722,2008,7547,1071, 247,1207,2338,2471, # 4854 1378,4399,2009, 864,1437,1214,4400, 373,3770,1142,2216, 667,4401, 442,2753,2555, # 4870 3771,3925,1968,4138,3267,1839, 837, 170,1107, 934,1336,1882,7548,7549,2118,4139, # 4886 2828, 743,1569,7550,4402,4140, 582,2384,1418,3437,7551,1802,7552, 357,1395,1729, # 4902 3651,3268,2418,1564,2237,7553,3083,3772,1633,4403,1114,2085,4141,1532,7554, 482, # 4918 2446,4404,7555,7556,1492, 833,1466,7557,2717,3544,1641,2829,7558,1526,1272,3652, # 4934 4142,1686,1794, 416,2556,1902,1953,1803,7559,3773,2785,3774,1159,2316,7560,2867, # 4950 4405,1610,1584,3036,2419,2754, 443,3269,1163,3136,7561,7562,3926,7563,4143,2499, # 4966 3037,4406,3927,3137,2103,1647,3545,2010,1872,4144,7564,4145, 431,3438,7565, 250, # 4982 97, 81,4146,7566,1648,1850,1558, 160, 848,7567, 866, 740,1694,7568,2201,2830, # 4998 3195,4147,4407,3653,1687, 950,2472, 426, 469,3196,3654,3655,3928,7569,7570,1188, # 5014 424,1995, 861,3546,4148,3775,2202,2685, 168,1235,3547,4149,7571,2086,1674,4408, # 5030 3337,3270, 220,2557,1009,7572,3776, 670,2992, 332,1208, 
717,7573,7574,3548,2447, # 5046 3929,3338,7575, 513,7576,1209,2868,3339,3138,4409,1080,7577,7578,7579,7580,2527, # 5062 3656,3549, 815,1587,3930,3931,7581,3550,3439,3777,1254,4410,1328,3038,1390,3932, # 5078 1741,3933,3778,3934,7582, 236,3779,2448,3271,7583,7584,3657,3780,1273,3781,4411, # 5094 7585, 308,7586,4412, 245,4413,1851,2473,1307,2575, 430, 715,2136,2449,7587, 270, # 5110 199,2869,3935,7588,3551,2718,1753, 761,1754, 725,1661,1840,4414,3440,3658,7589, # 5126 7590, 587, 14,3272, 227,2598, 326, 480,2265, 943,2755,3552, 291, 650,1883,7591, # 5142 1702,1226, 102,1547, 62,3441, 904,4415,3442,1164,4150,7592,7593,1224,1548,2756, # 5158 391, 498,1493,7594,1386,1419,7595,2055,1177,4416, 813, 880,1081,2363, 566,1145, # 5174 4417,2286,1001,1035,2558,2599,2238, 394,1286,7596,7597,2068,7598, 86,1494,1730, # 5190 3936, 491,1588, 745, 897,2948, 843,3340,3937,2757,2870,3273,1768, 998,2217,2069, # 5206 397,1826,1195,1969,3659,2993,3341, 284,7599,3782,2500,2137,2119,1903,7600,3938, # 5222 2150,3939,4151,1036,3443,1904, 114,2559,4152, 209,1527,7601,7602,2949,2831,2625, # 5238 2385,2719,3139, 812,2560,7603,3274,7604,1559, 737,1884,3660,1210, 885, 28,2686, # 5254 3553,3783,7605,4153,1004,1779,4418,7606, 346,1981,2218,2687,4419,3784,1742, 797, # 5270 1642,3940,1933,1072,1384,2151, 896,3941,3275,3661,3197,2871,3554,7607,2561,1958, # 5286 4420,2450,1785,7608,7609,7610,3942,4154,1005,1308,3662,4155,2720,4421,4422,1528, # 5302 2600, 161,1178,4156,1982, 987,4423,1101,4157, 631,3943,1157,3198,2420,1343,1241, # 5318 1016,2239,2562, 372, 877,2339,2501,1160, 555,1934, 911,3944,7611, 466,1170, 169, # 5334 1051,2907,2688,3663,2474,2994,1182,2011,2563,1251,2626,7612, 992,2340,3444,1540, # 5350 2721,1201,2070,2401,1996,2475,7613,4424, 528,1922,2188,1503,1873,1570,2364,3342, # 5366 3276,7614, 557,1073,7615,1827,3445,2087,2266,3140,3039,3084, 767,3085,2786,4425, # 5382 1006,4158,4426,2341,1267,2176,3664,3199, 778,3945,3200,2722,1597,2657,7616,4427, # 5398 7617,3446,7618,7619,7620,3277,2689,1433,3278, 131, 95,1504,3946, 723,4159,3141, # 5414 1841,3555,2758,2189,3947,2027,2104,3665,7621,2995,3948,1218,7622,3343,3201,3949, # 5430 4160,2576, 248,1634,3785, 912,7623,2832,3666,3040,3786, 654, 53,7624,2996,7625, # 5446 1688,4428, 777,3447,1032,3950,1425,7626, 191, 820,2120,2833, 971,4429, 931,3202, # 5462 135, 664, 783,3787,1997, 772,2908,1935,3951,3788,4430,2909,3203, 282,2723, 640, # 5478 1372,3448,1127, 922, 325,3344,7627,7628, 711,2044,7629,7630,3952,2219,2787,1936, # 5494 3953,3345,2220,2251,3789,2300,7631,4431,3790,1258,3279,3954,3204,2138,2950,3955, # 5510 3956,7632,2221, 258,3205,4432, 101,1227,7633,3280,1755,7634,1391,3281,7635,2910, # 5526 2056, 893,7636,7637,7638,1402,4161,2342,7639,7640,3206,3556,7641,7642, 878,1325, # 5542 1780,2788,4433, 259,1385,2577, 744,1183,2267,4434,7643,3957,2502,7644, 684,1024, # 5558 4162,7645, 472,3557,3449,1165,3282,3958,3959, 322,2152, 881, 455,1695,1152,1340, # 5574 660, 554,2153,4435,1058,4436,4163, 830,1065,3346,3960,4437,1923,7646,1703,1918, # 5590 7647, 932,2268, 122,7648,4438, 947, 677,7649,3791,2627, 297,1905,1924,2269,4439, # 5606 2317,3283,7650,7651,4164,7652,4165, 84,4166, 112, 989,7653, 547,1059,3961, 701, # 5622 3558,1019,7654,4167,7655,3450, 942, 639, 457,2301,2451, 993,2951, 407, 851, 494, # 5638 4440,3347, 927,7656,1237,7657,2421,3348, 573,4168, 680, 921,2911,1279,1874, 285, # 5654 790,1448,1983, 719,2167,7658,7659,4441,3962,3963,1649,7660,1541, 563,7661,1077, # 5670 7662,3349,3041,3451, 511,2997,3964,3965,3667,3966,1268,2564,3350,3207,4442,4443, # 5686 7663, 
535,1048,1276,1189,2912,2028,3142,1438,1373,2834,2952,1134,2012,7664,4169, # 5702 1238,2578,3086,1259,7665, 700,7666,2953,3143,3668,4170,7667,4171,1146,1875,1906, # 5718 4444,2601,3967, 781,2422, 132,1589, 203, 147, 273,2789,2402, 898,1786,2154,3968, # 5734 3969,7668,3792,2790,7669,7670,4445,4446,7671,3208,7672,1635,3793, 965,7673,1804, # 5750 2690,1516,3559,1121,1082,1329,3284,3970,1449,3794, 65,1128,2835,2913,2759,1590, # 5766 3795,7674,7675, 12,2658, 45, 976,2579,3144,4447, 517,2528,1013,1037,3209,7676, # 5782 3796,2836,7677,3797,7678,3452,7679,2602, 614,1998,2318,3798,3087,2724,2628,7680, # 5798 2580,4172, 599,1269,7681,1810,3669,7682,2691,3088, 759,1060, 489,1805,3351,3285, # 5814 1358,7683,7684,2386,1387,1215,2629,2252, 490,7685,7686,4173,1759,2387,2343,7687, # 5830 4448,3799,1907,3971,2630,1806,3210,4449,3453,3286,2760,2344, 874,7688,7689,3454, # 5846 3670,1858, 91,2914,3671,3042,3800,4450,7690,3145,3972,2659,7691,3455,1202,1403, # 5862 3801,2954,2529,1517,2503,4451,3456,2504,7692,4452,7693,2692,1885,1495,1731,3973, # 5878 2365,4453,7694,2029,7695,7696,3974,2693,1216, 237,2581,4174,2319,3975,3802,4454, # 5894 4455,2694,3560,3457, 445,4456,7697,7698,7699,7700,2761, 61,3976,3672,1822,3977, # 5910 7701, 687,2045, 935, 925, 405,2660, 703,1096,1859,2725,4457,3978,1876,1367,2695, # 5926 3352, 918,2105,1781,2476, 334,3287,1611,1093,4458, 564,3146,3458,3673,3353, 945, # 5942 2631,2057,4459,7702,1925, 872,4175,7703,3459,2696,3089, 349,4176,3674,3979,4460, # 5958 3803,4177,3675,2155,3980,4461,4462,4178,4463,2403,2046, 782,3981, 400, 251,4179, # 5974 1624,7704,7705, 277,3676, 299,1265, 476,1191,3804,2121,4180,4181,1109, 205,7706, # 5990 2582,1000,2156,3561,1860,7707,7708,7709,4464,7710,4465,2565, 107,2477,2157,3982, # 6006 3460,3147,7711,1533, 541,1301, 158, 753,4182,2872,3562,7712,1696, 370,1088,4183, # 6022 4466,3563, 579, 327, 440, 162,2240, 269,1937,1374,3461, 968,3043, 56,1396,3090, # 6038 2106,3288,3354,7713,1926,2158,4467,2998,7714,3564,7715,7716,3677,4468,2478,7717, # 6054 2791,7718,1650,4469,7719,2603,7720,7721,3983,2661,3355,1149,3356,3984,3805,3985, # 6070 7722,1076, 49,7723, 951,3211,3289,3290, 450,2837, 920,7724,1811,2792,2366,4184, # 6086 1908,1138,2367,3806,3462,7725,3212,4470,1909,1147,1518,2423,4471,3807,7726,4472, # 6102 2388,2604, 260,1795,3213,7727,7728,3808,3291, 708,7729,3565,1704,7730,3566,1351, # 6118 1618,3357,2999,1886, 944,4185,3358,4186,3044,3359,4187,7731,3678, 422, 413,1714, # 6134 3292, 500,2058,2345,4188,2479,7732,1344,1910, 954,7733,1668,7734,7735,3986,2404, # 6150 4189,3567,3809,4190,7736,2302,1318,2505,3091, 133,3092,2873,4473, 629, 31,2838, # 6166 2697,3810,4474, 850, 949,4475,3987,2955,1732,2088,4191,1496,1852,7737,3988, 620, # 6182 3214, 981,1242,3679,3360,1619,3680,1643,3293,2139,2452,1970,1719,3463,2168,7738, # 6198 3215,7739,7740,3361,1828,7741,1277,4476,1565,2047,7742,1636,3568,3093,7743, 869, # 6214 2839, 655,3811,3812,3094,3989,3000,3813,1310,3569,4477,7744,7745,7746,1733, 558, # 6230 4478,3681, 335,1549,3045,1756,4192,3682,1945,3464,1829,1291,1192, 470,2726,2107, # 6246 2793, 913,1054,3990,7747,1027,7748,3046,3991,4479, 982,2662,3362,3148,3465,3216, # 6262 3217,1946,2794,7749, 571,4480,7750,1830,7751,3570,2583,1523,2424,7752,2089, 984, # 6278 4481,3683,1959,7753,3684, 852, 923,2795,3466,3685, 969,1519, 999,2048,2320,1705, # 6294 7754,3095, 615,1662, 151, 597,3992,2405,2321,1049, 275,4482,3686,4193, 568,3687, # 6310 3571,2480,4194,3688,7755,2425,2270, 409,3218,7756,1566,2874,3467,1002, 769,2840, # 6326 194,2090,3149,3689,2222,3294,4195, 
628,1505,7757,7758,1763,2177,3001,3993, 521, # 6342 1161,2584,1787,2203,2406,4483,3994,1625,4196,4197, 412, 42,3096, 464,7759,2632, # 6358 4484,3363,1760,1571,2875,3468,2530,1219,2204,3814,2633,2140,2368,4485,4486,3295, # 6374 1651,3364,3572,7760,7761,3573,2481,3469,7762,3690,7763,7764,2271,2091, 460,7765, # 6390 4487,7766,3002, 962, 588,3574, 289,3219,2634,1116, 52,7767,3047,1796,7768,7769, # 6406 7770,1467,7771,1598,1143,3691,4198,1984,1734,1067,4488,1280,3365, 465,4489,1572, # 6422 510,7772,1927,2241,1812,1644,3575,7773,4490,3692,7774,7775,2663,1573,1534,7776, # 6438 7777,4199, 536,1807,1761,3470,3815,3150,2635,7778,7779,7780,4491,3471,2915,1911, # 6454 2796,7781,3296,1122, 377,3220,7782, 360,7783,7784,4200,1529, 551,7785,2059,3693, # 6470 1769,2426,7786,2916,4201,3297,3097,2322,2108,2030,4492,1404, 136,1468,1479, 672, # 6486 1171,3221,2303, 271,3151,7787,2762,7788,2049, 678,2727, 865,1947,4493,7789,2013, # 6502 3995,2956,7790,2728,2223,1397,3048,3694,4494,4495,1735,2917,3366,3576,7791,3816, # 6518 509,2841,2453,2876,3817,7792,7793,3152,3153,4496,4202,2531,4497,2304,1166,1010, # 6534 552, 681,1887,7794,7795,2957,2958,3996,1287,1596,1861,3154, 358, 453, 736, 175, # 6550 478,1117, 905,1167,1097,7796,1853,1530,7797,1706,7798,2178,3472,2287,3695,3473, # 6566 3577,4203,2092,4204,7799,3367,1193,2482,4205,1458,2190,2205,1862,1888,1421,3298, # 6582 2918,3049,2179,3474, 595,2122,7800,3997,7801,7802,4206,1707,2636, 223,3696,1359, # 6598 751,3098, 183,3475,7803,2797,3003, 419,2369, 633, 704,3818,2389, 241,7804,7805, # 6614 7806, 838,3004,3697,2272,2763,2454,3819,1938,2050,3998,1309,3099,2242,1181,7807, # 6630 1136,2206,3820,2370,1446,4207,2305,4498,7808,7809,4208,1055,2605, 484,3698,7810, # 6646 3999, 625,4209,2273,3368,1499,4210,4000,7811,4001,4211,3222,2274,2275,3476,7812, # 6662 7813,2764, 808,2606,3699,3369,4002,4212,3100,2532, 526,3370,3821,4213, 955,7814, # 6678 1620,4214,2637,2427,7815,1429,3700,1669,1831, 994, 928,7816,3578,1260,7817,7818, # 6694 7819,1948,2288, 741,2919,1626,4215,2729,2455, 867,1184, 362,3371,1392,7820,7821, # 6710 4003,4216,1770,1736,3223,2920,4499,4500,1928,2698,1459,1158,7822,3050,3372,2877, # 6726 1292,1929,2506,2842,3701,1985,1187,2071,2014,2607,4217,7823,2566,2507,2169,3702, # 6742 2483,3299,7824,3703,4501,7825,7826, 666,1003,3005,1022,3579,4218,7827,4502,1813, # 6758 2253, 574,3822,1603, 295,1535, 705,3823,4219, 283, 858, 417,7828,7829,3224,4503, # 6774 4504,3051,1220,1889,1046,2276,2456,4004,1393,1599, 689,2567, 388,4220,7830,2484, # 6790 802,7831,2798,3824,2060,1405,2254,7832,4505,3825,2109,1052,1345,3225,1585,7833, # 6806 809,7834,7835,7836, 575,2730,3477, 956,1552,1469,1144,2323,7837,2324,1560,2457, # 6822 3580,3226,4005, 616,2207,3155,2180,2289,7838,1832,7839,3478,4506,7840,1319,3704, # 6838 3705,1211,3581,1023,3227,1293,2799,7841,7842,7843,3826, 607,2306,3827, 762,2878, # 6854 1439,4221,1360,7844,1485,3052,7845,4507,1038,4222,1450,2061,2638,4223,1379,4508, # 6870 2585,7846,7847,4224,1352,1414,2325,2921,1172,7848,7849,3828,3829,7850,1797,1451, # 6886 7851,7852,7853,7854,2922,4006,4007,2485,2346, 411,4008,4009,3582,3300,3101,4509, # 6902 1561,2664,1452,4010,1375,7855,7856, 47,2959, 316,7857,1406,1591,2923,3156,7858, # 6918 1025,2141,3102,3157, 354,2731, 884,2224,4225,2407, 508,3706, 726,3583, 996,2428, # 6934 3584, 729,7859, 392,2191,1453,4011,4510,3707,7860,7861,2458,3585,2608,1675,2800, # 6950 919,2347,2960,2348,1270,4511,4012, 73,7862,7863, 647,7864,3228,2843,2255,1550, # 6966 1346,3006,7865,1332, 883,3479,7866,7867,7868,7869,3301,2765,7870,1212, 831,1347, # 
6982 4226,4512,2326,3830,1863,3053, 720,3831,4513,4514,3832,7871,4227,7872,7873,4515, # 6998 7874,7875,1798,4516,3708,2609,4517,3586,1645,2371,7876,7877,2924, 669,2208,2665, # 7014 2429,7878,2879,7879,7880,1028,3229,7881,4228,2408,7882,2256,1353,7883,7884,4518, # 7030 3158, 518,7885,4013,7886,4229,1960,7887,2142,4230,7888,7889,3007,2349,2350,3833, # 7046 516,1833,1454,4014,2699,4231,4519,2225,2610,1971,1129,3587,7890,2766,7891,2961, # 7062 1422, 577,1470,3008,1524,3373,7892,7893, 432,4232,3054,3480,7894,2586,1455,2508, # 7078 2226,1972,1175,7895,1020,2732,4015,3481,4520,7896,2733,7897,1743,1361,3055,3482, # 7094 2639,4016,4233,4521,2290, 895, 924,4234,2170, 331,2243,3056, 166,1627,3057,1098, # 7110 7898,1232,2880,2227,3374,4522, 657, 403,1196,2372, 542,3709,3375,1600,4235,3483, # 7126 7899,4523,2767,3230, 576, 530,1362,7900,4524,2533,2666,3710,4017,7901, 842,3834, # 7142 7902,2801,2031,1014,4018, 213,2700,3376, 665, 621,4236,7903,3711,2925,2430,7904, # 7158 2431,3302,3588,3377,7905,4237,2534,4238,4525,3589,1682,4239,3484,1380,7906, 724, # 7174 2277, 600,1670,7907,1337,1233,4526,3103,2244,7908,1621,4527,7909, 651,4240,7910, # 7190 1612,4241,2611,7911,2844,7912,2734,2307,3058,7913, 716,2459,3059, 174,1255,2701, # 7206 4019,3590, 548,1320,1398, 728,4020,1574,7914,1890,1197,3060,4021,7915,3061,3062, # 7222 3712,3591,3713, 747,7916, 635,4242,4528,7917,7918,7919,4243,7920,7921,4529,7922, # 7238 3378,4530,2432, 451,7923,3714,2535,2072,4244,2735,4245,4022,7924,1764,4531,7925, # 7254 4246, 350,7926,2278,2390,2486,7927,4247,4023,2245,1434,4024, 488,4532, 458,4248, # 7270 4025,3715, 771,1330,2391,3835,2568,3159,2159,2409,1553,2667,3160,4249,7928,2487, # 7286 2881,2612,1720,2702,4250,3379,4533,7929,2536,4251,7930,3231,4252,2768,7931,2015, # 7302 2736,7932,1155,1017,3716,3836,7933,3303,2308, 201,1864,4253,1430,7934,4026,7935, # 7318 7936,7937,7938,7939,4254,1604,7940, 414,1865, 371,2587,4534,4535,3485,2016,3104, # 7334 4536,1708, 960,4255, 887, 389,2171,1536,1663,1721,7941,2228,4027,2351,2926,1580, # 7350 7942,7943,7944,1744,7945,2537,4537,4538,7946,4539,7947,2073,7948,7949,3592,3380, # 7366 2882,4256,7950,4257,2640,3381,2802, 673,2703,2460, 709,3486,4028,3593,4258,7951, # 7382 1148, 502, 634,7952,7953,1204,4540,3594,1575,4541,2613,3717,7954,3718,3105, 948, # 7398 3232, 121,1745,3837,1110,7955,4259,3063,2509,3009,4029,3719,1151,1771,3838,1488, # 7414 4030,1986,7956,2433,3487,7957,7958,2093,7959,4260,3839,1213,1407,2803, 531,2737, # 7430 2538,3233,1011,1537,7960,2769,4261,3106,1061,7961,3720,3721,1866,2883,7962,2017, # 7446 120,4262,4263,2062,3595,3234,2309,3840,2668,3382,1954,4542,7963,7964,3488,1047, # 7462 2704,1266,7965,1368,4543,2845, 649,3383,3841,2539,2738,1102,2846,2669,7966,7967, # 7478 1999,7968,1111,3596,2962,7969,2488,3842,3597,2804,1854,3384,3722,7970,7971,3385, # 7494 2410,2884,3304,3235,3598,7972,2569,7973,3599,2805,4031,1460, 856,7974,3600,7975, # 7510 2885,2963,7976,2886,3843,7977,4264, 632,2510, 875,3844,1697,3845,2291,7978,7979, # 7526 4544,3010,1239, 580,4545,4265,7980, 914, 936,2074,1190,4032,1039,2123,7981,7982, # 7542 7983,3386,1473,7984,1354,4266,3846,7985,2172,3064,4033, 915,3305,4267,4268,3306, # 7558 1605,1834,7986,2739, 398,3601,4269,3847,4034, 328,1912,2847,4035,3848,1331,4270, # 7574 3011, 937,4271,7987,3602,4036,4037,3387,2160,4546,3388, 524, 742, 538,3065,1012, # 7590 7988,7989,3849,2461,7990, 658,1103, 225,3850,7991,7992,4547,7993,4548,7994,3236, # 7606 1243,7995,4038, 963,2246,4549,7996,2705,3603,3161,7997,7998,2588,2327,7999,4550, # 7622 8000,8001,8002,3489,3307, 
957,3389,2540,2032,1930,2927,2462, 870,2018,3604,1746, # 7638 2770,2771,2434,2463,8003,3851,8004,3723,3107,3724,3490,3390,3725,8005,1179,3066, # 7654 8006,3162,2373,4272,3726,2541,3163,3108,2740,4039,8007,3391,1556,2542,2292, 977, # 7670 2887,2033,4040,1205,3392,8008,1765,3393,3164,2124,1271,1689, 714,4551,3491,8009, # 7686 2328,3852, 533,4273,3605,2181, 617,8010,2464,3308,3492,2310,8011,8012,3165,8013, # 7702 8014,3853,1987, 618, 427,2641,3493,3394,8015,8016,1244,1690,8017,2806,4274,4552, # 7718 8018,3494,8019,8020,2279,1576, 473,3606,4275,3395, 972,8021,3607,8022,3067,8023, # 7734 8024,4553,4554,8025,3727,4041,4042,8026, 153,4555, 356,8027,1891,2888,4276,2143, # 7750 408, 803,2352,8028,3854,8029,4277,1646,2570,2511,4556,4557,3855,8030,3856,4278, # 7766 8031,2411,3396, 752,8032,8033,1961,2964,8034, 746,3012,2465,8035,4279,3728, 698, # 7782 4558,1892,4280,3608,2543,4559,3609,3857,8036,3166,3397,8037,1823,1302,4043,2706, # 7798 3858,1973,4281,8038,4282,3167, 823,1303,1288,1236,2848,3495,4044,3398, 774,3859, # 7814 8039,1581,4560,1304,2849,3860,4561,8040,2435,2161,1083,3237,4283,4045,4284, 344, # 7830 1173, 288,2311, 454,1683,8041,8042,1461,4562,4046,2589,8043,8044,4563, 985, 894, # 7846 8045,3399,3168,8046,1913,2928,3729,1988,8047,2110,1974,8048,4047,8049,2571,1194, # 7862 425,8050,4564,3169,1245,3730,4285,8051,8052,2850,8053, 636,4565,1855,3861, 760, # 7878 1799,8054,4286,2209,1508,4566,4048,1893,1684,2293,8055,8056,8057,4287,4288,2210, # 7894 479,8058,8059, 832,8060,4049,2489,8061,2965,2490,3731, 990,3109, 627,1814,2642, # 7910 4289,1582,4290,2125,2111,3496,4567,8062, 799,4291,3170,8063,4568,2112,1737,3013, # 7926 1018, 543, 754,4292,3309,1676,4569,4570,4050,8064,1489,8065,3497,8066,2614,2889, # 7942 4051,8067,8068,2966,8069,8070,8071,8072,3171,4571,4572,2182,1722,8073,3238,3239, # 7958 1842,3610,1715, 481, 365,1975,1856,8074,8075,1962,2491,4573,8076,2126,3611,3240, # 7974 433,1894,2063,2075,8077, 602,2741,8078,8079,8080,8081,8082,3014,1628,3400,8083, # 7990 3172,4574,4052,2890,4575,2512,8084,2544,2772,8085,8086,8087,3310,4576,2891,8088, # 8006 4577,8089,2851,4578,4579,1221,2967,4053,2513,8090,8091,8092,1867,1989,8093,8094, # 8022 8095,1895,8096,8097,4580,1896,4054, 318,8098,2094,4055,4293,8099,8100, 485,8101, # 8038 938,3862, 553,2670, 116,8102,3863,3612,8103,3498,2671,2773,3401,3311,2807,8104, # 8054 3613,2929,4056,1747,2930,2968,8105,8106, 207,8107,8108,2672,4581,2514,8109,3015, # 8070 890,3614,3864,8110,1877,3732,3402,8111,2183,2353,3403,1652,8112,8113,8114, 941, # 8086 2294, 208,3499,4057,2019, 330,4294,3865,2892,2492,3733,4295,8115,8116,8117,8118, # 8102 ) PKZa VV1site-packages/pip/_vendor/chardet/euckrprober.pycnu[ abc@sZddlmZddlmZddlmZddlmZdefdYZdS(i(tMultiByteCharSetProber(tCodingStateMachine(tEUCKRDistributionAnalysis(tEUCKR_SM_MODELt EUCKRProbercBs/eZdZedZedZRS(cCs<tt|jtt|_t|_|jdS(N( tsuperRt__init__RRt coding_smRtdistribution_analyzertreset(tself((sC/usr/lib/python2.7/site-packages/pip/_vendor/chardet/euckrprober.pyR#s cCsdS(NsEUC-KR((R ((sC/usr/lib/python2.7/site-packages/pip/_vendor/chardet/euckrprober.pyt charset_name)scCsdS(NtKorean((R ((sC/usr/lib/python2.7/site-packages/pip/_vendor/chardet/euckrprober.pytlanguage-s(t__name__t __module__RtpropertyR R (((sC/usr/lib/python2.7/site-packages/pip/_vendor/chardet/euckrprober.pyR"s N( tmbcharsetproberRtcodingstatemachineRtchardistributionRtmbcssmRR(((sC/usr/lib/python2.7/site-packages/pip/_vendor/chardet/euckrprober.pytsPKZ8ЕЕ0site-packages/pip/_vendor/chardet/gb2312freq.pyonu[ abc@sdZdZdZdS(g?iiiii< iTii iQii i iii<iw ii9 ii 
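# A minimal usage sketch for the detect() helper defined in chardet/__init__.py
# above, plus a simplified view of how a frequency table such as
# EUCTW_CHAR_TO_FREQ_ORDER and its EUCTW_TYPICAL_DISTRIBUTION_RATIO feed the
# confidence figure. This is an illustration under stated assumptions, not the
# vendored implementation: the import path pip._vendor.chardet is inferred from
# the paths in this archive, and rough_distribution_confidence() only sketches
# the idea behind chardet's CharDistributionAnalysis classes.
from pip._vendor import chardet   # a standalone install would use `import chardet`

sample = u'\u4f60\u597d\uff0c\u4e16\u754c'.encode('big5')   # Traditional Chinese test bytes
print(chardet.detect(sample))     # dict with 'encoding', 'confidence' and 'language' keys

# detect() is shorthand for the incremental interface whose close() method is
# shown above: construct a UniversalDetector, feed() chunks, then close().
detector = chardet.UniversalDetector()
for chunk in (sample[:3], sample[3:]):
    detector.feed(chunk)
print(detector.close())


def rough_distribution_confidence(freq_chars, total_chars, typical_ratio=0.75):
    """Sketch of the distribution-ratio idea used with tables like the one
    above: characters whose frequency order falls in the most common range
    (e.g. the first 512 orders) count as 'frequent', and the score grows as
    the observed text is dominated by them relative to typical_ratio."""
    if total_chars <= 0:
        return 0.0
    if freq_chars >= total_chars:
        return 0.99
    ratio = float(freq_chars) / ((total_chars - freq_chars) * typical_ratio)
    return min(ratio, 0.99)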
iiiiZ iiiqi i i iW iyiiieioiiv ii i iiii iLiBi iQ iiiiQiiiEiifiiiiiiiidi( iyiii,i i i iE ieii iWiRiii| iR ii i i iX i ijiEii i+iPiiiiiii;iim iiliiii iiieiHiiiii^ iki6ii" iFi i i i i^ i i iiii?i`iui$i1i ii i iUi i1 iiigiliii iiGii2iiq i i iPiiL iiiiiy iii i iMiikii4 iiiiii i i@ i`ii5iiiN ii7i3iiniiuiGi&ibihi0i iM iNii i iY i ii i iii i&i iQi i7i^ihiiiJ iiPiiiiriiiiiiiT i-iGi,iui i ii$iii<iBii$i>iiix iLiWiiini iiTiiZii i i{ iii i iii i?i i0ibi;iDiiiiiCiSiiUi> i|igi ii i iKii iiii% i5 iiiiil iihi iii?ii i iUiiii ii*i< i i ii ii iii ii i! iX iii iiQ igiiii iiri ii iqii iiz ii i3ii iii i i]iciziii7 ii_ii iCii&iaiHi| ili ii=iiiIiPi ii~ i8i`i iw i i iTii i iiii iiiiii[iq iii igii|iCi ii iN iIiii5 i i(ii/iHiK i iiiii9iiIiii|iiii ii iWii iii^i~iiii i i= iXii3i iIiii iiiii3iriiiioi ii i{iiiiCii!iHi iiiifii= ii1i[i i| iDii%iitii i ii ii.iiiiyiiTiii9 i i i#iiiJiiniiii ii>iBi"iiiai iiMiKii iW i i i i! ir iFi+ i i i iix iHiJiiiii2iKiiR i$ii9i it ii ii iiiiiiiiii%i~i i` iiiii'iviii ii|ii'i iin i*ii iiFii%ii)i~ii iiiJi&ii i!iD iiwii[i ii i i i' i iikiii i i iiiiiiiih iBi_i i?iigihii'iiiiiiiiiiiii5i] ii iiiiiiii1 ii)ia iiiii; iii_iiiQi/iu i i iji[ii}ii iii iiiiQi^ i iGiizi iiiViOii ii ii imiui i6ii i-i$i iiRii} i ipir i] i iDi iiiiiiiiiiii1iniiiitiYiG i i i/ i|iti0i iiiii\i i iivii iiciiii i~iAi"ii" i!i8iCi8 ii iji i1iili>i ii] i i i~iiLi(iiiKidiiviDiiii%i iii i, ii2ii iiij i-i.i imiKi iiioi i;i ii idi@iai iM i1 iLi@i2 iiwiitiQi i/ii ii~iiiii i/ii i_ i#i iOiiOiGiiiki[iOioiii- i iii i!i i iIiiii{iri i.ihi i iT iiii'iL iaii i@iii iii iT i/ iiiii iiMiii il iii i{i8i#i iii9iiSiRi iwiiZii0 iEigig ii i iiLiiipiiiSi iiiii i@iiI iLi; iOiqiii) i i:iFii5iJidiA ii i) iEiLiYiiii im i ii i iiic iiRii!iiii ii(iii]iIii i iieiiii iiii`ii iii ii[iiC i(iPi,i i}iO i' iLi\ i i i iiiiZiiisi'i iUiiiiiN iiii i iiMiiiiii iiiiiigi'iii ii iiPi0 is iiiip i iniiii i\i iii i!ihiiimi(i& i iCiiBi)i5 iiiiiii iii>iiiii i ii{iNiiiiEi3iiJi#iicii ii$ ii5iixi i]iiikii iiiiP ii iiiriiL i izidiii#i iii[ iSiihiiGiiiii iUi+i iyiiwi"iiVi2 i&izi i iiJ iP i iiiii% iZi$ io iT iKi1 i ii`iVi ii id ii iii+i iiAii iiii$ii i_i3i\ iiiiiibi ifiiiii iAiniiixiiFi iOi[iii(i ii i#i7i2 ixiif i i i!i6i{ iiiYiiii+iiiii iiii i iKiei i i iTi$iiiih ii iRiii"i iiZi i_ ii@iiai iii3 i iki)ijiii ieii iA i^ii iW iiihi: iz i) ii| iii ii i8 ijii iii3i i iziipiyi i i2 ioiD i=ii.ii9 iaifii*i_ii i*ii i i ii i- ii i}ii i ii* i itiiti6 iqiiiiiii iSi iii!i)iiyi iii ij ii iiViIiiig iiiLii iipiDii i ii%ii iM iiii$i i iiE i"iU iie i i|i iQiii ii ii i1 iiiii$i i]i4i&iiiziii`iiwi iiii i)i% i&iiii*iii8i2i> iSi iii1i#i ii ii#i i iiii iF iii, i> i+i i iiZ i3iqii i$ii iVi i;iiiNiimiKiWiiii5i i iRi4i iIiiDiiii i i i iiii%iKi|iii iiiii9iin iii i-i3 i2 ipiii? ii~iiiiiiCii[ii iii i3 i> iili i`ix iiii i# iiii0iiii iL i iiQiiRii?i(imii` ii3iiii& i ii isiDii0 ib ifi@ i iii ii=i i i<iiii ii6iiis iii iSiii i8iZ iii ii iii i ii iiiiNi i i iLi iJ iii ibiig iii[ iiiiii~ ii i iiifiiiii i i` i4i= iiiili? iici ii iiii iiiiiiiiwiii i%ii id i^i+iii iZiiF iI iit ii i/i iiiyi* isi&i+iii) iii iii i i ie iiiQi] ii i_i7 ii0iciuiiiLiki iiiii#iiiiiiii?iiii ini i iii iii iHiiYiiu ii i iiJii_ i_i&iiii iiiZi%ii isiiSii i iViiiiii1i'iiiFi\i i|i iWiii|iiSiWi i* i iRiliPi#i i i-iq iii i ii4iiiiiia iiiii@ i iXiii%iii8 i3i,ii} iiiidixi ii# ii6i i i.iiiAi iir i i iR idiii+ iiii i{i{iHi i iiiiiici i i iii,iiI iici ii iii=i iJi. i,i>iiiiLiei]ii iC iiii*i; i ii iipi iiiRii9 i i ii iii?i:i i i iQi'isiiwi[i ii-i i)iiiMiiiii]i}i6i@i iiUiiii/iiis i iiik i? 
ii, i= i1iMi iii/iS id iiio i6 ikiui iiiWi{iixiWii iiiix i^ iii[ i+ i7i:iiU iii!iii i.iHiTiiYiii ii@i"iCisi i i iG ibii7i~i. iUi iqi iiii{iiiiYi/i iiiAi ii*i:i8i'i i ii iiA i_i0iiiiiiiiKi i iit i* iii iiii i i iiiDiiii>iiivii i i'iiViii iiikii i\iiiii!i ii*iU iiiiiizii\if iii i iiii iiii i iiB iii iii{ii=i ii&i iiV i ii"i! i i iii i( i=ii i ii9i ii4iaiii-iQ ii ii imiiiiiCiii7i}iqiDi4iiuii iiiv iiiF iiu ia iiH ioi i iliKiA iii izi4 i i! iH ii ii5 iijii`ii iB i:iOiai i#iiiidiii(i i& ii^imig iE iio ii(iG i$i@i iIiiiAi ii`iXii7ii/ii$ii iihiiii iUi"iTiiiiimiii!iiiiii iiMi iiii i2iqiii9iii iiiii/iiii ipi@iX iijiii i^ ibi i iniiviiiioioiiiviii0ii9ii$ iifiS izi iiDi{iiXiii iVi3 iYi ii iAiiiiiiqiU i i: ii2iji ii i{ ii iFi i i ii iiiiciiiR i;ik ihii i iiY iOixiiziZi<iv ii i i iiIiii4 iMiiiiib iiii ii iiCii iiiO ii8iii i.i]i iVic i iii iH iiTiii ii> i ii i3iiii,iiifiiiiliTiWiHi.i'i>ipiiii"ii iJii i ivi2iii ixi i2iigi i4iii i iii{ iz i i i iii iiiiii?iiii iJiiii i i8iiiE ij i\ii8i8 ii iiii`ii>ibiSiiii i iii0i iiiEi iidiiii1iiiini@ii iiTii iiiQiiCii iK i;ikiiiiii_i\ ieiiwiiiiiMii. iriaiii i ii iiiigii i' i iii i iTiii4ioii iXiyiixii i i iU i^i: iiii ii"i#iii ii iiiiNiUiiip ii" i-i i iii:ii ii iiii iiiW i iG iLii iGiw ioiii iMir iii iiQ iiI iNi%i iXiii i7 i iaii iii i' iiihi i}iNiyi?itii iD iiiii iliiii iiie iJisi2i*iyiiiiii iiqi ii5i}is i$iiii5 iBiiii# idisi} ii~iiib iiO i/ i]iii i]i2iiifi7iiiOi iRii# i i[iiEii if iS i idiFiiiiC iV i>ii iiBii i i1i6imiii iiiiip i ii i i<izi i| iXiiiisie i]iiii i iiOi i+i iiiiii:i iAii ii ii( i imii4iP ii inii ioioiiimiiyiii8i ii<iii<ic iC iii&iwi i i$i i9i&ii i)i`iiiiiiGibiEii:iQio iF i iii i iFi|i;i iii3i4 i%ii:i iciipi6 i\iCii0i9 ii iiii~ iii"ih iiii i)ii i iii* ii i i i%ii iiii i.iii9i iii iii ii"i! ii, i ig iJ igiF iiii ii;ii iZiEi^iA iiili5ii ii4i' iiiki ii i2ii ii iXi7iii i{iiBixi=iei(ii iii'iiiiiiiiiii iD iiikii iii-i, i iei7 i iii iiiini} ip i7ijii ii9ii iiiii0iPi iiiiiiO i i i iiUi iii< iii iJiitiNii)i i.iii*i iAiir i] i)iiiiiGii_ ipik iii i% i ii ii>ibi_i^i?ii:iYiDi%iii iiiiiiYii i iiiiil iAii i i iDiBi i ii i@ i(iv iiiigi i i}iviiii iiiii iiiiiiii i-i~iiy i%i i i_i iiiiiijiMi iiGi+i iiii iiii3 i;i iGi)ii iii i ii ii i ii,iIi iii iXiAi\iiiiP ivii iVii=i i i6 i<iiX i i i ii8 i- iY iiiii iiiciiii9i i- iuii ii iR i i ii6ii iih i}i i? iw i ii ia i( i ii1ii ii>iii i ii]i iXiriPi0 i,ii+ ijii`iiiii. iii ii& iWiB iii|i7 iUii iiWiiYii iiw i iii iiiiiuiibiiq iiibiii ii=i$ iiNiJiiiUiiii iiiiiKi,iii4iri{i iiHiipi5iii=i i iEi9ii;i i<ii5i iii ii\iiiipiiiiiriiNiii^iiiiiK ii i*ii{ i+ i ii*i8iiii iiii ij ii@il i6ii2i8ii iiii'iiiiB iiii\iiiS iV ii~iiiii i iiVi iz i i^i i iii_ i} iG iqiiI i iii8itihiS ii}iRi i9iiSin iPiiB iiiisiiXi}ifiiii iiiiiiin iIi^iiTiixiiriii_iibiiOiPiQiRiSN(iiii< iTii iQii i iii<iw ii9 ii iiiiZ iiiqi i i iW iyiiieioiiv ii i iiii iLiBi iQ iiiiQiiiEiifiiiiiiiidi( iyiii,i i i iE ieii iWiRiii| iR ii i i iX i ijiEii i+iPiiiiiii;iim iiliiii iiieiHiiiii^ iki6ii" iFi i i i i^ i i iiii?i`iui$i1i ii i iUi i1 iiigiliii iiGii2iiq i i iPiiL iiiiiy iii i iMiikii4 iiiiii i i@ i`ii5iiiN ii7i3iiniiuiGi&ibihi0i iM iNii i iY i ii i iii i&i iQi i7i^ihiiiJ iiPiiiiriiiiiiiT i-iGi,iui i ii$iii<iBii$i>iiix iLiWiiini iiTiiZii i i{ iii i iii i?i i0ibi;iDiiiiiCiSiiUi> i|igi ii i iKii iiii% i5 iiiiil iihi iii?ii i iUiiii ii*i< i i ii ii iii ii i! 
iX iii iiQ igiiii iiri ii iqii iiz ii i3ii iii i i]iciziii7 ii_ii iCii&iaiHi| ili ii=iiiIiPi ii~ i8i`i iw i i iTii i iiii iiiiii[iq iii igii|iCi ii iN iIiii5 i i(ii/iHiK i iiiii9iiIiii|iiii ii iWii iii^i~iiii i i= iXii3i iIiii iiiii3iriiiioi ii i{iiiiCii!iHi iiiifii= ii1i[i i| iDii%iitii i ii ii.iiiiyiiTiii9 i i i#iiiJiiniiii ii>iBi"iiiai iiMiKii iW i i i i! ir iFi+ i i i iix iHiJiiiii2iKiiR i$ii9i it ii ii iiiiiiiiii%i~i i` iiiii'iviii ii|ii'i iin i*ii iiFii%ii)i~ii iiiJi&ii i!iD iiwii[i ii i i i' i iikiii i i iiiiiiiih iBi_i i?iigihii'iiiiiiiiiiiii5i] ii iiiiiiii1 ii)ia iiiii; iii_iiiQi/iu i i iji[ii}ii iii iiiiQi^ i iGiizi iiiViOii ii ii imiui i6ii i-i$i iiRii} i ipir i] i iDi iiiiiiiiiiii1iniiiitiYiG i i i/ i|iti0i iiiii\i i iivii iiciiii i~iAi"ii" i!i8iCi8 ii iji i1iili>i ii] i i i~iiLi(iiiKidiiviDiiii%i iii i, ii2ii iiij i-i.i imiKi iiioi i;i ii idi@iai iM i1 iLi@i2 iiwiitiQi i/ii ii~iiiii i/ii i_ i#i iOiiOiGiiiki[iOioiii- i iii i!i i iIiiii{iri i.ihi i iT iiii'iL iaii i@iii iii iT i/ iiiii iiMiii il iii i{i8i#i iii9iiSiRi iwiiZii0 iEigig ii i iiLiiipiiiSi iiiii i@iiI iLi; iOiqiii) i i:iFii5iJidiA ii i) iEiLiYiiii im i ii i iiic iiRii!iiii ii(iii]iIii i iieiiii iiii`ii iii ii[iiC i(iPi,i i}iO i' iLi\ i i i iiiiZiiisi'i iUiiiiiN iiii i iiMiiiiii iiiiiigi'iii ii iiPi0 is iiiip i iniiii i\i iii i!ihiiimi(i& i iCiiBi)i5 iiiiiii iii>iiiii i ii{iNiiiiEi3iiJi#iicii ii$ ii5iixi i]iiikii iiiiP ii iiiriiL i izidiii#i iii[ iSiihiiGiiiii iUi+i iyiiwi"iiVi2 i&izi i iiJ iP i iiiii% iZi$ io iT iKi1 i ii`iVi ii id ii iii+i iiAii iiii$ii i_i3i\ iiiiiibi ifiiiii iAiniiixiiFi iOi[iii(i ii i#i7i2 ixiif i i i!i6i{ iiiYiiii+iiiii iiii i iKiei i i iTi$iiiih ii iRiii"i iiZi i_ ii@iiai iii3 i iki)ijiii ieii iA i^ii iW iiihi: iz i) ii| iii ii i8 ijii iii3i i iziipiyi i i2 ioiD i=ii.ii9 iaifii*i_ii i*ii i i ii i- ii i}ii i ii* i itiiti6 iqiiiiiii iSi iii!i)iiyi iii ij ii iiViIiiig iiiLii iipiDii i ii%ii iM iiii$i i iiE i"iU iie i i|i iQiii ii ii i1 iiiii$i i]i4i&iiiziii`iiwi iiii i)i% i&iiii*iii8i2i> iSi iii1i#i ii ii#i i iiii iF iii, i> i+i i iiZ i3iqii i$ii iVi i;iiiNiimiKiWiiii5i i iRi4i iIiiDiiii i i i iiii%iKi|iii iiiii9iin iii i-i3 i2 ipiii? ii~iiiiiiCii[ii iii i3 i> iili i`ix iiii i# iiii0iiii iL i iiQiiRii?i(imii` ii3iiii& i ii isiDii0 ib ifi@ i iii ii=i i i<iiii ii6iiis iii iSiii i8iZ iii ii iii i ii iiiiNi i i iLi iJ iii ibiig iii[ iiiiii~ ii i iiifiiiii i i` i4i= iiiili? iici ii iiii iiiiiiiiwiii i%ii id i^i+iii iZiiF iI iit ii i/i iiiyi* isi&i+iii) iii iii i i ie iiiQi] ii i_i7 ii0iciuiiiLiki iiiii#iiiiiiii?iiii ini i iii iii iHiiYiiu ii i iiJii_ i_i&iiii iiiZi%ii isiiSii i iViiiiii1i'iiiFi\i i|i iWiii|iiSiWi i* i iRiliPi#i i i-iq iii i ii4iiiiiia iiiii@ i iXiii%iii8 i3i,ii} iiiidixi ii# ii6i i i.iiiAi iir i i iR idiii+ iiii i{i{iHi i iiiiiici i i iii,iiI iici ii iii=i iJi. i,i>iiiiLiei]ii iC iiii*i; i ii iipi iiiRii9 i i ii iii?i:i i i iQi'isiiwi[i ii-i i)iiiMiiiii]i}i6i@i iiUiiii/iiis i iiik i? ii, i= i1iMi iii/iS id iiio i6 ikiui iiiWi{iixiWii iiiix i^ iii[ i+ i7i:iiU iii!iii i.iHiTiiYiii ii@i"iCisi i i iG ibii7i~i. iUi iqi iiii{iiiiYi/i iiiAi ii*i:i8i'i i ii iiA i_i0iiiiiiiiKi i iit i* iii iiii i i iiiDiiii>iiivii i i'iiViii iiikii i\iiiii!i ii*iU iiiiiizii\if iii i iiii iiii i iiB iii iii{ii=i ii&i iiV i ii"i! i i iii i( i=ii i ii9i ii4iaiii-iQ ii ii imiiiiiCiii7i}iqiDi4iiuii iiiv iiiF iiu ia iiH ioi i iliKiA iii izi4 i i! 
iH ii ii5 iijii`ii iB i:iOiai i#iiiidiii(i i& ii^imig iE iio ii(iG i$i@i iIiiiAi ii`iXii7ii/ii$ii iihiiii iUi"iTiiiiimiii!iiiiii iiMi iiii i2iqiii9iii iiiii/iiii ipi@iX iijiii i^ ibi i iniiviiiioioiiiviii0ii9ii$ iifiS izi iiDi{iiXiii iVi3 iYi ii iAiiiiiiqiU i i: ii2iji ii i{ ii iFi i i ii iiiiciiiR i;ik ihii i iiY iOixiiziZi<iv ii i i iiIiii4 iMiiiiib iiii ii iiCii iiiO ii8iii i.i]i iVic i iii iH iiTiii ii> i ii i3iiii,iiifiiiiliTiWiHi.i'i>ipiiii"ii iJii i ivi2iii ixi i2iigi i4iii i iii{ iz i i i iii iiiiii?iiii iJiiii i i8iiiE ij i\ii8i8 ii iiii`ii>ibiSiiii i iii0i iiiEi iidiiii1iiiini@ii iiTii iiiQiiCii iK i;ikiiiiii_i\ ieiiwiiiiiMii. iriaiii i ii iiiigii i' i iii i iTiii4ioii iXiyiixii i i iU i^i: iiii ii"i#iii ii iiiiNiUiiip ii" i-i i iii:ii ii iiii iiiW i iG iLii iGiw ioiii iMir iii iiQ iiI iNi%i iXiii i7 i iaii iii i' iiihi i}iNiyi?itii iD iiiii iliiii iiie iJisi2i*iyiiiiii iiqi ii5i}is i$iiii5 iBiiii# idisi} ii~iiib iiO i/ i]iii i]i2iiifi7iiiOi iRii# i i[iiEii if iS i idiFiiiiC iV i>ii iiBii i i1i6imiii iiiiip i ii i i<izi i| iXiiiisie i]iiii i iiOi i+i iiiiii:i iAii ii ii( i imii4iP ii inii ioioiiimiiyiii8i ii<iii<ic iC iii&iwi i i$i i9i&ii i)i`iiiiiiGibiEii:iQio iF i iii i iFi|i;i iii3i4 i%ii:i iciipi6 i\iCii0i9 ii iiii~ iii"ih iiii i)ii i iii* ii i i i%ii iiii i.iii9i iii iii ii"i! ii, i ig iJ igiF iiii ii;ii iZiEi^iA iiili5ii ii4i' iiiki ii i2ii ii iXi7iii i{iiBixi=iei(ii iii'iiiiiiiiiii iD iiikii iii-i, i iei7 i iii iiiini} ip i7ijii ii9ii iiiii0iPi iiiiiiO i i i iiUi iii< iii iJiitiNii)i i.iii*i iAiir i] i)iiiiiGii_ ipik iii i% i ii ii>ibi_i^i?ii:iYiDi%iii iiiiiiYii i iiiiil iAii i i iDiBi i ii i@ i(iv iiiigi i i}iviiii iiiii iiiiiiii i-i~iiy i%i i i_i iiiiiijiMi iiGi+i iiii iiii3 i;i iGi)ii iii i ii ii i ii,iIi iii iXiAi\iiiiP ivii iVii=i i i6 i<iiX i i i ii8 i- iY iiiii iiiciiii9i i- iuii ii iR i i ii6ii iih i}i i? iw i ii ia i( i ii1ii ii>iii i ii]i iXiriPi0 i,ii+ ijii`iiiii. 
iii ii& iWiB iii|i7 iUii iiWiiYii iiw i iii iiiiiuiibiiq iiibiii ii=i$ iiNiJiiiUiiii iiiiiKi,iii4iri{i iiHiipi5iii=i i iEi9ii;i i<ii5i iii ii\iiiipiiiiiriiNiii^iiiiiK ii i*ii{ i+ i ii*i8iiii iiii ij ii@il i6ii2i8ii iiii'iiiiB iiii\iiiS iV ii~iiiii i iiVi iz i i^i i iii_ i} iG iqiiI i iii8itihiS ii}iRi i9iiSin iPiiB iiiisiiXi}ifiiii iiiiiiin iIi^iiTiixiiriii_iibiiOiPiQiRiS(t!GB2312_TYPICAL_DISTRIBUTION_RATIOtGB2312_TABLE_SIZEtGB2312_CHAR_TO_FREQ_ORDER(((sB/usr/lib/python2.7/site-packages/pip/_vendor/chardet/gb2312freq.pyt*sPKZ+q[[6site-packages/pip/_vendor/chardet/langturkishmodel.pycnu[ abc@s@dZdZied6ed6dd6ed6dd6dd6ZdS(iii%i/i'ii4i$i-i5i<ii1ii.i*i0iEi,i#ii3i&i>iAi+i8iiii iiiiiii ii iiii@iii ii i9i:i iiiiiiiiiiiiiiiiiiiiiiiieiiiiiiiiijiiiiiiiidiiiiiiiii^iPi]iiiiii?iiiiiii~i}i|ihiIiciOiUi{i6izibi\iyixi[igiwiDiviuiaitisi2iZiriqipioi7i)i(iViYiFi;iNiGiRiXi!iMiBiTiSiniKi=i`iiCimiJiWifi"i_iQiliLiHiiiikitchar_to_order_maptprecedence_matrixgX4 ?ttypical_positive_ratiotkeep_english_letters ISO-8859-9t charset_nametTurkishtlanguageN(iiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiii%i/i'ii4i$i-i5i<ii1ii.i*i0iEi,i#ii3i&i>iAi+i8iiiiiiiiii iiiiiii ii iiii@iii ii i9i:i iiiiiiiiiiiiiiiiiiiiiiiiiiiiieiiiiiiiiijiiiiiiiidiiiiiiiii^iPi]iiiiii?iiiiiii~i}i|ihiIiciOiUi{i6izibi\iyixi[igiwiDiviuiaitisi2iZiriqipioi7i)i(iViYiFi;iNiGiRiXi!iMiBiTiSiniKi=i`iiCimiJiWifi"i_iQiliLiHiiiik(iiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiii
iiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiii(tLatin5_TurkishCharToOrderMaptTurkishLangModeltTruetLatin5TurkishModel(((sH/usr/lib/python2.7/site-packages/pip/_vendor/chardet/langturkishmodel.pyt%s,PKZݾuN9 9 +site-packages/pip/_vendor/chardet/enums.pycnu[ abc@sdZdefdYZdefdYZdefdYZdefdYZd efd YZd efd YZd S(sr All of the Enums that are used throughout the chardet package. :author: Dan Blanchard (dan.blanchard@gmail.com) t InputStatecBs eZdZdZdZdZRS(sS This enum represents the different states a universal detector can be in. iii(t__name__t __module__t__doc__t PURE_ASCIIt ESC_ASCIIt HIGH_BYTE(((s=/usr/lib/python2.7/site-packages/pip/_vendor/chardet/enums.pyRstLanguageFiltercBsJeZdZdZdZdZdZdZdZeeBZ e eBeBZ RS(sj This enum represents the different language filters we can apply to a ``UniversalDetector``. iiiiii( RRRtCHINESE_SIMPLIFIEDtCHINESE_TRADITIONALtJAPANESEtKOREANtNON_CJKtALLtCHINESEtCJK(((s=/usr/lib/python2.7/site-packages/pip/_vendor/chardet/enums.pyRs t ProbingStatecBs eZdZdZdZdZRS(sG This enum represents the different states a prober can be in. iii(RRRt DETECTINGtFOUND_ITtNOT_ME(((s=/usr/lib/python2.7/site-packages/pip/_vendor/chardet/enums.pyR st MachineStatecBs eZdZdZdZdZRS(sN This enum represents the different states a state machine can be in. iii(RRRtSTARTtERRORtITS_ME(((s=/usr/lib/python2.7/site-packages/pip/_vendor/chardet/enums.pyR)stSequenceLikelihoodcBs5eZdZdZdZdZdZedZRS(sX This enum represents the likelihood of a character following the previous one. 
iiiicCsdS(s::returns: The number of likelihood categories in the enum.i((tcls((s=/usr/lib/python2.7/site-packages/pip/_vendor/chardet/enums.pytget_num_categories;s( RRRtNEGATIVEtUNLIKELYtLIKELYtPOSITIVEt classmethodR(((s=/usr/lib/python2.7/site-packages/pip/_vendor/chardet/enums.pyR2s tCharacterCategorycBs,eZdZdZdZdZdZdZRS(s This enum represents the different categories language models for ``SingleByteCharsetProber`` put characters into. Anything less than CONTROL is considered a letter. iiiii(RRRt UNDEFINEDt LINE_BREAKtSYMBOLtDIGITtCONTROL(((s=/usr/lib/python2.7/site-packages/pip/_vendor/chardet/enums.pyR As N(RtobjectRRRRRR (((s=/usr/lib/python2.7/site-packages/pip/_vendor/chardet/enums.pyts    PKZMy5site-packages/pip/_vendor/chardet/sbcsgroupprober.pyonu[ abc@sddlmZddlmZddlmZmZmZmZm Z m Z ddl m Z m Z ddlmZmZddlmZddlmZddlmZdd lmZd efd YZd S( i(tCharSetGroupProber(tSingleByteCharSetProber(tWin1251CyrillicModelt Koi8rModeltLatin5CyrillicModeltMacCyrillicModelt Ibm866Modelt Ibm855Model(tLatin7GreekModeltWin1253GreekModel(tLatin5BulgarianModeltWin1251BulgarianModel(tTIS620ThaiModel(tWin1255HebrewModel(t HebrewProber(tLatin5TurkishModeltSBCSGroupProbercBseZdZRS(c Cstt|jtttttttttttt tt tt tt tt ttttg |_t}ttt|}ttt|}|j|||jj|||g|jdS(N(tsuperRt__init__RRRRRRRRR R R R RtprobersRR tFalsetTruetset_model_proberstextendtreset(tselft hebrew_probertlogical_hebrew_probertvisual_hebrew_prober((sG/usr/lib/python2.7/site-packages/pip/_vendor/chardet/sbcsgroupprober.pyR,s,                (t__name__t __module__R(((sG/usr/lib/python2.7/site-packages/pip/_vendor/chardet/sbcsgroupprober.pyR+sN(tcharsetgroupproberRtsbcharsetproberRtlangcyrillicmodelRRRRRRtlanggreekmodelRR tlangbulgarianmodelR R t langthaimodelR tlanghebrewmodelR t hebrewproberRtlangturkishmodelRR(((sG/usr/lib/python2.7/site-packages/pip/_vendor/chardet/sbcsgroupprober.pyts.PKZ{7bb,site-packages/pip/_vendor/chardet/jpcntx.pyonu[ 
abc@sRdddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddfSdddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddfSdddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddfSdddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddfSdddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddfSdddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddfSdddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddfSdddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddfSdddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddfSdddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddfSdddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddfSdddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddfSdddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddfSdddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddfSdddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddfSdddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddfSdddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddfSdddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddfSdddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddfSdddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddfSdddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddfSdddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddfSdddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddfSdddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddfSdddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddfSdddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddfSdddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddfSdddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddfSdddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddfSdddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddfSdddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddfSdddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddfSdddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddfSdddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddfSdddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddfSdddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddfSdddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddfSdddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddfSdddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddfSdddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddfSdddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddfSdddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddd
dddddddddddddddddddfSdddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddfSdddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddfSdddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddfSdddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddfSdddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddfSdddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddfSdddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddfSdddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddfSdddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddfSdddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddfSdddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddfSdddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddfSdddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddfSdddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddfSdddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddfSdddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddfSdddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddfSdddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddfSdddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddfSdddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddfSdddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddfSdddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddfSdddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddfSdddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddfSdddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddfSdddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddfSdddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddfSdddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddfSdddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddfSdddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddfSdddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddfSdddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddfSdddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddfSdddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddfSdddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddfSdddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddfSdddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddfSdddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddfSdddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddfSdddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddfSdddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddfSfSZdefdYZdefd YZd efd YZd S( 
[compiled module pip/_vendor/chardet/jpcntx.pyc: bytecode for the JapaneseContextAnalysis, SJISContextAnalysis and EUCJPContextAnalysis classes and their context tables; not recoverable as readable source]
[compiled module pip/_vendor/chardet/sbcsgroupprober.pyc: bytecode for SBCSGroupProber, which instantiates the single-byte model probers (Cyrillic, Greek, Bulgarian, Thai, Hebrew, Turkish) and wires the HebrewProber to a logical and a visual Hebrew model prober via set_model_probers]
[compiled module pip/_vendor/chardet/codingstatemachine.pyo: bytecode for CodingStateMachine. Its docstring survives: "A state machine to verify a byte sequence for a particular encoding. For each byte the detector receives, it will feed that byte to every active state machine available, one byte at a time. The state machine changes its state based on its previous state and the byte it receives. There are 3 states in a state machine that are of interest to an auto-detector: START state: this is the state to start with, or a legal byte sequence (i.e. a valid code point) for a character has been identified. ME state: this indicates that the state machine identified a byte sequence that is specific to the charset it is designed for, and that there is no other possible encoding which can contain this byte sequence; this leads to an immediate positive answer for the detector. ERROR state: this indicates the state machine identified an illegal byte sequence for that encoding; this leads to an immediate negative answer, and the detector will exclude this encoding from consideration from here on."]
[compiled module pip/_vendor/chardet/sjisprober.pyc: bytecode for SJISProber, which combines the Shift_JIS coding state machine, SJISDistributionAnalysis and SJISContextAnalysis]
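All of the multi-byte probers in this archive drive such a state machine the same way; the readable eucjpprober.py source further down shows the loop in full. The following is a minimal illustrative sketch of that per-byte pattern, not part of the archive itself; the import paths assume the modules are used from their vendored location under pip._vendor, and the input bytes are an arbitrary Shift_JIS example.

from pip._vendor.chardet.codingstatemachine import CodingStateMachine
from pip._vendor.chardet.enums import MachineState
from pip._vendor.chardet.mbcssm import SJIS_SM_MODEL

# One state machine per candidate encoding; each byte moves it to START
# (still a legal sequence), ITS_ME (a sequence unique to this charset) or
# ERROR (illegal for this charset).
sm = CodingStateMachine(SJIS_SM_MODEL)
for byte in bytearray(b'\x82\xa0\x82\xa2'):  # arbitrary Shift_JIS bytes
    state = sm.next_state(byte)
    if state == MachineState.ERROR:
        print('not Shift_JIS')
        break
    elif state == MachineState.ITS_ME:
        print('unmistakably Shift_JIS')
        break
else:
    print('still consistent with Shift_JIS')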
PKZ)113site-packages/pip/_vendor/chardet/langgreekmodel.pynu[######################## BEGIN LICENSE BLOCK ######################## # The Original Code is Mozilla Communicator client code. # # The Initial Developer of the Original Code is # Netscape Communications Corporation. # Portions created by the Initial Developer are Copyright (C) 1998 # the Initial Developer. All Rights Reserved. # # Contributor(s): # Mark Pilgrim - port to Python # # This library is free software; you can redistribute it and/or # modify it under the terms of the GNU Lesser General Public # License as published by the Free Software Foundation; either # version 2.1 of the License, or (at your option) any later version. # # This library is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU # Lesser General Public License for more details. # # You should have received a copy of the GNU Lesser General Public # License along with this library; if not, write to the Free Software # Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA # 02110-1301 USA ######################### END LICENSE BLOCK ######################### # 255: Control characters that usually does not exist in any text # 254: Carriage/Return # 253: symbol (punctuation) that does not belong to word # 252: 0 - 9 # Character Mapping Table: Latin7_char_to_order_map = ( 255,255,255,255,255,255,255,255,255,255,254,255,255,254,255,255, # 00 255,255,255,255,255,255,255,255,255,255,255,255,255,255,255,255, # 10 253,253,253,253,253,253,253,253,253,253,253,253,253,253,253,253, # 20 252,252,252,252,252,252,252,252,252,252,253,253,253,253,253,253, # 30 253, 82,100,104, 94, 98,101,116,102,111,187,117, 92, 88,113, 85, # 40 79,118,105, 83, 67,114,119, 95, 99,109,188,253,253,253,253,253, # 50 253, 72, 70, 80, 81, 60, 96, 93, 89, 68,120, 97, 77, 86, 69, 55, # 60 78,115, 65, 66, 58, 76,106,103, 87,107,112,253,253,253,253,253, # 70 255,255,255,255,255,255,255,255,255,255,255,255,255,255,255,255, # 80 255,255,255,255,255,255,255,255,255,255,255,255,255,255,255,255, # 90 253,233, 90,253,253,253,253,253,253,253,253,253,253, 74,253,253, # a0 253,253,253,253,247,248, 61, 36, 46, 71, 73,253, 54,253,108,123, # b0 110, 31, 51, 43, 41, 34, 91, 40, 52, 47, 44, 53, 38, 49, 59, 39, # c0 35, 48,250, 37, 33, 45, 56, 50, 84, 57,120,121, 17, 18, 22, 15, # d0 124, 1, 29, 20, 21, 3, 32, 13, 25, 5, 11, 16, 10, 6, 30, 4, # e0 9, 8, 14, 7, 2, 12, 28, 23, 42, 24, 64, 75, 19, 26, 27,253, # f0 ) win1253_char_to_order_map = ( 255,255,255,255,255,255,255,255,255,255,254,255,255,254,255,255, # 00 255,255,255,255,255,255,255,255,255,255,255,255,255,255,255,255, # 10 253,253,253,253,253,253,253,253,253,253,253,253,253,253,253,253, # 20 252,252,252,252,252,252,252,252,252,252,253,253,253,253,253,253, # 30 253, 82,100,104, 94, 98,101,116,102,111,187,117, 92, 88,113, 85, # 40 79,118,105, 83, 67,114,119, 95, 99,109,188,253,253,253,253,253, # 50 253, 72, 70, 80, 81, 60, 96, 93, 89, 68,120, 97, 77, 86, 69, 55, # 60 78,115, 65, 66, 58, 76,106,103, 87,107,112,253,253,253,253,253, # 70 255,255,255,255,255,255,255,255,255,255,255,255,255,255,255,255, # 80 255,255,255,255,255,255,255,255,255,255,255,255,255,255,255,255, # 90 253,233, 61,253,253,253,253,253,253,253,253,253,253, 74,253,253, # a0 253,253,253,253,247,253,253, 36, 46, 71, 73,253, 54,253,108,123, # b0 110, 31, 51, 43, 41, 34, 91, 40, 52, 47, 44, 53, 38, 49, 59, 39, # c0 35, 48,250, 37, 33, 45, 56, 50, 84, 57,120,121, 17, 18, 
22, 15, # d0 124, 1, 29, 20, 21, 3, 32, 13, 25, 5, 11, 16, 10, 6, 30, 4, # e0 9, 8, 14, 7, 2, 12, 28, 23, 42, 24, 64, 75, 19, 26, 27,253, # f0 ) # Model Table: # total sequences: 100% # first 512 sequences: 98.2851% # first 1024 sequences:1.7001% # rest sequences: 0.0359% # negative sequences: 0.0148% GreekLangModel = ( 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,3,2,2,3,3,3,3,3,3,3,3,1,3,3,3,0,2,2,3,3,0,3,0,3,2,0,3,3,3,0, 3,0,0,0,2,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,3,3,3,3,3,0,3,3,0,3,2,3,3,0,3,2,3,3,3,0,0,3,0,3,0,3,3,2,0,0,0, 2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0, 0,2,3,2,2,3,3,3,3,3,3,3,3,0,3,3,3,3,0,2,3,3,0,3,3,3,3,2,3,3,3,0, 2,0,0,0,2,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,2,3,3,2,3,3,3,3,3,3,3,3,3,3,3,3,0,2,1,3,3,3,3,2,3,3,2,3,3,2,0, 0,0,0,0,2,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,3,3,3,3,0,3,3,3,3,3,3,0,3,3,0,3,3,3,3,3,3,3,3,3,3,0,3,2,3,3,0, 2,0,1,0,2,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0, 0,3,3,3,3,3,2,3,0,0,0,0,3,3,0,3,1,3,3,3,0,3,3,0,3,3,3,3,0,0,0,0, 2,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,3,3,3,3,3,0,3,0,3,3,3,3,3,0,3,2,2,2,3,0,2,3,3,3,3,3,2,3,3,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,3,3,3,3,3,3,2,2,2,3,3,3,3,0,3,1,3,3,3,3,2,3,3,3,3,3,3,3,2,2,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,3,3,3,3,3,2,0,3,0,0,0,3,3,2,3,3,3,3,3,0,0,3,2,3,0,2,3,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,3,0,3,3,3,3,0,0,3,3,0,2,3,0,3,0,3,3,3,0,0,3,0,3,0,2,2,3,3,0,0, 0,0,1,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,3,3,3,3,3,2,0,3,2,3,3,3,3,0,3,3,3,3,3,0,3,3,2,3,2,3,3,2,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,3,3,2,3,2,3,3,3,3,3,3,0,2,3,2,3,2,2,2,3,2,3,3,2,3,0,2,2,2,3,0, 2,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,3,0,0,0,3,3,3,2,3,3,0,0,3,0,3,0,0,0,3,2,0,3,0,3,0,0,2,0,2,0, 0,0,0,0,2,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,3,3,3,3,0,3,3,3,3,3,3,0,3,3,0,3,0,0,0,3,3,0,3,3,3,0,0,1,2,3,0, 3,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,3,3,3,3,3,2,0,0,3,2,2,3,3,0,3,3,3,3,3,2,1,3,0,3,2,3,3,2,1,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,3,3,0,2,3,3,3,3,3,3,0,0,3,0,3,0,0,0,3,3,0,3,2,3,0,0,3,3,3,0, 3,0,0,0,2,0,0,0,0,0,3,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,3,3,3,3,0,3,3,3,3,3,3,0,0,3,0,3,0,0,0,3,2,0,3,2,3,0,0,3,2,3,0, 2,0,0,0,0,0,0,0,0,0,3,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,3,1,2,2,3,3,3,3,3,3,0,2,3,0,3,0,0,0,3,3,0,3,0,2,0,0,2,3,1,0, 2,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,3,0,3,3,3,3,0,3,0,3,3,2,3,0,3,3,3,3,3,3,0,3,3,3,0,2,3,0,0,3,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,3,0,3,3,3,0,0,3,0,0,0,3,3,0,3,0,2,3,3,0,0,3,0,3,0,3,3,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,3,0,0,0,3,3,3,3,3,3,0,0,3,0,2,0,0,0,3,3,0,3,0,3,0,0,2,0,2,0, 0,0,0,0,1,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,3,3,3,3,3,3,0,3,0,2,0,3,2,0,3,2,3,2,3,0,0,3,2,3,2,3,3,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,3,0,0,2,3,3,3,3,3,0,0,0,3,0,2,1,0,0,3,2,2,2,0,3,0,0,2,2,0,0, 
0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,3,0,3,3,3,2,0,3,0,3,0,3,3,0,2,1,2,3,3,0,0,3,0,3,0,3,3,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,2,3,3,3,0,3,3,3,3,3,3,0,2,3,0,3,0,0,0,2,1,0,2,2,3,0,0,2,2,2,0, 0,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,3,0,0,2,3,3,3,2,3,0,0,1,3,0,2,0,0,0,0,3,0,1,0,2,0,0,1,1,1,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,3,3,3,3,3,1,0,3,0,0,0,3,2,0,3,2,3,3,3,0,0,3,0,3,2,2,2,1,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,3,0,3,3,3,0,0,3,0,0,0,0,2,0,2,3,3,2,2,2,2,3,0,2,0,2,2,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,3,3,3,3,2,0,0,0,0,0,0,2,3,0,2,0,2,3,2,0,0,3,0,3,0,3,1,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,3,2,3,3,2,2,3,0,2,0,3,0,0,0,2,0,0,0,0,1,2,0,2,0,2,0, 0,2,0,2,0,2,2,0,0,1,0,2,2,2,0,2,2,2,0,2,2,2,0,0,2,0,0,1,0,0,0,0, 0,2,0,3,3,2,0,0,0,0,0,0,1,3,0,2,0,2,2,2,0,0,2,0,3,0,0,2,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,3,0,2,3,2,0,2,2,0,2,0,2,2,0,2,0,2,2,2,0,0,0,0,0,0,2,3,0,0,0,2, 0,1,2,0,0,0,0,2,2,0,0,0,2,1,0,2,2,0,0,0,0,0,0,1,0,2,0,0,0,0,0,0, 0,0,2,1,0,2,3,2,2,3,2,3,2,0,0,3,3,3,0,0,3,2,0,0,0,1,1,0,2,0,2,2, 0,2,0,2,0,2,2,0,0,2,0,2,2,2,0,2,2,2,2,0,0,2,0,0,0,2,0,1,0,0,0,0, 0,3,0,3,3,2,2,0,3,0,0,0,2,2,0,2,2,2,1,2,0,0,1,2,2,0,0,3,0,0,0,2, 0,1,2,0,0,0,1,2,0,0,0,0,0,0,0,2,2,0,1,0,0,2,0,0,0,2,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,2,3,3,2,2,0,0,0,2,0,2,3,3,0,2,0,0,0,0,0,0,2,2,2,0,2,2,0,2,0,2, 0,2,2,0,0,2,2,2,2,1,0,0,2,2,0,2,0,0,2,0,0,0,0,0,0,2,0,0,0,0,0,0, 0,2,0,3,2,3,0,0,0,3,0,0,2,2,0,2,0,2,2,2,0,0,2,0,0,0,0,0,0,0,0,2, 0,0,2,2,0,0,2,2,2,0,0,0,0,0,0,2,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,2,0,0,3,2,0,2,2,2,2,2,0,0,0,2,0,0,0,0,2,0,1,0,0,2,0,1,0,0,0, 0,2,2,2,0,2,2,0,1,2,0,2,2,2,0,2,2,2,2,1,2,2,0,0,2,0,0,0,0,0,0,0, 0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0, 0,2,0,2,0,2,2,0,0,0,0,1,2,1,0,0,2,2,0,0,2,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,3,2,3,0,0,2,0,0,0,2,2,0,2,0,0,0,1,0,0,2,0,2,0,2,2,0,0,0,0, 0,0,2,0,0,0,0,2,2,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0, 0,2,2,3,2,2,0,0,0,0,0,0,1,3,0,2,0,2,2,0,0,0,1,0,2,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,2,0,2,0,3,2,0,2,0,0,0,0,0,0,2,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1, 0,0,2,0,0,0,0,1,1,0,0,2,1,2,0,2,2,0,1,0,0,1,0,0,0,2,0,0,0,0,0,0, 0,3,0,2,2,2,0,0,2,0,0,0,2,0,0,0,2,3,0,2,0,0,0,0,0,0,2,2,0,0,0,2, 0,1,2,0,0,0,1,2,2,1,0,0,0,2,0,0,2,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,3,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,2,1,2,0,2,2,0,2,0,0,2,0,0,0,0,1,2,1,0,2,1,0,0,0,0,0,0,0,0,0,0, 0,0,2,0,0,0,3,1,2,2,0,2,0,0,0,0,2,0,0,0,2,0,0,3,0,0,0,0,2,2,2,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,2,1,0,2,0,1,2,0,0,0,0,0,0,0,0,0,0,0,0,0,2,0,0,1,0,0,0,0,0,0,2, 0,2,2,0,0,2,2,2,2,2,0,1,2,0,0,0,2,2,0,1,0,2,0,0,2,2,0,0,0,0,0,0, 0,0,0,0,1,0,0,0,0,0,0,0,3,0,0,2,0,0,0,0,0,0,0,0,2,0,2,0,0,0,0,2, 0,1,2,0,0,0,0,2,2,1,0,1,0,1,0,2,2,2,1,0,0,0,0,0,0,1,0,0,0,0,0,0, 0,2,0,1,2,0,0,0,0,0,0,0,0,0,0,2,0,0,2,2,0,0,0,0,1,0,0,0,0,0,0,2, 0,2,2,0,0,0,0,2,2,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,2,0,0,2,0,0,0, 0,2,2,2,2,0,0,0,3,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,2,0,0,0,0,0,0,1, 0,0,2,0,0,0,0,1,2,0,0,0,0,0,0,2,2,1,1,0,0,0,0,0,0,1,0,0,0,0,0,0, 0,2,0,2,2,2,0,0,2,0,0,0,0,0,0,0,2,2,2,0,0,0,2,0,0,0,0,0,0,0,0,2, 
0,0,1,0,0,0,0,2,1,0,0,0,0,0,0,1,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0, 0,3,0,2,0,0,0,0,0,0,0,0,2,0,0,0,0,0,2,0,0,0,0,0,0,0,2,0,0,0,0,2, 0,0,2,0,0,0,0,2,2,0,0,0,0,1,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,2,0,2,2,1,0,0,0,0,0,0,2,0,0,2,0,2,2,2,0,0,0,0,0,0,2,0,0,0,0,2, 0,0,2,0,0,2,0,2,2,0,0,0,0,2,0,2,0,0,0,0,0,2,0,0,0,2,0,0,0,0,0,0, 0,0,3,0,0,0,2,2,0,2,2,0,0,0,0,0,2,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,1,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2,0,0,2,0,0,0,0,0, 0,2,2,2,2,2,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,1,1,0,0,0,1, 0,0,0,0,0,0,0,2,1,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,2,2,0,0,0,0,0,2,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0, 0,2,0,0,0,2,0,0,0,0,0,1,0,0,0,0,2,2,0,0,0,1,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,1,0,2,0,0,0, 0,2,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,1,0,0,0,0,1,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,1,0,0,2,0,2,0,0,0, 0,0,0,0,0,0,0,0,2,1,0,0,0,0,0,0,2,0,0,0,1,2,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, ) Latin7GreekModel = { 'char_to_order_map': Latin7_char_to_order_map, 'precedence_matrix': GreekLangModel, 'typical_positive_ratio': 0.982851, 'keep_english_letter': False, 'charset_name': "ISO-8859-7", 'language': 'Greek', } Win1253GreekModel = { 'char_to_order_map': win1253_char_to_order_map, 'precedence_matrix': GreekLangModel, 'typical_positive_ratio': 0.982851, 'keep_english_letter': False, 'charset_name': "windows-1253", 'language': 'Greek', } PKZ/553site-packages/pip/_vendor/chardet/charsetprober.pycnu[ abc@sBddlZddlZddlmZdefdYZdS(iNi(t ProbingStatet CharSetProbercBseZdZd dZdZedZdZedZ dZ e dZ e dZ e d ZRS( gffffff?cCs(d|_||_tjt|_dS(N(tNonet_statet lang_filtertloggingt getLoggert__name__tlogger(tselfR((sE/usr/lib/python2.7/site-packages/pip/_vendor/chardet/charsetprober.pyt__init__'s  cCstj|_dS(N(Rt DETECTINGR(R ((sE/usr/lib/python2.7/site-packages/pip/_vendor/chardet/charsetprober.pytreset,scCsdS(N(R(R ((sE/usr/lib/python2.7/site-packages/pip/_vendor/chardet/charsetprober.pyt charset_name/scCsdS(N((R tbuf((sE/usr/lib/python2.7/site-packages/pip/_vendor/chardet/charsetprober.pytfeed3scCs|jS(N(R(R ((sE/usr/lib/python2.7/site-packages/pip/_vendor/chardet/charsetprober.pytstate6scCsdS(Ng((R ((sE/usr/lib/python2.7/site-packages/pip/_vendor/chardet/charsetprober.pytget_confidence:scCstjdd|}|S(Ns([-])+t (tretsub(R((sE/usr/lib/python2.7/site-packages/pip/_vendor/chardet/charsetprober.pytfilter_high_byte_only=scCszt}tjd|}xX|D]P}|j|d |d}|j re|dkred}n|j|q"W|S(s5 We define three types of bytes: alphabet: english alphabets [a-zA-Z] international: international characters [-] marker: everything else [^a-zA-Z-] The input buffer can be thought to contain a series of words delimited by markers. This function works to filter all words that contain at least one international character. All contiguous sequences of markers are replaced by a single space ascii character. 
[remainder of compiled module pip/_vendor/chardet/charsetprober.pyc: bytecode for the CharSetProber base class and its filtering helpers; the readable source of this module appears below as charsetprober.py]
[compiled module pip/_vendor/chardet/langbulgarianmodel.pyo: bytecode for the Latin5 ("ISO-8859-5") and win1251 ("windows-1251") Bulgarian character-to-order maps, the BulgarianLangModel precedence matrix and the Latin5BulgarianModel / Win1251BulgarianModel dictionaries; its frequency tables continue below]
iiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiii(tLatin5_BulgarianCharToOrderMaptwin1251BulgarianCharToOrderMaptBulgarianLangModeltFalsetLatin5BulgarianModeltWin1251BulgarianModel(((sJ/usr/lib/python2.7/site-packages/pip/_vendor/chardet/langbulgarianmodel.pyt&sZ  PKZD}}*site-packages/pip/_vendor/chardet/enums.pynu[""" All of the Enums that are used throughout the chardet package. :author: Dan Blanchard (dan.blanchard@gmail.com) """ class InputState(object): """ This enum represents the different states a universal detector can be in. """ PURE_ASCII = 0 ESC_ASCII = 1 HIGH_BYTE = 2 class LanguageFilter(object): """ This enum represents the different language filters we can apply to a ``UniversalDetector``. """ CHINESE_SIMPLIFIED = 0x01 CHINESE_TRADITIONAL = 0x02 JAPANESE = 0x04 KOREAN = 0x08 NON_CJK = 0x10 ALL = 0x1F CHINESE = CHINESE_SIMPLIFIED | CHINESE_TRADITIONAL CJK = CHINESE | JAPANESE | KOREAN class ProbingState(object): """ This enum represents the different states a prober can be in. """ DETECTING = 0 FOUND_IT = 1 NOT_ME = 2 class MachineState(object): """ This enum represents the different states a state machine can be in. """ START = 0 ERROR = 1 ITS_ME = 2 class SequenceLikelihood(object): """ This enum represents the likelihood of a character following the previous one. """ NEGATIVE = 0 UNLIKELY = 1 LIKELY = 2 POSITIVE = 3 @classmethod def get_num_categories(cls): """:returns: The number of likelihood categories in the enum.""" return 4 class CharacterCategory(object): """ This enum represents the different categories language models for ``SingleByteCharsetProber`` put characters into. Anything less than CONTROL is considered a letter. """ UNDEFINED = 255 LINE_BREAK = 254 SYMBOL = 253 DIGIT = 252 CONTROL = 251 PKZB1??0site-packages/pip/_vendor/chardet/cp949prober.pynu[######################## BEGIN LICENSE BLOCK ######################## # The Original Code is mozilla.org code. # # The Initial Developer of the Original Code is # Netscape Communications Corporation. # Portions created by the Initial Developer are Copyright (C) 1998 # the Initial Developer. All Rights Reserved. # # Contributor(s): # Mark Pilgrim - port to Python # # This library is free software; you can redistribute it and/or # modify it under the terms of the GNU Lesser General Public # License as published by the Free Software Foundation; either # version 2.1 of the License, or (at your option) any later version. # # This library is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU # Lesser General Public License for more details. 
# # You should have received a copy of the GNU Lesser General Public # License along with this library; if not, write to the Free Software # Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA # 02110-1301 USA ######################### END LICENSE BLOCK ######################### from .chardistribution import EUCKRDistributionAnalysis from .codingstatemachine import CodingStateMachine from .mbcharsetprober import MultiByteCharSetProber from .mbcssm import CP949_SM_MODEL class CP949Prober(MultiByteCharSetProber): def __init__(self): super(CP949Prober, self).__init__() self.coding_sm = CodingStateMachine(CP949_SM_MODEL) # NOTE: CP949 is a superset of EUC-KR, so the distribution should be # not different. self.distribution_analyzer = EUCKRDistributionAnalysis() self.reset() @property def charset_name(self): return "CP949" @property def language(self): return "Korean" PKZ^ 0site-packages/pip/_vendor/chardet/utf8prober.pycnu[ abc@s`ddlmZddlmZmZddlmZddlmZdefdYZ dS(i(t CharSetProber(t ProbingStatet MachineState(tCodingStateMachine(t UTF8_SM_MODELt UTF8ProbercBsPeZdZdZdZedZedZdZdZ RS(g?cCs9tt|jtt|_d|_|jdS(N( tsuperRt__init__RRt coding_smtNonet _num_mb_charstreset(tself((sB/usr/lib/python2.7/site-packages/pip/_vendor/chardet/utf8prober.pyR&s cCs-tt|j|jjd|_dS(Ni(RRR RR (R ((sB/usr/lib/python2.7/site-packages/pip/_vendor/chardet/utf8prober.pyR ,s cCsdS(Nsutf-8((R ((sB/usr/lib/python2.7/site-packages/pip/_vendor/chardet/utf8prober.pyt charset_name1scCsdS(Nt((R ((sB/usr/lib/python2.7/site-packages/pip/_vendor/chardet/utf8prober.pytlanguage5scCsx|D]}|jj|}|tjkr>tj|_Pq|tjkr]tj|_Pq|tj kr|jj dkr|j d7_ qqqW|j tj kr|j|jkrtj|_qn|j S(Nii(Rt next_stateRtERRORRtNOT_MEt_statetITS_MEtFOUND_ITtSTARTtget_current_charlenR tstatet DETECTINGtget_confidencetSHORTCUT_THRESHOLD(R tbyte_strtct coding_state((sB/usr/lib/python2.7/site-packages/pip/_vendor/chardet/utf8prober.pytfeed9s   cCs9d}|jdkr1||j|j9}d|S|SdS(NgGz?ig?(R t ONE_CHAR_PROB(R tunlike((sB/usr/lib/python2.7/site-packages/pip/_vendor/chardet/utf8prober.pyRLs ( t__name__t __module__R RR tpropertyR RRR(((sB/usr/lib/python2.7/site-packages/pip/_vendor/chardet/utf8prober.pyR#s   N( t charsetproberRtenumsRRtcodingstatemachineRtmbcssmRR(((sB/usr/lib/python2.7/site-packages/pip/_vendor/chardet/utf8prober.pytsPKZa VV1site-packages/pip/_vendor/chardet/euckrprober.pyonu[ abc@sZddlmZddlmZddlmZddlmZdefdYZdS(i(tMultiByteCharSetProber(tCodingStateMachine(tEUCKRDistributionAnalysis(tEUCKR_SM_MODELt EUCKRProbercBs/eZdZedZedZRS(cCs<tt|jtt|_t|_|jdS(N( tsuperRt__init__RRt coding_smRtdistribution_analyzertreset(tself((sC/usr/lib/python2.7/site-packages/pip/_vendor/chardet/euckrprober.pyR#s cCsdS(NsEUC-KR((R ((sC/usr/lib/python2.7/site-packages/pip/_vendor/chardet/euckrprober.pyt charset_name)scCsdS(NtKorean((R ((sC/usr/lib/python2.7/site-packages/pip/_vendor/chardet/euckrprober.pytlanguage-s(t__name__t __module__RtpropertyR R (((sC/usr/lib/python2.7/site-packages/pip/_vendor/chardet/euckrprober.pyR"s N( tmbcharsetproberRtcodingstatemachineRtchardistributionRtmbcssmRR(((sC/usr/lib/python2.7/site-packages/pip/_vendor/chardet/euckrprober.pytsPKZ 7Х0site-packages/pip/_vendor/chardet/eucjpprober.pynu[######################## BEGIN LICENSE BLOCK ######################## # The Original Code is mozilla.org code. # # The Initial Developer of the Original Code is # Netscape Communications Corporation. # Portions created by the Initial Developer are Copyright (C) 1998 # the Initial Developer. All Rights Reserved. 
# # Contributor(s): # Mark Pilgrim - port to Python # # This library is free software; you can redistribute it and/or # modify it under the terms of the GNU Lesser General Public # License as published by the Free Software Foundation; either # version 2.1 of the License, or (at your option) any later version. # # This library is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU # Lesser General Public License for more details. # # You should have received a copy of the GNU Lesser General Public # License along with this library; if not, write to the Free Software # Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA # 02110-1301 USA ######################### END LICENSE BLOCK ######################### from .enums import ProbingState, MachineState from .mbcharsetprober import MultiByteCharSetProber from .codingstatemachine import CodingStateMachine from .chardistribution import EUCJPDistributionAnalysis from .jpcntx import EUCJPContextAnalysis from .mbcssm import EUCJP_SM_MODEL class EUCJPProber(MultiByteCharSetProber): def __init__(self): super(EUCJPProber, self).__init__() self.coding_sm = CodingStateMachine(EUCJP_SM_MODEL) self.distribution_analyzer = EUCJPDistributionAnalysis() self.context_analyzer = EUCJPContextAnalysis() self.reset() def reset(self): super(EUCJPProber, self).reset() self.context_analyzer.reset() @property def charset_name(self): return "EUC-JP" @property def language(self): return "Japanese" def feed(self, byte_str): for i in range(len(byte_str)): # PY3K: byte_str is a byte array, so byte_str[i] is an int, not a byte coding_state = self.coding_sm.next_state(byte_str[i]) if coding_state == MachineState.ERROR: self.logger.debug('%s %s prober hit error at byte %s', self.charset_name, self.language, i) self._state = ProbingState.NOT_ME break elif coding_state == MachineState.ITS_ME: self._state = ProbingState.FOUND_IT break elif coding_state == MachineState.START: char_len = self.coding_sm.get_current_charlen() if i == 0: self._last_char[1] = byte_str[0] self.context_analyzer.feed(self._last_char, char_len) self.distribution_analyzer.feed(self._last_char, char_len) else: self.context_analyzer.feed(byte_str[i - 1:i + 1], char_len) self.distribution_analyzer.feed(byte_str[i - 1:i + 1], char_len) self._last_char[0] = byte_str[-1] if self.state == ProbingState.DETECTING: if (self.context_analyzer.got_enough_data() and (self.get_confidence() > self.SHORTCUT_THRESHOLD)): self._state = ProbingState.FOUND_IT return self.state def get_confidence(self): context_conf = self.context_analyzer.get_confidence() distrib_conf = self.distribution_analyzer.get_confidence() return max(context_conf, distrib_conf) PKZ )Ӳ661site-packages/pip/_vendor/chardet/hebrewprober.pynu[######################## BEGIN LICENSE BLOCK ######################## # The Original Code is Mozilla Universal charset detector code. # # The Initial Developer of the Original Code is # Shy Shalom # Portions created by the Initial Developer are Copyright (C) 2005 # the Initial Developer. All Rights Reserved. # # Contributor(s): # Mark Pilgrim - port to Python # # This library is free software; you can redistribute it and/or # modify it under the terms of the GNU Lesser General Public # License as published by the Free Software Foundation; either # version 2.1 of the License, or (at your option) any later version. 
# # This library is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU # Lesser General Public License for more details. # # You should have received a copy of the GNU Lesser General Public # License along with this library; if not, write to the Free Software # Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA # 02110-1301 USA ######################### END LICENSE BLOCK ######################### from .charsetprober import CharSetProber from .enums import ProbingState # This prober doesn't actually recognize a language or a charset. # It is a helper prober for the use of the Hebrew model probers ### General ideas of the Hebrew charset recognition ### # # Four main charsets exist in Hebrew: # "ISO-8859-8" - Visual Hebrew # "windows-1255" - Logical Hebrew # "ISO-8859-8-I" - Logical Hebrew # "x-mac-hebrew" - ?? Logical Hebrew ?? # # Both "ISO" charsets use a completely identical set of code points, whereas # "windows-1255" and "x-mac-hebrew" are two different proper supersets of # these code points. windows-1255 defines additional characters in the range # 0x80-0x9F as some misc punctuation marks as well as some Hebrew-specific # diacritics and additional 'Yiddish' ligature letters in the range 0xc0-0xd6. # x-mac-hebrew defines similar additional code points but with a different # mapping. # # As far as an average Hebrew text with no diacritics is concerned, all four # charsets are identical with respect to code points. Meaning that for the # main Hebrew alphabet, all four map the same values to all 27 Hebrew letters # (including final letters). # # The dominant difference between these charsets is their directionality. # "Visual" directionality means that the text is ordered as if the renderer is # not aware of a BIDI rendering algorithm. The renderer sees the text and # draws it from left to right. The text itself when ordered naturally is read # backwards. A buffer of Visual Hebrew generally looks like so: # "[last word of first line spelled backwards] [whole line ordered backwards # and spelled backwards] [first word of first line spelled backwards] # [end of line] [last word of second line] ... etc' " # adding punctuation marks, numbers and English text to visual text is # naturally also "visual" and from left to right. # # "Logical" directionality means the text is ordered "naturally" according to # the order it is read. It is the responsibility of the renderer to display # the text from right to left. A BIDI algorithm is used to place general # punctuation marks, numbers and English text in the text. # # Texts in x-mac-hebrew are almost impossible to find on the Internet. From # what little evidence I could find, it seems that its general directionality # is Logical. # # To sum up all of the above, the Hebrew probing mechanism knows about two # charsets: # Visual Hebrew - "ISO-8859-8" - backwards text - Words and sentences are # backwards while line order is natural. For charset recognition purposes # the line order is unimportant (In fact, for this implementation, even # word order is unimportant). # Logical Hebrew - "windows-1255" - normal, naturally ordered text. # # "ISO-8859-8-I" is a subset of windows-1255 and doesn't need to be # specifically identified. # "x-mac-hebrew" is also identified as windows-1255. 
A text in x-mac-hebrew # that contain special punctuation marks or diacritics is displayed with # some unconverted characters showing as question marks. This problem might # be corrected using another model prober for x-mac-hebrew. Due to the fact # that x-mac-hebrew texts are so rare, writing another model prober isn't # worth the effort and performance hit. # #### The Prober #### # # The prober is divided between two SBCharSetProbers and a HebrewProber, # all of which are managed, created, fed data, inquired and deleted by the # SBCSGroupProber. The two SBCharSetProbers identify that the text is in # fact some kind of Hebrew, Logical or Visual. The final decision about which # one is it is made by the HebrewProber by combining final-letter scores # with the scores of the two SBCharSetProbers to produce a final answer. # # The SBCSGroupProber is responsible for stripping the original text of HTML # tags, English characters, numbers, low-ASCII punctuation characters, spaces # and new lines. It reduces any sequence of such characters to a single space. # The buffer fed to each prober in the SBCS group prober is pure text in # high-ASCII. # The two SBCharSetProbers (model probers) share the same language model: # Win1255Model. # The first SBCharSetProber uses the model normally as any other # SBCharSetProber does, to recognize windows-1255, upon which this model was # built. The second SBCharSetProber is told to make the pair-of-letter # lookup in the language model backwards. This in practice exactly simulates # a visual Hebrew model using the windows-1255 logical Hebrew model. # # The HebrewProber is not using any language model. All it does is look for # final-letter evidence suggesting the text is either logical Hebrew or visual # Hebrew. Disjointed from the model probers, the results of the HebrewProber # alone are meaningless. HebrewProber always returns 0.00 as confidence # since it never identifies a charset by itself. Instead, the pointer to the # HebrewProber is passed to the model probers as a helper "Name Prober". # When the Group prober receives a positive identification from any prober, # it asks for the name of the charset identified. If the prober queried is a # Hebrew model prober, the model prober forwards the call to the # HebrewProber to make the final decision. In the HebrewProber, the # decision is made according to the final-letters scores maintained and Both # model probers scores. The answer is returned in the form of the name of the # charset identified, either "windows-1255" or "ISO-8859-8". class HebrewProber(CharSetProber): # windows-1255 / ISO-8859-8 code points of interest FINAL_KAF = 0xea NORMAL_KAF = 0xeb FINAL_MEM = 0xed NORMAL_MEM = 0xee FINAL_NUN = 0xef NORMAL_NUN = 0xf0 FINAL_PE = 0xf3 NORMAL_PE = 0xf4 FINAL_TSADI = 0xf5 NORMAL_TSADI = 0xf6 # Minimum Visual vs Logical final letter score difference. # If the difference is below this, don't rely solely on the final letter score # distance. MIN_FINAL_CHAR_DISTANCE = 5 # Minimum Visual vs Logical model score difference. # If the difference is below this, don't rely at all on the model score # distance. 
MIN_MODEL_DISTANCE = 0.01 VISUAL_HEBREW_NAME = "ISO-8859-8" LOGICAL_HEBREW_NAME = "windows-1255" def __init__(self): super(HebrewProber, self).__init__() self._final_char_logical_score = None self._final_char_visual_score = None self._prev = None self._before_prev = None self._logical_prober = None self._visual_prober = None self.reset() def reset(self): self._final_char_logical_score = 0 self._final_char_visual_score = 0 # The two last characters seen in the previous buffer, # mPrev and mBeforePrev are initialized to space in order to simulate # a word delimiter at the beginning of the data self._prev = ' ' self._before_prev = ' ' # These probers are owned by the group prober. def set_model_probers(self, logicalProber, visualProber): self._logical_prober = logicalProber self._visual_prober = visualProber def is_final(self, c): return c in [self.FINAL_KAF, self.FINAL_MEM, self.FINAL_NUN, self.FINAL_PE, self.FINAL_TSADI] def is_non_final(self, c): # The normal Tsadi is not a good Non-Final letter due to words like # 'lechotet' (to chat) containing an apostrophe after the tsadi. This # apostrophe is converted to a space in FilterWithoutEnglishLetters # causing the Non-Final tsadi to appear at an end of a word even # though this is not the case in the original text. # The letters Pe and Kaf rarely display a related behavior of not being # a good Non-Final letter. Words like 'Pop', 'Winamp' and 'Mubarak' # for example legally end with a Non-Final Pe or Kaf. However, the # benefit of these letters as Non-Final letters outweighs the damage # since these words are quite rare. return c in [self.NORMAL_KAF, self.NORMAL_MEM, self.NORMAL_NUN, self.NORMAL_PE] def feed(self, byte_str): # Final letter analysis for logical-visual decision. # Look for evidence that the received buffer is either logical Hebrew # or visual Hebrew. # The following cases are checked: # 1) A word longer than 1 letter, ending with a final letter. This is # an indication that the text is laid out "naturally" since the # final letter really appears at the end. +1 for logical score. # 2) A word longer than 1 letter, ending with a Non-Final letter. In # normal Hebrew, words ending with Kaf, Mem, Nun, Pe or Tsadi, # should not end with the Non-Final form of that letter. Exceptions # to this rule are mentioned above in isNonFinal(). This is an # indication that the text is laid out backwards. +1 for visual # score # 3) A word longer than 1 letter, starting with a final letter. Final # letters should not appear at the beginning of a word. This is an # indication that the text is laid out backwards. +1 for visual # score. # # The visual score and logical score are accumulated throughout the # text and are finally checked against each other in GetCharSetName(). # No checking for final letters in the middle of words is done since # that case is not an indication for either Logical or Visual text. # # We automatically filter out all 7-bit characters (replace them with # spaces) so the word boundary detection works properly. [MAP] if self.state == ProbingState.NOT_ME: # Both model probers say it's not them. No reason to continue. 
return ProbingState.NOT_ME byte_str = self.filter_high_byte_only(byte_str) for cur in byte_str: if cur == ' ': # We stand on a space - a word just ended if self._before_prev != ' ': # next-to-last char was not a space so self._prev is not a # 1 letter word if self.is_final(self._prev): # case (1) [-2:not space][-1:final letter][cur:space] self._final_char_logical_score += 1 elif self.is_non_final(self._prev): # case (2) [-2:not space][-1:Non-Final letter][ # cur:space] self._final_char_visual_score += 1 else: # Not standing on a space if ((self._before_prev == ' ') and (self.is_final(self._prev)) and (cur != ' ')): # case (3) [-2:space][-1:final letter][cur:not space] self._final_char_visual_score += 1 self._before_prev = self._prev self._prev = cur # Forever detecting, till the end or until both model probers return # ProbingState.NOT_ME (handled above) return ProbingState.DETECTING @property def charset_name(self): # Make the decision: is it Logical or Visual? # If the final letter score distance is dominant enough, rely on it. finalsub = self._final_char_logical_score - self._final_char_visual_score if finalsub >= self.MIN_FINAL_CHAR_DISTANCE: return self.LOGICAL_HEBREW_NAME if finalsub <= -self.MIN_FINAL_CHAR_DISTANCE: return self.VISUAL_HEBREW_NAME # It's not dominant enough, try to rely on the model scores instead. modelsub = (self._logical_prober.get_confidence() - self._visual_prober.get_confidence()) if modelsub > self.MIN_MODEL_DISTANCE: return self.LOGICAL_HEBREW_NAME if modelsub < -self.MIN_MODEL_DISTANCE: return self.VISUAL_HEBREW_NAME # Still no good, back to final letter distance, maybe it'll save the # day. if finalsub < 0.0: return self.VISUAL_HEBREW_NAME # (finalsub > 0 - Logical) or (don't know what to do) default to # Logical. return self.LOGICAL_HEBREW_NAME @property def language(self): return 'Hebrew' @property def state(self): # Remain active as long as any of the model probers are active. 
        if (self._logical_prober.state == ProbingState.NOT_ME) and \
           (self._visual_prober.state == ProbingState.NOT_ME):
            return ProbingState.NOT_ME
        return ProbingState.DETECTING
[compiled module pip/_vendor/chardet/charsetgroupprober.pyo: bytecode for CharSetGroupProber, which feeds each buffer to its active child probers and keeps the best-confidence guess]
[compiled module pip/_vendor/chardet/latin1prober.pyo: bytecode for Latin1Prober and its Latin1_CharToClass / Latin1ClassModel tables]
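The HebrewProber above never reports a confidence of its own; as its comments explain, it only arbitrates between logical and visual Hebrew once the group prober has attached two model probers to it. Below is a minimal sketch of that wiring, following what the compiled sbcsgroupprober module earlier in this archive does; it assumes the SingleByteCharSetProber(model, reversed, name_prober) signature of this vendored chardet and is illustrative rather than part of the archive.

from pip._vendor.chardet.hebrewprober import HebrewProber
from pip._vendor.chardet.sbcharsetprober import SingleByteCharSetProber
from pip._vendor.chardet.langhebrewmodel import Win1255HebrewModel

# One helper prober that only scores final letters, plus two model probers
# sharing the windows-1255 model; the second reads letter pairs reversed to
# simulate visual Hebrew.
hebrew_prober = HebrewProber()
logical_hebrew = SingleByteCharSetProber(Win1255HebrewModel, False, hebrew_prober)
visual_hebrew = SingleByteCharSetProber(Win1255HebrewModel, True, hebrew_prober)
hebrew_prober.set_model_probers(logical_hebrew, visual_hebrew)
# Whichever model prober wins asks hebrew_prober.charset_name for the final
# "windows-1255" versus "ISO-8859-8" answer.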
PKZaZ2site-packages/pip/_vendor/chardet/charsetprober.pynu[######################## BEGIN LICENSE BLOCK ######################## # The Original Code is Mozilla Universal charset detector code. # # The Initial Developer of the Original Code is # Netscape Communications Corporation. # Portions created by the Initial Developer are Copyright (C) 2001 # the Initial Developer. All Rights Reserved. # # Contributor(s): # Mark Pilgrim - port to Python # Shy Shalom - original C code # # This library is free software; you can redistribute it and/or # modify it under the terms of the GNU Lesser General Public # License as published by the Free Software Foundation; either # version 2.1 of the License, or (at your option) any later version. # # This library is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU # Lesser General Public License for more details. # # You should have received a copy of the GNU Lesser General Public # License along with this library; if not, write to the Free Software # Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA # 02110-1301 USA ######################### END LICENSE BLOCK ######################### import logging import re from .enums import ProbingState class CharSetProber(object): SHORTCUT_THRESHOLD = 0.95 def __init__(self, lang_filter=None): self._state = None self.lang_filter = lang_filter self.logger = logging.getLogger(__name__) def reset(self): self._state = ProbingState.DETECTING @property def charset_name(self): return None def feed(self, buf): pass @property def state(self): return self._state def get_confidence(self): return 0.0 @staticmethod def filter_high_byte_only(buf): buf = re.sub(b'([\x00-\x7F])+', b' ', buf) return buf @staticmethod def filter_international_words(buf): """ We define three types of bytes: alphabet: english alphabets [a-zA-Z] international: international characters [\x80-\xFF] marker: everything else [^a-zA-Z\x80-\xFF] The input buffer can be thought to contain a series of words delimited by markers. This function works to filter all words that contain at least one international character. All contiguous sequences of markers are replaced by a single space ascii character. This filter applies to all scripts which do not use English characters. """ filtered = bytearray() # This regex expression filters out only words that have at-least one # international character. The word may include one marker character at # the end. words = re.findall(b'[a-zA-Z]*[\x80-\xFF]+[a-zA-Z]*[^a-zA-Z\x80-\xFF]?', buf) for word in words: filtered.extend(word[:-1]) # If the last character in the word is a marker, replace it with a # space as markers shouldn't affect our analysis (they are used # similarly across all languages and may thus have similar # frequencies). last_char = word[-1:] if not last_char.isalpha() and last_char < b'\x80': last_char = b' ' filtered.extend(last_char) return filtered @staticmethod def filter_with_english_letters(buf): """ Returns a copy of ``buf`` that retains only the sequences of English alphabet and high byte characters that are not between <> characters. Also retains English alphabet and high byte characters immediately before occurrences of >. This filter can be applied to all scripts which contain both English characters and extended ASCII characters, but is currently only used by ``Latin1Prober``. 
""" filtered = bytearray() in_tag = False prev = 0 for curr in range(len(buf)): # Slice here to get bytes instead of an int with Python 3 buf_char = buf[curr:curr + 1] # Check if we're coming out of or entering an HTML tag if buf_char == b'>': in_tag = False elif buf_char == b'<': in_tag = True # If current character is not extended-ASCII and not alphabetic... if buf_char < b'\x80' and not buf_char.isalpha(): # ...and we're not in a tag if curr > prev and not in_tag: # Keep everything after last non-extended-ASCII, # non-alphabetic character filtered.extend(buf[prev:curr]) # Output a space to delimit stretch we kept filtered.extend(b' ') prev = curr + 1 # If we're not in a tag... if not in_tag: # Keep everything after last non-extended-ASCII, non-alphabetic # character filtered.extend(buf[prev:]) return filtered PKZo[[3site-packages/pip/_vendor/chardet/langthaimodel.pyonu[ abc@s@dZdZied6ed6dd6ed6dd6dd6ZdS(iiiiiijikidiiiiei^iiiliminioiiiiYi_ipiqiiiii@iHiIiriJisitifiQiiuiZigiNiRi`ii[iOiTihiiiaibi\iiiiiiiXiiiiiiiiviiiiiciUiSiiiiiiiiiiiiiiiiiiKiii4i"i3iwi/i:i9i1i5i7i+iii,ii0iiii'i>ii6i-i iii=iii i*i.iiiLiiBi?ii ii$ii i(ii i#iViiiiii ii)ii!ii2i%iiiCiMi&i]iiiDi8i;iAiEi<iFiPiGiWiiiiitchar_to_order_maptprecedence_matrixg@?ttypical_positive_ratiotkeep_english_lettersTIS-620t charset_nametThaitlanguageN(iiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiijikidiiiiei^iiiliminioiiiiYi_ipiqiiiiiiiiiii@iHiIiriJisitifiQiiuiZigiNiRi`ii[iOiTihiiiaibi\iiiiiiiiiiiiXiiiiiiiiviiiiiciUiSiiiiiiiiiiiiiiiiiiKiii4i"i3iwi/i:i9i1i5i7i+iii,ii0iiii'i>ii6i-i iii=iii i*i.iiiLiiBi?ii ii$ii i(ii i#iViiiiii ii)ii!ii2i%iiiCiMi&i]iiiDi8i;iAiEi<iFiPiGiWiiiiii(iiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiii
[compiled module pip/_vendor/chardet/chardistribution.pyo: bytecode for CharDistributionAnalysis and its EUCTW, EUCKR, GB2312, Big5, SJIS and EUCJP distribution-analysis subclasses]
[compiled module pip/_vendor/chardet/hebrewprober.pyc: bytecode for the HebrewProber class whose readable source appears above]
site-packages/pip/_vendor/chardet/version.py

"""
This module exists only to simplify retrieving the version number of chardet
from within setup.py and from chardet subpackages.

:author: Dan Blanchard (dan.blanchard@gmail.com)
"""

__version__ = "3.0.4"
VERSION = __version__.split('.')

site-packages/pip/_vendor/chardet/eucjpprober.pyo  [compiled bytecode]
site-packages/pip/_vendor/chardet/charsetgroupprober.pyc  [compiled bytecode]
site-packages/pip/_vendor/chardet/cp949prober.pyc  [compiled bytecode]
site-packages/pip/_vendor/chardet/langhebrewmodel.pyo  [compiled bytecode]
site-packages/pip/_vendor/chardet/utf8prober.py

######################## BEGIN LICENSE BLOCK ########################
# The Original Code is mozilla.org code.
#
# The Initial Developer of the Original Code is
# Netscape Communications Corporation.
# Portions created by the Initial Developer are Copyright (C) 1998
# the Initial Developer. All Rights Reserved.
#
# Contributor(s):
#   Mark Pilgrim - port to Python
#
# This library is free software; you can redistribute it and/or
# modify it under the terms of the GNU Lesser General Public
# License as published by the Free Software Foundation; either
# version 2.1 of the License, or (at your option) any later version.
#
# This library is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
# Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this library; if not, write to the Free Software
# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA
# 02110-1301  USA
######################### END LICENSE BLOCK #########################

from .charsetprober import CharSetProber
from .enums import ProbingState, MachineState
from .codingstatemachine import CodingStateMachine
from .mbcssm import UTF8_SM_MODEL


class UTF8Prober(CharSetProber):
    ONE_CHAR_PROB = 0.5

    def __init__(self):
        super(UTF8Prober, self).__init__()
        self.coding_sm = CodingStateMachine(UTF8_SM_MODEL)
        self._num_mb_chars = None
        self.reset()

    def reset(self):
        super(UTF8Prober, self).reset()
        self.coding_sm.reset()
        self._num_mb_chars = 0

    @property
    def charset_name(self):
        return "utf-8"

    @property
    def language(self):
        return ""

    def feed(self, byte_str):
        for c in byte_str:
            coding_state = self.coding_sm.next_state(c)
            if coding_state == MachineState.ERROR:
                self._state = ProbingState.NOT_ME
                break
            elif coding_state == MachineState.ITS_ME:
                self._state = ProbingState.FOUND_IT
                break
            elif coding_state == MachineState.START:
                if self.coding_sm.get_current_charlen() >= 2:
                    self._num_mb_chars += 1

        if self.state == ProbingState.DETECTING:
            if self.get_confidence() > self.SHORTCUT_THRESHOLD:
                self._state = ProbingState.FOUND_IT

        return self.state

    def get_confidence(self):
        unlike = 0.99
        if self._num_mb_chars < 6:
            unlike *= self.ONE_CHAR_PROB ** self._num_mb_chars
            return 1.0 - unlike
        else:
            return unlike

site-packages/pip/_vendor/chardet/sjisprober.pyo  [compiled bytecode]
site-packages/pip/_vendor/chardet/langcyrillicmodel.py

######################## BEGIN LICENSE BLOCK ########################
# The Original Code is Mozilla Communicator client code.
#
# The Initial Developer of the Original Code is
# Netscape Communications Corporation.
# Portions created by the Initial Developer are Copyright (C) 1998
# the Initial Developer. All Rights Reserved.
#
# Contributor(s):
#   Mark Pilgrim - port to Python
#
# This library is free software; you can redistribute it and/or
# modify it under the terms of the GNU Lesser General Public
# License as published by the Free Software Foundation; either
# version 2.1 of the License, or (at your option) any later version.
#
# This library is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
# Lesser General Public License for more details.
# # You should have received a copy of the GNU Lesser General Public # License along with this library; if not, write to the Free Software # Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA # 02110-1301 USA ######################### END LICENSE BLOCK ######################### # KOI8-R language model # Character Mapping Table: KOI8R_char_to_order_map = ( 255,255,255,255,255,255,255,255,255,255,254,255,255,254,255,255, # 00 255,255,255,255,255,255,255,255,255,255,255,255,255,255,255,255, # 10 253,253,253,253,253,253,253,253,253,253,253,253,253,253,253,253, # 20 252,252,252,252,252,252,252,252,252,252,253,253,253,253,253,253, # 30 253,142,143,144,145,146,147,148,149,150,151,152, 74,153, 75,154, # 40 155,156,157,158,159,160,161,162,163,164,165,253,253,253,253,253, # 50 253, 71,172, 66,173, 65,174, 76,175, 64,176,177, 77, 72,178, 69, # 60 67,179, 78, 73,180,181, 79,182,183,184,185,253,253,253,253,253, # 70 191,192,193,194,195,196,197,198,199,200,201,202,203,204,205,206, # 80 207,208,209,210,211,212,213,214,215,216,217,218,219,220,221,222, # 90 223,224,225, 68,226,227,228,229,230,231,232,233,234,235,236,237, # a0 238,239,240,241,242,243,244,245,246,247,248,249,250,251,252,253, # b0 27, 3, 21, 28, 13, 2, 39, 19, 26, 4, 23, 11, 8, 12, 5, 1, # c0 15, 16, 9, 7, 6, 14, 24, 10, 17, 18, 20, 25, 30, 29, 22, 54, # d0 59, 37, 44, 58, 41, 48, 53, 46, 55, 42, 60, 36, 49, 38, 31, 34, # e0 35, 43, 45, 32, 40, 52, 56, 33, 61, 62, 51, 57, 47, 63, 50, 70, # f0 ) win1251_char_to_order_map = ( 255,255,255,255,255,255,255,255,255,255,254,255,255,254,255,255, # 00 255,255,255,255,255,255,255,255,255,255,255,255,255,255,255,255, # 10 253,253,253,253,253,253,253,253,253,253,253,253,253,253,253,253, # 20 252,252,252,252,252,252,252,252,252,252,253,253,253,253,253,253, # 30 253,142,143,144,145,146,147,148,149,150,151,152, 74,153, 75,154, # 40 155,156,157,158,159,160,161,162,163,164,165,253,253,253,253,253, # 50 253, 71,172, 66,173, 65,174, 76,175, 64,176,177, 77, 72,178, 69, # 60 67,179, 78, 73,180,181, 79,182,183,184,185,253,253,253,253,253, # 70 191,192,193,194,195,196,197,198,199,200,201,202,203,204,205,206, 207,208,209,210,211,212,213,214,215,216,217,218,219,220,221,222, 223,224,225,226,227,228,229,230,231,232,233,234,235,236,237,238, 239,240,241,242,243,244,245,246, 68,247,248,249,250,251,252,253, 37, 44, 33, 46, 41, 48, 56, 51, 42, 60, 36, 49, 38, 31, 34, 35, 45, 32, 40, 52, 53, 55, 58, 50, 57, 63, 70, 62, 61, 47, 59, 43, 3, 21, 10, 19, 13, 2, 24, 20, 4, 23, 11, 8, 12, 5, 1, 15, 9, 7, 6, 14, 39, 26, 28, 22, 25, 29, 54, 18, 17, 30, 27, 16, ) latin5_char_to_order_map = ( 255,255,255,255,255,255,255,255,255,255,254,255,255,254,255,255, # 00 255,255,255,255,255,255,255,255,255,255,255,255,255,255,255,255, # 10 253,253,253,253,253,253,253,253,253,253,253,253,253,253,253,253, # 20 252,252,252,252,252,252,252,252,252,252,253,253,253,253,253,253, # 30 253,142,143,144,145,146,147,148,149,150,151,152, 74,153, 75,154, # 40 155,156,157,158,159,160,161,162,163,164,165,253,253,253,253,253, # 50 253, 71,172, 66,173, 65,174, 76,175, 64,176,177, 77, 72,178, 69, # 60 67,179, 78, 73,180,181, 79,182,183,184,185,253,253,253,253,253, # 70 191,192,193,194,195,196,197,198,199,200,201,202,203,204,205,206, 207,208,209,210,211,212,213,214,215,216,217,218,219,220,221,222, 223,224,225,226,227,228,229,230,231,232,233,234,235,236,237,238, 37, 44, 33, 46, 41, 48, 56, 51, 42, 60, 36, 49, 38, 31, 34, 35, 45, 32, 40, 52, 53, 55, 58, 50, 57, 63, 70, 62, 61, 47, 59, 43, 3, 21, 10, 19, 13, 2, 24, 20, 4, 23, 11, 8, 12, 5, 1, 15, 9, 7, 6, 14, 
39, 26, 28, 22, 25, 29, 54, 18, 17, 30, 27, 16, 239, 68,240,241,242,243,244,245,246,247,248,249,250,251,252,255, ) macCyrillic_char_to_order_map = ( 255,255,255,255,255,255,255,255,255,255,254,255,255,254,255,255, # 00 255,255,255,255,255,255,255,255,255,255,255,255,255,255,255,255, # 10 253,253,253,253,253,253,253,253,253,253,253,253,253,253,253,253, # 20 252,252,252,252,252,252,252,252,252,252,253,253,253,253,253,253, # 30 253,142,143,144,145,146,147,148,149,150,151,152, 74,153, 75,154, # 40 155,156,157,158,159,160,161,162,163,164,165,253,253,253,253,253, # 50 253, 71,172, 66,173, 65,174, 76,175, 64,176,177, 77, 72,178, 69, # 60 67,179, 78, 73,180,181, 79,182,183,184,185,253,253,253,253,253, # 70 37, 44, 33, 46, 41, 48, 56, 51, 42, 60, 36, 49, 38, 31, 34, 35, 45, 32, 40, 52, 53, 55, 58, 50, 57, 63, 70, 62, 61, 47, 59, 43, 191,192,193,194,195,196,197,198,199,200,201,202,203,204,205,206, 207,208,209,210,211,212,213,214,215,216,217,218,219,220,221,222, 223,224,225,226,227,228,229,230,231,232,233,234,235,236,237,238, 239,240,241,242,243,244,245,246,247,248,249,250,251,252, 68, 16, 3, 21, 10, 19, 13, 2, 24, 20, 4, 23, 11, 8, 12, 5, 1, 15, 9, 7, 6, 14, 39, 26, 28, 22, 25, 29, 54, 18, 17, 30, 27,255, ) IBM855_char_to_order_map = ( 255,255,255,255,255,255,255,255,255,255,254,255,255,254,255,255, # 00 255,255,255,255,255,255,255,255,255,255,255,255,255,255,255,255, # 10 253,253,253,253,253,253,253,253,253,253,253,253,253,253,253,253, # 20 252,252,252,252,252,252,252,252,252,252,253,253,253,253,253,253, # 30 253,142,143,144,145,146,147,148,149,150,151,152, 74,153, 75,154, # 40 155,156,157,158,159,160,161,162,163,164,165,253,253,253,253,253, # 50 253, 71,172, 66,173, 65,174, 76,175, 64,176,177, 77, 72,178, 69, # 60 67,179, 78, 73,180,181, 79,182,183,184,185,253,253,253,253,253, # 70 191,192,193,194, 68,195,196,197,198,199,200,201,202,203,204,205, 206,207,208,209,210,211,212,213,214,215,216,217, 27, 59, 54, 70, 3, 37, 21, 44, 28, 58, 13, 41, 2, 48, 39, 53, 19, 46,218,219, 220,221,222,223,224, 26, 55, 4, 42,225,226,227,228, 23, 60,229, 230,231,232,233,234,235, 11, 36,236,237,238,239,240,241,242,243, 8, 49, 12, 38, 5, 31, 1, 34, 15,244,245,246,247, 35, 16,248, 43, 9, 45, 7, 32, 6, 40, 14, 52, 24, 56, 10, 33, 17, 61,249, 250, 18, 62, 20, 51, 25, 57, 30, 47, 29, 63, 22, 50,251,252,255, ) IBM866_char_to_order_map = ( 255,255,255,255,255,255,255,255,255,255,254,255,255,254,255,255, # 00 255,255,255,255,255,255,255,255,255,255,255,255,255,255,255,255, # 10 253,253,253,253,253,253,253,253,253,253,253,253,253,253,253,253, # 20 252,252,252,252,252,252,252,252,252,252,253,253,253,253,253,253, # 30 253,142,143,144,145,146,147,148,149,150,151,152, 74,153, 75,154, # 40 155,156,157,158,159,160,161,162,163,164,165,253,253,253,253,253, # 50 253, 71,172, 66,173, 65,174, 76,175, 64,176,177, 77, 72,178, 69, # 60 67,179, 78, 73,180,181, 79,182,183,184,185,253,253,253,253,253, # 70 37, 44, 33, 46, 41, 48, 56, 51, 42, 60, 36, 49, 38, 31, 34, 35, 45, 32, 40, 52, 53, 55, 58, 50, 57, 63, 70, 62, 61, 47, 59, 43, 3, 21, 10, 19, 13, 2, 24, 20, 4, 23, 11, 8, 12, 5, 1, 15, 191,192,193,194,195,196,197,198,199,200,201,202,203,204,205,206, 207,208,209,210,211,212,213,214,215,216,217,218,219,220,221,222, 223,224,225,226,227,228,229,230,231,232,233,234,235,236,237,238, 9, 7, 6, 14, 39, 26, 28, 22, 25, 29, 54, 18, 17, 30, 27, 16, 239, 68,240,241,242,243,244,245,246,247,248,249,250,251,252,255, ) # Model Table: # total sequences: 100% # first 512 sequences: 97.6601% # first 1024 sequences: 2.3389% # rest sequences: 0.1237% # negative 
sequences: 0.0009% RussianLangModel = ( 0,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,1,1,3,3,3,3,1,3,3,3,2,3,2,3,3, 3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,0,3,2,2,2,2,2,0,0,2, 3,3,3,2,3,3,3,3,3,3,3,3,3,3,2,3,3,0,0,3,3,3,3,3,3,3,3,3,2,3,2,0, 0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 3,3,3,2,2,3,3,3,3,3,3,3,3,3,2,3,3,0,0,3,3,3,3,3,3,3,3,2,3,3,1,0, 0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 3,2,3,2,3,3,3,3,3,3,3,3,3,3,3,3,3,0,0,3,3,3,3,3,3,3,3,3,3,3,2,1, 0,0,0,0,0,0,0,2,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 3,3,3,3,3,3,3,3,3,3,3,3,3,3,2,3,3,0,0,3,3,3,3,3,3,3,3,3,3,3,2,1, 0,0,0,0,0,1,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 3,3,3,3,3,3,3,3,2,2,2,3,1,3,3,1,3,3,3,3,2,2,3,0,2,2,2,3,3,2,1,0, 0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0, 3,3,3,3,3,3,2,3,3,3,3,3,2,2,3,2,3,3,3,2,1,2,2,0,1,2,2,2,2,2,2,0, 0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0, 3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,2,2,2,3,0,2,2,3,3,2,1,2,0, 0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,1,0,0,2,0,0,0,0,0,0,0,0,0, 3,3,3,3,3,3,2,3,3,1,2,3,2,2,3,2,3,3,3,3,2,2,3,0,3,2,2,3,1,1,1,0, 0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 3,3,3,3,3,3,3,3,2,2,3,3,3,3,3,2,3,3,3,3,2,2,2,0,3,3,3,2,2,2,2,0, 0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 3,3,3,3,3,3,3,3,3,3,2,3,2,3,3,3,3,3,3,2,3,2,2,0,1,3,2,1,2,2,1,0, 0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0, 3,3,3,3,3,3,3,3,3,3,3,2,1,1,3,0,1,1,1,1,2,1,1,0,2,2,2,1,2,0,1,0, 0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 3,3,3,3,3,3,2,3,3,2,2,2,2,1,3,2,3,2,3,2,1,2,2,0,1,1,2,1,2,1,2,0, 0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 3,3,3,3,3,3,3,3,3,3,3,3,2,2,3,2,3,3,3,2,2,2,2,0,2,2,2,2,3,1,1,0, 0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0, 3,2,3,2,2,3,3,3,3,3,3,3,3,3,1,3,2,0,0,3,3,3,3,2,3,3,3,3,2,3,2,0, 0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 2,3,3,3,3,3,2,2,3,3,0,2,1,0,3,2,3,2,3,0,0,1,2,0,0,1,0,1,2,1,1,0, 0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 3,0,3,0,2,3,3,3,3,2,3,3,3,3,1,2,2,0,0,2,3,2,2,2,3,2,3,2,2,3,0,0, 0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 3,2,3,0,2,3,2,3,0,1,2,3,3,2,0,2,3,0,0,2,3,2,2,0,1,3,1,3,2,2,1,0, 0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 3,1,3,0,2,3,3,3,3,3,3,3,3,2,1,3,2,0,0,2,2,3,3,3,2,3,3,0,2,2,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 3,3,3,3,3,3,2,2,3,3,2,2,2,3,3,0,0,1,1,1,1,1,2,0,0,1,1,1,1,0,1,0, 0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 3,3,3,3,3,3,2,2,3,3,3,3,3,3,3,0,3,2,3,3,2,3,2,0,2,1,0,1,1,0,1,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0, 3,3,3,3,3,3,2,3,3,3,2,2,2,2,3,1,3,2,3,1,1,2,1,0,2,2,2,2,1,3,1,0, 0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0, 2,2,3,3,3,3,3,1,2,2,1,3,1,0,3,0,0,3,0,0,0,1,1,0,1,2,1,0,0,0,0,0, 0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 3,2,2,1,1,3,3,3,2,2,1,2,2,3,1,1,2,0,0,2,2,1,3,0,0,2,1,1,2,1,1,0, 0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 3,2,3,3,3,3,1,2,2,2,1,2,1,3,3,1,1,2,1,2,1,2,2,0,2,0,0,1,1,0,1,0, 0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 2,3,3,3,3,3,2,1,3,2,2,3,2,0,3,2,0,3,0,1,0,1,1,0,0,1,1,1,1,0,1,0, 0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 3,3,2,3,3,3,2,2,2,3,3,1,2,1,2,1,0,1,0,1,1,0,1,0,0,2,1,1,1,0,1,0, 0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0, 
3,1,1,2,1,2,3,3,2,2,1,2,2,3,0,2,1,0,0,2,2,3,2,1,2,2,2,2,2,3,1,0, 0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 3,3,3,3,3,1,1,0,1,1,2,2,1,1,3,0,0,1,3,1,1,1,0,0,0,1,0,1,1,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 2,1,3,3,3,2,0,0,0,2,1,0,1,0,2,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 2,0,1,0,0,2,3,2,2,2,1,2,2,2,1,2,1,0,0,1,1,1,0,2,0,1,1,1,0,0,1,1, 1,0,0,0,0,0,1,2,0,0,0,0,0,1,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0, 2,3,3,3,3,0,0,0,0,1,0,0,0,0,3,0,1,2,1,0,0,0,0,0,0,0,1,1,0,0,1,1, 1,0,1,0,1,2,0,0,1,1,2,1,0,1,1,1,1,0,1,1,1,1,0,1,0,0,1,0,0,1,1,0, 2,2,3,2,2,2,3,1,2,2,2,2,2,2,2,2,1,1,1,1,1,1,1,0,1,0,1,1,1,0,2,1, 1,1,1,1,1,1,1,1,2,1,1,1,1,1,1,1,1,1,1,0,1,0,1,1,0,1,1,1,0,1,1,0, 3,3,3,2,2,2,2,3,2,2,1,1,2,2,2,2,1,1,3,1,2,1,2,0,0,1,1,0,1,0,2,1, 1,1,1,1,1,2,1,0,1,1,1,1,0,1,0,0,1,1,0,0,1,0,1,0,0,1,0,0,0,1,1,0, 2,0,0,1,0,3,2,2,2,2,1,2,1,2,1,2,0,0,0,2,1,2,2,1,1,2,2,0,1,1,0,2, 1,1,1,1,1,0,1,1,1,2,1,1,1,2,1,0,1,2,1,1,1,1,0,1,1,1,0,0,1,0,0,1, 1,3,2,2,2,1,1,1,2,3,0,0,0,0,2,0,2,2,1,0,0,0,0,0,0,1,0,0,0,0,1,1, 1,0,1,1,0,1,0,1,1,0,1,1,0,2,0,0,1,1,0,0,1,0,0,0,0,0,0,0,0,1,1,0, 2,3,2,3,2,1,2,2,2,2,1,0,0,0,2,0,0,1,1,0,0,0,0,0,0,0,1,1,0,0,2,1, 1,1,2,1,0,2,0,0,1,0,1,0,0,1,0,0,1,1,0,1,1,0,0,0,0,0,1,0,0,0,0,0, 3,0,0,1,0,2,2,2,3,2,2,2,2,2,2,2,0,0,0,2,1,2,1,1,1,2,2,0,0,0,1,2, 1,1,1,1,1,0,1,2,1,1,1,1,1,1,1,0,1,1,1,1,1,1,0,1,1,1,1,1,1,0,0,1, 2,3,2,3,3,2,0,1,1,1,0,0,1,0,2,0,1,1,3,1,0,0,0,0,0,0,0,1,0,0,2,1, 1,1,1,1,1,1,1,0,1,0,1,1,1,1,0,1,1,1,0,0,1,1,0,1,0,0,0,0,0,0,1,0, 2,3,3,3,3,1,2,2,2,2,0,1,1,0,2,1,1,1,2,1,0,1,1,0,0,1,0,1,0,0,2,0, 0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 2,3,3,3,2,0,0,1,1,2,2,1,0,0,2,0,1,1,3,0,0,1,0,0,0,0,0,1,0,1,2,1, 1,1,2,0,1,1,1,0,1,0,1,1,0,1,0,1,1,1,1,0,1,0,0,0,0,0,0,1,0,1,1,0, 1,3,2,3,2,1,0,0,2,2,2,0,1,0,2,0,1,1,1,0,1,0,0,0,3,0,1,1,0,0,2,1, 1,1,1,0,1,1,0,0,0,0,1,1,0,1,0,0,2,1,1,0,1,0,0,0,1,0,1,0,0,1,1,0, 3,1,2,1,1,2,2,2,2,2,2,1,2,2,1,1,0,0,0,2,2,2,0,0,0,1,2,1,0,1,0,1, 2,1,1,1,1,1,1,1,1,1,1,1,1,1,1,0,2,1,1,1,0,1,0,1,1,0,1,1,1,0,0,1, 3,0,0,0,0,2,0,1,1,1,1,1,1,1,0,1,0,0,0,1,1,1,0,1,0,1,1,0,0,1,0,1, 1,1,0,0,1,0,0,0,1,0,1,1,0,0,1,0,1,0,1,0,0,0,0,1,0,0,0,1,0,0,0,1, 1,3,3,2,2,0,0,0,2,2,0,0,0,1,2,0,1,1,2,0,0,0,0,0,0,0,0,1,0,0,2,1, 0,1,1,0,0,1,1,0,0,0,1,1,0,1,1,0,1,1,0,0,1,0,0,0,0,0,0,0,0,0,1,0, 2,3,2,3,2,0,0,0,0,1,1,0,0,0,2,0,2,0,2,0,0,0,0,0,1,0,0,1,0,0,1,1, 1,1,2,0,1,2,1,0,1,1,2,1,1,1,1,1,2,1,1,0,1,0,0,1,1,1,1,1,0,1,1,0, 1,3,2,2,2,1,0,0,2,2,1,0,1,2,2,0,0,1,0,0,0,0,0,0,0,0,0,1,0,0,1,1, 0,0,1,1,0,1,1,0,0,1,1,0,1,1,0,0,1,1,0,0,1,0,0,0,0,0,0,0,0,0,0,0, 1,0,0,1,0,2,3,1,2,2,2,2,2,2,1,1,0,0,0,1,0,1,0,2,1,1,1,0,0,0,0,1, 1,1,0,1,1,0,1,1,1,1,0,0,0,1,0,0,0,1,0,0,0,0,0,0,0,0,0,0,1,0,0,0, 2,0,2,0,0,1,0,3,2,1,2,1,2,2,0,1,0,0,0,2,1,0,0,2,1,1,1,1,0,2,0,2, 2,1,1,1,1,1,1,1,1,1,1,1,1,2,1,0,1,1,1,1,0,0,0,1,1,1,1,0,1,0,0,1, 1,2,2,2,2,1,0,0,1,0,0,0,0,0,2,0,1,1,1,1,0,0,0,0,1,0,1,2,0,0,2,0, 1,0,1,1,1,2,1,0,1,0,1,1,0,0,1,0,1,1,1,0,1,0,0,0,1,0,0,1,0,1,1,0, 2,1,2,2,2,0,3,0,1,1,0,0,0,0,2,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,1, 0,0,0,1,1,1,0,0,1,0,1,0,0,0,0,0,1,0,0,0,1,0,0,0,0,0,0,0,0,1,0,0, 1,2,2,3,2,2,0,0,1,1,2,0,1,2,1,0,1,0,1,0,0,1,0,0,0,0,0,0,0,0,0,1, 0,1,1,0,0,1,1,0,0,1,1,0,0,1,1,0,1,1,0,0,1,0,0,0,0,0,0,0,0,1,1,0, 2,2,1,1,2,1,2,2,2,2,2,1,2,2,0,1,0,0,0,1,2,2,2,1,2,1,1,1,1,1,2,1, 1,1,1,1,1,1,1,1,1,1,0,0,1,1,1,0,1,1,1,0,0,0,0,1,1,1,0,1,1,0,0,1, 1,2,2,2,2,0,1,0,2,2,0,0,0,0,2,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,2,0, 0,0,1,0,0,1,0,0,0,0,1,0,1,1,0,0,1,1,0,0,1,0,0,0,0,0,0,0,0,0,0,0, 
0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 1,2,2,2,2,0,0,0,2,2,2,0,1,0,1,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,1,1, 0,1,1,0,0,1,1,0,0,0,1,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 1,2,2,2,2,0,0,0,0,1,0,0,1,1,2,0,0,0,0,1,0,1,0,0,1,0,0,2,0,0,0,1, 0,0,1,0,0,1,0,0,0,1,1,0,0,0,0,0,1,0,0,1,1,0,0,0,0,0,0,0,0,0,0,0, 1,2,2,2,1,1,2,0,2,1,1,1,1,0,2,2,0,0,0,0,0,0,0,0,0,1,1,0,0,0,1,1, 0,0,1,0,1,1,0,0,0,0,1,0,0,0,0,0,1,1,0,0,1,0,0,0,0,0,0,0,0,0,0,0, 1,0,2,1,2,0,0,0,0,0,1,0,0,0,1,0,0,0,1,0,0,0,0,0,0,0,0,1,0,0,0,0, 0,0,1,0,1,1,0,0,0,0,1,0,0,0,0,0,1,0,0,0,1,0,0,0,0,0,0,0,0,0,1,0, 1,0,0,0,0,2,0,1,2,1,0,1,1,1,0,1,0,0,0,1,0,1,0,0,1,0,1,0,0,0,0,1, 0,0,0,0,0,1,0,0,1,1,0,0,1,1,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,1, 2,2,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1, 1,0,0,0,1,0,0,0,1,1,0,0,0,0,0,0,0,1,0,0,0,0,0,1,0,0,1,0,0,0,0,0, 2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1, 1,1,1,0,1,0,1,0,0,1,1,1,1,0,0,0,1,0,0,0,0,1,0,0,0,1,0,1,0,0,0,0, 1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1, 1,1,0,1,1,0,1,0,1,0,0,0,0,1,1,0,1,1,0,0,0,0,0,1,0,1,1,0,1,0,0,0, 0,1,1,1,1,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,1,0,0,0,0,1,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0, ) Koi8rModel = { 'char_to_order_map': KOI8R_char_to_order_map, 'precedence_matrix': RussianLangModel, 'typical_positive_ratio': 0.976601, 'keep_english_letter': False, 'charset_name': "KOI8-R", 'language': 'Russian', } Win1251CyrillicModel = { 'char_to_order_map': win1251_char_to_order_map, 'precedence_matrix': RussianLangModel, 'typical_positive_ratio': 0.976601, 'keep_english_letter': False, 'charset_name': "windows-1251", 'language': 'Russian', } Latin5CyrillicModel = { 'char_to_order_map': latin5_char_to_order_map, 'precedence_matrix': RussianLangModel, 'typical_positive_ratio': 0.976601, 'keep_english_letter': False, 'charset_name': "ISO-8859-5", 'language': 'Russian', } MacCyrillicModel = { 'char_to_order_map': macCyrillic_char_to_order_map, 'precedence_matrix': RussianLangModel, 'typical_positive_ratio': 0.976601, 'keep_english_letter': False, 'charset_name': "MacCyrillic", 'language': 'Russian', } Ibm866Model = { 'char_to_order_map': IBM866_char_to_order_map, 'precedence_matrix': RussianLangModel, 'typical_positive_ratio': 0.976601, 'keep_english_letter': False, 'charset_name': "IBM866", 'language': 'Russian', } Ibm855Model = { 'char_to_order_map': IBM855_char_to_order_map, 'precedence_matrix': RussianLangModel, 'typical_positive_ratio': 0.976601, 'keep_english_letter': False, 'charset_name': "IBM855", 'language': 'Russian', } PKZgf/site-packages/pip/_vendor/chardet/big5prober.pynu[######################## BEGIN LICENSE BLOCK ######################## # The Original Code is Mozilla Communicator client code. # # The Initial Developer of the Original Code is # Netscape Communications Corporation. # Portions created by the Initial Developer are Copyright (C) 1998 # the Initial Developer. All Rights Reserved. # # Contributor(s): # Mark Pilgrim - port to Python # # This library is free software; you can redistribute it and/or # modify it under the terms of the GNU Lesser General Public # License as published by the Free Software Foundation; either # version 2.1 of the License, or (at your option) any later version. # # This library is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. 
See the GNU
# Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this library; if not, write to the Free Software
# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA
# 02110-1301  USA
######################### END LICENSE BLOCK #########################

from .mbcharsetprober import MultiByteCharSetProber
from .codingstatemachine import CodingStateMachine
from .chardistribution import Big5DistributionAnalysis
from .mbcssm import BIG5_SM_MODEL


class Big5Prober(MultiByteCharSetProber):
    def __init__(self):
        super(Big5Prober, self).__init__()
        self.coding_sm = CodingStateMachine(BIG5_SM_MODEL)
        self.distribution_analyzer = Big5DistributionAnalysis()
        self.reset()

    @property
    def charset_name(self):
        return "Big5"

    @property
    def language(self):
        return "Chinese"

site-packages/pip/_vendor/chardet/mbcharsetprober.pyo  [compiled bytecode]

site-packages/pip/_vendor/chardet/sjisprober.py

######################## BEGIN LICENSE BLOCK ########################
# The Original Code is mozilla.org code.
#
# The Initial Developer of the Original Code is
# Netscape Communications Corporation.
# Portions created by the Initial Developer are Copyright (C) 1998
# the Initial Developer. All Rights Reserved.
#
# Contributor(s):
#   Mark Pilgrim - port to Python
#
# This library is free software; you can redistribute it and/or
# modify it under the terms of the GNU Lesser General Public
# License as published by the Free Software Foundation; either
# version 2.1 of the License, or (at your option) any later version.
#
# This library is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
# Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this library; if not, write to the Free Software
# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA
# 02110-1301  USA
######################### END LICENSE BLOCK #########################

from .mbcharsetprober import MultiByteCharSetProber
from .codingstatemachine import CodingStateMachine
from .chardistribution import SJISDistributionAnalysis
from .jpcntx import SJISContextAnalysis
from .mbcssm import SJIS_SM_MODEL
from .enums import ProbingState, MachineState


class SJISProber(MultiByteCharSetProber):
    def __init__(self):
        super(SJISProber, self).__init__()
        self.coding_sm = CodingStateMachine(SJIS_SM_MODEL)
        self.distribution_analyzer = SJISDistributionAnalysis()
        self.context_analyzer = SJISContextAnalysis()
        self.reset()

    def reset(self):
        super(SJISProber, self).reset()
        self.context_analyzer.reset()

    @property
    def charset_name(self):
        return self.context_analyzer.charset_name

    @property
    def language(self):
        return "Japanese"

    def feed(self, byte_str):
        for i in range(len(byte_str)):
            coding_state = self.coding_sm.next_state(byte_str[i])
            if coding_state == MachineState.ERROR:
                self.logger.debug('%s %s prober hit error at byte %s',
                                  self.charset_name, self.language, i)
                self._state = ProbingState.NOT_ME
                break
            elif coding_state == MachineState.ITS_ME:
                self._state = ProbingState.FOUND_IT
                break
            elif coding_state == MachineState.START:
                char_len = self.coding_sm.get_current_charlen()
                if i == 0:
                    self._last_char[1] = byte_str[0]
                    self.context_analyzer.feed(self._last_char[2 - char_len:],
                                               char_len)
                    self.distribution_analyzer.feed(self._last_char, char_len)
                else:
                    self.context_analyzer.feed(byte_str[i + 1 - char_len:i + 3
                                                        - char_len], char_len)
                    self.distribution_analyzer.feed(byte_str[i - 1:i + 1],
                                                    char_len)

        self._last_char[0] = byte_str[-1]

        if self.state == ProbingState.DETECTING:
            if (self.context_analyzer.got_enough_data() and
                    (self.get_confidence() > self.SHORTCUT_THRESHOLD)):
                self._state = ProbingState.FOUND_IT

        return self.state

    def get_confidence(self):
        context_conf = self.context_analyzer.get_confidence()
        distrib_conf = self.distribution_analyzer.get_confidence()
        return max(context_conf, distrib_conf)

site-packages/pip/_vendor/chardet/euctwfreq.pyo  [compiled bytecode]
isi) iiti i ii* iiuiiiTiviM i{ iiitii'iiii iiiiiiiig iZiwi i iciiji+ ipiiiixiii ih iiii iiii i, i= i ii i iuiiiii/ iyii i iyivi i iiwiiiiii ii iziiiii>iiCi'iiii{iiZiiii iii iiaiii iiqi?ii ii|ii i"ii ii i}iiiii=iiAi i1ii)iiii?i i iiiixi ii ii i i iihiiS iiii(ii iii'ixiyii ii i i iiiii_i iii> ii}iPiii iiii~ii iN iicii iiiiHii ii iiiiiiiiiiiiij iii i* iiii i i iiih iii_iii/i i<ii~i7ik iT iii]i iU ibi itiiig iizii ii| iiiiiiiii{iii ii0 ii ii+ i i, iiiiIiiiii i iiiBi ii4isi iiiii i iii?iCiii iiGi iiiiiiiiiiiiiii|i1 iiimiC iiil iiii i} ii? iiii iiih iiiIiiiiV iiiiiiii^iiii i- i. iii)iii iiii2 iiiiii ii iiAii ii- iiiii i ii ii iiiiii ii iii iiiQi i iO iiiiiiiiziKii ii[i iJiii@ ii. iiiiiD ii}i/ i@ii iii i i@iiRiii ij i~iiiJiii iiiii i0 iimiiiim i~ ii i ii1 iii iiiiii idiiiiXiii9iii iiiUiiiiiiLii3 ii ii i i iii i iii iii$i(iviii&iibii iii i iiiiii{iiiiiiiiiii2 ii iiii iii iiiiiiii^iiiV i iiiiiiiiiiiiii2iW ii iW ioii iik iX iii iA i4 ii ii3 iii iii ii iii iiiiiii i iiHiiiiiiiiiiiDiiiIisi iii ii iiiiiwii{iiiiiii/ in i,iiiii i iii ii ii i ii4 iB iiiiP i5 i ii i ii ii iii|iiziiiii i'ii5 iiii! ii iyiiiViii i i iiiiiiiiii i iii-iiiiii ii i i iiii ii" i%iiiiJiC iiixiiii i i iil i6 iiiii ii iiiXii ii7 ii i iNi im iiii iWi i i!i ii i i>i8 ii"i#i9 ij iD i i ii$i i%ii iiiXi&ii'iE i i(iF ii)iixi ikiiiii*i+ii iiDiii,iiiiiiiKi-i.i/i: ii0iJiii1i|i iii iii iEi*i2i iiiiiiHixi iii3ii iii3iiii; ipii< i iii ii4i5i i i6iiOii i7i8ii9ii:i ii;iiiii<i iiY i=i>i i i?ii@iAiBi i ii= i iiio i ifiiii i i i iCi iDii# ii i> iiEii iFiZ iE iii i[ i$ i iiGi? ii iiiG iiii@ iHiiA i\ iLiiiii iIi i iiiiiiiJi i i i iKiLi] iMiNi iijiiQ i iB iOiPiiiQi iiiRi iSiTii(iiiiC iiUiiVi iWiXiiiYiiiiZiiidi[iciH ii_ii#i0 i\ii]iini i iiii^iii_ik iD ii`iaii ibii i iciiiiidiii iiiidi^ iE ieiiii iiiifii_ i7iiii i iiF iiigi-iii! iiihi iqi;i iiiiXii i iiiiijiiii ikiliii~imiG i` iniyip iiioi>iipiiqi iiiriia iiiisiti" iui|ii?iiiiviiiiiieiiiwixiyiiiiizi{i@i|ii i}i i iii% isiiR ii.iiMi?i ii~iiib iii@ii iiiii iiiiiiii ii6 iI iiii iiiiic iiiiii i i2iiiimii@iiii iiiNii iifiiiiZi iiiiii i\iH iid iiiJ ii ii i iiii iiK iiii# iiii ii iiiiKiiiiigiiiihii>ii.iiiiiiiii)in itiiiii io i iI i i iiiq iiir i iiiiiip ii ii iziiiiUiiJ iii1 iK itiiiiiii iiiJiiiL i iiiiii(t EUCTW_TYPICAL_DISTRIBUTION_RATIOtEUCTW_TABLE_SIZEtEUCTW_CHAR_TO_FREQ_ORDER(((sA/usr/lib/python2.7/site-packages/pip/_vendor/chardet/euctwfreq.pyt,sPKZAy44.site-packages/pip/_vendor/chardet/euckrfreq.pynu[######################## BEGIN LICENSE BLOCK ######################## # The Original Code is Mozilla Communicator client code. # # The Initial Developer of the Original Code is # Netscape Communications Corporation. # Portions created by the Initial Developer are Copyright (C) 1998 # the Initial Developer. All Rights Reserved. # # Contributor(s): # Mark Pilgrim - port to Python # # This library is free software; you can redistribute it and/or # modify it under the terms of the GNU Lesser General Public # License as published by the Free Software Foundation; either # version 2.1 of the License, or (at your option) any later version. # # This library is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU # Lesser General Public License for more details. 
# # You should have received a copy of the GNU Lesser General Public # License along with this library; if not, write to the Free Software # Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA # 02110-1301 USA ######################### END LICENSE BLOCK ######################### # Sampling from about 20M text materials include literature and computer technology # 128 --> 0.79 # 256 --> 0.92 # 512 --> 0.986 # 1024 --> 0.99944 # 2048 --> 0.99999 # # Idea Distribution Ratio = 0.98653 / (1-0.98653) = 73.24 # Random Distribution Ration = 512 / (2350-512) = 0.279. # # Typical Distribution Ratio EUCKR_TYPICAL_DISTRIBUTION_RATIO = 6.0 EUCKR_TABLE_SIZE = 2352 # Char to FreqOrder table , EUCKR_CHAR_TO_FREQ_ORDER = ( 13, 130, 120,1396, 481,1719,1720, 328, 609, 212,1721, 707, 400, 299,1722, 87, 1397,1723, 104, 536,1117,1203,1724,1267, 685,1268, 508,1725,1726,1727,1728,1398, 1399,1729,1730,1731, 141, 621, 326,1057, 368,1732, 267, 488, 20,1733,1269,1734, 945,1400,1735, 47, 904,1270,1736,1737, 773, 248,1738, 409, 313, 786, 429,1739, 116, 987, 813,1401, 683, 75,1204, 145,1740,1741,1742,1743, 16, 847, 667, 622, 708,1744,1745,1746, 966, 787, 304, 129,1747, 60, 820, 123, 676,1748,1749,1750, 1751, 617,1752, 626,1753,1754,1755,1756, 653,1757,1758,1759,1760,1761,1762, 856, 344,1763,1764,1765,1766, 89, 401, 418, 806, 905, 848,1767,1768,1769, 946,1205, 709,1770,1118,1771, 241,1772,1773,1774,1271,1775, 569,1776, 999,1777,1778,1779, 1780, 337, 751,1058, 28, 628, 254,1781, 177, 906, 270, 349, 891,1079,1782, 19, 1783, 379,1784, 315,1785, 629, 754,1402, 559,1786, 636, 203,1206,1787, 710, 567, 1788, 935, 814,1789,1790,1207, 766, 528,1791,1792,1208,1793,1794,1795,1796,1797, 1403,1798,1799, 533,1059,1404,1405,1156,1406, 936, 884,1080,1800, 351,1801,1802, 1803,1804,1805, 801,1806,1807,1808,1119,1809,1157, 714, 474,1407,1810, 298, 899, 885,1811,1120, 802,1158,1812, 892,1813,1814,1408, 659,1815,1816,1121,1817,1818, 1819,1820,1821,1822, 319,1823, 594, 545,1824, 815, 937,1209,1825,1826, 573,1409, 1022,1827,1210,1828,1829,1830,1831,1832,1833, 556, 722, 807,1122,1060,1834, 697, 1835, 900, 557, 715,1836,1410, 540,1411, 752,1159, 294, 597,1211, 976, 803, 770, 1412,1837,1838, 39, 794,1413, 358,1839, 371, 925,1840, 453, 661, 788, 531, 723, 544,1023,1081, 869, 91,1841, 392, 430, 790, 602,1414, 677,1082, 457,1415,1416, 1842,1843, 475, 327,1024,1417, 795, 121,1844, 733, 403,1418,1845,1846,1847, 300, 119, 711,1212, 627,1848,1272, 207,1849,1850, 796,1213, 382,1851, 519,1852,1083, 893,1853,1854,1855, 367, 809, 487, 671,1856, 663,1857,1858, 956, 471, 306, 857, 1859,1860,1160,1084,1861,1862,1863,1864,1865,1061,1866,1867,1868,1869,1870,1871, 282, 96, 574,1872, 502,1085,1873,1214,1874, 907,1875,1876, 827, 977,1419,1420, 1421, 268,1877,1422,1878,1879,1880, 308,1881, 2, 537,1882,1883,1215,1884,1885, 127, 791,1886,1273,1423,1887, 34, 336, 404, 643,1888, 571, 654, 894, 840,1889, 0, 886,1274, 122, 575, 260, 908, 938,1890,1275, 410, 316,1891,1892, 100,1893, 1894,1123, 48,1161,1124,1025,1895, 633, 901,1276,1896,1897, 115, 816,1898, 317, 1899, 694,1900, 909, 734,1424, 572, 866,1425, 691, 85, 524,1010, 543, 394, 841, 1901,1902,1903,1026,1904,1905,1906,1907,1908,1909, 30, 451, 651, 988, 310,1910, 1911,1426, 810,1216, 93,1912,1913,1277,1217,1914, 858, 759, 45, 58, 181, 610, 269,1915,1916, 131,1062, 551, 443,1000, 821,1427, 957, 895,1086,1917,1918, 375, 1919, 359,1920, 687,1921, 822,1922, 293,1923,1924, 40, 662, 118, 692, 29, 939, 887, 640, 482, 174,1925, 69,1162, 728,1428, 910,1926,1278,1218,1279, 386, 870, 217, 854,1163, 823,1927,1928,1929,1930, 
834,1931, 78,1932, 859,1933,1063,1934, 1935,1936,1937, 438,1164, 208, 595,1938,1939,1940,1941,1219,1125,1942, 280, 888, 1429,1430,1220,1431,1943,1944,1945,1946,1947,1280, 150, 510,1432,1948,1949,1950, 1951,1952,1953,1954,1011,1087,1955,1433,1043,1956, 881,1957, 614, 958,1064,1065, 1221,1958, 638,1001, 860, 967, 896,1434, 989, 492, 553,1281,1165,1959,1282,1002, 1283,1222,1960,1961,1962,1963, 36, 383, 228, 753, 247, 454,1964, 876, 678,1965, 1966,1284, 126, 464, 490, 835, 136, 672, 529, 940,1088,1435, 473,1967,1968, 467, 50, 390, 227, 587, 279, 378, 598, 792, 968, 240, 151, 160, 849, 882,1126,1285, 639,1044, 133, 140, 288, 360, 811, 563,1027, 561, 142, 523,1969,1970,1971, 7, 103, 296, 439, 407, 506, 634, 990,1972,1973,1974,1975, 645,1976,1977,1978,1979, 1980,1981, 236,1982,1436,1983,1984,1089, 192, 828, 618, 518,1166, 333,1127,1985, 818,1223,1986,1987,1988,1989,1990,1991,1992,1993, 342,1128,1286, 746, 842,1994, 1995, 560, 223,1287, 98, 8, 189, 650, 978,1288,1996,1437,1997, 17, 345, 250, 423, 277, 234, 512, 226, 97, 289, 42, 167,1998, 201,1999,2000, 843, 836, 824, 532, 338, 783,1090, 182, 576, 436,1438,1439, 527, 500,2001, 947, 889,2002,2003, 2004,2005, 262, 600, 314, 447,2006, 547,2007, 693, 738,1129,2008, 71,1440, 745, 619, 688,2009, 829,2010,2011, 147,2012, 33, 948,2013,2014, 74, 224,2015, 61, 191, 918, 399, 637,2016,1028,1130, 257, 902,2017,2018,2019,2020,2021,2022,2023, 2024,2025,2026, 837,2027,2028,2029,2030, 179, 874, 591, 52, 724, 246,2031,2032, 2033,2034,1167, 969,2035,1289, 630, 605, 911,1091,1168,2036,2037,2038,1441, 912, 2039, 623,2040,2041, 253,1169,1290,2042,1442, 146, 620, 611, 577, 433,2043,1224, 719,1170, 959, 440, 437, 534, 84, 388, 480,1131, 159, 220, 198, 679,2044,1012, 819,1066,1443, 113,1225, 194, 318,1003,1029,2045,2046,2047,2048,1067,2049,2050, 2051,2052,2053, 59, 913, 112,2054, 632,2055, 455, 144, 739,1291,2056, 273, 681, 499,2057, 448,2058,2059, 760,2060,2061, 970, 384, 169, 245,1132,2062,2063, 414, 1444,2064,2065, 41, 235,2066, 157, 252, 877, 568, 919, 789, 580,2067, 725,2068, 2069,1292,2070,2071,1445,2072,1446,2073,2074, 55, 588, 66,1447, 271,1092,2075, 1226,2076, 960,1013, 372,2077,2078,2079,2080,2081,1293,2082,2083,2084,2085, 850, 2086,2087,2088,2089,2090, 186,2091,1068, 180,2092,2093,2094, 109,1227, 522, 606, 2095, 867,1448,1093, 991,1171, 926, 353,1133,2096, 581,2097,2098,2099,1294,1449, 1450,2100, 596,1172,1014,1228,2101,1451,1295,1173,1229,2102,2103,1296,1134,1452, 949,1135,2104,2105,1094,1453,1454,1455,2106,1095,2107,2108,2109,2110,2111,2112, 2113,2114,2115,2116,2117, 804,2118,2119,1230,1231, 805,1456, 405,1136,2120,2121, 2122,2123,2124, 720, 701,1297, 992,1457, 927,1004,2125,2126,2127,2128,2129,2130, 22, 417,2131, 303,2132, 385,2133, 971, 520, 513,2134,1174, 73,1096, 231, 274, 962,1458, 673,2135,1459,2136, 152,1137,2137,2138,2139,2140,1005,1138,1460,1139, 2141,2142,2143,2144, 11, 374, 844,2145, 154,1232, 46,1461,2146, 838, 830, 721, 1233, 106,2147, 90, 428, 462, 578, 566,1175, 352,2148,2149, 538,1234, 124,1298, 2150,1462, 761, 565,2151, 686,2152, 649,2153, 72, 173,2154, 460, 415,2155,1463, 2156,1235, 305,2157,2158,2159,2160,2161,2162, 579,2163,2164,2165,2166,2167, 747, 2168,2169,2170,2171,1464, 669,2172,2173,2174,2175,2176,1465,2177, 23, 530, 285, 2178, 335, 729,2179, 397,2180,2181,2182,1030,2183,2184, 698,2185,2186, 325,2187, 2188, 369,2189, 799,1097,1015, 348,2190,1069, 680,2191, 851,1466,2192,2193, 10, 2194, 613, 424,2195, 979, 108, 449, 589, 27, 172, 81,1031, 80, 774, 281, 350, 1032, 525, 301, 582,1176,2196, 674,1045,2197,2198,1467, 730, 
762,2199,2200,2201, 2202,1468,2203, 993,2204,2205, 266,1070, 963,1140,2206,2207,2208, 664,1098, 972, 2209,2210,2211,1177,1469,1470, 871,2212,2213,2214,2215,2216,1471,2217,2218,2219, 2220,2221,2222,2223,2224,2225,2226,2227,1472,1236,2228,2229,2230,2231,2232,2233, 2234,2235,1299,2236,2237, 200,2238, 477, 373,2239,2240, 731, 825, 777,2241,2242, 2243, 521, 486, 548,2244,2245,2246,1473,1300, 53, 549, 137, 875, 76, 158,2247, 1301,1474, 469, 396,1016, 278, 712,2248, 321, 442, 503, 767, 744, 941,1237,1178, 1475,2249, 82, 178,1141,1179, 973,2250,1302,2251, 297,2252,2253, 570,2254,2255, 2256, 18, 450, 206,2257, 290, 292,1142,2258, 511, 162, 99, 346, 164, 735,2259, 1476,1477, 4, 554, 343, 798,1099,2260,1100,2261, 43, 171,1303, 139, 215,2262, 2263, 717, 775,2264,1033, 322, 216,2265, 831,2266, 149,2267,1304,2268,2269, 702, 1238, 135, 845, 347, 309,2270, 484,2271, 878, 655, 238,1006,1478,2272, 67,2273, 295,2274,2275, 461,2276, 478, 942, 412,2277,1034,2278,2279,2280, 265,2281, 541, 2282,2283,2284,2285,2286, 70, 852,1071,2287,2288,2289,2290, 21, 56, 509, 117, 432,2291,2292, 331, 980, 552,1101, 148, 284, 105, 393,1180,1239, 755,2293, 187, 2294,1046,1479,2295, 340,2296, 63,1047, 230,2297,2298,1305, 763,1306, 101, 800, 808, 494,2299,2300,2301, 903,2302, 37,1072, 14, 5,2303, 79, 675,2304, 312, 2305,2306,2307,2308,2309,1480, 6,1307,2310,2311,2312, 1, 470, 35, 24, 229, 2313, 695, 210, 86, 778, 15, 784, 592, 779, 32, 77, 855, 964,2314, 259,2315, 501, 380,2316,2317, 83, 981, 153, 689,1308,1481,1482,1483,2318,2319, 716,1484, 2320,2321,2322,2323,2324,2325,1485,2326,2327, 128, 57, 68, 261,1048, 211, 170, 1240, 31,2328, 51, 435, 742,2329,2330,2331, 635,2332, 264, 456,2333,2334,2335, 425,2336,1486, 143, 507, 263, 943,2337, 363, 920,1487, 256,1488,1102, 243, 601, 1489,2338,2339,2340,2341,2342,2343,2344, 861,2345,2346,2347,2348,2349,2350, 395, 2351,1490,1491, 62, 535, 166, 225,2352,2353, 668, 419,1241, 138, 604, 928,2354, 1181,2355,1492,1493,2356,2357,2358,1143,2359, 696,2360, 387, 307,1309, 682, 476, 2361,2362, 332, 12, 222, 156,2363, 232,2364, 641, 276, 656, 517,1494,1495,1035, 416, 736,1496,2365,1017, 586,2366,2367,2368,1497,2369, 242,2370,2371,2372,1498, 2373, 965, 713,2374,2375,2376,2377, 740, 982,1499, 944,1500,1007,2378,2379,1310, 1501,2380,2381,2382, 785, 329,2383,2384,1502,2385,2386,2387, 932,2388,1503,2389, 2390,2391,2392,1242,2393,2394,2395,2396,2397, 994, 950,2398,2399,2400,2401,1504, 1311,2402,2403,2404,2405,1049, 749,2406,2407, 853, 718,1144,1312,2408,1182,1505, 2409,2410, 255, 516, 479, 564, 550, 214,1506,1507,1313, 413, 239, 444, 339,1145, 1036,1508,1509,1314,1037,1510,1315,2411,1511,2412,2413,2414, 176, 703, 497, 624, 593, 921, 302,2415, 341, 165,1103,1512,2416,1513,2417,2418,2419, 376,2420, 700, 2421,2422,2423, 258, 768,1316,2424,1183,2425, 995, 608,2426,2427,2428,2429, 221, 2430,2431,2432,2433,2434,2435,2436,2437, 195, 323, 726, 188, 897, 983,1317, 377, 644,1050, 879,2438, 452,2439,2440,2441,2442,2443,2444, 914,2445,2446,2447,2448, 915, 489,2449,1514,1184,2450,2451, 515, 64, 427, 495,2452, 583,2453, 483, 485, 1038, 562, 213,1515, 748, 666,2454,2455,2456,2457, 334,2458, 780, 996,1008, 705, 1243,2459,2460,2461,2462,2463, 114,2464, 493,1146, 366, 163,1516, 961,1104,2465, 291,2466,1318,1105,2467,1517, 365,2468, 355, 951,1244,2469,1319,2470, 631,2471, 2472, 218,1320, 364, 320, 756,1518,1519,1321,1520,1322,2473,2474,2475,2476, 997, 2477,2478,2479,2480, 665,1185,2481, 916,1521,2482,2483,2484, 584, 684,2485,2486, 797,2487,1051,1186,2488,2489,2490,1522,2491,2492, 370,2493,1039,1187, 65,2494, 434, 205, 
463,1188,2495, 125, 812, 391, 402, 826, 699, 286, 398, 155, 781, 771, 585,2496, 590, 505,1073,2497, 599, 244, 219, 917,1018, 952, 646,1523,2498,1323, 2499,2500, 49, 984, 354, 741,2501, 625,2502,1324,2503,1019, 190, 357, 757, 491, 95, 782, 868,2504,2505,2506,2507,2508,2509, 134,1524,1074, 422,1525, 898,2510, 161,2511,2512,2513,2514, 769,2515,1526,2516,2517, 411,1325,2518, 472,1527,2519, 2520,2521,2522,2523,2524, 985,2525,2526,2527,2528,2529,2530, 764,2531,1245,2532, 2533, 25, 204, 311,2534, 496,2535,1052,2536,2537,2538,2539,2540,2541,2542, 199, 704, 504, 468, 758, 657,1528, 196, 44, 839,1246, 272, 750,2543, 765, 862,2544, 2545,1326,2546, 132, 615, 933,2547, 732,2548,2549,2550,1189,1529,2551, 283,1247, 1053, 607, 929,2552,2553,2554, 930, 183, 872, 616,1040,1147,2555,1148,1020, 441, 249,1075,2556,2557,2558, 466, 743,2559,2560,2561, 92, 514, 426, 420, 526,2562, 2563,2564,2565,2566,2567,2568, 185,2569,2570,2571,2572, 776,1530, 658,2573, 362, 2574, 361, 922,1076, 793,2575,2576,2577,2578,2579,2580,1531, 251,2581,2582,2583, 2584,1532, 54, 612, 237,1327,2585,2586, 275, 408, 647, 111,2587,1533,1106, 465, 3, 458, 9, 38,2588, 107, 110, 890, 209, 26, 737, 498,2589,1534,2590, 431, 202, 88,1535, 356, 287,1107, 660,1149,2591, 381,1536, 986,1150, 445,1248,1151, 974,2592,2593, 846,2594, 446, 953, 184,1249,1250, 727,2595, 923, 193, 883,2596, 2597,2598, 102, 324, 539, 817,2599, 421,1041,2600, 832,2601, 94, 175, 197, 406, 2602, 459,2603,2604,2605,2606,2607, 330, 555,2608,2609,2610, 706,1108, 389,2611, 2612,2613,2614, 233,2615, 833, 558, 931, 954,1251,2616,2617,1537, 546,2618,2619, 1009,2620,2621,2622,1538, 690,1328,2623, 955,2624,1539,2625,2626, 772,2627,2628, 2629,2630,2631, 924, 648, 863, 603,2632,2633, 934,1540, 864, 865,2634, 642,1042, 670,1190,2635,2636,2637,2638, 168,2639, 652, 873, 542,1054,1541,2640,2641,2642, # 512, 256 ) PKZ?JawYwY-site-packages/pip/_vendor/urllib3/response.pynu[from __future__ import absolute_import from contextlib import contextmanager import zlib import io import logging from socket import timeout as SocketTimeout from socket import error as SocketError from ._collections import HTTPHeaderDict from .exceptions import ( BodyNotHttplibCompatible, ProtocolError, DecodeError, ReadTimeoutError, ResponseNotChunked, IncompleteRead, InvalidHeader ) from .packages.six import string_types as basestring, binary_type, PY3 from .packages.six.moves import http_client as httplib from .connection import HTTPException, BaseSSLError from .util.response import is_fp_closed, is_response_to_head log = logging.getLogger(__name__) class DeflateDecoder(object): def __init__(self): self._first_try = True self._data = binary_type() self._obj = zlib.decompressobj() def __getattr__(self, name): return getattr(self._obj, name) def decompress(self, data): if not data: return data if not self._first_try: return self._obj.decompress(data) self._data += data try: decompressed = self._obj.decompress(data) if decompressed: self._first_try = False self._data = None return decompressed except zlib.error: self._first_try = False self._obj = zlib.decompressobj(-zlib.MAX_WBITS) try: return self.decompress(self._data) finally: self._data = None class GzipDecoder(object): def __init__(self): self._obj = zlib.decompressobj(16 + zlib.MAX_WBITS) def __getattr__(self, name): return getattr(self._obj, name) def decompress(self, data): if not data: return data return self._obj.decompress(data) def _get_decoder(mode): if mode == 'gzip': return GzipDecoder() return DeflateDecoder() class HTTPResponse(io.IOBase): """ HTTP Response 
container. Backwards-compatible to httplib's HTTPResponse but the response ``body`` is loaded and decoded on-demand when the ``data`` property is accessed. This class is also compatible with the Python standard library's :mod:`io` module, and can hence be treated as a readable object in the context of that framework. Extra parameters for behaviour not present in httplib.HTTPResponse: :param preload_content: If True, the response's body will be preloaded during construction. :param decode_content: If True, attempts to decode specific content-encoding's based on headers (like 'gzip' and 'deflate') will be skipped and raw data will be used instead. :param original_response: When this HTTPResponse wrapper is generated from an httplib.HTTPResponse object, it's convenient to include the original for debug purposes. It's otherwise unused. :param retries: The retries contains the last :class:`~urllib3.util.retry.Retry` that was used during the request. :param enforce_content_length: Enforce content length checking. Body returned by server must match value of Content-Length header, if present. Otherwise, raise error. """ CONTENT_DECODERS = ['gzip', 'deflate'] REDIRECT_STATUSES = [301, 302, 303, 307, 308] def __init__(self, body='', headers=None, status=0, version=0, reason=None, strict=0, preload_content=True, decode_content=True, original_response=None, pool=None, connection=None, retries=None, enforce_content_length=False, request_method=None): if isinstance(headers, HTTPHeaderDict): self.headers = headers else: self.headers = HTTPHeaderDict(headers) self.status = status self.version = version self.reason = reason self.strict = strict self.decode_content = decode_content self.retries = retries self.enforce_content_length = enforce_content_length self._decoder = None self._body = None self._fp = None self._original_response = original_response self._fp_bytes_read = 0 if body and isinstance(body, (basestring, binary_type)): self._body = body self._pool = pool self._connection = connection if hasattr(body, 'read'): self._fp = body # Are we using the chunked-style of transfer encoding? self.chunked = False self.chunk_left = None tr_enc = self.headers.get('transfer-encoding', '').lower() # Don't incur the penalty of creating a list and then discarding it encodings = (enc.strip() for enc in tr_enc.split(",")) if "chunked" in encodings: self.chunked = True # Determine length of response self.length_remaining = self._init_length(request_method) # If requested, preload the body. if preload_content and not self._body: self._body = self.read(decode_content=decode_content) def get_redirect_location(self): """ Should we redirect and where to? :returns: Truthy redirect location string if we got a redirect status code and valid location. ``None`` if redirect status and no location. ``False`` if not a redirect status code. """ if self.status in self.REDIRECT_STATUSES: return self.headers.get('location') return False def release_conn(self): if not self._pool or not self._connection: return self._pool._put_conn(self._connection) self._connection = None @property def data(self): # For backwords-compat with earlier urllib3 0.4 and earlier. if self._body: return self._body if self._fp: return self.read(cache_content=True) @property def connection(self): return self._connection def tell(self): """ Obtain the number of bytes pulled over the wire so far. May differ from the amount of content returned by :meth:``HTTPResponse.read`` if bytes are encoded on the wire (e.g, compressed). 
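        For instance, in a small illustrative sketch (``resp`` is an assumed,
        pre-existing :class:`HTTPResponse` created with ``preload_content=False``;
        it is not defined in this module)::

            >>> chunk = resp.read(64)   # reads up to 64 raw bytes, returns them decoded
            >>> resp.tell()             # raw bytes pulled off the socket so far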
""" return self._fp_bytes_read def _init_length(self, request_method): """ Set initial length value for Response content if available. """ length = self.headers.get('content-length') if length is not None and self.chunked: # This Response will fail with an IncompleteRead if it can't be # received as chunked. This method falls back to attempt reading # the response before raising an exception. log.warning("Received response with both Content-Length and " "Transfer-Encoding set. This is expressly forbidden " "by RFC 7230 sec 3.3.2. Ignoring Content-Length and " "attempting to process response as Transfer-Encoding: " "chunked.") return None elif length is not None: try: # RFC 7230 section 3.3.2 specifies multiple content lengths can # be sent in a single Content-Length header # (e.g. Content-Length: 42, 42). This line ensures the values # are all valid ints and that as long as the `set` length is 1, # all values are the same. Otherwise, the header is invalid. lengths = set([int(val) for val in length.split(',')]) if len(lengths) > 1: raise InvalidHeader("Content-Length contained multiple " "unmatching values (%s)" % length) length = lengths.pop() except ValueError: length = None else: if length < 0: length = None # Convert status to int for comparison # In some cases, httplib returns a status of "_UNKNOWN" try: status = int(self.status) except ValueError: status = 0 # Check for responses that shouldn't include a body if status in (204, 304) or 100 <= status < 200 or request_method == 'HEAD': length = 0 return length def _init_decoder(self): """ Set-up the _decoder attribute if necessary. """ # Note: content-encoding value should be case-insensitive, per RFC 7230 # Section 3.2 content_encoding = self.headers.get('content-encoding', '').lower() if self._decoder is None and content_encoding in self.CONTENT_DECODERS: self._decoder = _get_decoder(content_encoding) def _decode(self, data, decode_content, flush_decoder): """ Decode the data passed in and potentially flush the decoder. """ try: if decode_content and self._decoder: data = self._decoder.decompress(data) except (IOError, zlib.error) as e: content_encoding = self.headers.get('content-encoding', '').lower() raise DecodeError( "Received response with content-encoding: %s, but " "failed to decode it." % content_encoding, e) if flush_decoder and decode_content: data += self._flush_decoder() return data def _flush_decoder(self): """ Flushes the decoder. Should only be called if the decoder is actually being used. """ if self._decoder: buf = self._decoder.decompress(b'') return buf + self._decoder.flush() return b'' @contextmanager def _error_catcher(self): """ Catch low-level python exceptions, instead re-raising urllib3 variants, so that low-level exceptions are not leaked in the high-level api. On exit, release the connection back to the pool. """ clean_exit = False try: try: yield except SocketTimeout: # FIXME: Ideally we'd like to include the url in the ReadTimeoutError but # there is yet no clean way to get at it from this context. raise ReadTimeoutError(self._pool, None, 'Read timed out.') except BaseSSLError as e: # FIXME: Is there a better way to differentiate between SSLErrors? if 'read operation timed out' not in str(e): # Defensive: # This shouldn't happen but just in case we're missing an edge # case, let's avoid swallowing SSL errors. raise raise ReadTimeoutError(self._pool, None, 'Read timed out.') except (HTTPException, SocketError) as e: # This includes IncompleteRead. 
raise ProtocolError('Connection broken: %r' % e, e) # If no exception is thrown, we should avoid cleaning up # unnecessarily. clean_exit = True finally: # If we didn't terminate cleanly, we need to throw away our # connection. if not clean_exit: # The response may not be closed but we're not going to use it # anymore so close it now to ensure that the connection is # released back to the pool. if self._original_response: self._original_response.close() # Closing the response may not actually be sufficient to close # everything, so if we have a hold of the connection close that # too. if self._connection: self._connection.close() # If we hold the original response but it's closed now, we should # return the connection back to the pool. if self._original_response and self._original_response.isclosed(): self.release_conn() def read(self, amt=None, decode_content=None, cache_content=False): """ Similar to :meth:`httplib.HTTPResponse.read`, but with two additional parameters: ``decode_content`` and ``cache_content``. :param amt: How much of the content to read. If specified, caching is skipped because it doesn't make sense to cache partial content as the full response. :param decode_content: If True, will attempt to decode the body based on the 'content-encoding' header. :param cache_content: If True, will save the returned data such that the same result is returned despite of the state of the underlying file object. This is useful if you want the ``.data`` property to continue working after having ``.read()`` the file object. (Overridden if ``amt`` is set.) """ self._init_decoder() if decode_content is None: decode_content = self.decode_content if self._fp is None: return flush_decoder = False data = None with self._error_catcher(): if amt is None: # cStringIO doesn't like amt=None data = self._fp.read() flush_decoder = True else: cache_content = False data = self._fp.read(amt) if amt != 0 and not data: # Platform-specific: Buggy versions of Python. # Close the connection when no data is returned # # This is redundant to what httplib/http.client _should_ # already do. However, versions of python released before # December 15, 2012 (http://bugs.python.org/issue16298) do # not properly close the connection in all cases. There is # no harm in redundantly calling close. self._fp.close() flush_decoder = True if self.enforce_content_length and self.length_remaining not in (0, None): # This is an edge case that httplib failed to cover due # to concerns of backward compatibility. We're # addressing it here to make sure IncompleteRead is # raised during streaming, so all calls with incorrect # Content-Length are caught. raise IncompleteRead(self._fp_bytes_read, self.length_remaining) if data: self._fp_bytes_read += len(data) if self.length_remaining is not None: self.length_remaining -= len(data) data = self._decode(data, decode_content, flush_decoder) if cache_content: self._body = data return data def stream(self, amt=2**16, decode_content=None): """ A generator wrapper for the read() method. A call will block until ``amt`` bytes have been read from the connection or until the connection is closed. :param amt: How much of the content to read. The generator will return up to much data per iteration, but may return less. This is particularly likely when using compressed data. However, the empty string will never be returned. :param decode_content: If True, will attempt to decode the body based on the 'content-encoding' header. 
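        A minimal usage sketch (illustrative only; ``pool``, the request path and
        the output file name are assumptions, not part of this module)::

            >>> resp = pool.urlopen('GET', '/large-file', preload_content=False)
            >>> with open('out.bin', 'wb') as fh:
            ...     for chunk in resp.stream(2**16, decode_content=True):
            ...         fh.write(chunk)
            >>> resp.release_conn()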
""" if self.chunked and self.supports_chunked_reads(): for line in self.read_chunked(amt, decode_content=decode_content): yield line else: while not is_fp_closed(self._fp): data = self.read(amt=amt, decode_content=decode_content) if data: yield data @classmethod def from_httplib(ResponseCls, r, **response_kw): """ Given an :class:`httplib.HTTPResponse` instance ``r``, return a corresponding :class:`urllib3.response.HTTPResponse` object. Remaining parameters are passed to the HTTPResponse constructor, along with ``original_response=r``. """ headers = r.msg if not isinstance(headers, HTTPHeaderDict): if PY3: # Python 3 headers = HTTPHeaderDict(headers.items()) else: # Python 2 headers = HTTPHeaderDict.from_httplib(headers) # HTTPResponse objects in Python 3 don't have a .strict attribute strict = getattr(r, 'strict', 0) resp = ResponseCls(body=r, headers=headers, status=r.status, version=r.version, reason=r.reason, strict=strict, original_response=r, **response_kw) return resp # Backwards-compatibility methods for httplib.HTTPResponse def getheaders(self): return self.headers def getheader(self, name, default=None): return self.headers.get(name, default) # Backwards compatibility for http.cookiejar def info(self): return self.headers # Overrides from io.IOBase def close(self): if not self.closed: self._fp.close() if self._connection: self._connection.close() @property def closed(self): if self._fp is None: return True elif hasattr(self._fp, 'isclosed'): return self._fp.isclosed() elif hasattr(self._fp, 'closed'): return self._fp.closed else: return True def fileno(self): if self._fp is None: raise IOError("HTTPResponse has no file to get a fileno from") elif hasattr(self._fp, "fileno"): return self._fp.fileno() else: raise IOError("The file-like object this HTTPResponse is wrapped " "around has no file descriptor") def flush(self): if self._fp is not None and hasattr(self._fp, 'flush'): return self._fp.flush() def readable(self): # This method is required for `io` module compatibility. return True def readinto(self, b): # This method is required for `io` module compatibility. temp = self.read(len(b)) if len(temp) == 0: return 0 else: b[:len(temp)] = temp return len(temp) def supports_chunked_reads(self): """ Checks if the underlying file-like object looks like a httplib.HTTPResponse object. We do this by testing for the fp attribute. If it is present we assume it returns raw chunks as processed by read_chunked(). """ return hasattr(self._fp, 'fp') def _update_chunk_length(self): # First, we'll figure out length of a chunk and then # we'll try to read it from socket. if self.chunk_left is not None: return line = self._fp.fp.readline() line = line.split(b';', 1)[0] try: self.chunk_left = int(line, 16) except ValueError: # Invalid chunked protocol response, abort. self.close() raise httplib.IncompleteRead(line) def _handle_chunk(self, amt): returned_chunk = None if amt is None: chunk = self._fp._safe_read(self.chunk_left) returned_chunk = chunk self._fp._safe_read(2) # Toss the CRLF at the end of the chunk. self.chunk_left = None elif amt < self.chunk_left: value = self._fp._safe_read(amt) self.chunk_left = self.chunk_left - amt returned_chunk = value elif amt == self.chunk_left: value = self._fp._safe_read(amt) self._fp._safe_read(2) # Toss the CRLF at the end of the chunk. self.chunk_left = None returned_chunk = value else: # amt > self.chunk_left returned_chunk = self._fp._safe_read(self.chunk_left) self._fp._safe_read(2) # Toss the CRLF at the end of the chunk. 
self.chunk_left = None return returned_chunk def read_chunked(self, amt=None, decode_content=None): """ Similar to :meth:`HTTPResponse.read`, but with an additional parameter: ``decode_content``. :param decode_content: If True, will attempt to decode the body based on the 'content-encoding' header. """ self._init_decoder() # FIXME: Rewrite this method and make it a class with a better structured logic. if not self.chunked: raise ResponseNotChunked( "Response is not chunked. " "Header 'transfer-encoding: chunked' is missing.") if not self.supports_chunked_reads(): raise BodyNotHttplibCompatible( "Body should be httplib.HTTPResponse like. " "It should have have an fp attribute which returns raw chunks.") # Don't bother reading the body of a HEAD request. if self._original_response and is_response_to_head(self._original_response): self._original_response.close() return with self._error_catcher(): while True: self._update_chunk_length() if self.chunk_left == 0: break chunk = self._handle_chunk(amt) decoded = self._decode(chunk, decode_content=decode_content, flush_decoder=False) if decoded: yield decoded if decode_content: # On CPython and PyPy, we should never need to flush the # decoder. However, on Jython we *might* need to, so # lets defensively do it anyway. decoded = self._flush_decoder() if decoded: # Platform-specific: Jython. yield decoded # Chunk content ends with \r\n: discard it. while True: line = self._fp.fp.readline() if not line: # Some sites may not end with '\r\n'. break if line == b'\r\n': break # We read everything; close the "file". if self._original_response: self._original_response.close() PKZo3site-packages/pip/_vendor/urllib3/connectionpool.pynu[from __future__ import absolute_import import errno import logging import sys import warnings from socket import error as SocketError, timeout as SocketTimeout import socket from .exceptions import ( ClosedPoolError, ProtocolError, EmptyPoolError, HeaderParsingError, HostChangedError, LocationValueError, MaxRetryError, ProxyError, ReadTimeoutError, SSLError, TimeoutError, InsecureRequestWarning, NewConnectionError, ) from .packages.ssl_match_hostname import CertificateError from .packages import six from .packages.six.moves import queue from .connection import ( port_by_scheme, DummyConnection, HTTPConnection, HTTPSConnection, VerifiedHTTPSConnection, HTTPException, BaseSSLError, ) from .request import RequestMethods from .response import HTTPResponse from .util.connection import is_connection_dropped from .util.request import set_file_position from .util.response import assert_header_parsing from .util.retry import Retry from .util.timeout import Timeout from .util.url import get_host, Url if six.PY2: # Queue is imported for side effects on MS Windows import Queue as _unused_module_Queue # noqa: F401 xrange = six.moves.xrange log = logging.getLogger(__name__) _Default = object() # Pool objects class ConnectionPool(object): """ Base class for all connection pools, such as :class:`.HTTPConnectionPool` and :class:`.HTTPSConnectionPool`. 
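    Pools are usable as context managers, which closes them on exit; a minimal
    sketch (the host and path below are illustrative assumptions)::

        >>> with HTTPConnectionPool('example.com', maxsize=1) as pool:
        ...     r = pool.request('GET', '/')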
""" scheme = None QueueCls = queue.LifoQueue def __init__(self, host, port=None): if not host: raise LocationValueError("No host specified.") self.host = _ipv6_host(host).lower() self._proxy_host = host.lower() self.port = port def __str__(self): return '%s(host=%r, port=%r)' % (type(self).__name__, self.host, self.port) def __enter__(self): return self def __exit__(self, exc_type, exc_val, exc_tb): self.close() # Return False to re-raise any potential exceptions return False def close(self): """ Close all pooled connections and disable the pool. """ pass # This is taken from http://hg.python.org/cpython/file/7aaba721ebc0/Lib/socket.py#l252 _blocking_errnos = set([errno.EAGAIN, errno.EWOULDBLOCK]) class HTTPConnectionPool(ConnectionPool, RequestMethods): """ Thread-safe connection pool for one host. :param host: Host used for this HTTP Connection (e.g. "localhost"), passed into :class:`httplib.HTTPConnection`. :param port: Port used for this HTTP Connection (None is equivalent to 80), passed into :class:`httplib.HTTPConnection`. :param strict: Causes BadStatusLine to be raised if the status line can't be parsed as a valid HTTP/1.0 or 1.1 status line, passed into :class:`httplib.HTTPConnection`. .. note:: Only works in Python 2. This parameter is ignored in Python 3. :param timeout: Socket timeout in seconds for each individual connection. This can be a float or integer, which sets the timeout for the HTTP request, or an instance of :class:`urllib3.util.Timeout` which gives you more fine-grained control over request timeouts. After the constructor has been parsed, this is always a `urllib3.util.Timeout` object. :param maxsize: Number of connections to save that can be reused. More than 1 is useful in multithreaded situations. If ``block`` is set to False, more connections will be created but they will not be saved once they've been used. :param block: If set to True, no more than ``maxsize`` connections will be used at a time. When no free connections are available, the call will block until a connection has been released. This is a useful side effect for particular multithreaded situations where one does not want to use more than maxsize connections per host to prevent flooding. :param headers: Headers to include with all requests, unless other headers are given explicitly. :param retries: Retry configuration to use by default with requests in this pool. :param _proxy: Parsed proxy URL, should not be used directly, instead, see :class:`urllib3.connectionpool.ProxyManager`" :param _proxy_headers: A dictionary with proxy headers, should not be used directly, instead, see :class:`urllib3.connectionpool.ProxyManager`" :param \\**conn_kw: Additional parameters are used to create fresh :class:`urllib3.connection.HTTPConnection`, :class:`urllib3.connection.HTTPSConnection` instances. 
""" scheme = 'http' ConnectionCls = HTTPConnection ResponseCls = HTTPResponse def __init__(self, host, port=None, strict=False, timeout=Timeout.DEFAULT_TIMEOUT, maxsize=1, block=False, headers=None, retries=None, _proxy=None, _proxy_headers=None, **conn_kw): ConnectionPool.__init__(self, host, port) RequestMethods.__init__(self, headers) self.strict = strict if not isinstance(timeout, Timeout): timeout = Timeout.from_float(timeout) if retries is None: retries = Retry.DEFAULT self.timeout = timeout self.retries = retries self.pool = self.QueueCls(maxsize) self.block = block self.proxy = _proxy self.proxy_headers = _proxy_headers or {} # Fill the queue up so that doing get() on it will block properly for _ in xrange(maxsize): self.pool.put(None) # These are mostly for testing and debugging purposes. self.num_connections = 0 self.num_requests = 0 self.conn_kw = conn_kw if self.proxy: # Enable Nagle's algorithm for proxies, to avoid packet fragmentation. # We cannot know if the user has added default socket options, so we cannot replace the # list. self.conn_kw.setdefault('socket_options', []) def _new_conn(self): """ Return a fresh :class:`HTTPConnection`. """ self.num_connections += 1 log.debug("Starting new HTTP connection (%d): %s", self.num_connections, self.host) conn = self.ConnectionCls(host=self.host, port=self.port, timeout=self.timeout.connect_timeout, strict=self.strict, **self.conn_kw) return conn def _get_conn(self, timeout=None): """ Get a connection. Will return a pooled connection if one is available. If no connections are available and :prop:`.block` is ``False``, then a fresh connection is returned. :param timeout: Seconds to wait before giving up and raising :class:`urllib3.exceptions.EmptyPoolError` if the pool is empty and :prop:`.block` is ``True``. """ conn = None try: conn = self.pool.get(block=self.block, timeout=timeout) except AttributeError: # self.pool is None raise ClosedPoolError(self, "Pool is closed.") except queue.Empty: if self.block: raise EmptyPoolError(self, "Pool reached maximum size and no more " "connections are allowed.") pass # Oh well, we'll create a new connection then # If this is a persistent connection, check if it got disconnected if conn and is_connection_dropped(conn): log.debug("Resetting dropped connection: %s", self.host) conn.close() if getattr(conn, 'auto_open', 1) == 0: # This is a proxied connection that has been mutated by # httplib._tunnel() and cannot be reused (since it would # attempt to bypass the proxy) conn = None return conn or self._new_conn() def _put_conn(self, conn): """ Put a connection back into the pool. :param conn: Connection object for the current host and port as returned by :meth:`._new_conn` or :meth:`._get_conn`. If the pool is already full, the connection is closed and discarded because we exceeded maxsize. If connections are discarded frequently, then maxsize should be increased. If the pool is closed, then the connection will be closed and discarded. """ try: self.pool.put(conn, block=False) return # Everything is dandy, done. except AttributeError: # self.pool is None. pass except queue.Full: # This should never happen if self.block == True log.warning( "Connection pool is full, discarding connection: %s", self.host) # Connection never got put back into the pool, close it. if conn: conn.close() def _validate_conn(self, conn): """ Called right before a request is made, after the socket is created. """ pass def _prepare_proxy(self, conn): # Nothing to do for HTTP connections. 
pass def _get_timeout(self, timeout): """ Helper that always returns a :class:`urllib3.util.Timeout` """ if timeout is _Default: return self.timeout.clone() if isinstance(timeout, Timeout): return timeout.clone() else: # User passed us an int/float. This is for backwards compatibility, # can be removed later return Timeout.from_float(timeout) def _raise_timeout(self, err, url, timeout_value): """Is the error actually a timeout? Will raise a ReadTimeout or pass""" if isinstance(err, SocketTimeout): raise ReadTimeoutError(self, url, "Read timed out. (read timeout=%s)" % timeout_value) # See the above comment about EAGAIN in Python 3. In Python 2 we have # to specifically catch it and throw the timeout error if hasattr(err, 'errno') and err.errno in _blocking_errnos: raise ReadTimeoutError(self, url, "Read timed out. (read timeout=%s)" % timeout_value) # Catch possible read timeouts thrown as SSL errors. If not the # case, rethrow the original. We need to do this because of: # http://bugs.python.org/issue10272 if 'timed out' in str(err) or 'did not complete (read)' in str(err): # Python 2.6 raise ReadTimeoutError(self, url, "Read timed out. (read timeout=%s)" % timeout_value) def _make_request(self, conn, method, url, timeout=_Default, chunked=False, **httplib_request_kw): """ Perform a request on a given urllib connection object taken from our pool. :param conn: a connection from one of our connection pools :param timeout: Socket timeout in seconds for the request. This can be a float or integer, which will set the same timeout value for the socket connect and the socket read, or an instance of :class:`urllib3.util.Timeout`, which gives you more fine-grained control over your timeouts. """ self.num_requests += 1 timeout_obj = self._get_timeout(timeout) timeout_obj.start_connect() conn.timeout = timeout_obj.connect_timeout # Trigger any extra validation we need to do. try: self._validate_conn(conn) except (SocketTimeout, BaseSSLError) as e: # Py2 raises this as a BaseSSLError, Py3 raises it as socket timeout. self._raise_timeout(err=e, url=url, timeout_value=conn.timeout) raise # conn.request() calls httplib.*.request, not the method in # urllib3.request. It also calls makefile (recv) on the socket. if chunked: conn.request_chunked(method, url, **httplib_request_kw) else: conn.request(method, url, **httplib_request_kw) # Reset the timeout for the recv() on the socket read_timeout = timeout_obj.read_timeout # App Engine doesn't have a sock attr if getattr(conn, 'sock', None): # In Python 3 socket.py will catch EAGAIN and return None when you # try and read into the file pointer created by http.client, which # instead raises a BadStatusLine exception. Instead of catching # the exception and assuming all BadStatusLine exceptions are read # timeouts, check for a zero timeout before making the request. if read_timeout == 0: raise ReadTimeoutError( self, url, "Read timed out. (read timeout=%s)" % read_timeout) if read_timeout is Timeout.DEFAULT_TIMEOUT: conn.sock.settimeout(socket.getdefaulttimeout()) else: # None or a value conn.sock.settimeout(read_timeout) # Receive the response from the server try: try: # Python 2.7, use buffering of HTTP responses httplib_response = conn.getresponse(buffering=True) except TypeError: # Python 2.6 and older, Python 3 try: httplib_response = conn.getresponse() except Exception as e: # Remove the TypeError from the exception chain in Python 3; # otherwise it looks like a programming error was the cause. 
six.raise_from(e, None) except (SocketTimeout, BaseSSLError, SocketError) as e: self._raise_timeout(err=e, url=url, timeout_value=read_timeout) raise # AppEngine doesn't have a version attr. http_version = getattr(conn, '_http_vsn_str', 'HTTP/?') log.debug("%s://%s:%s \"%s %s %s\" %s %s", self.scheme, self.host, self.port, method, url, http_version, httplib_response.status, httplib_response.length) try: assert_header_parsing(httplib_response.msg) except (HeaderParsingError, TypeError) as hpe: # Platform-specific: Python 3 log.warning( 'Failed to parse headers (url=%s): %s', self._absolute_url(url), hpe, exc_info=True) return httplib_response def _absolute_url(self, path): return Url(scheme=self.scheme, host=self.host, port=self.port, path=path).url def close(self): """ Close all pooled connections and disable the pool. """ # Disable access to the pool old_pool, self.pool = self.pool, None try: while True: conn = old_pool.get(block=False) if conn: conn.close() except queue.Empty: pass # Done. def is_same_host(self, url): """ Check if the given ``url`` is a member of the same host as this connection pool. """ if url.startswith('/'): return True # TODO: Add optional support for socket.gethostbyname checking. scheme, host, port = get_host(url) host = _ipv6_host(host).lower() # Use explicit default port for comparison when none is given if self.port and not port: port = port_by_scheme.get(scheme) elif not self.port and port == port_by_scheme.get(scheme): port = None return (scheme, host, port) == (self.scheme, self.host, self.port) def urlopen(self, method, url, body=None, headers=None, retries=None, redirect=True, assert_same_host=True, timeout=_Default, pool_timeout=None, release_conn=None, chunked=False, body_pos=None, **response_kw): """ Get a connection from the pool and perform an HTTP request. This is the lowest level call for making a request, so you'll need to specify all the raw details. .. note:: More commonly, it's appropriate to use a convenience method provided by :class:`.RequestMethods`, such as :meth:`request`. .. note:: `release_conn` will only behave as expected if `preload_content=False` because we want to make `preload_content=False` the default behaviour someday soon without breaking backwards compatibility. :param method: HTTP request method (such as GET, POST, PUT, etc.) :param body: Data to send in the request body (useful for creating POST requests, see HTTPConnectionPool.post_url for more convenience). :param headers: Dictionary of custom headers to send, such as User-Agent, If-None-Match, etc. If None, pool headers are used. If provided, these headers completely replace any pool-specific headers. :param retries: Configure the number of retries to allow before raising a :class:`~urllib3.exceptions.MaxRetryError` exception. Pass ``None`` to retry until you receive a response. Pass a :class:`~urllib3.util.retry.Retry` object for fine-grained control over different types of retries. Pass an integer number to retry connection errors that many times, but no other types of errors. Pass zero to never retry. If ``False``, then retries are disabled and any exception is raised immediately. Also, instead of raising a MaxRetryError on redirects, the redirect response will be returned. :type retries: :class:`~urllib3.util.retry.Retry`, False, or an int. :param redirect: If True, automatically handle redirects (status codes 301, 302, 303, 307, 308). Each redirect counts as a retry. Disabling retries will disable redirect, too. 
:param assert_same_host: If ``True``, will make sure that the host of the pool requests is consistent else will raise HostChangedError. When False, you can use the pool on an HTTP proxy and request foreign hosts. :param timeout: If specified, overrides the default timeout for this one request. It may be a float (in seconds) or an instance of :class:`urllib3.util.Timeout`. :param pool_timeout: If set and the pool is set to block=True, then this method will block for ``pool_timeout`` seconds and raise EmptyPoolError if no connection is available within the time period. :param release_conn: If False, then the urlopen call will not release the connection back into the pool once a response is received (but will release if you read the entire contents of the response such as when `preload_content=True`). This is useful if you're not preloading the response's content immediately. You will need to call ``r.release_conn()`` on the response ``r`` to return the connection back into the pool. If None, it takes the value of ``response_kw.get('preload_content', True)``. :param chunked: If True, urllib3 will send the body using chunked transfer encoding. Otherwise, urllib3 will send the body using the standard content-length form. Defaults to False. :param int body_pos: Position to seek to in file-like body in the event of a retry or redirect. Typically this won't need to be set because urllib3 will auto-populate the value when needed. :param \\**response_kw: Additional parameters are passed to :meth:`urllib3.response.HTTPResponse.from_httplib` """ if headers is None: headers = self.headers if not isinstance(retries, Retry): retries = Retry.from_int(retries, redirect=redirect, default=self.retries) if release_conn is None: release_conn = response_kw.get('preload_content', True) # Check host if assert_same_host and not self.is_same_host(url): raise HostChangedError(self, url, retries) conn = None # Track whether `conn` needs to be released before # returning/raising/recursing. Update this variable if necessary, and # leave `release_conn` constant throughout the function. That way, if # the function recurses, the original value of `release_conn` will be # passed down into the recursive call, and its value will be respected. # # See issue #651 [1] for details. # # [1] release_this_conn = release_conn # Merge the proxy headers. Only do this in HTTP. We have to copy the # headers dict so we can safely change it without those changes being # reflected in anyone else's copy. if self.scheme == 'http': headers = headers.copy() headers.update(self.proxy_headers) # Must keep the exception bound to a separate variable or else Python 3 # complains about UnboundLocalError. err = None # Keep track of whether we cleanly exited the except block. This # ensures we do proper cleanup in finally. clean_exit = False # Rewind body position, if needed. Record current position # for future rewinds in the event of a redirect/retry. body_pos = set_file_position(body, body_pos) try: # Request a connection from the queue. timeout_obj = self._get_timeout(timeout) conn = self._get_conn(timeout=pool_timeout) conn.timeout = timeout_obj.connect_timeout is_new_proxy_conn = self.proxy is not None and not getattr(conn, 'sock', None) if is_new_proxy_conn: self._prepare_proxy(conn) # Make the request on the httplib connection object. 
httplib_response = self._make_request(conn, method, url, timeout=timeout_obj, body=body, headers=headers, chunked=chunked) # If we're going to release the connection in ``finally:``, then # the response doesn't need to know about the connection. Otherwise # it will also try to release it and we'll have a double-release # mess. response_conn = conn if not release_conn else None # Pass method to Response for length checking response_kw['request_method'] = method # Import httplib's response into our own wrapper object response = self.ResponseCls.from_httplib(httplib_response, pool=self, connection=response_conn, retries=retries, **response_kw) # Everything went great! clean_exit = True except queue.Empty: # Timed out by queue. raise EmptyPoolError(self, "No pool connections are available.") except (TimeoutError, HTTPException, SocketError, ProtocolError, BaseSSLError, SSLError, CertificateError) as e: # Discard the connection for these exceptions. It will be # replaced during the next _get_conn() call. clean_exit = False if isinstance(e, (BaseSSLError, CertificateError)): e = SSLError(e) elif isinstance(e, (SocketError, NewConnectionError)) and self.proxy: e = ProxyError('Cannot connect to proxy.', e) elif isinstance(e, (SocketError, HTTPException)): e = ProtocolError('Connection aborted.', e) retries = retries.increment(method, url, error=e, _pool=self, _stacktrace=sys.exc_info()[2]) retries.sleep() # Keep track of the error for the retry warning. err = e finally: if not clean_exit: # We hit some kind of exception, handled or otherwise. We need # to throw the connection away unless explicitly told not to. # Close the connection, set the variable to None, and make sure # we put the None back in the pool to avoid leaking it. conn = conn and conn.close() release_this_conn = True if release_this_conn: # Put the connection back to be reused. If the connection is # expired then it will be None, which will get replaced with a # fresh connection during _get_conn. self._put_conn(conn) if not conn: # Try again log.warning("Retrying (%r) after connection " "broken by '%r': %s", retries, err, url) return self.urlopen(method, url, body, headers, retries, redirect, assert_same_host, timeout=timeout, pool_timeout=pool_timeout, release_conn=release_conn, body_pos=body_pos, **response_kw) def drain_and_release_conn(response): try: # discard any remaining response body, the connection will be # released back to the pool once the entire response is read response.read() except (TimeoutError, HTTPException, SocketError, ProtocolError, BaseSSLError, SSLError) as e: pass # Handle redirect? redirect_location = redirect and response.get_redirect_location() if redirect_location: if response.status == 303: method = 'GET' try: retries = retries.increment(method, url, response=response, _pool=self) except MaxRetryError: if retries.raise_on_redirect: # Drain and release the connection for this response, since # we're not returning it to be released manually. drain_and_release_conn(response) raise return response # drain and return the connection to the pool before recursing drain_and_release_conn(response) retries.sleep_for_retry(response) log.debug("Redirecting %s -> %s", url, redirect_location) return self.urlopen( method, redirect_location, body, headers, retries=retries, redirect=redirect, assert_same_host=assert_same_host, timeout=timeout, pool_timeout=pool_timeout, release_conn=release_conn, body_pos=body_pos, **response_kw) # Check if we should retry the HTTP response. 
has_retry_after = bool(response.getheader('Retry-After')) if retries.is_retry(method, response.status, has_retry_after): try: retries = retries.increment(method, url, response=response, _pool=self) except MaxRetryError: if retries.raise_on_status: # Drain and release the connection for this response, since # we're not returning it to be released manually. drain_and_release_conn(response) raise return response # drain and return the connection to the pool before recursing drain_and_release_conn(response) retries.sleep(response) log.debug("Retry: %s", url) return self.urlopen( method, url, body, headers, retries=retries, redirect=redirect, assert_same_host=assert_same_host, timeout=timeout, pool_timeout=pool_timeout, release_conn=release_conn, body_pos=body_pos, **response_kw) return response class HTTPSConnectionPool(HTTPConnectionPool): """ Same as :class:`.HTTPConnectionPool`, but HTTPS. When Python is compiled with the :mod:`ssl` module, then :class:`.VerifiedHTTPSConnection` is used, which *can* verify certificates, instead of :class:`.HTTPSConnection`. :class:`.VerifiedHTTPSConnection` uses one of ``assert_fingerprint``, ``assert_hostname`` and ``host`` in this order to verify connections. If ``assert_hostname`` is False, no verification is done. The ``key_file``, ``cert_file``, ``cert_reqs``, ``ca_certs``, ``ca_cert_dir``, and ``ssl_version`` are only used if :mod:`ssl` is available and are fed into :meth:`urllib3.util.ssl_wrap_socket` to upgrade the connection socket into an SSL socket. """ scheme = 'https' ConnectionCls = HTTPSConnection def __init__(self, host, port=None, strict=False, timeout=Timeout.DEFAULT_TIMEOUT, maxsize=1, block=False, headers=None, retries=None, _proxy=None, _proxy_headers=None, key_file=None, cert_file=None, cert_reqs=None, ca_certs=None, ssl_version=None, assert_hostname=None, assert_fingerprint=None, ca_cert_dir=None, **conn_kw): HTTPConnectionPool.__init__(self, host, port, strict, timeout, maxsize, block, headers, retries, _proxy, _proxy_headers, **conn_kw) if ca_certs and cert_reqs is None: cert_reqs = 'CERT_REQUIRED' self.key_file = key_file self.cert_file = cert_file self.cert_reqs = cert_reqs self.ca_certs = ca_certs self.ca_cert_dir = ca_cert_dir self.ssl_version = ssl_version self.assert_hostname = assert_hostname self.assert_fingerprint = assert_fingerprint def _prepare_conn(self, conn): """ Prepare the ``connection`` for :meth:`urllib3.util.ssl_wrap_socket` and establish the tunnel if proxy is used. """ if isinstance(conn, VerifiedHTTPSConnection): conn.set_cert(key_file=self.key_file, cert_file=self.cert_file, cert_reqs=self.cert_reqs, ca_certs=self.ca_certs, ca_cert_dir=self.ca_cert_dir, assert_hostname=self.assert_hostname, assert_fingerprint=self.assert_fingerprint) conn.ssl_version = self.ssl_version return conn def _prepare_proxy(self, conn): """ Establish tunnel connection early, because otherwise httplib would improperly set Host: header to proxy's IP:port. """ # Python 2.7+ try: set_tunnel = conn.set_tunnel except AttributeError: # Platform-specific: Python 2.6 set_tunnel = conn._set_tunnel if sys.version_info <= (2, 6, 4) and not self.proxy_headers: # Python 2.6.4 and older set_tunnel(self._proxy_host, self.port) else: set_tunnel(self._proxy_host, self.port, self.proxy_headers) conn.connect() def _new_conn(self): """ Return a fresh :class:`httplib.HTTPSConnection`. 
""" self.num_connections += 1 log.debug("Starting new HTTPS connection (%d): %s", self.num_connections, self.host) if not self.ConnectionCls or self.ConnectionCls is DummyConnection: raise SSLError("Can't connect to HTTPS URL because the SSL " "module is not available.") actual_host = self.host actual_port = self.port if self.proxy is not None: actual_host = self.proxy.host actual_port = self.proxy.port conn = self.ConnectionCls(host=actual_host, port=actual_port, timeout=self.timeout.connect_timeout, strict=self.strict, **self.conn_kw) return self._prepare_conn(conn) def _validate_conn(self, conn): """ Called right before a request is made, after the socket is created. """ super(HTTPSConnectionPool, self)._validate_conn(conn) # Force connect early to allow us to validate the connection. if not getattr(conn, 'sock', None): # AppEngine might not have `.sock` conn.connect() if not conn.is_verified: warnings.warn(( 'Unverified HTTPS request is being made. ' 'Adding certificate verification is strongly advised. See: ' 'https://urllib3.readthedocs.io/en/latest/advanced-usage.html' '#ssl-warnings'), InsecureRequestWarning) def connection_from_url(url, **kw): """ Given a url, return an :class:`.ConnectionPool` instance of its host. This is a shortcut for not having to parse out the scheme, host, and port of the url before creating an :class:`.ConnectionPool` instance. :param url: Absolute URL string that must include the scheme. Port is optional. :param \\**kw: Passes additional parameters to the constructor of the appropriate :class:`.ConnectionPool`. Useful for specifying things like timeout, maxsize, headers, etc. Example:: >>> conn = connection_from_url('http://google.com/') >>> r = conn.request('GET', '/') """ scheme, host, port = get_host(url) port = port or port_by_scheme.get(scheme, 80) if scheme == 'https': return HTTPSConnectionPool(host, port=port, **kw) else: return HTTPConnectionPool(host, port=port, **kw) def _ipv6_host(host): """ Process IPv6 address literals """ # httplib doesn't like it when we include brackets in IPv6 addresses # Specifically, if we include brackets but also pass the port then # httplib crazily doubles up the square brackets on the Host header. # Instead, we need to make sure we never pass ``None`` as the port. # However, for backward compatibility reasons we can't actually # *assert* that. 
    #     See http://bugs.python.org/issue28539
    #
    # Also if an IPv6 address literal has a zone identifier, the
    # percent sign might be URI-encoded; convert it back into ASCII.
    if host.startswith('[') and host.endswith(']'):
        host = host.replace('%25', '%').strip('[]')

    return host
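The HTTPSConnectionPool class and the connection_from_url helper defined above are typically used as follows. This is a minimal sketch, not part of the module itself: the pip._vendor import path simply mirrors the file layout of this package dump (a standalone install would use plain `import urllib3`), and the host name and CA bundle path are illustrative.

# Minimal usage sketch for the pool classes above (host and CA path are illustrative).
from pip._vendor.urllib3.connectionpool import HTTPSConnectionPool, connection_from_url

# Build a verified HTTPS pool explicitly.
pool = HTTPSConnectionPool('example.org', port=443, maxsize=2,
                           cert_reqs='CERT_REQUIRED',
                           ca_certs='/etc/ssl/certs/ca-bundle.crt')
r = pool.request('GET', '/')
print(r.status)

# Or let connection_from_url parse scheme/host/port and pick the pool class.
pool2 = connection_from_url('https://example.org/', maxsize=2)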
site-packages/pip/_vendor/urllib3/poolmanager.pyc (compiled bytecode)
site-packages/pip/_vendor/urllib3/connection.pyc (compiled bytecode)
site-packages/pip/_vendor/urllib3/util/retry.pyo (compiled bytecode; the retry.py source appears below)
itunknownstoo many redirectsRaRRRRRR$s$Incremented Retry for (url='%s'): %rN(RRR treraiseR+R/RRRRR\R]R_tget_redirect_locationRt GENERIC_ERRORtSPECIFIC_ERRORtformatR$R R.RfRR3R4(R'R R RSR t_poolt _stacktraceRRRRt status_counttcauseRRR$t new_retry((sB/usr/lib/python2.7/site-packages/pip/_vendor/urllib3/util/retry.pyt incrementIsX                     "   !cC@sdjdt|d|S(Ns|{cls.__name__}(total={self.total}, connect={self.connect}, read={self.read}, redirect={self.redirect}, status={self.status})R5R'(RlR+(R'((sB/usr/lib/python2.7/site-packages/pip/_vendor/urllib3/util/retry.pyt__repr__s N(t__name__t __module__t__doc__t frozensettDEFAULT_METHOD_WHITELISTt"DEFAULT_REDIRECT_HEADERS_BLACKLISTR`RAR/RVR(R.t classmethodR9RDRQRTRWRYRUR\R]R_RRcRfRrRs(((sB/usr/lib/python2.7/site-packages/pip/_vendor/urllib3/util/retry.pyRs8x            Ji(t __future__RRKtloggingt collectionsRt itertoolsRRHREt exceptionsRRRRRRtpackagesR t getLoggerRtR3R tobjectRR0(((sB/usr/lib/python2.7/site-packages/pip/_vendor/urllib3/util/retry.pyts    .PKZԗ-site-packages/pip/_vendor/urllib3/util/url.pynu[from __future__ import absolute_import from collections import namedtuple import re from ..exceptions import LocationParseError url_attrs = ['scheme', 'auth', 'host', 'port', 'path', 'query', 'fragment'] # We only want to normalize urls with an HTTP(S) scheme. # urllib3 infers URLs without a scheme (None) to be http. NORMALIZABLE_SCHEMES = ('http', 'https', None) _contains_disallowed_url_pchar_re = re.compile('[\x00-\x20\x7f]') from ..packages.six.moves.urllib.parse import quote class Url(namedtuple('Url', url_attrs)): """ Datastructure for representing an HTTP URL. Used as a return value for :func:`parse_url`. Both the scheme and host are normalized as they are both case-insensitive according to RFC 3986. """ __slots__ = () def __new__(cls, scheme=None, auth=None, host=None, port=None, path=None, query=None, fragment=None): if path and not path.startswith('/'): path = '/' + path if scheme: scheme = scheme.lower() if host and scheme in NORMALIZABLE_SCHEMES: host = host.lower() return super(Url, cls).__new__(cls, scheme, auth, host, port, path, query, fragment) @property def hostname(self): """For backwards-compatibility with urlparse. We're nice like that.""" return self.host @property def request_uri(self): """Absolute path including the query string.""" uri = self.path or '/' if self.query is not None: uri += '?' + self.query return uri @property def netloc(self): """Network location including host and port""" if self.port: return '%s:%d' % (self.host, self.port) return self.host @property def url(self): """ Convert self into a url This function should more or less round-trip with :func:`.parse_url`. The returned url may not be exactly the same as the url inputted to :func:`.parse_url`, but it should be equivalent by the RFC (e.g., urls with a blank port will have : removed). Example: :: >>> U = parse_url('http://google.com/mail/') >>> U.url 'http://google.com/mail/' >>> Url('http', 'username:password', 'host.com', 80, ... '/path', 'query', 'fragment').url 'http://username:password@host.com:80/path?query#fragment' """ scheme, auth, host, port, path, query, fragment = self url = '' # We use "is not None" we want things to happen with empty strings (or 0 port) if scheme is not None: url += scheme + '://' if auth is not None: url += auth + '@' if host is not None: url += host if port is not None: url += ':' + str(port) if path is not None: url += path if query is not None: url += '?' 
+ query if fragment is not None: url += '#' + fragment return url def __str__(self): return self.url def split_first(s, delims): """ Given a string and an iterable of delimiters, split on the first found delimiter. Return two split parts and the matched delimiter. If not found, then the first part is the full input string. Example:: >>> split_first('foo/bar?baz', '?/=') ('foo', 'bar?baz', '/') >>> split_first('foo/bar?baz', '123') ('foo/bar?baz', '', None) Scales linearly with number of delims. Not ideal for large number of delims. """ min_idx = None min_delim = None for d in delims: idx = s.find(d) if idx < 0: continue if min_idx is None or idx < min_idx: min_idx = idx min_delim = d if min_idx is None or min_idx < 0: return s, '', None return s[:min_idx], s[min_idx + 1:], min_delim def parse_url(url): """ Given a url, return a parsed :class:`.Url` namedtuple. Best-effort is performed to parse incomplete urls. Fields not provided will be None. Partly backwards-compatible with :mod:`urlparse`. Example:: >>> parse_url('http://google.com/mail/') Url(scheme='http', host='google.com', port=None, path='/mail/', ...) >>> parse_url('google.com:80') Url(scheme=None, host='google.com', port=80, path=None, ...) >>> parse_url('/foo?bar') Url(scheme=None, host=None, port=None, path='/foo', query='bar', ...) """ # While this code has overlap with stdlib's urlparse, it is much # simplified for our needs and less annoying. # Additionally, this implementations does silly things to be optimal # on CPython. if not url: # Empty return Url() # Prevent CVE-2019-9740. # adapted from https://github.com/python/cpython/pull/12755 url = _contains_disallowed_url_pchar_re.sub(lambda match: quote(match.group()), url) scheme = None auth = None host = None port = None path = None fragment = None query = None # Scheme if '://' in url: scheme, url = url.split('://', 1) # Find the earliest Authority Terminator # (http://tools.ietf.org/html/rfc3986#section-3.2) url, path_, delim = split_first(url, ['/', '?', '#']) if delim: # Reassemble the path path = delim + path_ # Auth if '@' in url: # Last '@' denotes end of auth part auth, url = url.rsplit('@', 1) # IPv6 if url and url[0] == '[': host, url = url.split(']', 1) host += ']' # Port if ':' in url: _host, port = url.split(':', 1) if not host: host = _host if port: # If given, ports must be integers. No whitespace, no plus or # minus prefixes, no non-integer digits such as ^2 (superscript). if not port.isdigit(): raise LocationParseError(url) try: port = int(port) except ValueError: raise LocationParseError(url) else: # Blank ports are cool, too. (rfc3986#section-3.2.3) port = None elif not host and url: host = url if not path: return Url(scheme, auth, host, port, path, query, fragment) # Fragment if '#' in path: path, fragment = path.split('#', 1) # Query if '?' in path: path, query = path.split('?', 1) return Url(scheme, auth, host, port, path, query, fragment) def get_host(url): """ Deprecated. Use :func:`parse_url` instead. """ p = parse_url(url) return p.scheme or 'http', p.hostname, p.port PKZ9' ' 2site-packages/pip/_vendor/urllib3/util/response.pynu[from __future__ import absolute_import from ..packages.six.moves import http_client as httplib from ..exceptions import HeaderParsingError def is_fp_closed(obj): """ Checks whether a given file-like object is closed. :param obj: The file-like object to check. """ try: # Check `isclosed()` first, in case Python3 doesn't set `closed`. 
# GH Issue #928 return obj.isclosed() except AttributeError: pass try: # Check via the official file-like-object way. return obj.closed except AttributeError: pass try: # Check if the object is a container for another file-like object that # gets released on exhaustion (e.g. HTTPResponse). return obj.fp is None except AttributeError: pass raise ValueError("Unable to determine whether fp is closed.") def assert_header_parsing(headers): """ Asserts whether all headers have been successfully parsed. Extracts encountered errors from the result of parsing headers. Only works on Python 3. :param headers: Headers to verify. :type headers: `httplib.HTTPMessage`. :raises urllib3.exceptions.HeaderParsingError: If parsing errors are found. """ # This will fail silently if we pass in the wrong kind of parameter. # To make debugging easier add an explicit check. if not isinstance(headers, httplib.HTTPMessage): raise TypeError('expected httplib.Message, got {0}.'.format( type(headers))) defects = getattr(headers, 'defects', None) get_payload = getattr(headers, 'get_payload', None) unparsed_data = None if get_payload: # Platform-specific: Python 3. unparsed_data = get_payload() if defects or unparsed_data: raise HeaderParsingError(defects=defects, unparsed_data=unparsed_data) def is_response_to_head(response): """ Checks whether the request of a response has been a HEAD-request. Handles the quirks of AppEngine. :param conn: :type conn: :class:`httplib.HTTPResponse` """ # FIXME: Can we do this somehow without accessing private httplib _method? method = response._method if isinstance(method, int): # Platform-specific: Appengine return method == 3 return method.upper() == 'HEAD' PKZ95site-packages/pip/_vendor/urllib3/util/connection.pycnu[ abc@@sddlmZddlZddlmZddlmZmZdZej dddZ dZ d Z d Zed ZdS( i(tabsolute_importNi(t wait_for_read(t HAS_SELECTt SelectorErrorcC@sot|dt}|tkr"tS|dkr2tSts<tSytt|ddSWntk rjtSXdS(s  Returns True if the connection is dropped and should be closed. :param conn: :class:`httplib.HTTPConnection` object. Note: For platforms like AppEngine, this will always return ``False`` to let the platform handle connection recycling transparently for us. tsockttimeoutgN(tgetattrtFalsetNonetTrueRtboolRR(tconnR((sG/usr/lib/python2.7/site-packages/pip/_vendor/urllib3/util/connection.pytis_connection_droppeds    cC@sO|\}}|jdr-|jd}nd}t}xtj|||tjD]}|\} } } } } d}yltj| | | }t|||tjk r|j |n|r|j |n|j | |SWqXtj k r"}|}|dk r#|j d}q#qXXqXW|dk r<|ntj ddS(sdConnect to *address* and return the socket object. Convenience function. Connect to *address* (a 2-tuple ``(host, port)``) and return the socket object. Passing the optional *timeout* parameter will set the timeout on the socket instance before attempting to connect. If no *timeout* is supplied, the global default timeout setting returned by :func:`getdefaulttimeout` is used. If *source_address* is set it must be a tuple of (host, port) for the socket to bind as a source address before making the connection. An host of '' or port 0 tells the OS to use the default. 
t[s[]s!getaddrinfo returns an empty listN(t startswithtstripRtallowed_gai_familytsockett getaddrinfot SOCK_STREAMt_set_socket_optionst_GLOBAL_DEFAULT_TIMEOUTt settimeouttbindtconnectterrortclose(taddressRtsource_addresstsocket_optionsthosttportterrtfamilytrestaftsocktypetprotot canonnametsaRte((sG/usr/lib/python2.7/site-packages/pip/_vendor/urllib3/util/connection.pytcreate_connection$s2  "      cC@s2|dkrdSx|D]}|j|qWdS(N(Rt setsockopt(Rtoptionstopt((sG/usr/lib/python2.7/site-packages/pip/_vendor/urllib3/util/connection.pyRXs  cC@stj}trtj}n|S(sThis function is designed to work in the context of getaddrinfo, where family=socket.AF_UNSPEC is the default and will perform a DNS search for both IPv6 and IPv4 records.(RtAF_INETtHAS_IPV6t AF_UNSPEC(R!((sG/usr/lib/python2.7/site-packages/pip/_vendor/urllib3/util/connection.pyR`s  cC@srd}t}tjr[y/tjtj}|j|dft}Wq[tk rWq[Xn|rn|jn|S(s6 Returns True if the system can bind an IPv6 address. iN( RRRthas_ipv6tAF_INET6RR t ExceptionR(RRR0((sG/usr/lib/python2.7/site-packages/pip/_vendor/urllib3/util/connection.pyt _has_ipv6ks    s::1(t __future__RRtwaitRt selectorsRRR RRR)RRR3R.(((sG/usr/lib/python2.7/site-packages/pip/_vendor/urllib3/util/connection.pyts  3  PKZU3;;/site-packages/pip/_vendor/urllib3/util/retry.pynu[from __future__ import absolute_import import time import logging from collections import namedtuple from itertools import takewhile import email import re from ..exceptions import ( ConnectTimeoutError, MaxRetryError, ProtocolError, ReadTimeoutError, ResponseError, InvalidHeader, ) from ..packages import six log = logging.getLogger(__name__) # Data structure for representing the metadata of requests that result in a retry. RequestHistory = namedtuple('RequestHistory', ["method", "url", "error", "status", "redirect_location"]) class Retry(object): """ Retry configuration. Each retry attempt will create a new Retry object with updated values, so they can be safely reused. Retries can be defined as a default for a pool:: retries = Retry(connect=5, read=2, redirect=5) http = PoolManager(retries=retries) response = http.request('GET', 'http://example.com/') Or per-request (which overrides the default for the pool):: response = http.request('GET', 'http://example.com/', retries=Retry(10)) Retries can be disabled by passing ``False``:: response = http.request('GET', 'http://example.com/', retries=False) Errors will be wrapped in :class:`~urllib3.exceptions.MaxRetryError` unless retries are disabled, in which case the causing exception will be raised. :param int total: Total number of retries to allow. Takes precedence over other counts. Set to ``None`` to remove this constraint and fall back on other counts. It's a good idea to set this to some sensibly-high value to account for unexpected edge cases and avoid infinite retry loops. Set to ``0`` to fail on the first retry. Set to ``False`` to disable and imply ``raise_on_redirect=False``. :param int connect: How many connection-related errors to retry on. These are errors raised before the request is sent to the remote server, which we assume has not triggered the server to process the request. Set to ``0`` to fail on the first retry of this type. :param int read: How many times to retry on read errors. These errors are raised after the request was sent to the server, so the request may have side-effects. Set to ``0`` to fail on the first retry of this type. :param int redirect: How many redirects to perform. Limit this to avoid infinite redirect loops. 
A redirect is a HTTP response with a status code 301, 302, 303, 307 or 308. Set to ``0`` to fail on the first retry of this type. Set to ``False`` to disable and imply ``raise_on_redirect=False``. :param int status: How many times to retry on bad status codes. These are retries made on responses, where status code matches ``status_forcelist``. Set to ``0`` to fail on the first retry of this type. :param iterable method_whitelist: Set of uppercased HTTP method verbs that we should retry on. By default, we only retry on methods which are considered to be idempotent (multiple requests with the same parameters end with the same state). See :attr:`Retry.DEFAULT_METHOD_WHITELIST`. Set to a ``False`` value to retry on any verb. :param iterable status_forcelist: A set of integer HTTP status codes that we should force a retry on. A retry is initiated if the request method is in ``method_whitelist`` and the response status code is in ``status_forcelist``. By default, this is disabled with ``None``. :param float backoff_factor: A backoff factor to apply between attempts after the second try (most errors are resolved immediately by a second try without a delay). urllib3 will sleep for:: {backoff factor} * (2 ^ ({number of total retries} - 1)) seconds. If the backoff_factor is 0.1, then :func:`.sleep` will sleep for [0.0s, 0.2s, 0.4s, ...] between retries. It will never be longer than :attr:`Retry.BACKOFF_MAX`. By default, backoff is disabled (set to 0). :param bool raise_on_redirect: Whether, if the number of redirects is exhausted, to raise a MaxRetryError, or to return a response with a response code in the 3xx range. :param iterable remove_headers_on_redirect: Sequence of headers to remove from the request when a response indicating a redirect is returned before firing off the redirected request :param bool raise_on_status: Similar meaning to ``raise_on_redirect``: whether we should raise an exception, or return a response, if status falls in ``status_forcelist`` range and retries have been exhausted. :param tuple history: The history of the request encountered during each call to :meth:`~Retry.increment`. The list is in the order the requests occurred. Each list item is of class :class:`RequestHistory`. :param bool respect_retry_after_header: Whether to respect Retry-After header on status codes defined as :attr:`Retry.RETRY_AFTER_STATUS_CODES` or not. """ DEFAULT_METHOD_WHITELIST = frozenset([ 'HEAD', 'GET', 'PUT', 'DELETE', 'OPTIONS', 'TRACE']) DEFAULT_REDIRECT_HEADERS_BLACKLIST = frozenset(['Authorization']) RETRY_AFTER_STATUS_CODES = frozenset([413, 429, 503]) #: Maximum backoff time. 
BACKOFF_MAX = 120 def __init__(self, total=10, connect=None, read=None, redirect=None, status=None, method_whitelist=DEFAULT_METHOD_WHITELIST, status_forcelist=None, backoff_factor=0, raise_on_redirect=True, raise_on_status=True, history=None, respect_retry_after_header=True, remove_headers_on_redirect=DEFAULT_REDIRECT_HEADERS_BLACKLIST): self.total = total self.connect = connect self.read = read self.status = status if redirect is False or total is False: redirect = 0 raise_on_redirect = False self.redirect = redirect self.status_forcelist = status_forcelist or set() self.method_whitelist = method_whitelist self.backoff_factor = backoff_factor self.raise_on_redirect = raise_on_redirect self.raise_on_status = raise_on_status self.history = history or tuple() self.respect_retry_after_header = respect_retry_after_header self.remove_headers_on_redirect = remove_headers_on_redirect def new(self, **kw): params = dict( total=self.total, connect=self.connect, read=self.read, redirect=self.redirect, status=self.status, method_whitelist=self.method_whitelist, status_forcelist=self.status_forcelist, backoff_factor=self.backoff_factor, raise_on_redirect=self.raise_on_redirect, raise_on_status=self.raise_on_status, history=self.history, remove_headers_on_redirect=self.remove_headers_on_redirect, ) params.update(kw) return type(self)(**params) @classmethod def from_int(cls, retries, redirect=True, default=None): """ Backwards-compatibility for the old retries format.""" if retries is None: retries = default if default is not None else cls.DEFAULT if isinstance(retries, Retry): return retries redirect = bool(redirect) and None new_retries = cls(retries, redirect=redirect) log.debug("Converted retries value: %r -> %r", retries, new_retries) return new_retries def get_backoff_time(self): """ Formula for computing the current backoff :rtype: float """ # We want to consider only the last consecutive errors sequence (Ignore redirects). consecutive_errors_len = len(list(takewhile(lambda x: x.redirect_location is None, reversed(self.history)))) if consecutive_errors_len <= 1: return 0 backoff_value = self.backoff_factor * (2 ** (consecutive_errors_len - 1)) return min(self.BACKOFF_MAX, backoff_value) def parse_retry_after(self, retry_after): # Whitespace: https://tools.ietf.org/html/rfc7230#section-3.2.4 if re.match(r"^\s*[0-9]+\s*$", retry_after): seconds = int(retry_after) else: retry_date_tuple = email.utils.parsedate(retry_after) if retry_date_tuple is None: raise InvalidHeader("Invalid Retry-After header: %s" % retry_after) retry_date = time.mktime(retry_date_tuple) seconds = retry_date - time.time() if seconds < 0: seconds = 0 return seconds def get_retry_after(self, response): """ Get the value of Retry-After in seconds. """ retry_after = response.getheader("Retry-After") if retry_after is None: return None return self.parse_retry_after(retry_after) def sleep_for_retry(self, response=None): retry_after = self.get_retry_after(response) if retry_after: time.sleep(retry_after) return True return False def _sleep_backoff(self): backoff = self.get_backoff_time() if backoff <= 0: return time.sleep(backoff) def sleep(self, response=None): """ Sleep between retry attempts. This method will respect a server's ``Retry-After`` response header and sleep the duration of the time requested. If that is not present, it will use an exponential backoff. By default, the backoff factor is 0 and this method will return immediately. 
""" if response: slept = self.sleep_for_retry(response) if slept: return self._sleep_backoff() def _is_connection_error(self, err): """ Errors when we're fairly sure that the server did not receive the request, so it should be safe to retry. """ return isinstance(err, ConnectTimeoutError) def _is_read_error(self, err): """ Errors that occur after the request has been started, so we should assume that the server began processing it. """ return isinstance(err, (ReadTimeoutError, ProtocolError)) def _is_method_retryable(self, method): """ Checks if a given HTTP method should be retried upon, depending if it is included on the method whitelist. """ if self.method_whitelist and method.upper() not in self.method_whitelist: return False return True def is_retry(self, method, status_code, has_retry_after=False): """ Is this method/status code retryable? (Based on whitelists and control variables such as the number of total retries to allow, whether to respect the Retry-After header, whether this header is present, and whether the returned status code is on the list of status codes to be retried upon on the presence of the aforementioned header) """ if not self._is_method_retryable(method): return False if self.status_forcelist and status_code in self.status_forcelist: return True return (self.total and self.respect_retry_after_header and has_retry_after and (status_code in self.RETRY_AFTER_STATUS_CODES)) def is_exhausted(self): """ Are we out of retries? """ retry_counts = (self.total, self.connect, self.read, self.redirect, self.status) retry_counts = list(filter(None, retry_counts)) if not retry_counts: return False return min(retry_counts) < 0 def increment(self, method=None, url=None, response=None, error=None, _pool=None, _stacktrace=None): """ Return a new Retry object with incremented retry counters. :param response: A response object, or None, if the server did not return a response. :type response: :class:`~urllib3.response.HTTPResponse` :param Exception error: An error encountered during the request, or None if the response was received successfully. :return: A new ``Retry`` object. """ if self.total is False and error: # Disabled, indicate to re-raise the error. raise six.reraise(type(error), error, _stacktrace) total = self.total if total is not None: total -= 1 connect = self.connect read = self.read redirect = self.redirect status_count = self.status cause = 'unknown' status = None redirect_location = None if error and self._is_connection_error(error): # Connect retry? if connect is False: raise six.reraise(type(error), error, _stacktrace) elif connect is not None: connect -= 1 elif error and self._is_read_error(error): # Read retry? if read is False or not self._is_method_retryable(method): raise six.reraise(type(error), error, _stacktrace) elif read is not None: read -= 1 elif response and response.get_redirect_location(): # Redirect retry? 
            if redirect is not None:
                redirect -= 1
            cause = 'too many redirects'
            redirect_location = response.get_redirect_location()
            status = response.status

        else:
            # Incrementing because of a server error like a 500 in
            # status_forcelist and the given method is in the whitelist
            cause = ResponseError.GENERIC_ERROR
            if response and response.status:
                if status_count is not None:
                    status_count -= 1
                cause = ResponseError.SPECIFIC_ERROR.format(
                    status_code=response.status)
                status = response.status

        history = self.history + (RequestHistory(method, url, error,
                                                 status, redirect_location),)

        new_retry = self.new(
            total=total,
            connect=connect, read=read, redirect=redirect, status=status_count,
            history=history)

        if new_retry.is_exhausted():
            raise MaxRetryError(_pool, url, error or ResponseError(cause))

        log.debug("Incremented Retry for (url='%s'): %r", url, new_retry)

        return new_retry

    def __repr__(self):
        return ('{cls.__name__}(total={self.total}, connect={self.connect}, '
                'read={self.read}, redirect={self.redirect}, status={self.status})').format(
                    cls=type(self), self=self)


# For backwards compatibility (equivalent to pre-v1.9):
Retry.DEFAULT = Retry(3)
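The Retry policy defined above is normally handed either to a pool manager or to an individual request. A minimal sketch follows, with the caveat that the pip._vendor import path and the URL are illustrative rather than part of the module:

# Sketch: retries with exponential backoff, per the Retry docstring above.
from pip._vendor.urllib3 import PoolManager
from pip._vendor.urllib3.util.retry import Retry

retry = Retry(total=5, connect=2, read=2, redirect=3,
              backoff_factor=0.2, status_forcelist=[500, 502, 503])
http = PoolManager(retries=retry)
r = http.request('GET', 'http://example.org/')

# Sleep between attempts is backoff_factor * (2 ** (consecutive_errors - 1)),
# capped at Retry.BACKOFF_MAX (120 s): with 0.2 this gives 0.0, 0.4, 0.8, 1.6, ... seconds.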
None will set an infinite timeout. :type read: integer, float, or None .. note:: Many factors can affect the total amount of time for urllib3 to return an HTTP response. For example, Python's DNS resolver does not obey the timeout specified on the socket. Other factors that can affect total request time include high CPU load, high swap, the program running at a low priority level, or other behaviors. In addition, the read and total timeouts only measure the time between read operations on the socket connecting the client and the server, not the total amount of time for the request to return a complete response. For most requests, the timeout is raised because the server has not sent the first byte in the specified time. This is not always the case; if a server streams one byte every fifteen seconds, a timeout of 20 seconds will not trigger, even though the request will take several minutes to complete. If your goal is to cut off any request after a set amount of wall clock time, consider having a second "watcher" thread to cut off a slow request. cC@sL|j|d|_|j|d|_|j|d|_d|_dS(Ntconnecttreadttotal(t_validate_timeoutt_connectt_readRtNonet_start_connect(tselfRRR((sD/usr/lib/python2.7/site-packages/pip/_vendor/urllib3/util/timeout.pyt__init__]scC@s&dt|j|j|j|jfS(Ns!%s(connect=%r, read=%r, total=%r)(ttypet__name__R R R(R ((sD/usr/lib/python2.7/site-packages/pip/_vendor/urllib3/util/timeout.pyt__str__cscC@s|tkr|jS|dks.||jkr2|St|trPtdnyt|Wn-ttfk rtd||fnXy)|dkrtd||fnWn'tk rtd||fnX|S(s Check that a timeout attribute is valid. :param value: The timeout value to validate :param name: The name of the timeout attribute to validate. This is used to specify in error messages. :return: The validated and casted version of the given value. :raises ValueError: If it is a numeric value less than or equal to zero, or the type is not an integer, float, or None. sDTimeout cannot be a boolean value. It must be an int, float or None.s>Timeout value %s was %s, but it must be an int, float or None.isdAttempted to set %s timeout to %s, but the timeout cannot be set to a value less than or equal to 0.N(t_DefaulttDEFAULT_TIMEOUTR t isinstancetboolt ValueErrortfloatt TypeError(tclstvaluetname((sD/usr/lib/python2.7/site-packages/pip/_vendor/urllib3/util/timeout.pyRgs&   cC@std|d|S(s Create a new Timeout from a legacy timeout value. The timeout value used by httplib.py sets the same timeout on the connect(), and recv() socket requests. This creates a :class:`Timeout` object that sets the individual timeouts to the ``timeout`` value passed to this function. :param timeout: The legacy timeout value. :type timeout: integer, float, sentinel default object, or None :return: Timeout object :rtype: :class:`Timeout` RR(R(Rttimeout((sD/usr/lib/python2.7/site-packages/pip/_vendor/urllib3/util/timeout.pyt from_floatscC@s"td|jd|jd|jS(s Create a copy of the timeout object Timeout properties are stored per-pool but each request needs a fresh Timeout object to ensure each one has its own start/stop configured. :return: a copy of the timeout object :rtype: :class:`Timeout` RRR(RR R R(R ((sD/usr/lib/python2.7/site-packages/pip/_vendor/urllib3/util/timeout.pytclones cC@s1|jdk rtdnt|_|jS(s Start the timeout clock, used during a connect() attempt :raises urllib3.exceptions.TimeoutStateError: if you attempt to start a timer that has been started already. 
s'Timeout timer has already been started.N(R R Rt current_time(R ((sD/usr/lib/python2.7/site-packages/pip/_vendor/urllib3/util/timeout.pyt start_connects cC@s,|jdkrtdnt|jS(s Gets the time elapsed since the call to :meth:`start_connect`. :return: Elapsed time. :rtype: float :raises urllib3.exceptions.TimeoutStateError: if you attempt to get duration for a timer that hasn't been started. s:Can't get connect duration for timer that has not started.N(R R RR(R ((sD/usr/lib/python2.7/site-packages/pip/_vendor/urllib3/util/timeout.pytget_connect_durationscC@sQ|jdkr|jS|jdks7|j|jkr>|jSt|j|jS(s" Get the value to use when setting a connection timeout. This will be a positive float or integer, the value None (never timeout), or the default system timeout. :return: Connect timeout. :rtype: int, float, :attr:`Timeout.DEFAULT_TIMEOUT` or None N(RR R Rtmin(R ((sD/usr/lib/python2.7/site-packages/pip/_vendor/urllib3/util/timeout.pytconnect_timeouts !cC@s|jdk r~|j|jk r~|jdk r~|j|jk r~|jdkrX|jStdt|j|j|jS|jdk r|j|jk rtd|j|jS|jSdS(s Get the value for the read timeout. This assumes some time has elapsed in the connection timeout and computes the read timeout appropriately. If self.total is set, the read timeout is dependent on the amount of time taken by the connect timeout. If the connection time has not been established, a :exc:`~urllib3.exceptions.TimeoutStateError` will be raised. :return: Value to use for the read timeout. :rtype: int, float, :attr:`Timeout.DEFAULT_TIMEOUT` or None :raises urllib3.exceptions.TimeoutStateError: If :meth:`start_connect` has not yet been called on this object. iN(RR RR R tmaxR"R!(R ((sD/usr/lib/python2.7/site-packages/pip/_vendor/urllib3/util/timeout.pyt read_timeouts !N(Rt __module__t__doc__RRR RRRt classmethodRRRR R!tpropertyR#R%(((sD/usr/lib/python2.7/site-packages/pip/_vendor/urllib3/util/timeout.pyRsF %  ( t __future__RtsocketRttimet exceptionsRtobjectRtgetattrRR(((sD/usr/lib/python2.7/site-packages/pip/_vendor/urllib3/util/timeout.pyts   PKZ83site-packages/pip/_vendor/urllib3/util/response.pyonu[ abc@@sOddlmZddlmZddlmZdZdZdZ dS( i(tabsolute_importi(t http_client(tHeaderParsingErrorcC@svy|jSWntk r!nXy |jSWntk r@nXy|jdkSWntk renXtddS(st Checks whether a given file-like object is closed. :param obj: The file-like object to check. s)Unable to determine whether fp is closed.N(tisclosedtAttributeErrortclosedtfptNonet ValueError(tobj((sE/usr/lib/python2.7/site-packages/pip/_vendor/urllib3/util/response.pyt is_fp_closeds    cC@st|tjs0tdjt|nt|dd}t|dd}d}|rl|}n|sx|rtd|d|ndS(sP Asserts whether all headers have been successfully parsed. Extracts encountered errors from the result of parsing headers. Only works on Python 3. :param headers: Headers to verify. :type headers: `httplib.HTTPMessage`. :raises urllib3.exceptions.HeaderParsingError: If parsing errors are found. s"expected httplib.Message, got {0}.tdefectst get_payloadt unparsed_dataN( t isinstancethttplibt HTTPMessaget TypeErrortformatttypetgetattrRR(theadersR R R ((sE/usr/lib/python2.7/site-packages/pip/_vendor/urllib3/util/response.pytassert_header_parsing&s   cC@s2|j}t|tr"|dkS|jdkS(s Checks whether the request of a response has been a HEAD-request. Handles the quirks of AppEngine. 
:param conn: :type conn: :class:`httplib.HTTPResponse` itHEAD(t_methodRtinttupper(tresponsetmethod((sE/usr/lib/python2.7/site-packages/pip/_vendor/urllib3/util/response.pytis_response_to_headEs  N( t __future__Rtpackages.six.movesRRt exceptionsRR RR(((sE/usr/lib/python2.7/site-packages/pip/_vendor/urllib3/util/response.pyts   PKZML990site-packages/pip/_vendor/urllib3/util/retry.pycnu[ abc@@sddlmZddlZddlZddlmZddlmZddlZddl Z ddl m Z m Z m Z mZmZmZddlmZejeZedd d d d d gZdefdYZede_dS(i(tabsolute_importN(t namedtuple(t takewhilei(tConnectTimeoutErrort MaxRetryErrort ProtocolErrortReadTimeoutErrort ResponseErrort InvalidHeader(tsixtRequestHistorytmethodturlterrortstatustredirect_locationtRetrycB@s4eZdZeddddddgZedgZedd d gZd Zd ddddedd e e de ed Z dZ e e ddZ dZdZdZddZdZddZdZdZdZedZdZdddddddZdZRS(s2 Retry configuration. Each retry attempt will create a new Retry object with updated values, so they can be safely reused. Retries can be defined as a default for a pool:: retries = Retry(connect=5, read=2, redirect=5) http = PoolManager(retries=retries) response = http.request('GET', 'http://example.com/') Or per-request (which overrides the default for the pool):: response = http.request('GET', 'http://example.com/', retries=Retry(10)) Retries can be disabled by passing ``False``:: response = http.request('GET', 'http://example.com/', retries=False) Errors will be wrapped in :class:`~urllib3.exceptions.MaxRetryError` unless retries are disabled, in which case the causing exception will be raised. :param int total: Total number of retries to allow. Takes precedence over other counts. Set to ``None`` to remove this constraint and fall back on other counts. It's a good idea to set this to some sensibly-high value to account for unexpected edge cases and avoid infinite retry loops. Set to ``0`` to fail on the first retry. Set to ``False`` to disable and imply ``raise_on_redirect=False``. :param int connect: How many connection-related errors to retry on. These are errors raised before the request is sent to the remote server, which we assume has not triggered the server to process the request. Set to ``0`` to fail on the first retry of this type. :param int read: How many times to retry on read errors. These errors are raised after the request was sent to the server, so the request may have side-effects. Set to ``0`` to fail on the first retry of this type. :param int redirect: How many redirects to perform. Limit this to avoid infinite redirect loops. A redirect is a HTTP response with a status code 301, 302, 303, 307 or 308. Set to ``0`` to fail on the first retry of this type. Set to ``False`` to disable and imply ``raise_on_redirect=False``. :param int status: How many times to retry on bad status codes. These are retries made on responses, where status code matches ``status_forcelist``. Set to ``0`` to fail on the first retry of this type. :param iterable method_whitelist: Set of uppercased HTTP method verbs that we should retry on. By default, we only retry on methods which are considered to be idempotent (multiple requests with the same parameters end with the same state). See :attr:`Retry.DEFAULT_METHOD_WHITELIST`. Set to a ``False`` value to retry on any verb. :param iterable status_forcelist: A set of integer HTTP status codes that we should force a retry on. A retry is initiated if the request method is in ``method_whitelist`` and the response status code is in ``status_forcelist``. By default, this is disabled with ``None``. 
:param float backoff_factor: A backoff factor to apply between attempts after the second try (most errors are resolved immediately by a second try without a delay). urllib3 will sleep for:: {backoff factor} * (2 ^ ({number of total retries} - 1)) seconds. If the backoff_factor is 0.1, then :func:`.sleep` will sleep for [0.0s, 0.2s, 0.4s, ...] between retries. It will never be longer than :attr:`Retry.BACKOFF_MAX`. By default, backoff is disabled (set to 0). :param bool raise_on_redirect: Whether, if the number of redirects is exhausted, to raise a MaxRetryError, or to return a response with a response code in the 3xx range. :param iterable remove_headers_on_redirect: Sequence of headers to remove from the request when a response indicating a redirect is returned before firing off the redirected request :param bool raise_on_status: Similar meaning to ``raise_on_redirect``: whether we should raise an exception, or return a response, if status falls in ``status_forcelist`` range and retries have been exhausted. :param tuple history: The history of the request encountered during each call to :meth:`~Retry.increment`. The list is in the order the requests occurred. Each list item is of class :class:`RequestHistory`. :param bool respect_retry_after_header: Whether to respect Retry-After header on status codes defined as :attr:`Retry.RETRY_AFTER_STATUS_CODES` or not. tHEADtGETtPUTtDELETEtOPTIONStTRACEt Authorizationiiiixi icC@s||_||_||_||_|tks<|tkrKd}t} n||_|p`t|_||_||_ | |_ | |_ | pt |_ | |_| |_dS(Ni(ttotaltconnecttreadRtFalsetredirecttsettstatus_forcelisttmethod_whitelisttbackoff_factortraise_on_redirecttraise_on_statusttuplethistorytrespect_retry_after_headertremove_headers_on_redirect(tselfRRRRRRRR R!R"R$R%R&((sB/usr/lib/python2.7/site-packages/pip/_vendor/urllib3/util/retry.pyt__init__s            cK@std|jd|jd|jd|jd|jd|jd|jd|jd |j d |j d |j d |j }|j |t||S( NRRRRRRRR R!R"R$R&(tdictRRRRRRRR R!R"R$R&tupdatettype(R'tkwtparams((sB/usr/lib/python2.7/site-packages/pip/_vendor/urllib3/util/retry.pytnews $        cC@sx|dkr*|dk r|n|j}nt|tr=|St|oLd}||d|}tjd|||S(s4 Backwards-compatibility for the old retries format.Rs!Converted retries value: %r -> %rN(tNonetDEFAULTt isinstanceRtbooltlogtdebug(tclstretriesRtdefaultt new_retries((sB/usr/lib/python2.7/site-packages/pip/_vendor/urllib3/util/retry.pytfrom_ints cC@s\tttdt|j}|dkr7dS|jd|d}t|j|S(sJ Formula for computing the current backoff :rtype: float cS@s |jdkS(N(RR/(tx((sB/usr/lib/python2.7/site-packages/pip/_vendor/urllib3/util/retry.pyttiii(tlentlistRtreversedR$R tmint BACKOFF_MAX(R'tconsecutive_errors_lent backoff_value((sB/usr/lib/python2.7/site-packages/pip/_vendor/urllib3/util/retry.pytget_backoff_times  cC@stjd|r!t|}nPtjj|}|dkrRtd|ntj |}|tj}|dkrd}n|S(Ns^\s*[0-9]+\s*$sInvalid Retry-After header: %si( tretmatchtinttemailtutilst parsedateR/Rttimetmktime(R't retry_aftertsecondstretry_date_tuplet retry_date((sB/usr/lib/python2.7/site-packages/pip/_vendor/urllib3/util/retry.pytparse_retry_afters   cC@s,|jd}|dkrdS|j|S(s* Get the value of Retry-After in seconds. s Retry-AfterN(t getheaderR/RQ(R'tresponseRM((sB/usr/lib/python2.7/site-packages/pip/_vendor/urllib3/util/retry.pytget_retry_afters cC@s*|j|}|r&tj|tStS(N(RTRKtsleeptTrueR(R'RSRM((sB/usr/lib/python2.7/site-packages/pip/_vendor/urllib3/util/retry.pytsleep_for_retrys  cC@s-|j}|dkrdStj|dS(Ni(RDRKRU(R'tbackoff((sB/usr/lib/python2.7/site-packages/pip/_vendor/urllib3/util/retry.pyt_sleep_backoffs  cC@s0|r"|j|}|r"dSn|jdS(sC Sleep between retry attempts. 
This method will respect a server's ``Retry-After`` response header and sleep the duration of the time requested. If that is not present, it will use an exponential backoff. By default, the backoff factor is 0 and this method will return immediately. N(RWRY(R'RStslept((sB/usr/lib/python2.7/site-packages/pip/_vendor/urllib3/util/retry.pyRU s cC@s t|tS(s{ Errors when we're fairly sure that the server did not receive the request, so it should be safe to retry. (R1R(R'terr((sB/usr/lib/python2.7/site-packages/pip/_vendor/urllib3/util/retry.pyt_is_connection_errorscC@st|ttfS(s Errors that occur after the request has been started, so we should assume that the server began processing it. (R1RR(R'R[((sB/usr/lib/python2.7/site-packages/pip/_vendor/urllib3/util/retry.pyt_is_read_error!scC@s&|jr"|j|jkr"tStS(s| Checks if a given HTTP method should be retried upon, depending if it is included on the method whitelist. (RtupperRRV(R'R ((sB/usr/lib/python2.7/site-packages/pip/_vendor/urllib3/util/retry.pyt_is_method_retryable'scC@sT|j|stS|jr/||jkr/tS|joS|joS|oS||jkS(sx Is this method/status code retryable? (Based on whitelists and control variables such as the number of total retries to allow, whether to respect the Retry-After header, whether this header is present, and whether the returned status code is on the list of status codes to be retried upon on the presence of the aforementioned header) (R_RRRVRR%tRETRY_AFTER_STATUS_CODES(R'R t status_codethas_retry_after((sB/usr/lib/python2.7/site-packages/pip/_vendor/urllib3/util/retry.pytis_retry0s cC@sS|j|j|j|j|jf}ttd|}|sCtSt |dkS(s Are we out of retries? iN( RRRRRR>tfilterR/RR@(R't retry_counts((sB/usr/lib/python2.7/site-packages/pip/_vendor/urllib3/util/retry.pyt is_exhausted@s $c C@s|jtkr3|r3tjt|||n|j}|d k rU|d8}n|j}|j} |j} |j } d} d } d }|r|j |r|tkrtjt|||q|d k r|d8}qn |rQ|j |rQ| tks|j | r5tjt|||q| d k r| d8} qn|r|j r| d k r|| d8} nd} |j }|j } nUtj} |r|j r| d k r| d8} ntjjd|j } |j } n|jt|||| |f}|jd|d|d| d| d | d |}|jrnt|||pet| ntjd |||S( s Return a new Retry object with incremented retry counters. :param response: A response object, or None, if the server did not return a response. :type response: :class:`~urllib3.response.HTTPResponse` :param Exception error: An error encountered during the request, or None if the response was received successfully. :return: A new ``Retry`` object. 
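A minimal usage sketch of the Retry configuration documented in the retry.pyc docstrings above (not part of the original archive; it follows the plain ``urllib3`` import path used by those docstrings, and the retry counts are arbitrary):

import urllib3
from urllib3.util.retry import Retry

# Pool-wide default: up to 5 retries, 3 of which may be connect errors,
# with a small exponential backoff between attempts.
retries = Retry(total=5, connect=3, read=2, redirect=5, backoff_factor=0.1)
http = urllib3.PoolManager(retries=retries)

# Per-request override, as shown in the docstring (needs network access):
# response = http.request('GET', 'http://example.com/', retries=Retry(10))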
itunknownstoo many redirectsRaRRRRRR$s$Incremented Retry for (url='%s'): %rN(RRR treraiseR+R/RRRRR\R]R_tget_redirect_locationRt GENERIC_ERRORtSPECIFIC_ERRORtformatR$R R.RfRR3R4(R'R R RSR t_poolt _stacktraceRRRRt status_counttcauseRRR$t new_retry((sB/usr/lib/python2.7/site-packages/pip/_vendor/urllib3/util/retry.pyt incrementIsX                     "   !cC@sdjdt|d|S(Ns|{cls.__name__}(total={self.total}, connect={self.connect}, read={self.read}, redirect={self.redirect}, status={self.status})R5R'(RlR+(R'((sB/usr/lib/python2.7/site-packages/pip/_vendor/urllib3/util/retry.pyt__repr__s N(t__name__t __module__t__doc__t frozensettDEFAULT_METHOD_WHITELISTt"DEFAULT_REDIRECT_HEADERS_BLACKLISTR`RAR/RVR(R.t classmethodR9RDRQRTRWRYRUR\R]R_RRcRfRrRs(((sB/usr/lib/python2.7/site-packages/pip/_vendor/urllib3/util/retry.pyRs8x            Ji(t __future__RRKtloggingt collectionsRt itertoolsRRHREt exceptionsRRRRRRtpackagesR t getLoggerRtR3R tobjectRR0(((sB/usr/lib/python2.7/site-packages/pip/_vendor/urllib3/util/retry.pyts    .PKZ31uu.site-packages/pip/_vendor/urllib3/util/url.pyonu[ abc@@sddlmZddlmZddlZddlmZdddd d d d gZdZ ej dZ ddl m Z dedefdYZdZdZdZdS(i(tabsolute_import(t namedtupleNi(tLocationParseErrortschemetauththosttporttpathtquerytfragmentthttpthttpss[- ](tquotetUrlcB@sweZdZdZddddddddZedZedZedZ edZ dZ RS( s Datastructure for representing an HTTP URL. Used as a return value for :func:`parse_url`. Both the scheme and host are normalized as they are both case-insensitive according to RFC 3986. c C@s|r#|jd r#d|}n|r8|j}n|rY|tkrY|j}ntt|j||||||||S(Nt/(t startswithtlowertNORMALIZABLE_SCHEMEStsuperR t__new__(tclsRRRRRRR ((s@/usr/lib/python2.7/site-packages/pip/_vendor/urllib3/util/url.pyRs !cC@s|jS(s@For backwards-compatibility with urlparse. We're nice like that.(R(tself((s@/usr/lib/python2.7/site-packages/pip/_vendor/urllib3/util/url.pythostname$scC@s6|jp d}|jdk r2|d|j7}n|S(s)Absolute path including the query string.Rt?N(RRtNone(Rturi((s@/usr/lib/python2.7/site-packages/pip/_vendor/urllib3/util/url.pyt request_uri)scC@s$|jrd|j|jfS|jS(s(Network location including host and ports%s:%d(RR(R((s@/usr/lib/python2.7/site-packages/pip/_vendor/urllib3/util/url.pytnetloc3s c C@s|\}}}}}}}d}|dk r>||d7}n|dk r[||d7}n|dk rt||7}n|dk r|dt|7}n|dk r||7}n|dk r|d|7}n|dk r|d|7}n|S(s Convert self into a url This function should more or less round-trip with :func:`.parse_url`. The returned url may not be exactly the same as the url inputted to :func:`.parse_url`, but it should be equivalent by the RFC (e.g., urls with a blank port will have : removed). Example: :: >>> U = parse_url('http://google.com/mail/') >>> U.url 'http://google.com/mail/' >>> Url('http', 'username:password', 'host.com', 80, ... '/path', 'query', 'fragment').url 'http://username:password@host.com:80/path?query#fragment' ts://t@t:Rt#N(Rtstr( RRRRRRRR turl((s@/usr/lib/python2.7/site-packages/pip/_vendor/urllib3/util/url.pyR!:s"         cC@s|jS(N(R!(R((s@/usr/lib/python2.7/site-packages/pip/_vendor/urllib3/util/url.pyt__str__bs(N( t__name__t __module__t__doc__t __slots__RRtpropertyRRRR!R"(((s@/usr/lib/python2.7/site-packages/pip/_vendor/urllib3/util/url.pyR s  (cC@sd}d}xV|D]N}|j|}|dkr:qn|dksR||kr|}|}qqW|dks}|dkr|ddfS|| ||d|fS(s Given a string and an iterable of delimiters, split on the first found delimiter. Return two split parts and the matched delimiter. If not found, then the first part is the full input string. 
Example:: >>> split_first('foo/bar?baz', '?/=') ('foo', 'bar?baz', '/') >>> split_first('foo/bar?baz', '123') ('foo/bar?baz', '', None) Scales linearly with number of delims. Not ideal for large number of delims. iRiN(Rtfind(tstdelimstmin_idxt min_delimtdtidx((s@/usr/lib/python2.7/site-packages/pip/_vendor/urllib3/util/url.pyt split_firstfs    c C@s;|s tStjd|}d }d }d }d }d }d }d }d|krs|jdd\}}nt|dddg\}}} | r| |}nd|kr|jdd\}}n|r |dd kr |jd d\}}|d 7}nd |kr|jd d\} }|s<| }n|r|js]t|nyt |}Wqt k rt|qXqd }n| r|r|}n|st|||||||Sd|kr|jdd\}}nd|kr|jdd\}}nt|||||||S( s: Given a url, return a parsed :class:`.Url` namedtuple. Best-effort is performed to parse incomplete urls. Fields not provided will be None. Partly backwards-compatible with :mod:`urlparse`. Example:: >>> parse_url('http://google.com/mail/') Url(scheme='http', host='google.com', port=None, path='/mail/', ...) >>> parse_url('google.com:80') Url(scheme=None, host='google.com', port=80, path=None, ...) >>> parse_url('/foo?bar') Url(scheme=None, host=None, port=None, path='/foo', query='bar', ...) cS@st|jS(N(R tgroup(tmatch((s@/usr/lib/python2.7/site-packages/pip/_vendor/urllib3/util/url.pytRs://iRRRRit[t]RN( R t!_contains_disallowed_url_pchar_retsubRtsplitR/trsplittisdigitRtintt ValueError( R!RRRRRR Rtpath_tdelimt_host((s@/usr/lib/python2.7/site-packages/pip/_vendor/urllib3/util/url.pyt parse_urlsR !            cC@s(t|}|jpd|j|jfS(s4 Deprecated. Use :func:`parse_url` instead. R (R?RRR(R!tp((s@/usr/lib/python2.7/site-packages/pip/_vendor/urllib3/util/url.pytget_hosts (R R N(t __future__Rt collectionsRtret exceptionsRt url_attrsRRtcompileR5tpackages.six.moves.urllib.parseR R R/R?RA(((s@/usr/lib/python2.7/site-packages/pip/_vendor/urllib3/util/url.pyts U ! aPKZ \%%2site-packages/pip/_vendor/urllib3/util/timeout.pycnu[ abc@@stddlmZddlmZddlZddlmZeZe edejZ defdYZ dS( i(tabsolute_import(t_GLOBAL_DEFAULT_TIMEOUTNi(tTimeoutStateErrort monotonictTimeoutcB@seZdZeZd eedZdZe dZ e dZ dZ dZ dZedZed ZRS( s Timeout configuration. Timeouts can be defined as a default for a pool:: timeout = Timeout(connect=2.0, read=7.0) http = PoolManager(timeout=timeout) response = http.request('GET', 'http://example.com/') Or per-request (which overrides the default for the pool):: response = http.request('GET', 'http://example.com/', timeout=Timeout(10)) Timeouts can be disabled by setting all the parameters to ``None``:: no_timeout = Timeout(connect=None, read=None) response = http.request('GET', 'http://example.com/, timeout=no_timeout) :param total: This combines the connect and read timeouts into one; the read timeout will be set to the time leftover from the connect attempt. In the event that both a connect timeout and a total are specified, or a read timeout and a total are specified, the shorter timeout will be applied. Defaults to None. :type total: integer, float, or None :param connect: The maximum amount of time to wait for a connection attempt to a server to succeed. Omitting the parameter will default the connect timeout to the system default, probably `the global default timeout in socket.py `_. None will set an infinite timeout for connection attempts. :type connect: integer, float, or None :param read: The maximum amount of time to wait between consecutive read operations for a response from the server. Omitting the parameter will default the read timeout to the system default, probably `the global default timeout in socket.py `_. None will set an infinite timeout. :type read: integer, float, or None .. 
note:: Many factors can affect the total amount of time for urllib3 to return an HTTP response. For example, Python's DNS resolver does not obey the timeout specified on the socket. Other factors that can affect total request time include high CPU load, high swap, the program running at a low priority level, or other behaviors. In addition, the read and total timeouts only measure the time between read operations on the socket connecting the client and the server, not the total amount of time for the request to return a complete response. For most requests, the timeout is raised because the server has not sent the first byte in the specified time. This is not always the case; if a server streams one byte every fifteen seconds, a timeout of 20 seconds will not trigger, even though the request will take several minutes to complete. If your goal is to cut off any request after a set amount of wall clock time, consider having a second "watcher" thread to cut off a slow request. cC@sL|j|d|_|j|d|_|j|d|_d|_dS(Ntconnecttreadttotal(t_validate_timeoutt_connectt_readRtNonet_start_connect(tselfRRR((sD/usr/lib/python2.7/site-packages/pip/_vendor/urllib3/util/timeout.pyt__init__]scC@s&dt|j|j|j|jfS(Ns!%s(connect=%r, read=%r, total=%r)(ttypet__name__R R R(R ((sD/usr/lib/python2.7/site-packages/pip/_vendor/urllib3/util/timeout.pyt__str__cscC@s|tkr|jS|dks.||jkr2|St|trPtdnyt|Wn-ttfk rtd||fnXy)|dkrtd||fnWn'tk rtd||fnX|S(s Check that a timeout attribute is valid. :param value: The timeout value to validate :param name: The name of the timeout attribute to validate. This is used to specify in error messages. :return: The validated and casted version of the given value. :raises ValueError: If it is a numeric value less than or equal to zero, or the type is not an integer, float, or None. sDTimeout cannot be a boolean value. It must be an int, float or None.s>Timeout value %s was %s, but it must be an int, float or None.isdAttempted to set %s timeout to %s, but the timeout cannot be set to a value less than or equal to 0.N(t_DefaulttDEFAULT_TIMEOUTR t isinstancetboolt ValueErrortfloatt TypeError(tclstvaluetname((sD/usr/lib/python2.7/site-packages/pip/_vendor/urllib3/util/timeout.pyRgs&   cC@std|d|S(s Create a new Timeout from a legacy timeout value. The timeout value used by httplib.py sets the same timeout on the connect(), and recv() socket requests. This creates a :class:`Timeout` object that sets the individual timeouts to the ``timeout`` value passed to this function. :param timeout: The legacy timeout value. :type timeout: integer, float, sentinel default object, or None :return: Timeout object :rtype: :class:`Timeout` RR(R(Rttimeout((sD/usr/lib/python2.7/site-packages/pip/_vendor/urllib3/util/timeout.pyt from_floatscC@s"td|jd|jd|jS(s Create a copy of the timeout object Timeout properties are stored per-pool but each request needs a fresh Timeout object to ensure each one has its own start/stop configured. :return: a copy of the timeout object :rtype: :class:`Timeout` RRR(RR R R(R ((sD/usr/lib/python2.7/site-packages/pip/_vendor/urllib3/util/timeout.pytclones cC@s1|jdk rtdnt|_|jS(s Start the timeout clock, used during a connect() attempt :raises urllib3.exceptions.TimeoutStateError: if you attempt to start a timer that has been started already. s'Timeout timer has already been started.N(R R Rt current_time(R ((sD/usr/lib/python2.7/site-packages/pip/_vendor/urllib3/util/timeout.pyt start_connects cC@s,|jdkrtdnt|jS(s Gets the time elapsed since the call to :meth:`start_connect`. :return: Elapsed time. 
:rtype: float :raises urllib3.exceptions.TimeoutStateError: if you attempt to get duration for a timer that hasn't been started. s:Can't get connect duration for timer that has not started.N(R R RR(R ((sD/usr/lib/python2.7/site-packages/pip/_vendor/urllib3/util/timeout.pytget_connect_durationscC@sQ|jdkr|jS|jdks7|j|jkr>|jSt|j|jS(s" Get the value to use when setting a connection timeout. This will be a positive float or integer, the value None (never timeout), or the default system timeout. :return: Connect timeout. :rtype: int, float, :attr:`Timeout.DEFAULT_TIMEOUT` or None N(RR R Rtmin(R ((sD/usr/lib/python2.7/site-packages/pip/_vendor/urllib3/util/timeout.pytconnect_timeouts !cC@s|jdk r~|j|jk r~|jdk r~|j|jk r~|jdkrX|jStdt|j|j|jS|jdk r|j|jk rtd|j|jS|jSdS(s Get the value for the read timeout. This assumes some time has elapsed in the connection timeout and computes the read timeout appropriately. If self.total is set, the read timeout is dependent on the amount of time taken by the connect timeout. If the connection time has not been established, a :exc:`~urllib3.exceptions.TimeoutStateError` will be raised. :return: Value to use for the read timeout. :rtype: int, float, :attr:`Timeout.DEFAULT_TIMEOUT` or None :raises urllib3.exceptions.TimeoutStateError: If :meth:`start_connect` has not yet been called on this object. iN(RR RR R tmaxR"R!(R ((sD/usr/lib/python2.7/site-packages/pip/_vendor/urllib3/util/timeout.pyt read_timeouts !N(Rt __module__t__doc__RRR RRRt classmethodRRRR R!tpropertyR#R%(((sD/usr/lib/python2.7/site-packages/pip/_vendor/urllib3/util/timeout.pyRsF %  ( t __future__RtsocketRttimet exceptionsRtobjectRtgetattrRR(((sD/usr/lib/python2.7/site-packages/pip/_vendor/urllib3/util/timeout.pyts   PKZt3site-packages/pip/_vendor/urllib3/util/__init__.pyonu[ abc@@sddlmZddlmZddlmZddlmZddlm Z m Z m Z m Z m Z mZmZmZddlmZmZddlmZdd lmZmZmZmZdd lmZmZd ZdS(!i(tabsolute_importi(tis_connection_dropped(t make_headers(t is_fp_closed(t SSLContexttHAS_SNIt IS_PYOPENSSLtIS_SECURETRANSPORTtassert_fingerprinttresolve_cert_reqstresolve_ssl_versiontssl_wrap_socket(t current_timetTimeout(tRetry(tget_hostt parse_urlt split_firsttUrl(t wait_for_readtwait_for_writeRRRRRR RRR RRRRRR R RR RRN(RRRRRR RRR RRRRRR R RR RR(t __future__Rt connectionRtrequestRtresponseRtssl_RRRRRR R R ttimeoutR R tretryRturlRRRRtwaitRRt__all__(((sE/usr/lib/python2.7/site-packages/pip/_vendor/urllib3/util/__init__.pyts8: "PKZ{Gm)m)/site-packages/pip/_vendor/urllib3/util/ssl_.pyonu[ abc@@sGddlmZddlZddlZddlZddlmZmZddlm Z m Z m Z ddl m Z mZmZdZeZeZeZie d6e d6e d 6Zd Zeed eZy<ddlZdd lmZmZmZdd lmZWnek rnXy ddlmZm Z m!Z!Wn#ek red+\ZZ dZ!nXdj"dddddddddddddd d!d"gZ#ydd#lmZWn3ek rddl$Z$d$e%fd%YZnXd&Z&d'Z'd(Z(ddddd)Z)dddddddddd* Z*dS(,i(tabsolute_importN(thexlifyt unhexlify(tmd5tsha1tsha256i(tSSLErrortInsecurePlatformWarningtSNIMissingWarningi i(i@cC@s`tt|t|}x7tt|t|D]\}}|||AO}q8W|dkS(s Compare two digests of equal length in constant time. The digests must be of type str/bytes. Returns True if the digests match, and False otherwise. 
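A minimal usage sketch of the Timeout configuration documented in the timeout.pyc entry above (not part of the original archive; the connect/read values are arbitrary examples):

from urllib3.util.timeout import Timeout

timeout = Timeout(connect=2.0, read=7.0)       # pool-wide default
no_timeout = Timeout(connect=None, read=None)  # disables both timeouts entirely
# http = urllib3.PoolManager(timeout=timeout)
# response = http.request('GET', 'http://example.com/', timeout=Timeout(10))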
i(tabstlentzipt bytearray(tatbtresulttltr((sA/usr/lib/python2.7/site-packages/pip/_vendor/urllib3/util/ssl_.pyt_const_compare_digest_backports(tcompare_digest(t wrap_sockett CERT_NONEtPROTOCOL_SSLv23(tHAS_SNI(t OP_NO_SSLv2t OP_NO_SSLv3tOP_NO_COMPRESSIONiiit:sTLS13-AES-256-GCM-SHA384sTLS13-CHACHA20-POLY1305-SHA256sTLS13-AES-128-GCM-SHA256s ECDH+AESGCMs ECDH+CHACHA20s DH+AESGCMs DH+CHACHA20s ECDH+AES256s DH+AES256s ECDH+AES128sDH+AESs RSA+AESGCMsRSA+AESs!aNULLs!eNULLs!MD5(t SSLContextRcB@soeZdejko d knp1d ejkZdZdZd d dZdZ d e dZ RS( iiicC@sO||_t|_tj|_d|_d|_d|_ d|_ d|_ dS(Ni( tprotocoltFalsetcheck_hostnametsslRt verify_modetNonetca_certstoptionstcertfiletkeyfiletciphers(tselftprotocol_version((sA/usr/lib/python2.7/site-packages/pip/_vendor/urllib3/util/ssl_.pyt__init__cs       cC@s||_||_dS(N(R%R&(R(R%R&((sA/usr/lib/python2.7/site-packages/pip/_vendor/urllib3/util/ssl_.pytload_cert_chainns cC@s(||_|dk r$tdndS(Ns-CA directories not supported in older Pythons(R#R"R(R(tcafiletcapath((sA/usr/lib/python2.7/site-packages/pip/_vendor/urllib3/util/ssl_.pytload_verify_locationsrs  cC@s%|jstdn||_dS(NsYour version of Python does not support setting a custom cipher suite. Please upgrade to Python 2.7, 3.2, or later if you need this functionality.(tsupports_set_cipherst TypeErrorR'(R(t cipher_suite((sA/usr/lib/python2.7/site-packages/pip/_vendor/urllib3/util/ssl_.pyt set_ciphersxs  cC@stjdti|jd6|jd6|jd6|jd6|jd6|d6}|jrnt |d|j |St ||SdS( Ns2A true SSLContext object is not available. This prevents urllib3 from configuring SSL appropriately and may cause certain SSL connections to fail. You can upgrade to a newer version of Python to solve this. For more information, see https://urllib3.readthedocs.io/en/latest/advanced-usage.html#ssl-warningsR&R%R#t cert_reqst ssl_versiont server_sideR'( twarningstwarnRR&R%R#R!RR/RR'(R(tsockettserver_hostnameR5tkwargs((sA/usr/lib/python2.7/site-packages/pip/_vendor/urllib3/util/ssl_.pyRs       (ii(i(iiN( t__name__t __module__tsyst version_infoR/R*R+R"R.R2RR(((sA/usr/lib/python2.7/site-packages/pip/_vendor/urllib3/util/ssl_.pyR_s  cC@s|jddj}t|}tj|}|sQtdj|nt|j}||j }t ||stdj|t |ndS(s Checks if given fingerprint matches the supplied certificate. :param cert: Certificate as bytes object. :param fingerprint: Fingerprint as string of hexdigits, can be interspersed by colons. Rts"Fingerprint of invalid length: {0}s6Fingerprints did not match. Expected "{0}", got "{1}".N( treplacetlowerR t HASHFUNC_MAPtgetRtformatRtencodetdigestt_const_compare_digestR(tcertt fingerprintt digest_lengththashfunctfingerprint_bytest cert_digest((sA/usr/lib/python2.7/site-packages/pip/_vendor/urllib3/util/ssl_.pytassert_fingerprints   cC@s[|dkrtSt|trWtt|d}|dkrSttd|}n|S|S(s Resolves the argument to a numeric constant, which can be passed to the wrap_socket function/method from the ssl module. Defaults to :data:`ssl.CERT_NONE`. If given a string it is assumed to be the name of the constant in the :mod:`ssl` module or its abbrevation. (So you can specify `REQUIRED` instead of `CERT_REQUIRED`. If it's neither `None` nor a string we assume it is already the numeric constant which can directly be passed to wrap_socket. 
tCERT_N(R"Rt isinstancetstrtgetattrR (t candidatetres((sA/usr/lib/python2.7/site-packages/pip/_vendor/urllib3/util/ssl_.pytresolve_cert_reqss  cC@s[|dkrtSt|trWtt|d}|dkrSttd|}n|S|S(s like resolve_cert_reqs t PROTOCOL_N(R"RRPRQRRR (RSRT((sA/usr/lib/python2.7/site-packages/pip/_vendor/urllib3/util/ssl_.pytresolve_ssl_versions  cC@st|ptj}|dkr*tjn|}|dkrcd}|tO}|tO}|tO}n|j|O_t |dt r|j |pt n||_ t |dddk rt|_n|S(sAll arguments have the same meaning as ``ssl_wrap_socket``. By default, this function does a lot of the same work that ``ssl.create_default_context`` does on Python 3.4+. It: - Disables SSLv2, SSLv3, and compression - Sets a restricted set of server ciphers If you wish to enable SSLv3, you can do:: from urllib3.util import ssl_ context = ssl_.create_urllib3_context() context.options &= ~ssl_.OP_NO_SSLv3 You can do the same to enable compression (substituting ``COMPRESSION`` for ``SSLv3`` in the last line above). :param ssl_version: The desired protocol version to use. This will default to PROTOCOL_SSLv23 which will negotiate the highest protocol that both the server and your installation of OpenSSL support. :param cert_reqs: Whether to require the certificate verification. This defaults to ``ssl.CERT_REQUIRED``. :param options: Specific OpenSSL options. These default to ``ssl.OP_NO_SSLv2``, ``ssl.OP_NO_SSLv3``, ``ssl.OP_NO_COMPRESSION``. :param ciphers: Which cipher suites to allow the server to select. :returns: Constructed SSLContext object with specified options :rtype: SSLContext iR/RN(RR RR"t CERT_REQUIREDRRRR$RRtTrueR2tDEFAULT_CIPHERSR!RR(R4R3R$R'tcontext((sA/usr/lib/python2.7/site-packages/pip/_vendor/urllib3/util/ssl_.pytcreate_urllib3_contexts#      c C@s|} | dkr*t||d|} n|s6| ry| j|| Wqtk rk} t| qtk r} | jtjkrt| nqXn(|dkrt| dr| j n|r| j ||nt r| j |d|St jdt| j |S(s All arguments except for server_hostname, ssl_context, and ca_cert_dir have the same meaning as they do when using :func:`ssl.wrap_socket`. :param server_hostname: When SNI is supported, the expected hostname of the certificate :param ssl_context: A pre-made :class:`SSLContext` object. If none is provided, one will be created using :func:`create_urllib3_context`. :param ciphers: A string of ciphers we wish the client to support. This is not supported on Python 2.6 as the ssl module does not support it. :param ca_cert_dir: A directory containing CA certificates in multiple separate files, as supported by OpenSSL's -CApath flag or the capath argument to SSLContext.load_verify_locations(). R'tload_default_certsR9sAn HTTPS request has been made, but the SNI (Subject Name Indication) extension to TLS is not available on this platform. This may cause the server to present an incorrect TLS certificate, which can cause validation failures. You can upgrade to a newer version of Python to solve this. For more information, see https://urllib3.readthedocs.io/en/latest/advanced-usage.html#ssl-warningsN(R"R\R.tIOErrorRtOSErrorterrnotENOENTthasattrR]R+RRR6R7R( tsockR&R%R3R#R9R4R't ssl_contextt ca_cert_dirR[te((sA/usr/lib/python2.7/site-packages/pip/_vendor/urllib3/util/ssl_.pytssl_wrap_sockets.      
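A minimal sketch of the create_urllib3_context()/ssl_wrap_socket() helpers whose docstrings are embedded in the ssl_.pyo entry above (not part of the original archive; ``sock`` and the hostname are placeholders):

from urllib3.util import ssl_

context = ssl_.create_urllib3_context()   # SSLv2/SSLv3 and compression disabled
context.options &= ~ssl_.OP_NO_SSLv3      # re-enable SSLv3 only if genuinely needed
# wrapped = ssl_.ssl_wrap_socket(sock, server_hostname='example.com',
#                                ssl_context=context)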
(ii(+t __future__RR`R6thmactbinasciiRRthashlibRRRt exceptionsRRRR"RRRt IS_PYOPENSSLtIS_SECURETRANSPORTRBRRRRGR RRRt ImportErrorRRRtjoinRZR=tobjectRNRURWR\Rg(((sA/usr/lib/python2.7/site-packages/pip/_vendor/urllib3/util/ssl_.pytsv               :   >  PKZQQ4site-packages/pip/_vendor/urllib3/util/selectors.pyonu[ abc@sddlZddlZddlZddlZddlZddlZddlmZmZy ej Z Wn e e fk rejZ nXd#Z d$Z eZeZdadefdYZdZejd%krd Zn d Zed d dddgZdefdYZdefdYZeedradefdYZneedrdefdYZneedrdefdYZneedrdefd YZ needse!Znd!Z"d"Z#dS(&iN(t namedtupletMappingiit SelectorErrorcBs#eZdZdZdZRS(cCs tt|j||_dS(N(tsuperRt__init__terrno(tselfterrcode((sF/usr/lib/python2.7/site-packages/pip/_vendor/urllib3/util/selectors.pyRscCsdj|jS(Ns(tformatR(R((sF/usr/lib/python2.7/site-packages/pip/_vendor/urllib3/util/selectors.pyt__repr__"scCs |jS(N(R (R((sF/usr/lib/python2.7/site-packages/pip/_vendor/urllib3/util/selectors.pyt__str__%s(t__name__t __module__RR R (((sF/usr/lib/python2.7/site-packages/pip/_vendor/urllib3/util/selectors.pyRs  cCst|tr|}nHyt|j}Wn/tttfk r_tdj|nX|dkrtdj|n|S(sl Return a file descriptor from a file object. If given an integer will simply return that integer back. sInvalid file object: {0!r}isInvalid file descriptor: {0}(t isinstancetinttfilenotAttributeErrort TypeErrort ValueErrorR(tfileobjtfd((sF/usr/lib/python2.7/site-packages/pip/_vendor/urllib3/util/selectors.pyt_fileobj_to_fd)s  iicOsdy|||SWnLtttjfk r_}d}t|drP|j}nt|nXdS(s This is the short-circuit version of the below logic because in Python 3.5+ all system calls automatically restart and recalculate their timeouts. RN(tOSErrortIOErrortselectterrortNonethasattrRR(tfunct_targstkwargsteR((sF/usr/lib/python2.7/site-packages/pip/_vendor/urllib3/util/selectors.pyt_syscall_wrapper;s c Os|jdd}|dkr-d}t}n.t|}|dkrNd}n t|}t|}|rd|krtdnt}x5|tkry|||}Wqtt t j fk r}d}t |dr|j }nt |dr|jd}n|t jkp8t t do8|t jk} | r|dk rt} | |krwtdt jn|rd|kr|| |d(((sF/usr/lib/python2.7/site-packages/pip/_vendor/urllib3/util/selectors.pyR4s    t BaseSelectorcBseZdZdZdZd dZdZd dZd dZ dZ dZ d Z d Z d Zd ZRS(s/ Abstract Selector class A selector supports registering file objects to be monitored for specific I/O events. A file object is a file descriptor or any object with a `fileno()` method. An arbitrary object can be attached to the file object which can be used for example to store context info, a callback, etc. A selector can use various implementations (select(), poll(), epoll(), and kqueue()) depending on the platform. The 'DefaultSelector' class uses the most efficient implementation for the current platform. cCsi|_t||_dS(N(R8R4t_map(R((sF/usr/lib/python2.7/site-packages/pip/_vendor/urllib3/util/selectors.pyRs cCsYyt|SWnDtk rTx-|jjD]}|j|kr.|jSq.WnXdS(sa Return a file descriptor from a file object. This wraps _fileobj_to_fd() to do an exhaustive search in case the object is invalid but we still have it in our map. Used by unregister() so we can unregister an object that was previously registered even if it is closed. It is also used by _SelectorMapping N(RRR8tvaluesRR(RRtkey((sF/usr/lib/python2.7/site-packages/pip/_vendor/urllib3/util/selectors.pyR:s  cCs| s|ttB@r.tdj|nt||j|||}|j|jkr|tdj||jn||j|j<|S(s8 Register a file object for a set of events to monitor. sInvalid events: {0!r}s${0!r} (FD {1}) is already registered( t EVENT_READt EVENT_WRITERRR1R:RR8R;(RRR2R3RC((sF/usr/lib/python2.7/site-packages/pip/_vendor/urllib3/util/selectors.pytregisters cCsy|jj|j|}Wntk rGtdj|ntjk r}|jtjkrqqxV|jj D],}|j |kr|jj|j PqqWtdj|nX|S(s0 Unregister a file object from being monitored. 
s{0!r} is not registered( R8tpopR:R;RtsocketRRtEBADFRBRR(RRRCR ((sF/usr/lib/python2.7/site-packages/pip/_vendor/urllib3/util/selectors.pyt unregisters cCsy|j|j|}Wn&tk rBtdj|nX||jkrw|j||j|||}n4||jkr|jd|}||j|j |jj|jn|t@r^|jj|jn|S(N( RRWRFRDRYtaddRRERZ(RRR2R3RC((sF/usr/lib/python2.7/site-packages/pip/_vendor/urllib3/util/selectors.pyRF&s   cCsBtt|j|}|jj|j|jj|j|S(N(RRWRJRYtdiscardRRZ(RRRC((sF/usr/lib/python2.7/site-packages/pip/_vendor/urllib3/util/selectors.pyRJ.scCstj||g|S(s? Wrapper for select.select because timeout is a positional arg (R(RtrtwR"((sF/usr/lib/python2.7/site-packages/pip/_vendor/urllib3/util/selectors.pyt_select4sc Cst|j r$t|j r$gS|dkr6dn t|d}g}t|jt|j|j|\}}}t|}t|}x|||BD]p}d}||kr|t O}n||kr|t O}n|j |}|r|j |||j @fqqW|S(Ngi(R7RYRZRtmaxR!R_tTrueRXRDRERTtappendR2( RR"treadyR]R^RRR2RC((sF/usr/lib/python2.7/site-packages/pip/_vendor/urllib3/util/selectors.pyR8s$ !      !N( R R R?RRRFRJR_R(((sF/usr/lib/python2.7/site-packages/pip/_vendor/urllib3/util/selectors.pyRWs     tpollt PollSelectorcBsDeZdZdZddZdZddZddZRS(s Poll-based selector cCs&tt|jtj|_dS(N(RReRRRdt_poll(R((sF/usr/lib/python2.7/site-packages/pip/_vendor/urllib3/util/selectors.pyRSscCsrtt|j|||}d}|t@r>|tjO}n|t@rX|tjO}n|jj|j ||S(Ni( RReRFRDRtPOLLINREtPOLLOUTRfR(RRR2R3RCt event_mask((sF/usr/lib/python2.7/site-packages/pip/_vendor/urllib3/util/selectors.pyRFWs  cCs/tt|j|}|jj|j|S(N(RReRJRfR(RRRC((sF/usr/lib/python2.7/site-packages/pip/_vendor/urllib3/util/selectors.pyRJascCsM|dk r7|dkr!d}q7tj|d}n|jj|}|S(sj Wrapper function for select.poll.poll() so that _syscall_wrapper can work with only seconds. ig@@N(RtmathtceilRfRd(RR"R.((sF/usr/lib/python2.7/site-packages/pip/_vendor/urllib3/util/selectors.pyt _wrap_pollfs    cCsg}t|jtd|}x|D]z\}}d}|tj@rR|tO}n|tj@rm|tO}n|j|}|r%|j |||j @fq%q%W|S(NR"i( R!RlRaRRgRERhRDRTRbR2(RR"Rct fd_eventsRRiR2RC((sF/usr/lib/python2.7/site-packages/pip/_vendor/urllib3/util/selectors.pyRts  !N( R R R?RRRFRJRlR(((sF/usr/lib/python2.7/site-packages/pip/_vendor/urllib3/util/selectors.pyReQs    tepollt EpollSelectorcBsJeZdZdZdZddZdZddZdZ RS(s Epoll-based selector cCs&tt|jtj|_dS(N(RRoRRRnt_epoll(R((sF/usr/lib/python2.7/site-packages/pip/_vendor/urllib3/util/selectors.pyRscCs |jjS(N(RpR(R((sF/usr/lib/python2.7/site-packages/pip/_vendor/urllib3/util/selectors.pyRscCsxtt|j|||}d}|t@r>|tjO}n|t@rX|tjO}nt|j jt |j ||S(Ni( RRoRFRDRtEPOLLINREtEPOLLOUTR!RpR%R(RRR2R3RCt events_mask((sF/usr/lib/python2.7/site-packages/pip/_vendor/urllib3/util/selectors.pyRFs  cCsMtt|j|}yt|jjt|jWntk rHnX|S(N(RRoRJR!RpR%RR(RRRC((sF/usr/lib/python2.7/site-packages/pip/_vendor/urllib3/util/selectors.pyRJs  c Cs|dk rG|dkr!d}ntj|dd}t|}nd}tt|jd}g}t|jj t d|d|}x|D]z\}}d}|t j @r|t O}n|t j@r|tO}n|j|}|r|j|||j@fqqW|S( Nigg@@gMbP?giR"t maxevents(RRjRkR&R`R7R8R!RpRdRaRRqRERrRDRTRbR2( RR"t max_eventsRcRmRRiR2RC((sF/usr/lib/python2.7/site-packages/pip/_vendor/urllib3/util/selectors.pyRs*      !cCs$|jjtt|jdS(N(RpRORRo(R((sF/usr/lib/python2.7/site-packages/pip/_vendor/urllib3/util/selectors.pyROs N( R R R?RRRRFRJRRO(((sF/usr/lib/python2.7/site-packages/pip/_vendor/urllib3/util/selectors.pyRos   !tkqueuetKqueueSelectorcBsJeZdZdZdZddZdZddZdZ RS(s Kqueue / Kevent-based selector cCs&tt|jtj|_dS(N(RRwRRRvt_kqueue(R((sF/usr/lib/python2.7/site-packages/pip/_vendor/urllib3/util/selectors.pyRscCs |jjS(N(RxR(R((sF/usr/lib/python2.7/site-packages/pip/_vendor/urllib3/util/selectors.pyRscCstt|j|||}|t@rhtj|jtjtj}t |j j t |gddn|t @rtj|jtjtj}t |j j t |gddn|S(Ni(RRwRFRDRtkeventRtKQ_FILTER_READt 
KQ_EV_ADDR!RxtcontrolR%REtKQ_FILTER_WRITE(RRR2R3RCRy((sF/usr/lib/python2.7/site-packages/pip/_vendor/urllib3/util/selectors.pyRFs   "   "cCstt|j|}|jt@r}tj|jtjtj }y#t |j j t |gddWq}tk ryq}Xn|jt@rtj|jtjtj }y#t |j j t |gddWqtk rqXn|S(Ni(RRwRJR2RDRRyRRzt KQ_EV_DELETER!RxR|R%RRER}(RRRCRy((sF/usr/lib/python2.7/site-packages/pip/_vendor/urllib3/util/selectors.pyRJs$   #    # c Cs9|dk rt|d}nt|jd}i}t|jjtd||}x|D]}|j}|j }d}|t j kr|t O}n|t j kr|tO}n|j|} | r\| j|kr| || j@f|| jsF           814BR  PKZ 2site-packages/pip/_vendor/urllib3/util/request.pycnu[ abc@@sddlmZddlmZddlmZmZddlmZdZ e Z d d d d d d dZ dZd Zd S( i(tabsolute_import(t b64encodei(tbt integer_types(tUnrewindableBodyErrors gzip,deflatecC@si}|rRt|trn't|tr?dj|}nt}||d>> make_headers(keep_alive=True, user_agent="Batman/1.0") {'connection': 'keep-alive', 'user-agent': 'Batman/1.0'} >>> make_headers(accept_encoding=True) {'accept-encoding': 'gzip,deflate'} t,saccept-encodings user-agents keep-alivet connectionsBasic sutf-8t authorizationsproxy-authorizationsno-caches cache-control(t isinstancetstrtlisttjointACCEPT_ENCODINGRRtdecode(t keep_alivetaccept_encodingt user_agentt basic_authtproxy_basic_autht disable_cachetheaders((sD/usr/lib/python2.7/site-packages/pip/_vendor/urllib3/util/request.pyt make_headers s*$   ## cC@sk|dk rt||nKt|dddk rgy|j}Wqgttfk rct}qgXn|S(s If a position is provided, move file to that point. Otherwise, we'll attempt to record a position for future use. ttellN(tNonet rewind_bodytgetattrRtIOErrortOSErrort _FAILEDTELL(tbodytpos((sD/usr/lib/python2.7/site-packages/pip/_vendor/urllib3/util/request.pytset_file_positionMs  cC@st|dd}|dk rdt|trdy||Wqttfk r`tdqXn1|tkrtdntdt |dS(s Attempt to rewind body to a certain position. Primarily used for request redirects and retries. :param body: File-like object that supports seek. :param int pos: Position to seek to in file. tseeksAAn error occurred when rewinding request body for redirect/retry.sRUnable to record file position for rewinding request body during a redirect/retry.s4body_pos must be of type integer, instead it was %s.N( RRRRRRRRt ValueErrorttype(Rtbody_post body_seek((sD/usr/lib/python2.7/site-packages/pip/_vendor/urllib3/util/request.pyR_s  N(t __future__Rtbase64Rt packages.sixRRt exceptionsRR tobjectRRRRR(((sD/usr/lib/python2.7/site-packages/pip/_vendor/urllib3/util/request.pyts  A PKZ[322/site-packages/pip/_vendor/urllib3/util/wait.pycnu[ abc@sJddlmZmZmZmZddZddZddZdS(i(t HAS_SELECTtDefaultSelectort EVENT_READt EVENT_WRITEcCststdnt|tsNt|dr?|g}qNt|}nt]}x|D]}|j||qaWg|j|D]!}|d|@r|dj^qSWdQXdS(s Waits for IO events to be available from a list of sockets or optionally a single socket if passed in. Returns a list of sockets that can be interacted with immediately. s!Platform does not have a selectortfilenoiiN( Rt ValueErrort isinstancetlistthasattrRtregistertselecttfileobj(tsocksteventsttimeouttselectortsocktkey((sA/usr/lib/python2.7/site-packages/pip/_vendor/urllib3/util/wait.pyt_wait_for_io_events s   cCst|t|S(s Waits for reading to be available from a list of sockets or optionally a single socket if passed in. Returns a list of sockets that can be read from immediately. (RR(R R((sA/usr/lib/python2.7/site-packages/pip/_vendor/urllib3/util/wait.pyt wait_for_readscCst|t|S(s Waits for writing to be available from a list of sockets or optionally a single socket if passed in. Returns a list of sockets that can be written to immediately. 
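A minimal, POSIX-oriented sketch of the wait_for_read() helper documented in the wait.pyc entry above (not part of the original archive; socket.socketpair() is used only to produce a readable socket):

import socket
from urllib3.util.wait import wait_for_read

a, b = socket.socketpair()
b.sendall(b'x')                         # make `a` readable
ready = wait_for_read([a], timeout=1.0)
print(ready == [a])                     # True: `a` can be read immediately
a.close()
b.close()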
(RR(R R((sA/usr/lib/python2.7/site-packages/pip/_vendor/urllib3/util/wait.pytwait_for_write$sN( t selectorsRRRRtNoneRRR(((sA/usr/lib/python2.7/site-packages/pip/_vendor/urllib3/util/wait.pyts"  PKZ};=RR3site-packages/pip/_vendor/urllib3/util/selectors.pynu[# Backport of selectors.py from Python 3.5+ to support Python < 3.4 # Also has the behavior specified in PEP 475 which is to retry syscalls # in the case of an EINTR error. This module is required because selectors34 # does not follow this behavior and instead returns that no dile descriptor # events have occurred rather than retry the syscall. The decision to drop # support for select.devpoll is made to maintain 100% test coverage. import errno import math import select import socket import sys import time from collections import namedtuple, Mapping try: monotonic = time.monotonic except (AttributeError, ImportError): # Python 3.3< monotonic = time.time EVENT_READ = (1 << 0) EVENT_WRITE = (1 << 1) HAS_SELECT = True # Variable that shows whether the platform has a selector. _SYSCALL_SENTINEL = object() # Sentinel in case a system call returns None. _DEFAULT_SELECTOR = None class SelectorError(Exception): def __init__(self, errcode): super(SelectorError, self).__init__() self.errno = errcode def __repr__(self): return "".format(self.errno) def __str__(self): return self.__repr__() def _fileobj_to_fd(fileobj): """ Return a file descriptor from a file object. If given an integer will simply return that integer back. """ if isinstance(fileobj, int): fd = fileobj else: try: fd = int(fileobj.fileno()) except (AttributeError, TypeError, ValueError): raise ValueError("Invalid file object: {0!r}".format(fileobj)) if fd < 0: raise ValueError("Invalid file descriptor: {0}".format(fd)) return fd # Determine which function to use to wrap system calls because Python 3.5+ # already handles the case when system calls are interrupted. if sys.version_info >= (3, 5): def _syscall_wrapper(func, _, *args, **kwargs): """ This is the short-circuit version of the below logic because in Python 3.5+ all system calls automatically restart and recalculate their timeouts. """ try: return func(*args, **kwargs) except (OSError, IOError, select.error) as e: errcode = None if hasattr(e, "errno"): errcode = e.errno raise SelectorError(errcode) else: def _syscall_wrapper(func, recalc_timeout, *args, **kwargs): """ Wrapper function for syscalls that could fail due to EINTR. All functions should be retried if there is time left in the timeout in accordance with PEP 475. """ timeout = kwargs.get("timeout", None) if timeout is None: expires = None recalc_timeout = False else: timeout = float(timeout) if timeout < 0.0: # Timeout less than 0 treated as no timeout. expires = None else: expires = monotonic() + timeout args = list(args) if recalc_timeout and "timeout" not in kwargs: raise ValueError( "Timeout must be in args or kwargs to be recalculated") result = _SYSCALL_SENTINEL while result is _SYSCALL_SENTINEL: try: result = func(*args, **kwargs) # OSError is thrown by select.select # IOError is thrown by select.epoll.poll # select.error is thrown by select.poll.poll # Aren't we thankful for Python 3.x rework for exceptions? except (OSError, IOError, select.error) as e: # select.error wasn't a subclass of OSError in the past. errcode = None if hasattr(e, "errno"): errcode = e.errno elif hasattr(e, "args"): errcode = e.args[0] # Also test for the Windows equivalent of EINTR. 
is_interrupt = (errcode == errno.EINTR or (hasattr(errno, "WSAEINTR") and errcode == errno.WSAEINTR)) if is_interrupt: if expires is not None: current_time = monotonic() if current_time > expires: raise OSError(errno=errno.ETIMEDOUT) if recalc_timeout: if "timeout" in kwargs: kwargs["timeout"] = expires - current_time continue if errcode: raise SelectorError(errcode) else: raise return result SelectorKey = namedtuple('SelectorKey', ['fileobj', 'fd', 'events', 'data']) class _SelectorMapping(Mapping): """ Mapping of file objects to selector keys """ def __init__(self, selector): self._selector = selector def __len__(self): return len(self._selector._fd_to_key) def __getitem__(self, fileobj): try: fd = self._selector._fileobj_lookup(fileobj) return self._selector._fd_to_key[fd] except KeyError: raise KeyError("{0!r} is not registered.".format(fileobj)) def __iter__(self): return iter(self._selector._fd_to_key) class BaseSelector(object): """ Abstract Selector class A selector supports registering file objects to be monitored for specific I/O events. A file object is a file descriptor or any object with a `fileno()` method. An arbitrary object can be attached to the file object which can be used for example to store context info, a callback, etc. A selector can use various implementations (select(), poll(), epoll(), and kqueue()) depending on the platform. The 'DefaultSelector' class uses the most efficient implementation for the current platform. """ def __init__(self): # Maps file descriptors to keys. self._fd_to_key = {} # Read-only mapping returned by get_map() self._map = _SelectorMapping(self) def _fileobj_lookup(self, fileobj): """ Return a file descriptor from a file object. This wraps _fileobj_to_fd() to do an exhaustive search in case the object is invalid but we still have it in our map. Used by unregister() so we can unregister an object that was previously registered even if it is closed. It is also used by _SelectorMapping """ try: return _fileobj_to_fd(fileobj) except ValueError: # Search through all our mapped keys. for key in self._fd_to_key.values(): if key.fileobj is fileobj: return key.fd # Raise ValueError after all. raise def register(self, fileobj, events, data=None): """ Register a file object for a set of events to monitor. """ if (not events) or (events & ~(EVENT_READ | EVENT_WRITE)): raise ValueError("Invalid events: {0!r}".format(events)) key = SelectorKey(fileobj, self._fileobj_lookup(fileobj), events, data) if key.fd in self._fd_to_key: raise KeyError("{0!r} (FD {1}) is already registered" .format(fileobj, key.fd)) self._fd_to_key[key.fd] = key return key def unregister(self, fileobj): """ Unregister a file object from being monitored. """ try: key = self._fd_to_key.pop(self._fileobj_lookup(fileobj)) except KeyError: raise KeyError("{0!r} is not registered".format(fileobj)) # Getting the fileno of a closed socket on Windows errors with EBADF. except socket.error as e: # Platform-specific: Windows. if e.errno != errno.EBADF: raise else: for key in self._fd_to_key.values(): if key.fileobj is fileobj: self._fd_to_key.pop(key.fd) break else: raise KeyError("{0!r} is not registered".format(fileobj)) return key def modify(self, fileobj, events, data=None): """ Change a registered file object monitored events and data. """ # NOTE: Some subclasses optimize this operation even further. 
try: key = self._fd_to_key[self._fileobj_lookup(fileobj)] except KeyError: raise KeyError("{0!r} is not registered".format(fileobj)) if events != key.events: self.unregister(fileobj) key = self.register(fileobj, events, data) elif data != key.data: # Use a shortcut to update the data. key = key._replace(data=data) self._fd_to_key[key.fd] = key return key def select(self, timeout=None): """ Perform the actual selection until some monitored file objects are ready or the timeout expires. """ raise NotImplementedError() def close(self): """ Close the selector. This must be called to ensure that all underlying resources are freed. """ self._fd_to_key.clear() self._map = None def get_key(self, fileobj): """ Return the key associated with a registered file object. """ mapping = self.get_map() if mapping is None: raise RuntimeError("Selector is closed") try: return mapping[fileobj] except KeyError: raise KeyError("{0!r} is not registered".format(fileobj)) def get_map(self): """ Return a mapping of file objects to selector keys """ return self._map def _key_from_fd(self, fd): """ Return the key associated to a given file descriptor Return None if it is not found. """ try: return self._fd_to_key[fd] except KeyError: return None def __enter__(self): return self def __exit__(self, *args): self.close() # Almost all platforms have select.select() if hasattr(select, "select"): class SelectSelector(BaseSelector): """ Select-based selector. """ def __init__(self): super(SelectSelector, self).__init__() self._readers = set() self._writers = set() def register(self, fileobj, events, data=None): key = super(SelectSelector, self).register(fileobj, events, data) if events & EVENT_READ: self._readers.add(key.fd) if events & EVENT_WRITE: self._writers.add(key.fd) return key def unregister(self, fileobj): key = super(SelectSelector, self).unregister(fileobj) self._readers.discard(key.fd) self._writers.discard(key.fd) return key def _select(self, r, w, timeout=None): """ Wrapper for select.select because timeout is a positional arg """ return select.select(r, w, [], timeout) def select(self, timeout=None): # Selecting on empty lists on Windows errors out. if not len(self._readers) and not len(self._writers): return [] timeout = None if timeout is None else max(timeout, 0.0) ready = [] r, w, _ = _syscall_wrapper(self._select, True, self._readers, self._writers, timeout) r = set(r) w = set(w) for fd in r | w: events = 0 if fd in r: events |= EVENT_READ if fd in w: events |= EVENT_WRITE key = self._key_from_fd(fd) if key: ready.append((key, events & key.events)) return ready if hasattr(select, "poll"): class PollSelector(BaseSelector): """ Poll-based selector """ def __init__(self): super(PollSelector, self).__init__() self._poll = select.poll() def register(self, fileobj, events, data=None): key = super(PollSelector, self).register(fileobj, events, data) event_mask = 0 if events & EVENT_READ: event_mask |= select.POLLIN if events & EVENT_WRITE: event_mask |= select.POLLOUT self._poll.register(key.fd, event_mask) return key def unregister(self, fileobj): key = super(PollSelector, self).unregister(fileobj) self._poll.unregister(key.fd) return key def _wrap_poll(self, timeout=None): """ Wrapper function for select.poll.poll() so that _syscall_wrapper can work with only seconds. """ if timeout is not None: if timeout <= 0: timeout = 0 else: # select.poll.poll() has a resolution of 1 millisecond, # round away from zero to wait *at least* timeout seconds. 
timeout = math.ceil(timeout * 1e3) result = self._poll.poll(timeout) return result def select(self, timeout=None): ready = [] fd_events = _syscall_wrapper(self._wrap_poll, True, timeout=timeout) for fd, event_mask in fd_events: events = 0 if event_mask & ~select.POLLIN: events |= EVENT_WRITE if event_mask & ~select.POLLOUT: events |= EVENT_READ key = self._key_from_fd(fd) if key: ready.append((key, events & key.events)) return ready if hasattr(select, "epoll"): class EpollSelector(BaseSelector): """ Epoll-based selector """ def __init__(self): super(EpollSelector, self).__init__() self._epoll = select.epoll() def fileno(self): return self._epoll.fileno() def register(self, fileobj, events, data=None): key = super(EpollSelector, self).register(fileobj, events, data) events_mask = 0 if events & EVENT_READ: events_mask |= select.EPOLLIN if events & EVENT_WRITE: events_mask |= select.EPOLLOUT _syscall_wrapper(self._epoll.register, False, key.fd, events_mask) return key def unregister(self, fileobj): key = super(EpollSelector, self).unregister(fileobj) try: _syscall_wrapper(self._epoll.unregister, False, key.fd) except SelectorError: # This can occur when the fd was closed since registry. pass return key def select(self, timeout=None): if timeout is not None: if timeout <= 0: timeout = 0.0 else: # select.epoll.poll() has a resolution of 1 millisecond # but luckily takes seconds so we don't need a wrapper # like PollSelector. Just for better rounding. timeout = math.ceil(timeout * 1e3) * 1e-3 timeout = float(timeout) else: timeout = -1.0 # epoll.poll() must have a float. # We always want at least 1 to ensure that select can be called # with no file descriptors registered. Otherwise will fail. max_events = max(len(self._fd_to_key), 1) ready = [] fd_events = _syscall_wrapper(self._epoll.poll, True, timeout=timeout, maxevents=max_events) for fd, event_mask in fd_events: events = 0 if event_mask & ~select.EPOLLIN: events |= EVENT_WRITE if event_mask & ~select.EPOLLOUT: events |= EVENT_READ key = self._key_from_fd(fd) if key: ready.append((key, events & key.events)) return ready def close(self): self._epoll.close() super(EpollSelector, self).close() if hasattr(select, "kqueue"): class KqueueSelector(BaseSelector): """ Kqueue / Kevent-based selector """ def __init__(self): super(KqueueSelector, self).__init__() self._kqueue = select.kqueue() def fileno(self): return self._kqueue.fileno() def register(self, fileobj, events, data=None): key = super(KqueueSelector, self).register(fileobj, events, data) if events & EVENT_READ: kevent = select.kevent(key.fd, select.KQ_FILTER_READ, select.KQ_EV_ADD) _syscall_wrapper(self._kqueue.control, False, [kevent], 0, 0) if events & EVENT_WRITE: kevent = select.kevent(key.fd, select.KQ_FILTER_WRITE, select.KQ_EV_ADD) _syscall_wrapper(self._kqueue.control, False, [kevent], 0, 0) return key def unregister(self, fileobj): key = super(KqueueSelector, self).unregister(fileobj) if key.events & EVENT_READ: kevent = select.kevent(key.fd, select.KQ_FILTER_READ, select.KQ_EV_DELETE) try: _syscall_wrapper(self._kqueue.control, False, [kevent], 0, 0) except SelectorError: pass if key.events & EVENT_WRITE: kevent = select.kevent(key.fd, select.KQ_FILTER_WRITE, select.KQ_EV_DELETE) try: _syscall_wrapper(self._kqueue.control, False, [kevent], 0, 0) except SelectorError: pass return key def select(self, timeout=None): if timeout is not None: timeout = max(timeout, 0) max_events = len(self._fd_to_key) * 2 ready_fds = {} kevent_list = _syscall_wrapper(self._kqueue.control, True, None, 
max_events, timeout) for kevent in kevent_list: fd = kevent.ident event_mask = kevent.filter events = 0 if event_mask == select.KQ_FILTER_READ: events |= EVENT_READ if event_mask == select.KQ_FILTER_WRITE: events |= EVENT_WRITE key = self._key_from_fd(fd) if key: if key.fd not in ready_fds: ready_fds[key.fd] = (key, events & key.events) else: old_events = ready_fds[key.fd][1] ready_fds[key.fd] = (key, (events | old_events) & key.events) return list(ready_fds.values()) def close(self): self._kqueue.close() super(KqueueSelector, self).close() if not hasattr(select, 'select'): # Platform-specific: AppEngine HAS_SELECT = False def _can_allocate(struct): """ Checks that select structs can be allocated by the underlying operating system, not just advertised by the select module. We don't check select() because we'll be hopeful that most platforms that don't have it available will not advertise it. (ie: GAE) """ try: # select.poll() objects won't fail until used. if struct == 'poll': p = select.poll() p.poll(0) # All others will fail on allocation. else: getattr(select, struct)().close() return True except (OSError, AttributeError) as e: return False # Choose the best implementation, roughly: # kqueue == epoll > poll > select. Devpoll not supported. (See above) # select() also can't accept a FD > FD_SETSIZE (usually around 1024) def DefaultSelector(): """ This function serves as a first call for DefaultSelector to detect if the select module is being monkey-patched incorrectly by eventlet, greenlet, and preserve proper behavior. """ global _DEFAULT_SELECTOR if _DEFAULT_SELECTOR is None: if _can_allocate('kqueue'): _DEFAULT_SELECTOR = KqueueSelector elif _can_allocate('epoll'): _DEFAULT_SELECTOR = EpollSelector elif _can_allocate('poll'): _DEFAULT_SELECTOR = PollSelector elif hasattr(select, 'select'): _DEFAULT_SELECTOR = SelectSelector else: # Platform-specific: AppEngine raise ValueError('Platform does not have a selector') return _DEFAULT_SELECTOR() PKZ_=4site-packages/pip/_vendor/urllib3/util/connection.pynu[from __future__ import absolute_import import socket from .wait import wait_for_read from .selectors import HAS_SELECT, SelectorError def is_connection_dropped(conn): # Platform-specific """ Returns True if the connection is dropped and should be closed. :param conn: :class:`httplib.HTTPConnection` object. Note: For platforms like AppEngine, this will always return ``False`` to let the platform handle connection recycling transparently for us. """ sock = getattr(conn, 'sock', False) if sock is False: # Platform-specific: AppEngine return False if sock is None: # Connection already closed (such as by httplib). return True if not HAS_SELECT: return False try: return bool(wait_for_read(sock, timeout=0.0)) except SelectorError: return True # This function is copied from socket.py in the Python 2.7 standard # library test suite. Added to its signature is only `socket_options`. # One additional modification is that we avoid binding to IPv6 servers # discovered in DNS if the system doesn't have IPv6 functionality. def create_connection(address, timeout=socket._GLOBAL_DEFAULT_TIMEOUT, source_address=None, socket_options=None): """Connect to *address* and return the socket object. Convenience function. Connect to *address* (a 2-tuple ``(host, port)``) and return the socket object. Passing the optional *timeout* parameter will set the timeout on the socket instance before attempting to connect. 
If no *timeout* is supplied, the global default timeout setting returned by :func:`getdefaulttimeout` is used. If *source_address* is set it must be a tuple of (host, port) for the socket to bind as a source address before making the connection. An host of '' or port 0 tells the OS to use the default. """ host, port = address if host.startswith('['): host = host.strip('[]') err = None # Using the value from allowed_gai_family() in the context of getaddrinfo lets # us select whether to work with IPv4 DNS records, IPv6 records, or both. # The original create_connection function always returns all records. family = allowed_gai_family() for res in socket.getaddrinfo(host, port, family, socket.SOCK_STREAM): af, socktype, proto, canonname, sa = res sock = None try: sock = socket.socket(af, socktype, proto) # If provided, set socket level options before connecting. _set_socket_options(sock, socket_options) if timeout is not socket._GLOBAL_DEFAULT_TIMEOUT: sock.settimeout(timeout) if source_address: sock.bind(source_address) sock.connect(sa) return sock except socket.error as e: err = e if sock is not None: sock.close() sock = None if err is not None: raise err raise socket.error("getaddrinfo returns an empty list") def _set_socket_options(sock, options): if options is None: return for opt in options: sock.setsockopt(*opt) def allowed_gai_family(): """This function is designed to work in the context of getaddrinfo, where family=socket.AF_UNSPEC is the default and will perform a DNS search for both IPv6 and IPv4 records.""" family = socket.AF_INET if HAS_IPV6: family = socket.AF_UNSPEC return family def _has_ipv6(host): """ Returns True if the system can bind an IPv6 address. """ sock = None has_ipv6 = False if socket.has_ipv6: # has_ipv6 returns true if cPython was compiled with IPv6 support. # It does not tell us if the system has IPv6 support enabled. To # determine that we must bind to an IPv6 address. # https://github.com/shazow/urllib3/pull/611 # https://bugs.python.org/issue658327 try: sock = socket.socket(socket.AF_INET6) sock.bind((host, 0)) has_ipv6 = True except Exception: pass if sock: sock.close() return has_ipv6 HAS_IPV6 = _has_ipv6('::1') PKZ95site-packages/pip/_vendor/urllib3/util/connection.pyonu[ abc@@sddlmZddlZddlmZddlmZmZdZej dddZ dZ d Z d Zed ZdS( i(tabsolute_importNi(t wait_for_read(t HAS_SELECTt SelectorErrorcC@sot|dt}|tkr"tS|dkr2tSts<tSytt|ddSWntk rjtSXdS(s  Returns True if the connection is dropped and should be closed. :param conn: :class:`httplib.HTTPConnection` object. Note: For platforms like AppEngine, this will always return ``False`` to let the platform handle connection recycling transparently for us. tsockttimeoutgN(tgetattrtFalsetNonetTrueRtboolRR(tconnR((sG/usr/lib/python2.7/site-packages/pip/_vendor/urllib3/util/connection.pytis_connection_droppeds    cC@sO|\}}|jdr-|jd}nd}t}xtj|||tjD]}|\} } } } } d}yltj| | | }t|||tjk r|j |n|r|j |n|j | |SWqXtj k r"}|}|dk r#|j d}q#qXXqXW|dk r<|ntj ddS(sdConnect to *address* and return the socket object. Convenience function. Connect to *address* (a 2-tuple ``(host, port)``) and return the socket object. Passing the optional *timeout* parameter will set the timeout on the socket instance before attempting to connect. If no *timeout* is supplied, the global default timeout setting returned by :func:`getdefaulttimeout` is used. If *source_address* is set it must be a tuple of (host, port) for the socket to bind as a source address before making the connection. An host of '' or port 0 tells the OS to use the default. 
t[s[]s!getaddrinfo returns an empty listN(t startswithtstripRtallowed_gai_familytsockett getaddrinfot SOCK_STREAMt_set_socket_optionst_GLOBAL_DEFAULT_TIMEOUTt settimeouttbindtconnectterrortclose(taddressRtsource_addresstsocket_optionsthosttportterrtfamilytrestaftsocktypetprotot canonnametsaRte((sG/usr/lib/python2.7/site-packages/pip/_vendor/urllib3/util/connection.pytcreate_connection$s2  "      cC@s2|dkrdSx|D]}|j|qWdS(N(Rt setsockopt(Rtoptionstopt((sG/usr/lib/python2.7/site-packages/pip/_vendor/urllib3/util/connection.pyRXs  cC@stj}trtj}n|S(sThis function is designed to work in the context of getaddrinfo, where family=socket.AF_UNSPEC is the default and will perform a DNS search for both IPv6 and IPv4 records.(RtAF_INETtHAS_IPV6t AF_UNSPEC(R!((sG/usr/lib/python2.7/site-packages/pip/_vendor/urllib3/util/connection.pyR`s  cC@srd}t}tjr[y/tjtj}|j|dft}Wq[tk rWq[Xn|rn|jn|S(s6 Returns True if the system can bind an IPv6 address. iN( RRRthas_ipv6tAF_INET6RR t ExceptionR(RRR0((sG/usr/lib/python2.7/site-packages/pip/_vendor/urllib3/util/connection.pyt _has_ipv6ks    s::1(t __future__RRtwaitRt selectorsRRR RRR)RRR3R.(((sG/usr/lib/python2.7/site-packages/pip/_vendor/urllib3/util/connection.pyts  3  PKZ{Gm)m)/site-packages/pip/_vendor/urllib3/util/ssl_.pycnu[ abc@@sGddlmZddlZddlZddlZddlmZmZddlm Z m Z m Z ddl m Z mZmZdZeZeZeZie d6e d6e d 6Zd Zeed eZy<ddlZdd lmZmZmZdd lmZWnek rnXy ddlmZm Z m!Z!Wn#ek red+\ZZ dZ!nXdj"dddddddddddddd d!d"gZ#ydd#lmZWn3ek rddl$Z$d$e%fd%YZnXd&Z&d'Z'd(Z(ddddd)Z)dddddddddd* Z*dS(,i(tabsolute_importN(thexlifyt unhexlify(tmd5tsha1tsha256i(tSSLErrortInsecurePlatformWarningtSNIMissingWarningi i(i@cC@s`tt|t|}x7tt|t|D]\}}|||AO}q8W|dkS(s Compare two digests of equal length in constant time. The digests must be of type str/bytes. Returns True if the digests match, and False otherwise. i(tabstlentzipt bytearray(tatbtresulttltr((sA/usr/lib/python2.7/site-packages/pip/_vendor/urllib3/util/ssl_.pyt_const_compare_digest_backports(tcompare_digest(t wrap_sockett CERT_NONEtPROTOCOL_SSLv23(tHAS_SNI(t OP_NO_SSLv2t OP_NO_SSLv3tOP_NO_COMPRESSIONiiit:sTLS13-AES-256-GCM-SHA384sTLS13-CHACHA20-POLY1305-SHA256sTLS13-AES-128-GCM-SHA256s ECDH+AESGCMs ECDH+CHACHA20s DH+AESGCMs DH+CHACHA20s ECDH+AES256s DH+AES256s ECDH+AES128sDH+AESs RSA+AESGCMsRSA+AESs!aNULLs!eNULLs!MD5(t SSLContextRcB@soeZdejko d knp1d ejkZdZdZd d dZdZ d e dZ RS( iiicC@sO||_t|_tj|_d|_d|_d|_ d|_ d|_ dS(Ni( tprotocoltFalsetcheck_hostnametsslRt verify_modetNonetca_certstoptionstcertfiletkeyfiletciphers(tselftprotocol_version((sA/usr/lib/python2.7/site-packages/pip/_vendor/urllib3/util/ssl_.pyt__init__cs       cC@s||_||_dS(N(R%R&(R(R%R&((sA/usr/lib/python2.7/site-packages/pip/_vendor/urllib3/util/ssl_.pytload_cert_chainns cC@s(||_|dk r$tdndS(Ns-CA directories not supported in older Pythons(R#R"R(R(tcafiletcapath((sA/usr/lib/python2.7/site-packages/pip/_vendor/urllib3/util/ssl_.pytload_verify_locationsrs  cC@s%|jstdn||_dS(NsYour version of Python does not support setting a custom cipher suite. Please upgrade to Python 2.7, 3.2, or later if you need this functionality.(tsupports_set_cipherst TypeErrorR'(R(t cipher_suite((sA/usr/lib/python2.7/site-packages/pip/_vendor/urllib3/util/ssl_.pyt set_ciphersxs  cC@stjdti|jd6|jd6|jd6|jd6|jd6|d6}|jrnt |d|j |St ||SdS( Ns2A true SSLContext object is not available. This prevents urllib3 from configuring SSL appropriately and may cause certain SSL connections to fail. You can upgrade to a newer version of Python to solve this. 
For more information, see https://urllib3.readthedocs.io/en/latest/advanced-usage.html#ssl-warningsR&R%R#t cert_reqst ssl_versiont server_sideR'( twarningstwarnRR&R%R#R!RR/RR'(R(tsockettserver_hostnameR5tkwargs((sA/usr/lib/python2.7/site-packages/pip/_vendor/urllib3/util/ssl_.pyRs       (ii(i(iiN( t__name__t __module__tsyst version_infoR/R*R+R"R.R2RR(((sA/usr/lib/python2.7/site-packages/pip/_vendor/urllib3/util/ssl_.pyR_s  cC@s|jddj}t|}tj|}|sQtdj|nt|j}||j }t ||stdj|t |ndS(s Checks if given fingerprint matches the supplied certificate. :param cert: Certificate as bytes object. :param fingerprint: Fingerprint as string of hexdigits, can be interspersed by colons. Rts"Fingerprint of invalid length: {0}s6Fingerprints did not match. Expected "{0}", got "{1}".N( treplacetlowerR t HASHFUNC_MAPtgetRtformatRtencodetdigestt_const_compare_digestR(tcertt fingerprintt digest_lengththashfunctfingerprint_bytest cert_digest((sA/usr/lib/python2.7/site-packages/pip/_vendor/urllib3/util/ssl_.pytassert_fingerprints   cC@s[|dkrtSt|trWtt|d}|dkrSttd|}n|S|S(s Resolves the argument to a numeric constant, which can be passed to the wrap_socket function/method from the ssl module. Defaults to :data:`ssl.CERT_NONE`. If given a string it is assumed to be the name of the constant in the :mod:`ssl` module or its abbrevation. (So you can specify `REQUIRED` instead of `CERT_REQUIRED`. If it's neither `None` nor a string we assume it is already the numeric constant which can directly be passed to wrap_socket. tCERT_N(R"Rt isinstancetstrtgetattrR (t candidatetres((sA/usr/lib/python2.7/site-packages/pip/_vendor/urllib3/util/ssl_.pytresolve_cert_reqss  cC@s[|dkrtSt|trWtt|d}|dkrSttd|}n|S|S(s like resolve_cert_reqs t PROTOCOL_N(R"RRPRQRRR (RSRT((sA/usr/lib/python2.7/site-packages/pip/_vendor/urllib3/util/ssl_.pytresolve_ssl_versions  cC@st|ptj}|dkr*tjn|}|dkrcd}|tO}|tO}|tO}n|j|O_t |dt r|j |pt n||_ t |dddk rt|_n|S(sAll arguments have the same meaning as ``ssl_wrap_socket``. By default, this function does a lot of the same work that ``ssl.create_default_context`` does on Python 3.4+. It: - Disables SSLv2, SSLv3, and compression - Sets a restricted set of server ciphers If you wish to enable SSLv3, you can do:: from urllib3.util import ssl_ context = ssl_.create_urllib3_context() context.options &= ~ssl_.OP_NO_SSLv3 You can do the same to enable compression (substituting ``COMPRESSION`` for ``SSLv3`` in the last line above). :param ssl_version: The desired protocol version to use. This will default to PROTOCOL_SSLv23 which will negotiate the highest protocol that both the server and your installation of OpenSSL support. :param cert_reqs: Whether to require the certificate verification. This defaults to ``ssl.CERT_REQUIRED``. :param options: Specific OpenSSL options. These default to ``ssl.OP_NO_SSLv2``, ``ssl.OP_NO_SSLv3``, ``ssl.OP_NO_COMPRESSION``. :param ciphers: Which cipher suites to allow the server to select. :returns: Constructed SSLContext object with specified options :rtype: SSLContext iR/RN(RR RR"t CERT_REQUIREDRRRR$RRtTrueR2tDEFAULT_CIPHERSR!RR(R4R3R$R'tcontext((sA/usr/lib/python2.7/site-packages/pip/_vendor/urllib3/util/ssl_.pytcreate_urllib3_contexts#      c C@s|} | dkr*t||d|} n|s6| ry| j|| Wqtk rk} t| qtk r} | jtjkrt| nqXn(|dkrt| dr| j n|r| j ||nt r| j |d|St jdt| j |S(s All arguments except for server_hostname, ssl_context, and ca_cert_dir have the same meaning as they do when using :func:`ssl.wrap_socket`. 
:param server_hostname: When SNI is supported, the expected hostname of the certificate :param ssl_context: A pre-made :class:`SSLContext` object. If none is provided, one will be created using :func:`create_urllib3_context`. :param ciphers: A string of ciphers we wish the client to support. This is not supported on Python 2.6 as the ssl module does not support it. :param ca_cert_dir: A directory containing CA certificates in multiple separate files, as supported by OpenSSL's -CApath flag or the capath argument to SSLContext.load_verify_locations(). R'tload_default_certsR9sAn HTTPS request has been made, but the SNI (Subject Name Indication) extension to TLS is not available on this platform. This may cause the server to present an incorrect TLS certificate, which can cause validation failures. You can upgrade to a newer version of Python to solve this. For more information, see https://urllib3.readthedocs.io/en/latest/advanced-usage.html#ssl-warningsN(R"R\R.tIOErrorRtOSErrorterrnotENOENTthasattrR]R+RRR6R7R( tsockR&R%R3R#R9R4R't ssl_contextt ca_cert_dirR[te((sA/usr/lib/python2.7/site-packages/pip/_vendor/urllib3/util/ssl_.pytssl_wrap_sockets.      (ii(+t __future__RR`R6thmactbinasciiRRthashlibRRRt exceptionsRRRR"RRRt IS_PYOPENSSLtIS_SECURETRANSPORTRBRRRRGR RRRt ImportErrorRRRtjoinRZR=tobjectRNRURWR\Rg(((sA/usr/lib/python2.7/site-packages/pip/_vendor/urllib3/util/ssl_.pytsv               :   >  PKZ83site-packages/pip/_vendor/urllib3/util/response.pycnu[ abc@@sOddlmZddlmZddlmZdZdZdZ dS( i(tabsolute_importi(t http_client(tHeaderParsingErrorcC@svy|jSWntk r!nXy |jSWntk r@nXy|jdkSWntk renXtddS(st Checks whether a given file-like object is closed. :param obj: The file-like object to check. s)Unable to determine whether fp is closed.N(tisclosedtAttributeErrortclosedtfptNonet ValueError(tobj((sE/usr/lib/python2.7/site-packages/pip/_vendor/urllib3/util/response.pyt is_fp_closeds    cC@st|tjs0tdjt|nt|dd}t|dd}d}|rl|}n|sx|rtd|d|ndS(sP Asserts whether all headers have been successfully parsed. Extracts encountered errors from the result of parsing headers. Only works on Python 3. :param headers: Headers to verify. :type headers: `httplib.HTTPMessage`. :raises urllib3.exceptions.HeaderParsingError: If parsing errors are found. s"expected httplib.Message, got {0}.tdefectst get_payloadt unparsed_dataN( t isinstancethttplibt HTTPMessaget TypeErrortformatttypetgetattrRR(theadersR R R ((sE/usr/lib/python2.7/site-packages/pip/_vendor/urllib3/util/response.pytassert_header_parsing&s   cC@s2|j}t|tr"|dkS|jdkS(s Checks whether the request of a response has been a HEAD-request. Handles the quirks of AppEngine. :param conn: :type conn: :class:`httplib.HTTPResponse` itHEAD(t_methodRtinttupper(tresponsetmethod((sE/usr/lib/python2.7/site-packages/pip/_vendor/urllib3/util/response.pytis_response_to_headEs  N( t __future__Rtpackages.six.movesRRt exceptionsRR RR(((sE/usr/lib/python2.7/site-packages/pip/_vendor/urllib3/util/response.pyts   PKZAayy1site-packages/pip/_vendor/urllib3/util/request.pynu[from __future__ import absolute_import from base64 import b64encode from ..packages.six import b, integer_types from ..exceptions import UnrewindableBodyError ACCEPT_ENCODING = 'gzip,deflate' _FAILEDTELL = object() def make_headers(keep_alive=None, accept_encoding=None, user_agent=None, basic_auth=None, proxy_basic_auth=None, disable_cache=None): """ Shortcuts for generating request headers. :param keep_alive: If ``True``, adds 'connection: keep-alive' header. :param accept_encoding: Can be a boolean, list, or string. 
``True`` translates to 'gzip,deflate'. List will get joined by comma. String will be used as provided. :param user_agent: String representing the user-agent you want, such as "python-urllib3/0.6" :param basic_auth: Colon-separated username:password string for 'authorization: basic ...' auth header. :param proxy_basic_auth: Colon-separated username:password string for 'proxy-authorization: basic ...' auth header. :param disable_cache: If ``True``, adds 'cache-control: no-cache' header. Example:: >>> make_headers(keep_alive=True, user_agent="Batman/1.0") {'connection': 'keep-alive', 'user-agent': 'Batman/1.0'} >>> make_headers(accept_encoding=True) {'accept-encoding': 'gzip,deflate'} """ headers = {} if accept_encoding: if isinstance(accept_encoding, str): pass elif isinstance(accept_encoding, list): accept_encoding = ','.join(accept_encoding) else: accept_encoding = ACCEPT_ENCODING headers['accept-encoding'] = accept_encoding if user_agent: headers['user-agent'] = user_agent if keep_alive: headers['connection'] = 'keep-alive' if basic_auth: headers['authorization'] = 'Basic ' + \ b64encode(b(basic_auth)).decode('utf-8') if proxy_basic_auth: headers['proxy-authorization'] = 'Basic ' + \ b64encode(b(proxy_basic_auth)).decode('utf-8') if disable_cache: headers['cache-control'] = 'no-cache' return headers def set_file_position(body, pos): """ If a position is provided, move file to that point. Otherwise, we'll attempt to record a position for future use. """ if pos is not None: rewind_body(body, pos) elif getattr(body, 'tell', None) is not None: try: pos = body.tell() except (IOError, OSError): # This differentiates from None, allowing us to catch # a failed `tell()` later when trying to rewind the body. pos = _FAILEDTELL return pos def rewind_body(body, body_pos): """ Attempt to rewind body to a certain position. Primarily used for request redirects and retries. :param body: File-like object that supports seek. :param int pos: Position to seek to in file. """ body_seek = getattr(body, 'seek', None) if body_seek is not None and isinstance(body_pos, integer_types): try: body_seek(body_pos) except (IOError, OSError): raise UnrewindableBodyError("An error occurred when rewinding request " "body for redirect/retry.") elif body_pos is _FAILEDTELL: raise UnrewindableBodyError("Unable to record file position for rewinding " "request body during a redirect/retry.") else: raise ValueError("body_pos must be of type integer, " "instead it was %s." % type(body_pos)) PKZRu2site-packages/pip/_vendor/urllib3/util/__init__.pynu[from __future__ import absolute_import # For backwards compatibility, provide imports that used to be here. 
from .connection import is_connection_dropped from .request import make_headers from .response import is_fp_closed from .ssl_ import ( SSLContext, HAS_SNI, IS_PYOPENSSL, IS_SECURETRANSPORT, assert_fingerprint, resolve_cert_reqs, resolve_ssl_version, ssl_wrap_socket, ) from .timeout import ( current_time, Timeout, ) from .retry import Retry from .url import ( get_host, parse_url, split_first, Url, ) from .wait import ( wait_for_read, wait_for_write ) __all__ = ( 'HAS_SNI', 'IS_PYOPENSSL', 'IS_SECURETRANSPORT', 'SSLContext', 'Retry', 'Timeout', 'Url', 'assert_fingerprint', 'current_time', 'is_connection_dropped', 'is_fp_closed', 'get_host', 'parse_url', 'make_headers', 'resolve_cert_reqs', 'resolve_ssl_version', 'split_first', 'ssl_wrap_socket', 'wait_for_read', 'wait_for_write' ) PKZ[322/site-packages/pip/_vendor/urllib3/util/wait.pyonu[ abc@sJddlmZmZmZmZddZddZddZdS(i(t HAS_SELECTtDefaultSelectort EVENT_READt EVENT_WRITEcCststdnt|tsNt|dr?|g}qNt|}nt]}x|D]}|j||qaWg|j|D]!}|d|@r|dj^qSWdQXdS(s Waits for IO events to be available from a list of sockets or optionally a single socket if passed in. Returns a list of sockets that can be interacted with immediately. s!Platform does not have a selectortfilenoiiN( Rt ValueErrort isinstancetlistthasattrRtregistertselecttfileobj(tsocksteventsttimeouttselectortsocktkey((sA/usr/lib/python2.7/site-packages/pip/_vendor/urllib3/util/wait.pyt_wait_for_io_events s   cCst|t|S(s Waits for reading to be available from a list of sockets or optionally a single socket if passed in. Returns a list of sockets that can be read from immediately. (RR(R R((sA/usr/lib/python2.7/site-packages/pip/_vendor/urllib3/util/wait.pyt wait_for_readscCst|t|S(s Waits for writing to be available from a list of sockets or optionally a single socket if passed in. Returns a list of sockets that can be written to immediately. (RR(R R((sA/usr/lib/python2.7/site-packages/pip/_vendor/urllib3/util/wait.pytwait_for_write$sN( t selectorsRRRRtNoneRRR(((sA/usr/lib/python2.7/site-packages/pip/_vendor/urllib3/util/wait.pyts"  PKZQQ4site-packages/pip/_vendor/urllib3/util/selectors.pycnu[ abc@sddlZddlZddlZddlZddlZddlZddlmZmZy ej Z Wn e e fk rejZ nXd#Z d$Z eZeZdadefdYZdZejd%krd Zn d Zed d dddgZdefdYZdefdYZeedradefdYZneedrdefdYZneedrdefdYZneedrdefd YZ needse!Znd!Z"d"Z#dS(&iN(t namedtupletMappingiit SelectorErrorcBs#eZdZdZdZRS(cCs tt|j||_dS(N(tsuperRt__init__terrno(tselfterrcode((sF/usr/lib/python2.7/site-packages/pip/_vendor/urllib3/util/selectors.pyRscCsdj|jS(Ns(tformatR(R((sF/usr/lib/python2.7/site-packages/pip/_vendor/urllib3/util/selectors.pyt__repr__"scCs |jS(N(R (R((sF/usr/lib/python2.7/site-packages/pip/_vendor/urllib3/util/selectors.pyt__str__%s(t__name__t __module__RR R (((sF/usr/lib/python2.7/site-packages/pip/_vendor/urllib3/util/selectors.pyRs  cCst|tr|}nHyt|j}Wn/tttfk r_tdj|nX|dkrtdj|n|S(sl Return a file descriptor from a file object. If given an integer will simply return that integer back. sInvalid file object: {0!r}isInvalid file descriptor: {0}(t isinstancetinttfilenotAttributeErrort TypeErrort ValueErrorR(tfileobjtfd((sF/usr/lib/python2.7/site-packages/pip/_vendor/urllib3/util/selectors.pyt_fileobj_to_fd)s  iicOsdy|||SWnLtttjfk r_}d}t|drP|j}nt|nXdS(s This is the short-circuit version of the below logic because in Python 3.5+ all system calls automatically restart and recalculate their timeouts. 
RN(tOSErrortIOErrortselectterrortNonethasattrRR(tfunct_targstkwargsteR((sF/usr/lib/python2.7/site-packages/pip/_vendor/urllib3/util/selectors.pyt_syscall_wrapper;s c Os|jdd}|dkr-d}t}n.t|}|dkrNd}n t|}t|}|rd|krtdnt}x5|tkry|||}Wqtt t j fk r}d}t |dr|j }nt |dr|jd}n|t jkp8t t do8|t jk} | r|dk rt} | |krwtdt jn|rd|kr|| |d(((sF/usr/lib/python2.7/site-packages/pip/_vendor/urllib3/util/selectors.pyR4s    t BaseSelectorcBseZdZdZdZd dZdZd dZd dZ dZ dZ d Z d Z d Zd ZRS(s/ Abstract Selector class A selector supports registering file objects to be monitored for specific I/O events. A file object is a file descriptor or any object with a `fileno()` method. An arbitrary object can be attached to the file object which can be used for example to store context info, a callback, etc. A selector can use various implementations (select(), poll(), epoll(), and kqueue()) depending on the platform. The 'DefaultSelector' class uses the most efficient implementation for the current platform. cCsi|_t||_dS(N(R8R4t_map(R((sF/usr/lib/python2.7/site-packages/pip/_vendor/urllib3/util/selectors.pyRs cCsYyt|SWnDtk rTx-|jjD]}|j|kr.|jSq.WnXdS(sa Return a file descriptor from a file object. This wraps _fileobj_to_fd() to do an exhaustive search in case the object is invalid but we still have it in our map. Used by unregister() so we can unregister an object that was previously registered even if it is closed. It is also used by _SelectorMapping N(RRR8tvaluesRR(RRtkey((sF/usr/lib/python2.7/site-packages/pip/_vendor/urllib3/util/selectors.pyR:s  cCs| s|ttB@r.tdj|nt||j|||}|j|jkr|tdj||jn||j|j<|S(s8 Register a file object for a set of events to monitor. sInvalid events: {0!r}s${0!r} (FD {1}) is already registered( t EVENT_READt EVENT_WRITERRR1R:RR8R;(RRR2R3RC((sF/usr/lib/python2.7/site-packages/pip/_vendor/urllib3/util/selectors.pytregisters cCsy|jj|j|}Wntk rGtdj|ntjk r}|jtjkrqqxV|jj D],}|j |kr|jj|j PqqWtdj|nX|S(s0 Unregister a file object from being monitored. s{0!r} is not registered( R8tpopR:R;RtsocketRRtEBADFRBRR(RRRCR ((sF/usr/lib/python2.7/site-packages/pip/_vendor/urllib3/util/selectors.pyt unregisters cCsy|j|j|}Wn&tk rBtdj|nX||jkrw|j||j|||}n4||jkr|jd|}||j|j |jj|jn|t@r^|jj|jn|S(N( RRWRFRDRYtaddRRERZ(RRR2R3RC((sF/usr/lib/python2.7/site-packages/pip/_vendor/urllib3/util/selectors.pyRF&s   cCsBtt|j|}|jj|j|jj|j|S(N(RRWRJRYtdiscardRRZ(RRRC((sF/usr/lib/python2.7/site-packages/pip/_vendor/urllib3/util/selectors.pyRJ.scCstj||g|S(s? Wrapper for select.select because timeout is a positional arg (R(RtrtwR"((sF/usr/lib/python2.7/site-packages/pip/_vendor/urllib3/util/selectors.pyt_select4sc Cst|j r$t|j r$gS|dkr6dn t|d}g}t|jt|j|j|\}}}t|}t|}x|||BD]p}d}||kr|t O}n||kr|t O}n|j |}|r|j |||j @fqqW|S(Ngi(R7RYRZRtmaxR!R_tTrueRXRDRERTtappendR2( RR"treadyR]R^RRR2RC((sF/usr/lib/python2.7/site-packages/pip/_vendor/urllib3/util/selectors.pyR8s$ !      !N( R R R?RRRFRJR_R(((sF/usr/lib/python2.7/site-packages/pip/_vendor/urllib3/util/selectors.pyRWs     tpollt PollSelectorcBsDeZdZdZddZdZddZddZRS(s Poll-based selector cCs&tt|jtj|_dS(N(RReRRRdt_poll(R((sF/usr/lib/python2.7/site-packages/pip/_vendor/urllib3/util/selectors.pyRSscCsrtt|j|||}d}|t@r>|tjO}n|t@rX|tjO}n|jj|j ||S(Ni( RReRFRDRtPOLLINREtPOLLOUTRfR(RRR2R3RCt event_mask((sF/usr/lib/python2.7/site-packages/pip/_vendor/urllib3/util/selectors.pyRFWs  cCs/tt|j|}|jj|j|S(N(RReRJRfR(RRRC((sF/usr/lib/python2.7/site-packages/pip/_vendor/urllib3/util/selectors.pyRJascCsM|dk r7|dkr!d}q7tj|d}n|jj|}|S(sj Wrapper function for select.poll.poll() so that _syscall_wrapper can work with only seconds. 
ig@@N(RtmathtceilRfRd(RR"R.((sF/usr/lib/python2.7/site-packages/pip/_vendor/urllib3/util/selectors.pyt _wrap_pollfs    cCsg}t|jtd|}x|D]z\}}d}|tj@rR|tO}n|tj@rm|tO}n|j|}|r%|j |||j @fq%q%W|S(NR"i( R!RlRaRRgRERhRDRTRbR2(RR"Rct fd_eventsRRiR2RC((sF/usr/lib/python2.7/site-packages/pip/_vendor/urllib3/util/selectors.pyRts  !N( R R R?RRRFRJRlR(((sF/usr/lib/python2.7/site-packages/pip/_vendor/urllib3/util/selectors.pyReQs    tepollt EpollSelectorcBsJeZdZdZdZddZdZddZdZ RS(s Epoll-based selector cCs&tt|jtj|_dS(N(RRoRRRnt_epoll(R((sF/usr/lib/python2.7/site-packages/pip/_vendor/urllib3/util/selectors.pyRscCs |jjS(N(RpR(R((sF/usr/lib/python2.7/site-packages/pip/_vendor/urllib3/util/selectors.pyRscCsxtt|j|||}d}|t@r>|tjO}n|t@rX|tjO}nt|j jt |j ||S(Ni( RRoRFRDRtEPOLLINREtEPOLLOUTR!RpR%R(RRR2R3RCt events_mask((sF/usr/lib/python2.7/site-packages/pip/_vendor/urllib3/util/selectors.pyRFs  cCsMtt|j|}yt|jjt|jWntk rHnX|S(N(RRoRJR!RpR%RR(RRRC((sF/usr/lib/python2.7/site-packages/pip/_vendor/urllib3/util/selectors.pyRJs  c Cs|dk rG|dkr!d}ntj|dd}t|}nd}tt|jd}g}t|jj t d|d|}x|D]z\}}d}|t j @r|t O}n|t j@r|tO}n|j|}|r|j|||j@fqqW|S( Nigg@@gMbP?giR"t maxevents(RRjRkR&R`R7R8R!RpRdRaRRqRERrRDRTRbR2( RR"t max_eventsRcRmRRiR2RC((sF/usr/lib/python2.7/site-packages/pip/_vendor/urllib3/util/selectors.pyRs*      !cCs$|jjtt|jdS(N(RpRORRo(R((sF/usr/lib/python2.7/site-packages/pip/_vendor/urllib3/util/selectors.pyROs N( R R R?RRRRFRJRRO(((sF/usr/lib/python2.7/site-packages/pip/_vendor/urllib3/util/selectors.pyRos   !tkqueuetKqueueSelectorcBsJeZdZdZdZddZdZddZdZ RS(s Kqueue / Kevent-based selector cCs&tt|jtj|_dS(N(RRwRRRvt_kqueue(R((sF/usr/lib/python2.7/site-packages/pip/_vendor/urllib3/util/selectors.pyRscCs |jjS(N(RxR(R((sF/usr/lib/python2.7/site-packages/pip/_vendor/urllib3/util/selectors.pyRscCstt|j|||}|t@rhtj|jtjtj}t |j j t |gddn|t @rtj|jtjtj}t |j j t |gddn|S(Ni(RRwRFRDRtkeventRtKQ_FILTER_READt KQ_EV_ADDR!RxtcontrolR%REtKQ_FILTER_WRITE(RRR2R3RCRy((sF/usr/lib/python2.7/site-packages/pip/_vendor/urllib3/util/selectors.pyRFs   "   "cCstt|j|}|jt@r}tj|jtjtj }y#t |j j t |gddWq}tk ryq}Xn|jt@rtj|jtjtj }y#t |j j t |gddWqtk rqXn|S(Ni(RRwRJR2RDRRyRRzt KQ_EV_DELETER!RxR|R%RRER}(RRRCRy((sF/usr/lib/python2.7/site-packages/pip/_vendor/urllib3/util/selectors.pyRJs$   #    # c Cs9|dk rt|d}nt|jd}i}t|jjtd||}x|D]}|j}|j }d}|t j kr|t O}n|t j kr|tO}n|j|} | r\| j|kr| || j@f|| jsF           814BR  PKZUR &&1site-packages/pip/_vendor/urllib3/util/timeout.pynu[from __future__ import absolute_import # The default socket timeout, used by httplib to indicate that no timeout was # specified by the user from socket import _GLOBAL_DEFAULT_TIMEOUT import time from ..exceptions import TimeoutStateError # A sentinel value to indicate that no timeout was specified by the user in # urllib3 _Default = object() # Use time.monotonic if available. current_time = getattr(time, "monotonic", time.time) class Timeout(object): """ Timeout configuration. 
Timeouts can be defined as a default for a pool:: timeout = Timeout(connect=2.0, read=7.0) http = PoolManager(timeout=timeout) response = http.request('GET', 'http://example.com/') Or per-request (which overrides the default for the pool):: response = http.request('GET', 'http://example.com/', timeout=Timeout(10)) Timeouts can be disabled by setting all the parameters to ``None``:: no_timeout = Timeout(connect=None, read=None) response = http.request('GET', 'http://example.com/, timeout=no_timeout) :param total: This combines the connect and read timeouts into one; the read timeout will be set to the time leftover from the connect attempt. In the event that both a connect timeout and a total are specified, or a read timeout and a total are specified, the shorter timeout will be applied. Defaults to None. :type total: integer, float, or None :param connect: The maximum amount of time to wait for a connection attempt to a server to succeed. Omitting the parameter will default the connect timeout to the system default, probably `the global default timeout in socket.py `_. None will set an infinite timeout for connection attempts. :type connect: integer, float, or None :param read: The maximum amount of time to wait between consecutive read operations for a response from the server. Omitting the parameter will default the read timeout to the system default, probably `the global default timeout in socket.py `_. None will set an infinite timeout. :type read: integer, float, or None .. note:: Many factors can affect the total amount of time for urllib3 to return an HTTP response. For example, Python's DNS resolver does not obey the timeout specified on the socket. Other factors that can affect total request time include high CPU load, high swap, the program running at a low priority level, or other behaviors. In addition, the read and total timeouts only measure the time between read operations on the socket connecting the client and the server, not the total amount of time for the request to return a complete response. For most requests, the timeout is raised because the server has not sent the first byte in the specified time. This is not always the case; if a server streams one byte every fifteen seconds, a timeout of 20 seconds will not trigger, even though the request will take several minutes to complete. If your goal is to cut off any request after a set amount of wall clock time, consider having a second "watcher" thread to cut off a slow request. """ #: A sentinel object representing the default timeout value DEFAULT_TIMEOUT = _GLOBAL_DEFAULT_TIMEOUT def __init__(self, total=None, connect=_Default, read=_Default): self._connect = self._validate_timeout(connect, 'connect') self._read = self._validate_timeout(read, 'read') self.total = self._validate_timeout(total, 'total') self._start_connect = None def __str__(self): return '%s(connect=%r, read=%r, total=%r)' % ( type(self).__name__, self._connect, self._read, self.total) @classmethod def _validate_timeout(cls, value, name): """ Check that a timeout attribute is valid. :param value: The timeout value to validate :param name: The name of the timeout attribute to validate. This is used to specify in error messages. :return: The validated and casted version of the given value. :raises ValueError: If it is a numeric value less than or equal to zero, or the type is not an integer, float, or None. 
""" if value is _Default: return cls.DEFAULT_TIMEOUT if value is None or value is cls.DEFAULT_TIMEOUT: return value if isinstance(value, bool): raise ValueError("Timeout cannot be a boolean value. It must " "be an int, float or None.") try: float(value) except (TypeError, ValueError): raise ValueError("Timeout value %s was %s, but it must be an " "int, float or None." % (name, value)) try: if value <= 0: raise ValueError("Attempted to set %s timeout to %s, but the " "timeout cannot be set to a value less " "than or equal to 0." % (name, value)) except TypeError: # Python 3 raise ValueError("Timeout value %s was %s, but it must be an " "int, float or None." % (name, value)) return value @classmethod def from_float(cls, timeout): """ Create a new Timeout from a legacy timeout value. The timeout value used by httplib.py sets the same timeout on the connect(), and recv() socket requests. This creates a :class:`Timeout` object that sets the individual timeouts to the ``timeout`` value passed to this function. :param timeout: The legacy timeout value. :type timeout: integer, float, sentinel default object, or None :return: Timeout object :rtype: :class:`Timeout` """ return Timeout(read=timeout, connect=timeout) def clone(self): """ Create a copy of the timeout object Timeout properties are stored per-pool but each request needs a fresh Timeout object to ensure each one has its own start/stop configured. :return: a copy of the timeout object :rtype: :class:`Timeout` """ # We can't use copy.deepcopy because that will also create a new object # for _GLOBAL_DEFAULT_TIMEOUT, which socket.py uses as a sentinel to # detect the user default. return Timeout(connect=self._connect, read=self._read, total=self.total) def start_connect(self): """ Start the timeout clock, used during a connect() attempt :raises urllib3.exceptions.TimeoutStateError: if you attempt to start a timer that has been started already. """ if self._start_connect is not None: raise TimeoutStateError("Timeout timer has already been started.") self._start_connect = current_time() return self._start_connect def get_connect_duration(self): """ Gets the time elapsed since the call to :meth:`start_connect`. :return: Elapsed time. :rtype: float :raises urllib3.exceptions.TimeoutStateError: if you attempt to get duration for a timer that hasn't been started. """ if self._start_connect is None: raise TimeoutStateError("Can't get connect duration for timer " "that has not started.") return current_time() - self._start_connect @property def connect_timeout(self): """ Get the value to use when setting a connection timeout. This will be a positive float or integer, the value None (never timeout), or the default system timeout. :return: Connect timeout. :rtype: int, float, :attr:`Timeout.DEFAULT_TIMEOUT` or None """ if self.total is None: return self._connect if self._connect is None or self._connect is self.DEFAULT_TIMEOUT: return self.total return min(self._connect, self.total) @property def read_timeout(self): """ Get the value for the read timeout. This assumes some time has elapsed in the connection timeout and computes the read timeout appropriately. If self.total is set, the read timeout is dependent on the amount of time taken by the connect timeout. If the connection time has not been established, a :exc:`~urllib3.exceptions.TimeoutStateError` will be raised. :return: Value to use for the read timeout. 
:rtype: int, float, :attr:`Timeout.DEFAULT_TIMEOUT` or None :raises urllib3.exceptions.TimeoutStateError: If :meth:`start_connect` has not yet been called on this object. """ if (self.total is not None and self.total is not self.DEFAULT_TIMEOUT and self._read is not None and self._read is not self.DEFAULT_TIMEOUT): # In case the connect timeout has not yet been established. if self._start_connect is None: return self._read return max(0, min(self.total - self.get_connect_duration(), self._read)) elif self.total is not None and self.total is not self.DEFAULT_TIMEOUT: return max(0, self.total - self.get_connect_duration()) else: return self._read PKZ31uu.site-packages/pip/_vendor/urllib3/util/url.pycnu[ abc@@sddlmZddlmZddlZddlmZdddd d d d gZdZ ej dZ ddl m Z dedefdYZdZdZdZdS(i(tabsolute_import(t namedtupleNi(tLocationParseErrortschemetauththosttporttpathtquerytfragmentthttpthttpss[- ](tquotetUrlcB@sweZdZdZddddddddZedZedZedZ edZ dZ RS( s Datastructure for representing an HTTP URL. Used as a return value for :func:`parse_url`. Both the scheme and host are normalized as they are both case-insensitive according to RFC 3986. c C@s|r#|jd r#d|}n|r8|j}n|rY|tkrY|j}ntt|j||||||||S(Nt/(t startswithtlowertNORMALIZABLE_SCHEMEStsuperR t__new__(tclsRRRRRRR ((s@/usr/lib/python2.7/site-packages/pip/_vendor/urllib3/util/url.pyRs !cC@s|jS(s@For backwards-compatibility with urlparse. We're nice like that.(R(tself((s@/usr/lib/python2.7/site-packages/pip/_vendor/urllib3/util/url.pythostname$scC@s6|jp d}|jdk r2|d|j7}n|S(s)Absolute path including the query string.Rt?N(RRtNone(Rturi((s@/usr/lib/python2.7/site-packages/pip/_vendor/urllib3/util/url.pyt request_uri)scC@s$|jrd|j|jfS|jS(s(Network location including host and ports%s:%d(RR(R((s@/usr/lib/python2.7/site-packages/pip/_vendor/urllib3/util/url.pytnetloc3s c C@s|\}}}}}}}d}|dk r>||d7}n|dk r[||d7}n|dk rt||7}n|dk r|dt|7}n|dk r||7}n|dk r|d|7}n|dk r|d|7}n|S(s Convert self into a url This function should more or less round-trip with :func:`.parse_url`. The returned url may not be exactly the same as the url inputted to :func:`.parse_url`, but it should be equivalent by the RFC (e.g., urls with a blank port will have : removed). Example: :: >>> U = parse_url('http://google.com/mail/') >>> U.url 'http://google.com/mail/' >>> Url('http', 'username:password', 'host.com', 80, ... '/path', 'query', 'fragment').url 'http://username:password@host.com:80/path?query#fragment' ts://t@t:Rt#N(Rtstr( RRRRRRRR turl((s@/usr/lib/python2.7/site-packages/pip/_vendor/urllib3/util/url.pyR!:s"         cC@s|jS(N(R!(R((s@/usr/lib/python2.7/site-packages/pip/_vendor/urllib3/util/url.pyt__str__bs(N( t__name__t __module__t__doc__t __slots__RRtpropertyRRRR!R"(((s@/usr/lib/python2.7/site-packages/pip/_vendor/urllib3/util/url.pyR s  (cC@sd}d}xV|D]N}|j|}|dkr:qn|dksR||kr|}|}qqW|dks}|dkr|ddfS|| ||d|fS(s Given a string and an iterable of delimiters, split on the first found delimiter. Return two split parts and the matched delimiter. If not found, then the first part is the full input string. Example:: >>> split_first('foo/bar?baz', '?/=') ('foo', 'bar?baz', '/') >>> split_first('foo/bar?baz', '123') ('foo/bar?baz', '', None) Scales linearly with number of delims. Not ideal for large number of delims. 
iRiN(Rtfind(tstdelimstmin_idxt min_delimtdtidx((s@/usr/lib/python2.7/site-packages/pip/_vendor/urllib3/util/url.pyt split_firstfs    c C@s;|s tStjd|}d }d }d }d }d }d }d }d|krs|jdd\}}nt|dddg\}}} | r| |}nd|kr|jdd\}}n|r |dd kr |jd d\}}|d 7}nd |kr|jd d\} }|s<| }n|r|js]t|nyt |}Wqt k rt|qXqd }n| r|r|}n|st|||||||Sd|kr|jdd\}}nd|kr|jdd\}}nt|||||||S( s: Given a url, return a parsed :class:`.Url` namedtuple. Best-effort is performed to parse incomplete urls. Fields not provided will be None. Partly backwards-compatible with :mod:`urlparse`. Example:: >>> parse_url('http://google.com/mail/') Url(scheme='http', host='google.com', port=None, path='/mail/', ...) >>> parse_url('google.com:80') Url(scheme=None, host='google.com', port=80, path=None, ...) >>> parse_url('/foo?bar') Url(scheme=None, host=None, port=None, path='/foo', query='bar', ...) cS@st|jS(N(R tgroup(tmatch((s@/usr/lib/python2.7/site-packages/pip/_vendor/urllib3/util/url.pytRs://iRRRRit[t]RN( R t!_contains_disallowed_url_pchar_retsubRtsplitR/trsplittisdigitRtintt ValueError( R!RRRRRR Rtpath_tdelimt_host((s@/usr/lib/python2.7/site-packages/pip/_vendor/urllib3/util/url.pyt parse_urlsR !            cC@s(t|}|jpd|j|jfS(s4 Deprecated. Use :func:`parse_url` instead. R (R?RRR(R!tp((s@/usr/lib/python2.7/site-packages/pip/_vendor/urllib3/util/url.pytget_hosts (R R N(t __future__Rt collectionsRtret exceptionsRt url_attrsRRtcompileR5tpackages.six.moves.urllib.parseR R R/R?RA(((s@/usr/lib/python2.7/site-packages/pip/_vendor/urllib3/util/url.pyts U ! aPKZ 2site-packages/pip/_vendor/urllib3/util/request.pyonu[ abc@@sddlmZddlmZddlmZmZddlmZdZ e Z d d d d d d dZ dZd Zd S( i(tabsolute_import(t b64encodei(tbt integer_types(tUnrewindableBodyErrors gzip,deflatecC@si}|rRt|trn't|tr?dj|}nt}||d>> make_headers(keep_alive=True, user_agent="Batman/1.0") {'connection': 'keep-alive', 'user-agent': 'Batman/1.0'} >>> make_headers(accept_encoding=True) {'accept-encoding': 'gzip,deflate'} t,saccept-encodings user-agents keep-alivet connectionsBasic sutf-8t authorizationsproxy-authorizationsno-caches cache-control(t isinstancetstrtlisttjointACCEPT_ENCODINGRRtdecode(t keep_alivetaccept_encodingt user_agentt basic_authtproxy_basic_autht disable_cachetheaders((sD/usr/lib/python2.7/site-packages/pip/_vendor/urllib3/util/request.pyt make_headers s*$   ## cC@sk|dk rt||nKt|dddk rgy|j}Wqgttfk rct}qgXn|S(s If a position is provided, move file to that point. Otherwise, we'll attempt to record a position for future use. ttellN(tNonet rewind_bodytgetattrRtIOErrortOSErrort _FAILEDTELL(tbodytpos((sD/usr/lib/python2.7/site-packages/pip/_vendor/urllib3/util/request.pytset_file_positionMs  cC@st|dd}|dk rdt|trdy||Wqttfk r`tdqXn1|tkrtdntdt |dS(s Attempt to rewind body to a certain position. Primarily used for request redirects and retries. :param body: File-like object that supports seek. :param int pos: Position to seek to in file. 
tseeksAAn error occurred when rewinding request body for redirect/retry.sRUnable to record file position for rewinding request body during a redirect/retry.s4body_pos must be of type integer, instead it was %s.N( RRRRRRRRt ValueErrorttype(Rtbody_post body_seek((sD/usr/lib/python2.7/site-packages/pip/_vendor/urllib3/util/request.pyR_s  N(t __future__Rtbase64Rt packages.sixRRt exceptionsRR tobjectRRRRR(((sD/usr/lib/python2.7/site-packages/pip/_vendor/urllib3/util/request.pyts  A PKZ<#.site-packages/pip/_vendor/urllib3/util/wait.pynu[from .selectors import ( HAS_SELECT, DefaultSelector, EVENT_READ, EVENT_WRITE ) def _wait_for_io_events(socks, events, timeout=None): """ Waits for IO events to be available from a list of sockets or optionally a single socket if passed in. Returns a list of sockets that can be interacted with immediately. """ if not HAS_SELECT: raise ValueError('Platform does not have a selector') if not isinstance(socks, list): # Probably just a single socket. if hasattr(socks, "fileno"): socks = [socks] # Otherwise it might be a non-list iterable. else: socks = list(socks) with DefaultSelector() as selector: for sock in socks: selector.register(sock, events) return [key[0].fileobj for key in selector.select(timeout) if key[1] & events] def wait_for_read(socks, timeout=None): """ Waits for reading to be available from a list of sockets or optionally a single socket if passed in. Returns a list of sockets that can be read from immediately. """ return _wait_for_io_events(socks, EVENT_READ, timeout) def wait_for_write(socks, timeout=None): """ Waits for writing to be available from a list of sockets or optionally a single socket if passed in. Returns a list of sockets that can be written to immediately. """ return _wait_for_io_events(socks, EVENT_WRITE, timeout) PKZe//.site-packages/pip/_vendor/urllib3/util/ssl_.pynu[from __future__ import absolute_import import errno import warnings import hmac from binascii import hexlify, unhexlify from hashlib import md5, sha1, sha256 from ..exceptions import SSLError, InsecurePlatformWarning, SNIMissingWarning SSLContext = None HAS_SNI = False IS_PYOPENSSL = False IS_SECURETRANSPORT = False # Maps the length of a digest to a possible hash function producing this digest HASHFUNC_MAP = { 32: md5, 40: sha1, 64: sha256, } def _const_compare_digest_backport(a, b): """ Compare two digests of equal length in constant time. The digests must be of type str/bytes. Returns True if the digests match, and False otherwise. """ result = abs(len(a) - len(b)) for l, r in zip(bytearray(a), bytearray(b)): result |= l ^ r return result == 0 _const_compare_digest = getattr(hmac, 'compare_digest', _const_compare_digest_backport) try: # Test for SSL features import ssl from ssl import wrap_socket, CERT_NONE, PROTOCOL_SSLv23 from ssl import HAS_SNI # Has SNI? except ImportError: pass try: from ssl import OP_NO_SSLv2, OP_NO_SSLv3, OP_NO_COMPRESSION except ImportError: OP_NO_SSLv2, OP_NO_SSLv3 = 0x1000000, 0x2000000 OP_NO_COMPRESSION = 0x20000 # A secure default. 
# Sources for more information on TLS ciphers: # # - https://wiki.mozilla.org/Security/Server_Side_TLS # - https://www.ssllabs.com/projects/best-practices/index.html # - https://hynek.me/articles/hardening-your-web-servers-ssl-ciphers/ # # The general intent is: # - Prefer TLS 1.3 cipher suites # - prefer cipher suites that offer perfect forward secrecy (DHE/ECDHE), # - prefer ECDHE over DHE for better performance, # - prefer any AES-GCM and ChaCha20 over any AES-CBC for better performance and # security, # - prefer AES-GCM over ChaCha20 because hardware-accelerated AES is common, # - disable NULL authentication, MD5 MACs and DSS for security reasons. DEFAULT_CIPHERS = ':'.join([ 'TLS13-AES-256-GCM-SHA384', 'TLS13-CHACHA20-POLY1305-SHA256', 'TLS13-AES-128-GCM-SHA256', 'ECDH+AESGCM', 'ECDH+CHACHA20', 'DH+AESGCM', 'DH+CHACHA20', 'ECDH+AES256', 'DH+AES256', 'ECDH+AES128', 'DH+AES', 'RSA+AESGCM', 'RSA+AES', '!aNULL', '!eNULL', '!MD5', ]) try: from ssl import SSLContext # Modern SSL? except ImportError: import sys class SSLContext(object): # Platform-specific: Python 2 & 3.1 supports_set_ciphers = ((2, 7) <= sys.version_info < (3,) or (3, 2) <= sys.version_info) def __init__(self, protocol_version): self.protocol = protocol_version # Use default values from a real SSLContext self.check_hostname = False self.verify_mode = ssl.CERT_NONE self.ca_certs = None self.options = 0 self.certfile = None self.keyfile = None self.ciphers = None def load_cert_chain(self, certfile, keyfile): self.certfile = certfile self.keyfile = keyfile def load_verify_locations(self, cafile=None, capath=None): self.ca_certs = cafile if capath is not None: raise SSLError("CA directories not supported in older Pythons") def set_ciphers(self, cipher_suite): if not self.supports_set_ciphers: raise TypeError( 'Your version of Python does not support setting ' 'a custom cipher suite. Please upgrade to Python ' '2.7, 3.2, or later if you need this functionality.' ) self.ciphers = cipher_suite def wrap_socket(self, socket, server_hostname=None, server_side=False): warnings.warn( 'A true SSLContext object is not available. This prevents ' 'urllib3 from configuring SSL appropriately and may cause ' 'certain SSL connections to fail. You can upgrade to a newer ' 'version of Python to solve this. For more information, see ' 'https://urllib3.readthedocs.io/en/latest/advanced-usage.html' '#ssl-warnings', InsecurePlatformWarning ) kwargs = { 'keyfile': self.keyfile, 'certfile': self.certfile, 'ca_certs': self.ca_certs, 'cert_reqs': self.verify_mode, 'ssl_version': self.protocol, 'server_side': server_side, } if self.supports_set_ciphers: # Platform-specific: Python 2.7+ return wrap_socket(socket, ciphers=self.ciphers, **kwargs) else: # Platform-specific: Python 2.6 return wrap_socket(socket, **kwargs) def assert_fingerprint(cert, fingerprint): """ Checks if given fingerprint matches the supplied certificate. :param cert: Certificate as bytes object. :param fingerprint: Fingerprint as string of hexdigits, can be interspersed by colons. """ fingerprint = fingerprint.replace(':', '').lower() digest_length = len(fingerprint) hashfunc = HASHFUNC_MAP.get(digest_length) if not hashfunc: raise SSLError( 'Fingerprint of invalid length: {0}'.format(fingerprint)) # We need encode() here for py32; works on py2 and p33. fingerprint_bytes = unhexlify(fingerprint.encode()) cert_digest = hashfunc(cert).digest() if not _const_compare_digest(cert_digest, fingerprint_bytes): raise SSLError('Fingerprints did not match. Expected "{0}", got "{1}".' 
.format(fingerprint, hexlify(cert_digest))) def resolve_cert_reqs(candidate): """ Resolves the argument to a numeric constant, which can be passed to the wrap_socket function/method from the ssl module. Defaults to :data:`ssl.CERT_NONE`. If given a string it is assumed to be the name of the constant in the :mod:`ssl` module or its abbrevation. (So you can specify `REQUIRED` instead of `CERT_REQUIRED`. If it's neither `None` nor a string we assume it is already the numeric constant which can directly be passed to wrap_socket. """ if candidate is None: return CERT_NONE if isinstance(candidate, str): res = getattr(ssl, candidate, None) if res is None: res = getattr(ssl, 'CERT_' + candidate) return res return candidate def resolve_ssl_version(candidate): """ like resolve_cert_reqs """ if candidate is None: return PROTOCOL_SSLv23 if isinstance(candidate, str): res = getattr(ssl, candidate, None) if res is None: res = getattr(ssl, 'PROTOCOL_' + candidate) return res return candidate def create_urllib3_context(ssl_version=None, cert_reqs=None, options=None, ciphers=None): """All arguments have the same meaning as ``ssl_wrap_socket``. By default, this function does a lot of the same work that ``ssl.create_default_context`` does on Python 3.4+. It: - Disables SSLv2, SSLv3, and compression - Sets a restricted set of server ciphers If you wish to enable SSLv3, you can do:: from urllib3.util import ssl_ context = ssl_.create_urllib3_context() context.options &= ~ssl_.OP_NO_SSLv3 You can do the same to enable compression (substituting ``COMPRESSION`` for ``SSLv3`` in the last line above). :param ssl_version: The desired protocol version to use. This will default to PROTOCOL_SSLv23 which will negotiate the highest protocol that both the server and your installation of OpenSSL support. :param cert_reqs: Whether to require the certificate verification. This defaults to ``ssl.CERT_REQUIRED``. :param options: Specific OpenSSL options. These default to ``ssl.OP_NO_SSLv2``, ``ssl.OP_NO_SSLv3``, ``ssl.OP_NO_COMPRESSION``. :param ciphers: Which cipher suites to allow the server to select. :returns: Constructed SSLContext object with specified options :rtype: SSLContext """ context = SSLContext(ssl_version or ssl.PROTOCOL_SSLv23) # Setting the default here, as we may have no ssl module on import cert_reqs = ssl.CERT_REQUIRED if cert_reqs is None else cert_reqs if options is None: options = 0 # SSLv2 is easily broken and is considered harmful and dangerous options |= OP_NO_SSLv2 # SSLv3 has several problems and is now dangerous options |= OP_NO_SSLv3 # Disable compression to prevent CRIME attacks for OpenSSL 1.0+ # (issue #309) options |= OP_NO_COMPRESSION context.options |= options if getattr(context, 'supports_set_ciphers', True): # Platform-specific: Python 2.6 context.set_ciphers(ciphers or DEFAULT_CIPHERS) context.verify_mode = cert_reqs if getattr(context, 'check_hostname', None) is not None: # Platform-specific: Python 3.2 # We do our own verification, including fingerprints and alternative # hostnames. So disable it here context.check_hostname = False return context def ssl_wrap_socket(sock, keyfile=None, certfile=None, cert_reqs=None, ca_certs=None, server_hostname=None, ssl_version=None, ciphers=None, ssl_context=None, ca_cert_dir=None): """ All arguments except for server_hostname, ssl_context, and ca_cert_dir have the same meaning as they do when using :func:`ssl.wrap_socket`. 
:param server_hostname: When SNI is supported, the expected hostname of the certificate :param ssl_context: A pre-made :class:`SSLContext` object. If none is provided, one will be created using :func:`create_urllib3_context`. :param ciphers: A string of ciphers we wish the client to support. This is not supported on Python 2.6 as the ssl module does not support it. :param ca_cert_dir: A directory containing CA certificates in multiple separate files, as supported by OpenSSL's -CApath flag or the capath argument to SSLContext.load_verify_locations(). """ context = ssl_context if context is None: # Note: This branch of code and all the variables in it are no longer # used by urllib3 itself. We should consider deprecating and removing # this code. context = create_urllib3_context(ssl_version, cert_reqs, ciphers=ciphers) if ca_certs or ca_cert_dir: try: context.load_verify_locations(ca_certs, ca_cert_dir) except IOError as e: # Platform-specific: Python 2.6, 2.7, 3.2 raise SSLError(e) # Py33 raises FileNotFoundError which subclasses OSError # These are not equivalent unless we check the errno attribute except OSError as e: # Platform-specific: Python 3.3 and beyond if e.errno == errno.ENOENT: raise SSLError(e) raise elif ssl_context is None and hasattr(context, 'load_default_certs'): # try to load OS default certs; works well on Windows (require Python3.4+) context.load_default_certs() if certfile: context.load_cert_chain(certfile, keyfile) if HAS_SNI: # Platform-specific: OpenSSL with enabled SNI return context.wrap_socket(sock, server_hostname=server_hostname) warnings.warn( 'An HTTPS request has been made, but the SNI (Subject Name ' 'Indication) extension to TLS is not available on this platform. ' 'This may cause the server to present an incorrect TLS ' 'certificate, which can cause validation failures. You can upgrade to ' 'a newer version of Python to solve this. 
For more information, see ' 'https://urllib3.readthedocs.io/en/latest/advanced-usage.html' '#ssl-warnings', SNIMissingWarning ) return context.wrap_socket(sock) PKZ' q440site-packages/pip/_vendor/urllib3/exceptions.pycnu[ abc@@s(ddlmZddlmZdefdYZdefdYZdefd YZ d e fd YZ d efd YZ defdYZ defdYZ defdYZeZde fdYZde fdYZdefdYZdefdYZdee fdYZdefdYZd ee fd!YZd"e fd#YZd$e fd%YZd&eefd'YZd(efd)YZd*efd+YZd,efd-YZd.efd/YZd0efd1YZd2efd3YZ d4efd5YZ!d6efd7YZ"d8efd9YZ#d:eefd;YZ$d<efd=YZ%d>eefd?YZd@efdAYZ&dBe'efdCYZ(dDefdEYZ)dFefdGYZ*dHS(Ii(tabsolute_importi(tIncompleteReadt HTTPErrorcB@seZdZRS(s#Base exception used by this module.(t__name__t __module__t__doc__(((sB/usr/lib/python2.7/site-packages/pip/_vendor/urllib3/exceptions.pyRst HTTPWarningcB@seZdZRS(s!Base warning used by this module.(RRR(((sB/usr/lib/python2.7/site-packages/pip/_vendor/urllib3/exceptions.pyR st PoolErrorcB@s eZdZdZdZRS(s/Base exception for errors caused within a pool.cC@s'||_tj|d||fdS(Ns%s: %s(tpoolRt__init__(tselfRtmessage((sB/usr/lib/python2.7/site-packages/pip/_vendor/urllib3/exceptions.pyR s cC@s |jdfS(N(NN(t __class__tNone(R ((sB/usr/lib/python2.7/site-packages/pip/_vendor/urllib3/exceptions.pyt __reduce__s(RRRR R(((sB/usr/lib/python2.7/site-packages/pip/_vendor/urllib3/exceptions.pyRs t RequestErrorcB@s eZdZdZdZRS(s8Base exception for PoolErrors that have associated URLs.cC@s ||_tj|||dS(N(turlRR (R RRR ((sB/usr/lib/python2.7/site-packages/pip/_vendor/urllib3/exceptions.pyR s cC@s|jd|jdffS(N(R R R(R ((sB/usr/lib/python2.7/site-packages/pip/_vendor/urllib3/exceptions.pyR#s(RRRR R(((sB/usr/lib/python2.7/site-packages/pip/_vendor/urllib3/exceptions.pyRs tSSLErrorcB@seZdZRS(s9Raised when SSL certificate fails in an HTTPS connection.(RRR(((sB/usr/lib/python2.7/site-packages/pip/_vendor/urllib3/exceptions.pyR(st ProxyErrorcB@seZdZRS(s,Raised when the connection to a proxy fails.(RRR(((sB/usr/lib/python2.7/site-packages/pip/_vendor/urllib3/exceptions.pyR-st DecodeErrorcB@seZdZRS(s;Raised when automatic decoding based on Content-Type fails.(RRR(((sB/usr/lib/python2.7/site-packages/pip/_vendor/urllib3/exceptions.pyR2st ProtocolErrorcB@seZdZRS(s>Raised when something unexpected happens mid-request/response.(RRR(((sB/usr/lib/python2.7/site-packages/pip/_vendor/urllib3/exceptions.pyR7st MaxRetryErrorcB@seZdZddZRS(s Raised when the maximum number of retries is exceeded. :param pool: The connection pool :type pool: :class:`~urllib3.connectionpool.HTTPConnectionPool` :param string url: The requested Url :param exceptions.Exception reason: The underlying error cC@s3||_d||f}tj||||dS(Ns0Max retries exceeded with url: %s (Caused by %r)(treasonRR (R RRRR ((sB/usr/lib/python2.7/site-packages/pip/_vendor/urllib3/exceptions.pyR Ls  N(RRRR R (((sB/usr/lib/python2.7/site-packages/pip/_vendor/urllib3/exceptions.pyRBstHostChangedErrorcB@seZdZddZRS(s?Raised when an existing pool gets a request for a foreign host.icC@s-d|}tj||||||_dS(Ns)Tried to open a foreign host with url: %s(RR tretries(R RRRR ((sB/usr/lib/python2.7/site-packages/pip/_vendor/urllib3/exceptions.pyR Xs (RRRR (((sB/usr/lib/python2.7/site-packages/pip/_vendor/urllib3/exceptions.pyRUstTimeoutStateErrorcB@seZdZRS(s3 Raised when passing an invalid state to a timeout (RRR(((sB/usr/lib/python2.7/site-packages/pip/_vendor/urllib3/exceptions.pyR^st TimeoutErrorcB@seZdZRS(s Raised when a socket timeout error occurs. Catching this error will catch both :exc:`ReadTimeoutErrors ` and :exc:`ConnectTimeoutErrors `. 
(RRR(((sB/usr/lib/python2.7/site-packages/pip/_vendor/urllib3/exceptions.pyRcstReadTimeoutErrorcB@seZdZRS(sFRaised when a socket timeout occurs while receiving data from a server(RRR(((sB/usr/lib/python2.7/site-packages/pip/_vendor/urllib3/exceptions.pyRlstConnectTimeoutErrorcB@seZdZRS(s@Raised when a socket timeout occurs while connecting to a server(RRR(((sB/usr/lib/python2.7/site-packages/pip/_vendor/urllib3/exceptions.pyRsstNewConnectionErrorcB@seZdZRS(sHRaised when we fail to establish a new connection. Usually ECONNREFUSED.(RRR(((sB/usr/lib/python2.7/site-packages/pip/_vendor/urllib3/exceptions.pyRxstEmptyPoolErrorcB@seZdZRS(sCRaised when a pool runs out of connections and no more are allowed.(RRR(((sB/usr/lib/python2.7/site-packages/pip/_vendor/urllib3/exceptions.pyR}stClosedPoolErrorcB@seZdZRS(sCRaised when a request enters a pool after the pool has been closed.(RRR(((sB/usr/lib/python2.7/site-packages/pip/_vendor/urllib3/exceptions.pyRstLocationValueErrorcB@seZdZRS(s<Raised when there is something wrong with a given URL input.(RRR(((sB/usr/lib/python2.7/site-packages/pip/_vendor/urllib3/exceptions.pyR stLocationParseErrorcB@seZdZdZRS(s=Raised when get_host or similar fails to parse the URL input.cC@s'd|}tj||||_dS(NsFailed to parse: %s(RR tlocation(R R"R ((sB/usr/lib/python2.7/site-packages/pip/_vendor/urllib3/exceptions.pyR s (RRRR (((sB/usr/lib/python2.7/site-packages/pip/_vendor/urllib3/exceptions.pyR!st ResponseErrorcB@seZdZdZdZRS(sDUsed as a container for an error reason supplied in a MaxRetryError.stoo many error responsess&too many {status_code} error responses(RRRt GENERIC_ERRORtSPECIFIC_ERROR(((sB/usr/lib/python2.7/site-packages/pip/_vendor/urllib3/exceptions.pyR#stSecurityWarningcB@seZdZRS(s/Warned when perfoming security reducing actions(RRR(((sB/usr/lib/python2.7/site-packages/pip/_vendor/urllib3/exceptions.pyR&stSubjectAltNameWarningcB@seZdZRS(sBWarned when connecting to a host with a certificate missing a SAN.(RRR(((sB/usr/lib/python2.7/site-packages/pip/_vendor/urllib3/exceptions.pyR'stInsecureRequestWarningcB@seZdZRS(s/Warned when making an unverified HTTPS request.(RRR(((sB/usr/lib/python2.7/site-packages/pip/_vendor/urllib3/exceptions.pyR(stSystemTimeWarningcB@seZdZRS(s0Warned when system time is suspected to be wrong(RRR(((sB/usr/lib/python2.7/site-packages/pip/_vendor/urllib3/exceptions.pyR)stInsecurePlatformWarningcB@seZdZRS(sEWarned when certain SSL configuration is not available on a platform.(RRR(((sB/usr/lib/python2.7/site-packages/pip/_vendor/urllib3/exceptions.pyR*stSNIMissingWarningcB@seZdZRS(s9Warned when making a HTTPS request without SNI available.(RRR(((sB/usr/lib/python2.7/site-packages/pip/_vendor/urllib3/exceptions.pyR+stDependencyWarningcB@seZdZRS(sc Warned when an attempt is made to import a module with missing optional dependencies. (RRR(((sB/usr/lib/python2.7/site-packages/pip/_vendor/urllib3/exceptions.pyR,stResponseNotChunkedcB@seZdZRS(s;Response needs to be chunked in order to read it as chunks.(RRR(((sB/usr/lib/python2.7/site-packages/pip/_vendor/urllib3/exceptions.pyR-stBodyNotHttplibCompatiblecB@seZdZRS(sz Body should be httplib.HTTPResponse like (have an fp attribute which returns raw chunks) for read_chunked(). (RRR(((sB/usr/lib/python2.7/site-packages/pip/_vendor/urllib3/exceptions.pyR.sRcB@s eZdZdZdZRS(s Response length doesn't match expected Content-Length Subclass of http_client.IncompleteRead to allow int value for `partial` to avoid creating large objects on streamed reads. 
cC@stt|j||dS(N(tsuperRR (R tpartialtexpected((sB/usr/lib/python2.7/site-packages/pip/_vendor/urllib3/exceptions.pyR scC@sd|j|jfS(Ns/IncompleteRead(%i bytes read, %i more expected)(R0R1(R ((sB/usr/lib/python2.7/site-packages/pip/_vendor/urllib3/exceptions.pyt__repr__s(RRRR R2(((sB/usr/lib/python2.7/site-packages/pip/_vendor/urllib3/exceptions.pyRs t InvalidHeadercB@seZdZRS(s(The header provided was somehow invalid.(RRR(((sB/usr/lib/python2.7/site-packages/pip/_vendor/urllib3/exceptions.pyR3stProxySchemeUnknowncB@seZdZdZRS(s1ProxyManager does not support the supplied schemecC@s$d|}tt|j|dS(NsNot supported proxy scheme %s(R/R4R (R tschemeR ((sB/usr/lib/python2.7/site-packages/pip/_vendor/urllib3/exceptions.pyR s (RRRR (((sB/usr/lib/python2.7/site-packages/pip/_vendor/urllib3/exceptions.pyR4stHeaderParsingErrorcB@seZdZdZRS(sNRaised by assert_header_parsing, but we convert it to a log.warning statement.cC@s0d|p d|f}tt|j|dS(Ns%s, unparsed data: %rtUnknown(R/R6R (R tdefectst unparsed_dataR ((sB/usr/lib/python2.7/site-packages/pip/_vendor/urllib3/exceptions.pyR s(RRRR (((sB/usr/lib/python2.7/site-packages/pip/_vendor/urllib3/exceptions.pyR6stUnrewindableBodyErrorcB@seZdZRS(s9urllib3 encountered an error when trying to rewind a body(RRR(((sB/usr/lib/python2.7/site-packages/pip/_vendor/urllib3/exceptions.pyR:sN(+t __future__Rtpackages.six.moves.http_clientRthttplib_IncompleteReadt ExceptionRtWarningRRRRRRRtConnectionErrorRRRRRRRRRt ValueErrorR R!R#R&R'R(R)R*R+R,R-R.R3tAssertionErrorR4R6R:(((sB/usr/lib/python2.7/site-packages/pip/_vendor/urllib3/exceptions.pytsH      PKZ  .site-packages/pip/_vendor/urllib3/__init__.pycnu[ abc@@sdZddlmZddlZddlmZmZmZddlm Z ddl m Z ddl m Z mZmZdd lmZdd lmZdd lmZdd lmZdd lmZddlZyddlmZWn*ek rdejfdYZnXdZdZdZ d'Z!ej"e#j$eej%d"Z&[ej'd#e j(d$e)ej'd%e j*d$e)ej'd%e j+d$e)ej'd%e j,d$e)e j-d&Z.dS((s8 urllib3 - Thread-safe connection pooling and re-using. i(tabsolute_importNi(tHTTPConnectionPooltHTTPSConnectionPooltconnection_from_url(t exceptions(tencode_multipart_formdata(t PoolManagert ProxyManagertproxy_from_url(t HTTPResponse(t make_headers(tget_host(tTimeout(tRetry(t NullHandlerRcB@seZdZRS(cC@sdS(N((tselftrecord((s@/usr/lib/python2.7/site-packages/pip/_vendor/urllib3/__init__.pytemits(t__name__t __module__R(((s@/usr/lib/python2.7/site-packages/pip/_vendor/urllib3/__init__.pyRss(Andrey Petrov (andrey.petrov@shazow.net)tMITs1.22RRRRR R R tadd_stderr_loggerRtdisable_warningsRR R RcC@s_tjt}tj}|jtjd|j||j||jdt|S(s Helper for quickly adding a StreamHandler to the logger. Useful for debugging. Returns the handler after adding it. s%%(asctime)s %(levelname)s %(message)ss,Added a stderr logging handler to logger: %s( tloggingt getLoggerRt StreamHandlert setFormattert Formattert addHandlertsetLeveltdebug(tleveltloggerthandler((s@/usr/lib/python2.7/site-packages/pip/_vendor/urllib3/__init__.pyR9s    talwaystappendtdefaultcC@stjd|dS(s< Helper for quickly disabling all urllib3 warnings. 
tignoreN(twarningst simplefilter(tcategory((s@/usr/lib/python2.7/site-packages/pip/_vendor/urllib3/__init__.pyR]s(RRRRR R R RRRRR R R(/t__doc__t __future__RR&tconnectionpoolRRRtRtfilepostRt poolmanagerRRRtresponseR t util.requestR tutil.urlR t util.timeoutR t util.retryR RRt ImportErrortHandlert __author__t __license__t __version__t__all__RRRtDEBUGRR'tSecurityWarningtTruetSubjectAltNameWarningtInsecurePlatformWarningtSNIMissingWarningt HTTPWarningR(((s@/usr/lib/python2.7/site-packages/pip/_vendor/urllib3/__init__.pytsT   PKZB::1site-packages/pip/_vendor/urllib3/poolmanager.pyonu[ abc@@sddlmZddlZddlZddlZddlmZddlmZm Z ddlm Z ddl m Z m Z mZddlmZdd lmZdd lmZdd lmZd d dgZejeZd4Zd5Zejd-eZd.Ziej eed/6ej eed06Z!ied/6e d06Z"d efd1YZ#d e#fd2YZ$d3Z%dS(6i(tabsolute_importNi(tRecentlyUsedContainer(tHTTPConnectionPooltHTTPSConnectionPool(tport_by_scheme(tLocationValueErrort MaxRetryErrortProxySchemeUnknown(turljoin(tRequestMethods(t parse_url(tRetryt PoolManagert ProxyManagertproxy_from_urltkey_filet cert_filet cert_reqstca_certst ssl_versiont ca_cert_dirt ssl_contextt key_schemetkey_hosttkey_portt key_timeoutt key_retriest key_strictt key_blocktkey_source_addresst key_key_filet key_cert_filet key_cert_reqst key_ca_certstkey_ssl_versiontkey_ca_cert_dirtkey_ssl_contextt key_maxsizet key_headerst key__proxytkey__proxy_headerstkey_socket_optionstkey__socks_optionstkey_assert_hostnametkey_assert_fingerprinttPoolKeycC@s|j}|dj|d<|dj|d>> manager = PoolManager(num_pools=2) >>> r = manager.request('GET', 'http://google.com/') >>> r = manager.request('GET', 'http://google.com/mail') >>> r = manager.request('GET', 'http://yahoo.com/') >>> len(manager.pools) 2 i cK@sMtj||||_t|dd|_t|_tj|_dS(Nt dispose_funccS@s |jS(N(tclose(tp((sC/usr/lib/python2.7/site-packages/pip/_vendor/urllib3/poolmanager.pytt(R t__init__tconnection_pool_kwRtpoolstpool_classes_by_schemetkey_fn_by_schemeR5(tselft num_poolsR0RO((sC/usr/lib/python2.7/site-packages/pip/_vendor/urllib3/poolmanager.pyRNs    cC@s|S(N((RS((sC/usr/lib/python2.7/site-packages/pip/_vendor/urllib3/poolmanager.pyt __enter__scC@s|jtS(N(tcleartFalse(RStexc_typetexc_valtexc_tb((sC/usr/lib/python2.7/site-packages/pip/_vendor/urllib3/poolmanager.pyt__exit__s cC@s|j|}|dkr+|jj}nxdD]}|j|dq2W|dkr|x!tD]}|j|dq_Wn||||S(s Create a new :class:`ConnectionPool` based on host, port, scheme, and any additional pool keyword arguments. If ``request_context`` is provided, it is provided as keyword arguments to the pool class used. This method is used to actually create the connection pools handed out by :meth:`connection_from_url` and companion methods. It is intended to be overridden for customization. R.R/tportRGN(R.R/R\(RQR7ROR5R>t SSL_KEYWORDS(RSR.R/R\RAtpool_clsRCtkw((sC/usr/lib/python2.7/site-packages/pip/_vendor/urllib3/poolmanager.pyt _new_pools     cC@s|jjdS(s Empty our store of pools and direct them all to close. This will not affect in-flight connections, but they will not be re-used after completion. N(RPRV(RS((sC/usr/lib/python2.7/site-packages/pip/_vendor/urllib3/poolmanager.pyRVsRGcC@sz|stdn|j|}|p-d|d<|sYtj|djd}n||d<||d<|j|S(s Get a :class:`ConnectionPool` based on the host, port, and scheme. If ``port`` isn't given, it will be derived from the ``scheme`` using ``urllib3.connectionpool.port_by_scheme``. If ``pool_kwargs`` is provided, it is merged with the instance's ``connection_pool_kw`` variable and used to create the new connection pool, if one is needed. 
[site-packages/pip/_vendor/urllib3/poolmanager.pyc -- remainder of compiled CPython 2.7 bytecode omitted; its readable docstrings describe PoolManager/ProxyManager behaviour (a ProxyManager behaves like PoolManager but sends every request through the configured proxy, using CONNECT for HTTPS URLs)]
[site-packages/pip/_vendor/urllib3/response.pyo -- compiled CPython 2.7 bytecode omitted; per its docstrings it contains the HTTPResponse container (body loaded and decoded on demand, gzip/deflate decoders, chunked reads) compatible with the standard io module]
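A minimal usage sketch of the pool-manager interface described by the docstrings above; the proxy URL and target URLs are illustrative, and the vendored package is assumed to be importable as ``pip._vendor.urllib3``::

    from pip._vendor import urllib3

    # ProxyManager behaves like PoolManager but routes every request
    # through the configured proxy (CONNECT is used for HTTPS URLs).
    proxy = urllib3.ProxyManager('http://localhost:3128/')
    r1 = proxy.request('GET', 'http://google.com/')
    r2 = proxy.request('GET', 'http://httpbin.org/')

    # Pools are keyed by scheme/host/port, so the plain-HTTP requests
    # above share a single connection pool.
    print(len(proxy.pools))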
site-packages/pip/_vendor/urllib3/exceptions.py:

from __future__ import absolute_import
from .packages.six.moves.http_client import (
    IncompleteRead as httplib_IncompleteRead
)

# Base Exceptions

class HTTPError(Exception):
    "Base exception used by this module."
    pass

class HTTPWarning(Warning):
    "Base warning used by this module."
    pass

class PoolError(HTTPError):
    "Base exception for errors caused within a pool."
    def __init__(self, pool, message):
        self.pool = pool
        HTTPError.__init__(self, "%s: %s" % (pool, message))

    def __reduce__(self):
        # For pickling purposes.
        return self.__class__, (None, None)

class RequestError(PoolError):
    "Base exception for PoolErrors that have associated URLs."
    def __init__(self, pool, url, message):
        self.url = url
        PoolError.__init__(self, pool, message)

    def __reduce__(self):
        # For pickling purposes.
        return self.__class__, (None, self.url, None)

class SSLError(HTTPError):
    "Raised when SSL certificate fails in an HTTPS connection."
    pass

class ProxyError(HTTPError):
    "Raised when the connection to a proxy fails."
    pass

class DecodeError(HTTPError):
    "Raised when automatic decoding based on Content-Type fails."
    pass

class ProtocolError(HTTPError):
    "Raised when something unexpected happens mid-request/response."
    pass

#: Renamed to ProtocolError but aliased for backwards compatibility.
ConnectionError = ProtocolError

# Leaf Exceptions

class MaxRetryError(RequestError):
    """Raised when the maximum number of retries is exceeded.

    :param pool: The connection pool
    :type pool: :class:`~urllib3.connectionpool.HTTPConnectionPool`
    :param string url: The requested Url
    :param exceptions.Exception reason: The underlying error
    """

    def __init__(self, pool, url, reason=None):
        self.reason = reason
        message = "Max retries exceeded with url: %s (Caused by %r)" % (
            url, reason)
        RequestError.__init__(self, pool, url, message)

class HostChangedError(RequestError):
    "Raised when an existing pool gets a request for a foreign host."

    def __init__(self, pool, url, retries=3):
        message = "Tried to open a foreign host with url: %s" % url
        RequestError.__init__(self, pool, url, message)
        self.retries = retries

class TimeoutStateError(HTTPError):
    """ Raised when passing an invalid state to a timeout """
    pass

class TimeoutError(HTTPError):
    """ Raised when a socket timeout error occurs.

    Catching this error will catch both :exc:`ReadTimeoutErrors
    <ReadTimeoutError>` and :exc:`ConnectTimeoutErrors <ConnectTimeoutError>`.
    """
    pass

class ReadTimeoutError(TimeoutError, RequestError):
    "Raised when a socket timeout occurs while receiving data from a server"
    pass

# This timeout error does not have a URL attached and needs to inherit from the
# base HTTPError
class ConnectTimeoutError(TimeoutError):
    "Raised when a socket timeout occurs while connecting to a server"
    pass

class NewConnectionError(ConnectTimeoutError, PoolError):
    "Raised when we fail to establish a new connection. Usually ECONNREFUSED."
    pass

class EmptyPoolError(PoolError):
    "Raised when a pool runs out of connections and no more are allowed."
    pass

class ClosedPoolError(PoolError):
    "Raised when a request enters a pool after the pool has been closed."
    pass

class LocationValueError(ValueError, HTTPError):
    "Raised when there is something wrong with a given URL input."
    pass

class LocationParseError(LocationValueError):
    "Raised when get_host or similar fails to parse the URL input."

    def __init__(self, location):
        message = "Failed to parse: %s" % location
        HTTPError.__init__(self, message)
        self.location = location

class ResponseError(HTTPError):
    "Used as a container for an error reason supplied in a MaxRetryError."
    GENERIC_ERROR = 'too many error responses'
    SPECIFIC_ERROR = 'too many {status_code} error responses'

class SecurityWarning(HTTPWarning):
    "Warned when perfoming security reducing actions"
    pass

class SubjectAltNameWarning(SecurityWarning):
    "Warned when connecting to a host with a certificate missing a SAN."
    pass

class InsecureRequestWarning(SecurityWarning):
    "Warned when making an unverified HTTPS request."
    pass

class SystemTimeWarning(SecurityWarning):
    "Warned when system time is suspected to be wrong"
    pass

class InsecurePlatformWarning(SecurityWarning):
    "Warned when certain SSL configuration is not available on a platform."
    pass

class SNIMissingWarning(HTTPWarning):
    "Warned when making a HTTPS request without SNI available."
    pass

class DependencyWarning(HTTPWarning):
    """
    Warned when an attempt is made to import a module with missing optional
    dependencies.
    """
    pass

class ResponseNotChunked(ProtocolError, ValueError):
    "Response needs to be chunked in order to read it as chunks."
    pass

class BodyNotHttplibCompatible(HTTPError):
    """
    Body should be httplib.HTTPResponse like (have an fp attribute which
    returns raw chunks) for read_chunked().
    """
    pass

class IncompleteRead(HTTPError, httplib_IncompleteRead):
    """
    Response length doesn't match expected Content-Length

    Subclass of http_client.IncompleteRead to allow int value
    for `partial` to avoid creating large objects on streamed
    reads.
    """
    def __init__(self, partial, expected):
        super(IncompleteRead, self).__init__(partial, expected)

    def __repr__(self):
        return ('IncompleteRead(%i bytes read, '
                '%i more expected)' % (self.partial, self.expected))

class InvalidHeader(HTTPError):
    "The header provided was somehow invalid."
    pass

class ProxySchemeUnknown(AssertionError, ValueError):
    "ProxyManager does not support the supplied scheme"
    # TODO(t-8ch): Stop inheriting from AssertionError in v2.0.

    def __init__(self, scheme):
        message = "Not supported proxy scheme %s" % scheme
        super(ProxySchemeUnknown, self).__init__(message)

class HeaderParsingError(HTTPError):
    "Raised by assert_header_parsing, but we convert it to a log.warning statement."

    def __init__(self, defects, unparsed_data):
        message = '%s, unparsed data: %r' % (defects or 'Unknown', unparsed_data)
        super(HeaderParsingError, self).__init__(message)

class UnrewindableBodyError(HTTPError):
    "urllib3 encountered an error when trying to rewind a body"
    pass
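A short sketch of how callers of the vendored urllib3 typically use the exception hierarchy above; the unreachable port is illustrative and the import path assumes the vendored package location shown in the file paths::

    from pip._vendor import urllib3
    from pip._vendor.urllib3.exceptions import MaxRetryError, NewConnectionError
    from pip._vendor.urllib3.util.retry import Retry

    http = urllib3.PoolManager()
    try:
        # Small retry budget so a refused connection fails quickly.
        http.request('GET', 'http://127.0.0.1:9/', retries=Retry(total=1))
    except MaxRetryError as exc:
        # MaxRetryError wraps the underlying cause, e.g. NewConnectionError.
        print('request failed:', exc.reason)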
[site-packages/pip/_vendor/urllib3/contrib/__init__.pyc -- compiled bytecode of an empty package __init__, omitted]
[site-packages/pip/_vendor/urllib3/contrib/ntlmpool.pyo -- compiled CPython 2.7 bytecode omitted; per its docstring, an NTLM authenticating pool contributed by erikcederstran (issue #10)]
[site-packages/pip/_vendor/urllib3/contrib/socks.pyc -- compiled CPython 2.7 bytecode omitted; per its docstring, provisional SOCKS4/SOCKS4a/SOCKS5 proxy support via PySocks (SOCKSProxyManager), with the known limitation that literal IPv6 target addresses and IPv6 connections to the proxy are not supported]
[site-packages/pip/_vendor/urllib3/contrib/appengine.pyc -- compiled bytecode of appengine.py (source included below), omitted]
[site-packages/pip/_vendor/urllib3/contrib/securetransport.pyc -- compiled CPython 2.7 bytecode omitted; SecureTransport-backed TLS support for macOS, activated with inject_into_urllib3() and undone with extract_from_urllib3()]
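A hedged sketch of the SOCKS support noted above, assuming PySocks is installed and a SOCKS5 proxy is listening on localhost:1080 (both the address and the target URL are illustrative)::

    from pip._vendor.urllib3.contrib.socks import SOCKSProxyManager

    proxy = SOCKSProxyManager('socks5://localhost:1080/')
    r = proxy.request('GET', 'http://example.com/')
    print(r.status)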
site-packages/pip/_vendor/urllib3/contrib/appengine.py:

"""
This module provides a pool manager that uses Google App Engine's
`URLFetch Service `_.

Example usage::

    from urllib3 import PoolManager
    from urllib3.contrib.appengine import AppEngineManager, is_appengine_sandbox

    if is_appengine_sandbox():
        # AppEngineManager uses AppEngine's URLFetch API behind the scenes
        http = AppEngineManager()
    else:
        # PoolManager uses a socket-level API behind the scenes
        http = PoolManager()

    r = http.request('GET', 'https://google.com/')

There are `limitations `_ to the URLFetch service and it may not be the best
choice for your application. There are three options for using urllib3 on
Google App Engine:

1. You can use :class:`AppEngineManager` with URLFetch. URLFetch is
   cost-effective in many circumstances as long as your usage is within the
   limitations.
2. You can use a normal :class:`~urllib3.PoolManager` by enabling sockets.
   Sockets also have `limitations and restrictions `_ and have a lower free
   quota than URLFetch. To use sockets, be sure to specify the following in
   your ``app.yaml``::

        env_variables:
            GAE_USE_SOCKETS_HTTPLIB : 'true'

3. If you are using `App Engine Flexible `_, you can use the standard
   :class:`PoolManager` without any configuration or special environment
   variables.
"""

from __future__ import absolute_import
import logging
import os
import warnings
from ..packages.six.moves.urllib.parse import urljoin

from ..exceptions import (
    HTTPError,
    HTTPWarning,
    MaxRetryError,
    ProtocolError,
    TimeoutError,
    SSLError
)
from ..packages.six import BytesIO
from ..request import RequestMethods
from ..response import HTTPResponse
from ..util.timeout import Timeout
from ..util.retry import Retry

try:
    from google.appengine.api import urlfetch
except ImportError:
    urlfetch = None


log = logging.getLogger(__name__)


class AppEnginePlatformWarning(HTTPWarning):
    pass


class AppEnginePlatformError(HTTPError):
    pass


class AppEngineManager(RequestMethods):
    """
    Connection manager for Google App Engine sandbox applications.

    This manager uses the URLFetch service directly instead of using the
    emulated httplib, and is subject to URLFetch limitations as described in
    the App Engine documentation `here `_.

    Notably it will raise an :class:`AppEnginePlatformError` if:
        * URLFetch is not available.
        * If you attempt to use this on App Engine Flexible, as full socket
          support is available.
        * If a request size is more than 10 megabytes.
        * If a response size is more than 32 megabtyes.
        * If you use an unsupported request method such as OPTIONS.

    Beyond those cases, it will raise normal urllib3 errors.
    """

    def __init__(self, headers=None, retries=None, validate_certificate=True,
                 urlfetch_retries=True):
        if not urlfetch:
            raise AppEnginePlatformError(
                "URLFetch is not available in this environment.")

        if is_prod_appengine_mvms():
            raise AppEnginePlatformError(
                "Use normal urllib3.PoolManager instead of AppEngineManager"
                "on Managed VMs, as using URLFetch is not necessary in "
                "this environment.")

        warnings.warn(
            "urllib3 is using URLFetch on Google App Engine sandbox instead "
            "of sockets. To use sockets directly instead of URLFetch see "
            "https://urllib3.readthedocs.io/en/latest/reference/urllib3.contrib.html.",
            AppEnginePlatformWarning)

        RequestMethods.__init__(self, headers)
        self.validate_certificate = validate_certificate
        self.urlfetch_retries = urlfetch_retries

        self.retries = retries or Retry.DEFAULT

    def __enter__(self):
        return self

    def __exit__(self, exc_type, exc_val, exc_tb):
        # Return False to re-raise any potential exceptions
        return False

    def urlopen(self, method, url, body=None, headers=None,
                retries=None, redirect=True, timeout=Timeout.DEFAULT_TIMEOUT,
                **response_kw):

        retries = self._get_retries(retries, redirect)

        try:
            follow_redirects = (
                redirect and
                retries.redirect != 0 and
                retries.total)
            response = urlfetch.fetch(
                url,
                payload=body,
                method=method,
                headers=headers or {},
                allow_truncated=False,
                follow_redirects=self.urlfetch_retries and follow_redirects,
                deadline=self._get_absolute_timeout(timeout),
                validate_certificate=self.validate_certificate,
            )
        except urlfetch.DeadlineExceededError as e:
            raise TimeoutError(self, e)

        except urlfetch.InvalidURLError as e:
            if 'too large' in str(e):
                raise AppEnginePlatformError(
                    "URLFetch request too large, URLFetch only "
                    "supports requests up to 10mb in size.", e)
            raise ProtocolError(e)

        except urlfetch.DownloadError as e:
            if 'Too many redirects' in str(e):
                raise MaxRetryError(self, url, reason=e)
            raise ProtocolError(e)

        except urlfetch.ResponseTooLargeError as e:
            raise AppEnginePlatformError(
                "URLFetch response too large, URLFetch only supports"
                "responses up to 32mb in size.", e)

        except urlfetch.SSLCertificateError as e:
            raise SSLError(e)

        except urlfetch.InvalidMethodError as e:
            raise AppEnginePlatformError(
                "URLFetch does not support method: %s" % method, e)

        http_response = self._urlfetch_response_to_http_response(
            response, retries=retries, **response_kw)

        # Handle redirect?
        redirect_location = redirect and http_response.get_redirect_location()
        if redirect_location:
            # Check for redirect response
            if (self.urlfetch_retries and retries.raise_on_redirect):
                raise MaxRetryError(self, url, "too many redirects")
            else:
                if http_response.status == 303:
                    method = 'GET'

                try:
                    retries = retries.increment(method, url, response=http_response, _pool=self)
                except MaxRetryError:
                    if retries.raise_on_redirect:
                        raise MaxRetryError(self, url, "too many redirects")
                    return http_response

                retries.sleep_for_retry(http_response)
                log.debug("Redirecting %s -> %s", url, redirect_location)
                redirect_url = urljoin(url, redirect_location)
                return self.urlopen(
                    method, redirect_url, body, headers,
                    retries=retries, redirect=redirect,
                    timeout=timeout, **response_kw)

        # Check if we should retry the HTTP response.
        has_retry_after = bool(http_response.getheader('Retry-After'))
        if retries.is_retry(method, http_response.status, has_retry_after):
            retries = retries.increment(
                method, url, response=http_response, _pool=self)
            log.debug("Retry: %s", url)
            retries.sleep(http_response)
            return self.urlopen(
                method, url,
                body=body, headers=headers,
                retries=retries, redirect=redirect,
                timeout=timeout, **response_kw)

        return http_response

    def _urlfetch_response_to_http_response(self, urlfetch_resp, **response_kw):

        if is_prod_appengine():
            # Production GAE handles deflate encoding automatically, but does
            # not remove the encoding header.
            content_encoding = urlfetch_resp.headers.get('content-encoding')

            if content_encoding == 'deflate':
                del urlfetch_resp.headers['content-encoding']

        transfer_encoding = urlfetch_resp.headers.get('transfer-encoding')
        # We have a full response's content,
        # so let's make sure we don't report ourselves as chunked data.
        if transfer_encoding == 'chunked':
            encodings = transfer_encoding.split(",")
            encodings.remove('chunked')
            urlfetch_resp.headers['transfer-encoding'] = ','.join(encodings)

        return HTTPResponse(
            # In order for decoding to work, we must present the content as
            # a file-like object.
            body=BytesIO(urlfetch_resp.content),
            headers=urlfetch_resp.headers,
            status=urlfetch_resp.status_code,
            **response_kw
        )

    def _get_absolute_timeout(self, timeout):
        if timeout is Timeout.DEFAULT_TIMEOUT:
            return None  # Defer to URLFetch's default.
        if isinstance(timeout, Timeout):
            if timeout._read is not None or timeout._connect is not None:
                warnings.warn(
                    "URLFetch does not support granular timeout settings, "
                    "reverting to total or default URLFetch timeout.",
                    AppEnginePlatformWarning)
            return timeout.total
        return timeout

    def _get_retries(self, retries, redirect):
        if not isinstance(retries, Retry):
            retries = Retry.from_int(
                retries, redirect=redirect, default=self.retries)

        if retries.connect or retries.read or retries.redirect:
            warnings.warn(
                "URLFetch only supports total retries and does not "
                "recognize connect, read, or redirect retry parameters.",
                AppEnginePlatformWarning)

        return retries


def is_appengine():
    return (is_local_appengine() or
            is_prod_appengine() or
            is_prod_appengine_mvms())


def is_appengine_sandbox():
    return is_appengine() and not is_prod_appengine_mvms()


def is_local_appengine():
    return ('APPENGINE_RUNTIME' in os.environ and
            'Development/' in os.environ['SERVER_SOFTWARE'])


def is_prod_appengine():
    return ('APPENGINE_RUNTIME' in os.environ and
            'Google App Engine/' in os.environ['SERVER_SOFTWARE'] and
            not is_prod_appengine_mvms())


def is_prod_appengine_mvms():
    return os.environ.get('GAE_VM', False) == 'true'
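A brief sketch, following the pattern from the module docstring above, of choosing between AppEngineManager and PoolManager; it assumes the code runs where google.appengine.api.urlfetch is importable, since AppEngineManager raises AppEnginePlatformError otherwise, and the target URL is illustrative::

    from pip._vendor.urllib3 import PoolManager
    from pip._vendor.urllib3.contrib.appengine import (
        AppEngineManager, is_appengine_sandbox)
    from pip._vendor.urllib3.util.retry import Retry

    # URLFetch only honours the total retry count (see _get_retries above).
    if is_appengine_sandbox():
        http = AppEngineManager(retries=Retry(total=3))
    else:
        http = PoolManager()
    r = http.request('GET', 'https://www.google.com/')
    print(r.status)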
[site-packages/pip/_vendor/urllib3/contrib/__init__.pyo -- compiled bytecode of an empty package __init__, omitted]
[site-packages/pip/_vendor/urllib3/contrib/pyopenssl.pyo -- compiled CPython 2.7 bytecode omitted; per its docstring, provides SNI-capable SSL certificate verification for Python 2 via pyOpenSSL, cryptography, and idna, activated by calling urllib3.contrib.pyopenssl.inject_into_urllib3() before making HTTP requests and undone with extract_from_urllib3()]
[site-packages/pip/_vendor/urllib3/contrib/pyopenssl.pyc -- compiled bytecode duplicating pyopenssl.pyo above, omitted]
[site-packages/pip/_vendor/urllib3/contrib/securetransport.pyo -- compiled CPython 2.7 bytecode omitted; the archive is truncated partway through this entry]
We do this in two cases: first, when cert validation is entirely disabled; and second, when using a custom trust DB. NtrbsFailed to copy trust references)certificate verify failed, error code: %d(tostpathtisfiletopentreadRRt SecTrustRefRtSSLCopyPeerTrustRJR#tbyrefRtssltSSLErrortSecTrustSetAnchorCertificatest!SecTrustSetAnchorCertificatesOnlyRtSecTrustResultTypetSecTrustEvaluateRt CFReleaseRtkSecTrustResultUnspecifiedtkSecTrustResultProceedtvalue( RRtverifyt trust_bundletft cert_arrayttrustR\t trust_resultt successes((sO/usr/lib/python2.7/site-packages/pip/_vendor/urllib3/contrib/securetransport.pyt_custom_validatebs@        c C@s[tjdtjtj|_tj|jtt } t | t @t |d} x| t krw| dd} qZW|t | Z    (       > icC@s%|jd7_t|||dtS(NiRT(RKR R(RRtmodetbufsize((sO/usr/lib/python2.7/site-packages/pip/_vendor/urllib3/contrib/securetransport.pytmakefilestrcO@sd}t|||||S(Ni(R (RRRt bufferingtargstkwargs((sO/usr/lib/python2.7/site-packages/pip/_vendor/urllib3/contrib/securetransport.pyRsRcB@seZdZdZedZejdZedZejdZedZejdZdZ d Z d Z dddd Z ddd Zeeedd ZRS(s I am a wrapper class for the SecureTransport library, to translate the interface of the standard library ``SSLContext`` object to calls into SecureTransport. cC@sPt|\|_|_d|_t|_d|_d|_d|_ d|_ dS(Ni( t_protocol_to_min_maxt _min_versiont _max_versiont_optionsRt_verifyRt _trust_bundlet _client_certt _client_keyt_client_key_passphrase(RRtprotocol((sO/usr/lib/python2.7/site-packages/pip/_vendor/urllib3/contrib/securetransport.pyRSs     cC@stS(s SecureTransport cannot have its hostname checking disabled. For more, see the comment on getpeercert() in this file. (R(RR((sO/usr/lib/python2.7/site-packages/pip/_vendor/urllib3/contrib/securetransport.pytcheck_hostnamescC@sdS(s SecureTransport cannot have its hostname checking disabled. For more, see the comment on getpeercert() in this file. 
N((RRRp((sO/usr/lib/python2.7/site-packages/pip/_vendor/urllib3/contrib/securetransport.pyRscC@s|jS(N(R(RR((sO/usr/lib/python2.7/site-packages/pip/_vendor/urllib3/contrib/securetransport.pytoptionsscC@s ||_dS(N(R(RRRp((sO/usr/lib/python2.7/site-packages/pip/_vendor/urllib3/contrib/securetransport.pyRscC@s|jrtjStjS(N(RRgt CERT_REQUIREDt CERT_NONE(RR((sO/usr/lib/python2.7/site-packages/pip/_vendor/urllib3/contrib/securetransport.pyt verify_modescC@s"|tjkrtnt|_dS(N(RgRRRR(RRRp((sO/usr/lib/python2.7/site-packages/pip/_vendor/urllib3/contrib/securetransport.pyRscC@sdS(N((RR((sO/usr/lib/python2.7/site-packages/pip/_vendor/urllib3/contrib/securetransport.pytset_default_verify_pathss cC@s |jS(N(R(RR((sO/usr/lib/python2.7/site-packages/pip/_vendor/urllib3/contrib/securetransport.pytload_default_certsscC@s%|tjjkr!tdndS(Ns5SecureTransport doesn't support custom cipher strings(RRtDEFAULT_CIPHERSR(RRR[((sO/usr/lib/python2.7/site-packages/pip/_vendor/urllib3/contrib/securetransport.pyt set_ciphersscC@s.|dk rtdn|p$||_dS(Ns1SecureTransport does not support cert directories(RRR(RRtcafiletcapathtcadata((sO/usr/lib/python2.7/site-packages/pip/_vendor/urllib3/contrib/securetransport.pytload_verify_locationss  cC@s||_||_||_dS(N(RRt_client_cert_passphrase(RRtcertfiletkeyfiletpassword((sO/usr/lib/python2.7/site-packages/pip/_vendor/urllib3/contrib/securetransport.pytload_cert_chains  c C@sGt|}|j||j|j|j|j|j|j|j|S(N( RIRRRRRRRR(RRtsockt server_sidetdo_handshake_on_connecttsuppress_ragged_eofsRR5((sO/usr/lib/python2.7/site-packages/pip/_vendor/urllib3/contrib/securetransport.pyt wrap_sockets N(RRRRStpropertyRtsetterRRRRRRRRRRR(((sO/usr/lib/python2.7/site-packages/pip/_vendor/urllib3/contrib/securetransport.pyRs      (gRt __future__RRR#R)tos.pathR_RR!Rgt threadingtweakrefR Rt_securetransport.bindingsRRRt_securetransport.low_levelRRRRR t ImportErrorRtpackages.backports.makefileR R&t NameErrort__all__RRRRRRtWeakValueDictionaryRtLockRRtTLS_AES_256_GCM_SHA384tTLS_CHACHA20_POLY1305_SHA256tTLS_AES_128_GCM_SHA256t'TLS_ECDHE_ECDSA_WITH_AES_256_GCM_SHA384t%TLS_ECDHE_RSA_WITH_AES_256_GCM_SHA384t'TLS_ECDHE_ECDSA_WITH_AES_128_GCM_SHA256t%TLS_ECDHE_RSA_WITH_AES_128_GCM_SHA256t#TLS_DHE_DSS_WITH_AES_256_GCM_SHA384t#TLS_DHE_RSA_WITH_AES_256_GCM_SHA384t#TLS_DHE_DSS_WITH_AES_128_GCM_SHA256t#TLS_DHE_RSA_WITH_AES_128_GCM_SHA256t'TLS_ECDHE_ECDSA_WITH_AES_256_CBC_SHA384t%TLS_ECDHE_RSA_WITH_AES_256_CBC_SHA384t$TLS_ECDHE_ECDSA_WITH_AES_256_CBC_SHAt"TLS_ECDHE_RSA_WITH_AES_256_CBC_SHAt#TLS_DHE_RSA_WITH_AES_256_CBC_SHA256t#TLS_DHE_DSS_WITH_AES_256_CBC_SHA256t TLS_DHE_RSA_WITH_AES_256_CBC_SHAt TLS_DHE_DSS_WITH_AES_256_CBC_SHAt'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256t%TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256t$TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHAt"TLS_ECDHE_RSA_WITH_AES_128_CBC_SHAt#TLS_DHE_RSA_WITH_AES_128_CBC_SHA256t#TLS_DHE_DSS_WITH_AES_128_CBC_SHA256t TLS_DHE_RSA_WITH_AES_128_CBC_SHAt TLS_DHE_DSS_WITH_AES_128_CBC_SHAtTLS_RSA_WITH_AES_256_GCM_SHA384tTLS_RSA_WITH_AES_128_GCM_SHA256tTLS_RSA_WITH_AES_256_CBC_SHA256tTLS_RSA_WITH_AES_128_CBC_SHA256tTLS_RSA_WITH_AES_256_CBC_SHAtTLS_RSA_WITH_AES_128_CBC_SHARYt kTLSProtocol1tkTLSProtocol12tPROTOCOL_SSLv23Rthasattrt kSSLProtocol2Rt kSSLProtocol3RRtkTLSProtocol11RRRR R R?RHt SSLReadFuncR}t SSLWriteFuncR~tobjectRIRR(((sO/usr/lib/python2.7/site-packages/pip/_vendor/urllib3/contrib/securetransport.pyts         "         9 5 PKZS)332site-packages/pip/_vendor/urllib3/contrib/socks.pynu[# -*- coding: utf-8 -*- """ This module contains provisional support for SOCKS proxies from within urllib3. 
This module supports SOCKS4 (specifically the SOCKS4A variant) and SOCKS5. To enable its functionality, either install PySocks or install this module with the ``socks`` extra. The SOCKS implementation supports the full range of urllib3 features. It also supports the following SOCKS features: - SOCKS4 - SOCKS4a - SOCKS5 - Usernames and passwords for the SOCKS proxy Known Limitations: - Currently PySocks does not support contacting remote websites via literal IPv6 addresses. Any such connection attempt will fail. You must use a domain name. - Currently PySocks does not support IPv6 connections to the SOCKS proxy. Any such connection attempt will fail. """ from __future__ import absolute_import try: import socks except ImportError: import warnings from ..exceptions import DependencyWarning warnings.warn(( 'SOCKS support in urllib3 requires the installation of optional ' 'dependencies: specifically, PySocks. For more information, see ' 'https://urllib3.readthedocs.io/en/latest/contrib.html#socks-proxies' ), DependencyWarning ) raise from socket import error as SocketError, timeout as SocketTimeout from ..connection import ( HTTPConnection, HTTPSConnection ) from ..connectionpool import ( HTTPConnectionPool, HTTPSConnectionPool ) from ..exceptions import ConnectTimeoutError, NewConnectionError from ..poolmanager import PoolManager from ..util.url import parse_url try: import ssl except ImportError: ssl = None class SOCKSConnection(HTTPConnection): """ A plain-text HTTP connection that connects via a SOCKS proxy. """ def __init__(self, *args, **kwargs): self._socks_options = kwargs.pop('_socks_options') super(SOCKSConnection, self).__init__(*args, **kwargs) def _new_conn(self): """ Establish a new connection via the SOCKS proxy. """ extra_kw = {} if self.source_address: extra_kw['source_address'] = self.source_address if self.socket_options: extra_kw['socket_options'] = self.socket_options try: conn = socks.create_connection( (self.host, self.port), proxy_type=self._socks_options['socks_version'], proxy_addr=self._socks_options['proxy_host'], proxy_port=self._socks_options['proxy_port'], proxy_username=self._socks_options['username'], proxy_password=self._socks_options['password'], proxy_rdns=self._socks_options['rdns'], timeout=self.timeout, **extra_kw ) except SocketTimeout as e: raise ConnectTimeoutError( self, "Connection to %s timed out. (connect timeout=%s)" % (self.host, self.timeout)) except socks.ProxyError as e: # This is fragile as hell, but it seems to be the only way to raise # useful errors here. if e.socket_err: error = e.socket_err if isinstance(error, SocketTimeout): raise ConnectTimeoutError( self, "Connection to %s timed out. (connect timeout=%s)" % (self.host, self.timeout) ) else: raise NewConnectionError( self, "Failed to establish a new connection: %s" % error ) else: raise NewConnectionError( self, "Failed to establish a new connection: %s" % e ) except SocketError as e: # Defensive: PySocks should catch all these. raise NewConnectionError( self, "Failed to establish a new connection: %s" % e) return conn # We don't need to duplicate the Verified/Unverified distinction from # urllib3/connection.py here because the HTTPSConnection will already have been # correctly set to either the Verified or Unverified form by that module. This # means the SOCKSHTTPSConnection will automatically be the correct type. 
class SOCKSHTTPSConnection(SOCKSConnection, HTTPSConnection): pass class SOCKSHTTPConnectionPool(HTTPConnectionPool): ConnectionCls = SOCKSConnection class SOCKSHTTPSConnectionPool(HTTPSConnectionPool): ConnectionCls = SOCKSHTTPSConnection class SOCKSProxyManager(PoolManager): """ A version of the urllib3 ProxyManager that routes connections via the defined SOCKS proxy. """ pool_classes_by_scheme = { 'http': SOCKSHTTPConnectionPool, 'https': SOCKSHTTPSConnectionPool, } def __init__(self, proxy_url, username=None, password=None, num_pools=10, headers=None, **connection_pool_kw): parsed = parse_url(proxy_url) if parsed.scheme == 'socks5': socks_version = socks.PROXY_TYPE_SOCKS5 rdns = False elif parsed.scheme == 'socks5h': socks_version = socks.PROXY_TYPE_SOCKS5 rdns = True elif parsed.scheme == 'socks4': socks_version = socks.PROXY_TYPE_SOCKS4 rdns = False elif parsed.scheme == 'socks4a': socks_version = socks.PROXY_TYPE_SOCKS4 rdns = True else: raise ValueError( "Unable to determine SOCKS version from %s" % proxy_url ) self.proxy_url = proxy_url socks_options = { 'socks_version': socks_version, 'proxy_host': parsed.host, 'proxy_port': parsed.port, 'username': username, 'password': password, 'rdns': rdns } connection_pool_kw['_socks_options'] = socks_options super(SOCKSProxyManager, self).__init__( num_pools, headers, **connection_pool_kw ) self.pool_classes_by_scheme = SOCKSProxyManager.pool_classes_by_scheme PKZ5site-packages/pip/_vendor/urllib3/contrib/__init__.pynu[PKZq((7site-packages/pip/_vendor/urllib3/contrib/appengine.pyonu[ abc@@sxdZddlmZddlZddlZddlZddlmZddlm Z m Z m Z m Z m Z mZddlmZddlmZdd lmZdd lmZdd lmZydd lmZWnek rdZnXejeZd e fdYZ de fdYZ!defdYZ"dZ#dZ$dZ%dZ&dZ'dS(sC This module provides a pool manager that uses Google App Engine's `URLFetch Service `_. Example usage:: from urllib3 import PoolManager from urllib3.contrib.appengine import AppEngineManager, is_appengine_sandbox if is_appengine_sandbox(): # AppEngineManager uses AppEngine's URLFetch API behind the scenes http = AppEngineManager() else: # PoolManager uses a socket-level API behind the scenes http = PoolManager() r = http.request('GET', 'https://google.com/') There are `limitations `_ to the URLFetch service and it may not be the best choice for your application. There are three options for using urllib3 on Google App Engine: 1. You can use :class:`AppEngineManager` with URLFetch. URLFetch is cost-effective in many circumstances as long as your usage is within the limitations. 2. You can use a normal :class:`~urllib3.PoolManager` by enabling sockets. Sockets also have `limitations and restrictions `_ and have a lower free quota than URLFetch. To use sockets, be sure to specify the following in your ``app.yaml``:: env_variables: GAE_USE_SOCKETS_HTTPLIB : 'true' 3. If you are using `App Engine Flexible `_, you can use the standard :class:`PoolManager` without any configuration or special environment variables. i(tabsolute_importNi(turljoin(t HTTPErrort HTTPWarningt MaxRetryErrort ProtocolErrort TimeoutErrortSSLError(tBytesIO(tRequestMethods(t HTTPResponse(tTimeout(tRetry(turlfetchtAppEnginePlatformWarningcB@seZRS((t__name__t __module__(((sI/usr/lib/python2.7/site-packages/pip/_vendor/urllib3/contrib/appengine.pyRGstAppEnginePlatformErrorcB@seZRS((RR(((sI/usr/lib/python2.7/site-packages/pip/_vendor/urllib3/contrib/appengine.pyRKstAppEngineManagercB@skeZdZddeedZdZdZdddeej dZ dZ dZ dZ RS( s  Connection manager for Google App Engine sandbox applications. 
This manager uses the URLFetch service directly instead of using the emulated httplib, and is subject to URLFetch limitations as described in the App Engine documentation `here `_. Notably it will raise an :class:`AppEnginePlatformError` if: * URLFetch is not available. * If you attempt to use this on App Engine Flexible, as full socket support is available. * If a request size is more than 10 megabytes. * If a response size is more than 32 megabtyes. * If you use an unsupported request method such as OPTIONS. Beyond those cases, it will raise normal urllib3 errors. cC@sutstdntr-tdntjdttj||||_||_ |pkt j |_ dS(Ns.URLFetch is not available in this environment.sUse normal urllib3.PoolManager instead of AppEngineManageron Managed VMs, as using URLFetch is not necessary in this environment.surllib3 is using URLFetch on Google App Engine sandbox instead of sockets. To use sockets directly instead of URLFetch see https://urllib3.readthedocs.io/en/latest/reference/urllib3.contrib.html.( R Rtis_prod_appengine_mvmstwarningstwarnRR t__init__tvalidate_certificateturlfetch_retriesR tDEFAULTtretries(tselftheadersRRR((sI/usr/lib/python2.7/site-packages/pip/_vendor/urllib3/contrib/appengine.pyRcs     cC@s|S(N((R((sI/usr/lib/python2.7/site-packages/pip/_vendor/urllib3/contrib/appengine.pyt __enter__{scC@stS(N(tFalse(Rtexc_typetexc_valtexc_tb((sI/usr/lib/python2.7/site-packages/pip/_vendor/urllib3/contrib/appengine.pyt__exit__~scK@sk|j||}yv|o0|jdko0|j} tj|d|d|d|pTidtd|joi| d|j|d|j} Wn"tj k r} t || ntj k r} d t | krt d | nt| ntjk r?} d t | kr0t||d | nt| nntjk rc} t d | nJtjk r} t| n)tjk r} t d|| nX|j| d||} |o| j} | r|jr|jrt||dq| jdkrd}ny"|j||d| d|}Wn0tk rp|jrlt||dn| SX|j| tjd|| t|| }|j||||d|d|d||Snt| j d}|j!|| j|rg|j||d| d|}tjd||j"| |j||d|d|d|d|d||S| S(NitpayloadtmethodRtallow_truncatedtfollow_redirectstdeadlineRs too largesOURLFetch request too large, URLFetch only supports requests up to 10mb in size.sToo many redirectstreasonsPURLFetch response too large, URLFetch only supportsresponses up to 32mb in size.s$URLFetch does not support method: %sRstoo many redirectsi/tGETtresponset_poolsRedirecting %s -> %stredirectttimeouts Retry-Afters Retry: %stbody(#t _get_retriesR,ttotalR tfetchRRt_get_absolute_timeoutRtDeadlineExceededErrorRtInvalidURLErrortstrRRt DownloadErrorRtResponseTooLargeErrortSSLCertificateErrorRtInvalidMethodErrort#_urlfetch_response_to_http_responsetget_redirect_locationtraise_on_redirecttstatust incrementtsleep_for_retrytlogtdebugRturlopentboolt getheadertis_retrytsleep(RR$turlR.RRR,R-t response_kwR&R*tet http_responsetredirect_locationt redirect_urlthas_retry_after((sI/usr/lib/python2.7/site-packages/pip/_vendor/urllib3/contrib/appengine.pyRBs     "          cK@str7|jjd}|dkr7|jd=q7n|jjd}|dkr|jd}|jddj||jd's0   .       PKZ.q~~5site-packages/pip/_vendor/urllib3/contrib/ntlmpool.pynu[""" NTLM authenticating pool, contributed by erikcederstran Issue #10, see: http://code.google.com/p/urllib3/issues/detail?id=10 """ from __future__ import absolute_import from logging import getLogger from ntlm import ntlm from .. import HTTPSConnectionPool from ..packages.six.moves.http_client import HTTPSConnection log = getLogger(__name__) class NTLMConnectionPool(HTTPSConnectionPool): """ Implements an NTLM authentication version of an urllib3 connection pool """ scheme = 'https' def __init__(self, user, pw, authurl, *args, **kwargs): """ authurl is a random URL on the server that is protected by NTLM. user is the Windows user, probably in the DOMAIN\\username format. pw is the password for the user. 
""" super(NTLMConnectionPool, self).__init__(*args, **kwargs) self.authurl = authurl self.rawuser = user user_parts = user.split('\\', 1) self.domain = user_parts[0].upper() self.user = user_parts[1] self.pw = pw def _new_conn(self): # Performs the NTLM handshake that secures the connection. The socket # must be kept open while requests are performed. self.num_connections += 1 log.debug('Starting NTLM HTTPS connection no. %d: https://%s%s', self.num_connections, self.host, self.authurl) headers = {} headers['Connection'] = 'Keep-Alive' req_header = 'Authorization' resp_header = 'www-authenticate' conn = HTTPSConnection(host=self.host, port=self.port) # Send negotiation message headers[req_header] = ( 'NTLM %s' % ntlm.create_NTLM_NEGOTIATE_MESSAGE(self.rawuser)) log.debug('Request headers: %s', headers) conn.request('GET', self.authurl, None, headers) res = conn.getresponse() reshdr = dict(res.getheaders()) log.debug('Response status: %s %s', res.status, res.reason) log.debug('Response headers: %s', reshdr) log.debug('Response data: %s [...]', res.read(100)) # Remove the reference to the socket, so that it can not be closed by # the response object (we want to keep the socket open) res.fp = None # Server should respond with a challenge message auth_header_values = reshdr[resp_header].split(', ') auth_header_value = None for s in auth_header_values: if s[:5] == 'NTLM ': auth_header_value = s[5:] if auth_header_value is None: raise Exception('Unexpected %s response header: %s' % (resp_header, reshdr[resp_header])) # Send authentication message ServerChallenge, NegotiateFlags = \ ntlm.parse_NTLM_CHALLENGE_MESSAGE(auth_header_value) auth_msg = ntlm.create_NTLM_AUTHENTICATE_MESSAGE(ServerChallenge, self.user, self.domain, self.pw, NegotiateFlags) headers[req_header] = 'NTLM %s' % auth_msg log.debug('Request headers: %s', headers) conn.request('GET', self.authurl, None, headers) res = conn.getresponse() log.debug('Response status: %s %s', res.status, res.reason) log.debug('Response headers: %s', dict(res.getheaders())) log.debug('Response data: %s [...]', res.read()[:100]) if res.status != 200: if res.status == 401: raise Exception('Server rejected request: wrong ' 'username or password') raise Exception('Wrong server response: %s %s' % (res.status, res.reason)) res.fp = None log.debug('Connection established') return conn def urlopen(self, method, url, body=None, headers=None, retries=3, redirect=True, assert_same_host=True): if headers is None: headers = {} headers['Connection'] = 'Keep-Alive' return super(NTLMConnectionPool, self).urlopen(method, url, body, headers, retries, redirect, assert_same_host) PKZRF9gg6site-packages/pip/_vendor/urllib3/contrib/ntlmpool.pycnu[ abc@@s|dZddlmZddlmZddlmZddlmZddlm Z ee Z defd YZ d S( s NTLM authenticating pool, contributed by erikcederstran Issue #10, see: http://code.google.com/p/urllib3/issues/detail?id=10 i(tabsolute_import(t getLogger(tntlmi(tHTTPSConnectionPool(tHTTPSConnectiontNTLMConnectionPoolcB@s>eZdZdZdZdZdddeedZRS(sQ Implements an NTLM authentication version of an urllib3 connection pool thttpscO@sjtt|j||||_||_|jdd}|dj|_|d|_||_ dS(s authurl is a random URL on the server that is protected by NTLM. user is the Windows user, probably in the DOMAIN\username format. pw is the password for the user. 
s\iiN( tsuperRt__init__tauthurltrawusertsplittuppertdomaintusertpw(tselfRRR targstkwargst user_parts((sH/usr/lib/python2.7/site-packages/pip/_vendor/urllib3/contrib/ntlmpool.pyRs   c C@s|jd7_tjd|j|j|ji}d|ds PKZKoGsite-packages/pip/_vendor/urllib3/contrib/_securetransport/__init__.pycnu[ abc@sdS(N((((sY/usr/lib/python2.7/site-packages/pip/_vendor/urllib3/contrib/_securetransport/__init__.pyttPKZKoGsite-packages/pip/_vendor/urllib3/contrib/_securetransport/__init__.pyonu[ abc@sdS(N((((sY/usr/lib/python2.7/site-packages/pip/_vendor/urllib3/contrib/_securetransport/__init__.pyttPKZܐ//Gsite-packages/pip/_vendor/urllib3/contrib/_securetransport/low_level.pynu[""" Low-level helpers for the SecureTransport bindings. These are Python functions that are not directly related to the high-level APIs but are necessary to get them to work. They include a whole bunch of low-level CoreFoundation messing about and memory management. The concerns in this module are almost entirely about trying to avoid memory leaks and providing appropriate and useful assistance to the higher-level code. """ import base64 import ctypes import itertools import re import os import ssl import tempfile from .bindings import Security, CoreFoundation, CFConst # This regular expression is used to grab PEM data out of a PEM bundle. _PEM_CERTS_RE = re.compile( b"-----BEGIN CERTIFICATE-----\n(.*?)\n-----END CERTIFICATE-----", re.DOTALL ) def _cf_data_from_bytes(bytestring): """ Given a bytestring, create a CFData object from it. This CFData object must be CFReleased by the caller. """ return CoreFoundation.CFDataCreate( CoreFoundation.kCFAllocatorDefault, bytestring, len(bytestring) ) def _cf_dictionary_from_tuples(tuples): """ Given a list of Python tuples, create an associated CFDictionary. """ dictionary_size = len(tuples) # We need to get the dictionary keys and values out in the same order. keys = (t[0] for t in tuples) values = (t[1] for t in tuples) cf_keys = (CoreFoundation.CFTypeRef * dictionary_size)(*keys) cf_values = (CoreFoundation.CFTypeRef * dictionary_size)(*values) return CoreFoundation.CFDictionaryCreate( CoreFoundation.kCFAllocatorDefault, cf_keys, cf_values, dictionary_size, CoreFoundation.kCFTypeDictionaryKeyCallBacks, CoreFoundation.kCFTypeDictionaryValueCallBacks, ) def _cf_string_to_unicode(value): """ Creates a Unicode string from a CFString object. Used entirely for error reporting. Yes, it annoys me quite a lot that this function is this complex. 
""" value_as_void_p = ctypes.cast(value, ctypes.POINTER(ctypes.c_void_p)) string = CoreFoundation.CFStringGetCStringPtr( value_as_void_p, CFConst.kCFStringEncodingUTF8 ) if string is None: buffer = ctypes.create_string_buffer(1024) result = CoreFoundation.CFStringGetCString( value_as_void_p, buffer, 1024, CFConst.kCFStringEncodingUTF8 ) if not result: raise OSError('Error copying C string from CFStringRef') string = buffer.value if string is not None: string = string.decode('utf-8') return string def _assert_no_error(error, exception_class=None): """ Checks the return code and throws an exception if there is an error to report """ if error == 0: return cf_error_string = Security.SecCopyErrorMessageString(error, None) output = _cf_string_to_unicode(cf_error_string) CoreFoundation.CFRelease(cf_error_string) if output is None or output == u'': output = u'OSStatus %s' % error if exception_class is None: exception_class = ssl.SSLError raise exception_class(output) def _cert_array_from_pem(pem_bundle): """ Given a bundle of certs in PEM format, turns them into a CFArray of certs that can be used to validate a cert chain. """ der_certs = [ base64.b64decode(match.group(1)) for match in _PEM_CERTS_RE.finditer(pem_bundle) ] if not der_certs: raise ssl.SSLError("No root certificates specified") cert_array = CoreFoundation.CFArrayCreateMutable( CoreFoundation.kCFAllocatorDefault, 0, ctypes.byref(CoreFoundation.kCFTypeArrayCallBacks) ) if not cert_array: raise ssl.SSLError("Unable to allocate memory!") try: for der_bytes in der_certs: certdata = _cf_data_from_bytes(der_bytes) if not certdata: raise ssl.SSLError("Unable to allocate memory!") cert = Security.SecCertificateCreateWithData( CoreFoundation.kCFAllocatorDefault, certdata ) CoreFoundation.CFRelease(certdata) if not cert: raise ssl.SSLError("Unable to build cert object!") CoreFoundation.CFArrayAppendValue(cert_array, cert) CoreFoundation.CFRelease(cert) except Exception: # We need to free the array before the exception bubbles further. # We only want to do that if an error occurs: otherwise, the caller # should free. CoreFoundation.CFRelease(cert_array) return cert_array def _is_cert(item): """ Returns True if a given CFTypeRef is a certificate. """ expected = Security.SecCertificateGetTypeID() return CoreFoundation.CFGetTypeID(item) == expected def _is_identity(item): """ Returns True if a given CFTypeRef is an identity. """ expected = Security.SecIdentityGetTypeID() return CoreFoundation.CFGetTypeID(item) == expected def _temporary_keychain(): """ This function creates a temporary Mac keychain that we can use to work with credentials. This keychain uses a one-time password and a temporary file to store the data. We expect to have one keychain per socket. The returned SecKeychainRef must be freed by the caller, including calling SecKeychainDelete. Returns a tuple of the SecKeychainRef and the path to the temporary directory that contains it. """ # Unfortunately, SecKeychainCreate requires a path to a keychain. This # means we cannot use mkstemp to use a generic temporary file. Instead, # we're going to create a temporary directory and a filename to use there. # This filename will be 8 random bytes expanded into base64. We also need # some random bytes to password-protect the keychain we're creating, so we # ask for 40 random bytes. 
random_bytes = os.urandom(40) filename = base64.b64encode(random_bytes[:8]).decode('utf-8') password = base64.b64encode(random_bytes[8:]) # Must be valid UTF-8 tempdirectory = tempfile.mkdtemp() keychain_path = os.path.join(tempdirectory, filename).encode('utf-8') # We now want to create the keychain itself. keychain = Security.SecKeychainRef() status = Security.SecKeychainCreate( keychain_path, len(password), password, False, None, ctypes.byref(keychain) ) _assert_no_error(status) # Having created the keychain, we want to pass it off to the caller. return keychain, tempdirectory def _load_items_from_file(keychain, path): """ Given a single file, loads all the trust objects from it into arrays and the keychain. Returns a tuple of lists: the first list is a list of identities, the second a list of certs. """ certificates = [] identities = [] result_array = None with open(path, 'rb') as f: raw_filedata = f.read() try: filedata = CoreFoundation.CFDataCreate( CoreFoundation.kCFAllocatorDefault, raw_filedata, len(raw_filedata) ) result_array = CoreFoundation.CFArrayRef() result = Security.SecItemImport( filedata, # cert data None, # Filename, leaving it out for now None, # What the type of the file is, we don't care None, # what's in the file, we don't care 0, # import flags None, # key params, can include passphrase in the future keychain, # The keychain to insert into ctypes.byref(result_array) # Results ) _assert_no_error(result) # A CFArray is not very useful to us as an intermediary # representation, so we are going to extract the objects we want # and then free the array. We don't need to keep hold of keys: the # keychain already has them! result_count = CoreFoundation.CFArrayGetCount(result_array) for index in range(result_count): item = CoreFoundation.CFArrayGetValueAtIndex( result_array, index ) item = ctypes.cast(item, CoreFoundation.CFTypeRef) if _is_cert(item): CoreFoundation.CFRetain(item) certificates.append(item) elif _is_identity(item): CoreFoundation.CFRetain(item) identities.append(item) finally: if result_array: CoreFoundation.CFRelease(result_array) CoreFoundation.CFRelease(filedata) return (identities, certificates) def _load_client_cert_chain(keychain, *paths): """ Load certificates and maybe keys from a number of files. Has the end goal of returning a CFArray containing one SecIdentityRef, and then zero or more SecCertificateRef objects, suitable for use as a client certificate trust chain. """ # Ok, the strategy. # # This relies on knowing that macOS will not give you a SecIdentityRef # unless you have imported a key into a keychain. This is a somewhat # artificial limitation of macOS (for example, it doesn't necessarily # affect iOS), but there is nothing inside Security.framework that lets you # get a SecIdentityRef without having a key in a keychain. # # So the policy here is we take all the files and iterate them in order. # Each one will use SecItemImport to have one or more objects loaded from # it. We will also point at a keychain that macOS can use to work with the # private key. # # Once we have all the objects, we'll check what we actually have. If we # already have a SecIdentityRef in hand, fab: we'll use that. Otherwise, # we'll take the first certificate (which we assume to be our leaf) and # ask the keychain to give us a SecIdentityRef with that cert's associated # key. # # We'll then return a CFArray containing the trust chain: one # SecIdentityRef and then zero-or-more SecCertificateRef objects. 
The # responsibility for freeing this CFArray will be with the caller. This # CFArray must remain alive for the entire connection, so in practice it # will be stored with a single SSLSocket, along with the reference to the # keychain. certificates = [] identities = [] # Filter out bad paths. paths = (path for path in paths if path) try: for file_path in paths: new_identities, new_certs = _load_items_from_file( keychain, file_path ) identities.extend(new_identities) certificates.extend(new_certs) # Ok, we have everything. The question is: do we have an identity? If # not, we want to grab one from the first cert we have. if not identities: new_identity = Security.SecIdentityRef() status = Security.SecIdentityCreateWithCertificate( keychain, certificates[0], ctypes.byref(new_identity) ) _assert_no_error(status) identities.append(new_identity) # We now want to release the original certificate, as we no longer # need it. CoreFoundation.CFRelease(certificates.pop(0)) # We now need to build a new CFArray that holds the trust chain. trust_chain = CoreFoundation.CFArrayCreateMutable( CoreFoundation.kCFAllocatorDefault, 0, ctypes.byref(CoreFoundation.kCFTypeArrayCallBacks), ) for item in itertools.chain(identities, certificates): # ArrayAppendValue does a CFRetain on the item. That's fine, # because the finally block will release our other refs to them. CoreFoundation.CFArrayAppendValue(trust_chain, item) return trust_chain finally: for obj in itertools.chain(identities, certificates): CoreFoundation.CFRelease(obj) PKZY}DDFsite-packages/pip/_vendor/urllib3/contrib/_securetransport/bindings.pynu[""" This module uses ctypes to bind a whole bunch of functions and constants from SecureTransport. The goal here is to provide the low-level API to SecureTransport. These are essentially the C-level functions and constants, and they're pretty gross to work with. This code is a bastardised version of the code found in Will Bond's oscrypto library. An enormous debt is owed to him for blazing this trail for us. For that reason, this code should be considered to be covered both by urllib3's license and by oscrypto's: Copyright (c) 2015-2016 Will Bond Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions: The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software. THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. 
""" from __future__ import absolute_import import platform from ctypes.util import find_library from ctypes import ( c_void_p, c_int32, c_char_p, c_size_t, c_byte, c_uint32, c_ulong, c_long, c_bool ) from ctypes import CDLL, POINTER, CFUNCTYPE security_path = find_library('Security') if not security_path: raise ImportError('The library Security could not be found') core_foundation_path = find_library('CoreFoundation') if not core_foundation_path: raise ImportError('The library CoreFoundation could not be found') version = platform.mac_ver()[0] version_info = tuple(map(int, version.split('.'))) if version_info < (10, 8): raise OSError( 'Only OS X 10.8 and newer are supported, not %s.%s' % ( version_info[0], version_info[1] ) ) Security = CDLL(security_path, use_errno=True) CoreFoundation = CDLL(core_foundation_path, use_errno=True) Boolean = c_bool CFIndex = c_long CFStringEncoding = c_uint32 CFData = c_void_p CFString = c_void_p CFArray = c_void_p CFMutableArray = c_void_p CFDictionary = c_void_p CFError = c_void_p CFType = c_void_p CFTypeID = c_ulong CFTypeRef = POINTER(CFType) CFAllocatorRef = c_void_p OSStatus = c_int32 CFDataRef = POINTER(CFData) CFStringRef = POINTER(CFString) CFArrayRef = POINTER(CFArray) CFMutableArrayRef = POINTER(CFMutableArray) CFDictionaryRef = POINTER(CFDictionary) CFArrayCallBacks = c_void_p CFDictionaryKeyCallBacks = c_void_p CFDictionaryValueCallBacks = c_void_p SecCertificateRef = POINTER(c_void_p) SecExternalFormat = c_uint32 SecExternalItemType = c_uint32 SecIdentityRef = POINTER(c_void_p) SecItemImportExportFlags = c_uint32 SecItemImportExportKeyParameters = c_void_p SecKeychainRef = POINTER(c_void_p) SSLProtocol = c_uint32 SSLCipherSuite = c_uint32 SSLContextRef = POINTER(c_void_p) SecTrustRef = POINTER(c_void_p) SSLConnectionRef = c_uint32 SecTrustResultType = c_uint32 SecTrustOptionFlags = c_uint32 SSLProtocolSide = c_uint32 SSLConnectionType = c_uint32 SSLSessionOption = c_uint32 try: Security.SecItemImport.argtypes = [ CFDataRef, CFStringRef, POINTER(SecExternalFormat), POINTER(SecExternalItemType), SecItemImportExportFlags, POINTER(SecItemImportExportKeyParameters), SecKeychainRef, POINTER(CFArrayRef), ] Security.SecItemImport.restype = OSStatus Security.SecCertificateGetTypeID.argtypes = [] Security.SecCertificateGetTypeID.restype = CFTypeID Security.SecIdentityGetTypeID.argtypes = [] Security.SecIdentityGetTypeID.restype = CFTypeID Security.SecKeyGetTypeID.argtypes = [] Security.SecKeyGetTypeID.restype = CFTypeID Security.SecCertificateCreateWithData.argtypes = [ CFAllocatorRef, CFDataRef ] Security.SecCertificateCreateWithData.restype = SecCertificateRef Security.SecCertificateCopyData.argtypes = [ SecCertificateRef ] Security.SecCertificateCopyData.restype = CFDataRef Security.SecCopyErrorMessageString.argtypes = [ OSStatus, c_void_p ] Security.SecCopyErrorMessageString.restype = CFStringRef Security.SecIdentityCreateWithCertificate.argtypes = [ CFTypeRef, SecCertificateRef, POINTER(SecIdentityRef) ] Security.SecIdentityCreateWithCertificate.restype = OSStatus Security.SecKeychainCreate.argtypes = [ c_char_p, c_uint32, c_void_p, Boolean, c_void_p, POINTER(SecKeychainRef) ] Security.SecKeychainCreate.restype = OSStatus Security.SecKeychainDelete.argtypes = [ SecKeychainRef ] Security.SecKeychainDelete.restype = OSStatus Security.SecPKCS12Import.argtypes = [ CFDataRef, CFDictionaryRef, POINTER(CFArrayRef) ] Security.SecPKCS12Import.restype = OSStatus SSLReadFunc = CFUNCTYPE(OSStatus, SSLConnectionRef, c_void_p, POINTER(c_size_t)) SSLWriteFunc = 
CFUNCTYPE(OSStatus, SSLConnectionRef, POINTER(c_byte), POINTER(c_size_t)) Security.SSLSetIOFuncs.argtypes = [ SSLContextRef, SSLReadFunc, SSLWriteFunc ] Security.SSLSetIOFuncs.restype = OSStatus Security.SSLSetPeerID.argtypes = [ SSLContextRef, c_char_p, c_size_t ] Security.SSLSetPeerID.restype = OSStatus Security.SSLSetCertificate.argtypes = [ SSLContextRef, CFArrayRef ] Security.SSLSetCertificate.restype = OSStatus Security.SSLSetCertificateAuthorities.argtypes = [ SSLContextRef, CFTypeRef, Boolean ] Security.SSLSetCertificateAuthorities.restype = OSStatus Security.SSLSetConnection.argtypes = [ SSLContextRef, SSLConnectionRef ] Security.SSLSetConnection.restype = OSStatus Security.SSLSetPeerDomainName.argtypes = [ SSLContextRef, c_char_p, c_size_t ] Security.SSLSetPeerDomainName.restype = OSStatus Security.SSLHandshake.argtypes = [ SSLContextRef ] Security.SSLHandshake.restype = OSStatus Security.SSLRead.argtypes = [ SSLContextRef, c_char_p, c_size_t, POINTER(c_size_t) ] Security.SSLRead.restype = OSStatus Security.SSLWrite.argtypes = [ SSLContextRef, c_char_p, c_size_t, POINTER(c_size_t) ] Security.SSLWrite.restype = OSStatus Security.SSLClose.argtypes = [ SSLContextRef ] Security.SSLClose.restype = OSStatus Security.SSLGetNumberSupportedCiphers.argtypes = [ SSLContextRef, POINTER(c_size_t) ] Security.SSLGetNumberSupportedCiphers.restype = OSStatus Security.SSLGetSupportedCiphers.argtypes = [ SSLContextRef, POINTER(SSLCipherSuite), POINTER(c_size_t) ] Security.SSLGetSupportedCiphers.restype = OSStatus Security.SSLSetEnabledCiphers.argtypes = [ SSLContextRef, POINTER(SSLCipherSuite), c_size_t ] Security.SSLSetEnabledCiphers.restype = OSStatus Security.SSLGetNumberEnabledCiphers.argtype = [ SSLContextRef, POINTER(c_size_t) ] Security.SSLGetNumberEnabledCiphers.restype = OSStatus Security.SSLGetEnabledCiphers.argtypes = [ SSLContextRef, POINTER(SSLCipherSuite), POINTER(c_size_t) ] Security.SSLGetEnabledCiphers.restype = OSStatus Security.SSLGetNegotiatedCipher.argtypes = [ SSLContextRef, POINTER(SSLCipherSuite) ] Security.SSLGetNegotiatedCipher.restype = OSStatus Security.SSLGetNegotiatedProtocolVersion.argtypes = [ SSLContextRef, POINTER(SSLProtocol) ] Security.SSLGetNegotiatedProtocolVersion.restype = OSStatus Security.SSLCopyPeerTrust.argtypes = [ SSLContextRef, POINTER(SecTrustRef) ] Security.SSLCopyPeerTrust.restype = OSStatus Security.SecTrustSetAnchorCertificates.argtypes = [ SecTrustRef, CFArrayRef ] Security.SecTrustSetAnchorCertificates.restype = OSStatus Security.SecTrustSetAnchorCertificatesOnly.argstypes = [ SecTrustRef, Boolean ] Security.SecTrustSetAnchorCertificatesOnly.restype = OSStatus Security.SecTrustEvaluate.argtypes = [ SecTrustRef, POINTER(SecTrustResultType) ] Security.SecTrustEvaluate.restype = OSStatus Security.SecTrustGetCertificateCount.argtypes = [ SecTrustRef ] Security.SecTrustGetCertificateCount.restype = CFIndex Security.SecTrustGetCertificateAtIndex.argtypes = [ SecTrustRef, CFIndex ] Security.SecTrustGetCertificateAtIndex.restype = SecCertificateRef Security.SSLCreateContext.argtypes = [ CFAllocatorRef, SSLProtocolSide, SSLConnectionType ] Security.SSLCreateContext.restype = SSLContextRef Security.SSLSetSessionOption.argtypes = [ SSLContextRef, SSLSessionOption, Boolean ] Security.SSLSetSessionOption.restype = OSStatus Security.SSLSetProtocolVersionMin.argtypes = [ SSLContextRef, SSLProtocol ] Security.SSLSetProtocolVersionMin.restype = OSStatus Security.SSLSetProtocolVersionMax.argtypes = [ SSLContextRef, SSLProtocol ] 
Security.SSLSetProtocolVersionMax.restype = OSStatus Security.SecCopyErrorMessageString.argtypes = [ OSStatus, c_void_p ] Security.SecCopyErrorMessageString.restype = CFStringRef Security.SSLReadFunc = SSLReadFunc Security.SSLWriteFunc = SSLWriteFunc Security.SSLContextRef = SSLContextRef Security.SSLProtocol = SSLProtocol Security.SSLCipherSuite = SSLCipherSuite Security.SecIdentityRef = SecIdentityRef Security.SecKeychainRef = SecKeychainRef Security.SecTrustRef = SecTrustRef Security.SecTrustResultType = SecTrustResultType Security.SecExternalFormat = SecExternalFormat Security.OSStatus = OSStatus Security.kSecImportExportPassphrase = CFStringRef.in_dll( Security, 'kSecImportExportPassphrase' ) Security.kSecImportItemIdentity = CFStringRef.in_dll( Security, 'kSecImportItemIdentity' ) # CoreFoundation time! CoreFoundation.CFRetain.argtypes = [ CFTypeRef ] CoreFoundation.CFRetain.restype = CFTypeRef CoreFoundation.CFRelease.argtypes = [ CFTypeRef ] CoreFoundation.CFRelease.restype = None CoreFoundation.CFGetTypeID.argtypes = [ CFTypeRef ] CoreFoundation.CFGetTypeID.restype = CFTypeID CoreFoundation.CFStringCreateWithCString.argtypes = [ CFAllocatorRef, c_char_p, CFStringEncoding ] CoreFoundation.CFStringCreateWithCString.restype = CFStringRef CoreFoundation.CFStringGetCStringPtr.argtypes = [ CFStringRef, CFStringEncoding ] CoreFoundation.CFStringGetCStringPtr.restype = c_char_p CoreFoundation.CFStringGetCString.argtypes = [ CFStringRef, c_char_p, CFIndex, CFStringEncoding ] CoreFoundation.CFStringGetCString.restype = c_bool CoreFoundation.CFDataCreate.argtypes = [ CFAllocatorRef, c_char_p, CFIndex ] CoreFoundation.CFDataCreate.restype = CFDataRef CoreFoundation.CFDataGetLength.argtypes = [ CFDataRef ] CoreFoundation.CFDataGetLength.restype = CFIndex CoreFoundation.CFDataGetBytePtr.argtypes = [ CFDataRef ] CoreFoundation.CFDataGetBytePtr.restype = c_void_p CoreFoundation.CFDictionaryCreate.argtypes = [ CFAllocatorRef, POINTER(CFTypeRef), POINTER(CFTypeRef), CFIndex, CFDictionaryKeyCallBacks, CFDictionaryValueCallBacks ] CoreFoundation.CFDictionaryCreate.restype = CFDictionaryRef CoreFoundation.CFDictionaryGetValue.argtypes = [ CFDictionaryRef, CFTypeRef ] CoreFoundation.CFDictionaryGetValue.restype = CFTypeRef CoreFoundation.CFArrayCreate.argtypes = [ CFAllocatorRef, POINTER(CFTypeRef), CFIndex, CFArrayCallBacks, ] CoreFoundation.CFArrayCreate.restype = CFArrayRef CoreFoundation.CFArrayCreateMutable.argtypes = [ CFAllocatorRef, CFIndex, CFArrayCallBacks ] CoreFoundation.CFArrayCreateMutable.restype = CFMutableArrayRef CoreFoundation.CFArrayAppendValue.argtypes = [ CFMutableArrayRef, c_void_p ] CoreFoundation.CFArrayAppendValue.restype = None CoreFoundation.CFArrayGetCount.argtypes = [ CFArrayRef ] CoreFoundation.CFArrayGetCount.restype = CFIndex CoreFoundation.CFArrayGetValueAtIndex.argtypes = [ CFArrayRef, CFIndex ] CoreFoundation.CFArrayGetValueAtIndex.restype = c_void_p CoreFoundation.kCFAllocatorDefault = CFAllocatorRef.in_dll( CoreFoundation, 'kCFAllocatorDefault' ) CoreFoundation.kCFTypeArrayCallBacks = c_void_p.in_dll(CoreFoundation, 'kCFTypeArrayCallBacks') CoreFoundation.kCFTypeDictionaryKeyCallBacks = c_void_p.in_dll( CoreFoundation, 'kCFTypeDictionaryKeyCallBacks' ) CoreFoundation.kCFTypeDictionaryValueCallBacks = c_void_p.in_dll( CoreFoundation, 'kCFTypeDictionaryValueCallBacks' ) CoreFoundation.CFTypeRef = CFTypeRef CoreFoundation.CFArrayRef = CFArrayRef CoreFoundation.CFStringRef = CFStringRef CoreFoundation.CFDictionaryRef = CFDictionaryRef except (AttributeError): raise 
ImportError('Error initializing ctypes') class CFConst(object): """ A class object that acts as essentially a namespace for CoreFoundation constants. """ kCFStringEncodingUTF8 = CFStringEncoding(0x08000100) class SecurityConst(object): """ A class object that acts as essentially a namespace for Security constants. """ kSSLSessionOptionBreakOnServerAuth = 0 kSSLProtocol2 = 1 kSSLProtocol3 = 2 kTLSProtocol1 = 4 kTLSProtocol11 = 7 kTLSProtocol12 = 8 kSSLClientSide = 1 kSSLStreamType = 0 kSecFormatPEMSequence = 10 kSecTrustResultInvalid = 0 kSecTrustResultProceed = 1 # This gap is present on purpose: this was kSecTrustResultConfirm, which # is deprecated. kSecTrustResultDeny = 3 kSecTrustResultUnspecified = 4 kSecTrustResultRecoverableTrustFailure = 5 kSecTrustResultFatalTrustFailure = 6 kSecTrustResultOtherError = 7 errSSLProtocol = -9800 errSSLWouldBlock = -9803 errSSLClosedGraceful = -9805 errSSLClosedNoNotify = -9816 errSSLClosedAbort = -9806 errSSLXCertChainInvalid = -9807 errSSLCrypto = -9809 errSSLInternal = -9810 errSSLCertExpired = -9814 errSSLCertNotYetValid = -9815 errSSLUnknownRootCert = -9812 errSSLNoRootCert = -9813 errSSLHostNameMismatch = -9843 errSSLPeerHandshakeFail = -9824 errSSLPeerUserCancelled = -9839 errSSLWeakPeerEphemeralDHKey = -9850 errSSLServerAuthCompleted = -9841 errSSLRecordOverflow = -9847 errSecVerifyFailed = -67808 errSecNoTrustSettings = -25263 errSecItemNotFound = -25300 errSecInvalidTrustSettings = -25262 # Cipher suites. We only pick the ones our default cipher string allows. TLS_ECDHE_ECDSA_WITH_AES_256_GCM_SHA384 = 0xC02C TLS_ECDHE_RSA_WITH_AES_256_GCM_SHA384 = 0xC030 TLS_ECDHE_ECDSA_WITH_AES_128_GCM_SHA256 = 0xC02B TLS_ECDHE_RSA_WITH_AES_128_GCM_SHA256 = 0xC02F TLS_DHE_DSS_WITH_AES_256_GCM_SHA384 = 0x00A3 TLS_DHE_RSA_WITH_AES_256_GCM_SHA384 = 0x009F TLS_DHE_DSS_WITH_AES_128_GCM_SHA256 = 0x00A2 TLS_DHE_RSA_WITH_AES_128_GCM_SHA256 = 0x009E TLS_ECDHE_ECDSA_WITH_AES_256_CBC_SHA384 = 0xC024 TLS_ECDHE_RSA_WITH_AES_256_CBC_SHA384 = 0xC028 TLS_ECDHE_ECDSA_WITH_AES_256_CBC_SHA = 0xC00A TLS_ECDHE_RSA_WITH_AES_256_CBC_SHA = 0xC014 TLS_DHE_RSA_WITH_AES_256_CBC_SHA256 = 0x006B TLS_DHE_DSS_WITH_AES_256_CBC_SHA256 = 0x006A TLS_DHE_RSA_WITH_AES_256_CBC_SHA = 0x0039 TLS_DHE_DSS_WITH_AES_256_CBC_SHA = 0x0038 TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256 = 0xC023 TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256 = 0xC027 TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA = 0xC009 TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA = 0xC013 TLS_DHE_RSA_WITH_AES_128_CBC_SHA256 = 0x0067 TLS_DHE_DSS_WITH_AES_128_CBC_SHA256 = 0x0040 TLS_DHE_RSA_WITH_AES_128_CBC_SHA = 0x0033 TLS_DHE_DSS_WITH_AES_128_CBC_SHA = 0x0032 TLS_RSA_WITH_AES_256_GCM_SHA384 = 0x009D TLS_RSA_WITH_AES_128_GCM_SHA256 = 0x009C TLS_RSA_WITH_AES_256_CBC_SHA256 = 0x003D TLS_RSA_WITH_AES_128_CBC_SHA256 = 0x003C TLS_RSA_WITH_AES_256_CBC_SHA = 0x0035 TLS_RSA_WITH_AES_128_CBC_SHA = 0x002F TLS_AES_128_GCM_SHA256 = 0x1301 TLS_AES_256_GCM_SHA384 = 0x1302 TLS_CHACHA20_POLY1305_SHA256 = 0x1303 PKZy//Gsite-packages/pip/_vendor/urllib3/contrib/_securetransport/bindings.pyonu[ abc @@sE dZddlmZddlZddlmZddlmZmZm Z m Z m Z m Z m Z mZmZddlmZmZmZedZesedned Zesed nejdZeeeejd Zedkr+edededfneedeZeedeZ eZ!eZ"e Z#eZ$eZ%eZ&eZ'eZ(eZ)eZ*e Z+ee*Z,eZ-eZ.ee$Z/ee%Z0ee&Z1ee'Z2ee(Z3eZ4eZ5eZ6eeZ7e Z8e Z9eeZ:e Z;eZ<eeZ=e Z>e Z?eeZ@eeZAe ZBe ZCe ZDe ZEe ZFe ZGyze/e0ee8ee9e;ee<e=ee1gejH_Ie.ejH_JgejK_Ie+ejK_JgejL_Ie+ejL_JgejM_Ie+ejM_Je-e/gejN_Ie7ejN_Je7gejO_Ie/ejO_Je.egejP_Ie0ejP_Je,e7ee:gejQ_Ie.ejQ_Je e ee!eee=gejR_Ie.ejR_Je=gejS_Ie.ejS_Je/e3ee1gejT_Ie.ejT_Jee.eBeee ZUee.eBee ee 
ZVe@eUeVgejW_Ie.ejW_Je@e e gejX_Ie.ejX_Je@e1gejY_Ie.ejY_Je@e,e!gejZ_Ie.ejZ_Je@eBgej[_Ie.ej[_Je@e e gej\_Ie.ej\_Je@gej]_Ie.ej]_Je@e e ee gej^_Ie.ej^_Je@e e ee gej__Ie.ej__Je@gej`_Ie.ej`_Je@ee geja_Ie.eja_Je@ee?ee gejb_Ie.ejb_Je@ee?e gejc_Ie.ejc_Je@ee gejd_ee.ejd_Je@ee?ee gejf_Ie.ejf_Je@ee?gejg_Ie.ejg_Je@ee>gejh_Ie.ejh_Je@eeAgeji_Ie.eji_JeAe1gejj_Ie.ejj_JeAe!gejk_le.ejk_JeAeeCgejm_Ie.ejm_JeAgejn_Ie"ejn_JeAe"gejo_Ie7ejo_Je-eEeFgejp_Ie@ejp_Je@eGe!gejq_Ie.ejq_Je@e>gejr_Ie.ejr_Je@e>gejs_Ie.ejs_Je.egejP_Ie0ejP_JeUe_UeVe_Ve@e_@e>e_>e?e_?e:e_:e=e_=eAe_AeCe_Ce8e_8e.e_.e0jtede_ue0jtede_ve,ge jw_Ie,e jw_Je,ge jx_Ide jx_Je,ge jz_Ie+e jz_Je-e e#ge j{_Ie0e j{_Je0e#ge j|_Ie e j|_Je0e e"e#ge j}_Iee j}_Je-e e"ge j~_Ie/e j~_Je/ge j_Ie"e j_Je/ge j_Iee j_Je-ee,ee,e"e5e6ge j_Ie3e j_Je3e,ge j_Ie,e j_Je-ee,e"e4ge j_Ie1e j_Je-e"e4ge j_Ie2e j_Je2ege j_Ide j_Je1ge j_Ie"e j_Je1e"ge j_Iee j_Je-jte de _ejte de _ejte de _ejte de _e,e _,e1e _1e0e _0e3e _3Wnek r ednXdefdYZdefdYZdS(sy This module uses ctypes to bind a whole bunch of functions and constants from SecureTransport. The goal here is to provide the low-level API to SecureTransport. These are essentially the C-level functions and constants, and they're pretty gross to work with. This code is a bastardised version of the code found in Will Bond's oscrypto library. An enormous debt is owed to him for blazing this trail for us. For that reason, this code should be considered to be covered both by urllib3's license and by oscrypto's: Copyright (c) 2015-2016 Will Bond Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions: The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software. THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. i(tabsolute_importN(t find_library( tc_void_ptc_int32tc_char_ptc_size_ttc_bytetc_uint32tc_ulongtc_longtc_bool(tCDLLtPOINTERt CFUNCTYPEtSecuritys'The library Security could not be foundtCoreFoundations-The library CoreFoundation could not be foundt.i is1Only OS X 10.8 and newer are supported, not %s.%sit use_errnotkSecImportExportPassphrasetkSecImportItemIdentitytkCFAllocatorDefaulttkCFTypeArrayCallBackstkCFTypeDictionaryKeyCallBackstkCFTypeDictionaryValueCallBackssError initializing ctypestCFConstcB@seZdZedZRS(s_ A class object that acts as essentially a namespace for CoreFoundation constants. 
i(t__name__t __module__t__doc__tCFStringEncodingtkCFStringEncodingUTF8(((sY/usr/lib/python2.7/site-packages/pip/_vendor/urllib3/contrib/_securetransport/bindings.pyRst SecurityConstcB@seZdZdZdZdZdZdZdZdZ dZ dZ dZ dZ dZdZd Zd ZdZd Zd Zd ZdZdZdZdZdZdZdZdZdZdZdZ dZ!dZ"dZ#dZ$dZ%dZ&dZ'd Z(d!Z)d"Z*d#Z+d$Z,d%Z-d&Z.d'Z/d(Z0d)Z1d*Z2d+Z3d,Z4d-Z5d.Z6d/Z7d0Z8d1Z9d2Z:d3Z;d4Z<d5Z=d6Z>d7Z?d8Z@d9ZAd:ZBd;ZCd<ZDd=ZEd>ZFd?ZGd@ZHdAZIRS(BsU A class object that acts as essentially a namespace for Security constants. iiiiiii iiiiiiiiiiiiiiiiiiiiii iQi,iRi,i0i+i/iiiii$i(i iikiji9i8i#i'i iigi@i3i2iii=i<i5i/iii(JRRRt"kSSLSessionOptionBreakOnServerAutht kSSLProtocol2t kSSLProtocol3t kTLSProtocol1tkTLSProtocol11tkTLSProtocol12tkSSLClientSidetkSSLStreamTypetkSecFormatPEMSequencetkSecTrustResultInvalidtkSecTrustResultProceedtkSecTrustResultDenytkSecTrustResultUnspecifiedt&kSecTrustResultRecoverableTrustFailuret kSecTrustResultFatalTrustFailuretkSecTrustResultOtherErrorterrSSLProtocolterrSSLWouldBlockterrSSLClosedGracefulterrSSLClosedNoNotifyterrSSLClosedAbortterrSSLXCertChainInvalidt errSSLCryptoterrSSLInternalterrSSLCertExpiredterrSSLCertNotYetValidterrSSLUnknownRootCertterrSSLNoRootCertterrSSLHostNameMismatchterrSSLPeerHandshakeFailterrSSLPeerUserCancelledterrSSLWeakPeerEphemeralDHKeyterrSSLServerAuthCompletedterrSSLRecordOverflowterrSecVerifyFailedterrSecNoTrustSettingsterrSecItemNotFoundterrSecInvalidTrustSettingst'TLS_ECDHE_ECDSA_WITH_AES_256_GCM_SHA384t%TLS_ECDHE_RSA_WITH_AES_256_GCM_SHA384t'TLS_ECDHE_ECDSA_WITH_AES_128_GCM_SHA256t%TLS_ECDHE_RSA_WITH_AES_128_GCM_SHA256t#TLS_DHE_DSS_WITH_AES_256_GCM_SHA384t#TLS_DHE_RSA_WITH_AES_256_GCM_SHA384t#TLS_DHE_DSS_WITH_AES_128_GCM_SHA256t#TLS_DHE_RSA_WITH_AES_128_GCM_SHA256t'TLS_ECDHE_ECDSA_WITH_AES_256_CBC_SHA384t%TLS_ECDHE_RSA_WITH_AES_256_CBC_SHA384t$TLS_ECDHE_ECDSA_WITH_AES_256_CBC_SHAt"TLS_ECDHE_RSA_WITH_AES_256_CBC_SHAt#TLS_DHE_RSA_WITH_AES_256_CBC_SHA256t#TLS_DHE_DSS_WITH_AES_256_CBC_SHA256t TLS_DHE_RSA_WITH_AES_256_CBC_SHAt TLS_DHE_DSS_WITH_AES_256_CBC_SHAt'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256t%TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256t$TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHAt"TLS_ECDHE_RSA_WITH_AES_128_CBC_SHAt#TLS_DHE_RSA_WITH_AES_128_CBC_SHA256t#TLS_DHE_DSS_WITH_AES_128_CBC_SHA256t TLS_DHE_RSA_WITH_AES_128_CBC_SHAt TLS_DHE_DSS_WITH_AES_128_CBC_SHAtTLS_RSA_WITH_AES_256_GCM_SHA384tTLS_RSA_WITH_AES_128_GCM_SHA256tTLS_RSA_WITH_AES_256_CBC_SHA256tTLS_RSA_WITH_AES_128_CBC_SHA256tTLS_RSA_WITH_AES_256_CBC_SHAtTLS_RSA_WITH_AES_128_CBC_SHAtTLS_AES_128_GCM_SHA256tTLS_AES_256_GCM_SHA384tTLS_CHACHA20_POLY1305_SHA256(((sY/usr/lib/python2.7/site-packages/pip/_vendor/urllib3/contrib/_securetransport/bindings.pyRs(i i(Rt __future__Rtplatformt ctypes.utilRtctypesRRRRRRRR R R R R t security_patht ImportErrortcore_foundation_pathtmac_vertversionttupletmaptinttsplitt version_infotOSErrortTrueRRtBooleantCFIndexRtCFDatatCFStringtCFArraytCFMutableArrayt CFDictionarytCFErrortCFTypetCFTypeIDt CFTypeReftCFAllocatorReftOSStatust CFDataReft CFStringReft CFArrayReftCFMutableArrayReftCFDictionaryReftCFArrayCallBackstCFDictionaryKeyCallBackstCFDictionaryValueCallBackstSecCertificateReftSecExternalFormattSecExternalItemTypetSecIdentityReftSecItemImportExportFlagst SecItemImportExportKeyParameterstSecKeychainReft SSLProtocoltSSLCipherSuitet SSLContextReft SecTrustReftSSLConnectionReftSecTrustResultTypetSecTrustOptionFlagstSSLProtocolSidetSSLConnectionTypetSSLSessionOptiont 
SecItemImporttargtypestrestypetSecCertificateGetTypeIDtSecIdentityGetTypeIDtSecKeyGetTypeIDtSecCertificateCreateWithDatatSecCertificateCopyDatatSecCopyErrorMessageStringt SecIdentityCreateWithCertificatetSecKeychainCreatetSecKeychainDeletetSecPKCS12Importt SSLReadFunct SSLWriteFunct SSLSetIOFuncst SSLSetPeerIDtSSLSetCertificatetSSLSetCertificateAuthoritiestSSLSetConnectiontSSLSetPeerDomainNamet SSLHandshaketSSLReadtSSLWritetSSLClosetSSLGetNumberSupportedCipherstSSLGetSupportedCipherstSSLSetEnabledCipherstSSLGetNumberEnabledCipherstargtypetSSLGetEnabledCipherstSSLGetNegotiatedCiphertSSLGetNegotiatedProtocolVersiontSSLCopyPeerTrusttSecTrustSetAnchorCertificatest!SecTrustSetAnchorCertificatesOnlyt argstypestSecTrustEvaluatetSecTrustGetCertificateCounttSecTrustGetCertificateAtIndextSSLCreateContexttSSLSetSessionOptiontSSLSetProtocolVersionMintSSLSetProtocolVersionMaxtin_dllRRtCFRetaint CFReleasetNonet CFGetTypeIDtCFStringCreateWithCStringtCFStringGetCStringPtrtCFStringGetCStringt CFDataCreatetCFDataGetLengthtCFDataGetBytePtrtCFDictionaryCreatetCFDictionaryGetValuet CFArrayCreatetCFArrayCreateMutabletCFArrayAppendValuetCFArrayGetCounttCFArrayGetValueAtIndexRRRRtAttributeErrortobjectRR(((sY/usr/lib/python2.7/site-packages/pip/_vendor/urllib3/contrib/_securetransport/bindings.pyts, @                               !                                                                  PKZFsite-packages/pip/_vendor/urllib3/contrib/_securetransport/__init__.pynu[PKZN$$Hsite-packages/pip/_vendor/urllib3/contrib/_securetransport/low_level.pycnu[ abc@sdZddlZddlZddlZddlZddlZddlZddlZddlm Z m Z m Z ej dej ZdZdZdZdd Zd Zd Zd Zd ZdZdZdS(s Low-level helpers for the SecureTransport bindings. These are Python functions that are not directly related to the high-level APIs but are necessary to get them to work. They include a whole bunch of low-level CoreFoundation messing about and memory management. The concerns in this module are almost entirely about trying to avoid memory leaks and providing appropriate and useful assistance to the higher-level code. iNi(tSecuritytCoreFoundationtCFConsts;-----BEGIN CERTIFICATE----- (.*?) -----END CERTIFICATE-----cCstjtj|t|S(sv Given a bytestring, create a CFData object from it. This CFData object must be CFReleased by the caller. (Rt CFDataCreatetkCFAllocatorDefaulttlen(t bytestring((sZ/usr/lib/python2.7/site-packages/pip/_vendor/urllib3/contrib/_securetransport/low_level.pyt_cf_data_from_bytesscCswt|}d|D}d|D}tj||}tj||}tjtj|||tjtjS(sK Given a list of Python tuples, create an associated CFDictionary. css|]}|dVqdS(iN((t.0tt((sZ/usr/lib/python2.7/site-packages/pip/_vendor/urllib3/contrib/_securetransport/low_level.pys ,scss|]}|dVqdS(iN((RR ((sZ/usr/lib/python2.7/site-packages/pip/_vendor/urllib3/contrib/_securetransport/low_level.pys -s(RRt CFTypeReftCFDictionaryCreateRtkCFTypeDictionaryKeyCallBackstkCFTypeDictionaryValueCallBacks(ttuplestdictionary_sizetkeystvaluestcf_keyst cf_values((sZ/usr/lib/python2.7/site-packages/pip/_vendor/urllib3/contrib/_securetransport/low_level.pyt_cf_dictionary_from_tuples%s cCstj|tjtj}tj|tj}|dkrtj d}tj ||dtj}|s~t dn|j }n|dk r|j d}n|S(s Creates a Unicode string from a CFString object. Used entirely for error reporting. Yes, it annoys me quite a lot that this function is this complex. 
is'Error copying C string from CFStringRefsutf-8N(tctypestcasttPOINTERtc_void_pRtCFStringGetCStringPtrRtkCFStringEncodingUTF8tNonetcreate_string_buffertCFStringGetCStringtOSErrortvaluetdecode(Rtvalue_as_void_ptstringtbuffertresult((sZ/usr/lib/python2.7/site-packages/pip/_vendor/urllib3/contrib/_securetransport/low_level.pyt_cf_string_to_unicode;s"     cCs|dkrdStj|d}t|}tj||dksS|dkr`d|}n|dkrxtj}n||dS(s[ Checks the return code and throws an exception if there is an error to report iNuu OSStatus %s(RtSecCopyErrorMessageStringRR%Rt CFReleasetssltSSLError(terrortexception_classtcf_error_stringtoutput((sZ/usr/lib/python2.7/site-packages/pip/_vendor/urllib3/contrib/_securetransport/low_level.pyt_assert_no_errorXs      cCs=gtj|D]}tj|jd^q}|sLtjdntjtj dt j tj }|stjdnyx|D]}t |}|stjdntjtj |}tj||stjdntj||tj|qWWntk r8tj|nX|S(s Given a bundle of certs in PEM format, turns them into a CFArray of certs that can be used to validate a cert chain. isNo root certificates specifiedisUnable to allocate memory!sUnable to build cert object!(t _PEM_CERTS_REtfinditertbase64t b64decodetgroupR(R)RtCFArrayCreateMutableRRtbyreftkCFTypeArrayCallBacksRRtSecCertificateCreateWithDataR'tCFArrayAppendValuet Exception(t pem_bundletmatcht der_certst cert_arrayt der_bytestcertdatatcert((sZ/usr/lib/python2.7/site-packages/pip/_vendor/urllib3/contrib/_securetransport/low_level.pyt_cert_array_from_pemms21    cCstj}tj||kS(s= Returns True if a given CFTypeRef is a certificate. (RtSecCertificateGetTypeIDRt CFGetTypeID(titemtexpected((sZ/usr/lib/python2.7/site-packages/pip/_vendor/urllib3/contrib/_securetransport/low_level.pyt_is_certs cCstj}tj||kS(s; Returns True if a given CFTypeRef is an identity. (RtSecIdentityGetTypeIDRRC(RDRE((sZ/usr/lib/python2.7/site-packages/pip/_vendor/urllib3/contrib/_securetransport/low_level.pyt _is_identitys cCstjd}tj|d jd}tj|d}tj}tjj||j d}t j }t j |t ||tdtj|}t|||fS(s This function creates a temporary Mac keychain that we can use to work with credentials. This keychain uses a one-time password and a temporary file to store the data. We expect to have one keychain per socket. The returned SecKeychainRef must be freed by the caller, including calling SecKeychainDelete. Returns a tuple of the SecKeychainRef and the path to the temporary directory that contains it. i(isutf-8N(tosturandomR1t b64encodeR ttempfiletmkdtemptpathtjointencodeRtSecKeychainReftSecKeychainCreateRtFalseRRR5R.(t random_bytestfilenametpasswordt tempdirectoryt keychain_pathtkeychaintstatus((sZ/usr/lib/python2.7/site-packages/pip/_vendor/urllib3/contrib/_securetransport/low_level.pyt_temporary_keychains    c Cskg}g}d}t|d}|j}WdQXztjtj|t|}tj}tj |ddddd|t j |}t |tj |} xt| D]} tj|| } t j| tj} t| r tj| |j| qt| rtj| |j| qqWWd|rStj|ntj|X||fS(s Given a single file, loads all the trust objects from it into arrays and the keychain. Returns a tuple of lists: the first list is a list of identities, the second a list of certs. trbNi(RtopentreadRRRRt CFArrayRefRt SecItemImportRR5R.tCFArrayGetCounttrangetCFArrayGetValueAtIndexRR RFtCFRetaintappendRHR'( RYRNt certificatest identitiest result_arraytft raw_filedatatfiledataR$t result_counttindexRD((sZ/usr/lib/python2.7/site-packages/pip/_vendor/urllib3/contrib/_securetransport/low_level.pyt_load_items_from_filesH       c GsKg}g}d|D}zx=|D]5}t||\}}|j||j|q&W|stj}tj||dtj|}t||j|t j |j dnt j t j dtjt j} x*tj||D]} t j| | qW| SWdx'tj||D]} t j | q/WXdS(s Load certificates and maybe keys from a number of files. 
Has the end goal of returning a CFArray containing one SecIdentityRef, and then zero or more SecCertificateRef objects, suitable for use as a client certificate trust chain. css|]}|r|VqdS(N((RRN((sZ/usr/lib/python2.7/site-packages/pip/_vendor/urllib3/contrib/_securetransport/low_level.pys /siN(RntextendRtSecIdentityReft SecIdentityCreateWithCertificateRR5R.ReRR'tpopR4RR6t itertoolstchainR8( RYtpathsRfRgt file_pathtnew_identitiest new_certst new_identityRZt trust_chainRDtobj((sZ/usr/lib/python2.7/site-packages/pip/_vendor/urllib3/contrib/_securetransport/low_level.pyt_load_client_cert_chain s6      (t__doc__R1RRstreRIR(RLtbindingsRRRtcompiletDOTALLR/RRR%RR.RARFRHR[RnR|(((sZ/usr/lib/python2.7/site-packages/pip/_vendor/urllib3/contrib/_securetransport/low_level.pyt s(           +   ( ;PKZN$$Hsite-packages/pip/_vendor/urllib3/contrib/_securetransport/low_level.pyonu[ abc@sdZddlZddlZddlZddlZddlZddlZddlZddlm Z m Z m Z ej dej ZdZdZdZdd Zd Zd Zd Zd ZdZdZdS(s Low-level helpers for the SecureTransport bindings. These are Python functions that are not directly related to the high-level APIs but are necessary to get them to work. They include a whole bunch of low-level CoreFoundation messing about and memory management. The concerns in this module are almost entirely about trying to avoid memory leaks and providing appropriate and useful assistance to the higher-level code. iNi(tSecuritytCoreFoundationtCFConsts;-----BEGIN CERTIFICATE----- (.*?) -----END CERTIFICATE-----cCstjtj|t|S(sv Given a bytestring, create a CFData object from it. This CFData object must be CFReleased by the caller. (Rt CFDataCreatetkCFAllocatorDefaulttlen(t bytestring((sZ/usr/lib/python2.7/site-packages/pip/_vendor/urllib3/contrib/_securetransport/low_level.pyt_cf_data_from_bytesscCswt|}d|D}d|D}tj||}tj||}tjtj|||tjtjS(sK Given a list of Python tuples, create an associated CFDictionary. css|]}|dVqdS(iN((t.0tt((sZ/usr/lib/python2.7/site-packages/pip/_vendor/urllib3/contrib/_securetransport/low_level.pys ,scss|]}|dVqdS(iN((RR ((sZ/usr/lib/python2.7/site-packages/pip/_vendor/urllib3/contrib/_securetransport/low_level.pys -s(RRt CFTypeReftCFDictionaryCreateRtkCFTypeDictionaryKeyCallBackstkCFTypeDictionaryValueCallBacks(ttuplestdictionary_sizetkeystvaluestcf_keyst cf_values((sZ/usr/lib/python2.7/site-packages/pip/_vendor/urllib3/contrib/_securetransport/low_level.pyt_cf_dictionary_from_tuples%s cCstj|tjtj}tj|tj}|dkrtj d}tj ||dtj}|s~t dn|j }n|dk r|j d}n|S(s Creates a Unicode string from a CFString object. Used entirely for error reporting. Yes, it annoys me quite a lot that this function is this complex. is'Error copying C string from CFStringRefsutf-8N(tctypestcasttPOINTERtc_void_pRtCFStringGetCStringPtrRtkCFStringEncodingUTF8tNonetcreate_string_buffertCFStringGetCStringtOSErrortvaluetdecode(Rtvalue_as_void_ptstringtbuffertresult((sZ/usr/lib/python2.7/site-packages/pip/_vendor/urllib3/contrib/_securetransport/low_level.pyt_cf_string_to_unicode;s"     cCs|dkrdStj|d}t|}tj||dksS|dkr`d|}n|dkrxtj}n||dS(s[ Checks the return code and throws an exception if there is an error to report iNuu OSStatus %s(RtSecCopyErrorMessageStringRR%Rt CFReleasetssltSSLError(terrortexception_classtcf_error_stringtoutput((sZ/usr/lib/python2.7/site-packages/pip/_vendor/urllib3/contrib/_securetransport/low_level.pyt_assert_no_errorXs      cCs=gtj|D]}tj|jd^q}|sLtjdntjtj dt j tj }|stjdnyx|D]}t |}|stjdntjtj |}tj||stjdntj||tj|qWWntk r8tj|nX|S(s Given a bundle of certs in PEM format, turns them into a CFArray of certs that can be used to validate a cert chain. 
isNo root certificates specifiedisUnable to allocate memory!sUnable to build cert object!(t _PEM_CERTS_REtfinditertbase64t b64decodetgroupR(R)RtCFArrayCreateMutableRRtbyreftkCFTypeArrayCallBacksRRtSecCertificateCreateWithDataR'tCFArrayAppendValuet Exception(t pem_bundletmatcht der_certst cert_arrayt der_bytestcertdatatcert((sZ/usr/lib/python2.7/site-packages/pip/_vendor/urllib3/contrib/_securetransport/low_level.pyt_cert_array_from_pemms21    cCstj}tj||kS(s= Returns True if a given CFTypeRef is a certificate. (RtSecCertificateGetTypeIDRt CFGetTypeID(titemtexpected((sZ/usr/lib/python2.7/site-packages/pip/_vendor/urllib3/contrib/_securetransport/low_level.pyt_is_certs cCstj}tj||kS(s; Returns True if a given CFTypeRef is an identity. (RtSecIdentityGetTypeIDRRC(RDRE((sZ/usr/lib/python2.7/site-packages/pip/_vendor/urllib3/contrib/_securetransport/low_level.pyt _is_identitys cCstjd}tj|d jd}tj|d}tj}tjj||j d}t j }t j |t ||tdtj|}t|||fS(s This function creates a temporary Mac keychain that we can use to work with credentials. This keychain uses a one-time password and a temporary file to store the data. We expect to have one keychain per socket. The returned SecKeychainRef must be freed by the caller, including calling SecKeychainDelete. Returns a tuple of the SecKeychainRef and the path to the temporary directory that contains it. i(isutf-8N(tosturandomR1t b64encodeR ttempfiletmkdtemptpathtjointencodeRtSecKeychainReftSecKeychainCreateRtFalseRRR5R.(t random_bytestfilenametpasswordt tempdirectoryt keychain_pathtkeychaintstatus((sZ/usr/lib/python2.7/site-packages/pip/_vendor/urllib3/contrib/_securetransport/low_level.pyt_temporary_keychains    c Cskg}g}d}t|d}|j}WdQXztjtj|t|}tj}tj |ddddd|t j |}t |tj |} xt| D]} tj|| } t j| tj} t| r tj| |j| qt| rtj| |j| qqWWd|rStj|ntj|X||fS(s Given a single file, loads all the trust objects from it into arrays and the keychain. Returns a tuple of lists: the first list is a list of identities, the second a list of certs. trbNi(RtopentreadRRRRt CFArrayRefRt SecItemImportRR5R.tCFArrayGetCounttrangetCFArrayGetValueAtIndexRR RFtCFRetaintappendRHR'( RYRNt certificatest identitiest result_arraytft raw_filedatatfiledataR$t result_counttindexRD((sZ/usr/lib/python2.7/site-packages/pip/_vendor/urllib3/contrib/_securetransport/low_level.pyt_load_items_from_filesH       c GsKg}g}d|D}zx=|D]5}t||\}}|j||j|q&W|stj}tj||dtj|}t||j|t j |j dnt j t j dtjt j} x*tj||D]} t j| | qW| SWdx'tj||D]} t j | q/WXdS(s Load certificates and maybe keys from a number of files. Has the end goal of returning a CFArray containing one SecIdentityRef, and then zero or more SecCertificateRef objects, suitable for use as a client certificate trust chain. 
css|]}|r|VqdS(N((RRN((sZ/usr/lib/python2.7/site-packages/pip/_vendor/urllib3/contrib/_securetransport/low_level.pys /siN(RntextendRtSecIdentityReft SecIdentityCreateWithCertificateRR5R.ReRR'tpopR4RR6t itertoolstchainR8( RYtpathsRfRgt file_pathtnew_identitiest new_certst new_identityRZt trust_chainRDtobj((sZ/usr/lib/python2.7/site-packages/pip/_vendor/urllib3/contrib/_securetransport/low_level.pyt_load_client_cert_chain s6      (t__doc__R1RRstreRIR(RLtbindingsRRRtcompiletDOTALLR/RRR%RR.RARFRHR[RnR|(((sZ/usr/lib/python2.7/site-packages/pip/_vendor/urllib3/contrib/_securetransport/low_level.pyt s(           +   ( ;PKZy//Gsite-packages/pip/_vendor/urllib3/contrib/_securetransport/bindings.pycnu[ abc @@sE dZddlmZddlZddlmZddlmZmZm Z m Z m Z m Z m Z mZmZddlmZmZmZedZesedned Zesed nejdZeeeejd Zedkr+edededfneedeZeedeZ eZ!eZ"e Z#eZ$eZ%eZ&eZ'eZ(eZ)eZ*e Z+ee*Z,eZ-eZ.ee$Z/ee%Z0ee&Z1ee'Z2ee(Z3eZ4eZ5eZ6eeZ7e Z8e Z9eeZ:e Z;eZ<eeZ=e Z>e Z?eeZ@eeZAe ZBe ZCe ZDe ZEe ZFe ZGyze/e0ee8ee9e;ee<e=ee1gejH_Ie.ejH_JgejK_Ie+ejK_JgejL_Ie+ejL_JgejM_Ie+ejM_Je-e/gejN_Ie7ejN_Je7gejO_Ie/ejO_Je.egejP_Ie0ejP_Je,e7ee:gejQ_Ie.ejQ_Je e ee!eee=gejR_Ie.ejR_Je=gejS_Ie.ejS_Je/e3ee1gejT_Ie.ejT_Jee.eBeee ZUee.eBee ee ZVe@eUeVgejW_Ie.ejW_Je@e e gejX_Ie.ejX_Je@e1gejY_Ie.ejY_Je@e,e!gejZ_Ie.ejZ_Je@eBgej[_Ie.ej[_Je@e e gej\_Ie.ej\_Je@gej]_Ie.ej]_Je@e e ee gej^_Ie.ej^_Je@e e ee gej__Ie.ej__Je@gej`_Ie.ej`_Je@ee geja_Ie.eja_Je@ee?ee gejb_Ie.ejb_Je@ee?e gejc_Ie.ejc_Je@ee gejd_ee.ejd_Je@ee?ee gejf_Ie.ejf_Je@ee?gejg_Ie.ejg_Je@ee>gejh_Ie.ejh_Je@eeAgeji_Ie.eji_JeAe1gejj_Ie.ejj_JeAe!gejk_le.ejk_JeAeeCgejm_Ie.ejm_JeAgejn_Ie"ejn_JeAe"gejo_Ie7ejo_Je-eEeFgejp_Ie@ejp_Je@eGe!gejq_Ie.ejq_Je@e>gejr_Ie.ejr_Je@e>gejs_Ie.ejs_Je.egejP_Ie0ejP_JeUe_UeVe_Ve@e_@e>e_>e?e_?e:e_:e=e_=eAe_AeCe_Ce8e_8e.e_.e0jtede_ue0jtede_ve,ge jw_Ie,e jw_Je,ge jx_Ide jx_Je,ge jz_Ie+e jz_Je-e e#ge j{_Ie0e j{_Je0e#ge j|_Ie e j|_Je0e e"e#ge j}_Iee j}_Je-e e"ge j~_Ie/e j~_Je/ge j_Ie"e j_Je/ge j_Iee j_Je-ee,ee,e"e5e6ge j_Ie3e j_Je3e,ge j_Ie,e j_Je-ee,e"e4ge j_Ie1e j_Je-e"e4ge j_Ie2e j_Je2ege j_Ide j_Je1ge j_Ie"e j_Je1e"ge j_Iee j_Je-jte de _ejte de _ejte de _ejte de _e,e _,e1e _1e0e _0e3e _3Wnek r ednXdefdYZdefdYZdS(sy This module uses ctypes to bind a whole bunch of functions and constants from SecureTransport. The goal here is to provide the low-level API to SecureTransport. These are essentially the C-level functions and constants, and they're pretty gross to work with. This code is a bastardised version of the code found in Will Bond's oscrypto library. An enormous debt is owed to him for blazing this trail for us. For that reason, this code should be considered to be covered both by urllib3's license and by oscrypto's: Copyright (c) 2015-2016 Will Bond Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions: The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software. THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. 
IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. i(tabsolute_importN(t find_library( tc_void_ptc_int32tc_char_ptc_size_ttc_bytetc_uint32tc_ulongtc_longtc_bool(tCDLLtPOINTERt CFUNCTYPEtSecuritys'The library Security could not be foundtCoreFoundations-The library CoreFoundation could not be foundt.i is1Only OS X 10.8 and newer are supported, not %s.%sit use_errnotkSecImportExportPassphrasetkSecImportItemIdentitytkCFAllocatorDefaulttkCFTypeArrayCallBackstkCFTypeDictionaryKeyCallBackstkCFTypeDictionaryValueCallBackssError initializing ctypestCFConstcB@seZdZedZRS(s_ A class object that acts as essentially a namespace for CoreFoundation constants. i(t__name__t __module__t__doc__tCFStringEncodingtkCFStringEncodingUTF8(((sY/usr/lib/python2.7/site-packages/pip/_vendor/urllib3/contrib/_securetransport/bindings.pyRst SecurityConstcB@seZdZdZdZdZdZdZdZdZ dZ dZ dZ dZ dZdZd Zd ZdZd Zd Zd ZdZdZdZdZdZdZdZdZdZdZdZ dZ!dZ"dZ#dZ$dZ%dZ&dZ'd Z(d!Z)d"Z*d#Z+d$Z,d%Z-d&Z.d'Z/d(Z0d)Z1d*Z2d+Z3d,Z4d-Z5d.Z6d/Z7d0Z8d1Z9d2Z:d3Z;d4Z<d5Z=d6Z>d7Z?d8Z@d9ZAd:ZBd;ZCd<ZDd=ZEd>ZFd?ZGd@ZHdAZIRS(BsU A class object that acts as essentially a namespace for Security constants. iiiiiii iiiiiiiiiiiiiiiiiiiiii iQi,iRi,i0i+i/iiiii$i(i iikiji9i8i#i'i iigi@i3i2iii=i<i5i/iii(JRRRt"kSSLSessionOptionBreakOnServerAutht kSSLProtocol2t kSSLProtocol3t kTLSProtocol1tkTLSProtocol11tkTLSProtocol12tkSSLClientSidetkSSLStreamTypetkSecFormatPEMSequencetkSecTrustResultInvalidtkSecTrustResultProceedtkSecTrustResultDenytkSecTrustResultUnspecifiedt&kSecTrustResultRecoverableTrustFailuret kSecTrustResultFatalTrustFailuretkSecTrustResultOtherErrorterrSSLProtocolterrSSLWouldBlockterrSSLClosedGracefulterrSSLClosedNoNotifyterrSSLClosedAbortterrSSLXCertChainInvalidt errSSLCryptoterrSSLInternalterrSSLCertExpiredterrSSLCertNotYetValidterrSSLUnknownRootCertterrSSLNoRootCertterrSSLHostNameMismatchterrSSLPeerHandshakeFailterrSSLPeerUserCancelledterrSSLWeakPeerEphemeralDHKeyterrSSLServerAuthCompletedterrSSLRecordOverflowterrSecVerifyFailedterrSecNoTrustSettingsterrSecItemNotFoundterrSecInvalidTrustSettingst'TLS_ECDHE_ECDSA_WITH_AES_256_GCM_SHA384t%TLS_ECDHE_RSA_WITH_AES_256_GCM_SHA384t'TLS_ECDHE_ECDSA_WITH_AES_128_GCM_SHA256t%TLS_ECDHE_RSA_WITH_AES_128_GCM_SHA256t#TLS_DHE_DSS_WITH_AES_256_GCM_SHA384t#TLS_DHE_RSA_WITH_AES_256_GCM_SHA384t#TLS_DHE_DSS_WITH_AES_128_GCM_SHA256t#TLS_DHE_RSA_WITH_AES_128_GCM_SHA256t'TLS_ECDHE_ECDSA_WITH_AES_256_CBC_SHA384t%TLS_ECDHE_RSA_WITH_AES_256_CBC_SHA384t$TLS_ECDHE_ECDSA_WITH_AES_256_CBC_SHAt"TLS_ECDHE_RSA_WITH_AES_256_CBC_SHAt#TLS_DHE_RSA_WITH_AES_256_CBC_SHA256t#TLS_DHE_DSS_WITH_AES_256_CBC_SHA256t TLS_DHE_RSA_WITH_AES_256_CBC_SHAt TLS_DHE_DSS_WITH_AES_256_CBC_SHAt'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256t%TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256t$TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHAt"TLS_ECDHE_RSA_WITH_AES_128_CBC_SHAt#TLS_DHE_RSA_WITH_AES_128_CBC_SHA256t#TLS_DHE_DSS_WITH_AES_128_CBC_SHA256t TLS_DHE_RSA_WITH_AES_128_CBC_SHAt TLS_DHE_DSS_WITH_AES_128_CBC_SHAtTLS_RSA_WITH_AES_256_GCM_SHA384tTLS_RSA_WITH_AES_128_GCM_SHA256tTLS_RSA_WITH_AES_256_CBC_SHA256tTLS_RSA_WITH_AES_128_CBC_SHA256tTLS_RSA_WITH_AES_256_CBC_SHAtTLS_RSA_WITH_AES_128_CBC_SHAtTLS_AES_128_GCM_SHA256tTLS_AES_256_GCM_SHA384tTLS_CHACHA20_POLY1305_SHA256(((sY/usr/lib/python2.7/site-packages/pip/_vendor/urllib3/contrib/_securetransport/bindings.pyRs(i i(Rt 
__future__Rtplatformt ctypes.utilRtctypesRRRRRRRR R R R R t security_patht ImportErrortcore_foundation_pathtmac_vertversionttupletmaptinttsplitt version_infotOSErrortTrueRRtBooleantCFIndexRtCFDatatCFStringtCFArraytCFMutableArrayt CFDictionarytCFErrortCFTypetCFTypeIDt CFTypeReftCFAllocatorReftOSStatust CFDataReft CFStringReft CFArrayReftCFMutableArrayReftCFDictionaryReftCFArrayCallBackstCFDictionaryKeyCallBackstCFDictionaryValueCallBackstSecCertificateReftSecExternalFormattSecExternalItemTypetSecIdentityReftSecItemImportExportFlagst SecItemImportExportKeyParameterstSecKeychainReft SSLProtocoltSSLCipherSuitet SSLContextReft SecTrustReftSSLConnectionReftSecTrustResultTypetSecTrustOptionFlagstSSLProtocolSidetSSLConnectionTypetSSLSessionOptiont SecItemImporttargtypestrestypetSecCertificateGetTypeIDtSecIdentityGetTypeIDtSecKeyGetTypeIDtSecCertificateCreateWithDatatSecCertificateCopyDatatSecCopyErrorMessageStringt SecIdentityCreateWithCertificatetSecKeychainCreatetSecKeychainDeletetSecPKCS12Importt SSLReadFunct SSLWriteFunct SSLSetIOFuncst SSLSetPeerIDtSSLSetCertificatetSSLSetCertificateAuthoritiestSSLSetConnectiontSSLSetPeerDomainNamet SSLHandshaketSSLReadtSSLWritetSSLClosetSSLGetNumberSupportedCipherstSSLGetSupportedCipherstSSLSetEnabledCipherstSSLGetNumberEnabledCipherstargtypetSSLGetEnabledCipherstSSLGetNegotiatedCiphertSSLGetNegotiatedProtocolVersiontSSLCopyPeerTrusttSecTrustSetAnchorCertificatest!SecTrustSetAnchorCertificatesOnlyt argstypestSecTrustEvaluatetSecTrustGetCertificateCounttSecTrustGetCertificateAtIndextSSLCreateContexttSSLSetSessionOptiontSSLSetProtocolVersionMintSSLSetProtocolVersionMaxtin_dllRRtCFRetaint CFReleasetNonet CFGetTypeIDtCFStringCreateWithCStringtCFStringGetCStringPtrtCFStringGetCStringt CFDataCreatetCFDataGetLengthtCFDataGetBytePtrtCFDictionaryCreatetCFDictionaryGetValuet CFArrayCreatetCFArrayCreateMutabletCFArrayAppendValuetCFArrayGetCounttCFArrayGetValueAtIndexRRRRtAttributeErrortobjectRR(((sY/usr/lib/python2.7/site-packages/pip/_vendor/urllib3/contrib/_securetransport/bindings.pyts, @                               !                                                                  PKZS%w%w<site-packages/pip/_vendor/urllib3/contrib/securetransport.pynu[""" SecureTranport support for urllib3 via ctypes. This makes platform-native TLS available to urllib3 users on macOS without the use of a compiler. This is an important feature because the Python Package Index is moving to become a TLSv1.2-or-higher server, and the default OpenSSL that ships with macOS is not capable of doing TLSv1.2. The only way to resolve this is to give macOS users an alternative solution to the problem, and that solution is to use SecureTransport. We use ctypes here because this solution must not require a compiler. That's because pip is not allowed to require a compiler either. This is not intended to be a seriously long-term solution to this problem. The hope is that PEP 543 will eventually solve this issue for us, at which point we can retire this contrib module. But in the short term, we need to solve the impending tire fire that is Python on Mac without this kind of contrib module. So...here we are. To use this module, simply import and inject it:: import urllib3.contrib.securetransport urllib3.contrib.securetransport.inject_into_urllib3() Happy TLSing! """ from __future__ import absolute_import import contextlib import ctypes import errno import os.path import shutil import socket import ssl import threading import weakref from .. 
import util from ._securetransport.bindings import ( Security, SecurityConst, CoreFoundation ) from ._securetransport.low_level import ( _assert_no_error, _cert_array_from_pem, _temporary_keychain, _load_client_cert_chain ) try: # Platform-specific: Python 2 from socket import _fileobject except ImportError: # Platform-specific: Python 3 _fileobject = None from ..packages.backports.makefile import backport_makefile try: memoryview(b'') except NameError: raise ImportError("SecureTransport only works on Pythons with memoryview") __all__ = ['inject_into_urllib3', 'extract_from_urllib3'] # SNI always works HAS_SNI = True orig_util_HAS_SNI = util.HAS_SNI orig_util_SSLContext = util.ssl_.SSLContext # This dictionary is used by the read callback to obtain a handle to the # calling wrapped socket. This is a pretty silly approach, but for now it'll # do. I feel like I should be able to smuggle a handle to the wrapped socket # directly in the SSLConnectionRef, but for now this approach will work I # guess. # # We need to lock around this structure for inserts, but we don't do it for # reads/writes in the callbacks. The reasoning here goes as follows: # # 1. It is not possible to call into the callbacks before the dictionary is # populated, so once in the callback the id must be in the dictionary. # 2. The callbacks don't mutate the dictionary, they only read from it, and # so cannot conflict with any of the insertions. # # This is good: if we had to lock in the callbacks we'd drastically slow down # the performance of this code. _connection_refs = weakref.WeakValueDictionary() _connection_ref_lock = threading.Lock() # Limit writes to 16kB. This is OpenSSL's limit, but we'll cargo-cult it over # for no better reason than we need *a* limit, and this one is right there. SSL_WRITE_BLOCKSIZE = 16384 # This is our equivalent of util.ssl_.DEFAULT_CIPHERS, but expanded out to # individual cipher suites. We need to do this because this is how # SecureTransport wants them.
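#
# A minimal usage sketch (editorial addition, not part of the original
# module): once inject_into_urllib3() has been called, ordinary urllib3 code
# picks up the SecureTransport-backed context transparently. PoolManager and
# request() are the standard urllib3 entry points (they are not defined in
# this file) and the URL is only a placeholder:
#
#     import urllib3
#     import urllib3.contrib.securetransport
#
#     urllib3.contrib.securetransport.inject_into_urllib3()
#     http = urllib3.PoolManager()
#     resp = http.request("GET", "https://example.com/")
#
# The list below enumerates the individual cipher-suite constants described
# in the comment above.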
CIPHER_SUITES = [ SecurityConst.TLS_AES_256_GCM_SHA384, SecurityConst.TLS_CHACHA20_POLY1305_SHA256, SecurityConst.TLS_AES_128_GCM_SHA256, SecurityConst.TLS_ECDHE_ECDSA_WITH_AES_256_GCM_SHA384, SecurityConst.TLS_ECDHE_RSA_WITH_AES_256_GCM_SHA384, SecurityConst.TLS_ECDHE_ECDSA_WITH_AES_128_GCM_SHA256, SecurityConst.TLS_ECDHE_RSA_WITH_AES_128_GCM_SHA256, SecurityConst.TLS_DHE_DSS_WITH_AES_256_GCM_SHA384, SecurityConst.TLS_DHE_RSA_WITH_AES_256_GCM_SHA384, SecurityConst.TLS_DHE_DSS_WITH_AES_128_GCM_SHA256, SecurityConst.TLS_DHE_RSA_WITH_AES_128_GCM_SHA256, SecurityConst.TLS_ECDHE_ECDSA_WITH_AES_256_CBC_SHA384, SecurityConst.TLS_ECDHE_RSA_WITH_AES_256_CBC_SHA384, SecurityConst.TLS_ECDHE_ECDSA_WITH_AES_256_CBC_SHA, SecurityConst.TLS_ECDHE_RSA_WITH_AES_256_CBC_SHA, SecurityConst.TLS_DHE_RSA_WITH_AES_256_CBC_SHA256, SecurityConst.TLS_DHE_DSS_WITH_AES_256_CBC_SHA256, SecurityConst.TLS_DHE_RSA_WITH_AES_256_CBC_SHA, SecurityConst.TLS_DHE_DSS_WITH_AES_256_CBC_SHA, SecurityConst.TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256, SecurityConst.TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256, SecurityConst.TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA, SecurityConst.TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA, SecurityConst.TLS_DHE_RSA_WITH_AES_128_CBC_SHA256, SecurityConst.TLS_DHE_DSS_WITH_AES_128_CBC_SHA256, SecurityConst.TLS_DHE_RSA_WITH_AES_128_CBC_SHA, SecurityConst.TLS_DHE_DSS_WITH_AES_128_CBC_SHA, SecurityConst.TLS_RSA_WITH_AES_256_GCM_SHA384, SecurityConst.TLS_RSA_WITH_AES_128_GCM_SHA256, SecurityConst.TLS_RSA_WITH_AES_256_CBC_SHA256, SecurityConst.TLS_RSA_WITH_AES_128_CBC_SHA256, SecurityConst.TLS_RSA_WITH_AES_256_CBC_SHA, SecurityConst.TLS_RSA_WITH_AES_128_CBC_SHA, ] # Basically this is simple: for PROTOCOL_SSLv23 we turn it into a low of # TLSv1 and a high of TLSv1.2. For everything else, we pin to that version. _protocol_to_min_max = { ssl.PROTOCOL_SSLv23: (SecurityConst.kTLSProtocol1, SecurityConst.kTLSProtocol12), } if hasattr(ssl, "PROTOCOL_SSLv2"): _protocol_to_min_max[ssl.PROTOCOL_SSLv2] = ( SecurityConst.kSSLProtocol2, SecurityConst.kSSLProtocol2 ) if hasattr(ssl, "PROTOCOL_SSLv3"): _protocol_to_min_max[ssl.PROTOCOL_SSLv3] = ( SecurityConst.kSSLProtocol3, SecurityConst.kSSLProtocol3 ) if hasattr(ssl, "PROTOCOL_TLSv1"): _protocol_to_min_max[ssl.PROTOCOL_TLSv1] = ( SecurityConst.kTLSProtocol1, SecurityConst.kTLSProtocol1 ) if hasattr(ssl, "PROTOCOL_TLSv1_1"): _protocol_to_min_max[ssl.PROTOCOL_TLSv1_1] = ( SecurityConst.kTLSProtocol11, SecurityConst.kTLSProtocol11 ) if hasattr(ssl, "PROTOCOL_TLSv1_2"): _protocol_to_min_max[ssl.PROTOCOL_TLSv1_2] = ( SecurityConst.kTLSProtocol12, SecurityConst.kTLSProtocol12 ) if hasattr(ssl, "PROTOCOL_TLS"): _protocol_to_min_max[ssl.PROTOCOL_TLS] = _protocol_to_min_max[ssl.PROTOCOL_SSLv23] def inject_into_urllib3(): """ Monkey-patch urllib3 with SecureTransport-backed SSL-support. """ util.ssl_.SSLContext = SecureTransportContext util.HAS_SNI = HAS_SNI util.ssl_.HAS_SNI = HAS_SNI util.IS_SECURETRANSPORT = True util.ssl_.IS_SECURETRANSPORT = True def extract_from_urllib3(): """ Undo monkey-patching by :func:`inject_into_urllib3`. """ util.ssl_.SSLContext = orig_util_SSLContext util.HAS_SNI = orig_util_HAS_SNI util.ssl_.HAS_SNI = orig_util_HAS_SNI util.IS_SECURETRANSPORT = False util.ssl_.IS_SECURETRANSPORT = False def _read_callback(connection_id, data_buffer, data_length_pointer): """ SecureTransport read callback. This is called by ST to request that data be returned from the socket. 
""" wrapped_socket = None try: wrapped_socket = _connection_refs.get(connection_id) if wrapped_socket is None: return SecurityConst.errSSLInternal base_socket = wrapped_socket.socket requested_length = data_length_pointer[0] timeout = wrapped_socket.gettimeout() error = None read_count = 0 buffer = (ctypes.c_char * requested_length).from_address(data_buffer) buffer_view = memoryview(buffer) try: while read_count < requested_length: if timeout is None or timeout >= 0: readables = util.wait_for_read([base_socket], timeout) if not readables: raise socket.error(errno.EAGAIN, 'timed out') # We need to tell ctypes that we have a buffer that can be # written to. Upsettingly, we do that like this: chunk_size = base_socket.recv_into( buffer_view[read_count:requested_length] ) read_count += chunk_size if not chunk_size: if not read_count: return SecurityConst.errSSLClosedGraceful break except (socket.error) as e: error = e.errno if error is not None and error != errno.EAGAIN: if error == errno.ECONNRESET: return SecurityConst.errSSLClosedAbort raise data_length_pointer[0] = read_count if read_count != requested_length: return SecurityConst.errSSLWouldBlock return 0 except Exception as e: if wrapped_socket is not None: wrapped_socket._exception = e return SecurityConst.errSSLInternal def _write_callback(connection_id, data_buffer, data_length_pointer): """ SecureTransport write callback. This is called by ST to request that data actually be sent on the network. """ wrapped_socket = None try: wrapped_socket = _connection_refs.get(connection_id) if wrapped_socket is None: return SecurityConst.errSSLInternal base_socket = wrapped_socket.socket bytes_to_write = data_length_pointer[0] data = ctypes.string_at(data_buffer, bytes_to_write) timeout = wrapped_socket.gettimeout() error = None sent = 0 try: while sent < bytes_to_write: if timeout is None or timeout >= 0: writables = util.wait_for_write([base_socket], timeout) if not writables: raise socket.error(errno.EAGAIN, 'timed out') chunk_sent = base_socket.send(data) sent += chunk_sent # This has some needless copying here, but I'm not sure there's # much value in optimising this data path. data = data[chunk_sent:] except (socket.error) as e: error = e.errno if error is not None and error != errno.EAGAIN: if error == errno.ECONNRESET: return SecurityConst.errSSLClosedAbort raise data_length_pointer[0] = sent if sent != bytes_to_write: return SecurityConst.errSSLWouldBlock return 0 except Exception as e: if wrapped_socket is not None: wrapped_socket._exception = e return SecurityConst.errSSLInternal # We need to keep these two objects references alive: if they get GC'd while # in use then SecureTransport could attempt to call a function that is in freed # memory. That would be...uh...bad. Yeah, that's the word. Bad. _read_callback_pointer = Security.SSLReadFunc(_read_callback) _write_callback_pointer = Security.SSLWriteFunc(_write_callback) class WrappedSocket(object): """ API-compatibility wrapper for Python's OpenSSL wrapped socket object. Note: _makefile_refs, _drop(), and _reuse() are needed for the garbage collector of PyPy. """ def __init__(self, socket): self.socket = socket self.context = None self._makefile_refs = 0 self._closed = False self._exception = None self._keychain = None self._keychain_dir = None self._client_cert_chain = None # We save off the previously-configured timeout and then set it to # zero. 
This is done because we use select and friends to handle the # timeouts, but if we leave the timeout set on the lower socket then # Python will "kindly" call select on that socket again for us. Avoid # that by forcing the timeout to zero. self._timeout = self.socket.gettimeout() self.socket.settimeout(0) @contextlib.contextmanager def _raise_on_error(self): """ A context manager that can be used to wrap calls that do I/O from SecureTransport. If any of the I/O callbacks hit an exception, this context manager will correctly propagate the exception after the fact. This avoids silently swallowing those exceptions. It also correctly forces the socket closed. """ self._exception = None # We explicitly don't catch around this yield because in the unlikely # event that an exception was hit in the block we don't want to swallow # it. yield if self._exception is not None: exception, self._exception = self._exception, None self.close() raise exception def _set_ciphers(self): """ Sets up the allowed ciphers. By default this matches the set in util.ssl_.DEFAULT_CIPHERS, at least as supported by macOS. This is done custom and doesn't allow changing at this time, mostly because parsing OpenSSL cipher strings is going to be a freaking nightmare. """ ciphers = (Security.SSLCipherSuite * len(CIPHER_SUITES))(*CIPHER_SUITES) result = Security.SSLSetEnabledCiphers( self.context, ciphers, len(CIPHER_SUITES) ) _assert_no_error(result) def _custom_validate(self, verify, trust_bundle): """ Called when we have set custom validation. We do this in two cases: first, when cert validation is entirely disabled; and second, when using a custom trust DB. """ # If we disabled cert validation, just say: cool. if not verify: return # We want data in memory, so load it up. if os.path.isfile(trust_bundle): with open(trust_bundle, 'rb') as f: trust_bundle = f.read() cert_array = None trust = Security.SecTrustRef() try: # Get a CFArray that contains the certs we want. cert_array = _cert_array_from_pem(trust_bundle) # Ok, now the hard part. We want to get the SecTrustRef that ST has # created for this connection, shove our CAs into it, tell ST to # ignore everything else it knows, and then ask if it can build a # chain. This is a buuuunch of code. result = Security.SSLCopyPeerTrust( self.context, ctypes.byref(trust) ) _assert_no_error(result) if not trust: raise ssl.SSLError("Failed to copy trust reference") result = Security.SecTrustSetAnchorCertificates(trust, cert_array) _assert_no_error(result) result = Security.SecTrustSetAnchorCertificatesOnly(trust, True) _assert_no_error(result) trust_result = Security.SecTrustResultType() result = Security.SecTrustEvaluate( trust, ctypes.byref(trust_result) ) _assert_no_error(result) finally: if trust: CoreFoundation.CFRelease(trust) if cert_array is None: CoreFoundation.CFRelease(cert_array) # Ok, now we can look at what the result was. successes = ( SecurityConst.kSecTrustResultUnspecified, SecurityConst.kSecTrustResultProceed ) if trust_result.value not in successes: raise ssl.SSLError( "certificate verify failed, error code: %d" % trust_result.value ) def handshake(self, server_hostname, verify, trust_bundle, min_version, max_version, client_cert, client_key, client_key_passphrase): """ Actually performs the TLS handshake. This is run automatically by wrapped socket, and shouldn't be needed in user code. """ # First, we do the initial bits of connection setup. We need to create # a context, set its I/O funcs, and set the connection reference. 
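        # (Editorial note, added for clarity: the "I/O funcs" mentioned just
        # above are the module-level _read_callback_pointer and
        # _write_callback_pointer defined earlier in this file. Once they are
        # installed with SSLSetIOFuncs, SecureTransport calls back into them
        # whenever SSLHandshake, SSLRead or SSLWrite needs to touch the
        # underlying socket.)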
self.context = Security.SSLCreateContext( None, SecurityConst.kSSLClientSide, SecurityConst.kSSLStreamType ) result = Security.SSLSetIOFuncs( self.context, _read_callback_pointer, _write_callback_pointer ) _assert_no_error(result) # Here we need to compute the handle to use. We do this by taking the # id of self modulo 2**31 - 1. If this is already in the dictionary, we # just keep incrementing by one until we find a free space. with _connection_ref_lock: handle = id(self) % 2147483647 while handle in _connection_refs: handle = (handle + 1) % 2147483647 _connection_refs[handle] = self result = Security.SSLSetConnection(self.context, handle) _assert_no_error(result) # If we have a server hostname, we should set that too. if server_hostname: if not isinstance(server_hostname, bytes): server_hostname = server_hostname.encode('utf-8') result = Security.SSLSetPeerDomainName( self.context, server_hostname, len(server_hostname) ) _assert_no_error(result) # Setup the ciphers. self._set_ciphers() # Set the minimum and maximum TLS versions. result = Security.SSLSetProtocolVersionMin(self.context, min_version) _assert_no_error(result) result = Security.SSLSetProtocolVersionMax(self.context, max_version) _assert_no_error(result) # If there's a trust DB, we need to use it. We do that by telling # SecureTransport to break on server auth. We also do that if we don't # want to validate the certs at all: we just won't actually do any # authing in that case. if not verify or trust_bundle is not None: result = Security.SSLSetSessionOption( self.context, SecurityConst.kSSLSessionOptionBreakOnServerAuth, True ) _assert_no_error(result) # If there's a client cert, we need to use it. if client_cert: self._keychain, self._keychain_dir = _temporary_keychain() self._client_cert_chain = _load_client_cert_chain( self._keychain, client_cert, client_key ) result = Security.SSLSetCertificate( self.context, self._client_cert_chain ) _assert_no_error(result) while True: with self._raise_on_error(): result = Security.SSLHandshake(self.context) if result == SecurityConst.errSSLWouldBlock: raise socket.timeout("handshake timed out") elif result == SecurityConst.errSSLServerAuthCompleted: self._custom_validate(verify, trust_bundle) continue else: _assert_no_error(result) break def fileno(self): return self.socket.fileno() # Copy-pasted from Python 3.5 source code def _decref_socketios(self): if self._makefile_refs > 0: self._makefile_refs -= 1 if self._closed: self.close() def recv(self, bufsiz): buffer = ctypes.create_string_buffer(bufsiz) bytes_read = self.recv_into(buffer, bufsiz) data = buffer[:bytes_read] return data def recv_into(self, buffer, nbytes=None): # Read short on EOF. if self._closed: return 0 if nbytes is None: nbytes = len(buffer) buffer = (ctypes.c_char * nbytes).from_buffer(buffer) processed_bytes = ctypes.c_size_t(0) with self._raise_on_error(): result = Security.SSLRead( self.context, buffer, nbytes, ctypes.byref(processed_bytes) ) # There are some result codes that we want to treat as "not always # errors". Specifically, those are errSSLWouldBlock, # errSSLClosedGraceful, and errSSLClosedNoNotify. if (result == SecurityConst.errSSLWouldBlock): # If we didn't process any bytes, then this was just a time out. # However, we can get errSSLWouldBlock in situations when we *did* # read some data, and in those cases we should just read "short" # and return. if processed_bytes.value == 0: # Timed out, no data read. 
raise socket.timeout("recv timed out") elif result in (SecurityConst.errSSLClosedGraceful, SecurityConst.errSSLClosedNoNotify): # The remote peer has closed this connection. We should do so as # well. Note that we don't actually return here because in # principle this could actually be fired along with return data. # It's unlikely though. self.close() else: _assert_no_error(result) # Ok, we read and probably succeeded. We should return whatever data # was actually read. return processed_bytes.value def settimeout(self, timeout): self._timeout = timeout def gettimeout(self): return self._timeout def send(self, data): processed_bytes = ctypes.c_size_t(0) with self._raise_on_error(): result = Security.SSLWrite( self.context, data, len(data), ctypes.byref(processed_bytes) ) if result == SecurityConst.errSSLWouldBlock and processed_bytes.value == 0: # Timed out raise socket.timeout("send timed out") else: _assert_no_error(result) # We sent, and probably succeeded. Tell them how much we sent. return processed_bytes.value def sendall(self, data): total_sent = 0 while total_sent < len(data): sent = self.send(data[total_sent:total_sent + SSL_WRITE_BLOCKSIZE]) total_sent += sent def shutdown(self): with self._raise_on_error(): Security.SSLClose(self.context) def close(self): # TODO: should I do clean shutdown here? Do I have to? if self._makefile_refs < 1: self._closed = True if self.context: CoreFoundation.CFRelease(self.context) self.context = None if self._client_cert_chain: CoreFoundation.CFRelease(self._client_cert_chain) self._client_cert_chain = None if self._keychain: Security.SecKeychainDelete(self._keychain) CoreFoundation.CFRelease(self._keychain) shutil.rmtree(self._keychain_dir) self._keychain = self._keychain_dir = None return self.socket.close() else: self._makefile_refs -= 1 def getpeercert(self, binary_form=False): # Urgh, annoying. # # Here's how we do this: # # 1. Call SSLCopyPeerTrust to get hold of the trust object for this # connection. # 2. Call SecTrustGetCertificateAtIndex for index 0 to get the leaf. # 3. To get the CN, call SecCertificateCopyCommonName and process that # string so that it's of the appropriate type. # 4. To get the SAN, we need to do something a bit more complex: # a. Call SecCertificateCopyValues to get the data, requesting # kSecOIDSubjectAltName. # b. Mess about with this dictionary to try to get the SANs out. # # This is gross. Really gross. It's going to be a few hundred LoC extra # just to repeat something that SecureTransport can *already do*. So my # operating assumption at this time is that what we want to do is # instead to just flag to urllib3 that it shouldn't do its own hostname # validation when using SecureTransport. if not binary_form: raise ValueError( "SecureTransport only supports dumping binary certs" ) trust = Security.SecTrustRef() certdata = None der_bytes = None try: # Grab the trust store. result = Security.SSLCopyPeerTrust( self.context, ctypes.byref(trust) ) _assert_no_error(result) if not trust: # Probably we haven't done the handshake yet. No biggie. return None cert_count = Security.SecTrustGetCertificateCount(trust) if not cert_count: # Also a case that might happen if we haven't handshaked. # Handshook? Handshaken? return None leaf = Security.SecTrustGetCertificateAtIndex(trust, 0) assert leaf # Ok, now we want the DER bytes. 
certdata = Security.SecCertificateCopyData(leaf) assert certdata data_length = CoreFoundation.CFDataGetLength(certdata) data_buffer = CoreFoundation.CFDataGetBytePtr(certdata) der_bytes = ctypes.string_at(data_buffer, data_length) finally: if certdata: CoreFoundation.CFRelease(certdata) if trust: CoreFoundation.CFRelease(trust) return der_bytes def _reuse(self): self._makefile_refs += 1 def _drop(self): if self._makefile_refs < 1: self.close() else: self._makefile_refs -= 1 if _fileobject: # Platform-specific: Python 2 def makefile(self, mode, bufsize=-1): self._makefile_refs += 1 return _fileobject(self, mode, bufsize, close=True) else: # Platform-specific: Python 3 def makefile(self, mode="r", buffering=None, *args, **kwargs): # We disable buffering with SecureTransport because it conflicts with # the buffering that ST does internally (see issue #1153 for more). buffering = 0 return backport_makefile(self, mode, buffering, *args, **kwargs) WrappedSocket.makefile = makefile class SecureTransportContext(object): """ I am a wrapper class for the SecureTransport library, to translate the interface of the standard library ``SSLContext`` object to calls into SecureTransport. """ def __init__(self, protocol): self._min_version, self._max_version = _protocol_to_min_max[protocol] self._options = 0 self._verify = False self._trust_bundle = None self._client_cert = None self._client_key = None self._client_key_passphrase = None @property def check_hostname(self): """ SecureTransport cannot have its hostname checking disabled. For more, see the comment on getpeercert() in this file. """ return True @check_hostname.setter def check_hostname(self, value): """ SecureTransport cannot have its hostname checking disabled. For more, see the comment on getpeercert() in this file. """ pass @property def options(self): # TODO: Well, crap. # # So this is the bit of the code that is the most likely to cause us # trouble. Essentially we need to enumerate all of the SSL options that # users might want to use and try to see if we can sensibly translate # them, or whether we should just ignore them. return self._options @options.setter def options(self, value): # TODO: Update in line with above. self._options = value @property def verify_mode(self): return ssl.CERT_REQUIRED if self._verify else ssl.CERT_NONE @verify_mode.setter def verify_mode(self, value): self._verify = True if value == ssl.CERT_REQUIRED else False def set_default_verify_paths(self): # So, this has to do something a bit weird. Specifically, what it does # is nothing. # # This means that, if we had previously had load_verify_locations # called, this does not undo that. We need to do that because it turns # out that the rest of the urllib3 code will attempt to load the # default verify paths if it hasn't been told about any paths, even if # the context itself was sometime earlier. We resolve that by just # ignoring it. pass def load_default_certs(self): return self.set_default_verify_paths() def set_ciphers(self, ciphers): # For now, we just require the default cipher string. if ciphers != util.ssl_.DEFAULT_CIPHERS: raise ValueError( "SecureTransport doesn't support custom cipher strings" ) def load_verify_locations(self, cafile=None, capath=None, cadata=None): # OK, we only really support cadata and cafile. 
if capath is not None: raise ValueError( "SecureTransport does not support cert directories" ) self._trust_bundle = cafile or cadata def load_cert_chain(self, certfile, keyfile=None, password=None): self._client_cert = certfile self._client_key = keyfile self._client_cert_passphrase = password def wrap_socket(self, sock, server_side=False, do_handshake_on_connect=True, suppress_ragged_eofs=True, server_hostname=None): # So, what do we do here? Firstly, we assert some properties. This is a # stripped down shim, so there is some functionality we don't support. # See PEP 543 for the real deal. assert not server_side assert do_handshake_on_connect assert suppress_ragged_eofs # Ok, we're good to go. Now we want to create the wrapped socket object # and store it in the appropriate place. wrapped_socket = WrappedSocket(sock) # Now we can handshake wrapped_socket.handshake( server_hostname, self._verify, self._trust_bundle, self._min_version, self._max_version, self._client_cert, self._client_key, self._client_key_passphrase ) return wrapped_socket PKZ^^3site-packages/pip/_vendor/urllib3/contrib/socks.pyonu[ abc@@sdZddlmZyddlZWn@ek rhddlZddlmZejdenXddl m Z m Z ddlmZmZdd lmZmZdd lmZmZdd lmZdd lmZyddlZWnek r dZnXd efdYZdeefdYZdefdYZdefdYZdefdYZ dS(s This module contains provisional support for SOCKS proxies from within urllib3. This module supports SOCKS4 (specifically the SOCKS4A variant) and SOCKS5. To enable its functionality, either install PySocks or install this module with the ``socks`` extra. The SOCKS implementation supports the full range of urllib3 features. It also supports the following SOCKS features: - SOCKS4 - SOCKS4a - SOCKS5 - Usernames and passwords for the SOCKS proxy Known Limitations: - Currently PySocks does not support contacting remote websites via literal IPv6 addresses. Any such connection attempt will fail. You must use a domain name. - Currently PySocks does not support IPv6 connections to the SOCKS proxy. Any such connection attempt will fail. i(tabsolute_importNi(tDependencyWarningsSOCKS support in urllib3 requires the installation of optional dependencies: specifically, PySocks. For more information, see https://urllib3.readthedocs.io/en/latest/contrib.html#socks-proxies(terrorttimeout(tHTTPConnectiontHTTPSConnection(tHTTPConnectionPooltHTTPSConnectionPool(tConnectTimeoutErrortNewConnectionError(t PoolManager(t parse_urltSOCKSConnectioncB@s eZdZdZdZRS(sG A plain-text HTTP connection that connects via a SOCKS proxy. cO@s/|jd|_tt|j||dS(Nt_socks_options(tpopR tsuperR t__init__(tselftargstkwargs((sE/usr/lib/python2.7/site-packages/pip/_vendor/urllib3/contrib/socks.pyR?scC@si}|jr|j|ds2    FPKZ*q;;6site-packages/pip/_vendor/urllib3/contrib/pyopenssl.pynu[""" SSL with SNI_-support for Python 2. Follow these instructions if you would like to verify SSL certificates in Python 2. Note, the default libraries do *not* do certificate checking; you need to do additional work to validate certificates yourself. This needs the following packages installed: * pyOpenSSL (tested with 16.0.0) * cryptography (minimum 1.3.4, from pyopenssl) * idna (minimum 2.0, from cryptography) However, pyopenssl depends on cryptography, which depends on idna, so while we use all three directly here we end up having relatively few packages required. You can install them with the following command: pip install pyopenssl cryptography idna To activate certificate checking, call :func:`~urllib3.contrib.pyopenssl.inject_into_urllib3` from your Python code before you begin making HTTP requests. 
This can be done in a ``sitecustomize`` module, or at any other time before your application begins using ``urllib3``, like this:: try: import urllib3.contrib.pyopenssl urllib3.contrib.pyopenssl.inject_into_urllib3() except ImportError: pass Now you can use :mod:`urllib3` as you normally would, and it will support SNI when the required modules are installed. Activating this module also has the positive side effect of disabling SSL/TLS compression in Python 2 (see `CRIME attack`_). If you want to configure the default list of supported cipher suites, you can set the ``urllib3.contrib.pyopenssl.DEFAULT_SSL_CIPHER_LIST`` variable. .. _sni: https://en.wikipedia.org/wiki/Server_Name_Indication .. _crime attack: https://en.wikipedia.org/wiki/CRIME_(security_exploit) """ from __future__ import absolute_import import OpenSSL.SSL from cryptography import x509 from cryptography.hazmat.backends.openssl import backend as openssl_backend from cryptography.hazmat.backends.openssl.x509 import _Certificate from socket import timeout, error as SocketError from io import BytesIO try: # Platform-specific: Python 2 from socket import _fileobject except ImportError: # Platform-specific: Python 3 _fileobject = None from ..packages.backports.makefile import backport_makefile import logging import ssl from ..packages import six import sys from .. import util __all__ = ['inject_into_urllib3', 'extract_from_urllib3'] # SNI always works. HAS_SNI = True # Map from urllib3 to PyOpenSSL compatible parameter-values. _openssl_versions = { ssl.PROTOCOL_SSLv23: OpenSSL.SSL.SSLv23_METHOD, ssl.PROTOCOL_TLSv1: OpenSSL.SSL.TLSv1_METHOD, } if hasattr(ssl, 'PROTOCOL_TLSv1_1') and hasattr(OpenSSL.SSL, 'TLSv1_1_METHOD'): _openssl_versions[ssl.PROTOCOL_TLSv1_1] = OpenSSL.SSL.TLSv1_1_METHOD if hasattr(ssl, 'PROTOCOL_TLSv1_2') and hasattr(OpenSSL.SSL, 'TLSv1_2_METHOD'): _openssl_versions[ssl.PROTOCOL_TLSv1_2] = OpenSSL.SSL.TLSv1_2_METHOD try: _openssl_versions.update({ssl.PROTOCOL_SSLv3: OpenSSL.SSL.SSLv3_METHOD}) except AttributeError: pass _stdlib_to_openssl_verify = { ssl.CERT_NONE: OpenSSL.SSL.VERIFY_NONE, ssl.CERT_OPTIONAL: OpenSSL.SSL.VERIFY_PEER, ssl.CERT_REQUIRED: OpenSSL.SSL.VERIFY_PEER + OpenSSL.SSL.VERIFY_FAIL_IF_NO_PEER_CERT, } _openssl_to_stdlib_verify = dict( (v, k) for k, v in _stdlib_to_openssl_verify.items() ) # OpenSSL will only write 16K at a time SSL_WRITE_BLOCKSIZE = 16384 orig_util_HAS_SNI = util.HAS_SNI orig_util_SSLContext = util.ssl_.SSLContext log = logging.getLogger(__name__) def inject_into_urllib3(): 'Monkey-patch urllib3 with PyOpenSSL-backed SSL-support.' _validate_dependencies_met() util.ssl_.SSLContext = PyOpenSSLContext util.HAS_SNI = HAS_SNI util.ssl_.HAS_SNI = HAS_SNI util.IS_PYOPENSSL = True util.ssl_.IS_PYOPENSSL = True def extract_from_urllib3(): 'Undo monkey-patching by :func:`inject_into_urllib3`.' util.ssl_.SSLContext = orig_util_SSLContext util.HAS_SNI = orig_util_HAS_SNI util.ssl_.HAS_SNI = orig_util_HAS_SNI util.IS_PYOPENSSL = False util.ssl_.IS_PYOPENSSL = False def _validate_dependencies_met(): """ Verifies that PyOpenSSL's package-level dependencies have been met. Throws `ImportError` if they are not met. """ # Method added in `cryptography==1.1`; not available in older versions from cryptography.x509.extensions import Extensions if getattr(Extensions, "get_extension_for_class", None) is None: raise ImportError("'cryptography' module missing required functionality. " "Try upgrading to v1.3.4 or newer.") # pyOpenSSL 0.14 and above use cryptography for OpenSSL bindings. 
The _x509 # attribute is only present on those versions. from OpenSSL.crypto import X509 x509 = X509() if getattr(x509, "_x509", None) is None: raise ImportError("'pyOpenSSL' module missing required functionality. " "Try upgrading to v0.14 or newer.") def _dnsname_to_stdlib(name): """ Converts a dNSName SubjectAlternativeName field to the form used by the standard library on the given Python version. Cryptography produces a dNSName as a unicode string that was idna-decoded from ASCII bytes. We need to idna-encode that string to get it back, and then on Python 3 we also need to convert to unicode via UTF-8 (the stdlib uses PyUnicode_FromStringAndSize on it, which decodes via UTF-8). """ def idna_encode(name): """ Borrowed wholesale from the Python Cryptography Project. It turns out that we can't just safely call `idna.encode`: it can explode for wildcard names. This avoids that problem. """ import idna for prefix in [u'*.', u'.']: if name.startswith(prefix): name = name[len(prefix):] return prefix.encode('ascii') + idna.encode(name) return idna.encode(name) name = idna_encode(name) if sys.version_info >= (3, 0): name = name.decode('utf-8') return name def get_subj_alt_name(peer_cert): """ Given an PyOpenSSL certificate, provides all the subject alternative names. """ # Pass the cert to cryptography, which has much better APIs for this. if hasattr(peer_cert, "to_cryptography"): cert = peer_cert.to_cryptography() else: # This is technically using private APIs, but should work across all # relevant versions before PyOpenSSL got a proper API for this. cert = _Certificate(openssl_backend, peer_cert._x509) # We want to find the SAN extension. Ask Cryptography to locate it (it's # faster than looping in Python) try: ext = cert.extensions.get_extension_for_class( x509.SubjectAlternativeName ).value except x509.ExtensionNotFound: # No such extension, return the empty list. return [] except (x509.DuplicateExtension, x509.UnsupportedExtension, x509.UnsupportedGeneralNameType, UnicodeError) as e: # A problem has been found with the quality of the certificate. Assume # no SAN field is present. log.warning( "A problem was encountered with the certificate that prevented " "urllib3 from finding the SubjectAlternativeName field. This can " "affect certificate validation. The error was %s", e, ) return [] # We want to return dNSName and iPAddress fields. We need to cast the IPs # back to strings because the match_hostname function wants them as # strings. # Sadly the DNS names need to be idna encoded and then, on Python 3, UTF-8 # decoded. This is pretty frustrating, but that's what the standard library # does with certificates, and so we need to attempt to do the same. names = [ ('DNS', _dnsname_to_stdlib(name)) for name in ext.get_values_for_type(x509.DNSName) ] names.extend( ('IP Address', str(name)) for name in ext.get_values_for_type(x509.IPAddress) ) return names class WrappedSocket(object): '''API-compatibility wrapper for Python OpenSSL's Connection-class. Note: _makefile_refs, _drop() and _reuse() are needed for the garbage collector of pypy. 
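    Editorial sketch (not part of the original docstring): instances are
    normally produced by ``PyOpenSSLContext.wrap_socket()`` below rather than
    constructed directly, after which they behave like an ``ssl``-wrapped
    socket; ``ctx``, ``sock`` and the hostname are placeholders::

        tls = ctx.wrap_socket(sock, server_hostname='example.com')
        tls.sendall(b'ping')
        data = tls.recv(1024)
        cert = tls.getpeercert()
        tls.close()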
''' def __init__(self, connection, socket, suppress_ragged_eofs=True): self.connection = connection self.socket = socket self.suppress_ragged_eofs = suppress_ragged_eofs self._makefile_refs = 0 self._closed = False def fileno(self): return self.socket.fileno() # Copy-pasted from Python 3.5 source code def _decref_socketios(self): if self._makefile_refs > 0: self._makefile_refs -= 1 if self._closed: self.close() def recv(self, *args, **kwargs): try: data = self.connection.recv(*args, **kwargs) except OpenSSL.SSL.SysCallError as e: if self.suppress_ragged_eofs and e.args == (-1, 'Unexpected EOF'): return b'' else: raise SocketError(str(e)) except OpenSSL.SSL.ZeroReturnError as e: if self.connection.get_shutdown() == OpenSSL.SSL.RECEIVED_SHUTDOWN: return b'' else: raise except OpenSSL.SSL.WantReadError: rd = util.wait_for_read(self.socket, self.socket.gettimeout()) if not rd: raise timeout('The read operation timed out') else: return self.recv(*args, **kwargs) else: return data def recv_into(self, *args, **kwargs): try: return self.connection.recv_into(*args, **kwargs) except OpenSSL.SSL.SysCallError as e: if self.suppress_ragged_eofs and e.args == (-1, 'Unexpected EOF'): return 0 else: raise SocketError(str(e)) except OpenSSL.SSL.ZeroReturnError as e: if self.connection.get_shutdown() == OpenSSL.SSL.RECEIVED_SHUTDOWN: return 0 else: raise except OpenSSL.SSL.WantReadError: rd = util.wait_for_read(self.socket, self.socket.gettimeout()) if not rd: raise timeout('The read operation timed out') else: return self.recv_into(*args, **kwargs) def settimeout(self, timeout): return self.socket.settimeout(timeout) def _send_until_done(self, data): while True: try: return self.connection.send(data) except OpenSSL.SSL.WantWriteError: wr = util.wait_for_write(self.socket, self.socket.gettimeout()) if not wr: raise timeout() continue except OpenSSL.SSL.SysCallError as e: raise SocketError(str(e)) def sendall(self, data): total_sent = 0 while total_sent < len(data): sent = self._send_until_done(data[total_sent:total_sent + SSL_WRITE_BLOCKSIZE]) total_sent += sent def shutdown(self): # FIXME rethrow compatible exceptions should we ever use this self.connection.shutdown() def close(self): if self._makefile_refs < 1: try: self._closed = True return self.connection.close() except OpenSSL.SSL.Error: return else: self._makefile_refs -= 1 def getpeercert(self, binary_form=False): x509 = self.connection.get_peer_certificate() if not x509: return x509 if binary_form: return OpenSSL.crypto.dump_certificate( OpenSSL.crypto.FILETYPE_ASN1, x509) return { 'subject': ( (('commonName', x509.get_subject().CN),), ), 'subjectAltName': get_subj_alt_name(x509) } def _reuse(self): self._makefile_refs += 1 def _drop(self): if self._makefile_refs < 1: self.close() else: self._makefile_refs -= 1 if _fileobject: # Platform-specific: Python 2 def makefile(self, mode, bufsize=-1): self._makefile_refs += 1 return _fileobject(self, mode, bufsize, close=True) else: # Platform-specific: Python 3 makefile = backport_makefile WrappedSocket.makefile = makefile class PyOpenSSLContext(object): """ I am a wrapper class for the PyOpenSSL ``Context`` object. I am responsible for translating the interface of the standard library ``SSLContext`` object to calls into PyOpenSSL. 
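    A minimal configuration sketch (editorial addition; the CA path, socket
    and hostname are placeholders, and ``ssl`` is the standard library
    module)::

        ctx = PyOpenSSLContext(ssl.PROTOCOL_SSLv23)
        ctx.verify_mode = ssl.CERT_REQUIRED
        ctx.load_verify_locations(cafile='/path/to/ca-bundle.pem')
        tls = ctx.wrap_socket(sock, server_hostname='example.com')

    In normal use urllib3 builds this context itself after
    inject_into_urllib3(); the sketch only illustrates the ``SSLContext``-style
    surface described above.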
""" def __init__(self, protocol): self.protocol = _openssl_versions[protocol] self._ctx = OpenSSL.SSL.Context(self.protocol) self._options = 0 self.check_hostname = False @property def options(self): return self._options @options.setter def options(self, value): self._options = value self._ctx.set_options(value) @property def verify_mode(self): return _openssl_to_stdlib_verify[self._ctx.get_verify_mode()] @verify_mode.setter def verify_mode(self, value): self._ctx.set_verify( _stdlib_to_openssl_verify[value], _verify_callback ) def set_default_verify_paths(self): self._ctx.set_default_verify_paths() def set_ciphers(self, ciphers): if isinstance(ciphers, six.text_type): ciphers = ciphers.encode('utf-8') self._ctx.set_cipher_list(ciphers) def load_verify_locations(self, cafile=None, capath=None, cadata=None): if cafile is not None: cafile = cafile.encode('utf-8') if capath is not None: capath = capath.encode('utf-8') self._ctx.load_verify_locations(cafile, capath) if cadata is not None: self._ctx.load_verify_locations(BytesIO(cadata)) def load_cert_chain(self, certfile, keyfile=None, password=None): self._ctx.use_certificate_file(certfile) if password is not None: self._ctx.set_passwd_cb(lambda max_length, prompt_twice, userdata: password) self._ctx.use_privatekey_file(keyfile or certfile) def wrap_socket(self, sock, server_side=False, do_handshake_on_connect=True, suppress_ragged_eofs=True, server_hostname=None): cnx = OpenSSL.SSL.Connection(self._ctx, sock) if isinstance(server_hostname, six.text_type): # Platform-specific: Python 3 server_hostname = server_hostname.encode('utf-8') if server_hostname is not None: cnx.set_tlsext_host_name(server_hostname) cnx.set_connect_state() while True: try: cnx.do_handshake() except OpenSSL.SSL.WantReadError: rd = util.wait_for_read(sock, sock.gettimeout()) if not rd: raise timeout('select timed out') continue except OpenSSL.SSL.Error as e: raise ssl.SSLError('bad handshake: %r' % e) break return WrappedSocket(cnx, sock) def _verify_callback(cnx, x509, err_no, err_depth, return_code): return err_no == 0 PKZ  .site-packages/pip/_vendor/urllib3/__init__.pyonu[ abc@@sdZddlmZddlZddlmZmZmZddlm Z ddl m Z ddl m Z mZmZdd lmZdd lmZdd lmZdd lmZdd lmZddlZyddlmZWn*ek rdejfdYZnXdZdZdZ d'Z!ej"e#j$eej%d"Z&[ej'd#e j(d$e)ej'd%e j*d$e)ej'd%e j+d$e)ej'd%e j,d$e)e j-d&Z.dS((s8 urllib3 - Thread-safe connection pooling and re-using. i(tabsolute_importNi(tHTTPConnectionPooltHTTPSConnectionPooltconnection_from_url(t exceptions(tencode_multipart_formdata(t PoolManagert ProxyManagertproxy_from_url(t HTTPResponse(t make_headers(tget_host(tTimeout(tRetry(t NullHandlerRcB@seZdZRS(cC@sdS(N((tselftrecord((s@/usr/lib/python2.7/site-packages/pip/_vendor/urllib3/__init__.pytemits(t__name__t __module__R(((s@/usr/lib/python2.7/site-packages/pip/_vendor/urllib3/__init__.pyRss(Andrey Petrov (andrey.petrov@shazow.net)tMITs1.22RRRRR R R tadd_stderr_loggerRtdisable_warningsRR R RcC@s_tjt}tj}|jtjd|j||j||jdt|S(s Helper for quickly adding a StreamHandler to the logger. Useful for debugging. Returns the handler after adding it. s%%(asctime)s %(levelname)s %(message)ss,Added a stderr logging handler to logger: %s( tloggingt getLoggerRt StreamHandlert setFormattert Formattert addHandlertsetLeveltdebug(tleveltloggerthandler((s@/usr/lib/python2.7/site-packages/pip/_vendor/urllib3/__init__.pyR9s    talwaystappendtdefaultcC@stjd|dS(s< Helper for quickly disabling all urllib3 warnings. 
tignoreN(twarningst simplefilter(tcategory((s@/usr/lib/python2.7/site-packages/pip/_vendor/urllib3/__init__.pyR]s(RRRRR R R RRRRR R R(/t__doc__t __future__RR&tconnectionpoolRRRtRtfilepostRt poolmanagerRRRtresponseR t util.requestR tutil.urlR t util.timeoutR t util.retryR RRt ImportErrortHandlert __author__t __license__t __version__t__all__RRRtDEBUGRR'tSecurityWarningtTruetSubjectAltNameWarningtInsecurePlatformWarningtSNIMissingWarningt HTTPWarningR(((s@/usr/lib/python2.7/site-packages/pip/_vendor/urllib3/__init__.pytsT   PKZ2]ijij4site-packages/pip/_vendor/urllib3/connectionpool.pyonu[ abc@@sSddlmZddlZddlZddlZddlZddlmZm Z ddlZddl m Z m Z mZmZmZmZmZmZmZmZmZmZmZddlmZddlmZddlmZdd lm Z m!Z!m"Z"m#Z#m$Z$m%Z%m&Z&dd l'm(Z(dd l)m*Z*dd l+m,Z,dd l-m.Z.ddl/m0Z0ddl1m2Z2ddl3m4Z4ddl5m6Z6m7Z7ej8rddl9Z:nej;j<Z<ej=e>Z?e@ZAde@fdYZBeCejDejEgZFdeBe(fdYZGdeGfdYZHdZIdZJdS(i(tabsolute_importN(terrorttimeouti( tClosedPoolErrort ProtocolErrortEmptyPoolErrortHeaderParsingErrortHostChangedErrortLocationValueErrort MaxRetryErrort ProxyErrortReadTimeoutErrortSSLErrort TimeoutErrortInsecureRequestWarningtNewConnectionError(tCertificateError(tsix(tqueue(tport_by_schemetDummyConnectiontHTTPConnectiontHTTPSConnectiontVerifiedHTTPSConnectiont HTTPExceptiont BaseSSLError(tRequestMethods(t HTTPResponse(tis_connection_dropped(tset_file_position(tassert_header_parsing(tRetry(tTimeout(tget_hosttUrltConnectionPoolcB@sMeZdZdZejZddZdZ dZ dZ dZ RS(sz Base class for all connection pools, such as :class:`.HTTPConnectionPool` and :class:`.HTTPSConnectionPool`. cC@sF|stdnt|j|_|j|_||_dS(NsNo host specified.(Rt _ipv6_hosttlowerthostt _proxy_hosttport(tselfR&R(((sF/usr/lib/python2.7/site-packages/pip/_vendor/urllib3/connectionpool.pyt__init__Cs cC@s dt|j|j|jfS(Ns%s(host=%r, port=%r)(ttypet__name__R&R((R)((sF/usr/lib/python2.7/site-packages/pip/_vendor/urllib3/connectionpool.pyt__str__KscC@s|S(N((R)((sF/usr/lib/python2.7/site-packages/pip/_vendor/urllib3/connectionpool.pyt __enter__OscC@s|jtS(N(tclosetFalse(R)texc_typetexc_valtexc_tb((sF/usr/lib/python2.7/site-packages/pip/_vendor/urllib3/connectionpool.pyt__exit__Rs cC@sdS(sD Close all pooled connections and disable the pool. N((R)((sF/usr/lib/python2.7/site-packages/pip/_vendor/urllib3/connectionpool.pyR/WsN( R,t __module__t__doc__tNonetschemeRt LifoQueuetQueueClsR*R-R.R4R/(((sF/usr/lib/python2.7/site-packages/pip/_vendor/urllib3/connectionpool.pyR#:s     tHTTPConnectionPoolc B@seZdZdZeZeZde e j de ddddd Z dZ ddZdZdZdZd Zd Zee d Zd Zd ZdZdddeeedde dd ZRS(sN Thread-safe connection pool for one host. :param host: Host used for this HTTP Connection (e.g. "localhost"), passed into :class:`httplib.HTTPConnection`. :param port: Port used for this HTTP Connection (None is equivalent to 80), passed into :class:`httplib.HTTPConnection`. :param strict: Causes BadStatusLine to be raised if the status line can't be parsed as a valid HTTP/1.0 or 1.1 status line, passed into :class:`httplib.HTTPConnection`. .. note:: Only works in Python 2. This parameter is ignored in Python 3. :param timeout: Socket timeout in seconds for each individual connection. This can be a float or integer, which sets the timeout for the HTTP request, or an instance of :class:`urllib3.util.Timeout` which gives you more fine-grained control over request timeouts. After the constructor has been parsed, this is always a `urllib3.util.Timeout` object. :param maxsize: Number of connections to save that can be reused. More than 1 is useful in multithreaded situations. 
If ``block`` is set to False, more connections will be created but they will not be saved once they've been used. :param block: If set to True, no more than ``maxsize`` connections will be used at a time. When no free connections are available, the call will block until a connection has been released. This is a useful side effect for particular multithreaded situations where one does not want to use more than maxsize connections per host to prevent flooding. :param headers: Headers to include with all requests, unless other headers are given explicitly. :param retries: Retry configuration to use by default with requests in this pool. :param _proxy: Parsed proxy URL, should not be used directly, instead, see :class:`urllib3.connectionpool.ProxyManager`" :param _proxy_headers: A dictionary with proxy headers, should not be used directly, instead, see :class:`urllib3.connectionpool.ProxyManager`" :param \**conn_kw: Additional parameters are used to create fresh :class:`urllib3.connection.HTTPConnection`, :class:`urllib3.connection.HTTPSConnection` instances. thttpic K@stj|||tj||||_t|tsMtj|}n|dkretj }n||_ ||_ |j ||_ ||_| |_| pi|_x$t|D]} |j jdqWd|_d|_| |_|jr |jjdgndS(Nitsocket_options(R#R*Rtstrictt isinstanceR t from_floatR7RtDEFAULTRtretriesR:tpooltblocktproxyt proxy_headerstxrangetputtnum_connectionst num_requeststconn_kwt setdefault( R)R&R(R>RtmaxsizeRDtheadersRBt_proxyt_proxy_headersRKt_((sF/usr/lib/python2.7/site-packages/pip/_vendor/urllib3/connectionpool.pyR*s(           c C@se|jd7_tjd|j|j|jd|jd|jd|jjd|j|j }|S(s9 Return a fresh :class:`HTTPConnection`. is%Starting new HTTP connection (%d): %sR&R(RR>( RItlogtdebugR&t ConnectionClsR(Rtconnect_timeoutR>RK(R)tconn((sF/usr/lib/python2.7/site-packages/pip/_vendor/urllib3/connectionpool.pyt _new_conns  cC@sd }y"|jjd|jd|}WnNtk rJt|dn/tjk rx|jryt|dqynX|rt |rt j d|j |j t|dddkrd }qn|p|jS( s Get a connection. Will return a pooled connection if one is available. If no connections are available and :prop:`.block` is ``False``, then a fresh connection is returned. :param timeout: Seconds to wait before giving up and raising :class:`urllib3.exceptions.EmptyPoolError` if the pool is empty and :prop:`.block` is ``True``. RDRsPool is closed.s>Pool reached maximum size and no more connections are allowed.s Resetting dropped connection: %st auto_openiiN(R7RCtgetRDtAttributeErrorRRtEmptyRRRRRSR&R/tgetattrRW(R)RRV((sF/usr/lib/python2.7/site-packages/pip/_vendor/urllib3/connectionpool.pyt _get_conns "     cC@soy|jj|dtdSWn7tk r1n'tjk rWtjd|jnX|rk|j ndS(s Put a connection back into the pool. :param conn: Connection object for the current host and port as returned by :meth:`._new_conn` or :meth:`._get_conn`. If the pool is already full, the connection is closed and discarded because we exceeded maxsize. If connections are discarded frequently, then maxsize should be increased. If the pool is closed, then the connection will be closed and discarded. RDNs2Connection pool is full, discarding connection: %s( RCRHR0RZRtFullRRtwarningR&R/(R)RV((sF/usr/lib/python2.7/site-packages/pip/_vendor/urllib3/connectionpool.pyt _put_conns cC@sdS(sU Called right before a request is made, after the socket is created. 
N((R)RV((sF/usr/lib/python2.7/site-packages/pip/_vendor/urllib3/connectionpool.pyt_validate_connscC@sdS(N((R)RV((sF/usr/lib/python2.7/site-packages/pip/_vendor/urllib3/connectionpool.pyt_prepare_proxy!scC@sC|tkr|jjSt|tr2|jStj|SdS(s< Helper that always returns a :class:`urllib3.util.Timeout` N(t_DefaultRtcloneR?R R@(R)R((sF/usr/lib/python2.7/site-packages/pip/_vendor/urllib3/connectionpool.pyt _get_timeout%s    cC@st|tr(t||d|nt|dr_|jtkr_t||d|ndt|ksdt|krt||d|ndS(sAIs the error actually a timeout? Will raise a ReadTimeout or passs!Read timed out. (read timeout=%s)terrnos timed outsdid not complete (read)N(R?t SocketTimeoutR thasattrRft_blocking_errnoststr(R)terrturlt timeout_value((sF/usr/lib/python2.7/site-packages/pip/_vendor/urllib3/connectionpool.pyt_raise_timeout1s $c K@sh|jd7_|j|}|j|j|_y|j|Wn;ttfk r}|jd|d|d|jnX|r|j |||n|j ||||j } t |ddr-| dkrt||d| n| tjkr|jjtjq-|jj| nydy|jdt} WnGtk ry|j} Wqtk r}tj|dqXnXWn;tttfk r}|jd|d|d| nXt |d d } tjd |j|j|j ||| | j!| j" yt#| j$Wn;t%tfk rc} tj&d |j'|| d tnX| S(s Perform a request on a given urllib connection object taken from our pool. :param conn: a connection from one of our connection pools :param timeout: Socket timeout in seconds for the request. This can be a float or integer, which will set the same timeout value for the socket connect and the socket read, or an instance of :class:`urllib3.util.Timeout`, which gives you more fine-grained control over your timeouts. iRkRlRmtsockis!Read timed out. (read timeout=%s)t bufferingt _http_vsn_strsHTTP/?s%s://%s:%s "%s %s %s" %s %ss$Failed to parse headers (url=%s): %stexc_infoN((RJRet start_connectRURRaRgRRntrequest_chunkedtrequestt read_timeoutR\R7R R tDEFAULT_TIMEOUTRot settimeouttsockettgetdefaulttimeoutt getresponsetTruet TypeErrort ExceptionRt raise_fromt SocketErrorRRRSR8R&R(tstatustlengthRtmsgRR_t _absolute_url( R)RVtmethodRlRtchunkedthttplib_request_kwt timeout_objteRvthttplib_responset http_versionthpe((sF/usr/lib/python2.7/site-packages/pip/_vendor/urllib3/connectionpool.pyt _make_requestBsT      c C@s+td|jd|jd|jd|jS(NR8R&R(tpath(R"R8R&R(Rl(R)R((sF/usr/lib/python2.7/site-packages/pip/_vendor/urllib3/connectionpool.pyRscC@sd|jd}|_y6x/trG|jdt}|r|jqqWWntjk r_nXdS(sD Close all pooled connections and disable the pool. RDN(RCR7R|RYR0R/RR[(R)told_poolRV((sF/usr/lib/python2.7/site-packages/pip/_vendor/urllib3/connectionpool.pyR/s cC@s|jdrtSt|\}}}t|j}|jr\| r\tj|}n(|j r|tj|krd}n|||f|j |j |jfkS(sj Check if the given ``url`` is a member of the same host as this connection pool. t/N( t startswithR|R!R$R%R(RRYR7R8R&(R)RlR8R&R(((sF/usr/lib/python2.7/site-packages/pip/_vendor/urllib3/connectionpool.pyt is_same_hosts c K@sJ|dkr|j}nt|tsHtj|d|d|j}n| dkri| jdt} n|r|j| rt |||nd}| }|j dkr|j }|j |j nd}t}t|| } zy|j|}|jd| }|j|_|jdk o;t|dd }|rT|j|n|j|||d|d|d|d | }| s|nd}|| d <|jj|d |d |d || }t}Wn tjk rt|dntttt t!t"t#fk r}t}t|t!t#fr>t"|}nWt|tt$frn|jrnt%d|}n't|ttfrt d|}n|j&||d|d|dt'j(d}|j)|}nXWd|s|o|j*}t}n|r|j+|nX|sit,j-d||||j.|||||||d|d| d| d| | Sd}|o|j/}|r_|j0dkrd}ny"|j&||d|d|}Wn+t1k r|j2r||n|SX|||j3|t,j4d|||j.||||d |d|d|d|d| d| d| | St5|j6d }|j7||j0|rFy"|j&||d|d|}Wn+t1k r|j8r||n|SX|||j)|t,j4d!||j.||||d |d|d|d|d| d| d| | S|S("s Get a connection from the pool and perform an HTTP request. This is the lowest level call for making a request, so you'll need to specify all the raw details. .. note:: More commonly, it's appropriate to use a convenience method provided by :class:`.RequestMethods`, such as :meth:`request`. .. 
note:: `release_conn` will only behave as expected if `preload_content=False` because we want to make `preload_content=False` the default behaviour someday soon without breaking backwards compatibility. :param method: HTTP request method (such as GET, POST, PUT, etc.) :param body: Data to send in the request body (useful for creating POST requests, see HTTPConnectionPool.post_url for more convenience). :param headers: Dictionary of custom headers to send, such as User-Agent, If-None-Match, etc. If None, pool headers are used. If provided, these headers completely replace any pool-specific headers. :param retries: Configure the number of retries to allow before raising a :class:`~urllib3.exceptions.MaxRetryError` exception. Pass ``None`` to retry until you receive a response. Pass a :class:`~urllib3.util.retry.Retry` object for fine-grained control over different types of retries. Pass an integer number to retry connection errors that many times, but no other types of errors. Pass zero to never retry. If ``False``, then retries are disabled and any exception is raised immediately. Also, instead of raising a MaxRetryError on redirects, the redirect response will be returned. :type retries: :class:`~urllib3.util.retry.Retry`, False, or an int. :param redirect: If True, automatically handle redirects (status codes 301, 302, 303, 307, 308). Each redirect counts as a retry. Disabling retries will disable redirect, too. :param assert_same_host: If ``True``, will make sure that the host of the pool requests is consistent else will raise HostChangedError. When False, you can use the pool on an HTTP proxy and request foreign hosts. :param timeout: If specified, overrides the default timeout for this one request. It may be a float (in seconds) or an instance of :class:`urllib3.util.Timeout`. :param pool_timeout: If set and the pool is set to block=True, then this method will block for ``pool_timeout`` seconds and raise EmptyPoolError if no connection is available within the time period. :param release_conn: If False, then the urlopen call will not release the connection back into the pool once a response is received (but will release if you read the entire contents of the response such as when `preload_content=True`). This is useful if you're not preloading the response's content immediately. You will need to call ``r.release_conn()`` on the response ``r`` to return the connection back into the pool. If None, it takes the value of ``response_kw.get('preload_content', True)``. :param chunked: If True, urllib3 will send the body using chunked transfer encoding. Otherwise, urllib3 will send the body using the standard content-length form. Defaults to False. :param int body_pos: Position to seek to in file-like body in the event of a retry or redirect. Typically this won't need to be set because urllib3 will auto-populate the value when needed. 
:param \**response_kw: Additional parameters are passed to :meth:`urllib3.response.HTTPResponse.from_httplib` tredirecttdefaulttpreload_contentR<RRotbodyRNRtrequest_methodRCt connectionRBs"No pool connections are available.sCannot connect to proxy.sConnection aborted.Rt_poolt _stacktraceiNs1Retrying (%r) after connection broken by '%r': %st pool_timeoutt release_conntbody_posc S@s:y|jWn%ttttttfk r5}nXdS(N(treadR RRRRR (tresponseR((sF/usr/lib/python2.7/site-packages/pip/_vendor/urllib3/connectionpool.pytdrain_and_release_conns  i/tGETRsRedirecting %s -> %stassert_same_hosts Retry-Afters Retry: %s(9R7RNR?Rtfrom_intRBRYR|RRR8tcopytupdateRFR0RReR]RURRER\RbRt ResponseClst from_httplibRR[RR RRRRR RRR t incrementtsysRrtsleepR/R`RRR_turlopentget_redirect_locationRR traise_on_redirecttsleep_for_retryRStboolt getheadertis_retrytraise_on_status(R)RRlRRNRBRRRRRRRt response_kwRVtrelease_this_connRkt clean_exitRtis_new_proxy_connRt response_connRRRtredirect_locationthas_retry_after((sF/usr/lib/python2.7/site-packages/pip/_vendor/urllib3/connectionpool.pyRs^  !    "               "        "        N(R,R5R6R8RRTRRR7R0R RwR*RWR]R`RaRbReRnRcRRR/RR|R(((sF/usr/lib/python2.7/site-packages/pip/_vendor/urllib3/connectionpool.pyR;bs.: % &    U      tHTTPSConnectionPoolcB@s}eZdZdZeZdeej dedddddddddddddZ dZ dZ dZ dZRS( s Same as :class:`.HTTPConnectionPool`, but HTTPS. When Python is compiled with the :mod:`ssl` module, then :class:`.VerifiedHTTPSConnection` is used, which *can* verify certificates, instead of :class:`.HTTPSConnection`. :class:`.VerifiedHTTPSConnection` uses one of ``assert_fingerprint``, ``assert_hostname`` and ``host`` in this order to verify connections. If ``assert_hostname`` is False, no verification is done. The ``key_file``, ``cert_file``, ``cert_reqs``, ``ca_certs``, ``ca_cert_dir``, and ``ssl_version`` are only used if :mod:`ssl` is available and are fed into :meth:`urllib3.util.ssl_wrap_socket` to upgrade the connection socket into an SSL socket. thttpsic K@stj|||||||||| | | |rI| dkrId} n| |_| |_| |_||_||_||_||_ ||_ dS(Nt CERT_REQUIRED( R;R*R7tkey_filet cert_filet cert_reqstca_certst ca_cert_dirt ssl_versiontassert_hostnametassert_fingerprint(R)R&R(R>RRMRDRNRBRORPRRRRRRRRRK((sF/usr/lib/python2.7/site-packages/pip/_vendor/urllib3/connectionpool.pyR*s         cC@skt|trg|jd|jd|jd|jd|jd|jd|jd|j |j |_ n|S(s Prepare the ``connection`` for :meth:`urllib3.util.ssl_wrap_socket` and establish the tunnel if proxy is used. RRRRRRR( R?Rtset_certRRRRRRRR(R)RV((sF/usr/lib/python2.7/site-packages/pip/_vendor/urllib3/connectionpool.pyt _prepare_conns      cC@sy |j}Wntk r)|j}nXtjdkrY|j rY||j|jn||j|j|j|jdS(s Establish tunnel connection early, because otherwise httplib would improperly set Host: header to proxy's IP:port. iiiN(iii( t set_tunnelRZt _set_tunnelRt version_infoRFR'R(tconnect(R)RVR((sF/usr/lib/python2.7/site-packages/pip/_vendor/urllib3/connectionpool.pyRb s   c C@s|jd7_tjd|j|j|j sA|jtkrPtdn|j}|j}|jdk r|jj}|jj}n|jd|d|d|j j d|j |j }|j|S( sB Return a fresh :class:`httplib.HTTPSConnection`. is&Starting new HTTPS connection (%d): %ssCCan't connect to HTTPS URL because the SSL module is not available.R&R(RR>N(RIRRRSR&RTRR R(RER7RRUR>RKR(R)t actual_hostt actual_portRV((sF/usr/lib/python2.7/site-packages/pip/_vendor/urllib3/connectionpool.pyRW2s     cC@sUtt|j|t|dds5|jn|jsQtjdt ndS(sU Called right before a request is made, after the socket is created. RosUnverified HTTPS request is being made. Adding certificate verification is strongly advised. 
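The ``urlopen`` parameters documented above can be exercised with a short, hedged sketch of the pool interface; the host, path, and numeric values are placeholders, and the vendored copy would be imported as ``pip._vendor.urllib3`` rather than ``urllib3``.

    from urllib3 import HTTPConnectionPool, Retry, Timeout

    pool = HTTPConnectionPool('httpbin.org', port=80, maxsize=2, block=True,
                              retries=Retry(total=3, redirect=2),
                              timeout=Timeout(connect=2.0, read=5.0))
    response = pool.urlopen('GET', '/get', preload_content=True)
    print(response.status)      # e.g. 200
    print(len(response.data))   # body already read because preload_content=True
    pool.close()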
See: https://urllib3.readthedocs.io/en/latest/advanced-usage.html#ssl-warningsN( tsuperRRaR\R7Rt is_verifiedtwarningstwarnR(R)RV((sF/usr/lib/python2.7/site-packages/pip/_vendor/urllib3/connectionpool.pyRaJs  N(R,R5R6R8RRTR7R0R RwR*RRbRWRa(((sF/usr/lib/python2.7/site-packages/pip/_vendor/urllib3/connectionpool.pyRs       cK@sct|\}}}|p*tj|d}|dkrLt|d||St|d||SdS(s Given a url, return an :class:`.ConnectionPool` instance of its host. This is a shortcut for not having to parse out the scheme, host, and port of the url before creating an :class:`.ConnectionPool` instance. :param url: Absolute URL string that must include the scheme. Port is optional. :param \**kw: Passes additional parameters to the constructor of the appropriate :class:`.ConnectionPool`. Useful for specifying things like timeout, maxsize, headers, etc. Example:: >>> conn = connection_from_url('http://google.com/') >>> r = conn.request('GET', '/') iPRR(N(R!RRYRR;(RltkwR8R&R(((sF/usr/lib/python2.7/site-packages/pip/_vendor/urllib3/connectionpool.pytconnection_from_url]s  cC@s@|jdr<|jdr<|jddjd}n|S(s' Process IPv6 address literals t[t]s%25t%s[](Rtendswithtreplacetstrip(R&((sF/usr/lib/python2.7/site-packages/pip/_vendor/urllib3/connectionpool.pyR$ys(Kt __future__RRftloggingRRRyRRRRgt exceptionsRRRRRRR R R R R RRtpackages.ssl_match_hostnameRtpackagesRtpackages.six.movesRRRRRRRRRRuRRRtutil.connectionRt util.requestRt util.responseRt util.retryRt util.timeoutR tutil.urlR!R"tPY2tQueuet_unused_module_QueuetmovesRGt getLoggerR,RRtobjectRcR#tsettEAGAINt EWOULDBLOCKRiR;RRR$(((sF/usr/lib/python2.7/site-packages/pip/_vendor/urllib3/connectionpool.pyts@     X4   %| PKZ2]ijij4site-packages/pip/_vendor/urllib3/connectionpool.pycnu[ abc@@sSddlmZddlZddlZddlZddlZddlmZm Z ddlZddl m Z m Z mZmZmZmZmZmZmZmZmZmZmZddlmZddlmZddlmZdd lm Z m!Z!m"Z"m#Z#m$Z$m%Z%m&Z&dd l'm(Z(dd l)m*Z*dd l+m,Z,dd l-m.Z.ddl/m0Z0ddl1m2Z2ddl3m4Z4ddl5m6Z6m7Z7ej8rddl9Z:nej;j<Z<ej=e>Z?e@ZAde@fdYZBeCejDejEgZFdeBe(fdYZGdeGfdYZHdZIdZJdS(i(tabsolute_importN(terrorttimeouti( tClosedPoolErrort ProtocolErrortEmptyPoolErrortHeaderParsingErrortHostChangedErrortLocationValueErrort MaxRetryErrort ProxyErrortReadTimeoutErrortSSLErrort TimeoutErrortInsecureRequestWarningtNewConnectionError(tCertificateError(tsix(tqueue(tport_by_schemetDummyConnectiontHTTPConnectiontHTTPSConnectiontVerifiedHTTPSConnectiont HTTPExceptiont BaseSSLError(tRequestMethods(t HTTPResponse(tis_connection_dropped(tset_file_position(tassert_header_parsing(tRetry(tTimeout(tget_hosttUrltConnectionPoolcB@sMeZdZdZejZddZdZ dZ dZ dZ RS(sz Base class for all connection pools, such as :class:`.HTTPConnectionPool` and :class:`.HTTPSConnectionPool`. cC@sF|stdnt|j|_|j|_||_dS(NsNo host specified.(Rt _ipv6_hosttlowerthostt _proxy_hosttport(tselfR&R(((sF/usr/lib/python2.7/site-packages/pip/_vendor/urllib3/connectionpool.pyt__init__Cs cC@s dt|j|j|jfS(Ns%s(host=%r, port=%r)(ttypet__name__R&R((R)((sF/usr/lib/python2.7/site-packages/pip/_vendor/urllib3/connectionpool.pyt__str__KscC@s|S(N((R)((sF/usr/lib/python2.7/site-packages/pip/_vendor/urllib3/connectionpool.pyt __enter__OscC@s|jtS(N(tclosetFalse(R)texc_typetexc_valtexc_tb((sF/usr/lib/python2.7/site-packages/pip/_vendor/urllib3/connectionpool.pyt__exit__Rs cC@sdS(sD Close all pooled connections and disable the pool. 
is&Starting new HTTPS connection (%d): %ssCCan't connect to HTTPS URL because the SSL module is not available.R&R(RR>N(RIRRRSR&RTRR R(RER7RRUR>RKR(R)t actual_hostt actual_portRV((sF/usr/lib/python2.7/site-packages/pip/_vendor/urllib3/connectionpool.pyRW2s     cC@sUtt|j|t|dds5|jn|jsQtjdt ndS(sU Called right before a request is made, after the socket is created. RosUnverified HTTPS request is being made. Adding certificate verification is strongly advised. See: https://urllib3.readthedocs.io/en/latest/advanced-usage.html#ssl-warningsN( tsuperRRaR\R7Rt is_verifiedtwarningstwarnR(R)RV((sF/usr/lib/python2.7/site-packages/pip/_vendor/urllib3/connectionpool.pyRaJs  N(R,R5R6R8RRTR7R0R RwR*RRbRWRa(((sF/usr/lib/python2.7/site-packages/pip/_vendor/urllib3/connectionpool.pyRs       cK@sct|\}}}|p*tj|d}|dkrLt|d||St|d||SdS(s Given a url, return an :class:`.ConnectionPool` instance of its host. This is a shortcut for not having to parse out the scheme, host, and port of the url before creating an :class:`.ConnectionPool` instance. :param url: Absolute URL string that must include the scheme. Port is optional. :param \**kw: Passes additional parameters to the constructor of the appropriate :class:`.ConnectionPool`. Useful for specifying things like timeout, maxsize, headers, etc. Example:: >>> conn = connection_from_url('http://google.com/') >>> r = conn.request('GET', '/') iPRR(N(R!RRYRR;(RltkwR8R&R(((sF/usr/lib/python2.7/site-packages/pip/_vendor/urllib3/connectionpool.pytconnection_from_url]s  cC@s@|jdr<|jdr<|jddjd}n|S(s' Process IPv6 address literals t[t]s%25t%s[](Rtendswithtreplacetstrip(R&((sF/usr/lib/python2.7/site-packages/pip/_vendor/urllib3/connectionpool.pyR$ys(Kt __future__RRftloggingRRRyRRRRgt exceptionsRRRRRRR R R R R RRtpackages.ssl_match_hostnameRtpackagesRtpackages.six.movesRRRRRRRRRRuRRRtutil.connectionRt util.requestRt util.responseRt util.retryRt util.timeoutR tutil.urlR!R"tPY2tQueuet_unused_module_QueuetmovesRGt getLoggerR,RRtobjectRcR#tsettEAGAINt EWOULDBLOCKRiR;RRR$(((sF/usr/lib/python2.7/site-packages/pip/_vendor/urllib3/connectionpool.pyts@     X4   %| PKZ332site-packages/pip/_vendor/urllib3/_collections.pyonu[ abc@@sddlmZddlmZmZyddlmZWn$ek r`dddYZnXyddlmZWn!ek rddl mZnXddl m Z m Z m Z d d gZeZd efd YZd efd YZd S(i(tabsolute_import(tMappingtMutableMapping(tRLockRcB@seZdZdZRS(cC@sdS(N((tself((sD/usr/lib/python2.7/site-packages/pip/_vendor/urllib3/_collections.pyt __enter__scC@sdS(N((Rtexc_typet exc_valuet traceback((sD/usr/lib/python2.7/site-packages/pip/_vendor/urllib3/_collections.pyt__exit__ s(t__name__t __module__RR (((sD/usr/lib/python2.7/site-packages/pip/_vendor/urllib3/_collections.pyRs (t OrderedDicti(titerkeyst itervaluestPY3tRecentlyUsedContainertHTTPHeaderDictcB@sbeZdZeZdd dZdZdZdZ dZ dZ dZ d Z RS( s Provides a thread-safe dict-like container which maintains up to ``maxsize`` keys while throwing away the least-recently-used keys beyond ``maxsize``. :param maxsize: Maximum number of recent elements to retain. :param dispose_func: Every time an item is evicted from the container, ``dispose_func(value)`` is called. 
Callback which will get called i cC@s1||_||_|j|_t|_dS(N(t_maxsizet dispose_funct ContainerClst _containerRtlock(RtmaxsizeR((sD/usr/lib/python2.7/site-packages/pip/_vendor/urllib3/_collections.pyt__init__+s  cC@s7|j(|jj|}||j|<|SWdQXdS(N(RRtpop(Rtkeytitem((sD/usr/lib/python2.7/site-packages/pip/_vendor/urllib3/_collections.pyt __getitem__2s  cC@st}|j]|jj|t}||j|>> headers = HTTPHeaderDict() >>> headers.add('Set-Cookie', 'foo=bar') >>> headers.add('set-cookie', 'baz=quxx') >>> headers['content-length'] = '7' >>> headers['SET-cookie'] 'foo=bar, baz=quxx' >>> headers['Content-Length'] '7' cK@sttt|jt|_|dk rZt|trJ|j|qZ|j|n|rp|j|ndS(N( tsuperRRR RR0t isinstancet _copy_fromtextend(Rtheaderstkwargs((sD/usr/lib/python2.7/site-packages/pip/_vendor/urllib3/_collections.pyRs  cC@s*||g|j|j<|j|jS(N(Rtlower(RRtval((sD/usr/lib/python2.7/site-packages/pip/_vendor/urllib3/_collections.pyR&scC@s$|j|j}dj|dS(Ns, i(RR7tjoin(RRR8((sD/usr/lib/python2.7/site-packages/pip/_vendor/urllib3/_collections.pyRscC@s|j|j=dS(N(RR7(RR((sD/usr/lib/python2.7/site-packages/pip/_vendor/urllib3/_collections.pyR'scC@s|j|jkS(N(R7R(RR((sD/usr/lib/python2.7/site-packages/pip/_vendor/urllib3/_collections.pyt __contains__scC@st|t r$t|d r$tSt|t|sNt||}ntd|jDtd|jDkS(NR.cs@s'|]\}}|j|fVqdS(N(R7(t.0tktv((sD/usr/lib/python2.7/site-packages/pip/_vendor/urllib3/_collections.pys scs@s'|]\}}|j|fVqdS(N(R7(R;R<R=((sD/usr/lib/python2.7/site-packages/pip/_vendor/urllib3/_collections.pys s(R2RthasattrR"ttypetdictt itermerged(Rtother((sD/usr/lib/python2.7/site-packages/pip/_vendor/urllib3/_collections.pyt__eq__s  cC@s|j| S(N(RC(RRB((sD/usr/lib/python2.7/site-packages/pip/_vendor/urllib3/_collections.pyt__ne__scC@s t|jS(N(R R(R((sD/usr/lib/python2.7/site-packages/pip/_vendor/urllib3/_collections.pyR(scc@s'x |jjD]}|dVqWdS(Ni(RR-(Rtvals((sD/usr/lib/python2.7/site-packages/pip/_vendor/urllib3/_collections.pyR*scC@sGy||}Wn'tk r7||jkr3n|SX||=|SdS(sD.pop(k[,d]) -> v, remove specified key and return the corresponding value. If key is not found, d is returned if given, otherwise KeyError is raised. N(tKeyErrort_HTTPHeaderDict__marker(RRtdefaultR#((sD/usr/lib/python2.7/site-packages/pip/_vendor/urllib3/_collections.pyRs cC@s#y ||=Wntk rnXdS(N(RF(RR((sD/usr/lib/python2.7/site-packages/pip/_vendor/urllib3/_collections.pytdiscards  cC@sM|j}||g}|jj||}||k rI|j|ndS(sAdds a (name, value) pair, doesn't overwrite the value if it already exists. >>> headers = HTTPHeaderDict(foo='bar') >>> headers.add('Foo', 'baz') >>> headers['foo'] 'bar, baz' N(R7Rt setdefaulttappend(RRR8t key_lowertnew_valsRE((sD/usr/lib/python2.7/site-packages/pip/_vendor/urllib3/_collections.pytadds   cO@s]t|dkr0tdjt|nt|dkrL|dnd}t|trx|jD]\}}|j||qnWnt|trx|D]}|j|||qWndt|drxR|j D]}|j|||qWn'x$|D]\}}|j||q Wx*|j D]\}}|j||q9WdS(sGeneric import function for any type of header-like object. Adapted version of MutableMapping.update in order to insert items with self.add instead of self.__setitem__ is9extend() takes at most 1 positional arguments ({0} given)iR.N(( R t TypeErrortformatR2Rt iteritemsRNRR>R.titems(RtargsR6RBRR8R#((sD/usr/lib/python2.7/site-packages/pip/_vendor/urllib3/_collections.pyR4s" " cC@sKy|j|j}Wn%tk r>||jkr:gS|SX|dSdS(smReturns a list of all the values for the named field. 
Returns an empty list if the key doesn't exist.iN(RR7RFRG(RRRHRE((sD/usr/lib/python2.7/site-packages/pip/_vendor/urllib3/_collections.pytgetlists cC@s#dt|jt|jfS(Ns%s(%s)(R?R R@RA(R((sD/usr/lib/python2.7/site-packages/pip/_vendor/urllib3/_collections.pyt__repr__scC@s\xU|D]M}|j|}t|tr:t|}n|g||j|js    JPKZpt-site-packages/pip/_vendor/urllib3/request.pycnu[ abc@@sSddlmZddlmZddlmZdgZdefdYZdS(i(tabsolute_importi(tencode_multipart_formdata(t urlencodetRequestMethodscB@szeZdZeddddgZd dZd d ed dZd d dZ d d dZ d d ed d Z RS( s Convenience mixin for classes who implement a :meth:`urlopen` method, such as :class:`~urllib3.connectionpool.HTTPConnectionPool` and :class:`~urllib3.poolmanager.PoolManager`. Provides behavior for making common types of HTTP request methods and decides which type of request field encoding to use. Specifically, :meth:`.request_encode_url` is for sending requests whose fields are encoded in the URL (such as GET, HEAD, DELETE). :meth:`.request_encode_body` is for sending requests whose fields are encoded in the *body* of the request using multipart or www-form-urlencoded (such as for POST, PUT, PATCH). :meth:`.request` is for making any kind of request, it will look up the appropriate encoding format and use one of the above two methods to make the request. Initializer parameters: :param headers: Headers to include with all requests, unless other headers are given explicitly. tDELETEtGETtHEADtOPTIONScC@s|p i|_dS(N(theaders(tselfR((s?/usr/lib/python2.7/site-packages/pip/_vendor/urllib3/request.pyt__init__)scK@stddS(NsMClasses extending RequestMethods must implement their own ``urlopen`` method.(tNotImplemented(R tmethodturltbodyRtencode_multiparttmultipart_boundarytkw((s?/usr/lib/python2.7/site-packages/pip/_vendor/urllib3/request.pyturlopen,scK@s]|j}||jkr:|j||d|d||S|j||d|d||SdS(s Make a request using :meth:`urlopen` with the appropriate encoding of ``fields`` based on the ``method`` used. This is a convenience method that requires the least amount of manual effort. It can be used in most situations, while still having the option to drop down to more specific methods when necessary, such as :meth:`request_encode_url`, :meth:`request_encode_body`, or even the lowest level :meth:`urlopen`. tfieldsRN(tuppert_encode_url_methodstrequest_encode_urltrequest_encode_body(R R R RRt urlopen_kw((s?/usr/lib/python2.7/site-packages/pip/_vendor/urllib3/request.pytrequest2s cK@sb|dkr|j}ni|d6}|j||rO|dt|7}n|j|||S(s Make a request using :meth:`urlopen` with the ``fields`` encoded in the url. This is useful for request methods like GET, HEAD, DELETE, etc. Rt?N(tNoneRtupdateRR(R R R RRRtextra_kw((s?/usr/lib/python2.7/site-packages/pip/_vendor/urllib3/request.pyRHs    c K@s|dkr|j}niid6}|rd|krFtdn|rgt|d|\} } nt|d} } | |ds PKZ977+site-packages/pip/_vendor/urllib3/fields.pynu[from __future__ import absolute_import import email.utils import mimetypes from .packages import six def guess_content_type(filename, default='application/octet-stream'): """ Guess the "Content-Type" of a file. :param filename: The filename to guess the "Content-Type" of using :mod:`mimetypes`. :param default: If no "Content-Type" can be guessed, default to `default`. """ if filename: return mimetypes.guess_type(filename)[0] or default return default def format_header_param(name, value): """ Helper function to format and quote a single header parameter. Particularly useful for header parameters which might contain non-ASCII values, like file names. This follows RFC 2231, as suggested by RFC 2388 Section 4.4. 
:param name: The name of the parameter, a string expected to be ASCII only. :param value: The value of the parameter, provided as a unicode string. """ if not any(ch in value for ch in '"\\\r\n'): result = '%s="%s"' % (name, value) try: result.encode('ascii') except (UnicodeEncodeError, UnicodeDecodeError): pass else: return result if not six.PY3 and isinstance(value, six.text_type): # Python 2: value = value.encode('utf-8') value = email.utils.encode_rfc2231(value, 'utf-8') value = '%s*=%s' % (name, value) return value class RequestField(object): """ A data container for request body parameters. :param name: The name of this request field. :param data: The data/value body. :param filename: An optional filename of the request field. :param headers: An optional dict-like object of headers to initially use for the field. """ def __init__(self, name, data, filename=None, headers=None): self._name = name self._filename = filename self.data = data self.headers = {} if headers: self.headers = dict(headers) @classmethod def from_tuples(cls, fieldname, value): """ A :class:`~urllib3.fields.RequestField` factory from old-style tuple parameters. Supports constructing :class:`~urllib3.fields.RequestField` from parameter of key/value strings AND key/filetuple. A filetuple is a (filename, data, MIME type) tuple where the MIME type is optional. For example:: 'foo': 'bar', 'fakefile': ('foofile.txt', 'contents of foofile'), 'realfile': ('barfile.txt', open('realfile').read()), 'typedfile': ('bazfile.bin', open('bazfile').read(), 'image/jpeg'), 'nonamefile': 'contents of nonamefile field', Field names and filenames must be unicode. """ if isinstance(value, tuple): if len(value) == 3: filename, data, content_type = value else: filename, data = value content_type = guess_content_type(filename) else: filename = None content_type = None data = value request_param = cls(fieldname, data, filename=filename) request_param.make_multipart(content_type=content_type) return request_param def _render_part(self, name, value): """ Overridable helper function to format a single header parameter. :param name: The name of the parameter, a string expected to be ASCII only. :param value: The value of the parameter, provided as a unicode string. """ return format_header_param(name, value) def _render_parts(self, header_parts): """ Helper function to format and quote a single header. Useful for single headers that are composed of multiple items. E.g., 'Content-Disposition' fields. :param header_parts: A sequence of (k, v) typles or a :class:`dict` of (k, v) to format as `k1="v1"; k2="v2"; ...`. """ parts = [] iterable = header_parts if isinstance(header_parts, dict): iterable = header_parts.items() for name, value in iterable: if value is not None: parts.append(self._render_part(name, value)) return '; '.join(parts) def render_headers(self): """ Renders the headers for this request field. """ lines = [] sort_keys = ['Content-Disposition', 'Content-Type', 'Content-Location'] for sort_key in sort_keys: if self.headers.get(sort_key, False): lines.append('%s: %s' % (sort_key, self.headers[sort_key])) for header_name, header_value in self.headers.items(): if header_name not in sort_keys: if header_value: lines.append('%s: %s' % (header_name, header_value)) lines.append('\r\n') return '\r\n'.join(lines) def make_multipart(self, content_disposition=None, content_type=None, content_location=None): """ Makes this request field into a multipart request field. 
This method overrides "Content-Disposition", "Content-Type" and "Content-Location" headers to the request parameter. :param content_type: The 'Content-Type' of the request body. :param content_location: The 'Content-Location' of the request body. """ self.headers['Content-Disposition'] = content_disposition or 'form-data' self.headers['Content-Disposition'] += '; '.join([ '', self._render_parts( (('name', self._name), ('filename', self._filename)) ) ]) self.headers['Content-Type'] = content_type self.headers['Content-Location'] = content_location PKZ`22/site-packages/pip/_vendor/urllib3/connection.pynu[from __future__ import absolute_import import datetime import logging import os import sys import socket from socket import error as SocketError, timeout as SocketTimeout import warnings from .packages import six from .packages.six.moves.http_client import HTTPConnection as _HTTPConnection from .packages.six.moves.http_client import HTTPException # noqa: F401 try: # Compiled with SSL? import ssl BaseSSLError = ssl.SSLError except (ImportError, AttributeError): # Platform-specific: No SSL. ssl = None class BaseSSLError(BaseException): pass try: # Python 3: # Not a no-op, we're adding this to the namespace so it can be imported. ConnectionError = ConnectionError except NameError: # Python 2: class ConnectionError(Exception): pass from .exceptions import ( NewConnectionError, ConnectTimeoutError, SubjectAltNameWarning, SystemTimeWarning, ) from .packages.ssl_match_hostname import match_hostname, CertificateError from .util.ssl_ import ( resolve_cert_reqs, resolve_ssl_version, assert_fingerprint, create_urllib3_context, ssl_wrap_socket ) from .util import connection from ._collections import HTTPHeaderDict log = logging.getLogger(__name__) port_by_scheme = { 'http': 80, 'https': 443, } # When updating RECENT_DATE, move it to # within two years of the current date, and no # earlier than 6 months ago. RECENT_DATE = datetime.date(2016, 1, 1) class DummyConnection(object): """Used to detect a failed ConnectionCls import.""" pass class HTTPConnection(_HTTPConnection, object): """ Based on httplib.HTTPConnection but provides an extra constructor backwards-compatibility layer between older and newer Pythons. Additional keyword parameters are used to configure attributes of the connection. Accepted parameters include: - ``strict``: See the documentation on :class:`urllib3.connectionpool.HTTPConnectionPool` - ``source_address``: Set the source address for the current connection. .. note:: This is ignored for Python 2.6. It is only applied for 2.7 and 3.x - ``socket_options``: Set specific options on the underlying socket. If not specified, then defaults are loaded from ``HTTPConnection.default_socket_options`` which includes disabling Nagle's algorithm (sets TCP_NODELAY to 1) unless the connection is behind a proxy. For example, if you wish to enable TCP Keep Alive in addition to the defaults, you might pass:: HTTPConnection.default_socket_options + [ (socket.SOL_SOCKET, socket.SO_KEEPALIVE, 1), ] Or you may want to disable the defaults by passing an empty list (e.g., ``[]``). """ default_port = port_by_scheme['http'] #: Disable Nagle's algorithm by default. #: ``[(socket.IPPROTO_TCP, socket.TCP_NODELAY, 1)]`` default_socket_options = [(socket.IPPROTO_TCP, socket.TCP_NODELAY, 1)] #: Whether this connection verifies the host's certificate. is_verified = False def __init__(self, *args, **kw): if six.PY3: # Python 3 kw.pop('strict', None) # Pre-set source_address in case we have an older Python like 2.6. 
self.source_address = kw.get('source_address') if sys.version_info < (2, 7): # Python 2.6 # _HTTPConnection on Python 2.6 will balk at this keyword arg, but # not newer versions. We can still use it when creating a # connection though, so we pop it *after* we have saved it as # self.source_address. kw.pop('source_address', None) #: The socket options provided by the user. If no options are #: provided, we use the default options. self.socket_options = kw.pop('socket_options', self.default_socket_options) # Superclass also sets self.source_address in Python 2.7+. _HTTPConnection.__init__(self, *args, **kw) def _new_conn(self): """ Establish a socket connection and set nodelay settings on it. :return: New socket connection. """ extra_kw = {} if self.source_address: extra_kw['source_address'] = self.source_address if self.socket_options: extra_kw['socket_options'] = self.socket_options try: conn = connection.create_connection( (self.host, self.port), self.timeout, **extra_kw) except SocketTimeout as e: raise ConnectTimeoutError( self, "Connection to %s timed out. (connect timeout=%s)" % (self.host, self.timeout)) except SocketError as e: raise NewConnectionError( self, "Failed to establish a new connection: %s" % e) return conn def _prepare_conn(self, conn): self.sock = conn # the _tunnel_host attribute was added in python 2.6.3 (via # http://hg.python.org/cpython/rev/0f57b30a152f) so pythons 2.6(0-2) do # not have them. if getattr(self, '_tunnel_host', None): # TODO: Fix tunnel so it doesn't depend on self.sock state. self._tunnel() # Mark this connection as not reusable self.auto_open = 0 def connect(self): conn = self._new_conn() self._prepare_conn(conn) def request_chunked(self, method, url, body=None, headers=None): """ Alternative to the common request method, which sends the body with chunked encoding and not as one block """ headers = HTTPHeaderDict(headers if headers is not None else {}) skip_accept_encoding = 'accept-encoding' in headers skip_host = 'host' in headers self.putrequest( method, url, skip_accept_encoding=skip_accept_encoding, skip_host=skip_host ) for header, value in headers.items(): self.putheader(header, value) if 'transfer-encoding' not in headers: self.putheader('Transfer-Encoding', 'chunked') self.endheaders() if body is not None: stringish_types = six.string_types + (six.binary_type,) if isinstance(body, stringish_types): body = (body,) for chunk in body: if not chunk: continue if not isinstance(chunk, six.binary_type): chunk = chunk.encode('utf8') len_str = hex(len(chunk))[2:] self.send(len_str.encode('utf-8')) self.send(b'\r\n') self.send(chunk) self.send(b'\r\n') # After the if clause, to always have a closed body self.send(b'0\r\n\r\n') class HTTPSConnection(HTTPConnection): default_port = port_by_scheme['https'] ssl_version = None def __init__(self, host, port=None, key_file=None, cert_file=None, strict=None, timeout=socket._GLOBAL_DEFAULT_TIMEOUT, ssl_context=None, **kw): HTTPConnection.__init__(self, host, port, strict=strict, timeout=timeout, **kw) self.key_file = key_file self.cert_file = cert_file self.ssl_context = ssl_context # Required property for Google AppEngine 1.9.0 which otherwise causes # HTTPS requests to go out as HTTP. 
(See Issue #356) self._protocol = 'https' def connect(self): conn = self._new_conn() self._prepare_conn(conn) if self.ssl_context is None: self.ssl_context = create_urllib3_context( ssl_version=resolve_ssl_version(None), cert_reqs=resolve_cert_reqs(None), ) self.sock = ssl_wrap_socket( sock=conn, keyfile=self.key_file, certfile=self.cert_file, ssl_context=self.ssl_context, ) class VerifiedHTTPSConnection(HTTPSConnection): """ Based on httplib.HTTPSConnection but wraps the socket with SSL certification. """ cert_reqs = None ca_certs = None ca_cert_dir = None ssl_version = None assert_fingerprint = None def set_cert(self, key_file=None, cert_file=None, cert_reqs=None, ca_certs=None, assert_hostname=None, assert_fingerprint=None, ca_cert_dir=None): """ This method should only be called once, before the connection is used. """ # If cert_reqs is not provided, we can try to guess. If the user gave # us a cert database, we assume they want to use it: otherwise, if # they gave us an SSL Context object we should use whatever is set for # it. if cert_reqs is None: if ca_certs or ca_cert_dir: cert_reqs = 'CERT_REQUIRED' elif self.ssl_context is not None: cert_reqs = self.ssl_context.verify_mode self.key_file = key_file self.cert_file = cert_file self.cert_reqs = cert_reqs self.assert_hostname = assert_hostname self.assert_fingerprint = assert_fingerprint self.ca_certs = ca_certs and os.path.expanduser(ca_certs) self.ca_cert_dir = ca_cert_dir and os.path.expanduser(ca_cert_dir) def connect(self): # Add certificate verification conn = self._new_conn() hostname = self.host if getattr(self, '_tunnel_host', None): # _tunnel_host was added in Python 2.6.3 # (See: http://hg.python.org/cpython/rev/0f57b30a152f) self.sock = conn # Calls self._set_hostport(), so self.host is # self._tunnel_host below. self._tunnel() # Mark this connection as not reusable self.auto_open = 0 # Override the host with the one we're requesting data from. hostname = self._tunnel_host is_time_off = datetime.date.today() < RECENT_DATE if is_time_off: warnings.warn(( 'System time is way off (before {0}). This will probably ' 'lead to SSL verification errors').format(RECENT_DATE), SystemTimeWarning ) # Wrap socket using verification with the root certs in # trusted_root_certs if self.ssl_context is None: self.ssl_context = create_urllib3_context( ssl_version=resolve_ssl_version(self.ssl_version), cert_reqs=resolve_cert_reqs(self.cert_reqs), ) context = self.ssl_context context.verify_mode = resolve_cert_reqs(self.cert_reqs) self.sock = ssl_wrap_socket( sock=conn, keyfile=self.key_file, certfile=self.cert_file, ca_certs=self.ca_certs, ca_cert_dir=self.ca_cert_dir, server_hostname=hostname, ssl_context=context) if self.assert_fingerprint: assert_fingerprint(self.sock.getpeercert(binary_form=True), self.assert_fingerprint) elif context.verify_mode != ssl.CERT_NONE \ and not getattr(context, 'check_hostname', False) \ and self.assert_hostname is not False: # While urllib3 attempts to always turn off hostname matching from # the TLS library, this cannot always be done. So we check whether # the TLS Library still thinks it's matching hostnames. cert = self.sock.getpeercert() if not cert.get('subjectAltName', ()): warnings.warn(( 'Certificate for {0} has no `subjectAltName`, falling back to check for a ' '`commonName` for now. This feature is being removed by major browsers and ' 'deprecated by RFC 2818. 
(See https://github.com/shazow/urllib3/issues/497 ' 'for details.)'.format(hostname)), SubjectAltNameWarning ) _match_hostname(cert, self.assert_hostname or hostname) self.is_verified = ( context.verify_mode == ssl.CERT_REQUIRED or self.assert_fingerprint is not None ) def _match_hostname(cert, asserted_hostname): try: match_hostname(cert, asserted_hostname) except CertificateError as e: log.error( 'Certificate did not match expected hostname: %s. ' 'Certificate: %s', asserted_hostname, cert ) # Add cert to exception and reraise so client code can inspect # the cert when catching the exception, if they want to e._peer_cert = cert raise if ssl: # Make a copy for testing. UnverifiedHTTPSConnection = HTTPSConnection HTTPSConnection = VerifiedHTTPSConnection else: HTTPSConnection = DummyConnection
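A minimal usage sketch for the connection classes above (illustrative only, not part of the vendored module; it assumes the vendored copy is importable as pip._vendor.urllib3 — with a standalone urllib3 install, drop the pip._vendor prefix):

import socket
from pip._vendor.urllib3.connection import HTTPConnection

# Extend the default options (which disable Nagle's algorithm) with TCP
# keep-alive, as suggested by the HTTPConnection docstring above.
options = HTTPConnection.default_socket_options + [
    (socket.SOL_SOCKET, socket.SO_KEEPALIVE, 1),
]

conn = HTTPConnection('example.com', 80, socket_options=options)
conn.request('GET', '/')
response = conn.getresponse()  # plain httplib-style response object
print(response.status)
conn.close()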
cC@s@||_||_||_i|_|r<t||_ndS(N(t_namet _filenametdatatheaderstdict(tselfRRRR((s>/usr/lib/python2.7/site-packages/pip/_vendor/urllib3/fields.pyt__init__?s     cC@st|trNt|dkr3|\}}}q`|\}}t|}nd}d}|}|||d|}|jd||S(s A :class:`~urllib3.fields.RequestField` factory from old-style tuple parameters. Supports constructing :class:`~urllib3.fields.RequestField` from parameter of key/value strings AND key/filetuple. A filetuple is a (filename, data, MIME type) tuple where the MIME type is optional. For example:: 'foo': 'bar', 'fakefile': ('foofile.txt', 'contents of foofile'), 'realfile': ('barfile.txt', open('realfile').read()), 'typedfile': ('bazfile.bin', open('bazfile').read(), 'image/jpeg'), 'nonamefile': 'contents of nonamefile field', Field names and filenames must be unicode. iRt content_typeN(RttupletlenRtNonetmake_multipart(tclst fieldnameR RRR t request_param((s>/usr/lib/python2.7/site-packages/pip/_vendor/urllib3/fields.pyt from_tuplesGs cC@s t||S(s Overridable helper function to format a single header parameter. :param name: The name of the parameter, a string expected to be ASCII only. :param value: The value of the parameter, provided as a unicode string. (R(RRR ((s>/usr/lib/python2.7/site-packages/pip/_vendor/urllib3/fields.pyt _render_partis cC@svg}|}t|tr*|j}nx<|D]4\}}|dk r1|j|j||q1q1Wdj|S(sO Helper function to format and quote a single header. Useful for single headers that are composed of multiple items. E.g., 'Content-Disposition' fields. :param header_parts: A sequence of (k, v) typles or a :class:`dict` of (k, v) to format as `k1="v1"; k2="v2"; ...`. s; N(RRtitemsR#tappendR)tjoin(Rt header_partstpartstiterableRR ((s>/usr/lib/python2.7/site-packages/pip/_vendor/urllib3/fields.pyt _render_partsts   cC@sg}dddg}xD|D]<}|jj|tr|jd||j|fqqWxL|jjD];\}}||krl|r|jd||fqqlqlW|jddj|S(s= Renders the headers for this request field. sContent-Dispositions Content-TypesContent-Locations%s: %ss (RtgettFalseR+R*R,(Rtlinest sort_keystsort_keyt header_namet header_value((s>/usr/lib/python2.7/site-packages/pip/_vendor/urllib3/fields.pytrender_headerss % ! c C@st|p d|jd<|jdcdjd|jd|jfd|jffg7<||jd<||jd/usr/lib/python2.7/site-packages/pip/_vendor/urllib3/fields.pyR$s  ' N( t__name__t __module__t__doc__R#Rt classmethodR(R)R0R8R$(((s>/usr/lib/python2.7/site-packages/pip/_vendor/urllib3/fields.pyR2s "  ( t __future__Rt email.utilsRRtpackagesRRRtobjectR(((s>/usr/lib/python2.7/site-packages/pip/_vendor/urllib3/fields.pyts     PKZ1::,site-packages/pip/_vendor/urllib3/request.pynu[from __future__ import absolute_import from .filepost import encode_multipart_formdata from .packages.six.moves.urllib.parse import urlencode __all__ = ['RequestMethods'] class RequestMethods(object): """ Convenience mixin for classes who implement a :meth:`urlopen` method, such as :class:`~urllib3.connectionpool.HTTPConnectionPool` and :class:`~urllib3.poolmanager.PoolManager`. Provides behavior for making common types of HTTP request methods and decides which type of request field encoding to use. Specifically, :meth:`.request_encode_url` is for sending requests whose fields are encoded in the URL (such as GET, HEAD, DELETE). :meth:`.request_encode_body` is for sending requests whose fields are encoded in the *body* of the request using multipart or www-form-urlencoded (such as for POST, PUT, PATCH). :meth:`.request` is for making any kind of request, it will look up the appropriate encoding format and use one of the above two methods to make the request. 
Initializer parameters: :param headers: Headers to include with all requests, unless other headers are given explicitly. """ _encode_url_methods = set(['DELETE', 'GET', 'HEAD', 'OPTIONS']) def __init__(self, headers=None): self.headers = headers or {} def urlopen(self, method, url, body=None, headers=None, encode_multipart=True, multipart_boundary=None, **kw): # Abstract raise NotImplemented("Classes extending RequestMethods must implement " "their own ``urlopen`` method.") def request(self, method, url, fields=None, headers=None, **urlopen_kw): """ Make a request using :meth:`urlopen` with the appropriate encoding of ``fields`` based on the ``method`` used. This is a convenience method that requires the least amount of manual effort. It can be used in most situations, while still having the option to drop down to more specific methods when necessary, such as :meth:`request_encode_url`, :meth:`request_encode_body`, or even the lowest level :meth:`urlopen`. """ method = method.upper() if method in self._encode_url_methods: return self.request_encode_url(method, url, fields=fields, headers=headers, **urlopen_kw) else: return self.request_encode_body(method, url, fields=fields, headers=headers, **urlopen_kw) def request_encode_url(self, method, url, fields=None, headers=None, **urlopen_kw): """ Make a request using :meth:`urlopen` with the ``fields`` encoded in the url. This is useful for request methods like GET, HEAD, DELETE, etc. """ if headers is None: headers = self.headers extra_kw = {'headers': headers} extra_kw.update(urlopen_kw) if fields: url += '?' + urlencode(fields) return self.urlopen(method, url, **extra_kw) def request_encode_body(self, method, url, fields=None, headers=None, encode_multipart=True, multipart_boundary=None, **urlopen_kw): """ Make a request using :meth:`urlopen` with the ``fields`` encoded in the body. This is useful for request methods like POST, PUT, PATCH, etc. When ``encode_multipart=True`` (default), then :meth:`urllib3.filepost.encode_multipart_formdata` is used to encode the payload with the appropriate content type. Otherwise :meth:`urllib.urlencode` is used with the 'application/x-www-form-urlencoded' content type. Multipart encoding must be used when posting files, and it's reasonably safe to use it in other times too. However, it may break request signing, such as with OAuth. Supports an optional ``fields`` parameter of key/value strings AND key/filetuple. A filetuple is a (filename, data, MIME type) tuple where the MIME type is optional. For example:: fields = { 'foo': 'bar', 'fakefile': ('foofile.txt', 'contents of foofile'), 'realfile': ('barfile.txt', open('realfile').read()), 'typedfile': ('bazfile.bin', open('bazfile').read(), 'image/jpeg'), 'nonamefile': 'contents of nonamefile field', } When uploading a file, providing a filename (the first parameter of the tuple) is optional but recommended to best mimick behavior of browsers. Note that if ``headers`` are supplied, the 'Content-Type' header will be overwritten because it depends on the dynamic random boundary string which is used to compose the body of the request. The random boundary string can be explicitly set with the ``multipart_boundary`` parameter. 
""" if headers is None: headers = self.headers extra_kw = {'headers': {}} if fields: if 'body' in urlopen_kw: raise TypeError( "request got values for both 'fields' and 'body', can only specify one.") if encode_multipart: body, content_type = encode_multipart_formdata(fields, boundary=multipart_boundary) else: body, content_type = urlencode(fields), 'application/x-www-form-urlencoded' extra_kw['body'] = body extra_kw['headers'] = {'Content-Type': content_type} extra_kw['headers'].update(headers) extra_kw.update(urlopen_kw) return self.urlopen(method, url, **extra_kw) PKZ]  -site-packages/pip/_vendor/urllib3/filepost.pynu[from __future__ import absolute_import import codecs from uuid import uuid4 from io import BytesIO from .packages import six from .packages.six import b from .fields import RequestField writer = codecs.lookup('utf-8')[3] def choose_boundary(): """ Our embarrassingly-simple replacement for mimetools.choose_boundary. """ return uuid4().hex def iter_field_objects(fields): """ Iterate over fields. Supports list of (k, v) tuples and dicts, and lists of :class:`~urllib3.fields.RequestField`. """ if isinstance(fields, dict): i = six.iteritems(fields) else: i = iter(fields) for field in i: if isinstance(field, RequestField): yield field else: yield RequestField.from_tuples(*field) def iter_fields(fields): """ .. deprecated:: 1.6 Iterate over fields. The addition of :class:`~urllib3.fields.RequestField` makes this function obsolete. Instead, use :func:`iter_field_objects`, which returns :class:`~urllib3.fields.RequestField` objects. Supports list of (k, v) tuples and dicts. """ if isinstance(fields, dict): return ((k, v) for k, v in six.iteritems(fields)) return ((k, v) for k, v in fields) def encode_multipart_formdata(fields, boundary=None): """ Encode a dictionary of ``fields`` using the multipart/form-data MIME format. :param fields: Dictionary of fields or list of (key, :class:`~urllib3.fields.RequestField`). :param boundary: If not specified, then a random boundary will be generated using :func:`mimetools.choose_boundary`. """ body = BytesIO() if boundary is None: boundary = choose_boundary() for field in iter_field_objects(fields): body.write(b('--%s\r\n' % (boundary))) writer(body).write(field.render_headers()) data = field.data if isinstance(data, int): data = str(data) # Backwards compatibility if isinstance(data, six.text_type): writer(body).write(data) else: body.write(data) body.write(b'\r\n') body.write(b('--%s--\r\n' % (boundary))) content_type = str('multipart/form-data; boundary=%s' % boundary) return body.getvalue(), content_type PKZ>% % -site-packages/pip/_vendor/urllib3/__init__.pynu[""" urllib3 - Thread-safe connection pooling and re-using. """ from __future__ import absolute_import import warnings from .connectionpool import ( HTTPConnectionPool, HTTPSConnectionPool, connection_from_url ) from . import exceptions from .filepost import encode_multipart_formdata from .poolmanager import PoolManager, ProxyManager, proxy_from_url from .response import HTTPResponse from .util.request import make_headers from .util.url import get_host from .util.timeout import Timeout from .util.retry import Retry # Set default logging handler to avoid "No handler found" warnings. 
import logging try: # Python 2.7+ from logging import NullHandler except ImportError: class NullHandler(logging.Handler): def emit(self, record): pass __author__ = 'Andrey Petrov (andrey.petrov@shazow.net)' __license__ = 'MIT' __version__ = '1.22' __all__ = ( 'HTTPConnectionPool', 'HTTPSConnectionPool', 'PoolManager', 'ProxyManager', 'HTTPResponse', 'Retry', 'Timeout', 'add_stderr_logger', 'connection_from_url', 'disable_warnings', 'encode_multipart_formdata', 'get_host', 'make_headers', 'proxy_from_url', ) logging.getLogger(__name__).addHandler(NullHandler()) def add_stderr_logger(level=logging.DEBUG): """ Helper for quickly adding a StreamHandler to the logger. Useful for debugging. Returns the handler after adding it. """ # This method needs to be in this __init__.py to get the __name__ correct # even if urllib3 is vendored within another package. logger = logging.getLogger(__name__) handler = logging.StreamHandler() handler.setFormatter(logging.Formatter('%(asctime)s %(levelname)s %(message)s')) logger.addHandler(handler) logger.setLevel(level) logger.debug('Added a stderr logging handler to logger: %s', __name__) return handler # ... Clean up. del NullHandler # All warning filters *must* be appended unless you're really certain that they # shouldn't be: otherwise, it's very hard for users to use most Python # mechanisms to silence them. # SecurityWarning's always go off by default. warnings.simplefilter('always', exceptions.SecurityWarning, append=True) # SubjectAltNameWarning's should go off once per host warnings.simplefilter('default', exceptions.SubjectAltNameWarning, append=True) # InsecurePlatformWarning's don't vary between requests, so we keep it default. warnings.simplefilter('default', exceptions.InsecurePlatformWarning, append=True) # SNIMissingWarnings should go off only once. warnings.simplefilter('default', exceptions.SNIMissingWarning, append=True) def disable_warnings(category=exceptions.HTTPWarning): """ Helper for quickly disabling all urllib3 warnings. 
""" warnings.simplefilter('ignore', category) PKZx~~2site-packages/pip/_vendor/urllib3/packages/six.pycnu[ abcA@@sKdZddlmZddlZddlZddlZddlZddlZdZdZ ej ddkZ ej ddkZ ej dd!dakZ e refZefZefZeZeZejZnefZeefZeejfZeZeZejjd r$edcZnVd efd YZ ye!e Wne"k rjedeZn XedgZ[ dZ#dZ$defdYZ%de%fdYZ&dej'fdYZ(de%fdYZ)defdYZ*e*e+Z,de(fdYZ-e)dddde)d d!d"d#d e)d$d!d!d%d$e)d&d'd"d(d&e)d)d'd*e)d+d!d"d,d+e)d-d.d.d/d-e)d0d.d.d-d0e)d1d'd"d2d1e)d3d'e rd4nd5d6e)d7d'd8e)d9d:d;d<e)ddde)d=d=d>e)d?d?d>e)d@d@d>e)d2d'd"d2d1e)dAd!d"dBdAe)dCd!d!dDdCe&d"d'e&dEdFe&dGdHe&dIdJdKe&dLdMdLe&dNdOdPe&dQdRdSe&dTdUdVe&dWdXdYe&dZd[d\e&d]d^d_e&d`dadbe&dcdddee&dfdgdhe&dididje&dkdkdje&dldldje&dmdmdne&dodpe&dqdre&dsdte&dudvdue&dwdxe&dydzd{e&d|d}d~e&ddde&ddde&ddde&ddde&ddde&ddde&ddde&ddde&ddd~e&ddde&ddde&ddde&de+dde&de+dde&de+de+de&ddde&ddde&dddg>Z.ejdkr;e.e&ddg7Z.nxJe.D]BZ/e0e-e/j1e/e2e/e&rBe,j3e/de/j1qBqBW[/e.e-_.e-e+dZ4e,j3e4dde(fdYZ5e)ddde)ddde)ddde)ddde)ddde)ddde)ddde)ddde)ddde)ddde)d<dde)ddde)ddde)ddde)ddde)ddde)ddde)ddde)ddde)ddde)ddde)ddde)dddgZ6x!e6D]Z/e0e5e/j1e/q0W[/e6e5_.e,j3e5e+dddde(fdYZ7e)ddde)ddde)dddgZ8x!e8D]Z/e0e7e/j1e/qW[/e8e7_.e,j3e7e+dddde(fdYZ9e)ddde)ddde)ddde)ddde)ddde)ddde)ddde)ddde)ddde)ddde)ddde)ddde)ddde)ddde)ddde)ddde)ddde)ddde)ddde)ddde)ddde)ddde)ddde)ddde)ddde)ddde)ddde)ddde)ddde)ddde)ddde)ddde)dddg!Z:x!e:D]Z/e0e9e/j1e/q W[/e:e9_.e,j3e9e+dddde(fdYZ;e)ddde)ddde)ddde)dddgZ<x!e<D]Z/e0e;e/j1e/q W[/e<e;_.e,j3e;e+d d d d e(fd YZ=e)dddgZ>x!e>D]Z/e0e=e/j1e/q; W[/e>e=_.e,j3e=e+ddddej'fdYZ?e,j3e?e+dddZ@dZAe r dZBdZCdZDdZEdZFdZGn$dZBdZCdZDd ZEd!ZFd"ZGy eHZIWneJk r= d#ZInXeIZHy eKZKWneJk rj d$ZKnXe r d%ZLejMZNd&ZOeZPn7d'ZLd(ZNd)ZOd*efd+YZPeKZKe#eLd,ejQeBZRejQeCZSejQeDZTejQeEZUejQeFZVejQeGZWe rd-ZXd.ZYd/ZZd0Z[ej\d1Z]ej\d2Z^ej\d3Z_nQd4ZXd5ZYd6ZZd7Z[ej\d8Z]ej\d9Z^ej\d:Z_e#eXd;e#eYd<e#eZd=e#e[d>e rd?Z`d@ZaebZcddldZdedjedAjfZg[dejhdZiejjZkelZmddlnZnenjoZoenjpZpdBZqej d d krdCZrdDZsq4dEZrdFZsnpdGZ`dHZaecZcebZgdIZidJZkejtejuevZmddloZoeojoZoZpdKZqdCZrdDZse#e`dLe#eadMdNZwdOZxdPZye reze4j{dQZ|ddRZ~ndddSZ|e|dTej d dhkre|dUn)ej d dikre|dVn dWZeze4j{dXdZedkrdYZnej d djkrDeZdZZne#e~d[ej dd!dkkrejejd\Zn ejZd]Zd^Zd_ZgZe+Zejd`dk rge_nejr7xOeejD]>\ZZeej+dkrej1e+kreje=PqqW[[nejje,dS(ls6Utilities for writing code that runs on Python 2 and 3i(tabsolute_importNs'Benjamin Peterson s1.10.0iiitjavaiitXcB@seZdZRS(cC@sdS(NiiI((tself((sD/usr/lib/python2.7/site-packages/pip/_vendor/urllib3/packages/six.pyt__len__>s(t__name__t __module__R(((sD/usr/lib/python2.7/site-packages/pip/_vendor/urllib3/packages/six.pyR<si?cC@s ||_dS(s Add documentation to a function.N(t__doc__(tfunctdoc((sD/usr/lib/python2.7/site-packages/pip/_vendor/urllib3/packages/six.pyt_add_docKscC@st|tj|S(s7Import module, returning the module after the last dot.(t __import__tsystmodules(tname((sD/usr/lib/python2.7/site-packages/pip/_vendor/urllib3/packages/six.pyt_import_modulePs t _LazyDescrcB@seZdZdZRS(cC@s ||_dS(N(R(RR((sD/usr/lib/python2.7/site-packages/pip/_vendor/urllib3/packages/six.pyt__init__XscC@sN|j}t||j|yt|j|jWntk rInX|S(N(t_resolvetsetattrRtdelattrt __class__tAttributeError(Rtobjttptresult((sD/usr/lib/python2.7/site-packages/pip/_vendor/urllib3/packages/six.pyt__get__[s  (RRRR(((sD/usr/lib/python2.7/site-packages/pip/_vendor/urllib3/packages/six.pyRVs t MovedModulecB@s&eZddZdZdZRS(cC@sJtt|j|tr=|dkr1|}n||_n ||_dS(N(tsuperRRtPY3tNonetmod(RRtoldtnew((sD/usr/lib/python2.7/site-packages/pip/_vendor/urllib3/packages/six.pyRis    cC@s 
(t ipaddresst ip_addressR(trstrip(tipnamethost_iptip((sc/usr/lib/python2.7/site-packages/pip/_vendor/urllib3/packages/ssl_match_hostname/_implementation.pyt_ipaddress_matchTscCs|stdnytjt|}WnUtk rGd}n?tk r]d}n)tk rtdkrd}qnXg}|jdd}x|D]\}}|dkr|dkrt||rdS|j |q|dkr|dk rt ||rdS|j |qqW|sxc|jddD]L}xC|D];\}}|dkrQt||r|dS|j |qQqQWqDWnt |dkrt d |d j tt|fn;t |dkrt d ||d fn t d dS(s)Verify that *cert* (in decoded format as returned by SSLSocket.getpeercert()) matches the *hostname*. RFC 2818 and RFC 6125 rules are followed, but IP addresses are not accepted for *hostname*. CertificateError is raised on failure. On success, the function returns nothing. stempty or no certificate, match_hostname needs a SSL socket or SSL context with either CERT_OPTIONAL or CERT_REQUIREDtsubjectAltNametDNSNs IP Addresstsubjectt commonNameis&hostname %r doesn't match either of %ss, shostname %r doesn't match %ris=no appropriate commonName or subjectAltName fields were found(((t ValueErrorR)R*R(tNonet UnicodeErrortAttributeErrortgetRR R/tlenRRtmapR(tcertRR-tdnsnamestsantkeytvaluetsub((sc/usr/lib/python2.7/site-packages/pip/_vendor/urllib3/packages/ssl_match_hostname/_implementation.pytmatch_hostname`sJ          %( t__doc__R R$R)t ImportErrorR5t __version__R4RRR(R/RA(((sc/usr/lib/python2.7/site-packages/pip/_vendor/urllib3/packages/ssl_match_hostname/_implementation.pyts     5  PKZͩwTllJsite-packages/pip/_vendor/urllib3/packages/ssl_match_hostname/__init__.pycnu[ abc@sddlZy8ejd kr-ednddlmZmZWnUek ryddlmZmZWqek rddlmZmZqXnXd ZdS( iNiisFallback to vendored code(tCertificateErrortmatch_hostnameiRR(ii(RR( tsyst version_infot ImportErrortsslRRtbackports.ssl_match_hostnamet_implementationt__all__(((s\/usr/lib/python2.7/site-packages/pip/_vendor/urllib3/packages/ssl_match_hostname/__init__.pyts   PKZ:+FFPsite-packages/pip/_vendor/urllib3/packages/ssl_match_hostname/_implementation.pynu["""The match_hostname() function from Python 3.3.3, essential when using SSL.""" # Note: This file is under the PSF license as the code comes from the python # stdlib. http://docs.python.org/3/license.html import re import sys # ipaddress has been backported to 2.6+ in pypi. If it is installed on the # system, use it to handle IPAddress ServerAltnames (this was added in # python-3.5) otherwise only do DNS matching. This allows # backports.ssl_match_hostname to continue to be used all the way back to # python-2.4. try: import ipaddress except ImportError: ipaddress = None __version__ = '3.5.0.1' class CertificateError(ValueError): pass def _dnsname_match(dn, hostname, max_wildcards=1): """Matching according to RFC 6125, section 6.4.3 http://tools.ietf.org/html/rfc6125#section-6.4.3 """ pats = [] if not dn: return False # Ported from python3-syntax: # leftmost, *remainder = dn.split(r'.') parts = dn.split(r'.') leftmost = parts[0] remainder = parts[1:] wildcards = leftmost.count('*') if wildcards > max_wildcards: # Issue #17980: avoid denials of service by refusing more # than one wildcard per fragment. A survey of established # policy among SSL implementations showed it to be a # reasonable choice. raise CertificateError( "too many wildcards in certificate DNS name: " + repr(dn)) # speed up common case w/o wildcards if not wildcards: return dn.lower() == hostname.lower() # RFC 6125, section 6.4.3, subitem 1. # The client SHOULD NOT attempt to match a presented identifier in which # the wildcard character comprises a label other than the left-most label. if leftmost == '*': # When '*' is a fragment by itself, it matches a non-empty dotless # fragment. 
pats.append('[^.]+') elif leftmost.startswith('xn--') or hostname.startswith('xn--'): # RFC 6125, section 6.4.3, subitem 3. # The client SHOULD NOT attempt to match a presented identifier # where the wildcard character is embedded within an A-label or # U-label of an internationalized domain name. pats.append(re.escape(leftmost)) else: # Otherwise, '*' matches any dotless string, e.g. www* pats.append(re.escape(leftmost).replace(r'\*', '[^.]*')) # add the remaining fragments, ignore any wildcards for frag in remainder: pats.append(re.escape(frag)) pat = re.compile(r'\A' + r'\.'.join(pats) + r'\Z', re.IGNORECASE) return pat.match(hostname) def _to_unicode(obj): if isinstance(obj, str) and sys.version_info < (3,): obj = unicode(obj, encoding='ascii', errors='strict') return obj def _ipaddress_match(ipname, host_ip): """Exact matching of IP addresses. RFC 6125 explicitly doesn't define an algorithm for this (section 1.7.2 - "Out of Scope"). """ # OpenSSL may add a trailing newline to a subjectAltName's IP address # Divergence from upstream: ipaddress can't handle byte str ip = ipaddress.ip_address(_to_unicode(ipname).rstrip()) return ip == host_ip def match_hostname(cert, hostname): """Verify that *cert* (in decoded format as returned by SSLSocket.getpeercert()) matches the *hostname*. RFC 2818 and RFC 6125 rules are followed, but IP addresses are not accepted for *hostname*. CertificateError is raised on failure. On success, the function returns nothing. """ if not cert: raise ValueError("empty or no certificate, match_hostname needs a " "SSL socket or SSL context with either " "CERT_OPTIONAL or CERT_REQUIRED") try: # Divergence from upstream: ipaddress can't handle byte str host_ip = ipaddress.ip_address(_to_unicode(hostname)) except ValueError: # Not an IP address (common case) host_ip = None except UnicodeError: # Divergence from upstream: Have to deal with ipaddress not taking # byte strings. addresses should be all ascii, so we consider it not # an ipaddress in this case host_ip = None except AttributeError: # Divergence from upstream: Make ipaddress library optional if ipaddress is None: host_ip = None else: raise dnsnames = [] san = cert.get('subjectAltName', ()) for key, value in san: if key == 'DNS': if host_ip is None and _dnsname_match(value, hostname): return dnsnames.append(value) elif key == 'IP Address': if host_ip is not None and _ipaddress_match(value, host_ip): return dnsnames.append(value) if not dnsnames: # The subject is only checked when there is no dNSName entry # in subjectAltName for sub in cert.get('subject', ()): for key, value in sub: # XXX according to RFC 2818, the most specific Common Name # must be used. 
if key == 'commonName': if _dnsname_match(value, hostname): return dnsnames.append(value) if len(dnsnames) > 1: raise CertificateError("hostname %r " "doesn't match either of %s" % (hostname, ', '.join(map(repr, dnsnames)))) elif len(dnsnames) == 1: raise CertificateError("hostname %r " "doesn't match %r" % (hostname, dnsnames[0])) else: raise CertificateError("no appropriate commonName or " "subjectAltName fields were found") PKZͩwTllJsite-packages/pip/_vendor/urllib3/packages/ssl_match_hostname/__init__.pyonu[ abc@sddlZy8ejd kr-ednddlmZmZWnUek ryddlmZmZWqek rddlmZmZqXnXd ZdS( iNiisFallback to vendored code(tCertificateErrortmatch_hostnameiRR(ii(RR( tsyst version_infot ImportErrortsslRRtbackports.ssl_match_hostnamet_implementationt__all__(((s\/usr/lib/python2.7/site-packages/pip/_vendor/urllib3/packages/ssl_match_hostname/__init__.pyts   PKZMOQsite-packages/pip/_vendor/urllib3/packages/ssl_match_hostname/_implementation.pyonu[ abc@sdZddlZddlZyddlZWnek rGdZnXdZdefdYZddZ dZ d Z d Z dS( sJThe match_hostname() function from Python 3.3.3, essential when using SSL.iNs3.5.0.1tCertificateErrorcBseZRS((t__name__t __module__(((sc/usr/lib/python2.7/site-packages/pip/_vendor/urllib3/packages/ssl_match_hostname/_implementation.pyRsic CsRg}|stS|jd}|d}|d}|jd}||krgtdt|n|s|j|jkS|dkr|jdnY|jds|jdr|jtj |n"|jtj |j dd x$|D]}|jtj |qWtj d d j |d tj } | j|S( shMatching according to RFC 6125, section 6.4.3 http://tools.ietf.org/html/rfc6125#section-6.4.3 t.iit*s,too many wildcards in certificate DNS name: s[^.]+sxn--s\*s[^.]*s\As\.s\Z(tFalsetsplittcountRtreprtlowertappendt startswithtretescapetreplacetcompiletjoint IGNORECASEtmatch( tdnthostnamet max_wildcardstpatstpartstleftmostt remaindert wildcardstfragtpat((sc/usr/lib/python2.7/site-packages/pip/_vendor/urllib3/packages/ssl_match_hostname/_implementation.pyt_dnsname_matchs*    " &cCs=t|tr9tjdkr9t|dddd}n|S(Nitencodingtasciiterrorststrict(i(t isinstancetstrtsyst version_infotunicode(tobj((sc/usr/lib/python2.7/site-packages/pip/_vendor/urllib3/packages/ssl_match_hostname/_implementation.pyt _to_unicodeOscCs%tjt|j}||kS(sExact matching of IP addresses. RFC 6125 explicitly doesn't define an algorithm for this (section 1.7.2 - "Out of Scope"). (t ipaddresst ip_addressR(trstrip(tipnamethost_iptip((sc/usr/lib/python2.7/site-packages/pip/_vendor/urllib3/packages/ssl_match_hostname/_implementation.pyt_ipaddress_matchTscCs|stdnytjt|}WnUtk rGd}n?tk r]d}n)tk rtdkrd}qnXg}|jdd}x|D]\}}|dkr|dkrt||rdS|j |q|dkr|dk rt ||rdS|j |qqW|sxc|jddD]L}xC|D];\}}|dkrQt||r|dS|j |qQqQWqDWnt |dkrt d |d j tt|fn;t |dkrt d ||d fn t d dS(s)Verify that *cert* (in decoded format as returned by SSLSocket.getpeercert()) matches the *hostname*. RFC 2818 and RFC 6125 rules are followed, but IP addresses are not accepted for *hostname*. CertificateError is raised on failure. On success, the function returns nothing. 
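A minimal usage sketch of the match_hostname() API defined above (illustrative only: it assumes the vendored package is importable as pip._vendor.urllib3.packages.ssl_match_hostname, and the certificate dict below is a hypothetical example in the shape returned by SSLSocket.getpeercert()):

from pip._vendor.urllib3.packages.ssl_match_hostname import (
    CertificateError,
    match_hostname,
)

# A decoded certificate as SSLSocket.getpeercert() would return it
# (hypothetical values, for illustration only).
cert = {
    'subject': ((('commonName', 'example.com'),),),
    'subjectAltName': (('DNS', 'example.com'), ('DNS', '*.example.com')),
}

# Matches the '*.example.com' subjectAltName entry; returns None on success.
match_hostname(cert, 'www.example.com')

try:
    match_hostname(cert, 'attacker.invalid')
except CertificateError as exc:
    # No subjectAltName entry matches, so CertificateError is raised.
    print(exc)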
stempty or no certificate, match_hostname needs a SSL socket or SSL context with either CERT_OPTIONAL or CERT_REQUIREDtsubjectAltNametDNSNs IP Addresstsubjectt commonNameis&hostname %r doesn't match either of %ss, shostname %r doesn't match %ris=no appropriate commonName or subjectAltName fields were found(((t ValueErrorR)R*R(tNonet UnicodeErrortAttributeErrortgetRR R/tlenRRtmapR(tcertRR-tdnsnamestsantkeytvaluetsub((sc/usr/lib/python2.7/site-packages/pip/_vendor/urllib3/packages/ssl_match_hostname/_implementation.pytmatch_hostname`sJ          %( t__doc__R R$R)t ImportErrorR5t __version__R4RRR(R/RA(((sc/usr/lib/python2.7/site-packages/pip/_vendor/urllib3/packages/ssl_match_hostname/_implementation.pyts     5  PKZmIsite-packages/pip/_vendor/urllib3/packages/ssl_match_hostname/__init__.pynu[import sys try: # Our match_hostname function is the same as 3.5's, so we only want to # import the match_hostname function if it's at least that good. if sys.version_info < (3, 5): raise ImportError("Fallback to vendored code") from ssl import CertificateError, match_hostname except ImportError: try: # Backport of the function from a pypi module from backports.ssl_match_hostname import CertificateError, match_hostname except ImportError: # Our vendored copy from ._implementation import CertificateError, match_hostname # Not needed, but documenting what we provide. __all__ = ('CertificateError', 'match_hostname') PKZXMZuu1site-packages/pip/_vendor/urllib3/packages/six.pynu["""Utilities for writing code that runs on Python 2 and 3""" # Copyright (c) 2010-2015 Benjamin Peterson # # Permission is hereby granted, free of charge, to any person obtaining a copy # of this software and associated documentation files (the "Software"), to deal # in the Software without restriction, including without limitation the rights # to use, copy, modify, merge, publish, distribute, sublicense, and/or sell # copies of the Software, and to permit persons to whom the Software is # furnished to do so, subject to the following conditions: # # The above copyright notice and this permission notice shall be included in all # copies or substantial portions of the Software. # # THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR # IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, # FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE # AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER # LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, # OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE # SOFTWARE. from __future__ import absolute_import import functools import itertools import operator import sys import types __author__ = "Benjamin Peterson " __version__ = "1.10.0" # Useful for very coarse version differentiation. PY2 = sys.version_info[0] == 2 PY3 = sys.version_info[0] == 3 PY34 = sys.version_info[0:2] >= (3, 4) if PY3: string_types = str, integer_types = int, class_types = type, text_type = str binary_type = bytes MAXSIZE = sys.maxsize else: string_types = basestring, integer_types = (int, long) class_types = (type, types.ClassType) text_type = unicode binary_type = str if sys.platform.startswith("java"): # Jython always uses 32 bits. MAXSIZE = int((1 << 31) - 1) else: # It's possible to have sizeof(long) != sizeof(Py_ssize_t). 
class X(object): def __len__(self): return 1 << 31 try: len(X()) except OverflowError: # 32-bit MAXSIZE = int((1 << 31) - 1) else: # 64-bit MAXSIZE = int((1 << 63) - 1) del X def _add_doc(func, doc): """Add documentation to a function.""" func.__doc__ = doc def _import_module(name): """Import module, returning the module after the last dot.""" __import__(name) return sys.modules[name] class _LazyDescr(object): def __init__(self, name): self.name = name def __get__(self, obj, tp): result = self._resolve() setattr(obj, self.name, result) # Invokes __set__. try: # This is a bit ugly, but it avoids running this again by # removing this descriptor. delattr(obj.__class__, self.name) except AttributeError: pass return result class MovedModule(_LazyDescr): def __init__(self, name, old, new=None): super(MovedModule, self).__init__(name) if PY3: if new is None: new = name self.mod = new else: self.mod = old def _resolve(self): return _import_module(self.mod) def __getattr__(self, attr): _module = self._resolve() value = getattr(_module, attr) setattr(self, attr, value) return value class _LazyModule(types.ModuleType): def __init__(self, name): super(_LazyModule, self).__init__(name) self.__doc__ = self.__class__.__doc__ def __dir__(self): attrs = ["__doc__", "__name__"] attrs += [attr.name for attr in self._moved_attributes] return attrs # Subclasses should override this _moved_attributes = [] class MovedAttribute(_LazyDescr): def __init__(self, name, old_mod, new_mod, old_attr=None, new_attr=None): super(MovedAttribute, self).__init__(name) if PY3: if new_mod is None: new_mod = name self.mod = new_mod if new_attr is None: if old_attr is None: new_attr = name else: new_attr = old_attr self.attr = new_attr else: self.mod = old_mod if old_attr is None: old_attr = name self.attr = old_attr def _resolve(self): module = _import_module(self.mod) return getattr(module, self.attr) class _SixMetaPathImporter(object): """ A meta path importer to import six.moves and its submodules. This class implements a PEP302 finder and loader. It should be compatible with Python 2.5 and all existing versions of Python3 """ def __init__(self, six_module_name): self.name = six_module_name self.known_modules = {} def _add_module(self, mod, *fullnames): for fullname in fullnames: self.known_modules[self.name + "." + fullname] = mod def _get_module(self, fullname): return self.known_modules[self.name + "." + fullname] def find_module(self, fullname, path=None): if fullname in self.known_modules: return self return None def __get_module(self, fullname): try: return self.known_modules[fullname] except KeyError: raise ImportError("This loader does not know module " + fullname) def load_module(self, fullname): try: # in case of a reload return sys.modules[fullname] except KeyError: pass mod = self.__get_module(fullname) if isinstance(mod, MovedModule): mod = mod._resolve() else: mod.__loader__ = self sys.modules[fullname] = mod return mod def is_package(self, fullname): """ Return true, if the named module is a package. 
We need this method to get correct spec objects with Python 3.4 (see PEP451) """ return hasattr(self.__get_module(fullname), "__path__") def get_code(self, fullname): """Return None Required, if is_package is implemented""" self.__get_module(fullname) # eventually raises ImportError return None get_source = get_code # same as get_code _importer = _SixMetaPathImporter(__name__) class _MovedItems(_LazyModule): """Lazy loading of moved objects""" __path__ = [] # mark as package _moved_attributes = [ MovedAttribute("cStringIO", "cStringIO", "io", "StringIO"), MovedAttribute("filter", "itertools", "builtins", "ifilter", "filter"), MovedAttribute("filterfalse", "itertools", "itertools", "ifilterfalse", "filterfalse"), MovedAttribute("input", "__builtin__", "builtins", "raw_input", "input"), MovedAttribute("intern", "__builtin__", "sys"), MovedAttribute("map", "itertools", "builtins", "imap", "map"), MovedAttribute("getcwd", "os", "os", "getcwdu", "getcwd"), MovedAttribute("getcwdb", "os", "os", "getcwd", "getcwdb"), MovedAttribute("range", "__builtin__", "builtins", "xrange", "range"), MovedAttribute("reload_module", "__builtin__", "importlib" if PY34 else "imp", "reload"), MovedAttribute("reduce", "__builtin__", "functools"), MovedAttribute("shlex_quote", "pipes", "shlex", "quote"), MovedAttribute("StringIO", "StringIO", "io"), MovedAttribute("UserDict", "UserDict", "collections"), MovedAttribute("UserList", "UserList", "collections"), MovedAttribute("UserString", "UserString", "collections"), MovedAttribute("xrange", "__builtin__", "builtins", "xrange", "range"), MovedAttribute("zip", "itertools", "builtins", "izip", "zip"), MovedAttribute("zip_longest", "itertools", "itertools", "izip_longest", "zip_longest"), MovedModule("builtins", "__builtin__"), MovedModule("configparser", "ConfigParser"), MovedModule("copyreg", "copy_reg"), MovedModule("dbm_gnu", "gdbm", "dbm.gnu"), MovedModule("_dummy_thread", "dummy_thread", "_dummy_thread"), MovedModule("http_cookiejar", "cookielib", "http.cookiejar"), MovedModule("http_cookies", "Cookie", "http.cookies"), MovedModule("html_entities", "htmlentitydefs", "html.entities"), MovedModule("html_parser", "HTMLParser", "html.parser"), MovedModule("http_client", "httplib", "http.client"), MovedModule("email_mime_multipart", "email.MIMEMultipart", "email.mime.multipart"), MovedModule("email_mime_nonmultipart", "email.MIMENonMultipart", "email.mime.nonmultipart"), MovedModule("email_mime_text", "email.MIMEText", "email.mime.text"), MovedModule("email_mime_base", "email.MIMEBase", "email.mime.base"), MovedModule("BaseHTTPServer", "BaseHTTPServer", "http.server"), MovedModule("CGIHTTPServer", "CGIHTTPServer", "http.server"), MovedModule("SimpleHTTPServer", "SimpleHTTPServer", "http.server"), MovedModule("cPickle", "cPickle", "pickle"), MovedModule("queue", "Queue"), MovedModule("reprlib", "repr"), MovedModule("socketserver", "SocketServer"), MovedModule("_thread", "thread", "_thread"), MovedModule("tkinter", "Tkinter"), MovedModule("tkinter_dialog", "Dialog", "tkinter.dialog"), MovedModule("tkinter_filedialog", "FileDialog", "tkinter.filedialog"), MovedModule("tkinter_scrolledtext", "ScrolledText", "tkinter.scrolledtext"), MovedModule("tkinter_simpledialog", "SimpleDialog", "tkinter.simpledialog"), MovedModule("tkinter_tix", "Tix", "tkinter.tix"), MovedModule("tkinter_ttk", "ttk", "tkinter.ttk"), MovedModule("tkinter_constants", "Tkconstants", "tkinter.constants"), MovedModule("tkinter_dnd", "Tkdnd", "tkinter.dnd"), MovedModule("tkinter_colorchooser", 
"tkColorChooser", "tkinter.colorchooser"), MovedModule("tkinter_commondialog", "tkCommonDialog", "tkinter.commondialog"), MovedModule("tkinter_tkfiledialog", "tkFileDialog", "tkinter.filedialog"), MovedModule("tkinter_font", "tkFont", "tkinter.font"), MovedModule("tkinter_messagebox", "tkMessageBox", "tkinter.messagebox"), MovedModule("tkinter_tksimpledialog", "tkSimpleDialog", "tkinter.simpledialog"), MovedModule("urllib_parse", __name__ + ".moves.urllib_parse", "urllib.parse"), MovedModule("urllib_error", __name__ + ".moves.urllib_error", "urllib.error"), MovedModule("urllib", __name__ + ".moves.urllib", __name__ + ".moves.urllib"), MovedModule("urllib_robotparser", "robotparser", "urllib.robotparser"), MovedModule("xmlrpc_client", "xmlrpclib", "xmlrpc.client"), MovedModule("xmlrpc_server", "SimpleXMLRPCServer", "xmlrpc.server"), ] # Add windows specific modules. if sys.platform == "win32": _moved_attributes += [ MovedModule("winreg", "_winreg"), ] for attr in _moved_attributes: setattr(_MovedItems, attr.name, attr) if isinstance(attr, MovedModule): _importer._add_module(attr, "moves." + attr.name) del attr _MovedItems._moved_attributes = _moved_attributes moves = _MovedItems(__name__ + ".moves") _importer._add_module(moves, "moves") class Module_six_moves_urllib_parse(_LazyModule): """Lazy loading of moved objects in six.moves.urllib_parse""" _urllib_parse_moved_attributes = [ MovedAttribute("ParseResult", "urlparse", "urllib.parse"), MovedAttribute("SplitResult", "urlparse", "urllib.parse"), MovedAttribute("parse_qs", "urlparse", "urllib.parse"), MovedAttribute("parse_qsl", "urlparse", "urllib.parse"), MovedAttribute("urldefrag", "urlparse", "urllib.parse"), MovedAttribute("urljoin", "urlparse", "urllib.parse"), MovedAttribute("urlparse", "urlparse", "urllib.parse"), MovedAttribute("urlsplit", "urlparse", "urllib.parse"), MovedAttribute("urlunparse", "urlparse", "urllib.parse"), MovedAttribute("urlunsplit", "urlparse", "urllib.parse"), MovedAttribute("quote", "urllib", "urllib.parse"), MovedAttribute("quote_plus", "urllib", "urllib.parse"), MovedAttribute("unquote", "urllib", "urllib.parse"), MovedAttribute("unquote_plus", "urllib", "urllib.parse"), MovedAttribute("urlencode", "urllib", "urllib.parse"), MovedAttribute("splitquery", "urllib", "urllib.parse"), MovedAttribute("splittag", "urllib", "urllib.parse"), MovedAttribute("splituser", "urllib", "urllib.parse"), MovedAttribute("uses_fragment", "urlparse", "urllib.parse"), MovedAttribute("uses_netloc", "urlparse", "urllib.parse"), MovedAttribute("uses_params", "urlparse", "urllib.parse"), MovedAttribute("uses_query", "urlparse", "urllib.parse"), MovedAttribute("uses_relative", "urlparse", "urllib.parse"), ] for attr in _urllib_parse_moved_attributes: setattr(Module_six_moves_urllib_parse, attr.name, attr) del attr Module_six_moves_urllib_parse._moved_attributes = _urllib_parse_moved_attributes _importer._add_module(Module_six_moves_urllib_parse(__name__ + ".moves.urllib_parse"), "moves.urllib_parse", "moves.urllib.parse") class Module_six_moves_urllib_error(_LazyModule): """Lazy loading of moved objects in six.moves.urllib_error""" _urllib_error_moved_attributes = [ MovedAttribute("URLError", "urllib2", "urllib.error"), MovedAttribute("HTTPError", "urllib2", "urllib.error"), MovedAttribute("ContentTooShortError", "urllib", "urllib.error"), ] for attr in _urllib_error_moved_attributes: setattr(Module_six_moves_urllib_error, attr.name, attr) del attr Module_six_moves_urllib_error._moved_attributes = _urllib_error_moved_attributes 
_importer._add_module(Module_six_moves_urllib_error(__name__ + ".moves.urllib.error"), "moves.urllib_error", "moves.urllib.error") class Module_six_moves_urllib_request(_LazyModule): """Lazy loading of moved objects in six.moves.urllib_request""" _urllib_request_moved_attributes = [ MovedAttribute("urlopen", "urllib2", "urllib.request"), MovedAttribute("install_opener", "urllib2", "urllib.request"), MovedAttribute("build_opener", "urllib2", "urllib.request"), MovedAttribute("pathname2url", "urllib", "urllib.request"), MovedAttribute("url2pathname", "urllib", "urllib.request"), MovedAttribute("getproxies", "urllib", "urllib.request"), MovedAttribute("Request", "urllib2", "urllib.request"), MovedAttribute("OpenerDirector", "urllib2", "urllib.request"), MovedAttribute("HTTPDefaultErrorHandler", "urllib2", "urllib.request"), MovedAttribute("HTTPRedirectHandler", "urllib2", "urllib.request"), MovedAttribute("HTTPCookieProcessor", "urllib2", "urllib.request"), MovedAttribute("ProxyHandler", "urllib2", "urllib.request"), MovedAttribute("BaseHandler", "urllib2", "urllib.request"), MovedAttribute("HTTPPasswordMgr", "urllib2", "urllib.request"), MovedAttribute("HTTPPasswordMgrWithDefaultRealm", "urllib2", "urllib.request"), MovedAttribute("AbstractBasicAuthHandler", "urllib2", "urllib.request"), MovedAttribute("HTTPBasicAuthHandler", "urllib2", "urllib.request"), MovedAttribute("ProxyBasicAuthHandler", "urllib2", "urllib.request"), MovedAttribute("AbstractDigestAuthHandler", "urllib2", "urllib.request"), MovedAttribute("HTTPDigestAuthHandler", "urllib2", "urllib.request"), MovedAttribute("ProxyDigestAuthHandler", "urllib2", "urllib.request"), MovedAttribute("HTTPHandler", "urllib2", "urllib.request"), MovedAttribute("HTTPSHandler", "urllib2", "urllib.request"), MovedAttribute("FileHandler", "urllib2", "urllib.request"), MovedAttribute("FTPHandler", "urllib2", "urllib.request"), MovedAttribute("CacheFTPHandler", "urllib2", "urllib.request"), MovedAttribute("UnknownHandler", "urllib2", "urllib.request"), MovedAttribute("HTTPErrorProcessor", "urllib2", "urllib.request"), MovedAttribute("urlretrieve", "urllib", "urllib.request"), MovedAttribute("urlcleanup", "urllib", "urllib.request"), MovedAttribute("URLopener", "urllib", "urllib.request"), MovedAttribute("FancyURLopener", "urllib", "urllib.request"), MovedAttribute("proxy_bypass", "urllib", "urllib.request"), ] for attr in _urllib_request_moved_attributes: setattr(Module_six_moves_urllib_request, attr.name, attr) del attr Module_six_moves_urllib_request._moved_attributes = _urllib_request_moved_attributes _importer._add_module(Module_six_moves_urllib_request(__name__ + ".moves.urllib.request"), "moves.urllib_request", "moves.urllib.request") class Module_six_moves_urllib_response(_LazyModule): """Lazy loading of moved objects in six.moves.urllib_response""" _urllib_response_moved_attributes = [ MovedAttribute("addbase", "urllib", "urllib.response"), MovedAttribute("addclosehook", "urllib", "urllib.response"), MovedAttribute("addinfo", "urllib", "urllib.response"), MovedAttribute("addinfourl", "urllib", "urllib.response"), ] for attr in _urllib_response_moved_attributes: setattr(Module_six_moves_urllib_response, attr.name, attr) del attr Module_six_moves_urllib_response._moved_attributes = _urllib_response_moved_attributes _importer._add_module(Module_six_moves_urllib_response(__name__ + ".moves.urllib.response"), "moves.urllib_response", "moves.urllib.response") class Module_six_moves_urllib_robotparser(_LazyModule): """Lazy loading of moved objects 
in six.moves.urllib_robotparser""" _urllib_robotparser_moved_attributes = [ MovedAttribute("RobotFileParser", "robotparser", "urllib.robotparser"), ] for attr in _urllib_robotparser_moved_attributes: setattr(Module_six_moves_urllib_robotparser, attr.name, attr) del attr Module_six_moves_urllib_robotparser._moved_attributes = _urllib_robotparser_moved_attributes _importer._add_module(Module_six_moves_urllib_robotparser(__name__ + ".moves.urllib.robotparser"), "moves.urllib_robotparser", "moves.urllib.robotparser") class Module_six_moves_urllib(types.ModuleType): """Create a six.moves.urllib namespace that resembles the Python 3 namespace""" __path__ = [] # mark as package parse = _importer._get_module("moves.urllib_parse") error = _importer._get_module("moves.urllib_error") request = _importer._get_module("moves.urllib_request") response = _importer._get_module("moves.urllib_response") robotparser = _importer._get_module("moves.urllib_robotparser") def __dir__(self): return ['parse', 'error', 'request', 'response', 'robotparser'] _importer._add_module(Module_six_moves_urllib(__name__ + ".moves.urllib"), "moves.urllib") def add_move(move): """Add an item to six.moves.""" setattr(_MovedItems, move.name, move) def remove_move(name): """Remove item from six.moves.""" try: delattr(_MovedItems, name) except AttributeError: try: del moves.__dict__[name] except KeyError: raise AttributeError("no such move, %r" % (name,)) if PY3: _meth_func = "__func__" _meth_self = "__self__" _func_closure = "__closure__" _func_code = "__code__" _func_defaults = "__defaults__" _func_globals = "__globals__" else: _meth_func = "im_func" _meth_self = "im_self" _func_closure = "func_closure" _func_code = "func_code" _func_defaults = "func_defaults" _func_globals = "func_globals" try: advance_iterator = next except NameError: def advance_iterator(it): return it.next() next = advance_iterator try: callable = callable except NameError: def callable(obj): return any("__call__" in klass.__dict__ for klass in type(obj).__mro__) if PY3: def get_unbound_function(unbound): return unbound create_bound_method = types.MethodType def create_unbound_method(func, cls): return func Iterator = object else: def get_unbound_function(unbound): return unbound.im_func def create_bound_method(func, obj): return types.MethodType(func, obj, obj.__class__) def create_unbound_method(func, cls): return types.MethodType(func, None, cls) class Iterator(object): def next(self): return type(self).__next__(self) callable = callable _add_doc(get_unbound_function, """Get the function out of a possibly unbound function""") get_method_function = operator.attrgetter(_meth_func) get_method_self = operator.attrgetter(_meth_self) get_function_closure = operator.attrgetter(_func_closure) get_function_code = operator.attrgetter(_func_code) get_function_defaults = operator.attrgetter(_func_defaults) get_function_globals = operator.attrgetter(_func_globals) if PY3: def iterkeys(d, **kw): return iter(d.keys(**kw)) def itervalues(d, **kw): return iter(d.values(**kw)) def iteritems(d, **kw): return iter(d.items(**kw)) def iterlists(d, **kw): return iter(d.lists(**kw)) viewkeys = operator.methodcaller("keys") viewvalues = operator.methodcaller("values") viewitems = operator.methodcaller("items") else: def iterkeys(d, **kw): return d.iterkeys(**kw) def itervalues(d, **kw): return d.itervalues(**kw) def iteritems(d, **kw): return d.iteritems(**kw) def iterlists(d, **kw): return d.iterlists(**kw) viewkeys = operator.methodcaller("viewkeys") viewvalues = 
operator.methodcaller("viewvalues") viewitems = operator.methodcaller("viewitems") _add_doc(iterkeys, "Return an iterator over the keys of a dictionary.") _add_doc(itervalues, "Return an iterator over the values of a dictionary.") _add_doc(iteritems, "Return an iterator over the (key, value) pairs of a dictionary.") _add_doc(iterlists, "Return an iterator over the (key, [values]) pairs of a dictionary.") if PY3: def b(s): return s.encode("latin-1") def u(s): return s unichr = chr import struct int2byte = struct.Struct(">B").pack del struct byte2int = operator.itemgetter(0) indexbytes = operator.getitem iterbytes = iter import io StringIO = io.StringIO BytesIO = io.BytesIO _assertCountEqual = "assertCountEqual" if sys.version_info[1] <= 1: _assertRaisesRegex = "assertRaisesRegexp" _assertRegex = "assertRegexpMatches" else: _assertRaisesRegex = "assertRaisesRegex" _assertRegex = "assertRegex" else: def b(s): return s # Workaround for standalone backslash def u(s): return unicode(s.replace(r'\\', r'\\\\'), "unicode_escape") unichr = unichr int2byte = chr def byte2int(bs): return ord(bs[0]) def indexbytes(buf, i): return ord(buf[i]) iterbytes = functools.partial(itertools.imap, ord) import StringIO StringIO = BytesIO = StringIO.StringIO _assertCountEqual = "assertItemsEqual" _assertRaisesRegex = "assertRaisesRegexp" _assertRegex = "assertRegexpMatches" _add_doc(b, """Byte literal""") _add_doc(u, """Text literal""") def assertCountEqual(self, *args, **kwargs): return getattr(self, _assertCountEqual)(*args, **kwargs) def assertRaisesRegex(self, *args, **kwargs): return getattr(self, _assertRaisesRegex)(*args, **kwargs) def assertRegex(self, *args, **kwargs): return getattr(self, _assertRegex)(*args, **kwargs) if PY3: exec_ = getattr(moves.builtins, "exec") def reraise(tp, value, tb=None): if value is None: value = tp() if value.__traceback__ is not tb: raise value.with_traceback(tb) raise value else: def exec_(_code_, _globs_=None, _locs_=None): """Execute code in a namespace.""" if _globs_ is None: frame = sys._getframe(1) _globs_ = frame.f_globals if _locs_ is None: _locs_ = frame.f_locals del frame elif _locs_ is None: _locs_ = _globs_ exec("""exec _code_ in _globs_, _locs_""") exec_("""def reraise(tp, value, tb=None): raise tp, value, tb """) if sys.version_info[:2] == (3, 2): exec_("""def raise_from(value, from_value): if from_value is None: raise value raise value from from_value """) elif sys.version_info[:2] > (3, 2): exec_("""def raise_from(value, from_value): raise value from from_value """) else: def raise_from(value, from_value): raise value print_ = getattr(moves.builtins, "print", None) if print_ is None: def print_(*args, **kwargs): """The new-style print function for Python 2.4 and 2.5.""" fp = kwargs.pop("file", sys.stdout) if fp is None: return def write(data): if not isinstance(data, basestring): data = str(data) # If the file has an encoding, encode unicode with it. 
if (isinstance(fp, file) and isinstance(data, unicode) and fp.encoding is not None): errors = getattr(fp, "errors", None) if errors is None: errors = "strict" data = data.encode(fp.encoding, errors) fp.write(data) want_unicode = False sep = kwargs.pop("sep", None) if sep is not None: if isinstance(sep, unicode): want_unicode = True elif not isinstance(sep, str): raise TypeError("sep must be None or a string") end = kwargs.pop("end", None) if end is not None: if isinstance(end, unicode): want_unicode = True elif not isinstance(end, str): raise TypeError("end must be None or a string") if kwargs: raise TypeError("invalid keyword arguments to print()") if not want_unicode: for arg in args: if isinstance(arg, unicode): want_unicode = True break if want_unicode: newline = unicode("\n") space = unicode(" ") else: newline = "\n" space = " " if sep is None: sep = space if end is None: end = newline for i, arg in enumerate(args): if i: write(sep) write(arg) write(end) if sys.version_info[:2] < (3, 3): _print = print_ def print_(*args, **kwargs): fp = kwargs.get("file", sys.stdout) flush = kwargs.pop("flush", False) _print(*args, **kwargs) if flush and fp is not None: fp.flush() _add_doc(reraise, """Reraise an exception.""") if sys.version_info[0:2] < (3, 4): def wraps(wrapped, assigned=functools.WRAPPER_ASSIGNMENTS, updated=functools.WRAPPER_UPDATES): def wrapper(f): f = functools.wraps(wrapped, assigned, updated)(f) f.__wrapped__ = wrapped return f return wrapper else: wraps = functools.wraps def with_metaclass(meta, *bases): """Create a base class with a metaclass.""" # This requires a bit of explanation: the basic idea is to make a dummy # metaclass for one level of class instantiation that replaces itself with # the actual metaclass. class metaclass(meta): def __new__(cls, name, this_bases, d): return meta(name, bases, d) return type.__new__(metaclass, 'temporary_class', (), {}) def add_metaclass(metaclass): """Class decorator for creating a class with a metaclass.""" def wrapper(cls): orig_vars = cls.__dict__.copy() slots = orig_vars.get('__slots__') if slots is not None: if isinstance(slots, str): slots = [slots] for slots_var in slots: orig_vars.pop(slots_var) orig_vars.pop('__dict__', None) orig_vars.pop('__weakref__', None) return metaclass(cls.__name__, cls.__bases__, orig_vars) return wrapper def python_2_unicode_compatible(klass): """ A decorator that defines __unicode__ and __str__ methods under Python 2. Under Python 3 it does nothing. To support Python 2 and 3 with a single code base, define a __str__ method returning text and apply this decorator to the class. """ if PY2: if '__str__' not in klass.__dict__: raise ValueError("@python_2_unicode_compatible cannot be applied " "to %s because it doesn't define __str__()." % klass.__name__) klass.__unicode__ = klass.__str__ klass.__str__ = lambda self: self.__unicode__().encode('utf-8') return klass # Complete the moves implementation. # This code is at the end of this module to speed up module loading. # Turn this module into a package. __path__ = [] # required for PEP 302 and PEP 451 __package__ = __name__ # see PEP 366 @ReservedAssignment if globals().get("__spec__") is not None: __spec__.submodule_search_locations = [] # PEP 451 @UndefinedVariable # Remove other six meta path importers, since they cause problems. This can # happen if six is removed from sys.modules and then reloaded. (Setuptools does # this for some reason.) 
if sys.meta_path: for i, importer in enumerate(sys.meta_path): # Here's some real nastiness: Another "instance" of the six module might # be floating around. Therefore, we can't use isinstance() to check for # the six meta path importer, since the other six instance will have # inserted an importer with different class. if (type(importer).__name__ == "_SixMetaPathImporter" and importer.name == __name__): del sys.meta_path[i] break del i, importer # Finally, add the importer to the meta path import hook. sys.meta_path.append(_importer)
PKZ@site-packages/pip/_vendor/urllib3/packages/backports/__init__.pynu[
PKZRW@site-packages/pip/_vendor/urllib3/packages/backports/makefile.pynu[
# -*- coding: utf-8 -*- """ backports.makefile ~~~~~~~~~~~~~~~~~~ Backports the Python 3 ``socket.makefile`` method for use with anything that wants to create a "fake" socket object. """ import io from socket import SocketIO def backport_makefile(self, mode="r", buffering=None, encoding=None, errors=None, newline=None): """ Backport of ``socket.makefile`` from Python 3.5.
""" if not set(mode) <= set(["r", "w", "b"]): raise ValueError( "invalid mode %r (only r, w, b allowed)" % (mode,) ) writing = "w" in mode reading = "r" in mode or not writing assert reading or writing binary = "b" in mode rawmode = "" if reading: rawmode += "r" if writing: rawmode += "w" raw = SocketIO(self, rawmode) self._makefile_refs += 1 if buffering is None: buffering = -1 if buffering < 0: buffering = io.DEFAULT_BUFFER_SIZE if buffering == 0: if not binary: raise ValueError("unbuffered streams must be binary") return raw if reading and writing: buffer = io.BufferedRWPair(raw, raw, buffering) elif reading: buffer = io.BufferedReader(raw, buffering) else: assert writing buffer = io.BufferedWriter(raw, buffering) if binary: return buffer text = io.TextIOWrapper(buffer, encoding, errors, newline) text.mode = mode return text PKZFg"":site-packages/pip/_vendor/urllib3/packages/ordered_dict.pynu[# Backport of OrderedDict() class that runs on Python 2.4, 2.5, 2.6, 2.7 and pypy. # Passes Python2.7's test suite and incorporates all the latest updates. # Copyright 2009 Raymond Hettinger, released under the MIT License. # http://code.activestate.com/recipes/576693/ try: from thread import get_ident as _get_ident except ImportError: from dummy_thread import get_ident as _get_ident try: from _abcoll import KeysView, ValuesView, ItemsView except ImportError: pass class OrderedDict(dict): 'Dictionary that remembers insertion order' # An inherited dict maps keys to values. # The inherited dict provides __getitem__, __len__, __contains__, and get. # The remaining methods are order-aware. # Big-O running times for all methods are the same as for regular dictionaries. # The internal self.__map dictionary maps keys to links in a doubly linked list. # The circular doubly linked list starts and ends with a sentinel element. # The sentinel element never gets deleted (this simplifies the algorithm). # Each link is stored as a list of length three: [PREV, NEXT, KEY]. def __init__(self, *args, **kwds): '''Initialize an ordered dictionary. Signature is the same as for regular dictionaries, but keyword arguments are not recommended because their insertion order is arbitrary. ''' if len(args) > 1: raise TypeError('expected at most 1 arguments, got %d' % len(args)) try: self.__root except AttributeError: self.__root = root = [] # sentinel node root[:] = [root, root, None] self.__map = {} self.__update(*args, **kwds) def __setitem__(self, key, value, dict_setitem=dict.__setitem__): 'od.__setitem__(i, y) <==> od[i]=y' # Setting a new item creates a new link which goes at the end of the linked # list, and the inherited dictionary is updated with the new key/value pair. if key not in self: root = self.__root last = root[0] last[1] = root[0] = self.__map[key] = [last, root, key] dict_setitem(self, key, value) def __delitem__(self, key, dict_delitem=dict.__delitem__): 'od.__delitem__(y) <==> del od[y]' # Deleting an existing item uses self.__map to find the link which is # then removed by updating the links in the predecessor and successor nodes. dict_delitem(self, key) link_prev, link_next, key = self.__map.pop(key) link_prev[1] = link_next link_next[0] = link_prev def __iter__(self): 'od.__iter__() <==> iter(od)' root = self.__root curr = root[1] while curr is not root: yield curr[2] curr = curr[1] def __reversed__(self): 'od.__reversed__() <==> reversed(od)' root = self.__root curr = root[0] while curr is not root: yield curr[2] curr = curr[0] def clear(self): 'od.clear() -> None. Remove all items from od.' 
try: for node in self.__map.itervalues(): del node[:] root = self.__root root[:] = [root, root, None] self.__map.clear() except AttributeError: pass dict.clear(self) def popitem(self, last=True): '''od.popitem() -> (k, v), return and remove a (key, value) pair. Pairs are returned in LIFO order if last is true or FIFO order if false. ''' if not self: raise KeyError('dictionary is empty') root = self.__root if last: link = root[0] link_prev = link[0] link_prev[1] = root root[0] = link_prev else: link = root[1] link_next = link[1] root[1] = link_next link_next[0] = root key = link[2] del self.__map[key] value = dict.pop(self, key) return key, value # -- the following methods do not depend on the internal structure -- def keys(self): 'od.keys() -> list of keys in od' return list(self) def values(self): 'od.values() -> list of values in od' return [self[key] for key in self] def items(self): 'od.items() -> list of (key, value) pairs in od' return [(key, self[key]) for key in self] def iterkeys(self): 'od.iterkeys() -> an iterator over the keys in od' return iter(self) def itervalues(self): 'od.itervalues -> an iterator over the values in od' for k in self: yield self[k] def iteritems(self): 'od.iteritems -> an iterator over the (key, value) items in od' for k in self: yield (k, self[k]) def update(*args, **kwds): '''od.update(E, **F) -> None. Update od from dict/iterable E and F. If E is a dict instance, does: for k in E: od[k] = E[k] If E has a .keys() method, does: for k in E.keys(): od[k] = E[k] Or if E is an iterable of items, does: for k, v in E: od[k] = v In either case, this is followed by: for k, v in F.items(): od[k] = v ''' if len(args) > 2: raise TypeError('update() takes at most 2 positional ' 'arguments (%d given)' % (len(args),)) elif not args: raise TypeError('update() takes at least 1 argument (0 given)') self = args[0] # Make progressively weaker assumptions about "other" other = () if len(args) == 2: other = args[1] if isinstance(other, dict): for key in other: self[key] = other[key] elif hasattr(other, 'keys'): for key in other.keys(): self[key] = other[key] else: for key, value in other: self[key] = value for key, value in kwds.items(): self[key] = value __update = update # let subclasses override update without breaking __init__ __marker = object() def pop(self, key, default=__marker): '''od.pop(k[,d]) -> v, remove specified key and return the corresponding value. If key is not found, d is returned if given, otherwise KeyError is raised. ''' if key in self: result = self[key] del self[key] return result if default is self.__marker: raise KeyError(key) return default def setdefault(self, key, default=None): 'od.setdefault(k[,d]) -> od.get(k,d), also set od[k]=d if k not in od' if key in self: return self[key] self[key] = default return default def __repr__(self, _repr_running={}): 'od.__repr__() <==> repr(od)' call_key = id(self), _get_ident() if call_key in _repr_running: return '...' 
_repr_running[call_key] = 1 try: if not self: return '%s()' % (self.__class__.__name__,) return '%s(%r)' % (self.__class__.__name__, self.items()) finally: del _repr_running[call_key] def __reduce__(self): 'Return state information for pickling' items = [[k, self[k]] for k in self] inst_dict = vars(self).copy() for k in vars(OrderedDict()): inst_dict.pop(k, None) if inst_dict: return (self.__class__, (items,), inst_dict) return self.__class__, (items,) def copy(self): 'od.copy() -> a shallow copy of od' return self.__class__(self) @classmethod def fromkeys(cls, iterable, value=None): '''OD.fromkeys(S[, v]) -> New ordered dictionary with keys from S and values equal to v (which defaults to None). ''' d = cls() for key in iterable: d[key] = value return d def __eq__(self, other): '''od.__eq__(y) <==> od==y. Comparison to another OD is order-sensitive while comparison to a regular mapping is order-insensitive. ''' if isinstance(other, OrderedDict): return len(self)==len(other) and self.items() == other.items() return dict.__eq__(self, other) def __ne__(self, other): return not self == other # -- the following methods are only used in Python 2.7 -- def viewkeys(self): "od.viewkeys() -> a set-like object providing a view on od's keys" return KeysView(self) def viewvalues(self): "od.viewvalues() -> an object providing a view on od's values" return ValuesView(self) def viewitems(self): "od.viewitems() -> a set-like object providing a view on od's items" return ItemsView(self) PKZ TCBB7site-packages/pip/_vendor/urllib3/packages/__init__.pyonu[ abc@@s*ddlmZddlmZdZdS(i(tabsolute_importi(tssl_match_hostnameRN(R(t __future__RtRt__all__(((sI/usr/lib/python2.7/site-packages/pip/_vendor/urllib3/packages/__init__.pytsPKZ]|mm6site-packages/pip/_vendor/urllib3/packages/__init__.pynu[from __future__ import absolute_import from . 
import ssl_match_hostname __all__ = ('ssl_match_hostname', )
(R-RRR(R t__eq__(R R/((sM/usr/lib/python2.7/site-packages/pip/_vendor/urllib3/packages/ordered_dict.pyRDs.cCs ||k S(N((R R/((sM/usr/lib/python2.7/site-packages/pip/_vendor/urllib3/packages/ordered_dict.pyt__ne__scCs t|S(s@od.viewkeys() -> a set-like object providing a view on od's keys(R(R ((sM/usr/lib/python2.7/site-packages/pip/_vendor/urllib3/packages/ordered_dict.pytviewkeysscCs t|S(s<od.viewvalues() -> an object providing a view on od's values(R(R ((sM/usr/lib/python2.7/site-packages/pip/_vendor/urllib3/packages/ordered_dict.pyt viewvaluesscCs t|S(sBod.viewitems() -> a set-like object providing a view on od's items(R(R ((sM/usr/lib/python2.7/site-packages/pip/_vendor/urllib3/packages/ordered_dict.pyt viewitemssN(#R8t __module__t__doc__RR RRRRRtTrueR$R&R'R(R*RR,R0R tobjectR1RR R4R;R?R=t classmethodRCRDRERFRGRH(((sM/usr/lib/python2.7/site-packages/pip/_vendor/urllib3/packages/ordered_dict.pyRs:                  N( tthreadRR6t ImportErrort dummy_threadt_abcollRRRR R(((sM/usr/lib/python2.7/site-packages/pip/_vendor/urllib3/packages/ordered_dict.pyts   PKZym!(!(;site-packages/pip/_vendor/urllib3/packages/ordered_dict.pyonu[ abc@syddlmZWn!ek r7ddlmZnXy ddlmZmZmZWnek rknXde fdYZ dS(i(t get_ident(tKeysViewt ValuesViewt ItemsViewt OrderedDictcBseZdZdZejdZejdZdZdZdZ e dZ dZ d Z d Zd Zd Zd ZdZeZeZedZddZidZdZdZeddZdZdZdZ dZ!dZ"RS(s)Dictionary that remembers insertion ordercOst|dkr+tdt|ny |jWn7tk rog|_}||dg|(i|_nX|j||dS(sInitialize an ordered dictionary. Signature is the same as for regular dictionaries, but keyword arguments are not recommended because their insertion order is arbitrary. is$expected at most 1 arguments, got %dN(tlent TypeErrort_OrderedDict__roottAttributeErrortNonet_OrderedDict__mapt_OrderedDict__update(tselftargstkwdstroot((sM/usr/lib/python2.7/site-packages/pip/_vendor/urllib3/packages/ordered_dict.pyt__init__s    cCs\||krH|j}|d}|||g|d<|d<|j| od[i]=yiiN(RR (R tkeytvaluet dict_setitemRtlast((sM/usr/lib/python2.7/site-packages/pip/_vendor/urllib3/packages/ordered_dict.pyt __setitem__,s    )cCs@||||jj|\}}}||d<||d del od[y]iiN(R tpop(R Rt dict_delitemt link_prevt link_next((sM/usr/lib/python2.7/site-packages/pip/_vendor/urllib3/packages/ordered_dict.pyt __delitem__6s  ccs=|j}|d}x#||k r8|dV|d}qWdS(sod.__iter__() <==> iter(od)iiN(R(R Rtcurr((sM/usr/lib/python2.7/site-packages/pip/_vendor/urllib3/packages/ordered_dict.pyt__iter__?s    ccs=|j}|d}x#||k r8|dV|d}qWdS(s#od.__reversed__() <==> reversed(od)iiN(R(R RR((sM/usr/lib/python2.7/site-packages/pip/_vendor/urllib3/packages/ordered_dict.pyt __reversed__Gs    cCsmyHx|jjD] }|2qW|j}||dg|(|jjWntk r[nXtj|dS(s.od.clear() -> None. Remove all items from od.N(R t itervaluesRR tclearRtdict(R tnodeR((sM/usr/lib/python2.7/site-packages/pip/_vendor/urllib3/packages/ordered_dict.pyROs  cCs|stdn|j}|rO|d}|d}||d<||d (k, v), return and remove a (key, value) pair. Pairs are returned in LIFO order if last is true or FIFO order if false. 
sdictionary is emptyiii(tKeyErrorRR R R(R RRtlinkRRRR((sM/usr/lib/python2.7/site-packages/pip/_vendor/urllib3/packages/ordered_dict.pytpopitem[s            cCs t|S(sod.keys() -> list of keys in od(tlist(R ((sM/usr/lib/python2.7/site-packages/pip/_vendor/urllib3/packages/ordered_dict.pytkeystscCsg|D]}||^qS(s#od.values() -> list of values in od((R R((sM/usr/lib/python2.7/site-packages/pip/_vendor/urllib3/packages/ordered_dict.pytvaluesxscCs!g|D]}|||f^qS(s.od.items() -> list of (key, value) pairs in od((R R((sM/usr/lib/python2.7/site-packages/pip/_vendor/urllib3/packages/ordered_dict.pytitems|scCs t|S(s0od.iterkeys() -> an iterator over the keys in od(titer(R ((sM/usr/lib/python2.7/site-packages/pip/_vendor/urllib3/packages/ordered_dict.pytiterkeyssccsx|D]}||VqWdS(s2od.itervalues -> an iterator over the values in odN((R tk((sM/usr/lib/python2.7/site-packages/pip/_vendor/urllib3/packages/ordered_dict.pyRs ccs$x|D]}|||fVqWdS(s=od.iteritems -> an iterator over the (key, value) items in odN((R R+((sM/usr/lib/python2.7/site-packages/pip/_vendor/urllib3/packages/ordered_dict.pyt iteritemss cOs&t|dkr.tdt|fn|sCtdn|d}d}t|dkrr|d}nt|trxw|D]}|||| None. Update od from dict/iterable E and F. If E is a dict instance, does: for k in E: od[k] = E[k] If E has a .keys() method, does: for k in E.keys(): od[k] = E[k] Or if E is an iterable of items, does: for k, v in E: od[k] = v In either case, this is followed by: for k, v in F.items(): od[k] = v is8update() takes at most 2 positional arguments (%d given)s,update() takes at least 1 argument (0 given)iiR&N((RRt isinstanceR thasattrR&R((R RR totherRR((sM/usr/lib/python2.7/site-packages/pip/_vendor/urllib3/packages/ordered_dict.pytupdates&    cCsC||kr!||}||=|S||jkr?t|n|S(sod.pop(k[,d]) -> v, remove specified key and return the corresponding value. If key is not found, d is returned if given, otherwise KeyError is raised. (t_OrderedDict__markerR"(R Rtdefaulttresult((sM/usr/lib/python2.7/site-packages/pip/_vendor/urllib3/packages/ordered_dict.pyRs  cCs"||kr||S|||<|S(sDod.setdefault(k[,d]) -> od.get(k,d), also set od[k]=d if k not in od((R RR2((sM/usr/lib/python2.7/site-packages/pip/_vendor/urllib3/packages/ordered_dict.pyt setdefaults  cCsst|tf}||kr%dSd|| repr(od)s...is%s()s%s(%r)N(tidt _get_identt __class__t__name__R((R t _repr_runningtcall_key((sM/usr/lib/python2.7/site-packages/pip/_vendor/urllib3/packages/ordered_dict.pyt__repr__s  cCsg|D]}|||g^q}t|j}x'ttD]}|j|dqEW|rx|j|f|fS|j|ffS(s%Return state information for picklingN(tvarstcopyRRR R7(R R+R(t inst_dict((sM/usr/lib/python2.7/site-packages/pip/_vendor/urllib3/packages/ordered_dict.pyt __reduce__s#cCs |j|S(s!od.copy() -> a shallow copy of od(R7(R ((sM/usr/lib/python2.7/site-packages/pip/_vendor/urllib3/packages/ordered_dict.pyR=scCs(|}x|D]}||| New ordered dictionary with keys from S and values equal to v (which defaults to None). ((tclstiterableRtdR((sM/usr/lib/python2.7/site-packages/pip/_vendor/urllib3/packages/ordered_dict.pytfromkeyss  cCsMt|tr=t|t|ko<|j|jkStj||S(sod.__eq__(y) <==> od==y. Comparison to another OD is order-sensitive while comparison to a regular mapping is order-insensitive. 
(R-RRR(R t__eq__(R R/((sM/usr/lib/python2.7/site-packages/pip/_vendor/urllib3/packages/ordered_dict.pyRDs.cCs ||k S(N((R R/((sM/usr/lib/python2.7/site-packages/pip/_vendor/urllib3/packages/ordered_dict.pyt__ne__scCs t|S(s@od.viewkeys() -> a set-like object providing a view on od's keys(R(R ((sM/usr/lib/python2.7/site-packages/pip/_vendor/urllib3/packages/ordered_dict.pytviewkeysscCs t|S(s<od.viewvalues() -> an object providing a view on od's values(R(R ((sM/usr/lib/python2.7/site-packages/pip/_vendor/urllib3/packages/ordered_dict.pyt viewvaluesscCs t|S(sBod.viewitems() -> a set-like object providing a view on od's items(R(R ((sM/usr/lib/python2.7/site-packages/pip/_vendor/urllib3/packages/ordered_dict.pyt viewitemssN(#R8t __module__t__doc__RR RRRRRtTrueR$R&R'R(R*RR,R0R tobjectR1RR R4R;R?R=t classmethodRCRDRERFRGRH(((sM/usr/lib/python2.7/site-packages/pip/_vendor/urllib3/packages/ordered_dict.pyRs:                  N( tthreadRR6t ImportErrort dummy_threadt_abcollRRRR R(((sM/usr/lib/python2.7/site-packages/pip/_vendor/urllib3/packages/ordered_dict.pyts   PKZ332site-packages/pip/_vendor/urllib3/_collections.pycnu[ abc@@sddlmZddlmZmZyddlmZWn$ek r`dddYZnXyddlmZWn!ek rddl mZnXddl m Z m Z m Z d d gZeZd efd YZd efd YZd S(i(tabsolute_import(tMappingtMutableMapping(tRLockRcB@seZdZdZRS(cC@sdS(N((tself((sD/usr/lib/python2.7/site-packages/pip/_vendor/urllib3/_collections.pyt __enter__scC@sdS(N((Rtexc_typet exc_valuet traceback((sD/usr/lib/python2.7/site-packages/pip/_vendor/urllib3/_collections.pyt__exit__ s(t__name__t __module__RR (((sD/usr/lib/python2.7/site-packages/pip/_vendor/urllib3/_collections.pyRs (t OrderedDicti(titerkeyst itervaluestPY3tRecentlyUsedContainertHTTPHeaderDictcB@sbeZdZeZdd dZdZdZdZ dZ dZ dZ d Z RS( s Provides a thread-safe dict-like container which maintains up to ``maxsize`` keys while throwing away the least-recently-used keys beyond ``maxsize``. :param maxsize: Maximum number of recent elements to retain. :param dispose_func: Every time an item is evicted from the container, ``dispose_func(value)`` is called. 
Callback which will get called i cC@s1||_||_|j|_t|_dS(N(t_maxsizet dispose_funct ContainerClst _containerRtlock(RtmaxsizeR((sD/usr/lib/python2.7/site-packages/pip/_vendor/urllib3/_collections.pyt__init__+s  cC@s7|j(|jj|}||j|<|SWdQXdS(N(RRtpop(Rtkeytitem((sD/usr/lib/python2.7/site-packages/pip/_vendor/urllib3/_collections.pyt __getitem__2s  cC@st}|j]|jj|t}||j|>> headers = HTTPHeaderDict() >>> headers.add('Set-Cookie', 'foo=bar') >>> headers.add('set-cookie', 'baz=quxx') >>> headers['content-length'] = '7' >>> headers['SET-cookie'] 'foo=bar, baz=quxx' >>> headers['Content-Length'] '7' cK@sttt|jt|_|dk rZt|trJ|j|qZ|j|n|rp|j|ndS(N( tsuperRRR RR0t isinstancet _copy_fromtextend(Rtheaderstkwargs((sD/usr/lib/python2.7/site-packages/pip/_vendor/urllib3/_collections.pyRs  cC@s*||g|j|j<|j|jS(N(Rtlower(RRtval((sD/usr/lib/python2.7/site-packages/pip/_vendor/urllib3/_collections.pyR&scC@s$|j|j}dj|dS(Ns, i(RR7tjoin(RRR8((sD/usr/lib/python2.7/site-packages/pip/_vendor/urllib3/_collections.pyRscC@s|j|j=dS(N(RR7(RR((sD/usr/lib/python2.7/site-packages/pip/_vendor/urllib3/_collections.pyR'scC@s|j|jkS(N(R7R(RR((sD/usr/lib/python2.7/site-packages/pip/_vendor/urllib3/_collections.pyt __contains__scC@st|t r$t|d r$tSt|t|sNt||}ntd|jDtd|jDkS(NR.cs@s'|]\}}|j|fVqdS(N(R7(t.0tktv((sD/usr/lib/python2.7/site-packages/pip/_vendor/urllib3/_collections.pys scs@s'|]\}}|j|fVqdS(N(R7(R;R<R=((sD/usr/lib/python2.7/site-packages/pip/_vendor/urllib3/_collections.pys s(R2RthasattrR"ttypetdictt itermerged(Rtother((sD/usr/lib/python2.7/site-packages/pip/_vendor/urllib3/_collections.pyt__eq__s  cC@s|j| S(N(RC(RRB((sD/usr/lib/python2.7/site-packages/pip/_vendor/urllib3/_collections.pyt__ne__scC@s t|jS(N(R R(R((sD/usr/lib/python2.7/site-packages/pip/_vendor/urllib3/_collections.pyR(scc@s'x |jjD]}|dVqWdS(Ni(RR-(Rtvals((sD/usr/lib/python2.7/site-packages/pip/_vendor/urllib3/_collections.pyR*scC@sGy||}Wn'tk r7||jkr3n|SX||=|SdS(sD.pop(k[,d]) -> v, remove specified key and return the corresponding value. If key is not found, d is returned if given, otherwise KeyError is raised. N(tKeyErrort_HTTPHeaderDict__marker(RRtdefaultR#((sD/usr/lib/python2.7/site-packages/pip/_vendor/urllib3/_collections.pyRs cC@s#y ||=Wntk rnXdS(N(RF(RR((sD/usr/lib/python2.7/site-packages/pip/_vendor/urllib3/_collections.pytdiscards  cC@sM|j}||g}|jj||}||k rI|j|ndS(sAdds a (name, value) pair, doesn't overwrite the value if it already exists. >>> headers = HTTPHeaderDict(foo='bar') >>> headers.add('Foo', 'baz') >>> headers['foo'] 'bar, baz' N(R7Rt setdefaulttappend(RRR8t key_lowertnew_valsRE((sD/usr/lib/python2.7/site-packages/pip/_vendor/urllib3/_collections.pytadds   cO@s]t|dkr0tdjt|nt|dkrL|dnd}t|trx|jD]\}}|j||qnWnt|trx|D]}|j|||qWndt|drxR|j D]}|j|||qWn'x$|D]\}}|j||q Wx*|j D]\}}|j||q9WdS(sGeneric import function for any type of header-like object. Adapted version of MutableMapping.update in order to insert items with self.add instead of self.__setitem__ is9extend() takes at most 1 positional arguments ({0} given)iR.N(( R t TypeErrortformatR2Rt iteritemsRNRR>R.titems(RtargsR6RBRR8R#((sD/usr/lib/python2.7/site-packages/pip/_vendor/urllib3/_collections.pyR4s" " cC@sKy|j|j}Wn%tk r>||jkr:gS|SX|dSdS(smReturns a list of all the values for the named field. 
Returns an empty list if the key doesn't exist.iN(RR7RFRG(RRRHRE((sD/usr/lib/python2.7/site-packages/pip/_vendor/urllib3/_collections.pytgetlists cC@s#dt|jt|jfS(Ns%s(%s)(R?R R@RA(R((sD/usr/lib/python2.7/site-packages/pip/_vendor/urllib3/_collections.pyt__repr__scC@s\xU|D]M}|j|}t|tr:t|}n|g||j|js    JPKZӔ''1site-packages/pip/_vendor/urllib3/_collections.pynu[from __future__ import absolute_import from collections import Mapping, MutableMapping try: from threading import RLock except ImportError: # Platform-specific: No threads available class RLock: def __enter__(self): pass def __exit__(self, exc_type, exc_value, traceback): pass try: # Python 2.7+ from collections import OrderedDict except ImportError: from .packages.ordered_dict import OrderedDict from .packages.six import iterkeys, itervalues, PY3 __all__ = ['RecentlyUsedContainer', 'HTTPHeaderDict'] _Null = object() class RecentlyUsedContainer(MutableMapping): """ Provides a thread-safe dict-like container which maintains up to ``maxsize`` keys while throwing away the least-recently-used keys beyond ``maxsize``. :param maxsize: Maximum number of recent elements to retain. :param dispose_func: Every time an item is evicted from the container, ``dispose_func(value)`` is called. Callback which will get called """ ContainerCls = OrderedDict def __init__(self, maxsize=10, dispose_func=None): self._maxsize = maxsize self.dispose_func = dispose_func self._container = self.ContainerCls() self.lock = RLock() def __getitem__(self, key): # Re-insert the item, moving it to the end of the eviction line. with self.lock: item = self._container.pop(key) self._container[key] = item return item def __setitem__(self, key, value): evicted_value = _Null with self.lock: # Possibly evict the existing value of 'key' evicted_value = self._container.get(key, _Null) self._container[key] = value # If we didn't evict an existing value, we might have to evict the # least recently used item from the beginning of the container. if len(self._container) > self._maxsize: _key, evicted_value = self._container.popitem(last=False) if self.dispose_func and evicted_value is not _Null: self.dispose_func(evicted_value) def __delitem__(self, key): with self.lock: value = self._container.pop(key) if self.dispose_func: self.dispose_func(value) def __len__(self): with self.lock: return len(self._container) def __iter__(self): raise NotImplementedError('Iteration over this class is unlikely to be threadsafe.') def clear(self): with self.lock: # Copy pointers to all values, then wipe the mapping values = list(itervalues(self._container)) self._container.clear() if self.dispose_func: for value in values: self.dispose_func(value) def keys(self): with self.lock: return list(iterkeys(self._container)) class HTTPHeaderDict(MutableMapping): """ :param headers: An iterable of field-value pairs. Must not contain multiple field names when compared case-insensitively. :param kwargs: Additional field-value pairs to pass in to ``dict.update``. A ``dict`` like container for storing HTTP Headers. Field names are stored and compared case-insensitively in compliance with RFC 7230. Iteration provides the first case-sensitive key seen for each case-insensitive pair. Using ``__setitem__`` syntax overwrites fields that compare equal case-insensitively in order to maintain ``dict``'s api. For fields that compare equal, instead create a new ``HTTPHeaderDict`` and use ``.add`` in a loop. 
If multiple fields that are equal case-insensitively are passed to the constructor or ``.update``, the behavior is undefined and some will be lost. >>> headers = HTTPHeaderDict() >>> headers.add('Set-Cookie', 'foo=bar') >>> headers.add('set-cookie', 'baz=quxx') >>> headers['content-length'] = '7' >>> headers['SET-cookie'] 'foo=bar, baz=quxx' >>> headers['Content-Length'] '7' """ def __init__(self, headers=None, **kwargs): super(HTTPHeaderDict, self).__init__() self._container = OrderedDict() if headers is not None: if isinstance(headers, HTTPHeaderDict): self._copy_from(headers) else: self.extend(headers) if kwargs: self.extend(kwargs) def __setitem__(self, key, val): self._container[key.lower()] = [key, val] return self._container[key.lower()] def __getitem__(self, key): val = self._container[key.lower()] return ', '.join(val[1:]) def __delitem__(self, key): del self._container[key.lower()] def __contains__(self, key): return key.lower() in self._container def __eq__(self, other): if not isinstance(other, Mapping) and not hasattr(other, 'keys'): return False if not isinstance(other, type(self)): other = type(self)(other) return (dict((k.lower(), v) for k, v in self.itermerged()) == dict((k.lower(), v) for k, v in other.itermerged())) def __ne__(self, other): return not self.__eq__(other) if not PY3: # Python 2 iterkeys = MutableMapping.iterkeys itervalues = MutableMapping.itervalues __marker = object() def __len__(self): return len(self._container) def __iter__(self): # Only provide the originally cased names for vals in self._container.values(): yield vals[0] def pop(self, key, default=__marker): '''D.pop(k[,d]) -> v, remove specified key and return the corresponding value. If key is not found, d is returned if given, otherwise KeyError is raised. ''' # Using the MutableMapping function directly fails due to the private marker. # Using ordinary dict.pop would expose the internal structures. # So let's reinvent the wheel. try: value = self[key] except KeyError: if default is self.__marker: raise return default else: del self[key] return value def discard(self, key): try: del self[key] except KeyError: pass def add(self, key, val): """Adds a (name, value) pair, doesn't overwrite the value if it already exists. >>> headers = HTTPHeaderDict(foo='bar') >>> headers.add('Foo', 'baz') >>> headers['foo'] 'bar, baz' """ key_lower = key.lower() new_vals = [key, val] # Keep the common case aka no item present as fast as possible vals = self._container.setdefault(key_lower, new_vals) if new_vals is not vals: vals.append(val) def extend(self, *args, **kwargs): """Generic import function for any type of header-like object. Adapted version of MutableMapping.update in order to insert items with self.add instead of self.__setitem__ """ if len(args) > 1: raise TypeError("extend() takes at most 1 positional " "arguments ({0} given)".format(len(args))) other = args[0] if len(args) >= 1 else () if isinstance(other, HTTPHeaderDict): for key, val in other.iteritems(): self.add(key, val) elif isinstance(other, Mapping): for key in other: self.add(key, other[key]) elif hasattr(other, "keys"): for key in other.keys(): self.add(key, other[key]) else: for key, value in other: self.add(key, value) for key, value in kwargs.items(): self.add(key, value) def getlist(self, key, default=__marker): """Returns a list of all the values for the named field. 
Returns an empty list if the key doesn't exist.""" try: vals = self._container[key.lower()] except KeyError: if default is self.__marker: return [] return default else: return vals[1:] # Backwards compatibility for httplib getheaders = getlist getallmatchingheaders = getlist iget = getlist # Backwards compatibility for http.cookiejar get_all = getlist def __repr__(self): return "%s(%s)" % (type(self).__name__, dict(self.itermerged())) def _copy_from(self, other): for key in other: val = other.getlist(key) if isinstance(val, list): # Don't need to convert tuples val = list(val) self._container[key.lower()] = [key] + val def copy(self): clone = type(self)() clone._copy_from(self) return clone def iteritems(self): """Iterate over all header lines, including duplicate ones.""" for key in self: vals = self._container[key.lower()] for val in vals[1:]: yield vals[0], val def itermerged(self): """Iterate over all headers, merging duplicate ones together.""" for key in self: val = self._container[key.lower()] yield val[0], ', '.join(val[1:]) def items(self): return list(self.iteritems()) @classmethod def from_httplib(cls, message): # Python 2 """Read headers from a Python 2 httplib message object.""" # python2.7 does not expose a proper API for exporting multiheaders # efficiently. This function re-reads raw lines from the message # object and extracts the multiheaders properly. headers = [] for line in message.headers: if line.startswith((' ', '\t')): key, value = headers[-1] headers[-1] = (key, value + '\r\n' + line.rstrip()) continue key, value = line.split(':', 1) headers.append((key, value.strip())) return cls(headers) PKZ&if,site-packages/pip/_vendor/urllib3/fields.pyonu[ abc@@sgddlmZddlZddlZddlmZddZdZde fd YZ dS( i(tabsolute_importNi(tsixsapplication/octet-streamcC@s!|rtj|dp|S|S(s Guess the "Content-Type" of a file. :param filename: The filename to guess the "Content-Type" of using :mod:`mimetypes`. :param default: If no "Content-Type" can be guessed, default to `default`. i(t mimetypest guess_type(tfilenametdefault((s>/usr/lib/python2.7/site-packages/pip/_vendor/urllib3/fields.pytguess_content_types c@stfddDs^d|f}y|jdWnttfk rVq^X|Sntj rttjrjdntj j dd|fS(s Helper function to format and quote a single header parameter. Particularly useful for header parameters which might contain non-ASCII values, like file names. This follows RFC 2231, as suggested by RFC 2388 Section 4.4. :param name: The name of the parameter, a string expected to be ASCII only. :param value: The value of the parameter, provided as a unicode string. c3@s|]}|kVqdS(N((t.0tch(tvalue(s>/usr/lib/python2.7/site-packages/pip/_vendor/urllib3/fields.pys #ss"\ s%s="%s"tasciisutf-8s%s*=%s( tanytencodetUnicodeEncodeErrortUnicodeDecodeErrorRtPY3t isinstancet text_typetemailtutilstencode_rfc2231(tnameR tresult((R s>/usr/lib/python2.7/site-packages/pip/_vendor/urllib3/fields.pytformat_header_params t RequestFieldcB@sYeZdZdddZedZdZdZdZ ddddZ RS(sK A data container for request body parameters. :param name: The name of this request field. :param data: The data/value body. :param filename: An optional filename of the request field. :param headers: An optional dict-like object of headers to initially use for the field. cC@s@||_||_||_i|_|r<t||_ndS(N(t_namet _filenametdatatheaderstdict(tselfRRRR((s>/usr/lib/python2.7/site-packages/pip/_vendor/urllib3/fields.pyt__init__?s     cC@st|trNt|dkr3|\}}}q`|\}}t|}nd}d}|}|||d|}|jd||S(s A :class:`~urllib3.fields.RequestField` factory from old-style tuple parameters. 
[binary data omitted: compiled Python bytecode, site-packages/pip/_vendor/urllib3/fields.pyo]
[binary data omitted: compiled Python bytecode, site-packages/pip/_vendor/urllib3/filepost.pyc]
[binary data omitted: compiled Python bytecode, site-packages/pip/_vendor/urllib3/exceptions.pyo]
[binary data omitted: compiled Python bytecode, site-packages/pip/_vendor/urllib3/request.pyo]
:meth:`.request` is for making any kind of request, it will look up the appropriate encoding format and use one of the above two methods to make the request. Initializer parameters: :param headers: Headers to include with all requests, unless other headers are given explicitly. tDELETEtGETtHEADtOPTIONScC@s|p i|_dS(N(theaders(tselfR((s?/usr/lib/python2.7/site-packages/pip/_vendor/urllib3/request.pyt__init__)scK@stddS(NsMClasses extending RequestMethods must implement their own ``urlopen`` method.(tNotImplemented(R tmethodturltbodyRtencode_multiparttmultipart_boundarytkw((s?/usr/lib/python2.7/site-packages/pip/_vendor/urllib3/request.pyturlopen,scK@s]|j}||jkr:|j||d|d||S|j||d|d||SdS(s Make a request using :meth:`urlopen` with the appropriate encoding of ``fields`` based on the ``method`` used. This is a convenience method that requires the least amount of manual effort. It can be used in most situations, while still having the option to drop down to more specific methods when necessary, such as :meth:`request_encode_url`, :meth:`request_encode_body`, or even the lowest level :meth:`urlopen`. tfieldsRN(tuppert_encode_url_methodstrequest_encode_urltrequest_encode_body(R R R RRt urlopen_kw((s?/usr/lib/python2.7/site-packages/pip/_vendor/urllib3/request.pytrequest2s cK@sb|dkr|j}ni|d6}|j||rO|dt|7}n|j|||S(s Make a request using :meth:`urlopen` with the ``fields`` encoded in the url. This is useful for request methods like GET, HEAD, DELETE, etc. Rt?N(tNoneRtupdateRR(R R R RRRtextra_kw((s?/usr/lib/python2.7/site-packages/pip/_vendor/urllib3/request.pyRHs    c K@s|dkr|j}niid6}|rd|krFtdn|rgt|d|\} } nt|d} } | |ds PKZiAA0site-packages/pip/_vendor/urllib3/poolmanager.pynu[from __future__ import absolute_import import collections import functools import logging from ._collections import RecentlyUsedContainer from .connectionpool import HTTPConnectionPool, HTTPSConnectionPool from .connectionpool import port_by_scheme from .exceptions import LocationValueError, MaxRetryError, ProxySchemeUnknown from .packages.six.moves.urllib.parse import urljoin from .request import RequestMethods from .util.url import parse_url from .util.retry import Retry __all__ = ['PoolManager', 'ProxyManager', 'proxy_from_url'] log = logging.getLogger(__name__) SSL_KEYWORDS = ('key_file', 'cert_file', 'cert_reqs', 'ca_certs', 'ssl_version', 'ca_cert_dir', 'ssl_context') # All known keyword arguments that could be provided to the pool manager, its # pools, or the underlying connections. This is used to construct a pool key. _key_fields = ( 'key_scheme', # str 'key_host', # str 'key_port', # int 'key_timeout', # int or float or Timeout 'key_retries', # int or Retry 'key_strict', # bool 'key_block', # bool 'key_source_address', # str 'key_key_file', # str 'key_cert_file', # str 'key_cert_reqs', # str 'key_ca_certs', # str 'key_ssl_version', # str 'key_ca_cert_dir', # str 'key_ssl_context', # instance of ssl.SSLContext or urllib3.util.ssl_.SSLContext 'key_maxsize', # int 'key_headers', # dict 'key__proxy', # parsed proxy url 'key__proxy_headers', # dict 'key_socket_options', # list of (level (int), optname (int), value (int or str)) tuples 'key__socks_options', # dict 'key_assert_hostname', # bool or string 'key_assert_fingerprint', # str ) #: The namedtuple class used to construct keys for the connection pool. #: All custom key schemes should include the fields in this key at a minimum. 
PoolKey = collections.namedtuple('PoolKey', _key_fields) def _default_key_normalizer(key_class, request_context): """ Create a pool key out of a request context dictionary. According to RFC 3986, both the scheme and host are case-insensitive. Therefore, this function normalizes both before constructing the pool key for an HTTPS request. If you wish to change this behaviour, provide alternate callables to ``key_fn_by_scheme``. :param key_class: The class to use when constructing the key. This should be a namedtuple with the ``scheme`` and ``host`` keys at a minimum. :type key_class: namedtuple :param request_context: A dictionary-like object that contain the context for a request. :type request_context: dict :return: A namedtuple that can be used as a connection pool key. :rtype: PoolKey """ # Since we mutate the dictionary, make a copy first context = request_context.copy() context['scheme'] = context['scheme'].lower() context['host'] = context['host'].lower() # These are both dictionaries and need to be transformed into frozensets for key in ('headers', '_proxy_headers', '_socks_options'): if key in context and context[key] is not None: context[key] = frozenset(context[key].items()) # The socket_options key may be a list and needs to be transformed into a # tuple. socket_opts = context.get('socket_options') if socket_opts is not None: context['socket_options'] = tuple(socket_opts) # Map the kwargs to the names in the namedtuple - this is necessary since # namedtuples can't have fields starting with '_'. for key in list(context.keys()): context['key_' + key] = context.pop(key) # Default to ``None`` for keys missing from the context for field in key_class._fields: if field not in context: context[field] = None return key_class(**context) #: A dictionary that maps a scheme to a callable that creates a pool key. #: This can be used to alter the way pool keys are constructed, if desired. #: Each PoolManager makes a copy of this dictionary so they can be configured #: globally here, or individually on the instance. key_fn_by_scheme = { 'http': functools.partial(_default_key_normalizer, PoolKey), 'https': functools.partial(_default_key_normalizer, PoolKey), } pool_classes_by_scheme = { 'http': HTTPConnectionPool, 'https': HTTPSConnectionPool, } class PoolManager(RequestMethods): """ Allows for arbitrary requests while transparently keeping track of necessary connection pools for you. :param num_pools: Number of connection pools to cache before discarding the least recently used pool. :param headers: Headers to include with all requests, unless other headers are given explicitly. :param \\**connection_pool_kw: Additional parameters are used to create fresh :class:`urllib3.connectionpool.ConnectionPool` instances. Example:: >>> manager = PoolManager(num_pools=2) >>> r = manager.request('GET', 'http://google.com/') >>> r = manager.request('GET', 'http://google.com/mail') >>> r = manager.request('GET', 'http://yahoo.com/') >>> len(manager.pools) 2 """ proxy = None def __init__(self, num_pools=10, headers=None, **connection_pool_kw): RequestMethods.__init__(self, headers) self.connection_pool_kw = connection_pool_kw self.pools = RecentlyUsedContainer(num_pools, dispose_func=lambda p: p.close()) # Locally set the pool classes and keys so other PoolManagers can # override them. 
self.pool_classes_by_scheme = pool_classes_by_scheme self.key_fn_by_scheme = key_fn_by_scheme.copy() def __enter__(self): return self def __exit__(self, exc_type, exc_val, exc_tb): self.clear() # Return False to re-raise any potential exceptions return False def _new_pool(self, scheme, host, port, request_context=None): """ Create a new :class:`ConnectionPool` based on host, port, scheme, and any additional pool keyword arguments. If ``request_context`` is provided, it is provided as keyword arguments to the pool class used. This method is used to actually create the connection pools handed out by :meth:`connection_from_url` and companion methods. It is intended to be overridden for customization. """ pool_cls = self.pool_classes_by_scheme[scheme] if request_context is None: request_context = self.connection_pool_kw.copy() # Although the context has everything necessary to create the pool, # this function has historically only used the scheme, host, and port # in the positional args. When an API change is acceptable these can # be removed. for key in ('scheme', 'host', 'port'): request_context.pop(key, None) if scheme == 'http': for kw in SSL_KEYWORDS: request_context.pop(kw, None) return pool_cls(host, port, **request_context) def clear(self): """ Empty our store of pools and direct them all to close. This will not affect in-flight connections, but they will not be re-used after completion. """ self.pools.clear() def connection_from_host(self, host, port=None, scheme='http', pool_kwargs=None): """ Get a :class:`ConnectionPool` based on the host, port, and scheme. If ``port`` isn't given, it will be derived from the ``scheme`` using ``urllib3.connectionpool.port_by_scheme``. If ``pool_kwargs`` is provided, it is merged with the instance's ``connection_pool_kw`` variable and used to create the new connection pool, if one is needed. """ if not host: raise LocationValueError("No host specified.") request_context = self._merge_pool_kwargs(pool_kwargs) request_context['scheme'] = scheme or 'http' if not port: port = port_by_scheme.get(request_context['scheme'].lower(), 80) request_context['port'] = port request_context['host'] = host return self.connection_from_context(request_context) def connection_from_context(self, request_context): """ Get a :class:`ConnectionPool` based on the request context. ``request_context`` must at least contain the ``scheme`` key and its value must be a key in ``key_fn_by_scheme`` instance variable. """ scheme = request_context['scheme'].lower() pool_key_constructor = self.key_fn_by_scheme[scheme] pool_key = pool_key_constructor(request_context) return self.connection_from_pool_key(pool_key, request_context=request_context) def connection_from_pool_key(self, pool_key, request_context=None): """ Get a :class:`ConnectionPool` based on the provided pool key. ``pool_key`` should be a namedtuple that only contains immutable objects. At a minimum it must have the ``scheme``, ``host``, and ``port`` fields. """ with self.pools.lock: # If the scheme, host, or port doesn't match existing open # connections, open a new ConnectionPool. pool = self.pools.get(pool_key) if pool: return pool # Make a fresh ConnectionPool of the desired type scheme = request_context['scheme'] host = request_context['host'] port = request_context['port'] pool = self._new_pool(scheme, host, port, request_context=request_context) self.pools[pool_key] = pool return pool def connection_from_url(self, url, pool_kwargs=None): """ Similar to :func:`urllib3.connectionpool.connection_from_url`. 
If ``pool_kwargs`` is not provided and a new pool needs to be constructed, ``self.connection_pool_kw`` is used to initialize the :class:`urllib3.connectionpool.ConnectionPool`. If ``pool_kwargs`` is provided, it is used instead. Note that if a new pool does not need to be created for the request, the provided ``pool_kwargs`` are not used. """ u = parse_url(url) return self.connection_from_host(u.host, port=u.port, scheme=u.scheme, pool_kwargs=pool_kwargs) def _merge_pool_kwargs(self, override): """ Merge a dictionary of override values for self.connection_pool_kw. This does not modify self.connection_pool_kw and returns a new dict. Any keys in the override dictionary with a value of ``None`` are removed from the merged dictionary. """ base_pool_kwargs = self.connection_pool_kw.copy() if override: for key, value in override.items(): if value is None: try: del base_pool_kwargs[key] except KeyError: pass else: base_pool_kwargs[key] = value return base_pool_kwargs def urlopen(self, method, url, redirect=True, **kw): """ Same as :meth:`urllib3.connectionpool.HTTPConnectionPool.urlopen` with custom cross-host redirect logic and only sends the request-uri portion of the ``url``. The given ``url`` parameter must be absolute, such that an appropriate :class:`urllib3.connectionpool.ConnectionPool` can be chosen for it. """ u = parse_url(url) conn = self.connection_from_host(u.host, port=u.port, scheme=u.scheme) kw['assert_same_host'] = False kw['redirect'] = False if 'headers' not in kw: kw['headers'] = self.headers.copy() if self.proxy is not None and u.scheme == "http": response = conn.urlopen(method, url, **kw) else: response = conn.urlopen(method, u.request_uri, **kw) redirect_location = redirect and response.get_redirect_location() if not redirect_location: return response # Support relative URLs for redirecting. redirect_location = urljoin(url, redirect_location) # RFC 7231, Section 6.4.4 if response.status == 303: method = 'GET' retries = kw.get('retries') if not isinstance(retries, Retry): retries = Retry.from_int(retries, redirect=redirect) # Strip headers marked as unsafe to forward to the redirected location. # Check remove_headers_on_redirect to avoid a potential network call within # conn.is_same_host() which may use socket.gethostbyname() in the future. if (retries.remove_headers_on_redirect and not conn.is_same_host(redirect_location)): for header in retries.remove_headers_on_redirect: kw['headers'].pop(header, None) try: retries = retries.increment(method, url, response=response, _pool=conn) except MaxRetryError: if retries.raise_on_redirect: raise return response kw['retries'] = retries kw['redirect'] = redirect log.info("Redirecting %s -> %s", url, redirect_location) return self.urlopen(method, redirect_location, **kw) class ProxyManager(PoolManager): """ Behaves just like :class:`PoolManager`, but sends all requests through the defined proxy, using the CONNECT method for HTTPS URLs. :param proxy_url: The URL of the proxy to be used. :param proxy_headers: A dictionary contaning headers that will be sent to the proxy. In case of HTTP they are being sent with each request, while in the HTTPS/CONNECT case they are sent only once. Could be used for proxy authentication. 
Example: >>> proxy = urllib3.ProxyManager('http://localhost:3128/') >>> r1 = proxy.request('GET', 'http://google.com/') >>> r2 = proxy.request('GET', 'http://httpbin.org/') >>> len(proxy.pools) 1 >>> r3 = proxy.request('GET', 'https://httpbin.org/') >>> r4 = proxy.request('GET', 'https://twitter.com/') >>> len(proxy.pools) 3 """ def __init__(self, proxy_url, num_pools=10, headers=None, proxy_headers=None, **connection_pool_kw): if isinstance(proxy_url, HTTPConnectionPool): proxy_url = '%s://%s:%i' % (proxy_url.scheme, proxy_url.host, proxy_url.port) proxy = parse_url(proxy_url) if not proxy.port: port = port_by_scheme.get(proxy.scheme, 80) proxy = proxy._replace(port=port) if proxy.scheme not in ("http", "https"): raise ProxySchemeUnknown(proxy.scheme) self.proxy = proxy self.proxy_headers = proxy_headers or {} connection_pool_kw['_proxy'] = self.proxy connection_pool_kw['_proxy_headers'] = self.proxy_headers super(ProxyManager, self).__init__( num_pools, headers, **connection_pool_kw) def connection_from_host(self, host, port=None, scheme='http', pool_kwargs=None): if scheme == "https": return super(ProxyManager, self).connection_from_host( host, port, scheme, pool_kwargs=pool_kwargs) return super(ProxyManager, self).connection_from_host( self.proxy.host, self.proxy.port, self.proxy.scheme, pool_kwargs=pool_kwargs) def _set_proxy_headers(self, url, headers=None): """ Sets headers needed by proxies: specifically, the Accept and Host headers. Only sets headers not provided by the user. """ headers_ = {'Accept': '*/*'} netloc = parse_url(url).netloc if netloc: headers_['Host'] = netloc if headers: headers_.update(headers) return headers_ def urlopen(self, method, url, redirect=True, **kw): "Same as HTTP(S)ConnectionPool.urlopen, ``url`` must be absolute." u = parse_url(url) if u.scheme == "http": # For proxied HTTPS requests, httplib sets the necessary headers # on the CONNECT to the proxy. For HTTP, we'll definitely # need to set 'Host' at the very least. headers = kw.get('headers', self.headers) kw['headers'] = self._set_proxy_headers(url, headers) return super(ProxyManager, self).urlopen(method, url, redirect=redirect, **kw) def proxy_from_url(url, **kw): return ProxyManager(proxy_url=url, **kw) PKZC .site-packages/pip/_vendor/urllib3/filepost.pyonu[ abc@@sddlmZddlZddlmZddlmZddlmZddl m Z ddl m Z ej d d Zd Zd Zd ZddZdS(i(tabsolute_importN(tuuid4(tBytesIOi(tsix(tb(t RequestFieldsutf-8icC@s tjS(sN Our embarrassingly-simple replacement for mimetools.choose_boundary. (Rthex(((s@/usr/lib/python2.7/site-packages/pip/_vendor/urllib3/filepost.pytchoose_boundaryscc@sgt|tr!tj|}n t|}x3|D]+}t|trQ|Vq4tj|Vq4WdS(s Iterate over fields. Supports list of (k, v) tuples and dicts, and lists of :class:`~urllib3.fields.RequestField`. N(t isinstancetdictRt iteritemstiterRt from_tuples(tfieldstitfield((s@/usr/lib/python2.7/site-packages/pip/_vendor/urllib3/filepost.pytiter_field_objectss  cC@s4t|tr&dtj|DSd|DS(s- .. deprecated:: 1.6 Iterate over fields. The addition of :class:`~urllib3.fields.RequestField` makes this function obsolete. Instead, use :func:`iter_field_objects`, which returns :class:`~urllib3.fields.RequestField` objects. Supports list of (k, v) tuples and dicts. 
cs@s!|]\}}||fVqdS(N((t.0tktv((s@/usr/lib/python2.7/site-packages/pip/_vendor/urllib3/filepost.pys 6scs@s!|]\}}||fVqdS(N((RRR((s@/usr/lib/python2.7/site-packages/pip/_vendor/urllib3/filepost.pys 8s(RR RR (R ((s@/usr/lib/python2.7/site-packages/pip/_vendor/urllib3/filepost.pyt iter_fields)s cC@st}|dkr!t}nxt|D]}|jtd|t|j|j|j}t |t rt |}nt |t j rt|j|n |j||jdq.W|jtd|t d|}|j|fS(sJ Encode a dictionary of ``fields`` using the multipart/form-data MIME format. :param fields: Dictionary of fields or list of (key, :class:`~urllib3.fields.RequestField`). :param boundary: If not specified, then a random boundary will be generated using :func:`mimetools.choose_boundary`. s--%s s s--%s-- s multipart/form-data; boundary=%sN(RtNoneRRtwriteRtwritertrender_headerstdataRtinttstrRt text_typetgetvalue(R tboundarytbodyRRt content_type((s@/usr/lib/python2.7/site-packages/pip/_vendor/urllib3/filepost.pytencode_multipart_formdata;s     (t __future__RtcodecstuuidRtioRtpackagesRt packages.sixRR RtlookupRRRRRR!(((s@/usr/lib/python2.7/site-packages/pip/_vendor/urllib3/filepost.pyts    PKZ^PP%site-packages/pip/_vendor/appdirs.pyonu[ abc@s@dZd,ZdjeeeZddlZddlZejddkZ e r^eZ nej j drddl Z e j ddZej d rd Zqej d rd Zqd Zn ej ZdddedZdddedZdddedZdddedZdddedZdddedZdefdYZdZdZdZdZed kr!yddlZ eZ!Wq!e"k ryddl#m$Z$eZ!Wqe"k ryddl%Z&eZ!Wqe"k reZ!qXqXq!Xne'dkr<dZ(dZ)d-Z*d$GHee(e)d%d&Z+x&e*D]Z,d'e,e-e+e,fGHq`Wd(GHee(e)Z+x&e*D]Z,d'e,e-e+e,fGHqWd)GHee(Z+x&e*D]Z,d'e,e-e+e,fGHqWd*GHee(d+eZ+x)e*D]Z,d'e,e-e+e,fGHqWndS(.syUtilities for determining application-specific dirs. See for details and usage. iiit.iNitjavatWindowstwin32tMactdarwintlinux2cCs6tdkr|dkr!|}n|r-dp0d}tjjt|}|r|tk rxtjj|||}qtjj||}qn{tdkrtjjd}|rtjj||}qn<tj dtjjd}|rtjj||}n|r2|r2tjj||}n|S( sJReturn full path to the user-specific data dir for this application. "appname" is the name of application. If None, just the system directory is returned. "appauthor" (only used on Windows) is the name of the appauthor or distributing body for this application. Typically it is the owning company name. This falls back to appname. You may pass False to disable it. "version" is an optional version path element to append to the path. You might want to use this if you want multiple versions of your app to be able to run independently. If used, this would typically be ".". Only applied when appname is present. "roaming" (boolean, default False) can be set True to use the Windows roaming appdata directory. That means that for users on a Windows network setup for roaming profiles, this user data will be sync'd on login. See for a discussion of issues. Typical user data directories are: macOS: ~/Library/Application Support/ Unix: ~/.local/share/ # or in $XDG_DATA_HOME, if defined Win XP (not roaming): C:\Documents and Settings\\Application Data\\ Win XP (roaming): C:\Documents and Settings\\Local Settings\Application Data\\ Win 7 (not roaming): C:\Users\\AppData\Local\\ Win 7 (roaming): C:\Users\\AppData\Roaming\\ For Unix, we follow the XDG spec and support $XDG_DATA_HOME. That means, by default "~/.local/share/". 
Rt CSIDL_APPDATAtCSIDL_LOCAL_APPDATARs~/Library/Application Support/t XDG_DATA_HOMEs~/.local/shareN( tsystemtNonetostpathtnormpatht_get_win_foldertFalsetjoint expandusertgetenv(tappnamet appauthortversiontroamingtconstR ((s7/usr/lib/python2.7/site-packages/pip/_vendor/appdirs.pyt user_data_dir-s&      cCstdkr|d kr!|}ntjjtd}|r|tk rftjj|||}q~tjj||}qntdkrtjjd}|rtjj||}qntj dtj jddg}g|j tj D]$}tjj|j tj ^q}|rs|rEtjj||}ng|D]}tj j||g^qL}n|rtj j|}n |d}|S|r|rtjj||}n|S( siReturn full path to the user-shared data dir for this application. "appname" is the name of application. If None, just the system directory is returned. "appauthor" (only used on Windows) is the name of the appauthor or distributing body for this application. Typically it is the owning company name. This falls back to appname. You may pass False to disable it. "version" is an optional version path element to append to the path. You might want to use this if you want multiple versions of your app to be able to run independently. If used, this would typically be ".". Only applied when appname is present. "multipath" is an optional parameter only applicable to *nix which indicates that the entire list of data dirs should be returned. By default, the first item from XDG_DATA_DIRS is returned, or '/usr/local/share/', if XDG_DATA_DIRS is not set Typical user data directories are: macOS: /Library/Application Support/ Unix: /usr/local/share/ or /usr/share/ Win XP: C:\Documents and Settings\All Users\Application Data\\ Vista: (Fail! "C:\ProgramData" is a hidden *system* directory on Vista.) Win 7: C:\ProgramData\\ # Hidden, but writeable on Win 7. For Unix, this is using the $XDG_DATA_DIRS[0] default. WARNING: Do not use this on Windows. See the Vista-Fail note above for why. RtCSIDL_COMMON_APPDATARs/Library/Application Supportt XDG_DATA_DIRSs/usr/local/shares /usr/shareiN(R R R R RRRRRRtpathseptsplittrstriptsep(RRRt multipathR txtpathlist((s7/usr/lib/python2.7/site-packages/pip/_vendor/appdirs.pyt site_data_dirds4      =.  cCstdkr$t||d|}n<tjdtjjd}|r`tjj||}n|r|rtjj||}n|S(sReturn full path to the user-specific config dir for this application. "appname" is the name of application. If None, just the system directory is returned. "appauthor" (only used on Windows) is the name of the appauthor or distributing body for this application. Typically it is the owning company name. This falls back to appname. You may pass False to disable it. "version" is an optional version path element to append to the path. You might want to use this if you want multiple versions of your app to be able to run independently. If used, this would typically be ".". Only applied when appname is present. "roaming" (boolean, default False) can be set True to use the Windows roaming appdata directory. That means that for users on a Windows network setup for roaming profiles, this user data will be sync'd on login. See for a discussion of issues. Typical user data directories are: macOS: same as user_data_dir Unix: ~/.config/ # or in $XDG_CONFIG_HOME, if defined Win *: same as user_data_dir For Unix, we follow the XDG spec and support $XDG_CONFIG_HOME. That means, by deafult "~/.config/". RRtXDG_CONFIG_HOMEs ~/.config(RRN(R RR R RR RR(RRRRR ((s7/usr/lib/python2.7/site-packages/pip/_vendor/appdirs.pytuser_config_dirs  cCs tdkrBt||}|r|rtjj||}qntjdd}g|jtjD]$}tjj|j tj ^qg}|r|rtjj||}ng|D]}tj j||g^q}n|rtjj|}n |d}|S(sReturn full path to the user-shared data dir for this application. "appname" is the name of application. If None, just the system directory is returned. 
"appauthor" (only used on Windows) is the name of the appauthor or distributing body for this application. Typically it is the owning company name. This falls back to appname. You may pass False to disable it. "version" is an optional version path element to append to the path. You might want to use this if you want multiple versions of your app to be able to run independently. If used, this would typically be ".". Only applied when appname is present. "multipath" is an optional parameter only applicable to *nix which indicates that the entire list of config dirs should be returned. By default, the first item from XDG_CONFIG_DIRS is returned, or '/etc/xdg/', if XDG_CONFIG_DIRS is not set Typical user data directories are: macOS: same as site_data_dir Unix: /etc/xdg/ or $XDG_CONFIG_DIRS[i]/ for each value in $XDG_CONFIG_DIRS Win *: same as site_data_dir Vista: (Fail! "C:\ProgramData" is a hidden *system* directory on Vista.) For Unix, this is using the $XDG_CONFIG_DIRS[0] default, if multipath=False WARNING: Do not use this on Windows. See the Vista-Fail note above for why. RRtXDG_CONFIG_DIRSs/etc/xdgi(RR( R R#R R RRRRRRR(RRRR R R!R"((s7/usr/lib/python2.7/site-packages/pip/_vendor/appdirs.pytsite_config_dirs  =. cCsBtdkr|dkr!|}ntjjtd}|r|tk rftjj|||}ntjj||}|rtjj|d}qqn{tdkrtjjd}|rtjj||}qn<tj dtjjd}|rtjj||}n|r>|r>tjj||}n|S( sReturn full path to the user-specific cache dir for this application. "appname" is the name of application. If None, just the system directory is returned. "appauthor" (only used on Windows) is the name of the appauthor or distributing body for this application. Typically it is the owning company name. This falls back to appname. You may pass False to disable it. "version" is an optional version path element to append to the path. You might want to use this if you want multiple versions of your app to be able to run independently. If used, this would typically be ".". Only applied when appname is present. "opinion" (boolean) can be False to disable the appending of "Cache" to the base app data dir for Windows. See discussion below. Typical user cache directories are: macOS: ~/Library/Caches/ Unix: ~/.cache/ (XDG default) Win XP: C:\Documents and Settings\\Local Settings\Application Data\\\Cache Vista: C:\Users\\AppData\Local\\\Cache On Windows the only suggestion in the MSDN docs is that local settings go in the `CSIDL_LOCAL_APPDATA` directory. This is identical to the non-roaming app data dir (the default returned by `user_data_dir` above). Apps typically put cache data somewhere *under* the given dir here. Some examples: ...\Mozilla\Firefox\Profiles\\Cache ...\Acme\SuperApp\Cache\1.0 OPINION: This function appends "Cache" to the `CSIDL_LOCAL_APPDATA` value. This can be disabled with the `opinion=False` option. RRtCacheRs~/Library/CachestXDG_CACHE_HOMEs~/.cacheN( R R R R RRRRRR(RRRtopinionR ((s7/usr/lib/python2.7/site-packages/pip/_vendor/appdirs.pytuser_cache_dirs(!      cCstdkr0tjjtjjd|}n{tdkrut|||}t}|rtjj|d}qn6t|||}t}|rtjj|d}n|r|rtjj||}n|S(sReturn full path to the user-specific log dir for this application. "appname" is the name of application. If None, just the system directory is returned. "appauthor" (only used on Windows) is the name of the appauthor or distributing body for this application. Typically it is the owning company name. This falls back to appname. You may pass False to disable it. "version" is an optional version path element to append to the path. You might want to use this if you want multiple versions of your app to be able to run independently. 
If used, this would typically be ".". Only applied when appname is present. "opinion" (boolean) can be False to disable the appending of "Logs" to the base app data dir for Windows, and "log" to the base cache dir for Unix. See discussion below. Typical user cache directories are: macOS: ~/Library/Logs/ Unix: ~/.cache//log # or under $XDG_CACHE_HOME if defined Win XP: C:\Documents and Settings\\Local Settings\Application Data\\\Logs Vista: C:\Users\\AppData\Local\\\Logs On Windows the only suggestion in the MSDN docs is that local settings go in the `CSIDL_LOCAL_APPDATA` directory. (Note: I'm interested in examples of what some windows apps use for a logs dir.) OPINION: This function appends "Logs" to the `CSIDL_LOCAL_APPDATA` value for Windows and appends "log" to the user cache dir for Unix. This can be disabled with the `opinion=False` option. Rs~/Library/LogsRtLogstlog(R R R RRRRR+(RRRR*R ((s7/usr/lib/python2.7/site-packages/pip/_vendor/appdirs.pyt user_log_dir:s     tAppDirscBs}eZdZddeedZedZedZedZ edZ edZ edZ RS( s1Convenience wrapper for getting application dirs.cCs1||_||_||_||_||_dS(N(RRRRR (tselfRRRRR ((s7/usr/lib/python2.7/site-packages/pip/_vendor/appdirs.pyt__init__os     cCs%t|j|jd|jd|jS(NRR(RRRRR(R0((s7/usr/lib/python2.7/site-packages/pip/_vendor/appdirs.pyRwscCs%t|j|jd|jd|jS(NRR (R#RRRR (R0((s7/usr/lib/python2.7/site-packages/pip/_vendor/appdirs.pyR#|scCs%t|j|jd|jd|jS(NRR(R%RRRR(R0((s7/usr/lib/python2.7/site-packages/pip/_vendor/appdirs.pyR%scCs%t|j|jd|jd|jS(NRR (R'RRRR (R0((s7/usr/lib/python2.7/site-packages/pip/_vendor/appdirs.pyR'scCst|j|jd|jS(NR(R+RRR(R0((s7/usr/lib/python2.7/site-packages/pip/_vendor/appdirs.pyR+scCst|j|jd|jS(NR(R.RRR(R0((s7/usr/lib/python2.7/site-packages/pip/_vendor/appdirs.pyR.sN( t__name__t __module__t__doc__R RR1tpropertyRR#R%R'R+R.(((s7/usr/lib/python2.7/site-packages/pip/_vendor/appdirs.pyR/ms  cCs\ddl}idd6dd6dd6|}|j|jd }|j||\}}|S( sThis is a fallback technique at best. I'm not sure if using the registry for this guarantees us the correct answer for all CSIDL_* names. iNtAppDataRsCommon AppDataRs Local AppDataRs@Software\Microsoft\Windows\CurrentVersion\Explorer\Shell Folders(t_winregtOpenKeytHKEY_CURRENT_USERt QueryValueEx(t csidl_nameR7tshell_folder_nametkeytdirttype((s7/usr/lib/python2.7/site-packages/pip/_vendor/appdirs.pyt_get_win_folder_from_registrys  cCsddlm}m}|jdt||dd}yt|}t}x*|D]"}t|dkrSt}PqSqSW|ryddl }|j |}Wqt k rqXnWnt k rnX|S(Ni(tshellcontshellii( twin32com.shellRARBtSHGetFolderPathtgetattrtunicodeRtordtTruetwin32apitGetShortPathNamet ImportErrort UnicodeError(R;RARBR>t has_high_chartcRI((s7/usr/lib/python2.7/site-packages/pip/_vendor/appdirs.pyt_get_win_folder_with_pywin32s$!      cCsddl}idd6dd6dd6|}|jd}|jjjd|dd |t}x*|D]"}t|d krft}PqfqfW|r|jd}|jj j |j |dr|}qn|j S( NiiRi#RiRiii( tctypestcreate_unicode_buffertwindlltshell32tSHGetFolderPathWR RRGRHtkernel32tGetShortPathNameWtvalue(R;RPt csidl_consttbufRMRNtbuf2((s7/usr/lib/python2.7/site-packages/pip/_vendor/appdirs.pyt_get_win_folder_with_ctypess$   c Cs=ddl}ddlm}ddlm}|jjd}|jd|}|jj }|j dt |j |d|j j||jj|jjd}t}x*|D]"} t| dkrt}PqqW|r9|jd|}|jj } tj|||r9|jj|jjd}q9n|S(Ni(tjna(RiRNsi(tarraytcom.sunR\tcom.sun.jna.platformRtWinDeftMAX_PATHtzerostShell32tINSTANCERDR REtShlObjtSHGFP_TYPE_CURRENTtNativettoStringttostringRRRGRHtKernel32tkernalRJ( R;R]R\Rtbuf_sizeRYRBR>RMRNtkernel((s7/usr/lib/python2.7/site-packages/pip/_vendor/appdirs.pyt_get_win_folder_with_jnas&  +!  
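(Editor-added illustration, not part of the dumped files.) The appdirs docstrings above spell out the per-platform directory conventions; this is a small hedged sketch of the AppDirs convenience wrapper whose properties appear in the bytecode, assuming the module is importable as ``pip._vendor.appdirs`` and using "MyApp"/"MyCompany" as placeholder names.

from pip._vendor.appdirs import AppDirs

dirs = AppDirs("MyApp", "MyCompany", version="1.0")
# Per the docstrings above, on Linux these resolve under the XDG base dirs:
print(dirs.user_data_dir)   # e.g. ~/.local/share/MyApp/1.0
print(dirs.user_cache_dir)  # e.g. ~/.cache/MyApp/1.0
print(dirs.user_log_dir)    # e.g. ~/.cache/MyApp/1.0/log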
'(RRt__main__tMyAppt MyCompanyRR#R%R'R+R.s%-- app dirs (with optional 'version')Rs1.0s%s: %ss) -- app dirs (without optional 'version')s+ -- app dirs (without optional 'appauthor')s( -- app dirs (with disabled 'appauthor')R(iii(RR#R%R'R+R.(.R4t__version_info__Rtmaptstrt __version__tsysR t version_infotPY3RFtplatformt startswithtjava_vertos_nameR R RRR#R%R'RHR+R.tobjectR/R@ROR[RnRCtwin32comRRKRPRRt com.sun.jnatcomR2RRtpropstdirstpropRE(((s7/usr/lib/python2.7/site-packages/pip/_vendor/appdirs.pyt s~        7B(393+                   PKZOH>>%site-packages/pip/_vendor/__init__.pynu[""" pip._vendor is for vendoring dependencies of pip to prevent needing pip to depend on something external. Files inside of pip._vendor should be considered immutable and should only be updated to versions from upstream. """ from __future__ import absolute_import import glob import os.path import sys # Downstream redistributors which have debundled our dependencies should also # patch this value to be true. This will trigger the additional patching # to cause things like "six" to be available as pip. DEBUNDLED = False # By default, look in this directory for a bunch of .whl files which we will # add to the beginning of sys.path before attempting to import anything. This # is done to support downstream re-distributors like Debian and Fedora who # wish to create their own Wheels for our dependencies to aid in debundling. WHEEL_DIR = os.path.abspath(os.path.dirname(__file__)) # Define a small helper function to alias our vendored modules to the real ones # if the vendored ones do not exist. This idea of this was taken from # https://github.com/kennethreitz/requests/pull/2567. def vendored(modulename): vendored_name = "{0}.{1}".format(__name__, modulename) try: __import__(vendored_name, globals(), locals(), level=0) except ImportError: try: __import__(modulename, globals(), locals(), level=0) except ImportError: # We can just silently allow import failures to pass here. If we # got to this point it means that ``import pip._vendor.whatever`` # failed and so did ``import whatever``. Since we're importing this # upfront in an attempt to alias imports, not erroring here will # just mean we get a regular import error whenever pip *actually* # tries to import one of these modules to use it, which actually # gives us a better error message than we would have otherwise # gotten. pass else: sys.modules[vendored_name] = sys.modules[modulename] base, head = vendored_name.rsplit(".", 1) setattr(sys.modules[base], head, sys.modules[modulename]) # If we're operating in a debundled setup, then we want to go ahead and trigger # the aliasing of our vendored libraries as well as looking for wheels to add # to our sys.path. This will cause all of this code to be a no-op typically # however downstream redistributors can enable it in a consistent way across # all platforms. if DEBUNDLED: # Actually look inside of WHEEL_DIR to find .whl files and add them to the # front of our sys.path. sys.path[:] = glob.glob(os.path.join(WHEEL_DIR, "*.whl")) + sys.path # Actually alias all of our vendored dependencies. 
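# [Editor's note: hedged illustration added for clarity; not part of the
#  original pip/_vendor/__init__.py.] The vendored() helper defined above first
# tries to import "pip._vendor.<name>" and, failing that, imports the top-level
# module and registers it under both names in sys.modules (plus a setattr on
# the parent package), so either import path resolves to the same module
# object. Roughly, on a debundled system where the vendored copy of six is
# absent but the real six is installed:
#
#     vendored("six")
#     import sys, six
#     import pip._vendor.six
#     assert sys.modules["pip._vendor.six"] is six
#
# The calls below simply repeat this aliasing for every dependency pip vendors.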
vendored("cachecontrol") vendored("colorama") vendored("distlib") vendored("distro") vendored("html5lib") vendored("lockfile") vendored("six") vendored("six.moves") vendored("six.moves.urllib") vendored("packaging") vendored("packaging.version") vendored("packaging.specifiers") vendored("pkg_resources") vendored("progress") vendored("retrying") vendored("requests") vendored("requests.packages") vendored("requests.packages.urllib3") vendored("requests.packages.urllib3._collections") vendored("requests.packages.urllib3.connection") vendored("requests.packages.urllib3.connectionpool") vendored("requests.packages.urllib3.contrib") vendored("requests.packages.urllib3.contrib.ntlmpool") vendored("requests.packages.urllib3.contrib.pyopenssl") vendored("requests.packages.urllib3.exceptions") vendored("requests.packages.urllib3.fields") vendored("requests.packages.urllib3.filepost") vendored("requests.packages.urllib3.packages") vendored("requests.packages.urllib3.packages.ordered_dict") vendored("requests.packages.urllib3.packages.six") vendored("requests.packages.urllib3.packages.ssl_match_hostname") vendored("requests.packages.urllib3.packages.ssl_match_hostname." "_implementation") vendored("requests.packages.urllib3.poolmanager") vendored("requests.packages.urllib3.request") vendored("requests.packages.urllib3.response") vendored("requests.packages.urllib3.util") vendored("requests.packages.urllib3.util.connection") vendored("requests.packages.urllib3.util.request") vendored("requests.packages.urllib3.util.response") vendored("requests.packages.urllib3.util.retry") vendored("requests.packages.urllib3.util.ssl_") vendored("requests.packages.urllib3.util.timeout") vendored("requests.packages.urllib3.util.url") PKZmlt''&site-packages/pip/_vendor/retrying.pycnu[ abc@sddlZddlmZddlZddlZddlZdZdZdefdYZ defdYZ d e fd YZ dS( iN(tsixi?csStdkr9tdr9d}|dSfd}|SdS(s Decorator function that instantiates the Retrying object @param *dargs: positional arguments passed to Retrying object @param **dkw: keyword arguments passed to the Retrying object iics"tjfd}|S(Ncstj||S(N(tRetryingtcall(targstkw(tf(s8/usr/lib/python2.7/site-packages/pip/_vendor/retrying.pyt wrapped_f$s(Rtwraps(RR((Rs8/usr/lib/python2.7/site-packages/pip/_vendor/retrying.pyt wrap_simple"scs(tjfd}|S(Ncstj||S(N(RR(RR(tdargstdkwR(s8/usr/lib/python2.7/site-packages/pip/_vendor/retrying.pyR/s(RR(RR(R R (Rs8/usr/lib/python2.7/site-packages/pip/_vendor/retrying.pytwrap-s$N(tlentcallable(R R RR ((R R s8/usr/lib/python2.7/site-packages/pip/_vendor/retrying.pytretrys " RcBseZd d d d d d d d d d d d d ed d d dZdZdZdZdZdZ dZ dZ dZ d Z d Zd ZRS( cs|dkrdn||_|dkr-dn||_|dkrHdn||_|dkrcdn||_|dkr~dn||_|dkrdn||_| dkrdn| |_| dkrdn| |_| dkrt n| |_ |dkrdn||_ g|dk r3j |j n|dk rRj |jn|dk rj||_n3|dkrfd|_nt|||_dg|dk rj |jn|dk s|dk rj |jn|dk s | dk rj |jn| dk s6| dk rIj |jn|dk ra||_n3|dkrfd|_nt|||_| dkr|j|_n | |_| dkr|j|_n | |_||_dS( NiidiiicstfdDS(Nc3s|]}|VqdS(N((t.0R(tattemptstdelay(s8/usr/lib/python2.7/site-packages/pip/_vendor/retrying.pys as(tany(RR(t stop_funcs(RRs8/usr/lib/python2.7/site-packages/pip/_vendor/retrying.pytatc_sdS(Ni((Rtkwargs((s8/usr/lib/python2.7/site-packages/pip/_vendor/retrying.pyRhRcstfdDS(Nc3s|]}|VqdS(N((RR(RR(s8/usr/lib/python2.7/site-packages/pip/_vendor/retrying.pys ys(tmax(RR(t wait_funcs(RRs8/usr/lib/python2.7/site-packages/pip/_vendor/retrying.pyRyR(tNonet_stop_max_attempt_numbert_stop_max_delayt 
_wait_fixedt_wait_random_mint_wait_random_maxt_wait_incrementing_startt_wait_incrementing_incrementt_wait_exponential_multipliertMAX_WAITt_wait_exponential_maxt_wait_jitter_maxtappendtstop_after_attempttstop_after_delaytstoptgetattrt fixed_sleept random_sleeptincrementing_sleeptexponential_sleeptwaitt always_rejectt_retry_on_exceptiont never_rejectt_retry_on_resultt_wrap_exception(tselfR(R.tstop_max_attempt_numbertstop_max_delayt wait_fixedtwait_random_mintwait_random_maxtwait_incrementing_starttwait_incrementing_incrementtwait_exponential_multipliertwait_exponential_maxtretry_on_exceptiontretry_on_resulttwrap_exceptiont stop_funct wait_functwait_jitter_max((RRs8/usr/lib/python2.7/site-packages/pip/_vendor/retrying.pyt__init__:sR              cCs ||jkS(s;Stop after the previous attempt >= stop_max_attempt_number.(R(R4tprevious_attempt_numbertdelay_since_first_attempt_ms((s8/usr/lib/python2.7/site-packages/pip/_vendor/retrying.pyR&scCs ||jkS(s=Stop after the time from the first attempt >= stop_max_delay.(R(R4RERF((s8/usr/lib/python2.7/site-packages/pip/_vendor/retrying.pyR'scCsdS(s#Don't sleep at all before retrying.i((R4RERF((s8/usr/lib/python2.7/site-packages/pip/_vendor/retrying.pytno_sleepscCs|jS(s0Sleep a fixed amount of time between each retry.(R(R4RERF((s8/usr/lib/python2.7/site-packages/pip/_vendor/retrying.pyR*scCstj|j|jS(sISleep a random amount of time between wait_random_min and wait_random_max(trandomtrandintRR(R4RERF((s8/usr/lib/python2.7/site-packages/pip/_vendor/retrying.pyR+scCs1|j|j|d}|dkr-d}n|S(s Sleep an incremental amount of time after each attempt, starting at wait_incrementing_start and incrementing by wait_incrementing_increment ii(RR (R4RERFtresult((s8/usr/lib/python2.7/site-packages/pip/_vendor/retrying.pyR,s  cCsKd|}|j|}||jkr2|j}n|dkrGd}n|S(Nii(R!R#(R4RERFtexpRJ((s8/usr/lib/python2.7/site-packages/pip/_vendor/retrying.pyR-s     cCstS(N(tFalse(R4RJ((s8/usr/lib/python2.7/site-packages/pip/_vendor/retrying.pyR1scCstS(N(tTrue(R4RJ((s8/usr/lib/python2.7/site-packages/pip/_vendor/retrying.pyR/scCsFt}|jr,||j|jdO}n||j|jO}|S(Ni(RLt has_exceptionR0tvalueR2(R4tattempttreject((s8/usr/lib/python2.7/site-packages/pip/_vendor/retrying.pyt should_rejects  c Os[tttjd}d}x2trVyt||||t}Wn%tj}t||t}nX|j|s|j |j Stttjd|}|j ||r|j r|j r|j qIt |nU|j||} |jr8tj|j} | td| } ntj| d|d7}q%WdS(Niiig@@(tinttroundttimeRMtAttemptRLtsystexc_infoRRtgetR3R(RNt RetryErrorR.R$RHRtsleep( R4tfnRRt start_timetattempt_numberRPttbRFR[tjitter((s8/usr/lib/python2.7/site-packages/pip/_vendor/retrying.pyRs*    N(t__name__t __module__RRLRDR&R'RGR*R+R,R-R1R/RRR(((s8/usr/lib/python2.7/site-packages/pip/_vendor/retrying.pyR8s0 F        RVcBs,eZdZdZedZdZRS(s An Attempt encapsulates a call to a target function that may end as a normal return value from the function or an Exception depending on what occurred during the execution. cCs||_||_||_dS(N(ROR^RN(R4ROR^RN((s8/usr/lib/python2.7/site-packages/pip/_vendor/retrying.pyRDs  cCsT|jrI|rt|qPtj|jd|jd|jdn|jSdS(s Return the return value of this Attempt instance or raise an Exception. If wrap_exception is true, this Attempt is wrapped inside of a RetryError before being raised. 
iiiN(RNRZRtreraiseRO(R4R@((s8/usr/lib/python2.7/site-packages/pip/_vendor/retrying.pyRYs  +cCsO|jr5dj|jdjtj|jdSdj|j|jSdS(NsAttempts: {0}, Error: {1}RisAttempts: {0}, Value: {1}(RNtformatR^tjoint tracebackt format_tbRO(R4((s8/usr/lib/python2.7/site-packages/pip/_vendor/retrying.pyt__repr__s ,(RaRbt__doc__RDRLRYRh(((s8/usr/lib/python2.7/site-packages/pip/_vendor/retrying.pyRVs  RZcBs eZdZdZdZRS(sU A RetryError encapsulates the last Attempt instance right before giving up. cCs ||_dS(N(t last_attempt(R4Rj((s8/usr/lib/python2.7/site-packages/pip/_vendor/retrying.pyRDscCsdj|jS(NsRetryError[{0}](RdRj(R4((s8/usr/lib/python2.7/site-packages/pip/_vendor/retrying.pyt__str__ s(RaRbRiRDRk(((s8/usr/lib/python2.7/site-packages/pip/_vendor/retrying.pyRZs ( RHt pip._vendorRRWRURfR"RtobjectRRVt ExceptionRZ(((s8/usr/lib/python2.7/site-packages/pip/_vendor/retrying.pyts     !PKZ$ʴ  .site-packages/pip/_vendor/certifi/__init__.pycnu[ abc@s ddlmZmZdZdS(i(twheret old_wheres 2018.01.18N(tcoreRRt __version__(((s@/usr/lib/python2.7/site-packages/pip/_vendor/certifi/__init__.pytsPKZ8RLL*site-packages/pip/_vendor/certifi/core.pycnu[ abc@sadZddlZddlZdefdYZdZdZedkr]eGHndS(sU certifi.py ~~~~~~~~~~ This module returns the installation location of cacert.pem. iNtDeprecatedBundleWarningcBseZdZRS(s The weak security bundle is being deprecated. Please bother your service provider to get them to stop using cross-signed roots. (t__name__t __module__t__doc__(((s</usr/lib/python2.7/site-packages/pip/_vendor/certifi/core.pyRscCsdS(Ns /etc/pki/tls/certs/ca-bundle.crt((((s</usr/lib/python2.7/site-packages/pip/_vendor/certifi/core.pytwherescCstjdttS(NsThe weak security bundle has been removed. certifi.old_where() is now an alias of certifi.where(). Please update your code to use certifi.where() instead. certifi.old_where() will be removed in 2018.(twarningstwarnRR(((s</usr/lib/python2.7/site-packages/pip/_vendor/certifi/core.pyt old_wherest__main__(RtosRtDeprecationWarningRRRR(((s</usr/lib/python2.7/site-packages/pip/_vendor/certifi/core.pyt s    PKZ+))-site-packages/pip/_vendor/certifi/__main__.pynu[from certifi import where print(where()) PKZj[0&&)site-packages/pip/_vendor/certifi/core.pynu[#!/usr/bin/env python # -*- coding: utf-8 -*- """ certifi.py ~~~~~~~~~~ This module returns the installation location of cacert.pem. """ import os import warnings class DeprecatedBundleWarning(DeprecationWarning): """ The weak security bundle is being deprecated. Please bother your service provider to get them to stop using cross-signed roots. """ def where(): return '/etc/pki/tls/certs/ca-bundle.crt' def old_where(): warnings.warn( "The weak security bundle has been removed. certifi.old_where() is now an alias " "of certifi.where(). Please update your code to use certifi.where() instead. 
" "certifi.old_where() will be removed in 2018.", DeprecatedBundleWarning ) return where() if __name__ == '__main__': print(where()) PKZ$ʴ  .site-packages/pip/_vendor/certifi/__init__.pyonu[ abc@s ddlmZmZdZdS(i(twheret old_wheres 2018.01.18N(tcoreRRt __version__(((s@/usr/lib/python2.7/site-packages/pip/_vendor/certifi/__init__.pytsPKZpG??-site-packages/pip/_vendor/certifi/__init__.pynu[from .core import where, old_where __version__ = "2018.01.18" PKZ|.site-packages/pip/_vendor/certifi/__main__.pycnu[ abc@sddlmZeGHdS(i(twhereN(tcertifiR(((s@/usr/lib/python2.7/site-packages/pip/_vendor/certifi/__main__.pytsPKZ8RLL*site-packages/pip/_vendor/certifi/core.pyonu[ abc@sadZddlZddlZdefdYZdZdZedkr]eGHndS(sU certifi.py ~~~~~~~~~~ This module returns the installation location of cacert.pem. iNtDeprecatedBundleWarningcBseZdZRS(s The weak security bundle is being deprecated. Please bother your service provider to get them to stop using cross-signed roots. (t__name__t __module__t__doc__(((s</usr/lib/python2.7/site-packages/pip/_vendor/certifi/core.pyRscCsdS(Ns /etc/pki/tls/certs/ca-bundle.crt((((s</usr/lib/python2.7/site-packages/pip/_vendor/certifi/core.pytwherescCstjdttS(NsThe weak security bundle has been removed. certifi.old_where() is now an alias of certifi.where(). Please update your code to use certifi.where() instead. certifi.old_where() will be removed in 2018.(twarningstwarnRR(((s</usr/lib/python2.7/site-packages/pip/_vendor/certifi/core.pyt old_wherest__main__(RtosRtDeprecationWarningRRRR(((s</usr/lib/python2.7/site-packages/pip/_vendor/certifi/core.pyt s    PKZ|.site-packages/pip/_vendor/certifi/__main__.pyonu[ abc@sddlmZeGHdS(i(twhereN(tcertifiR(((s@/usr/lib/python2.7/site-packages/pip/_vendor/certifi/__main__.pytsPKZFj$+$+0site-packages/pip/_vendor/html5lib/_tokenizer.pynu[from __future__ import absolute_import, division, unicode_literals from pip._vendor.six import unichr as chr from collections import deque from .constants import spaceCharacters from .constants import entities from .constants import asciiLetters, asciiUpper2Lower from .constants import digits, hexDigits, EOF from .constants import tokenTypes, tagTokenTypes from .constants import replacementCharacters from ._inputstream import HTMLInputStream from ._trie import Trie entitiesTrie = Trie(entities) class HTMLTokenizer(object): """ This class takes care of tokenizing HTML. * self.currentToken Holds the token that is currently being processed. * self.state Holds a reference to the method to be invoked... XXX * self.stream Points to HTMLInputStream object. """ def __init__(self, stream, parser=None, **kwargs): self.stream = HTMLInputStream(stream, **kwargs) self.parser = parser # Setup the initial tokenizer state self.escapeFlag = False self.lastFourChars = [] self.state = self.dataState self.escape = False # The current token being created self.currentToken = None super(HTMLTokenizer, self).__init__() def __iter__(self): """ This is where the magic happens. We do our usually processing through the states and when we have a token to return we yield the token which pauses processing until the next token is requested. """ self.tokenQueue = deque([]) # Start processing. When EOF is reached self.state will return False # instead of True and the loop will terminate. 
while self.state(): while self.stream.errors: yield {"type": tokenTypes["ParseError"], "data": self.stream.errors.pop(0)} while self.tokenQueue: yield self.tokenQueue.popleft() def consumeNumberEntity(self, isHex): """This function returns either U+FFFD or the character based on the decimal or hexadecimal representation. It also discards ";" if present. If not present self.tokenQueue.append({"type": tokenTypes["ParseError"]}) is invoked. """ allowed = digits radix = 10 if isHex: allowed = hexDigits radix = 16 charStack = [] # Consume all the characters that are in range while making sure we # don't hit an EOF. c = self.stream.char() while c in allowed and c is not EOF: charStack.append(c) c = self.stream.char() # Convert the set of characters consumed to an int. charAsInt = int("".join(charStack), radix) # Certain characters get replaced with others if charAsInt in replacementCharacters: char = replacementCharacters[charAsInt] self.tokenQueue.append({"type": tokenTypes["ParseError"], "data": "illegal-codepoint-for-numeric-entity", "datavars": {"charAsInt": charAsInt}}) elif ((0xD800 <= charAsInt <= 0xDFFF) or (charAsInt > 0x10FFFF)): char = "\uFFFD" self.tokenQueue.append({"type": tokenTypes["ParseError"], "data": "illegal-codepoint-for-numeric-entity", "datavars": {"charAsInt": charAsInt}}) else: # Should speed up this check somehow (e.g. move the set to a constant) if ((0x0001 <= charAsInt <= 0x0008) or (0x000E <= charAsInt <= 0x001F) or (0x007F <= charAsInt <= 0x009F) or (0xFDD0 <= charAsInt <= 0xFDEF) or charAsInt in frozenset([0x000B, 0xFFFE, 0xFFFF, 0x1FFFE, 0x1FFFF, 0x2FFFE, 0x2FFFF, 0x3FFFE, 0x3FFFF, 0x4FFFE, 0x4FFFF, 0x5FFFE, 0x5FFFF, 0x6FFFE, 0x6FFFF, 0x7FFFE, 0x7FFFF, 0x8FFFE, 0x8FFFF, 0x9FFFE, 0x9FFFF, 0xAFFFE, 0xAFFFF, 0xBFFFE, 0xBFFFF, 0xCFFFE, 0xCFFFF, 0xDFFFE, 0xDFFFF, 0xEFFFE, 0xEFFFF, 0xFFFFE, 0xFFFFF, 0x10FFFE, 0x10FFFF])): self.tokenQueue.append({"type": tokenTypes["ParseError"], "data": "illegal-codepoint-for-numeric-entity", "datavars": {"charAsInt": charAsInt}}) try: # Try/except needed as UCS-2 Python builds' unichar only works # within the BMP. char = chr(charAsInt) except ValueError: v = charAsInt - 0x10000 char = chr(0xD800 | (v >> 10)) + chr(0xDC00 | (v & 0x3FF)) # Discard the ; if present. Otherwise, put it back on the queue and # invoke parseError on parser. if c != ";": self.tokenQueue.append({"type": tokenTypes["ParseError"], "data": "numeric-entity-without-semicolon"}) self.stream.unget(c) return char def consumeEntity(self, allowedChar=None, fromAttribute=False): # Initialise to the default output for when no entity is matched output = "&" charStack = [self.stream.char()] if (charStack[0] in spaceCharacters or charStack[0] in (EOF, "<", "&") or (allowedChar is not None and allowedChar == charStack[0])): self.stream.unget(charStack[0]) elif charStack[0] == "#": # Read the next character to see if it's hex or decimal hex = False charStack.append(self.stream.char()) if charStack[-1] in ("x", "X"): hex = True charStack.append(self.stream.char()) # charStack[-1] should be the first digit if (hex and charStack[-1] in hexDigits) \ or (not hex and charStack[-1] in digits): # At least one digit found, so consume the whole number self.stream.unget(charStack[-1]) output = self.consumeNumberEntity(hex) else: # No digits found self.tokenQueue.append({"type": tokenTypes["ParseError"], "data": "expected-numeric-entity"}) self.stream.unget(charStack.pop()) output = "&" + "".join(charStack) else: # At this point in the process might have named entity. 
Entities # are stored in the global variable "entities". # # Consume characters and compare to these to a substring of the # entity names in the list until the substring no longer matches. while (charStack[-1] is not EOF): if not entitiesTrie.has_keys_with_prefix("".join(charStack)): break charStack.append(self.stream.char()) # At this point we have a string that starts with some characters # that may match an entity # Try to find the longest entity the string will match to take care # of ¬i for instance. try: entityName = entitiesTrie.longest_prefix("".join(charStack[:-1])) entityLength = len(entityName) except KeyError: entityName = None if entityName is not None: if entityName[-1] != ";": self.tokenQueue.append({"type": tokenTypes["ParseError"], "data": "named-entity-without-semicolon"}) if (entityName[-1] != ";" and fromAttribute and (charStack[entityLength] in asciiLetters or charStack[entityLength] in digits or charStack[entityLength] == "=")): self.stream.unget(charStack.pop()) output = "&" + "".join(charStack) else: output = entities[entityName] self.stream.unget(charStack.pop()) output += "".join(charStack[entityLength:]) else: self.tokenQueue.append({"type": tokenTypes["ParseError"], "data": "expected-named-entity"}) self.stream.unget(charStack.pop()) output = "&" + "".join(charStack) if fromAttribute: self.currentToken["data"][-1][1] += output else: if output in spaceCharacters: tokenType = "SpaceCharacters" else: tokenType = "Characters" self.tokenQueue.append({"type": tokenTypes[tokenType], "data": output}) def processEntityInAttribute(self, allowedChar): """This method replaces the need for "entityInAttributeValueState". """ self.consumeEntity(allowedChar=allowedChar, fromAttribute=True) def emitCurrentToken(self): """This method is a generic handler for emitting the tags. It also sets the state to "data" because that's what's needed after a token has been emitted. """ token = self.currentToken # Add token to the queue to be yielded if (token["type"] in tagTokenTypes): token["name"] = token["name"].translate(asciiUpper2Lower) if token["type"] == tokenTypes["EndTag"]: if token["data"]: self.tokenQueue.append({"type": tokenTypes["ParseError"], "data": "attributes-in-end-tag"}) if token["selfClosing"]: self.tokenQueue.append({"type": tokenTypes["ParseError"], "data": "self-closing-flag-on-end-tag"}) self.tokenQueue.append(token) self.state = self.dataState # Below are the various tokenizer states worked out. def dataState(self): data = self.stream.char() if data == "&": self.state = self.entityDataState elif data == "<": self.state = self.tagOpenState elif data == "\u0000": self.tokenQueue.append({"type": tokenTypes["ParseError"], "data": "invalid-codepoint"}) self.tokenQueue.append({"type": tokenTypes["Characters"], "data": "\u0000"}) elif data is EOF: # Tokenization ends. return False elif data in spaceCharacters: # Directly after emitting a token you switch back to the "data # state". At that point spaceCharacters are important so they are # emitted separately. 
self.tokenQueue.append({"type": tokenTypes["SpaceCharacters"], "data": data + self.stream.charsUntil(spaceCharacters, True)}) # No need to update lastFourChars here, since the first space will # have already been appended to lastFourChars and will have broken # any sequences else: chars = self.stream.charsUntil(("&", "<", "\u0000")) self.tokenQueue.append({"type": tokenTypes["Characters"], "data": data + chars}) return True def entityDataState(self): self.consumeEntity() self.state = self.dataState return True def rcdataState(self): data = self.stream.char() if data == "&": self.state = self.characterReferenceInRcdata elif data == "<": self.state = self.rcdataLessThanSignState elif data == EOF: # Tokenization ends. return False elif data == "\u0000": self.tokenQueue.append({"type": tokenTypes["ParseError"], "data": "invalid-codepoint"}) self.tokenQueue.append({"type": tokenTypes["Characters"], "data": "\uFFFD"}) elif data in spaceCharacters: # Directly after emitting a token you switch back to the "data # state". At that point spaceCharacters are important so they are # emitted separately. self.tokenQueue.append({"type": tokenTypes["SpaceCharacters"], "data": data + self.stream.charsUntil(spaceCharacters, True)}) # No need to update lastFourChars here, since the first space will # have already been appended to lastFourChars and will have broken # any sequences else: chars = self.stream.charsUntil(("&", "<", "\u0000")) self.tokenQueue.append({"type": tokenTypes["Characters"], "data": data + chars}) return True def characterReferenceInRcdata(self): self.consumeEntity() self.state = self.rcdataState return True def rawtextState(self): data = self.stream.char() if data == "<": self.state = self.rawtextLessThanSignState elif data == "\u0000": self.tokenQueue.append({"type": tokenTypes["ParseError"], "data": "invalid-codepoint"}) self.tokenQueue.append({"type": tokenTypes["Characters"], "data": "\uFFFD"}) elif data == EOF: # Tokenization ends. return False else: chars = self.stream.charsUntil(("<", "\u0000")) self.tokenQueue.append({"type": tokenTypes["Characters"], "data": data + chars}) return True def scriptDataState(self): data = self.stream.char() if data == "<": self.state = self.scriptDataLessThanSignState elif data == "\u0000": self.tokenQueue.append({"type": tokenTypes["ParseError"], "data": "invalid-codepoint"}) self.tokenQueue.append({"type": tokenTypes["Characters"], "data": "\uFFFD"}) elif data == EOF: # Tokenization ends. return False else: chars = self.stream.charsUntil(("<", "\u0000")) self.tokenQueue.append({"type": tokenTypes["Characters"], "data": data + chars}) return True def plaintextState(self): data = self.stream.char() if data == EOF: # Tokenization ends. return False elif data == "\u0000": self.tokenQueue.append({"type": tokenTypes["ParseError"], "data": "invalid-codepoint"}) self.tokenQueue.append({"type": tokenTypes["Characters"], "data": "\uFFFD"}) else: self.tokenQueue.append({"type": tokenTypes["Characters"], "data": data + self.stream.charsUntil("\u0000")}) return True def tagOpenState(self): data = self.stream.char() if data == "!": self.state = self.markupDeclarationOpenState elif data == "/": self.state = self.closeTagOpenState elif data in asciiLetters: self.currentToken = {"type": tokenTypes["StartTag"], "name": data, "data": [], "selfClosing": False, "selfClosingAcknowledged": False} self.state = self.tagNameState elif data == ">": # XXX In theory it could be something besides a tag name. But # do we really care? 
self.tokenQueue.append({"type": tokenTypes["ParseError"], "data": "expected-tag-name-but-got-right-bracket"}) self.tokenQueue.append({"type": tokenTypes["Characters"], "data": "<>"}) self.state = self.dataState elif data == "?": # XXX In theory it could be something besides a tag name. But # do we really care? self.tokenQueue.append({"type": tokenTypes["ParseError"], "data": "expected-tag-name-but-got-question-mark"}) self.stream.unget(data) self.state = self.bogusCommentState else: # XXX self.tokenQueue.append({"type": tokenTypes["ParseError"], "data": "expected-tag-name"}) self.tokenQueue.append({"type": tokenTypes["Characters"], "data": "<"}) self.stream.unget(data) self.state = self.dataState return True def closeTagOpenState(self): data = self.stream.char() if data in asciiLetters: self.currentToken = {"type": tokenTypes["EndTag"], "name": data, "data": [], "selfClosing": False} self.state = self.tagNameState elif data == ">": self.tokenQueue.append({"type": tokenTypes["ParseError"], "data": "expected-closing-tag-but-got-right-bracket"}) self.state = self.dataState elif data is EOF: self.tokenQueue.append({"type": tokenTypes["ParseError"], "data": "expected-closing-tag-but-got-eof"}) self.tokenQueue.append({"type": tokenTypes["Characters"], "data": "": self.emitCurrentToken() elif data is EOF: self.tokenQueue.append({"type": tokenTypes["ParseError"], "data": "eof-in-tag-name"}) self.state = self.dataState elif data == "/": self.state = self.selfClosingStartTagState elif data == "\u0000": self.tokenQueue.append({"type": tokenTypes["ParseError"], "data": "invalid-codepoint"}) self.currentToken["name"] += "\uFFFD" else: self.currentToken["name"] += data # (Don't use charsUntil here, because tag names are # very short and it's faster to not do anything fancy) return True def rcdataLessThanSignState(self): data = self.stream.char() if data == "/": self.temporaryBuffer = "" self.state = self.rcdataEndTagOpenState else: self.tokenQueue.append({"type": tokenTypes["Characters"], "data": "<"}) self.stream.unget(data) self.state = self.rcdataState return True def rcdataEndTagOpenState(self): data = self.stream.char() if data in asciiLetters: self.temporaryBuffer += data self.state = self.rcdataEndTagNameState else: self.tokenQueue.append({"type": tokenTypes["Characters"], "data": "" and appropriate: self.currentToken = {"type": tokenTypes["EndTag"], "name": self.temporaryBuffer, "data": [], "selfClosing": False} self.emitCurrentToken() self.state = self.dataState elif data in asciiLetters: self.temporaryBuffer += data else: self.tokenQueue.append({"type": tokenTypes["Characters"], "data": "" and appropriate: self.currentToken = {"type": tokenTypes["EndTag"], "name": self.temporaryBuffer, "data": [], "selfClosing": False} self.emitCurrentToken() self.state = self.dataState elif data in asciiLetters: self.temporaryBuffer += data else: self.tokenQueue.append({"type": tokenTypes["Characters"], "data": "" and appropriate: self.currentToken = {"type": tokenTypes["EndTag"], "name": self.temporaryBuffer, "data": [], "selfClosing": False} self.emitCurrentToken() self.state = self.dataState elif data in asciiLetters: self.temporaryBuffer += data else: self.tokenQueue.append({"type": tokenTypes["Characters"], "data": "": self.tokenQueue.append({"type": tokenTypes["Characters"], "data": ">"}) self.state = self.scriptDataState elif data == "\u0000": self.tokenQueue.append({"type": tokenTypes["ParseError"], "data": "invalid-codepoint"}) self.tokenQueue.append({"type": tokenTypes["Characters"], "data": 
"\uFFFD"}) self.state = self.scriptDataEscapedState elif data == EOF: self.state = self.dataState else: self.tokenQueue.append({"type": tokenTypes["Characters"], "data": data}) self.state = self.scriptDataEscapedState return True def scriptDataEscapedLessThanSignState(self): data = self.stream.char() if data == "/": self.temporaryBuffer = "" self.state = self.scriptDataEscapedEndTagOpenState elif data in asciiLetters: self.tokenQueue.append({"type": tokenTypes["Characters"], "data": "<" + data}) self.temporaryBuffer = data self.state = self.scriptDataDoubleEscapeStartState else: self.tokenQueue.append({"type": tokenTypes["Characters"], "data": "<"}) self.stream.unget(data) self.state = self.scriptDataEscapedState return True def scriptDataEscapedEndTagOpenState(self): data = self.stream.char() if data in asciiLetters: self.temporaryBuffer = data self.state = self.scriptDataEscapedEndTagNameState else: self.tokenQueue.append({"type": tokenTypes["Characters"], "data": "" and appropriate: self.currentToken = {"type": tokenTypes["EndTag"], "name": self.temporaryBuffer, "data": [], "selfClosing": False} self.emitCurrentToken() self.state = self.dataState elif data in asciiLetters: self.temporaryBuffer += data else: self.tokenQueue.append({"type": tokenTypes["Characters"], "data": ""))): self.tokenQueue.append({"type": tokenTypes["Characters"], "data": data}) if self.temporaryBuffer.lower() == "script": self.state = self.scriptDataDoubleEscapedState else: self.state = self.scriptDataEscapedState elif data in asciiLetters: self.tokenQueue.append({"type": tokenTypes["Characters"], "data": data}) self.temporaryBuffer += data else: self.stream.unget(data) self.state = self.scriptDataEscapedState return True def scriptDataDoubleEscapedState(self): data = self.stream.char() if data == "-": self.tokenQueue.append({"type": tokenTypes["Characters"], "data": "-"}) self.state = self.scriptDataDoubleEscapedDashState elif data == "<": self.tokenQueue.append({"type": tokenTypes["Characters"], "data": "<"}) self.state = self.scriptDataDoubleEscapedLessThanSignState elif data == "\u0000": self.tokenQueue.append({"type": tokenTypes["ParseError"], "data": "invalid-codepoint"}) self.tokenQueue.append({"type": tokenTypes["Characters"], "data": "\uFFFD"}) elif data == EOF: self.tokenQueue.append({"type": tokenTypes["ParseError"], "data": "eof-in-script-in-script"}) self.state = self.dataState else: self.tokenQueue.append({"type": tokenTypes["Characters"], "data": data}) return True def scriptDataDoubleEscapedDashState(self): data = self.stream.char() if data == "-": self.tokenQueue.append({"type": tokenTypes["Characters"], "data": "-"}) self.state = self.scriptDataDoubleEscapedDashDashState elif data == "<": self.tokenQueue.append({"type": tokenTypes["Characters"], "data": "<"}) self.state = self.scriptDataDoubleEscapedLessThanSignState elif data == "\u0000": self.tokenQueue.append({"type": tokenTypes["ParseError"], "data": "invalid-codepoint"}) self.tokenQueue.append({"type": tokenTypes["Characters"], "data": "\uFFFD"}) self.state = self.scriptDataDoubleEscapedState elif data == EOF: self.tokenQueue.append({"type": tokenTypes["ParseError"], "data": "eof-in-script-in-script"}) self.state = self.dataState else: self.tokenQueue.append({"type": tokenTypes["Characters"], "data": data}) self.state = self.scriptDataDoubleEscapedState return True def scriptDataDoubleEscapedDashDashState(self): data = self.stream.char() if data == "-": self.tokenQueue.append({"type": tokenTypes["Characters"], "data": "-"}) elif data == "<": 
self.tokenQueue.append({"type": tokenTypes["Characters"], "data": "<"}) self.state = self.scriptDataDoubleEscapedLessThanSignState elif data == ">": self.tokenQueue.append({"type": tokenTypes["Characters"], "data": ">"}) self.state = self.scriptDataState elif data == "\u0000": self.tokenQueue.append({"type": tokenTypes["ParseError"], "data": "invalid-codepoint"}) self.tokenQueue.append({"type": tokenTypes["Characters"], "data": "\uFFFD"}) self.state = self.scriptDataDoubleEscapedState elif data == EOF: self.tokenQueue.append({"type": tokenTypes["ParseError"], "data": "eof-in-script-in-script"}) self.state = self.dataState else: self.tokenQueue.append({"type": tokenTypes["Characters"], "data": data}) self.state = self.scriptDataDoubleEscapedState return True def scriptDataDoubleEscapedLessThanSignState(self): data = self.stream.char() if data == "/": self.tokenQueue.append({"type": tokenTypes["Characters"], "data": "/"}) self.temporaryBuffer = "" self.state = self.scriptDataDoubleEscapeEndState else: self.stream.unget(data) self.state = self.scriptDataDoubleEscapedState return True def scriptDataDoubleEscapeEndState(self): data = self.stream.char() if data in (spaceCharacters | frozenset(("/", ">"))): self.tokenQueue.append({"type": tokenTypes["Characters"], "data": data}) if self.temporaryBuffer.lower() == "script": self.state = self.scriptDataEscapedState else: self.state = self.scriptDataDoubleEscapedState elif data in asciiLetters: self.tokenQueue.append({"type": tokenTypes["Characters"], "data": data}) self.temporaryBuffer += data else: self.stream.unget(data) self.state = self.scriptDataDoubleEscapedState return True def beforeAttributeNameState(self): data = self.stream.char() if data in spaceCharacters: self.stream.charsUntil(spaceCharacters, True) elif data in asciiLetters: self.currentToken["data"].append([data, ""]) self.state = self.attributeNameState elif data == ">": self.emitCurrentToken() elif data == "/": self.state = self.selfClosingStartTagState elif data in ("'", '"', "=", "<"): self.tokenQueue.append({"type": tokenTypes["ParseError"], "data": "invalid-character-in-attribute-name"}) self.currentToken["data"].append([data, ""]) self.state = self.attributeNameState elif data == "\u0000": self.tokenQueue.append({"type": tokenTypes["ParseError"], "data": "invalid-codepoint"}) self.currentToken["data"].append(["\uFFFD", ""]) self.state = self.attributeNameState elif data is EOF: self.tokenQueue.append({"type": tokenTypes["ParseError"], "data": "expected-attribute-name-but-got-eof"}) self.state = self.dataState else: self.currentToken["data"].append([data, ""]) self.state = self.attributeNameState return True def attributeNameState(self): data = self.stream.char() leavingThisState = True emitToken = False if data == "=": self.state = self.beforeAttributeValueState elif data in asciiLetters: self.currentToken["data"][-1][0] += data +\ self.stream.charsUntil(asciiLetters, True) leavingThisState = False elif data == ">": # XXX If we emit here the attributes are converted to a dict # without being checked and when the code below runs we error # because data is a dict not a list emitToken = True elif data in spaceCharacters: self.state = self.afterAttributeNameState elif data == "/": self.state = self.selfClosingStartTagState elif data == "\u0000": self.tokenQueue.append({"type": tokenTypes["ParseError"], "data": "invalid-codepoint"}) self.currentToken["data"][-1][0] += "\uFFFD" leavingThisState = False elif data in ("'", '"', "<"): self.tokenQueue.append({"type": 
tokenTypes["ParseError"], "data": "invalid-character-in-attribute-name"}) self.currentToken["data"][-1][0] += data leavingThisState = False elif data is EOF: self.tokenQueue.append({"type": tokenTypes["ParseError"], "data": "eof-in-attribute-name"}) self.state = self.dataState else: self.currentToken["data"][-1][0] += data leavingThisState = False if leavingThisState: # Attributes are not dropped at this stage. That happens when the # start tag token is emitted so values can still be safely appended # to attributes, but we do want to report the parse error in time. self.currentToken["data"][-1][0] = ( self.currentToken["data"][-1][0].translate(asciiUpper2Lower)) for name, _ in self.currentToken["data"][:-1]: if self.currentToken["data"][-1][0] == name: self.tokenQueue.append({"type": tokenTypes["ParseError"], "data": "duplicate-attribute"}) break # XXX Fix for above XXX if emitToken: self.emitCurrentToken() return True def afterAttributeNameState(self): data = self.stream.char() if data in spaceCharacters: self.stream.charsUntil(spaceCharacters, True) elif data == "=": self.state = self.beforeAttributeValueState elif data == ">": self.emitCurrentToken() elif data in asciiLetters: self.currentToken["data"].append([data, ""]) self.state = self.attributeNameState elif data == "/": self.state = self.selfClosingStartTagState elif data == "\u0000": self.tokenQueue.append({"type": tokenTypes["ParseError"], "data": "invalid-codepoint"}) self.currentToken["data"].append(["\uFFFD", ""]) self.state = self.attributeNameState elif data in ("'", '"', "<"): self.tokenQueue.append({"type": tokenTypes["ParseError"], "data": "invalid-character-after-attribute-name"}) self.currentToken["data"].append([data, ""]) self.state = self.attributeNameState elif data is EOF: self.tokenQueue.append({"type": tokenTypes["ParseError"], "data": "expected-end-of-tag-but-got-eof"}) self.state = self.dataState else: self.currentToken["data"].append([data, ""]) self.state = self.attributeNameState return True def beforeAttributeValueState(self): data = self.stream.char() if data in spaceCharacters: self.stream.charsUntil(spaceCharacters, True) elif data == "\"": self.state = self.attributeValueDoubleQuotedState elif data == "&": self.state = self.attributeValueUnQuotedState self.stream.unget(data) elif data == "'": self.state = self.attributeValueSingleQuotedState elif data == ">": self.tokenQueue.append({"type": tokenTypes["ParseError"], "data": "expected-attribute-value-but-got-right-bracket"}) self.emitCurrentToken() elif data == "\u0000": self.tokenQueue.append({"type": tokenTypes["ParseError"], "data": "invalid-codepoint"}) self.currentToken["data"][-1][1] += "\uFFFD" self.state = self.attributeValueUnQuotedState elif data in ("=", "<", "`"): self.tokenQueue.append({"type": tokenTypes["ParseError"], "data": "equals-in-unquoted-attribute-value"}) self.currentToken["data"][-1][1] += data self.state = self.attributeValueUnQuotedState elif data is EOF: self.tokenQueue.append({"type": tokenTypes["ParseError"], "data": "expected-attribute-value-but-got-eof"}) self.state = self.dataState else: self.currentToken["data"][-1][1] += data self.state = self.attributeValueUnQuotedState return True def attributeValueDoubleQuotedState(self): data = self.stream.char() if data == "\"": self.state = self.afterAttributeValueState elif data == "&": self.processEntityInAttribute('"') elif data == "\u0000": self.tokenQueue.append({"type": tokenTypes["ParseError"], "data": "invalid-codepoint"}) self.currentToken["data"][-1][1] += "\uFFFD" elif 
data is EOF: self.tokenQueue.append({"type": tokenTypes["ParseError"], "data": "eof-in-attribute-value-double-quote"}) self.state = self.dataState else: self.currentToken["data"][-1][1] += data +\ self.stream.charsUntil(("\"", "&", "\u0000")) return True def attributeValueSingleQuotedState(self): data = self.stream.char() if data == "'": self.state = self.afterAttributeValueState elif data == "&": self.processEntityInAttribute("'") elif data == "\u0000": self.tokenQueue.append({"type": tokenTypes["ParseError"], "data": "invalid-codepoint"}) self.currentToken["data"][-1][1] += "\uFFFD" elif data is EOF: self.tokenQueue.append({"type": tokenTypes["ParseError"], "data": "eof-in-attribute-value-single-quote"}) self.state = self.dataState else: self.currentToken["data"][-1][1] += data +\ self.stream.charsUntil(("'", "&", "\u0000")) return True def attributeValueUnQuotedState(self): data = self.stream.char() if data in spaceCharacters: self.state = self.beforeAttributeNameState elif data == "&": self.processEntityInAttribute(">") elif data == ">": self.emitCurrentToken() elif data in ('"', "'", "=", "<", "`"): self.tokenQueue.append({"type": tokenTypes["ParseError"], "data": "unexpected-character-in-unquoted-attribute-value"}) self.currentToken["data"][-1][1] += data elif data == "\u0000": self.tokenQueue.append({"type": tokenTypes["ParseError"], "data": "invalid-codepoint"}) self.currentToken["data"][-1][1] += "\uFFFD" elif data is EOF: self.tokenQueue.append({"type": tokenTypes["ParseError"], "data": "eof-in-attribute-value-no-quotes"}) self.state = self.dataState else: self.currentToken["data"][-1][1] += data + self.stream.charsUntil( frozenset(("&", ">", '"', "'", "=", "<", "`", "\u0000")) | spaceCharacters) return True def afterAttributeValueState(self): data = self.stream.char() if data in spaceCharacters: self.state = self.beforeAttributeNameState elif data == ">": self.emitCurrentToken() elif data == "/": self.state = self.selfClosingStartTagState elif data is EOF: self.tokenQueue.append({"type": tokenTypes["ParseError"], "data": "unexpected-EOF-after-attribute-value"}) self.stream.unget(data) self.state = self.dataState else: self.tokenQueue.append({"type": tokenTypes["ParseError"], "data": "unexpected-character-after-attribute-value"}) self.stream.unget(data) self.state = self.beforeAttributeNameState return True def selfClosingStartTagState(self): data = self.stream.char() if data == ">": self.currentToken["selfClosing"] = True self.emitCurrentToken() elif data is EOF: self.tokenQueue.append({"type": tokenTypes["ParseError"], "data": "unexpected-EOF-after-solidus-in-tag"}) self.stream.unget(data) self.state = self.dataState else: self.tokenQueue.append({"type": tokenTypes["ParseError"], "data": "unexpected-character-after-solidus-in-tag"}) self.stream.unget(data) self.state = self.beforeAttributeNameState return True def bogusCommentState(self): # Make a new comment token and give it as value all the characters # until the first > or EOF (charsUntil checks for EOF automatically) # and emit it. data = self.stream.charsUntil(">") data = data.replace("\u0000", "\uFFFD") self.tokenQueue.append( {"type": tokenTypes["Comment"], "data": data}) # Eat the character directly after the bogus comment which is either a # ">" or an EOF. 
self.stream.char() self.state = self.dataState return True def markupDeclarationOpenState(self): charStack = [self.stream.char()] if charStack[-1] == "-": charStack.append(self.stream.char()) if charStack[-1] == "-": self.currentToken = {"type": tokenTypes["Comment"], "data": ""} self.state = self.commentStartState return True elif charStack[-1] in ('d', 'D'): matched = True for expected in (('o', 'O'), ('c', 'C'), ('t', 'T'), ('y', 'Y'), ('p', 'P'), ('e', 'E')): charStack.append(self.stream.char()) if charStack[-1] not in expected: matched = False break if matched: self.currentToken = {"type": tokenTypes["Doctype"], "name": "", "publicId": None, "systemId": None, "correct": True} self.state = self.doctypeState return True elif (charStack[-1] == "[" and self.parser is not None and self.parser.tree.openElements and self.parser.tree.openElements[-1].namespace != self.parser.tree.defaultNamespace): matched = True for expected in ["C", "D", "A", "T", "A", "["]: charStack.append(self.stream.char()) if charStack[-1] != expected: matched = False break if matched: self.state = self.cdataSectionState return True self.tokenQueue.append({"type": tokenTypes["ParseError"], "data": "expected-dashes-or-doctype"}) while charStack: self.stream.unget(charStack.pop()) self.state = self.bogusCommentState return True def commentStartState(self): data = self.stream.char() if data == "-": self.state = self.commentStartDashState elif data == "\u0000": self.tokenQueue.append({"type": tokenTypes["ParseError"], "data": "invalid-codepoint"}) self.currentToken["data"] += "\uFFFD" elif data == ">": self.tokenQueue.append({"type": tokenTypes["ParseError"], "data": "incorrect-comment"}) self.tokenQueue.append(self.currentToken) self.state = self.dataState elif data is EOF: self.tokenQueue.append({"type": tokenTypes["ParseError"], "data": "eof-in-comment"}) self.tokenQueue.append(self.currentToken) self.state = self.dataState else: self.currentToken["data"] += data self.state = self.commentState return True def commentStartDashState(self): data = self.stream.char() if data == "-": self.state = self.commentEndState elif data == "\u0000": self.tokenQueue.append({"type": tokenTypes["ParseError"], "data": "invalid-codepoint"}) self.currentToken["data"] += "-\uFFFD" elif data == ">": self.tokenQueue.append({"type": tokenTypes["ParseError"], "data": "incorrect-comment"}) self.tokenQueue.append(self.currentToken) self.state = self.dataState elif data is EOF: self.tokenQueue.append({"type": tokenTypes["ParseError"], "data": "eof-in-comment"}) self.tokenQueue.append(self.currentToken) self.state = self.dataState else: self.currentToken["data"] += "-" + data self.state = self.commentState return True def commentState(self): data = self.stream.char() if data == "-": self.state = self.commentEndDashState elif data == "\u0000": self.tokenQueue.append({"type": tokenTypes["ParseError"], "data": "invalid-codepoint"}) self.currentToken["data"] += "\uFFFD" elif data is EOF: self.tokenQueue.append({"type": tokenTypes["ParseError"], "data": "eof-in-comment"}) self.tokenQueue.append(self.currentToken) self.state = self.dataState else: self.currentToken["data"] += data + \ self.stream.charsUntil(("-", "\u0000")) return True def commentEndDashState(self): data = self.stream.char() if data == "-": self.state = self.commentEndState elif data == "\u0000": self.tokenQueue.append({"type": tokenTypes["ParseError"], "data": "invalid-codepoint"}) self.currentToken["data"] += "-\uFFFD" self.state = self.commentState elif data is EOF: 
            self.tokenQueue.append({"type": tokenTypes["ParseError"], "data": "eof-in-comment-end-dash"})
            self.tokenQueue.append(self.currentToken)
            self.state = self.dataState
        else:
            self.currentToken["data"] += "-" + data
            self.state = self.commentState
        return True

    def commentEndState(self):
        data = self.stream.char()
        if data == ">":
            self.tokenQueue.append(self.currentToken)
            self.state = self.dataState
        elif data == "\u0000":
            self.tokenQueue.append({"type": tokenTypes["ParseError"], "data": "invalid-codepoint"})
            self.currentToken["data"] += "--\uFFFD"
            self.state = self.commentState
        elif data == "!":
            self.tokenQueue.append({"type": tokenTypes["ParseError"], "data": "unexpected-bang-after-double-dash-in-comment"})
            self.state = self.commentEndBangState
        elif data == "-":
            self.tokenQueue.append({"type": tokenTypes["ParseError"], "data": "unexpected-dash-after-double-dash-in-comment"})
            self.currentToken["data"] += data
        elif data is EOF:
            self.tokenQueue.append({"type": tokenTypes["ParseError"], "data": "eof-in-comment-double-dash"})
            self.tokenQueue.append(self.currentToken)
            self.state = self.dataState
        else:
            # XXX
            self.tokenQueue.append({"type": tokenTypes["ParseError"], "data": "unexpected-char-in-comment"})
            self.currentToken["data"] += "--" + data
            self.state = self.commentState
        return True

    def commentEndBangState(self):
        data = self.stream.char()
        if data == ">":
            self.tokenQueue.append(self.currentToken)
            self.state = self.dataState
        elif data == "-":
            self.currentToken["data"] += "--!"
            self.state = self.commentEndDashState
        elif data == "\u0000":
            self.tokenQueue.append({"type": tokenTypes["ParseError"], "data": "invalid-codepoint"})
            self.currentToken["data"] += "--!\uFFFD"
            self.state = self.commentState
        elif data is EOF:
            self.tokenQueue.append({"type": tokenTypes["ParseError"], "data": "eof-in-comment-end-bang-state"})
            self.tokenQueue.append(self.currentToken)
            self.state = self.dataState
        else:
            self.currentToken["data"] += "--!" + data
            self.state = self.commentState
        return True

    def doctypeState(self):
        data = self.stream.char()
        if data in spaceCharacters:
            self.state = self.beforeDoctypeNameState
        elif data is EOF:
            self.tokenQueue.append({"type": tokenTypes["ParseError"], "data": "expected-doctype-name-but-got-eof"})
            self.currentToken["correct"] = False
            self.tokenQueue.append(self.currentToken)
            self.state = self.dataState
        else:
            self.tokenQueue.append({"type": tokenTypes["ParseError"], "data": "need-space-after-doctype"})
            self.stream.unget(data)
            self.state = self.beforeDoctypeNameState
        return True

    def beforeDoctypeNameState(self):
        data = self.stream.char()
        if data in spaceCharacters:
            pass
        elif data == ">":
            self.tokenQueue.append({"type": tokenTypes["ParseError"], "data": "expected-doctype-name-but-got-right-bracket"})
            self.currentToken["correct"] = False
            self.tokenQueue.append(self.currentToken)
            self.state = self.dataState
        elif data == "\u0000":
            self.tokenQueue.append({"type": tokenTypes["ParseError"], "data": "invalid-codepoint"})
            self.currentToken["name"] = "\uFFFD"
            self.state = self.doctypeNameState
        elif data is EOF:
            self.tokenQueue.append({"type": tokenTypes["ParseError"], "data": "expected-doctype-name-but-got-eof"})
            self.currentToken["correct"] = False
            self.tokenQueue.append(self.currentToken)
            self.state = self.dataState
        else:
            self.currentToken["name"] = data
            self.state = self.doctypeNameState
        return True

    def doctypeNameState(self):
        data = self.stream.char()
        if data in spaceCharacters:
            self.currentToken["name"] = self.currentToken["name"].translate(asciiUpper2Lower)
            self.state = self.afterDoctypeNameState
        elif data == ">":
            self.currentToken["name"] = self.currentToken["name"].translate(asciiUpper2Lower)
            self.tokenQueue.append(self.currentToken)
            self.state = self.dataState
        elif data == "\u0000":
            self.tokenQueue.append({"type": tokenTypes["ParseError"], "data": "invalid-codepoint"})
            self.currentToken["name"] += "\uFFFD"
            self.state = self.doctypeNameState
        elif data is EOF:
            self.tokenQueue.append({"type": tokenTypes["ParseError"], "data": "eof-in-doctype-name"})
            self.currentToken["correct"] = False
            self.currentToken["name"] = self.currentToken["name"].translate(asciiUpper2Lower)
            self.tokenQueue.append(self.currentToken)
            self.state = self.dataState
        else:
            self.currentToken["name"] += data
        return True

    def afterDoctypeNameState(self):
        data = self.stream.char()
        if data in spaceCharacters:
            pass
        elif data == ">":
            self.tokenQueue.append(self.currentToken)
            self.state = self.dataState
        elif data is EOF:
            self.currentToken["correct"] = False
            self.stream.unget(data)
            self.tokenQueue.append({"type": tokenTypes["ParseError"], "data": "eof-in-doctype"})
            self.tokenQueue.append(self.currentToken)
            self.state = self.dataState
        else:
            if data in ("p", "P"):
                matched = True
                for expected in (("u", "U"), ("b", "B"), ("l", "L"),
                                 ("i", "I"), ("c", "C")):
                    data = self.stream.char()
                    if data not in expected:
                        matched = False
                        break
                if matched:
                    self.state = self.afterDoctypePublicKeywordState
                    return True
            elif data in ("s", "S"):
                matched = True
                for expected in (("y", "Y"), ("s", "S"), ("t", "T"),
                                 ("e", "E"), ("m", "M")):
                    data = self.stream.char()
                    if data not in expected:
                        matched = False
                        break
                if matched:
                    self.state = self.afterDoctypeSystemKeywordState
                    return True

            # All the characters read before the current 'data' will be
            # [a-zA-Z], so they're garbage in the bogus doctype and can be
            # discarded; only the latest character might be '>' or EOF
            # and needs to be ungetted
            self.stream.unget(data)
            self.tokenQueue.append({"type": tokenTypes["ParseError"],
                                    "data": "expected-space-or-right-bracket-in-doctype",
                                    "datavars": {"data": data}})
            self.currentToken["correct"] = False
            self.state = self.bogusDoctypeState
        return True

    def afterDoctypePublicKeywordState(self):
        data = self.stream.char()
        if data in spaceCharacters:
            self.state = self.beforeDoctypePublicIdentifierState
        elif data in ("'", '"'):
            self.tokenQueue.append({"type": tokenTypes["ParseError"], "data": "unexpected-char-in-doctype"})
            self.stream.unget(data)
            self.state = self.beforeDoctypePublicIdentifierState
        elif data is EOF:
            self.tokenQueue.append({"type": tokenTypes["ParseError"], "data": "eof-in-doctype"})
            self.currentToken["correct"] = False
            self.tokenQueue.append(self.currentToken)
            self.state = self.dataState
        else:
            self.stream.unget(data)
            self.state = self.beforeDoctypePublicIdentifierState
        return True

    def beforeDoctypePublicIdentifierState(self):
        data = self.stream.char()
        if data in spaceCharacters:
            pass
        elif data == "\"":
            self.currentToken["publicId"] = ""
            self.state = self.doctypePublicIdentifierDoubleQuotedState
        elif data == "'":
            self.currentToken["publicId"] = ""
            self.state = self.doctypePublicIdentifierSingleQuotedState
        elif data == ">":
            self.tokenQueue.append({"type": tokenTypes["ParseError"], "data": "unexpected-end-of-doctype"})
            self.currentToken["correct"] = False
            self.tokenQueue.append(self.currentToken)
            self.state = self.dataState
        elif data is EOF:
            self.tokenQueue.append({"type": tokenTypes["ParseError"], "data": "eof-in-doctype"})
            self.currentToken["correct"] = False
            self.tokenQueue.append(self.currentToken)
            self.state = self.dataState
        else:
            self.tokenQueue.append({"type": tokenTypes["ParseError"], "data": "unexpected-char-in-doctype"})
            self.currentToken["correct"] = False
            self.state = self.bogusDoctypeState
        return True

    def doctypePublicIdentifierDoubleQuotedState(self):
        data = self.stream.char()
        if data == "\"":
            self.state = self.afterDoctypePublicIdentifierState
        elif data == "\u0000":
            self.tokenQueue.append({"type": tokenTypes["ParseError"], "data": "invalid-codepoint"})
            self.currentToken["publicId"] += "\uFFFD"
        elif data == ">":
            self.tokenQueue.append({"type": tokenTypes["ParseError"], "data": "unexpected-end-of-doctype"})
            self.currentToken["correct"] = False
            self.tokenQueue.append(self.currentToken)
            self.state = self.dataState
        elif data is EOF:
            self.tokenQueue.append({"type": tokenTypes["ParseError"], "data": "eof-in-doctype"})
            self.currentToken["correct"] = False
            self.tokenQueue.append(self.currentToken)
            self.state = self.dataState
        else:
            self.currentToken["publicId"] += data
        return True

    def doctypePublicIdentifierSingleQuotedState(self):
        data = self.stream.char()
        if data == "'":
            self.state = self.afterDoctypePublicIdentifierState
        elif data == "\u0000":
            self.tokenQueue.append({"type": tokenTypes["ParseError"], "data": "invalid-codepoint"})
            self.currentToken["publicId"] += "\uFFFD"
        elif data == ">":
            self.tokenQueue.append({"type": tokenTypes["ParseError"], "data": "unexpected-end-of-doctype"})
            self.currentToken["correct"] = False
            self.tokenQueue.append(self.currentToken)
            self.state = self.dataState
        elif data is EOF:
            self.tokenQueue.append({"type": tokenTypes["ParseError"], "data": "eof-in-doctype"})
            self.currentToken["correct"] = False
            self.tokenQueue.append(self.currentToken)
            self.state = self.dataState
        else:
            self.currentToken["publicId"] += data
        return True

    def afterDoctypePublicIdentifierState(self):
        data = self.stream.char()
        if data in spaceCharacters:
            self.state = self.betweenDoctypePublicAndSystemIdentifiersState
        elif data == ">":
            self.tokenQueue.append(self.currentToken)
            self.state = self.dataState
        elif data == '"':
            self.tokenQueue.append({"type": tokenTypes["ParseError"], "data": "unexpected-char-in-doctype"})
            self.currentToken["systemId"] = ""
            self.state = self.doctypeSystemIdentifierDoubleQuotedState
        elif data == "'":
            self.tokenQueue.append({"type": tokenTypes["ParseError"], "data": "unexpected-char-in-doctype"})
            self.currentToken["systemId"] = ""
            self.state = self.doctypeSystemIdentifierSingleQuotedState
        elif data is EOF:
            self.tokenQueue.append({"type": tokenTypes["ParseError"], "data": "eof-in-doctype"})
            self.currentToken["correct"] = False
            self.tokenQueue.append(self.currentToken)
            self.state = self.dataState
        else:
            self.tokenQueue.append({"type": tokenTypes["ParseError"], "data": "unexpected-char-in-doctype"})
            self.currentToken["correct"] = False
            self.state = self.bogusDoctypeState
        return True

    def betweenDoctypePublicAndSystemIdentifiersState(self):
        data = self.stream.char()
        if data in spaceCharacters:
            pass
        elif data == ">":
            self.tokenQueue.append(self.currentToken)
            self.state = self.dataState
        elif data == '"':
            self.currentToken["systemId"] = ""
            self.state = self.doctypeSystemIdentifierDoubleQuotedState
        elif data == "'":
            self.currentToken["systemId"] = ""
            self.state = self.doctypeSystemIdentifierSingleQuotedState
        elif data == EOF:
            self.tokenQueue.append({"type": tokenTypes["ParseError"], "data": "eof-in-doctype"})
            self.currentToken["correct"] = False
            self.tokenQueue.append(self.currentToken)
            self.state = self.dataState
        else:
            self.tokenQueue.append({"type": tokenTypes["ParseError"], "data": "unexpected-char-in-doctype"})
            self.currentToken["correct"] = False
            self.state = self.bogusDoctypeState
        return True

    def afterDoctypeSystemKeywordState(self):
        data = self.stream.char()
        if data in spaceCharacters:
            self.state = self.beforeDoctypeSystemIdentifierState
        elif data in ("'", '"'):
            self.tokenQueue.append({"type": tokenTypes["ParseError"], "data": "unexpected-char-in-doctype"})
            self.stream.unget(data)
            self.state = self.beforeDoctypeSystemIdentifierState
        elif data is EOF:
            self.tokenQueue.append({"type": tokenTypes["ParseError"], "data": "eof-in-doctype"})
            self.currentToken["correct"] = False
            self.tokenQueue.append(self.currentToken)
            self.state = self.dataState
        else:
            self.stream.unget(data)
            self.state = self.beforeDoctypeSystemIdentifierState
        return True

    def beforeDoctypeSystemIdentifierState(self):
        data = self.stream.char()
        if data in spaceCharacters:
            pass
        elif data == "\"":
            self.currentToken["systemId"] = ""
            self.state = self.doctypeSystemIdentifierDoubleQuotedState
        elif data == "'":
            self.currentToken["systemId"] = ""
            self.state = self.doctypeSystemIdentifierSingleQuotedState
        elif data == ">":
            self.tokenQueue.append({"type": tokenTypes["ParseError"], "data": "unexpected-char-in-doctype"})
            self.currentToken["correct"] = False
            self.tokenQueue.append(self.currentToken)
            self.state = self.dataState
        elif data is EOF:
            self.tokenQueue.append({"type": tokenTypes["ParseError"], "data": "eof-in-doctype"})
            self.currentToken["correct"] = False
            self.tokenQueue.append(self.currentToken)
            self.state = self.dataState
        else:
            self.tokenQueue.append({"type": tokenTypes["ParseError"], "data": "unexpected-char-in-doctype"})
            self.currentToken["correct"] = False
            self.state = self.bogusDoctypeState
        return True

    def doctypeSystemIdentifierDoubleQuotedState(self):
        data = self.stream.char()
        if data == "\"":
            self.state = self.afterDoctypeSystemIdentifierState
        elif data == "\u0000":
            self.tokenQueue.append({"type": tokenTypes["ParseError"], "data": "invalid-codepoint"})
            self.currentToken["systemId"] += "\uFFFD"
        elif data == ">":
            self.tokenQueue.append({"type": tokenTypes["ParseError"], "data": "unexpected-end-of-doctype"})
            self.currentToken["correct"] = False
            self.tokenQueue.append(self.currentToken)
            self.state = self.dataState
        elif data is EOF:
            self.tokenQueue.append({"type": tokenTypes["ParseError"], "data": "eof-in-doctype"})
            self.currentToken["correct"] = False
            self.tokenQueue.append(self.currentToken)
            self.state = self.dataState
        else:
            self.currentToken["systemId"] += data
        return True

    def doctypeSystemIdentifierSingleQuotedState(self):
        data = self.stream.char()
        if data == "'":
            self.state = self.afterDoctypeSystemIdentifierState
        elif data == "\u0000":
            self.tokenQueue.append({"type": tokenTypes["ParseError"], "data": "invalid-codepoint"})
            self.currentToken["systemId"] += "\uFFFD"
        elif data == ">":
            self.tokenQueue.append({"type": tokenTypes["ParseError"], "data": "unexpected-end-of-doctype"})
            self.currentToken["correct"] = False
            self.tokenQueue.append(self.currentToken)
            self.state = self.dataState
        elif data is EOF:
            self.tokenQueue.append({"type": tokenTypes["ParseError"], "data": "eof-in-doctype"})
            self.currentToken["correct"] = False
            self.tokenQueue.append(self.currentToken)
            self.state = self.dataState
        else:
            self.currentToken["systemId"] += data
        return True

    def afterDoctypeSystemIdentifierState(self):
        data = self.stream.char()
        if data in spaceCharacters:
            pass
        elif data == ">":
            self.tokenQueue.append(self.currentToken)
            self.state = self.dataState
        elif data is EOF:
            self.tokenQueue.append({"type": tokenTypes["ParseError"], "data": "eof-in-doctype"})
            self.currentToken["correct"] = False
            self.tokenQueue.append(self.currentToken)
            self.state = self.dataState
        else:
            self.tokenQueue.append({"type": tokenTypes["ParseError"], "data": "unexpected-char-in-doctype"})
            self.state = self.bogusDoctypeState
        return True

    def bogusDoctypeState(self):
        data = self.stream.char()
        if data == ">":
            self.tokenQueue.append(self.currentToken)
            self.state = self.dataState
        elif data is EOF:
            # XXX EMIT
            self.stream.unget(data)
            self.tokenQueue.append(self.currentToken)
            self.state = self.dataState
        else:
            pass
        return True

    def cdataSectionState(self):
        data = []
        while True:
            data.append(self.stream.charsUntil("]"))
            data.append(self.stream.charsUntil(">"))
            char = self.stream.char()
            if char == EOF:
                break
            else:
                assert char == ">"
                if data[-1][-2:] == "]]":
                    data[-1] = data[-1][:-2]
                    break
                else:
                    data.append(char)

        data = "".join(data)  # pylint:disable=redefined-variable-type
        # Deal with null here rather than in the parser
        nullCount = data.count("\u0000")
        if nullCount > 0:
            for _ in range(nullCount):
                self.tokenQueue.append({"type": tokenTypes["ParseError"], "data": "invalid-codepoint"})
            data = data.replace("\u0000", "\uFFFD")
        if data:
            self.tokenQueue.append({"type": tokenTypes["Characters"], "data": data})
        self.state = self.dataState
        return True
asciiLetterstasciiUppercase(tReparseException(t_utils(tStringIO(tBytesIOuasciit>tt|j||krL|t|j|8}|d7}qW||g|_dS(Nii(RRR(RRtoffsetti((sE/usr/lib/python2.7/site-packages/pip/_vendor/html5lib/_inputstream.pytseekLs cC`sp|js|j|S|jdt|jkr_|jdt|jdkr_|j|S|j|SdS(Niii(Rt _readStreamRRt_readFromBuffer(Rtbytes((sE/usr/lib/python2.7/site-packages/pip/_vendor/html5lib/_inputstream.pytreadUs     cC`s&tg|jD]}t|^q S(N(tsumRR(Rtitem((sE/usr/lib/python2.7/site-packages/pip/_vendor/html5lib/_inputstream.pyt_bufferedBytes^scC`sL|jj|}|jj||jdcd7 Normalized stream from source for use by html5lib. source can be either a file-object, local filename or a string. The optional encoding parameter must be a string that indicates the encoding. If specified, that encoding will be used, regardless of any BOM or later declaration (such as in a meta element) u􏿿iiuutf-8ucertainN( R tsupports_lone_surrogatestNonetreportCharacterErrorsRtcharacterErrorsUCS4tcharacterErrorsUCS2tnewLinestlookupEncodingt charEncodingt openStreamt dataStreamtreset(RR>((sE/usr/lib/python2.7/site-packages/pip/_vendor/html5lib/_inputstream.pyRs    cC`sCd|_d|_d|_g|_d|_d|_d|_dS(Nui(Rt chunkSizet chunkOffsetterrorst prevNumLinest prevNumColsREt_bufferedCharacter(R((sE/usr/lib/python2.7/site-packages/pip/_vendor/html5lib/_inputstream.pyRNs      cC`s(t|dr|}n t|}|S(uvProduces a file object from source. source can be either a file object, local filename or a string. uread(R9R(RR>R((sE/usr/lib/python2.7/site-packages/pip/_vendor/html5lib/_inputstream.pyRLs  cC`st|j}|jdd|}|j|}|jdd|}|dkr\|j|}n||d}||fS(Nu iii(RtcountRRtrfindRS(RRRtnLinest positionLinet lastLinePostpositionColumn((sE/usr/lib/python2.7/site-packages/pip/_vendor/html5lib/_inputstream.pyt _positions   cC`s&|j|j\}}|d|fS(u:Returns (line, col) of the current position in the stream.i(R[RP(Rtlinetcol((sE/usr/lib/python2.7/site-packages/pip/_vendor/html5lib/_inputstream.pyRscC`sL|j|jkr%|js%tSn|j}|j|}|d|_|S(uo Read one character from the stream or queue if available. Return EOF when EOF is reached. i(RPROt readChunkRR(RRPtchar((sE/usr/lib/python2.7/site-packages/pip/_vendor/html5lib/_inputstream.pyR_s    cC`sO|dkr|j}n|j|j\|_|_d|_d|_d|_|jj |}|j r|j |}d|_ n |st St |dkrt |d}|dksd|kodknr|d|_ |d }qn|jr|j|n|jdd }|jd d }||_t ||_tS( Nuiiii iiu u u (REt_defaultChunkSizeR[RORRRSRRPRMR"RTR8RtordRFtreplacetTrue(RROR'tlastv((sE/usr/lib/python2.7/site-packages/pip/_vendor/html5lib/_inputstream.pyR^s0        (   cC`s:x3tttj|D]}|jjdqWdS(Nuinvalid-codepoint(trangeRtinvalid_unicode_retfindallRQR&(RR't_((sE/usr/lib/python2.7/site-packages/pip/_vendor/html5lib/_inputstream.pyRG%s"cC`st}xtj|D]}|r(qnt|j}|j}tj|||d!rtj|||d!}|t kr|j j dnt }q|dkr|dkr|t |dkr|j j dqt}|j j dqWdS(Niuinvalid-codepointiii(R8RftfinditerRatgrouptstartR tisSurrogatePairtsurrogatePairToCodepointtnon_bmp_invalid_codepointsRQR&RcR(RR'tskiptmatcht codepointRtchar_val((sE/usr/lib/python2.7/site-packages/pip/_vendor/html5lib/_inputstream.pyRH)s    c C`sTyt||f}Wnqtk rdjg|D]}dt|^q1}|scd|}ntjd|}t||f Normalized stream from source for use by html5lib. source can be either a file-object, local filename or a string. The optional encoding parameter must be a string that indicates the encoding. 
If specified, that encoding will be used, regardless of any BOM or later declaration (such as in a meta element) iidN(RLt rawStreamR<Rt numBytesMetatnumBytesChardettoverride_encodingttransport_encodingtsame_origin_parent_encodingtlikely_encodingtdefault_encodingtdetermineEncodingRKRN(RR>RRRRRt useChardet((sE/usr/lib/python2.7/site-packages/pip/_vendor/html5lib/_inputstream.pyRs       cC`s3|jdjj|jd|_tj|dS(Niureplace(RKt codec_infot streamreaderRRMR<RN(R((sE/usr/lib/python2.7/site-packages/pip/_vendor/html5lib/_inputstream.pyRNs"cC`sUt|dr|}n t|}y|j|jWnt|}nX|S(uvProduces a file object from source. source can be either a file object, local filename or a string. uread(R9RRRR(RR>R((sE/usr/lib/python2.7/site-packages/pip/_vendor/html5lib/_inputstream.pyRLs  cC`s |jdf}|ddk r&|St|jdf}|ddk rO|St|jdf}|ddk rx|S|jdf}|ddk r|St|jdf}|ddk r|djjd r|St|j df}|ddk r|S|ryddl m }Wnt k r4qXg}|}xF|j s|jj|j}|soPn|j||j|qGW|jt|jd}|jjd|dk r|dfSnt|jdf}|ddk r|StddfS(Nucertainiu tentativeuutf-16(tUniversalDetectoruencodingu windows-1252(t detectBOMRERJRRtdetectEncodingMetaRtnamet startswithRtchardet.universaldetectorRt ImportErrortdoneRR"RR&tfeedtclosetresultRR(RtchardetRKRtbufferstdetectorRtencoding((sE/usr/lib/python2.7/site-packages/pip/_vendor/html5lib/_inputstream.pyRsP'       cC`st|}|dkrdS|jdkr:td}nr||jdkrf|jddf|_nF|jjd|df|_|jtd|jd|fdS(Nuutf-16beuutf-16leuutf-8iucertainuEncoding changed from %s to %s(uutf-16beuutf-16le(RJRERRKRRRNR (Rt newEncoding((sE/usr/lib/python2.7/site-packages/pip/_vendor/html5lib/_inputstream.pytchangeEncodings    cC`sidtj6dtj6dtj6dtj6dtj6}|jjd}|j|d }d}|s|j|}d}|s|j|d }d}qn|r|jj |t |S|jj d d Sd S( uAttempts to detect at BOM at the start of the stream. If an encoding can be determined from the BOM return the name of the encoding otherwise return Noneuutf-8uutf-16leuutf-16beuutf-32leuutf-32beiiiiN( tcodecstBOM_UTF8t BOM_UTF16_LEt BOM_UTF16_BEt BOM_UTF32_LEt BOM_UTF32_BERR"tgetRRJRE(RtbomDicttstringRR((sE/usr/lib/python2.7/site-packages/pip/_vendor/html5lib/_inputstream.pyRs$   cC`sk|jj|j}t|}|jjd|j}|dk rg|jdkrgtd}n|S(u9Report the encoding declared by the meta element iuutf-16beuutf-16leuutf-8N(uutf-16beuutf-16le( RR"RtEncodingParserRt getEncodingRERRJ(RRtparserR((sE/usr/lib/python2.7/site-packages/pip/_vendor/html5lib/_inputstream.pyR9s  N( R0R1R2RERcRRNRLRRRR(((sE/usr/lib/python2.7/site-packages/pip/_vendor/html5lib/_inputstream.pyR=s(   >  "t EncodingBytescB`seZdZdZdZdZdZdZdZdZ dZ e e e Z d Z e e Zed Zd Zd Zd ZRS(uString-like object with an associated position and various extra methods If the position is ever greater than the string length then an exception is raisedcC`stj||jS(N(R!t__new__tlower(Rtvalue((sE/usr/lib/python2.7/site-packages/pip/_vendor/html5lib/_inputstream.pyRLscC`s d|_dS(Ni(R[(RR((sE/usr/lib/python2.7/site-packages/pip/_vendor/html5lib/_inputstream.pyRPscC`s|S(N((R((sE/usr/lib/python2.7/site-packages/pip/_vendor/html5lib/_inputstream.pyt__iter__TscC`sS|jd}|_|t|kr/tn|dkrDtn|||d!S(Nii(R[Rt StopIterationR;(Rtp((sE/usr/lib/python2.7/site-packages/pip/_vendor/html5lib/_inputstream.pyt__next__Ws    cC`s |jS(N(R(R((sE/usr/lib/python2.7/site-packages/pip/_vendor/html5lib/_inputstream.pytnext_scC`sY|j}|t|kr$tn|dkr9tn|d|_}|||d!S(Nii(R[RRR;(RR((sE/usr/lib/python2.7/site-packages/pip/_vendor/html5lib/_inputstream.pytpreviouscs    cC`s+|jt|krtn||_dS(N(R[RR(RR((sE/usr/lib/python2.7/site-packages/pip/_vendor/html5lib/_inputstream.pyt setPositionls cC`s<|jt|krtn|jdkr4|jSdSdS(Ni(R[RRRE(R((sE/usr/lib/python2.7/site-packages/pip/_vendor/html5lib/_inputstream.pyt getPositionqs  
cC`s||j|jd!S(Ni(R(R((sE/usr/lib/python2.7/site-packages/pip/_vendor/html5lib/_inputstream.pytgetCurrentByte{scC`sc|j}xJ|t|krU|||d!}||krH||_|S|d7}q W||_dS(uSkip past a list of charactersiN(RRR[RE(RRzRR{((sE/usr/lib/python2.7/site-packages/pip/_vendor/html5lib/_inputstream.pyRos    cC`sc|j}xJ|t|krU|||d!}||krH||_|S|d7}q W||_dS(Ni(RRR[RE(RRzRR{((sE/usr/lib/python2.7/site-packages/pip/_vendor/html5lib/_inputstream.pyt skipUntils    cC`sQ|j}|||t|!}|j|}|rM|jt|7_n|S(uLook for a sequence of bytes at the start of a string. If the bytes are found return True and advance the position to the byte after the match. Otherwise return False and leave the position alone(RRR(RR!RR'R+((sE/usr/lib/python2.7/site-packages/pip/_vendor/html5lib/_inputstream.pyt matchBytess  cC`sh||jj|}|dkr^|jdkr=d|_n|j|t|d7_tStdS(uLook for the next sequence of bytes matching a given sequence. If a match is found advance the position to the last byte of the matchiiiN(RtfindR[RRcR(RR!t newPosition((sE/usr/lib/python2.7/site-packages/pip/_vendor/html5lib/_inputstream.pytjumpTos  (R0R1R2RRRRRRRRtpropertyRRt currentBytetspaceCharactersBytesRoRRR(((sE/usr/lib/python2.7/site-packages/pip/_vendor/html5lib/_inputstream.pyRHs           RcB`s_eZdZdZdZdZdZdZdZdZ dZ d Z RS( u?Mini parser for detecting character encoding from meta elementscC`st||_d|_dS(u3string - the data to work on for encoding detectionN(RR'RER(RR'((sE/usr/lib/python2.7/site-packages/pip/_vendor/html5lib/_inputstream.pyRscC`sd|jfd|jfd|jfd|jfd|jfd|jff}xv|jD]k}t}xR|D]J\}}|jj|rky|}PWqtk rt }PqXqkqkW|sXPqXqXW|j S(Ns(R'R(R((sE/usr/lib/python2.7/site-packages/pip/_vendor/html5lib/_inputstream.pyRscC`sK|jjtkrtSt}d}x"trF|j}|dkrGtS|ddkr|ddk}|rC|dk rC||_tSq%|ddkr|d}t|}|dk rC||_tSq%|ddkr%t t |d}|j }|dk rCt|}|dk r@|r4||_tS|}q@qCq%q%WdS(Nis http-equivis content-typetcharsettcontent( R'RRRcR8REt getAttributeRRJtContentAttrParserRtparse(Rt hasPragmatpendingEncodingtattrttentativeEncodingtcodect contentParser((sE/usr/lib/python2.7/site-packages/pip/_vendor/html5lib/_inputstream.pyRs:             cC`s |jtS(N(thandlePossibleTagR8(R((sE/usr/lib/python2.7/site-packages/pip/_vendor/html5lib/_inputstream.pyRscC`st|j|jtS(N(RR'RRc(R((sE/usr/lib/python2.7/site-packages/pip/_vendor/html5lib/_inputstream.pyRs cC`s|j}|jtkr9|r5|j|jntS|jt}|dkra|jn+|j}x|dk r|j}qpWtS(NR( R'RtasciiLettersBytesRRRcRtspacesAngleBracketsRRE(RtendTagR'R{R((sE/usr/lib/python2.7/site-packages/pip/_vendor/html5lib/_inputstream.pyRs      cC`s|jjdS(NR(R'R(R((sE/usr/lib/python2.7/site-packages/pip/_vendor/html5lib/_inputstream.pyRscC`s|j}|jttdgB}|dkr5dSg}g}xtr|dkr`|r`Pnz|tkr||j}Pn^|d krdj|dfS|tkr|j|j n|dkrdS|j|t |}qDW|dkr|j dj|dfSt ||j}|d kr|}xtrt |}||krt |dj|dj|fS|tkr|j|j q>|j|q>Wn^|dkrdj|dfS|tkr|j|j n|dkr dS|j|x}trt |}|t krSdj|dj|fS|tkru|j|j q|dkrdS|j|qWdS( u_Return a name,value pair for the next attribute in the stream, if one is found, or Nonet/Rt=R(t't"N(RN(RR(RR( R'RoRt frozensetRERcR)tasciiUppercaseBytesR&RRRR(RR'R{tattrNamet attrValuet quoteChar((sE/usr/lib/python2.7/site-packages/pip/_vendor/html5lib/_inputstream.pyRsf                            ( R0R1R2RRRRRRRRR(((sE/usr/lib/python2.7/site-packages/pip/_vendor/html5lib/_inputstream.pyRs    $    RcB`seZdZdZRS(cC`s ||_dS(N(R'(RR'((sE/usr/lib/python2.7/site-packages/pip/_vendor/html5lib/_inputstream.pyRfscC`s:y!|jjd|jjd7_|jj|jjdksHdS|jjd7_|jj|jjdkr|jj}|jjd7_|jj}|jj|r|j||jj!SdSnP|jj}y(|jjt|j||jj!SWntk r|j|SXWntk r5dSXdS(NRiRRR(RR( R'RRRoRRERRR(Rt quoteMarkt oldPosition((sE/usr/lib/python2.7/site-packages/pip/_vendor/html5lib/_inputstream.pyRjs.       
(R0R1RR(((sE/usr/lib/python2.7/site-packages/pip/_vendor/html5lib/_inputstream.pyRes cC`swt|tr:y|jd}Wq:tk r6dSXn|dk roytj|SWqstk rkdSXndSdS(u{Return the python codec name corresponding to an encoding or None if the string doesn't correspond to a valid encoding.uasciiN(R3RtdecodetUnicodeDecodeErrorRERtlookuptAttributeError(R((sE/usr/lib/python2.7/site-packages/pip/_vendor/html5lib/_inputstream.pyRJs   (4t __future__RRRtpip._vendor.sixRRtpip._vendor.six.movesRRRRut pip._vendorRt constantsRR R R R R(R tioRRRRR$tencodeRRRRtinvalid_unicode_no_surrogateRDRvtevalRftsetRntascii_punctuation_reRstobjectRRCR<R=R!RRRRJ(((sE/usr/lib/python2.7/site-packages/pip/_vendor/html5lib/_inputstream.pytsP  "  (((  J h'PKZo<o<0site-packages/pip/_vendor/html5lib/_ihatexml.pyonu[ abc@`sZddlmZmZmZddlZddlZddlmZdZdZ dZ dZ d Z d j ee gZd j ee d d d e e gZd j ed gZejdZejdZdZdZeddZdZdZdZdZejdZejdZejdZdefdYZ dS(i(tabsolute_importtdivisiontunicode_literalsNi(tDataLossWarningu^ [#x0041-#x005A] | [#x0061-#x007A] | [#x00C0-#x00D6] | [#x00D8-#x00F6] | [#x00F8-#x00FF] | [#x0100-#x0131] | [#x0134-#x013E] | [#x0141-#x0148] | [#x014A-#x017E] | [#x0180-#x01C3] | [#x01CD-#x01F0] | [#x01F4-#x01F5] | [#x01FA-#x0217] | [#x0250-#x02A8] | [#x02BB-#x02C1] | #x0386 | [#x0388-#x038A] | #x038C | [#x038E-#x03A1] | [#x03A3-#x03CE] | [#x03D0-#x03D6] | #x03DA | #x03DC | #x03DE | #x03E0 | [#x03E2-#x03F3] | [#x0401-#x040C] | [#x040E-#x044F] | [#x0451-#x045C] | [#x045E-#x0481] | [#x0490-#x04C4] | [#x04C7-#x04C8] | [#x04CB-#x04CC] | [#x04D0-#x04EB] | [#x04EE-#x04F5] | [#x04F8-#x04F9] | [#x0531-#x0556] | #x0559 | [#x0561-#x0586] | [#x05D0-#x05EA] | [#x05F0-#x05F2] | [#x0621-#x063A] | [#x0641-#x064A] | [#x0671-#x06B7] | [#x06BA-#x06BE] | [#x06C0-#x06CE] | [#x06D0-#x06D3] | #x06D5 | [#x06E5-#x06E6] | [#x0905-#x0939] | #x093D | [#x0958-#x0961] | [#x0985-#x098C] | [#x098F-#x0990] | [#x0993-#x09A8] | [#x09AA-#x09B0] | #x09B2 | [#x09B6-#x09B9] | [#x09DC-#x09DD] | [#x09DF-#x09E1] | [#x09F0-#x09F1] | [#x0A05-#x0A0A] | [#x0A0F-#x0A10] | [#x0A13-#x0A28] | [#x0A2A-#x0A30] | [#x0A32-#x0A33] | [#x0A35-#x0A36] | [#x0A38-#x0A39] | [#x0A59-#x0A5C] | #x0A5E | [#x0A72-#x0A74] | [#x0A85-#x0A8B] | #x0A8D | [#x0A8F-#x0A91] | [#x0A93-#x0AA8] | [#x0AAA-#x0AB0] | [#x0AB2-#x0AB3] | [#x0AB5-#x0AB9] | #x0ABD | #x0AE0 | [#x0B05-#x0B0C] | [#x0B0F-#x0B10] | [#x0B13-#x0B28] | [#x0B2A-#x0B30] | [#x0B32-#x0B33] | [#x0B36-#x0B39] | #x0B3D | [#x0B5C-#x0B5D] | [#x0B5F-#x0B61] | [#x0B85-#x0B8A] | [#x0B8E-#x0B90] | [#x0B92-#x0B95] | [#x0B99-#x0B9A] | #x0B9C | [#x0B9E-#x0B9F] | [#x0BA3-#x0BA4] | [#x0BA8-#x0BAA] | [#x0BAE-#x0BB5] | [#x0BB7-#x0BB9] | [#x0C05-#x0C0C] | [#x0C0E-#x0C10] | [#x0C12-#x0C28] | [#x0C2A-#x0C33] | [#x0C35-#x0C39] | [#x0C60-#x0C61] | [#x0C85-#x0C8C] | [#x0C8E-#x0C90] | [#x0C92-#x0CA8] | [#x0CAA-#x0CB3] | [#x0CB5-#x0CB9] | #x0CDE | [#x0CE0-#x0CE1] | [#x0D05-#x0D0C] | [#x0D0E-#x0D10] | [#x0D12-#x0D28] | [#x0D2A-#x0D39] | [#x0D60-#x0D61] | [#x0E01-#x0E2E] | #x0E30 | [#x0E32-#x0E33] | [#x0E40-#x0E45] | [#x0E81-#x0E82] | #x0E84 | [#x0E87-#x0E88] | #x0E8A | #x0E8D | [#x0E94-#x0E97] | [#x0E99-#x0E9F] | [#x0EA1-#x0EA3] | #x0EA5 | #x0EA7 | [#x0EAA-#x0EAB] | [#x0EAD-#x0EAE] | #x0EB0 | [#x0EB2-#x0EB3] | #x0EBD | [#x0EC0-#x0EC4] | [#x0F40-#x0F47] | [#x0F49-#x0F69] | [#x10A0-#x10C5] | [#x10D0-#x10F6] | #x1100 | [#x1102-#x1103] | [#x1105-#x1107] | #x1109 | [#x110B-#x110C] | [#x110E-#x1112] | #x113C | #x113E | #x1140 | #x114C | #x114E | #x1150 | [#x1154-#x1155] | #x1159 | [#x115F-#x1161] | #x1163 | #x1165 | #x1167 | #x1169 | [#x116D-#x116E] | [#x1172-#x1173] | #x1175 | #x119E | 
#x11A8 | #x11AB | [#x11AE-#x11AF] | [#x11B7-#x11B8] | #x11BA | [#x11BC-#x11C2] | #x11EB | #x11F0 | #x11F9 | [#x1E00-#x1E9B] | [#x1EA0-#x1EF9] | [#x1F00-#x1F15] | [#x1F18-#x1F1D] | [#x1F20-#x1F45] | [#x1F48-#x1F4D] | [#x1F50-#x1F57] | #x1F59 | #x1F5B | #x1F5D | [#x1F5F-#x1F7D] | [#x1F80-#x1FB4] | [#x1FB6-#x1FBC] | #x1FBE | [#x1FC2-#x1FC4] | [#x1FC6-#x1FCC] | [#x1FD0-#x1FD3] | [#x1FD6-#x1FDB] | [#x1FE0-#x1FEC] | [#x1FF2-#x1FF4] | [#x1FF6-#x1FFC] | #x2126 | [#x212A-#x212B] | #x212E | [#x2180-#x2182] | [#x3041-#x3094] | [#x30A1-#x30FA] | [#x3105-#x312C] | [#xAC00-#xD7A3]u*[#x4E00-#x9FA5] | #x3007 | [#x3021-#x3029]u [#x0300-#x0345] | [#x0360-#x0361] | [#x0483-#x0486] | [#x0591-#x05A1] | [#x05A3-#x05B9] | [#x05BB-#x05BD] | #x05BF | [#x05C1-#x05C2] | #x05C4 | [#x064B-#x0652] | #x0670 | [#x06D6-#x06DC] | [#x06DD-#x06DF] | [#x06E0-#x06E4] | [#x06E7-#x06E8] | [#x06EA-#x06ED] | [#x0901-#x0903] | #x093C | [#x093E-#x094C] | #x094D | [#x0951-#x0954] | [#x0962-#x0963] | [#x0981-#x0983] | #x09BC | #x09BE | #x09BF | [#x09C0-#x09C4] | [#x09C7-#x09C8] | [#x09CB-#x09CD] | #x09D7 | [#x09E2-#x09E3] | #x0A02 | #x0A3C | #x0A3E | #x0A3F | [#x0A40-#x0A42] | [#x0A47-#x0A48] | [#x0A4B-#x0A4D] | [#x0A70-#x0A71] | [#x0A81-#x0A83] | #x0ABC | [#x0ABE-#x0AC5] | [#x0AC7-#x0AC9] | [#x0ACB-#x0ACD] | [#x0B01-#x0B03] | #x0B3C | [#x0B3E-#x0B43] | [#x0B47-#x0B48] | [#x0B4B-#x0B4D] | [#x0B56-#x0B57] | [#x0B82-#x0B83] | [#x0BBE-#x0BC2] | [#x0BC6-#x0BC8] | [#x0BCA-#x0BCD] | #x0BD7 | [#x0C01-#x0C03] | [#x0C3E-#x0C44] | [#x0C46-#x0C48] | [#x0C4A-#x0C4D] | [#x0C55-#x0C56] | [#x0C82-#x0C83] | [#x0CBE-#x0CC4] | [#x0CC6-#x0CC8] | [#x0CCA-#x0CCD] | [#x0CD5-#x0CD6] | [#x0D02-#x0D03] | [#x0D3E-#x0D43] | [#x0D46-#x0D48] | [#x0D4A-#x0D4D] | #x0D57 | #x0E31 | [#x0E34-#x0E3A] | [#x0E47-#x0E4E] | #x0EB1 | [#x0EB4-#x0EB9] | [#x0EBB-#x0EBC] | [#x0EC8-#x0ECD] | [#x0F18-#x0F19] | #x0F35 | #x0F37 | #x0F39 | #x0F3E | #x0F3F | [#x0F71-#x0F84] | [#x0F86-#x0F8B] | [#x0F90-#x0F95] | #x0F97 | [#x0F99-#x0FAD] | [#x0FB1-#x0FB7] | #x0FB9 | [#x20D0-#x20DC] | #x20E1 | [#x302A-#x302F] | #x3099 | #x309Au  [#x0030-#x0039] | [#x0660-#x0669] | [#x06F0-#x06F9] | [#x0966-#x096F] | [#x09E6-#x09EF] | [#x0A66-#x0A6F] | [#x0AE6-#x0AEF] | [#x0B66-#x0B6F] | [#x0BE7-#x0BEF] | [#x0C66-#x0C6F] | [#x0CE6-#x0CEF] | [#x0D66-#x0D6F] | [#x0E50-#x0E59] | [#x0ED0-#x0ED9] | [#x0F20-#x0F29]u} #x00B7 | #x02D0 | #x02D1 | #x0387 | #x0640 | #x0E46 | #x0EC6 | #x3005 | #[#x3031-#x3035] | [#x309D-#x309E] | [#x30FC-#x30FE]u | u.u-u_u#x([\d|A-F]{4,4})u'\[#x([\d|A-F]{4,4})-#x([\d|A-F]{4,4})\]cC`s g|jdD]}|j^q}g}x|D]}t}xttfD]}|j|}|dk rN|jg|jD]}t |^qt |ddkr|dd|dRBRIR7RERVRSRU(((sB/usr/lib/python2.7/site-packages/pip/_vendor/html5lib/_ihatexml.pyR*s"       (!t __future__RRRRZR5t constantsRtbaseChart ideographictcombiningCharactertdigittextenderR"tletterR8RMR[RRRRR$RRR#R R RLRJRCtobjectR*(((sB/usr/lib/python2.7/site-packages/pip/_vendor/html5lib/_ihatexml.pyts2  0     PKZz-site-packages/pip/_vendor/html5lib/_utils.pyonu[ abc@`sGddlmZmZmZddlZddlmZddlmZyddl j j Z Wn#e k rddlj jZ nXddddd d d gZejdd koejd dkZy.edZeeesedZnWn eZnXeZdefdYZdZdZdZdZdS(i(tabsolute_importtdivisiontunicode_literalsN(t ModuleType(t text_typeu default_etreeuMethodDispatcheruisSurrogatePairusurrogatePairToCodepointumoduleFactoryFactoryusupports_lone_surrogatesuPY27iiiu"\uD800"u u"\uD800"tMethodDispatchercB`s#eZdZddZdZRS(upDict with 2 special properties: On initiation, keys that are lists, sets or tuples are converted to multiple keys so accessing any one of the items in the original list-like object returns the matching value md = 
MethodDispatcher({("foo", "bar"):"baz"}) md["foo"] == "baz" A default value which can be set through the default attribute. cC`sg}xi|D]a\}}t|ttttfr[x7|D]}|j||fq;Wq |j||fq Wtj||d|_ dS(N( t isinstancetlistttuplet frozensettsettappendtdictt__init__tNonetdefault(tselftitemst _dictEntriestnametvaluetitem((s?/usr/lib/python2.7/site-packages/pip/_vendor/html5lib/_utils.pyR 4s cC`stj|||jS(N(R tgetR(Rtkey((s?/usr/lib/python2.7/site-packages/pip/_vendor/html5lib/_utils.pyt __getitem__Cs((t__name__t __module__t__doc__R R(((s?/usr/lib/python2.7/site-packages/pip/_vendor/html5lib/_utils.pyR's  cC`sht|dkogt|ddkogt|ddkogt|ddkogt|ddkS(Niiiiiii(tlentord(tdata((s?/usr/lib/python2.7/site-packages/pip/_vendor/html5lib/_utils.pytisSurrogatePairJs,cC`s2dt|dddt|dd}|S(Niiiiii(R(Rtchar_val((s?/usr/lib/python2.7/site-packages/pip/_vendor/html5lib/_utils.pytsurrogatePairToCodepointPsc`sifd}|S(Nc`sttjtdr(d|j}n d|j}t|j}y|||SWntk rt|}|||}|jj|dkri|s0    &   #  PKZ{hDD0site-packages/pip/_vendor/html5lib/constants.pyonu[ abcP@`sNddlmZmZmZddlZdZidd6dd6dd6d d 6d d 6d d6dd6dd6dd6dd6dd6dd6dd6dd6dd 6d!d"6d#d$6d%d&6d'd(6d)d*6d+d,6d-d.6d/d06d1d26d3d46d5d66d7d86d9d:6d;d<6d=d>6d?d@6dAdB6dCdD6dEdF6dGdH6dIdJ6dKdL6dMdN6dOdP6dQdR6dSdT6dUdV6dWdX6dYdZ6d[d\6dUd]6dUd^6d_d`6dadb6dcdd6dedf6dgdh6didj6dkdl6dmdn6dodp6dqdr6dsdt6dudv6dwdx6dydz6d{d|6d}d~6dd6dd6dd6dd6dd6dd6dd6dd6dd6dd6dd6dd6dd6dd6dd6dd6dd6dd6dd6dd6dd6dd6dd6dd6dd6dd6dd6dd6dd6dd6dd6dd6dd6dd6dd6dd6dd6dd6dd6dd6dd6dd6dd6dd6dd6dd6dd6dd6dd6dd6dd6dd6dd6dd6dd6dd6dd6dd6dd6dd6dd6dd6dd6dd6dd6dd6dd6Zidd6dd6d d 6d d 6d d6dd6Ze eddfeddfeddfeddfeddfeddfeddfeddfeddfeddfeddfeddfeddfeddfed dfed dfed d fgZ e edd!fedd"fedd#fedd$fedd%fedd&fedd'fedd(fedd)fedd*fedd+fedd,fedd-fedd.fgZ e edd/feddfedd0fedd1fedd2fedd3fedd4fedd5fedd6fedd7fedd8fedd9feddfedd:fedd;fedd<fedd=fedd>fedd?fedd@feddAfeddBfeddCfeddDfeddEfeddFfeddGfeddHfeddIfeddJfeddKfeddLfeddMfeddNfeddOfeddPfeddQfeddRfeddSfeddfeddTfeddUfeddVfeddWfeddXfeddYfeddZfedd[feddfedd\fedd]fedd^fedd_fedd`feddafeddfeddbfeddcfedddfeddefeddffeddgfeddhfeddifeddjfeddfeddkfeddfeddlfeddmfeddfeddnfedd feddofeddpfeddqfeddrfed dfgNZ e eddsfed dfed dfed d fgZ e eddfeddfeddfeddfeddfgZi>dtdu6dvdw6dxdy6dzd{6d|d}6d~d6dd6dd6dd6dd6dd6dd6dd6dd6dd6dd6dd6dd6dd6dd6dd6dd6dd6dd6dd6dd6dd6dd6dd6dd6dd6dd6dd6dd6dd6dd6dd6dd6dd6dd6dd6dd6dd6dd6dd6dd6dd6dd6dd6dd6dd6dd6dd6dd6dd6dd6dd6dd6dd6dd6dd6dd6Zidd6Zi d ded fd6d ded fd6d ded fd6d ded fd6d ded fd6d d ed fd6d ded fd6dd3edfd6ddedfd6ddedfd6ddedfd6dd edfd6ZegejD]'\Z\ZZZeefef^q Ze ddddd gZe ddkdmdndogZe ejZe ejZe ejZ e ej!Z!e ej"Z#egejD]$Z$e%e$e%e$j&f^q Z'dZ(e d3d=d dZd]dSd8dVdDddd0d;dWd d gZ)e d dlgZ*e djdgdrdTd_d`dagZ+ie d gd6e dgdj6e dgdV6e ddgd6e ddgd6e ddgdg6e dgd?6e ddgd6e ddddgd=6e dgdS6e dgd\6e dd gdE6e dd d!gd"6e dd gd#6e dd$gd96e dd d%d$ddgdW6e dd d$dgdi6e dd gd&6Z,dZ-e dCdDdEdFdGgZ.idHdI6dHdJ6dKdL6dKdM6dNdO6dNdP6dQdR6dSdT6dSdU6dVdW6dXdY6dZd[6dZd\6d]d^6d_d`6dadb6dcdd6dedf6dgdh6didj6didk6dldm6dndo6dpdq6dpdr6dsdt6dsdu6dvdw6dxdy6dzd{6d|d}6d~d6dd6dd6dd6dd6dd6dd6dd6dd6dd6dd6dd6dd6dd6dd6dd6dd6dd6dd6dd6dd6dd6dd6dd6dd6dd6dd6dd6dd6dd6dd6dd6dd6dd6dd6dd6dd6dd6dd6dd6dd6dd6dd6dd6dd6dd6dd6dd6dd6dd6dd6dd6dd6dd6dd6dd6dd6dd6dd6dd6dd6dd6dd6dd6dd6dd6dd6dd6dd6dd6dd6dd6dd6dd 6d d 6d d 6dd6dd6dd6dd6dd6dd6dd6dd6dd6d 
d!6d"d#6d$d%6d&d'6d(d)6d*d+6d,d-6d.d/6d0d16d2d36dd46d5d66d7d86d9d:6d;d<6d;d=6d>d?6d>d@6dAdB6dCdD6dCdE6dFdG6dHdI6dJdK6dLdM6dLdN6dOdP6dQdR6dSdT6dUdV6dWdX6dYdZ6d[d\6d]d^6d_d`6dadb6dcdd6dedf6dgdh6didj6didk6dldm6dndo6dpdq6drds6dtdu6dvdw6dxdy6dzd{6d|d}6d|d~6dd6dd6dd6dd6dd6dd6dd6dd6dd6dd6dd6dd6dd6dd6dd6dd6dd6dd6dd6dd6dd6dd6dd6dd6dd6dd6dd6dd6dd6dd6dd6dd6dd6dd6dd6dd6dd6dd6dd6dd6dd6dd6dd6dd6dd6dd6dd6dd6dd6dd6dd6dd6dd6dd6dd6dd6dd6dd6dd6dd6dd6dd6dd6dd6dd6dd6dd6dd6dd6dd6dd6dd6dd6dd 6d d 6d d 6dd6dd6dd6dd6dd6dd6dd6dd6dd6dd 6d!d"6d#d$6d%d&6d'd(6d)d*6d+d,6d-d.6d/d06d1d26d3d46d5d66d7d86d9d:6d;d<6d=d>6d?d@6dAdB6dCdD6dEdF6dGdH6dIdJ6dKdL6dMdN6dOdP6dQdR6dSdT6dUdV6ddW6ddX6dYdZ6d[d\6d]d^6d_d`6dadb6dcdd6dedf6dgdh6didj6dkdl6dmdn6dodp6dqdr6d ds6d dt6ddu6dvdw6dxdy6dzd{6dd|6d}d~6dd6dd6dd6dd6dd6dd6dd6dd6dd6dd6dd6dd6dd6dd6dd6dd6dd6dd6dd6dd6dd6dd6dd6dd6dd6dd6dd6dd6dd6dd6dd6dd6dd6dd6dd6dd6dd6dd6dd6dd6dd6dd6dd6dd6dd6dd6dd6dd6dd6dd6dd6dd6dd6dd6dd6dd6dd6dd6dd6dd6dd6dd6dd6dd6dd6dd6dd6dd6dd6dd6dd6dd6dd 6d d 6d d 6dd6dd6dd6dd6dd6dd6dd6dd6dd6dd 6dd!6d"d#6d"d$6d%d&6d'd(6d)d*6d+d,6d+d-6d.d/6d0d16d2d36d4d56d6d76d8d96d:d;6d<d=6d>d?6d>d@6dAdB6dAdC6dDdE6dFdG6dFdH6dIdJ6dKdL6dMdN6dOdP6dQdR6dSdT6dUdV6dWdX6dYdZ6d[d\6dd]6d^d_6d`da6dbdc6ddde6dfdg6dhdi6djdk6dldm6ddn6dodp6dqdr6dsdt6dudv6dudw6dxdy6dzd{6d|d}6d~d6dd6dd6dd6dd6dd6dd6dd6dd6dd6dd6dd6dd6dd6dd6dd6dd6dd6dd6dd6dd6dd6dd6dd6dd6dd6dd6dd6dd6dd6dd6dd6dd6dd6dd6dd6dd6dd6dd6dd6dd6dd6dd6dd6dd6dd6dd6dd6dd6dd6dd6dd6dd6dd6dd6dd6d)d6dd6dd6dd6dd6dd6dd6dd6dd6dd6dd6dd6dd6dd6dd6dd6dd 6dd 6d d 6d d6dd6dd6dd6dd6dd6dd6dd6dd6dd6dd 6dd!6d"d#6d$d%6d&d'6dd(6d)d*6d+d,6d-d.6d/d06d1d26d3d46d5d66d7d86d9d:6d;d<6d=d>6d?d@6dAdB6dCdD6dEdF6dGdH6dIdJ6dKdL6dKdM6dNdO6dPdQ6dRdS6dTdU6dVdW6dVdX6dYdZ6d[d\6d]d^6d_d`6d_da6dbdc6ddde6dfdg6dhdi6djdk6dldm6dndo6dpdq6drds6ddt6dudv6dwdx6dydz6d{d|6d}d~6dd6dd6dd6dd6dd6dd6dd6dd6dd6dd6dd6dd6dd6dd6dd6dd6dd6dd6dd6dd6dd6dd6dd6dd6dd6dd6dd6dd6dd6dd6dd6dd6dd6dd6dd6dd6dd6dd6dd6dd6dd6dd6dd6dd6dd6dd6dd6dd6dd6dd6dd6dd6dd6dd6dd6dd6dd6dd6dd6dd6dd6dd6dd6dd6dd6dd6dd6dd6dd6dd6dd6dd6dgd6dd6dd6dd 6d d 6d d 6d d6dd6dd6dKd6dKdE6dd6dd6dd6dd6dd6dd6d d!6dd"6d#d$6d%d&6d'd(6d)d*6d+d,6d-d.6d/d06d1d26d3d46d5d66d7d86d9d:6d;d<6did=6d>d?6d@dA6dBdC6dAdD6dEdF6dGdH6dIdJ6dKdL6dMdF6dAdN6dIdO6dPdQ6dPdR6dSdT6dUdV6dAdW6ddX6dYdZ6dYd[6d\d]6d\d^6dd_6d`da6dbdc6ddde6dfdg6dhdi6djdk6dldm6dndo6dpdq6dpdr6dhds6dtdu6dddv6dwdx6dydz6d~d{6d~d|6d}d~6dfd6dd6dd6dd6dd6dd6dd6dd6dld6dd6dd6dd6dd6dd6dd6dd6dd6dd6dd6dd6dd6dvd6dd6dd6dd6dd6dd6dd6dd6dd6dd6dd6dd6dd6dd6d}d6d}d6dd6dd6dd6dd6dd6dd6dd6dd6dd6dd6dd6dd6dd6dd6dd6dd6dd6dd6dd6dd6dd6dd6dd6dd6dd6dd6dd6dd6dd6dd6dd6dd6dd6dd6dd6dd6dd6d d 6d d 6d d6dd6dd6dd6dd6dd6dhd6dd6dd6dd6dd6d d!6djd"6dld#6d$d%6d&d'6d(d)6d*d+6d*d,6dd-6d.d/6dd06dd16d2d36d4d56d6d76d8d96d:d;6d<d=6d>d?6d@dA6dBdC6ddD6dEdF6dGdH6dIdJ6dIdK6dLdM6dNdO6dPdQ6dRdS6ddT6ddU6dVdW6dXdY6dXdZ6dd[6d\d]6d^d_6d`da6d`db6dcdd6dedf6dgdh6didj6dkdl6dmdn6dodp6ddq6drds6dtdu6dvdw6dxdy6dkdz6d{d|6d}d~6dd6dd6dd6dd6dnd6dnd6dd6dd6dd6dd6dd6dd6d?d6dd6dd6dd6dd6dd6dd6dd6dd6dd6dd6dd6dd6dd6dd6dd6dd6dd6dd6dd6dd6dd6dd6dd6dd6dd6dd6dd6dd6dd6dd6dd6dd6dd6dd6dd6dd6dd6dd6dd6dd6dd6dd6dd6dd6dd6dd6dd6dd6dd6d?d6dd6dd6dd6dd6dd6dd6dd6dd6dd6dd6dd6dd6dd6dd6d5d6dd 6dd 6dd 6d d 6d d 6dd 6d d 6d d 6d d 6d d 6d d 6d d 6d d 6d d 6d d 6d d 6d d 6d d 6dd 6dd 6d d 6d d! 6d" d# 6d$ d% 6dzd& 6dd' 6dd( 6d5d) 6dd* 6d~d+ 6d, d- 6d. d/ 6d0 d1 6d2 d3 6d4 d5 6d6 d7 6d8 d9 6d: d; 6dd< 6dd= 6dd> 6d? 
d@ 6dA dB 6dC dD 6ddE 6d dF 6dG dH 6dG dI 6dJ dK 6dL dM 6dN dO 6dP dQ 6dP dR 6dS dT 6dU dV 6dW dX 6dndY 6dZ d[ 6d\ d] 6d^ d_ 6d` da 6d` db 6dc dd 6de df 6dg dh 6di dj 6dk dl 6dm dn 6do dp 6dq dr 6ds dt 6ds du 6ds dv 6dw dx 6dy dz 6d{ d| 6d} d~ 6d d 6d d 6d d 6d d 6d d 6d d 6d d 6d d 6d d 6dN d 6dS d 6d_d 6dc d 6dm d 6d d 6d d 6dd 6d d 6d d 6d d 6d d 6d d 6dd 6d_d 6d d 6d d 6d d 6d d 6d d 6d d 6d d 6dld 6dcd 6dnd 6dZ d 6d d 6d d 6d d 6d d 6d d 6d d 6d d 6d d 6d d 6d d 6d d 6d d 6d d 6dzd 6d d 6d d 6d d 6d d 6d d 6d d 6d d 6d d 6d d 6d d 6d d 6d d 6d d 6d d 6d d 6d d 6d d 6d d 6d d 6d d 6d d 6d d 6d d 6d d 6dd 6d d 6d d 6d d 6d d 6d d 6d d 6d d 6d d 6d d 6dd 6dd 6dd 6dd 6dd 6dd 6d d 6d d 6d d 6d d 6d d 6d d! 6d" d# 6dd$ 6dd% 6d& d' 6d( d) 6dd* 6d+ d, 6d- d. 6d/ d0 6d1 d2 6d3 d4 6d3 d5 6d6 d7 6d6 d8 6d1 d9 6d: d; 6d< d= 6dd> 6d? d@ 6ddA 6dB dC 6dD dE 6ddF 6ddD6dG dH 6dI dJ 6dK dL 6dM dN 6dO dP 6d dQ 6dR dS 6dK dT 6ddU 6d dV 6ddW 6ddX 6dY dZ 6dY d[ 6dd\ 6dd] 6d d^ 6dd_ 6d` da 6d;db 6dc dd 6de df 6dg dh 6di dj 6dk dl 6dk dm 6dn do 6dp dq 6dr ds 6dt du 6dv dw 6dx dy 6dz d{ 6d| d} 6d~ d 6d d 6d d 6d d 6dg d 6d d 6d d 6dd 6d d 6d d 6dd 6d d 6d d 6d d 6d d 6d d 6d d 6dd 6d d 6d d 6d d 6dd 6d d 6d d 6d d 6d d 6d d 6d d 6dd 6dd 6dd 6d d 6d d 6d d 6dOd 6d d 6d d 6d d 6d d 6dd 6d d 6dd 6d d 6d d 6d d 6d d 6d d 6d d 6d d 6d d 6d d 6d d 6d d 6dOd 6d d 6d d 6d d 6d d 6dOd 6dd 6d d 6d d 6d d 6d d 6d d 6d d 6d d 6d d 6d d 6d d 6d d 6d d 6d d 6d d 6d d 6d d 6d d 6d d 6d d 6d d 6d d 6d d 6did 6dd 6d d 6d d 6d[d 6d d 6d d 6d d 6d d 6dd 6d d 6d'd 6d d 6d'd 6d! d" 6d# d$ 6d# d% 6d)d& 6d+d' 6d( d) 6d* d+ 6d| d, 6d- d. 6d/ d0 6d1 d2 6d3 d4 6d5 d6 6d7 d8 6d9 d: 6d; d< 6d= d> 6d? d@ 6dA dB 6dC dD 6dE dF 6dG dH 6dI dJ 6dK dL 6dM dN 6d/dO 6dA dP 6dQ dR 6dS dT 6d6dU 6dydV 6dW dX 6dY dZ 6d[ d\ 6d] d^ 6d)d_ 6d3 d` 6d&da 6dSdb 6dc dd 6d;de 6d-df 6ddg 6de dh 6di dj 6dYdk 6d] dl 6d[dm 6dadn 6dado 6dp dq 6dr ds 6dt du 6dv dw 6dx dy 6dz d{ 6d! d| 6d} d~ 6dYd 6d d 6d]d 6dcd 6d d 6d9d 6d d 6d]d 6d d 6d&d 6dSd 6d d 6d d 6d d 6dd 6dc d 6d d 6d d 6d d 6d d 6d d 6d d 6d d 6d d 6d d 6d d 6d d 6d d 6d d 6d d 6d d 6d1d 6dmd 6dod 6d d 6dqd 6d- d 6d d 6d d 6d d 6d d 6d d 6d d 6ddd 6d d 6d d 6dd 6d d 6d d 6d-d 6d, d 6dd 6d d 6d d 6d d 6d d 6d d 6d}d 6dcd 6d d 6d d 6dC d 6d8d 6d d 6d d 6dd 6ddC6d d 6d d 6d} d 6di d 6d d 6d d 6d d 6d d 6d d 6dId 6dd 6d d 6d d 6d d 6d d 6d d 6d d 6d d 6d d 6d d 6d d 6dd 6dd 6d2d 6dAd 6dd 6d d 6d d 6d d 6d d 6d#d 6d d 6d d 6d d 6d d 6dd 6dUd 6d d 6dd 6dd! 6d" d# 6dd$ 6d d% 6d& d' 6d( d) 6dn d* 6dd+ 6d, d- 6d. d/ 6dd0 6d1 d2 6dd3 6d4 d5 6d6 d7 6d6 d8 6d9 d: 6d; d< 6dd= 6d> d? 6d@ dA 6dB dC 6dD dE 6ddF 6dG dH 6dI dJ 6dK dL 6ddM 6dN dO 6dP dQ 6ddR 6dS dT 6dU dV 6dW dX 6ddY 6dZ d[ 6dZ d\ 6dd] 6dd^ 6dd_ 6dd` 6dda 6db dc 6dd de 6df dg 6ddh 6di dj 6dk dl 6dm dn 6do dp 6ddq 6dr ds 6dt du 6ddv 6ddw 6dx dy 6ddz 6d{ d| 6dd} 6dd~ 6dd 6d d 6dd 6dd 6dd 6dd 6dd 6dd 6dd 6dd 6dd 6d@ d 6d d 6d d 6dd 6d d 6d d 6dd 6d d 6d> d 6d d 6d d 6d d 6dd 6d d 6d d 6dd 6d d 6dd 6dd 6dd 6dd 6dd 6dd 6dd 6dd 6d d 6d d 6d d 6dd 6d d 6d d 6dd 6d d 6d d 6dd 6dd 6d d 6d d 6dd 6dd 6d d 6d d 6d d 6dd 6dd 6dd 6dd 6dd 6dG d 6d d 6d d 6d d 6d d 6dd 6dd 6dd 6dd 6dd 6d d 6dd 6dd 6d d 6dd 6dd 6dd 6dd 6dd 6dd 6d d 6d d 6dd 6dd 6dd 6d d 6dd 6dd 6d d 6d d 6d d 6dd 6d d 6d d 6dd 6d d 6d d 6dd 6dd 6dd 6dd 6dd 6d d 6d d 6d d 6d d 6d d 6d d 6d d 6d d 6d d 6d d 6d d 6d d 6d d 6d d 6d d 6d d 6d! d" 6d# d$ 6d% d& 6d' d( 6dd) 6dd* 6d+ d, 6drd- 6d. d/ 6d. d0 6dtd1 6dvd2 6d3 d4 6d3 d5 6d6 d7 6dxd8 6d9 d: 6d; d< 6dd= 6d> d? 
6d@ dA 6dB dC 6dD dE 6dF dG 6dH dI 6dH dJ 6dK dL 6dM dN 6d0dO 6ddP 6dmdQ 6dR dS 6dT dU 6dIdV 6dW dX 6dY dZ 6d[ d\ 6d] d^ 6d_ d` 6dda 6db dc 6dd de 6df dg 6ddh 6di dj 6dodk 6dl dm 6dn do 6dn dp 6dq dr 6dq ds 6dt du 6dt dv 6dw dx 6dy dz 6d{ d| 6d} d~ 6dn d 6d d 6d d 6d d 6d d 6d d 6dd 6d d 6d d 6d d 6d d 6dd 6d d 6d d 6dd 6d d 6d d 6dQd 6d d 6d d 6d d 6d d 6d}d 6d d 6d d 6d d 6d d 6dd 6d d 6d d 6d d 6d d 6dg d 6d d 6dg d 6d d 6d d 6dd 6d d 6d" d 6d d 6d d 6d[d 6d[d 6d d 6d d 6d[d 6d d 6d d 6d d 6d d 6dbd 6d d 6d d 6dfd 6ddd 6dbd 6d d 6dfd 6ddd 6d d 6d d 6d d 6dhd 6d d 6d^d 6d d 6d d 6d d 6dld 6d d 6d d 6d d 6dod 6dod 6dhd 6d d 6d d 6d d 6d d 6d d 6d d 6d d 6dd6dd6dd6dd6dd6d d 6dud 6dudG6dd 6dd 6d d6dd6dd6dd6dd6dd6dd6dd6dd6dd6dd6dd 6dd!6dd"6d#d$6dd%6d&d'6d(d)6d*d+6d~ d,6d d-6d.d/6d0d16d2d36d4d56d6d76d8d96dzd:6dd;6d<d=6d>d?6d@dA6dBdC6dDdE6dFdG6dHdI6dJdK6ddL6d>dM6dNdO6dPdQ6dRdS6ddT6ddU6dVdW6ddX6ddY6ddZ6dd[6d\d]6dd^6dd_6d`da6ddb6dcdd6d,de6ddf6dgdh6didj6dkdl6ddm6d2dn6d,do6ddp6ddq6dadr6dsdt6d4du6dvdw6dxdy6d dz6dd{6dad|6d}d~6dd6dd6dd6dd6dd6dd6dd6dd6dd6dd6dd6dd6dd6dsd6dd6dd6dd6d@d6dd6dd6dvd6dd6dd6dd6dd6dd6dd6dd6dd6d d6d d6dd6dd6dd6dd6dd6dd6dd6dd6dd6dd6dd6dd6dd6dd6d$ d6dd6dd6dt d6dzd6dzd6dd6dd6dd6dd6dvd6dvd6dd6dd6d d6dd6dd6dd6dd6dd6dd6dd6dd6dd6dd6d;d6dd6d=d6d=d6dd6dd6dd6dd6dd6dd6dd6d)d6dvd6dd6dd6dd 6d d 6d d 6dd6dd6dd6dd6dd6dd6dd6dd6dd6dd6dd 6d!d"6dd#6d$d%6dd&6dd'6dd(6dd)6dd*6dd+6dd,6dd-6dd.6dd/6dvd06dvd16dd26d3d46dvd56d d66dd76d8d96dd:6d d;6d d<6d d=6d>d?6d@dA6dBdC6d dD6dEdF6dGdH6dIdJ6dKdL6dMdN6dOdP6d>dQ6d dR6d@dS6dKdT6dIdU6dVdW6dXdY6dZd[6d d\6dd]6dd^6dd_6dd`6dda6ddb6ddc6ddd6dedf6dgdh6dgdi6djdk6djdl6dmdn6dmdo6ddp6dqdr6dsdt6dudv6ddw6dxdy6dzd{6d|d}6d~d6dd6dd6dd6dd6dd6dd6dqd6dd6dd6dd6dd6dd6dd6dv d6dxd6dxd6dd6dd6dd6dd6dd6dMd6dd6dd6dd6dEd6dd6dd6d3d6d3d6dd6dd6dd6dAd6d;d6d9d6dAd6d;d6dd6dd6dd6dd6dd6dd6dd6dd6d d6d{ d6d0d6dd6dd6dd6dd6dd6dd6d"d6dd6d: d6d d6dId6dd6dd6dd6dd6dd6dd6dd6dd6dd6dd6dd6dd6dd6dd6dd6dd6dd6dd6dd6dd6dd6dd6dd6dd6dd6dd6dd6dwd6dd6d{d6d d 6d d 6d d6d d6dOd6dd6dd6dd6dd6dd6dd6dd6dd6dd6dd 6d!d"6dd#6dyd$6dOd%6dd&6dnd'6d(d)6dd*6d(d+6d,d-6d.d/6d.d06d1d26d3d46d5d66d7d86d9d:6d;d<6dd=6dd>6d,d?6d@dA6d@dB6dCdD6ddE6dFdG6dHdI6ddJ6dKdL6d dM6d dN6ds dO6d dP6d dQ6dodR6dydS6dkdT6ddU6dVdW6dXdY6dZd[6d\d]6dd^6dEd_6dd`6dadb6ddc6di dd6dedf6dgdh6didj6ddk6ddl6dmdn6dEdo6ddp6ddq6drds6dodt6ddu6dvdw6dXdx6dVdy6d\dz6dZd{6d|d}6d~d6dd6dd6dd6dd6dd6dd6dd6dd6dd6dd6dd6dd6dld6dd6dd6d d6dod6dd6d d6dmd6d d6dd6dd6dd6dd6dd6dd6dqd6dd6dd6dd6dd6dd6dd6dd6dd6dd6dd6dd6dd6dd6dd6dd6dd6dd6dd6dd6dd6dd6dd6dd6dd6dd6dd6dd6dd6dd6dd6dd6dd6dd6Z/i"dd6d d6d d6dd6d d6d d6dyd6dn d6dd6dd6did6d d6dd6d d6dd6dd6dd6dd6dd6d8d6dd6d6d6dd6d*d6do d6d d6dd6d"d6dd6dd6d@ d6dd6dd6dd6Z0idd6dd6d d 6d d 6d d6dd6dd6dd6Z1e e1d e1de1dgZ2egejD]\Z3Z4e4e3f^qDNZ5de5d' instead.u'expected-tag-name-but-got-right-bracketuSExpected tag name. Got '?' instead. (HTML doesn't support processing instructions.)u'expected-tag-name-but-got-question-marku-Expected tag name. Got something else insteaduexpected-tag-nameu6Expected closing tag. Got '>' instead. Ignoring ''.u*expected-closing-tag-but-got-right-bracketu-Expected closing tag. Unexpected end of file.u expected-closing-tag-but-got-eofu<Expected closing tag. Unexpected character '%(data)s' found.u!expected-closing-tag-but-got-charu'Unexpected end of file in the tag name.ueof-in-tag-nameu8Unexpected end of file. 
Expected attribute name instead.u#expected-attribute-name-but-got-eofu)Unexpected end of file in attribute name.ueof-in-attribute-nameu#Invalid character in attribute nameu#invalid-character-in-attribute-nameu#Dropped duplicate attribute on tag.uduplicate-attributeu1Unexpected end of file. Expected = or end of tag.u$expected-end-of-tag-name-but-got-eofu1Unexpected end of file. Expected attribute value.u$expected-attribute-value-but-got-eofu*Expected attribute value. Got '>' instead.u.expected-attribute-value-but-got-right-bracketu"Unexpected = in unquoted attributeu"equals-in-unquoted-attribute-valueu*Unexpected character in unquoted attributeu0unexpected-character-in-unquoted-attribute-valueu*Unexpected character after attribute name.u&invalid-character-after-attribute-nameu+Unexpected character after attribute value.u*unexpected-character-after-attribute-valueu.Unexpected end of file in attribute value (").u#eof-in-attribute-value-double-quoteu.Unexpected end of file in attribute value (').u#eof-in-attribute-value-single-quoteu*Unexpected end of file in attribute value.u eof-in-attribute-value-no-quotesu)Unexpected end of file in tag. Expected >u#unexpected-EOF-after-solidus-in-tagu/Unexpected character after / in tag. Expected >u)unexpected-character-after-solidus-in-tagu&Expected '--' or 'DOCTYPE'. Not found.uexpected-dashes-or-doctypeu Unexpected ! after -- in commentu,unexpected-bang-after-double-dash-in-commentu$Unexpected space after -- in commentu-unexpected-space-after-double-dash-in-commentuIncorrect comment.uincorrect-commentu"Unexpected end of file in comment.ueof-in-commentu%Unexpected end of file in comment (-)ueof-in-comment-end-dashu+Unexpected '-' after '--' found in comment.u,unexpected-dash-after-double-dash-in-commentu'Unexpected end of file in comment (--).ueof-in-comment-double-dashueof-in-comment-end-space-stateueof-in-comment-end-bang-stateu&Unexpected character in comment found.uunexpected-char-in-commentu(No space after literal string 'DOCTYPE'.uneed-space-after-doctypeu.Unexpected > character. Expected DOCTYPE name.u+expected-doctype-name-but-got-right-bracketu.Unexpected end of file. Expected DOCTYPE name.u!expected-doctype-name-but-got-eofu'Unexpected end of file in DOCTYPE name.ueof-in-doctype-nameu"Unexpected end of file in DOCTYPE.ueof-in-doctypeu%Expected space or '>'. Got '%(data)s'u*expected-space-or-right-bracket-in-doctypeuUnexpected end of DOCTYPE.uunexpected-end-of-doctypeu Unexpected character in DOCTYPE.uunexpected-char-in-doctypeuXXX innerHTML EOFueof-in-innerhtmluUnexpected DOCTYPE. Ignored.uunexpected-doctypeu%html needs to be the first start tag.u non-html-rootu)Unexpected End of file. Expected DOCTYPE.uexpected-doctype-but-got-eofuErroneous DOCTYPE.uunknown-doctypeu2Unexpected non-space characters. Expected DOCTYPE.uexpected-doctype-but-got-charsu2Unexpected start tag (%(name)s). Expected DOCTYPE.u"expected-doctype-but-got-start-tagu0Unexpected end tag (%(name)s). Expected DOCTYPE.u expected-doctype-but-got-end-tagu?Unexpected end tag (%(name)s) after the (implied) root element.uend-tag-after-implied-rootu4Unexpected end of file. Expected end tag (%(name)s).u&expected-named-closing-tag-but-got-eofu4Unexpected start tag head in existing head. Ignored.u!two-heads-are-not-better-than-oneu'Unexpected end tag (%(name)s). Ignored.uunexpected-end-tagu;Unexpected start tag (%(name)s) that can be in head. 
Moved.u#unexpected-start-tag-out-of-my-headu Unexpected start tag (%(name)s).uunexpected-start-taguMissing end tag (%(name)s).umissing-end-taguMissing end tags (%(name)s).umissing-end-tagsuCUnexpected start tag (%(startName)s) implies end tag (%(endName)s).u$unexpected-start-tag-implies-end-tagu@Unexpected start tag (%(originalName)s). Treated as %(newName)s.uunexpected-start-tag-treated-asu,Unexpected start tag %(name)s. Don't use it!udeprecated-tagu'Unexpected start tag %(name)s. Ignored.uunexpected-start-tag-ignoreduEUnexpected end tag (%(gotName)s). Missing end tag (%(expectedName)s).u$expected-one-end-tag-but-got-anotheru:End tag (%(name)s) seen too early. Expected other end tag.uend-tag-too-earlyuFUnexpected end tag (%(gotName)s). Expected end tag (%(expectedName)s).uend-tag-too-early-namedu+End tag (%(name)s) seen too early. Ignored.uend-tag-too-early-ignoreduQEnd tag (%(name)s) violates step 1, paragraph 1 of the adoption agency algorithm.uadoption-agency-1.1uQEnd tag (%(name)s) violates step 1, paragraph 2 of the adoption agency algorithm.uadoption-agency-1.2uQEnd tag (%(name)s) violates step 1, paragraph 3 of the adoption agency algorithm.uadoption-agency-1.3uQEnd tag (%(name)s) violates step 4, paragraph 4 of the adoption agency algorithm.uadoption-agency-4.4u>Unexpected end tag (%(originalName)s). Treated as %(newName)s.uunexpected-end-tag-treated-asu'This element (%(name)s) has no end tag.u no-end-tagu9Unexpected implied end tag (%(name)s) in the table phase.u#unexpected-implied-end-tag-in-tableu>Unexpected implied end tag (%(name)s) in the table body phase.u(unexpected-implied-end-tag-in-table-bodyuDUnexpected non-space characters in table context caused voodoo mode.u$unexpected-char-implies-table-voodoou3Unexpected input with type hidden in table context.u unexpected-hidden-input-in-tableu!Unexpected form in table context.uunexpected-form-in-tableuDUnexpected start tag (%(name)s) in table context caused voodoo mode.u)unexpected-start-tag-implies-table-voodoouBUnexpected end tag (%(name)s) in table context caused voodoo mode.u'unexpected-end-tag-implies-table-voodoouCUnexpected table cell start tag (%(name)s) in the table body phase.uunexpected-cell-in-table-bodyuFGot table cell end tag (%(name)s) while required end tags are missing.uunexpected-cell-end-tagu?Unexpected end tag (%(name)s) in the table body phase. Ignored.u unexpected-end-tag-in-table-bodyu=Unexpected implied end tag (%(name)s) in the table row phase.u'unexpected-implied-end-tag-in-table-rowu>Unexpected end tag (%(name)s) in the table row phase. Ignored.uunexpected-end-tag-in-table-rowuJUnexpected select start tag in the select phase treated as select end tag.uunexpected-select-in-selectu/Unexpected input start tag in the select phase.uunexpected-input-in-selectuBUnexpected start tag token (%(name)s in the select phase. Ignored.uunexpected-start-tag-in-selectu;Unexpected end tag (%(name)s) in the select phase. 
Ignored.uunexpected-end-tag-in-selectuKUnexpected table element start tag (%(name)s) in the select in table phase.u5unexpected-table-element-start-tag-in-select-in-tableuIUnexpected table element end tag (%(name)s) in the select in table phase.u3unexpected-table-element-end-tag-in-select-in-tableu8Unexpected non-space characters in the after body phase.uunexpected-char-after-bodyu>Unexpected start tag token (%(name)s) in the after body phase.uunexpected-start-tag-after-bodyu<Unexpected end tag token (%(name)s) in the after body phase.uunexpected-end-tag-after-bodyu@Unexpected characters in the frameset phase. Characters ignored.uunexpected-char-in-framesetuEUnexpected start tag token (%(name)s) in the frameset phase. Ignored.u unexpected-start-tag-in-framesetuFUnexpected end tag token (frameset) in the frameset phase (innerHTML).u)unexpected-frameset-in-frameset-innerhtmluCUnexpected end tag token (%(name)s) in the frameset phase. Ignored.uunexpected-end-tag-in-framesetuEUnexpected non-space characters in the after frameset phase. Ignored.uunexpected-char-after-framesetuEUnexpected start tag (%(name)s) in the after frameset phase. Ignored.u#unexpected-start-tag-after-framesetuCUnexpected end tag (%(name)s) in the after frameset phase. Ignored.u!unexpected-end-tag-after-framesetu(Unexpected end tag after body(innerHtml)u'unexpected-end-tag-after-body-innerhtmlu6Unexpected non-space characters. Expected end of file.uexpected-eof-but-got-charu6Unexpected start tag (%(name)s). Expected end of file.uexpected-eof-but-got-start-tagu4Unexpected end tag (%(name)s). Expected end of file.uexpected-eof-but-got-end-tagu/Unexpected end of file. Expected table content.u eof-in-tableu0Unexpected end of file. Expected select content.u eof-in-selectu2Unexpected end of file. Expected frameset content.ueof-in-framesetu0Unexpected end of file. Expected script content.ueof-in-script-in-scriptu0Unexpected end of file. Expected foreign contentueof-in-foreign-landsu0Trailing solidus not allowed on element %(name)su&non-void-element-with-trailing-solidusu2Element %(name)s not allowed in a non-html contextu*unexpected-html-element-in-foreign-contentu*Unexpected end tag (%(name)s) before html.uunexpected-end-tag-before-htmlu9Element %(name)s not allowed in a inhead-noscript contextuunexpected-inhead-noscript-tagu8Unexpected end of file. Expected inhead-noscript contentueof-in-head-noscriptu@Unexpected non-space character. 
Expected inhead-noscript contentuchar-in-head-noscriptu0Undefined error (this sucks and should be fixed)uXXX-undefined-erroruhttp://www.w3.org/1999/xhtmluhtmlu"http://www.w3.org/1998/Math/MathMLumathmluhttp://www.w3.org/2000/svgusvguhttp://www.w3.org/1999/xlinkuxlinku$http://www.w3.org/XML/1998/namespaceuxmluhttp://www.w3.org/2000/xmlns/uxmlnsuappletucaptionumarqueeuobjectutableutduthumiumoumnumsumtextuannotation-xmlu foreignObjectudescutitleuaububigucodeuemufontuiunobrususmallustrikeustronguttuuuaddressuareauarticleuasideubaseubasefontubgsoundu blockquoteubodyubrubuttonucenterucolucolgroupucommanduddudetailsudirudivudludtuembedufieldsetufigureufooteruformuframeuframesetuh1uh2uh3uh4uh5uh6uheaduheaderuhruiframeuimageuimguinputuisindexuliulinkulistingumenuumetaunavunoembedunoframesunoscriptuolupuparamu plaintextupreuscriptusectionuselectustyleutbodyutextareautfootutheadutruuluwbruxmpu annotaion-xmlu attributeNameu attributenameu attributeTypeu attributetypeu baseFrequencyu basefrequencyu baseProfileu baseprofileucalcModeucalcmodeu clipPathUnitsu clippathunitsucontentScriptTypeucontentscripttypeucontentStyleTypeucontentstyletypeudiffuseConstantudiffuseconstantuedgeModeuedgemodeuexternalResourcesRequireduexternalresourcesrequiredu filterResu filterresu filterUnitsu filterunitsuglyphRefuglyphrefugradientTransformugradienttransformu gradientUnitsu gradientunitsu kernelMatrixu kernelmatrixukernelUnitLengthukernelunitlengthu keyPointsu keypointsu keySplinesu keysplinesukeyTimesukeytimesu lengthAdjustu lengthadjustulimitingConeAngleulimitingconeangleu markerHeightu markerheightu markerUnitsu markerunitsu markerWidthu markerwidthumaskContentUnitsumaskcontentunitsu maskUnitsu maskunitsu numOctavesu numoctavesu pathLengthu pathlengthupatternContentUnitsupatterncontentunitsupatternTransformupatterntransformu patternUnitsu patternunitsu pointsAtXu pointsatxu pointsAtYu pointsatyu pointsAtZu pointsatzu preserveAlphau preservealphaupreserveAspectRatioupreserveaspectratiouprimitiveUnitsuprimitiveunitsurefXurefxurefYurefyu repeatCountu repeatcountu repeatDuru repeatdururequiredExtensionsurequiredextensionsurequiredFeaturesurequiredfeaturesuspecularConstantuspecularconstantuspecularExponentuspecularexponentu spreadMethodu spreadmethodu startOffsetu startoffsetu stdDeviationu stddeviationu stitchTilesu stitchtilesu surfaceScaleu surfacescaleusystemLanguageusystemlanguageu tableValuesu tablevaluesutargetXutargetxutargetYutargetyu textLengthu textlengthuviewBoxuviewboxu viewTargetu viewtargetuxChannelSelectoruxchannelselectoruyChannelSelectoruychannelselectoru zoomAndPanu zoomandpanu definitionURLu definitionurluactuateu xlink:actuateuarcroleu xlink:arcroleuhrefu xlink:hrefuroleu xlink:roleushowu xlink:showu xlink:titleutypeu xlink:typeuxml:baseulanguxml:languspaceu xml:spaceu xmlns:xlinku u u u u u event-sourceusourceutracku irrelevantuuscopeduismapuautoplayucontrolsuaudiouvideoudeferuasyncuopenumultipleudisabledudatagriduhiddenucheckedudefaultunoshadeu autosubmitureadonlyuselecteduoptionuoptgroupu autofocusurequireduoutputi ii ii i& i i! 
ii0 i`i9 iRi}i i i i i" i i ii"!iai: iSi~ixult;ugt;uamp;uapos;uquot;uÆuAEliguAElig;u&uAMPuAMP;uÁuAacuteuAacute;uĂuAbreve;uÂuAcircuAcirc;uАuAcy;u𝔄uAfr;uÀuAgraveuAgrave;uΑuAlpha;uĀuAmacr;u⩓uAnd;uĄuAogon;u𝔸uAopf;u⁡uApplyFunction;uÅuAringuAring;u𝒜uAscr;u≔uAssign;uÃuAtildeuAtilde;uÄuAumluAuml;u∖u Backslash;u⫧uBarv;u⌆uBarwed;uБuBcy;u∵uBecause;uℬu Bernoullis;uΒuBeta;u𝔅uBfr;u𝔹uBopf;u˘uBreve;uBscr;u≎uBumpeq;uЧuCHcy;u©uCOPYuCOPY;uĆuCacute;u⋒uCap;uⅅuCapitalDifferentialD;uℭuCayleys;uČuCcaron;uÇuCcediluCcedil;uĈuCcirc;u∰uCconint;uĊuCdot;u¸uCedilla;u·u CenterDot;uCfr;uΧuChi;u⊙u CircleDot;u⊖u CircleMinus;u⊕u CirclePlus;u⊗u CircleTimes;u∲uClockwiseContourIntegral;u”uCloseCurlyDoubleQuote;u’uCloseCurlyQuote;u∷uColon;u⩴uColone;u≡u Congruent;u∯uConint;u∮uContourIntegral;uℂuCopf;u∐u Coproduct;u∳u CounterClockwiseContourIntegral;u⨯uCross;u𝒞uCscr;u⋓uCup;u≍uCupCap;uDD;u⤑u DDotrahd;uЂuDJcy;uЅuDScy;uЏuDZcy;u‡uDagger;u↡uDarr;u⫤uDashv;uĎuDcaron;uДuDcy;u∇uDel;uΔuDelta;u𝔇uDfr;u´uDiacriticalAcute;u˙uDiacriticalDot;u˝uDiacriticalDoubleAcute;u`uDiacriticalGrave;u˜uDiacriticalTilde;u⋄uDiamond;uⅆuDifferentialD;u𝔻uDopf;u¨uDot;u⃜uDotDot;u≐u DotEqual;uDoubleContourIntegral;u DoubleDot;u⇓uDoubleDownArrow;u⇐uDoubleLeftArrow;u⇔uDoubleLeftRightArrow;uDoubleLeftTee;u⟸uDoubleLongLeftArrow;u⟺uDoubleLongLeftRightArrow;u⟹uDoubleLongRightArrow;u⇒uDoubleRightArrow;u⊨uDoubleRightTee;u⇑uDoubleUpArrow;u⇕uDoubleUpDownArrow;u∥uDoubleVerticalBar;u↓u DownArrow;u⤓u DownArrowBar;u⇵uDownArrowUpArrow;ȗu DownBreve;u⥐uDownLeftRightVector;u⥞uDownLeftTeeVector;u↽uDownLeftVector;u⥖uDownLeftVectorBar;u⥟uDownRightTeeVector;u⇁uDownRightVector;u⥗uDownRightVectorBar;u⊤uDownTee;u↧u DownTeeArrow;u Downarrow;u𝒟uDscr;uĐuDstrok;uŊuENG;uÐuETHuETH;uÉuEacuteuEacute;uĚuEcaron;uÊuEcircuEcirc;uЭuEcy;uĖuEdot;u𝔈uEfr;uÈuEgraveuEgrave;u∈uElement;uĒuEmacr;u◻uEmptySmallSquare;u▫uEmptyVerySmallSquare;uĘuEogon;u𝔼uEopf;uΕuEpsilon;u⩵uEqual;u≂u EqualTilde;u⇌u Equilibrium;uℰuEscr;u⩳uEsim;uΗuEta;uËuEumluEuml;u∃uExists;uⅇu ExponentialE;uФuFcy;u𝔉uFfr;u◼uFilledSmallSquare;u▪uFilledVerySmallSquare;u𝔽uFopf;u∀uForAll;uℱu Fouriertrf;uFscr;uЃuGJcy;u>uGTuGT;uΓuGamma;uϜuGammad;uĞuGbreve;uĢuGcedil;uĜuGcirc;uГuGcy;uĠuGdot;u𝔊uGfr;u⋙uGg;u𝔾uGopf;u≥u GreaterEqual;u⋛uGreaterEqualLess;u≧uGreaterFullEqual;u⪢uGreaterGreater;u≷u GreaterLess;u⩾uGreaterSlantEqual;u≳u GreaterTilde;u𝒢uGscr;u≫uGt;uЪuHARDcy;uˇuHacek;u^uHat;uĤuHcirc;uℌuHfr;uℋu HilbertSpace;uℍuHopf;u─uHorizontalLine;uHscr;uĦuHstrok;u HumpDownHump;u≏u HumpEqual;uЕuIEcy;uIJuIJlig;uЁuIOcy;uÍuIacuteuIacute;uÎuIcircuIcirc;uИuIcy;uİuIdot;uℑuIfr;uÌuIgraveuIgrave;uIm;uĪuImacr;uⅈu ImaginaryI;uImplies;u∬uInt;u∫u Integral;u⋂u Intersection;u⁣uInvisibleComma;u⁢uInvisibleTimes;uĮuIogon;u𝕀uIopf;uΙuIota;uℐuIscr;uĨuItilde;uІuIukcy;uÏuIumluIuml;uĴuJcirc;uЙuJcy;u𝔍uJfr;u𝕁uJopf;u𝒥uJscr;uЈuJsercy;uЄuJukcy;uХuKHcy;uЌuKJcy;uΚuKappa;uĶuKcedil;uКuKcy;u𝔎uKfr;u𝕂uKopf;u𝒦uKscr;uЉuLJcy;u⃒unvgt;u⧞unvinfin;u⤂unvlArr;u≤⃒unvle;u<⃒unvlt;u⊴⃒unvltrie;u⤃unvrArr;u⊵⃒unvrtrie;u∼⃒unvsim;u⇖unwArr;u⤣unwarhk;unwarr;unwarrow;u⤧unwnear;uoS;uóuoacuteuoacute;uoast;uocir;uôuocircuocirc;uоuocy;uodash;uőuodblac;u⨸uodiv;uodot;u⦼uodsold;uœuoelig;u⦿uofcir;u𝔬uofr;u˛uogon;uòuograveuograve;u⧁uogt;u⦵uohbar;uohm;uoint;uolarr;u⦾uolcir;u⦻uolcross;uoline;u⧀uolt;uōuomacr;uωuomega;uοuomicron;u⦶uomid;uominus;u𝕠uoopf;u⦷uopar;u⦹uoperp;uoplus;u∨uor;uorarr;u⩝uord;uℴuorder;uorderof;uªuordfuordf;uºuordmuordm;u⊶uorigof;u⩖uoror;u⩗uorslope;u⩛uorv;uoscr;uøuoslashuoslash;u⊘uosol;uõuotildeuotilde;uotimes;u⨶u otimesas;uöuoumluouml;u⌽uovbar;upar;u¶uparaupara;u 
parallel;u⫳uparsim;u⫽uparsl;upart;uпupcy;u%upercnt;u.uperiod;u‰upermil;uperp;u‱upertenk;u𝔭upfr;uφuphi;uϕuphiv;uphmmat;u☎uphone;uπupi;u pitchfork;uϖupiv;uplanck;uℎuplanckh;uplankv;u+uplus;u⨣u plusacir;uplusb;u⨢upluscir;uplusdo;u⨥uplusdu;u⩲upluse;uplusmnuplusmn;u⨦uplussim;u⨧uplustwo;upm;u⨕u pointint;u𝕡upopf;u£upoundupound;upr;u⪳uprE;u⪷uprap;uprcue;upre;uprec;u precapprox;u preccurlyeq;upreceq;u⪹u precnapprox;u⪵u precneqq;u⋨u precnsim;uprecsim;u′uprime;uprimes;uprnE;uprnap;uprnsim;uprod;u⌮u profalar;u⌒u profline;u⌓u profsurf;uprop;upropto;uprsim;u⊰uprurel;u𝓅upscr;uψupsi;u upuncsp;u𝔮uqfr;uqint;u𝕢uqopf;u⁗uqprime;u𝓆uqscr;u quaternions;u⨖uquatint;u?uquest;uquesteq;uquoturAarr;urArr;u⤜urAtail;urBarr;u⥤urHar;u∽̱urace;uŕuracute;uradic;u⦳u raemptyv;urang;u⦒urangd;u⦥urange;urangle;u»uraquouraquo;urarr;u⥵urarrap;urarrb;u⤠urarrbfs;u⤳urarrc;u⤞urarrfs;urarrhk;urarrlp;u⥅urarrpl;u⥴urarrsim;u↣urarrtl;u↝urarrw;u⤚uratail;u∶uratio;u rationals;urbarr;u❳urbbrk;u}urbrace;u]urbrack;u⦌urbrke;u⦎urbrksld;u⦐urbrkslu;uřurcaron;uŗurcedil;urceil;urcub;uрurcy;u⤷urdca;u⥩urdldhar;urdquo;urdquor;u↳urdsh;ureal;urealine;u realpart;ureals;u▭urect;uregureg;u⥽urfisht;urfloor;u𝔯urfr;urhard;urharu;u⥬urharul;uρurho;uϱurhov;u rightarrow;urightarrowtail;urightharpoondown;urightharpoonup;urightleftarrows;urightleftharpoons;u⇉urightrightarrows;urightsquigarrow;u⋌urightthreetimes;u˚uring;u risingdotseq;urlarr;urlhar;u‏urlm;u⎱urmoust;u rmoustache;u⫮urnmid;u⟭uroang;u⇾uroarr;urobrk;u⦆uropar;u𝕣uropf;u⨮uroplus;u⨵urotimes;u)urpar;u⦔urpargt;u⨒u rppolint;urrarr;u›ursaquo;u𝓇urscr;ursh;ursqb;ursquo;ursquor;urthree;u⋊urtimes;u▹urtri;urtrie;urtrif;u⧎u rtriltri;u⥨uruluhar;u℞urx;uśusacute;usbquo;usc;u⪴uscE;u⪸uscap;ušuscaron;usccue;usce;uşuscedil;uŝuscirc;u⪶uscnE;u⪺uscnap;u⋩uscnsim;u⨓u scpolint;uscsim;uсuscy;u⋅usdot;usdotb;u⩦usdote;u⇘useArr;usearhk;usearr;usearrow;u§usectusect;u;usemi;u⤩useswar;u setminus;usetmn;u✶usext;u𝔰usfr;usfrown;u♯usharp;uщushchcy;uшushcy;u shortmid;ushortparallel;u­ushyushy;uσusigma;uςusigmaf;usigmav;usim;u⩪usimdot;usime;usimeq;u⪞usimg;u⪠usimgE;u⪝usiml;u⪟usimlE;u≆usimne;u⨤usimplus;u⥲usimrarr;uslarr;usmallsetminus;u⨳usmashp;u⧤u smeparsl;usmid;u⌣usmile;u⪪usmt;u⪬usmte;u⪬︀usmtes;uьusoftcy;u/usol;u⧄usolb;u⌿usolbar;u𝕤usopf;u♠uspades;u spadesuit;uspar;usqcap;u⊓︀usqcaps;usqcup;u⊔︀usqcups;usqsub;usqsube;u sqsubset;u sqsubseteq;usqsup;usqsupe;u sqsupset;u sqsupseteq;usqu;usquare;usquarf;usquf;usrarr;u𝓈usscr;ussetmn;ussmile;usstarf;u☆ustar;ustarf;ustraightepsilon;u straightphi;ustrns;u⊂usub;u⫅usubE;u⪽usubdot;usube;u⫃usubedot;u⫁usubmult;u⫋usubnE;u⊊usubne;u⪿usubplus;u⥹usubrarr;usubset;u subseteq;u subseteqq;u subsetneq;u subsetneqq;u⫇usubsim;u⫕usubsub;u⫓usubsup;usucc;u succapprox;u succcurlyeq;usucceq;u succnapprox;u succneqq;u succnsim;usuccsim;usum;u♪usung;u¹usup1usup1;u²usup2usup2;u³usup3usup3;usup;u⫆usupE;u⪾usupdot;u⫘usupdsub;usupe;u⫄usupedot;u⟉usuphsol;u⫗usuphsub;u⥻usuplarr;u⫂usupmult;u⫌usupnE;u⊋usupne;u⫀usupplus;usupset;u supseteq;u supseteqq;u supsetneq;u supsetneqq;u⫈usupsim;u⫔usupsub;u⫖usupsup;u⇙uswArr;uswarhk;uswarr;uswarrow;u⤪uswnwar;ußuszliguszlig;u⌖utarget;uτutau;utbrk;uťutcaron;uţutcedil;uтutcy;utdot;u⌕utelrec;u𝔱utfr;uthere4;u therefore;uθutheta;uϑu thetasym;uthetav;u thickapprox;u thicksim;uthinsp;uthkap;uthksim;uþuthornuthorn;utilde;u×utimesutimes;utimesb;u⨱u timesbar;u⨰utimesd;utint;utoea;utop;u⌶utopbot;u⫱utopcir;u𝕥utopf;u⫚utopfork;utosa;u‴utprime;utrade;u▵u triangle;u triangledown;u triangleleft;utrianglelefteq;u≜u triangleq;utriangleright;utrianglerighteq;u◬utridot;utrie;u⨺u triminus;u⨹utriplus;u⧍utrisb;u⨻utritime;u⏢u 
trpezium;u𝓉utscr;uцutscy;uћutshcy;uŧutstrok;utwixt;utwoheadleftarrow;utwoheadrightarrow;uuArr;u⥣uuHar;uúuuacuteuuacute;uuarr;uўuubrcy;uŭuubreve;uûuucircuucirc;uуuucy;uudarr;uűuudblac;uudhar;u⥾uufisht;u𝔲uufr;uùuugraveuugrave;uuharl;uuharr;u▀uuhblk;u⌜uulcorn;u ulcorner;u⌏uulcrop;u◸uultri;uūuumacr;uumluuml;uųuuogon;u𝕦uuopf;uuparrow;u updownarrow;uupharpoonleft;uupharpoonright;uuplus;uυuupsi;uupsih;uupsilon;u⇈u upuparrows;u⌝uurcorn;u urcorner;u⌎uurcrop;uůuuring;u◹uurtri;u𝓊uuscr;u⋰uutdot;uũuutilde;uutri;uutrif;uuuarr;uüuuumluuuml;u⦧uuwangle;uvArr;u⫨uvBar;u⫩uvBarv;uvDash;u⦜uvangrt;u varepsilon;u varkappa;u varnothing;uvarphi;uvarpi;u varpropto;uvarr;uvarrho;u varsigma;u⊊︀u varsubsetneq;u⫋︀uvarsubsetneqq;u⊋︀u varsupsetneq;u⫌︀uvarsupsetneqq;u vartheta;uvartriangleleft;uvartriangleright;uвuvcy;uvdash;uvee;u⊻uveebar;u≚uveeeq;u⋮uvellip;uverbar;uvert;u𝔳uvfr;uvltri;uvnsub;uvnsup;u𝕧uvopf;uvprop;uvrtri;u𝓋uvscr;uvsubnE;uvsubne;uvsupnE;uvsupne;u⦚uvzigzag;uŵuwcirc;u⩟uwedbar;uwedge;u≙uwedgeq;u℘uweierp;u𝔴uwfr;u𝕨uwopf;uwp;uwr;uwreath;u𝓌uwscr;uxcap;uxcirc;uxcup;uxdtri;u𝔵uxfr;uxhArr;uxharr;uξuxi;uxlArr;uxlarr;uxmap;u⋻uxnis;uxodot;u𝕩uxopf;uxoplus;uxotime;uxrArr;uxrarr;u𝓍uxscr;uxsqcup;uxuplus;uxutri;uxvee;uxwedge;uýuyacuteuyacute;uяuyacy;uŷuycirc;uыuycy;u¥uyenuyen;u𝔶uyfr;uїuyicy;u𝕪uyopf;u𝓎uyscr;uюuyucy;uÿuyumluyuml;uźuzacute;užuzcaron;uзuzcy;użuzdot;uzeetrf;uζuzeta;u𝔷uzfr;uжuzhcy;u⇝uzigrarr;u𝕫uzopf;u𝓏uzscr;u‍uzwj;u‌uzwnj;u�i iuiiiiiiiiiiiiuiiuiuiiiiiiiiiiiiiuiiiuDoctypeiu CharactersiuSpaceCharactersiuStartTagiuEndTagiuEmptyTagiuCommentiu ParseErrorumathtDataLossWarningcB`seZRS((t__name__t __module__(((sB/usr/lib/python2.7/site-packages/pip/_vendor/html5lib/constants.pyR| stReparseExceptioncB`seZRS((RR(((sB/usr/lib/python2.7/site-packages/pip/_vendor/html5lib/constants.pyR s(uh1uh2uh3uh4uh5uh6( i ii ii i& i i! ii0 i`i9 iRii}iii i i i i" i i ii"!iai: iSii~ix(:t __future__RRRtstringtNonetEOFtEt namespacest frozensettscopingElementstformattingElementstspecialElementsthtmlIntegrationPointElementst"mathmlTextIntegrationPointElementstadjustSVGAttributestadjustMathMLAttributestadjustForeignAttributestdicttitemstqnametprefixtlocaltnstunadjustForeignAttributestspaceCharactersttableInsertModeElementstascii_lowercasetasciiLowercasetascii_uppercasetasciiUppercaset ascii_letterst asciiLetterstdigitst hexdigitst hexDigitstctordtlowertasciiUpper2LowertheadingElementst voidElementst cdataElementstrcdataElementstbooleanAttributestentitiesWindows1252t xmlEntitiestentitiestreplacementCharacterst tokenTypest tagTokenTypestktvtprefixest UserWarningRt ExceptionR(((sB/usr/lib/python2.7/site-packages/pip/_vendor/html5lib/constants.pytsL                                                                                                                      :  4     1 PKZeNJJ/site-packages/pip/_vendor/html5lib/__init__.pycnu[ abc@`sdZddlmZmZmZddlmZmZmZddl m Z ddl m Z ddl mZdd d d d d gZdZdS(uM HTML parsing library based on the WHATWG "HTML5" specification. The parser is designed to be compatible with existing HTML found in the wild and implements well-defined error recovery that is largely compatible with modern desktop web browsers. 
Example usage: import html5lib f = open("my_document.html") tree = html5lib.parse(f) i(tabsolute_importtdivisiontunicode_literalsi(t HTMLParsertparset parseFragment(tgetTreeBuilder(t getTreeWalker(t serializeu HTMLParseruparseu parseFragmentugetTreeBuilderu getTreeWalkeru serializeu1.0b10N(t__doc__t __future__RRRt html5parserRRRt treebuildersRt treewalkersRt serializerRt__all__t __version__(((sA/usr/lib/python2.7/site-packages/pip/_vendor/html5lib/__init__.pyt s  PKZ|:2site-packages/pip/_vendor/html5lib/html5parser.pycnu[ abc@`sddlmZmZmZddlmZmZmZddlZyddl m Z Wn!e k r{ddl m Z nXddl mZddl mZddl mZdd lmZdd l mZdd lmZmZmZmZmZmZmZmZmZmZmZm Z!m"Z"m#Z#m$Z$m%Z%d e&d Z'dd e&dZ(dZ)de*fdYZ+ej,dZ-dZ.de/e0dZ1de2fdYZ3dS(i(tabsolute_importtdivisiontunicode_literals(twith_metaclasstviewkeystPY3N(t OrderedDicti(t _inputstream(t _tokenizer(t treebuilders(tMarker(t_utils(tspaceCharacterstasciiUpper2LowertspecialElementstheadingElementst cdataElementstrcdataElementst tokenTypest tagTokenTypest namespacesthtmlIntegrationPointElementst"mathmlTextIntegrationPointElementstadjustForeignAttributestadjustMathMLAttributestadjustSVGAttributestEtReparseExceptionuetreecK`s1tj|}t|d|}|j||S(u.Parse a string or file-like object into a treetnamespaceHTMLElements(R tgetTreeBuildert HTMLParsertparse(tdoct treebuilderRtkwargsttbtp((sD/usr/lib/python2.7/site-packages/pip/_vendor/html5lib/html5parser.pyRsudivcK`s7tj|}t|d|}|j|d||S(NRt container(R RRt parseFragment(R R%R!RR"R#R$((sD/usr/lib/python2.7/site-packages/pip/_vendor/html5lib/html5parser.pyR&&sc`s dtffdY}|S(Nt Decoratedc`seZfdZRS(c`s^xE|jD]7\}}t|tjr:|}n|||tphasetinsertHtmlElementtresetInsertionModeR9t lastPhasetbeforeRCDataPhasetTruet framesetOK(R?((sD/usr/lib/python2.7/site-packages/pip/_vendor/html5lib/html5parser.pyRI^s*           cC`s't|dsdS|jjjdjS(uThe name of the character encoding that was used to decode the input stream, or :obj:`None` if that is not determined yet. 
u tokenizeriN(thasattrR9RHRKt charEncodingRA(R?((sD/usr/lib/python2.7/site-packages/pip/_vendor/html5lib/html5parser.pytdocumentEncodingscC`se|jdkrK|jtdkrKd|jkoJ|jdjtdkS|j|jftkSdS(Nuannotation-xmlumathmluencodingu text/htmluapplication/xhtml+xml(u text/htmluapplication/xhtml+xml(RAt namespaceRt attributest translateR R(R?telement((sD/usr/lib/python2.7/site-packages/pip/_vendor/html5lib/html5parser.pytisHTMLIntegrationPoints cC`s|j|jftkS(N(RaRAR(R?Rd((sD/usr/lib/python2.7/site-packages/pip/_vendor/html5lib/html5parser.pytisMathMLTextIntegrationPointscC`sBtd}td}td}td}td}td}td}x|jD]}d} |} x=| dk r| } |jjr|jjdnd} | r| jnd} | r| jnd} | d }||kr|j| d | jd id} qht |jjd ks| |jj ks|j | rx||krf|d t ddgks|||fks| t dkr| dkr||kr|d dks|j| r||||fkr|j}n |jd}||kr |j| } qh||kr)|j| } qh||krG|j| } qh||kre|j| } qh||kr|j| } qh||krh|j| } qhqhW||krS| drS| d rS|jdi| d d 6qSqSWt}g}xG|r=|j|j|jj}|r|j|ks:tqqWdS(Nu CharactersuSpaceCharactersuStartTaguEndTaguCommentuDoctypeu ParseErroriutypeudataudatavarsiunameumglyphu malignmarkumathmluannotation-xmlusvguinForeignContentu selfClosinguselfClosingAcknowledgedu&non-void-element-with-trailing-solidus(RtnormalizedTokensR9R:t openElementsRaRAt parseErrortgettlentdefaultNamespaceRft frozensetRReRWR>tprocessCharacterstprocessSpaceCharacterstprocessStartTagt processEndTagtprocessCommenttprocessDoctypeR\tappendt processEOFtAssertionError(R?tCharactersTokentSpaceCharactersTokent StartTagTokent EndTagTokent CommentTokent DoctypeTokentParseErrorTokenttokent prev_tokent new_tokent currentNodetcurrentNodeNamespacetcurrentNodeNameR,RWt reprocessR>((sD/usr/lib/python2.7/site-packages/pip/_vendor/html5lib/html5parser.pyRJsp       "                  cc`s&x|jD]}|j|Vq WdS(N(RHtnormalizeToken(R?R~((sD/usr/lib/python2.7/site-packages/pip/_vendor/html5lib/html5parser.pyRgscO`s&|j|td|||jjS(uParse a HTML document into a well-formed tree stream - a filelike object or string containing the HTML to be parsed The optional encoding parameter must be a string that indicates the encoding. If specified, that encoding will be used, regardless of any BOM or later declaration (such as in a meta element) scripting - treat noscript elements as if javascript was turned on N(RMRNR9R:t getDocument(R?RKtargsR"((sD/usr/lib/python2.7/site-packages/pip/_vendor/html5lib/html5parser.pyRs cO`s#|j|t|||jjS(u2Parse a HTML fragment into a well-formed tree fragment container - name of the element we're setting the innerHTML property if set to None, default to 'div' stream - a filelike object or string containing the HTML to be parsed The optional encoding parameter must be a string that indicates the encoding. 
If specified, that encoding will be used, regardless of any BOM or later declaration (such as in a meta element) scripting - treat noscript elements as if javascript was turned on (RMR\R:t getFragment(R?RKRR"((sD/usr/lib/python2.7/site-packages/pip/_vendor/html5lib/html5parser.pyR&suXXX-undefined-errorcC`s^|dkri}n|jj|jjj||f|jrZtt||ndS(N( R9R;RtRHRKtpositionR8t ParseErrorR(R?t errorcodetdatavars((sD/usr/lib/python2.7/site-packages/pip/_vendor/html5lib/html5parser.pyRis   % cC`sr|dtdkrn|d}t||dRW(R?tlasttnewModestnodetnodeNamet new_phase((sD/usr/lib/python2.7/site-packages/pip/_vendor/html5lib/html5parser.pyRY!sD       cC`su|dkst|jj||dkrC|jj|j_n|jj|j_|j|_|j d|_dS(uYGeneric RCDATA/RAWTEXT Parsing algorithm contentType - RCDATA or RAWTEXT uRAWTEXTuRCDATAutextN(uRAWTEXTuRCDATA( RvR:t insertElementRHRURTRSRWt originalPhaseR>(R?R~t contentType((sD/usr/lib/python2.7/site-packages/pip/_vendor/html5lib/html5parser.pytparseRCDataRawtextMs  N(R5R6t__doc__R9RNR\RCRMRItpropertyR`ReRfRJRgRR&RiRRRRRRYR(((sD/usr/lib/python2.7/site-packages/pip/_vendor/html5lib/html5parser.pyR8s& "  C        ,c`sTd}d}dt|||fdYdfdY}dfdY}d ffd Y}d ffd Y}d ffdY}dffdY}dffdY} dffdY} dffdY} dffdY} dffdY} dffdY}dffdY}dffd Y}d!ffd"Y}d#ffd$Y}d%ffd&Y}d'ffd(Y}d)ffd*Y}d+ffd,Y}d-ffd.Y}d/ffd0Y}d1ffd2Y}i|d36|d46|d56|d66|d76|d86| d96| d:6| d;6| d<6| d=6|d>6|d?6|d@6|dA6|dB6|dC6|dD6|dE6|dF6|dG6|dH6|dI6S(JNc`s2tdtjDfd}|S(u4Logger that records which phase processes each tokencs`s!|]\}}||fVqdS(N((t.0tkeytvalue((sD/usr/lib/python2.7/site-packages/pip/_vendor/html5lib/html5parser.pys csc`sjjdrt|dkr|d}yi|dd6}Wn nX|dtkru|d|dRW(R?R~RAtpublicIdtsystemIdtcorrect((sD/usr/lib/python2.7/site-packages/pip/_vendor/html5lib/html5parser.pyRss              cS`s&d|j_|jjd|j_dS(Nuquirksu beforeHtml(RDRQR>RW(R?((sD/usr/lib/python2.7/site-packages/pip/_vendor/html5lib/html5parser.pyt anythingElses cS`s|jjd|j|S(Nuexpected-doctype-but-got-chars(RDRiR(R?R~((sD/usr/lib/python2.7/site-packages/pip/_vendor/html5lib/html5parser.pyRns cS`s,|jjdi|dd6|j|S(Nu"expected-doctype-but-got-start-taguname(RDRiR(R?R~((sD/usr/lib/python2.7/site-packages/pip/_vendor/html5lib/html5parser.pyRps  cS`s,|jjdi|dd6|j|S(Nu expected-doctype-but-got-end-taguname(RDRiR(R?R~((sD/usr/lib/python2.7/site-packages/pip/_vendor/html5lib/html5parser.pyRqs  cS`s|jjd|jtS(Nuexpected-doctype-but-got-eof(RDRiRR\(R?((sD/usr/lib/python2.7/site-packages/pip/_vendor/html5lib/html5parser.pyRu%s ( R5R6RoRrRsRRnRpRqRu(((sD/usr/lib/python2.7/site-packages/pip/_vendor/html5lib/html5parser.pyRs   _    tBeforeHtmlPhasecB`sGeZdZdZdZdZdZdZdZRS(cS`s3|jjtdd|jjd|j_dS(NuhtmluStartTagu beforeHead(R:t insertRoottimpliedTagTokenRDR>RW(R?((sD/usr/lib/python2.7/site-packages/pip/_vendor/html5lib/html5parser.pyRX,scS`s|jtS(N(RXR\(R?((sD/usr/lib/python2.7/site-packages/pip/_vendor/html5lib/html5parser.pyRu1s cS`s|jj||jjdS(N(R:RR(R?R~((sD/usr/lib/python2.7/site-packages/pip/_vendor/html5lib/html5parser.pyRr5scS`sdS(N((R?R~((sD/usr/lib/python2.7/site-packages/pip/_vendor/html5lib/html5parser.pyRo8scS`s|j|S(N(RX(R?R~((sD/usr/lib/python2.7/site-packages/pip/_vendor/html5lib/html5parser.pyRn;s cS`s-|ddkrt|j_n|j|S(Nunameuhtml(R\RDRORX(R?R~((sD/usr/lib/python2.7/site-packages/pip/_vendor/html5lib/html5parser.pyRp?s cS`sC|ddkr1|jjdi|dd6n|j|SdS(Nunameuheadubodyuhtmlubruunexpected-end-tag-before-html(uheadubodyuhtmlubr(RDRiRX(R?R~((sD/usr/lib/python2.7/site-packages/pip/_vendor/html5lib/html5parser.pyRqEs   ( R5R6RXRuRrRoRnRpRq(((sD/usr/lib/python2.7/site-packages/pip/_vendor/html5lib/html5parser.pyR*s      
tBeforeHeadPhasec`s_eZfdZdZdZdZdZdZdZdZ dZ RS( c`s}j|||tjd|jfd|jfg|_|j|j_tjd|jfg|_ |j |j _dS(Nuhtmluheadubodyubr(uheadubodyuhtmlubr( RCR tMethodDispatcherRt startTagHeadRt startTagOthertdefaulttendTagImplyHeadRt endTagOther(R?RDR:(R(sD/usr/lib/python2.7/site-packages/pip/_vendor/html5lib/html5parser.pyRCNs cS`s|jtddtS(NuheaduStartTag(RRR\(R?((sD/usr/lib/python2.7/site-packages/pip/_vendor/html5lib/html5parser.pyRu\scS`sdS(N((R?R~((sD/usr/lib/python2.7/site-packages/pip/_vendor/html5lib/html5parser.pyRo`scS`s|jtdd|S(NuheaduStartTag(RR(R?R~((sD/usr/lib/python2.7/site-packages/pip/_vendor/html5lib/html5parser.pyRncscS`s|jjdj|S(NuinBody(RDR>Rp(R?R~((sD/usr/lib/python2.7/site-packages/pip/_vendor/html5lib/html5parser.pyRgscS`s@|jj||jjd|j_|jjd|j_dS(NiuinHead(R:RRht headPointerRDR>RW(R?R~((sD/usr/lib/python2.7/site-packages/pip/_vendor/html5lib/html5parser.pyRjscS`s|jtdd|S(NuheaduStartTag(RR(R?R~((sD/usr/lib/python2.7/site-packages/pip/_vendor/html5lib/html5parser.pyRoscS`s|jtdd|S(NuheaduStartTag(RR(R?R~((sD/usr/lib/python2.7/site-packages/pip/_vendor/html5lib/html5parser.pyRsscS`s"|jjdi|dd6dS(Nuend-tag-after-implied-rootuname(RDRi(R?R~((sD/usr/lib/python2.7/site-packages/pip/_vendor/html5lib/html5parser.pyRws ( R5R6RCRuRoRnRRRRR((R(sD/usr/lib/python2.7/site-packages/pip/_vendor/html5lib/html5parser.pyRMs       t InHeadPhasec`seZfdZdZdZdZdZdZdZdZ dZ d Z d Z d Z d Zd ZdZdZRS(c `sj|||tjd|jfd|jfd|jfd|jfd|jfd|jfd |j fd |j fg|_ |j |j _ tjd |jfd|jfg|_|j|j_ dS(Nuhtmlutitleunoframesustyleunoscriptuscriptubaseubasefontubgsounducommandulinkumetauheadubrubody(unoframesustyle(ubaseubasefontubgsounducommandulink(ubruhtmlubody(RCR RRt startTagTitletstartTagNoFramesStyletstartTagNoscripttstartTagScripttstartTagBaseLinkCommandt startTagMetaRRRRt endTagHeadtendTagHtmlBodyBrRR(R?RDR:(R(sD/usr/lib/python2.7/site-packages/pip/_vendor/html5lib/html5parser.pyRC|s         cS`s|jtS(N(RR\(R?((sD/usr/lib/python2.7/site-packages/pip/_vendor/html5lib/html5parser.pyRus cS`s|j|S(N(R(R?R~((sD/usr/lib/python2.7/site-packages/pip/_vendor/html5lib/html5parser.pyRns cS`s|jjdj|S(NuinBody(RDR>Rp(R?R~((sD/usr/lib/python2.7/site-packages/pip/_vendor/html5lib/html5parser.pyRscS`s|jjddS(Nu!two-heads-are-not-better-than-one(RDRi(R?R~((sD/usr/lib/python2.7/site-packages/pip/_vendor/html5lib/html5parser.pyRscS`s.|jj||jjjt|dRW(R?R~((sD/usr/lib/python2.7/site-packages/pip/_vendor/html5lib/html5parser.pyRs cS`sT|jj||jjj|jj_|jj|j_|jjd|j_dS(Nutext( R:RRDRHtscriptDataStateRTRWRR>(R?R~((sD/usr/lib/python2.7/site-packages/pip/_vendor/html5lib/html5parser.pyRscS`s|j|S(N(R(R?R~((sD/usr/lib/python2.7/site-packages/pip/_vendor/html5lib/html5parser.pyRs cS`sQ|jjjj}|jdks7td|j|jjd|j_dS(NuheaduExpected head got %su afterHead(RDR:RhRRARvR>RW(R?R~R((sD/usr/lib/python2.7/site-packages/pip/_vendor/html5lib/html5parser.pyRs"cS`s|j|S(N(R(R?R~((sD/usr/lib/python2.7/site-packages/pip/_vendor/html5lib/html5parser.pyRs cS`s"|jjdi|dd6dS(Nuunexpected-end-taguname(RDRi(R?R~((sD/usr/lib/python2.7/site-packages/pip/_vendor/html5lib/html5parser.pyRscS`s|jtddS(Nuhead(RR(R?((sD/usr/lib/python2.7/site-packages/pip/_vendor/html5lib/html5parser.pyRs(R5R6RCRuRnRRRRRRRRRRRRR((R(sD/usr/lib/python2.7/site-packages/pip/_vendor/html5lib/html5parser.pyR{s               tInHeadNoscriptPhasec`seZfdZdZdZdZdZdZdZdZ dZ d Z d Z d Z d ZRS( c`sj|||tjd|jfd |jfd |jfg|_|j|j_tjd |j fd |j fg|_ |j |j _dS( Nuhtmlubasefontubgsoundulinkumetaunoframesustyleuheadunoscriptubr(ubasefontubgsoundulinkumetaunoframesustyle(uheadunoscript( RCR 
RRRtstartTagHeadNoscriptRRRtendTagNoscripttendTagBrRR(R?RDR:(R(sD/usr/lib/python2.7/site-packages/pip/_vendor/html5lib/html5parser.pyRCs   cS`s|jjd|jtS(Nueof-in-head-noscript(RDRiRR\(R?((sD/usr/lib/python2.7/site-packages/pip/_vendor/html5lib/html5parser.pyRus cS`s|jjdj|S(NuinHead(RDR>Rr(R?R~((sD/usr/lib/python2.7/site-packages/pip/_vendor/html5lib/html5parser.pyRrscS`s|jjd|j|S(Nuchar-in-head-noscript(RDRiR(R?R~((sD/usr/lib/python2.7/site-packages/pip/_vendor/html5lib/html5parser.pyRns cS`s|jjdj|S(NuinHead(RDR>Ro(R?R~((sD/usr/lib/python2.7/site-packages/pip/_vendor/html5lib/html5parser.pyRoscS`s|jjdj|S(NuinBody(RDR>Rp(R?R~((sD/usr/lib/python2.7/site-packages/pip/_vendor/html5lib/html5parser.pyRscS`s|jjdj|S(NuinHead(RDR>Rp(R?R~((sD/usr/lib/python2.7/site-packages/pip/_vendor/html5lib/html5parser.pyRscS`s"|jjdi|dd6dS(Nuunexpected-start-taguname(RDRi(R?R~((sD/usr/lib/python2.7/site-packages/pip/_vendor/html5lib/html5parser.pyR scS`s,|jjdi|dd6|j|S(Nuunexpected-inhead-noscript-taguname(RDRiR(R?R~((sD/usr/lib/python2.7/site-packages/pip/_vendor/html5lib/html5parser.pyR s cS`sQ|jjjj}|jdks7td|j|jjd|j_dS(NunoscriptuExpected noscript got %suinHead(RDR:RhRRARvR>RW(R?R~R((sD/usr/lib/python2.7/site-packages/pip/_vendor/html5lib/html5parser.pyRs"cS`s,|jjdi|dd6|j|S(Nuunexpected-inhead-noscript-taguname(RDRiR(R?R~((sD/usr/lib/python2.7/site-packages/pip/_vendor/html5lib/html5parser.pyRs cS`s"|jjdi|dd6dS(Nuunexpected-end-taguname(RDRi(R?R~((sD/usr/lib/python2.7/site-packages/pip/_vendor/html5lib/html5parser.pyRscS`s|jtddS(Nunoscript(RR(R?((sD/usr/lib/python2.7/site-packages/pip/_vendor/html5lib/html5parser.pyRs(R5R6RCRuRrRnRoRRRRRRRR((R(sD/usr/lib/python2.7/site-packages/pip/_vendor/html5lib/html5parser.pyRs           tAfterHeadPhasec`szeZfdZdZdZdZdZdZdZdZ dZ d Z d Z d Z RS( c `sj|||tjd|jfd|jfd|jfd|jfd |jfg|_|j |j_ tjd|j fg|_ |j |j _ dS(Nuhtmlubodyuframesetubaseubasefontubgsoundulinkumetaunoframesuscriptustyleutitleuheadubr( ubaseubasefontubgsoundulinkumetaunoframesuscriptustyleutitle(ubodyuhtmlubr(RCR RRt startTagBodytstartTagFramesettstartTagFromHeadRRRRRRR(R?RDR:(R(sD/usr/lib/python2.7/site-packages/pip/_vendor/html5lib/html5parser.pyRC#s     cS`s|jtS(N(RR\(R?((sD/usr/lib/python2.7/site-packages/pip/_vendor/html5lib/html5parser.pyRu4s cS`s|j|S(N(R(R?R~((sD/usr/lib/python2.7/site-packages/pip/_vendor/html5lib/html5parser.pyRn8s cS`s|jjdj|S(NuinBody(RDR>Rp(R?R~((sD/usr/lib/python2.7/site-packages/pip/_vendor/html5lib/html5parser.pyR<scS`s6t|j_|jj||jjd|j_dS(NuinBody(RNRDR]R:RR>RW(R?R~((sD/usr/lib/python2.7/site-packages/pip/_vendor/html5lib/html5parser.pyR?s cS`s*|jj||jjd|j_dS(Nu inFrameset(R:RRDR>RW(R?R~((sD/usr/lib/python2.7/site-packages/pip/_vendor/html5lib/html5parser.pyRDscS`s|jjdi|dd6|jjj|jj|jjdj|xG|jjdddD],}|jdkrh|jjj |PqhqhWdS(Nu#unexpected-start-tag-out-of-my-headunameuinHeadiuhead( RDRiR:RhRtRR>RpRAtremove(R?R~R((sD/usr/lib/python2.7/site-packages/pip/_vendor/html5lib/html5parser.pyRHs  cS`s"|jjdi|dd6dS(Nuunexpected-start-taguname(RDRi(R?R~((sD/usr/lib/python2.7/site-packages/pip/_vendor/html5lib/html5parser.pyRRscS`s|j|S(N(R(R?R~((sD/usr/lib/python2.7/site-packages/pip/_vendor/html5lib/html5parser.pyRUs cS`s|j|S(N(R(R?R~((sD/usr/lib/python2.7/site-packages/pip/_vendor/html5lib/html5parser.pyRYs 
cS`s"|jjdi|dd6dS(Nuunexpected-end-taguname(RDRi(R?R~((sD/usr/lib/python2.7/site-packages/pip/_vendor/html5lib/html5parser.pyR]scS`s?|jjtdd|jjd|j_t|j_dS(NubodyuStartTaguinBody(R:RRRDR>RWR\R](R?((sD/usr/lib/python2.7/site-packages/pip/_vendor/html5lib/html5parser.pyR`s(R5R6RCRuRnRRRRRRRRR((R(sD/usr/lib/python2.7/site-packages/pip/_vendor/html5lib/html5parser.pyR"s         t InBodyPhasec`seZfdZdZdZdZdZdZdZdZ dZ d Z d Z d Z d Zd ZdZdZdZdZdZdZdZdZdZdZdZdZdZdZdZdZdZ dZ!d Z"d!Z#d"Z$d#Z%d$Z&d%Z'd&Z(d'Z)d(Z*d)Z+d*Z,d+Z-d,Z.d-Z/d.Z0d/Z1d0Z2d1Z3d2Z4RS(3c,`sij||||j|_tjd|jfdd|jfd |jfd |jfde|j ft |j fdf|j fd&|j fdg|jfd*|jfd+|jfdh|jfd8|jfd9|jfdi|jfd=|jfd>|jfdj|jfdk|jfdH|jfdI|jfdJ|jfdK|jfdL|jfdM|jfdN|jfdl|j fdQ|j!fdm|j"fdn|j#fdV|j$fdW|j%fdo|j&fg!|_'|j(|j'_)tjd |j*fd|j+fdp|j,fd&|j-fd |j.fdq|j/ft |j0fdr|j1fds|j2fd@|j3fg |_4|j5|j4_)dS(tNuhtmlubaseubasefontubgsounducommandulinkumetauscriptustyleutitleubodyuframesetuaddressuarticleuasideu blockquoteucenterudetailsudirudivudlufieldsetu figcaptionufigureufooteruheaderuhgroupumainumenuunavuolupusectionusummaryuulupreulistinguformuliuddudtu plaintextuaububigucodeuemufontuiususmallustrikeustronguttuuunobrubuttonuappletumarqueeuobjectuxmputableuareaubruembeduimgukeygenuwbruparamusourceutrackuinputuhruimageuisindexutextareauiframeunoscriptunoembedunoframesuselecturpurtuoptionuoptgroupumathusvgucaptionucolucolgroupuframeuheadutbodyutdutfootuthutheadutrudialog( ubaseubasefontubgsounducommandulinkumetauscriptustyleutitle(uaddressuarticleuasideu blockquoteucenterudetailsudirudivudlufieldsetu figcaptionufigureufooteruheaderuhgroupumainumenuunavuolupusectionusummaryuul(upreulisting(uliuddudt( ububigucodeuemufontuiususmallustrikeustronguttuu(uappletumarqueeuobject(uareaubruembeduimgukeygenuwbr(uparamusourceutrack(unoembedunoframes(urpurt(uoptionuoptgroup( ucaptionucolucolgroupuframeuheadutbodyutdutfootuthutheadutr(uaddressuarticleuasideu blockquoteubuttonucenterudetailsudialogudirudivudlufieldsetu figcaptionufigureufooteruheaderuhgroupulistingumainumenuunavuolupreusectionusummaryuul(uddudtuli(uaububigucodeuemufontuiunobrususmallustrikeustronguttuu(uappletumarqueeuobject(6RCtprocessSpaceCharactersNonPreRoR RRtstartTagProcessInHeadRRtstartTagClosePRtstartTagHeadingtstartTagPreListingt startTagFormtstartTagListItemtstartTagPlaintextt startTagAtstartTagFormattingt startTagNobrtstartTagButtontstartTagAppletMarqueeObjectt startTagXmpt startTagTabletstartTagVoidFormattingtstartTagParamSourcet startTagInputt startTagHrt startTagImagetstartTagIsIndextstartTagTextareatstartTagIFrameRtstartTagRawtexttstartTagSelectt startTagRpRtt startTagOptt startTagMatht startTagSvgtstartTagMisplacedRRRt endTagBodyt endTagHtmlt endTagBlockt endTagFormtendTagPtendTagListItemt endTagHeadingtendTagFormattingtendTagAppletMarqueeObjectRRR(R?RDR:(R(sD/usr/lib/python2.7/site-packages/pip/_vendor/html5lib/html5parser.pyRChs~                                          cS`s4|j|jko3|j|jko3|j|jkS(N(RARaRb(R?tnode1tnode2((sD/usr/lib/python2.7/site-packages/pip/_vendor/html5lib/html5parser.pytisMatchingFormattingElementscS`s|jj||jjd}g}xS|jjdddD]8}|tkrVPq@|j||r@|j|q@q@Wt|dkstt|dkr|jjj |dn|jjj|dS(Nii( R:RRhtactiveFormattingElementsR RRtRkRvR(R?R~RdtmatchingElementsR((sD/usr/lib/python2.7/site-packages/pip/_vendor/html5lib/html5parser.pytaddFormattingElements  c S`sWtd}xD|jjddd D])}|j|kr&|jjdPq&q&WdS(Nuddudtuliuputbodyutdutfootuthutheadutrubodyuhtmliu expected-closing-tag-but-got-eof( 
uddudtuliuputbodyutdutfootuthutheadutrubodyuhtml(RmR:RhRARDRi(R?tallowed_elementsR((sD/usr/lib/python2.7/site-packages/pip/_vendor/html5lib/html5parser.pyRus  cS`s|d}|j|_|jdrb|jjdjdkrb|jjdj rb|d}n|r|jj|jj|ndS( Nudatau iupreulistingutextareai(upreulistingutextarea( RRoRR:RhRAt hasContentt#reconstructActiveFormattingElementsR(R?R~R((sD/usr/lib/python2.7/site-packages/pip/_vendor/html5lib/html5parser.pyt!processSpaceCharactersDropNewlines    cS`s}|ddkrdS|jj|jj|d|jjrytg|dD]}|tk^qOryt|j_ndS(Nudatau(R:R RRDR]tanyR RN(R?R~tchar((sD/usr/lib/python2.7/site-packages/pip/_vendor/html5lib/html5parser.pyRns  #cS`s%|jj|jj|ddS(Nudata(R:R R(R?R~((sD/usr/lib/python2.7/site-packages/pip/_vendor/html5lib/html5parser.pyRs cS`s|jjdj|S(NuinHead(RDR>Rp(R?R~((sD/usr/lib/python2.7/site-packages/pip/_vendor/html5lib/html5parser.pyRscS`s|jjdidd6t|jjdksK|jjdjdkr`|jjstn`t|j_ xQ|dj D]?\}}||jjdj kr}||jjdj |RW(R?R~((sD/usr/lib/python2.7/site-packages/pip/_vendor/html5lib/html5parser.pyRs1 'cS`sB|jjdddr.|jtdn|jj|dS(Nuptvariantubutton(R:telementInScopeRRR(R?R~((sD/usr/lib/python2.7/site-packages/pip/_vendor/html5lib/html5parser.pyR scS`sZ|jjdddr.|jtdn|jj|t|j_|j|_ dS(NupRubutton( R:RRRRRNRDR]R Ro(R?R~((sD/usr/lib/python2.7/site-packages/pip/_vendor/html5lib/html5parser.pyRs  cS`s|jjr)|jjdidd6nT|jjdddrW|jtdn|jj||jjd|j_dS(Nuunexpected-start-taguformunameupRubuttoni( R:t formPointerRDRiRRRRRh(R?R~((sD/usr/lib/python2.7/site-packages/pip/_vendor/html5lib/html5parser.pyRs  cS`st|j_idgd6ddgd6ddgd6}||d}xnt|jjD]Z}|j|kr|jjjt |jdPn|j t krW|jd krWPqWqWW|jj dd d r|jjjt ddn|jj |dS( NuliudtuddunameuEndTaguaddressudivupRubutton(uaddressudivup(RNRDR]treversedR:RhRARWRqRt nameTupleRRR(R?R~t stopNamesMapt stopNamesR((sD/usr/lib/python2.7/site-packages/pip/_vendor/html5lib/html5parser.pyRs"     cS`sZ|jjdddr.|jtdn|jj||jjj|jj_dS(NupRubutton( R:RRRRRDRHRVRT(R?R~((sD/usr/lib/python2.7/site-packages/pip/_vendor/html5lib/html5parser.pyR4scS`s|jjdddr.|jtdn|jjdjtkrx|jjdi|dd6|jjj n|jj |dS(NupRubuttoniuunexpected-start-taguname( R:RRRRhRARRDRiRR(R?R~((sD/usr/lib/python2.7/site-packages/pip/_vendor/html5lib/html5parser.pyR:s cS`s|jjd}|r|jjdidd6dd6|jtd||jjkrt|jjj|n||jjkr|jjj|qn|jj |j |dS(Nuau$unexpected-start-tag-implies-end-tagu startNameuendName( R:t!elementInActiveFormattingElementsRDRiRRRhRRR R (R?R~t afeAElement((sD/usr/lib/python2.7/site-packages/pip/_vendor/html5lib/html5parser.pyRBs  cS`s|jj|j|dS(N(R:R R (R?R~((sD/usr/lib/python2.7/site-packages/pip/_vendor/html5lib/html5parser.pyROs cS`st|jj|jjdrc|jjdidd6dd6|jtd|jjn|j|dS(Nunobru$unexpected-start-tag-implies-end-tagu startNameuendName(R:R RRDRiRqRR (R?R~((sD/usr/lib/python2.7/site-packages/pip/_vendor/html5lib/html5parser.pyRSs  cS`sw|jjdrJ|jjdidd6dd6|jtd|S|jj|jj|t|j_ dS(Nubuttonu$unexpected-start-tag-implies-end-tagu startNameuendName( R:RRDRiRqRR RRNR](R?R~((sD/usr/lib/python2.7/site-packages/pip/_vendor/html5lib/html5parser.pyR]s  cS`s@|jj|jj||jjjtt|j_dS(N( R:R RRRtR RNRDR](R?R~((sD/usr/lib/python2.7/site-packages/pip/_vendor/html5lib/html5parser.pyRhs cS`s^|jjdddr.|jtdn|jjt|j_|jj|ddS(NupRubuttonuRAWTEXT( R:RRRR RNRDR]R(R?R~((sD/usr/lib/python2.7/site-packages/pip/_vendor/html5lib/html5parser.pyRns   cS`sy|jjdkrC|jjdddrC|jtdqCn|jj|t|j_|jj d|j_ dS(NuquirksupRubuttonuinTable( RDRQR:RRqRRRNR]R>RW(R?R~((sD/usr/lib/python2.7/site-packages/pip/_vendor/html5lib/html5parser.pyRus  cS`sG|jj|jj||jjjt|d(R?R~((sD/usr/lib/python2.7/site-packages/pip/_vendor/html5lib/html5parser.pyRs      
cS`s_|jjdrK|jj|jjdjdkrK|jjqKn|jj|dS(Nurubyi(R:RtgenerateImpliedEndTagsRhRARDRiR(R?R~((sD/usr/lib/python2.7/site-packages/pip/_vendor/html5lib/html5parser.pyRs  cS`sv|jj|jj||jj|td|d<|jj||drr|jjjt |dRW(R?R~R((sD/usr/lib/python2.7/site-packages/pip/_vendor/html5lib/html5parser.pyR!s     cS`s-|jjdr)|jtd|SdS(Nubody(R:RRR(R?R~((sD/usr/lib/python2.7/site-packages/pip/_vendor/html5lib/html5parser.pyR3scS`s|ddkr|j|_n|jj|d}|rK|jjn|jjdj|dkr|jjdi|dd6n|r|jjj }x,|j|dkr|jjj }qWndS(Nunameupreiuend-tag-too-early( RRoR:RRRhRARDRiR(R?R~tinScopeR((sD/usr/lib/python2.7/site-packages/pip/_vendor/html5lib/html5parser.pyR9s!cS`s|jj}d|j_|dks7|jj| rT|jjdidd6nS|jj|jjd|kr|jjdidd6n|jjj|dS(Nuunexpected-end-taguformunameiuend-tag-too-early-ignored( R:RR9RRDRiRRhR(R?R~R((sD/usr/lib/python2.7/site-packages/pip/_vendor/html5lib/html5parser.pyRGs     cS`s|ddkrd}nd}|jj|dd|s\|jjdi|dd6n|jjd|d|jjdj|dkr|jjdi|dd6n|jjj}x)|j|dkr|jjj}qWdS( NunameuliulistRuunexpected-end-tagtexcludeiuend-tag-too-early( R9R:RRDRiRRhRAR(R?R~RR((sD/usr/lib/python2.7/site-packages/pip/_vendor/html5lib/html5parser.pyRTs ! cS`sx1tD])}|jj|r|jjPqqW|jjdj|dkrr|jjdi|dd6nx^tD]V}|jj|ry|jjj}x%|jtkr|jjj}qWPqyqyWdS(Niunameuend-tag-too-early( RR:RRRhRARDRiR(R?R~titem((sD/usr/lib/python2.7/site-packages/pip/_vendor/html5lib/html5parser.pyRes  ! cS`s3d}x&|dkr.|d7}|jj|d}| sd||jjkru|jj|j ru|j|dS||jjkr|jjdi|dd6|jjj |dS|jj|js|jjdi|dd6dS||jjdkr*|jjd i|dd6n|jjj |}d}x1|jj|D]}|j t krV|}PqVqVW|dkr|jjj}x"||kr|jjj}qW|jjj |dS|jj|d}|jjj |}|} } d} |jjj | } x| d kr9| d7} | d8} |jj| } | |jjkr|jjj | q$n| |krPn| |kr|jjj | d}n| j} | |jj|jjj | <| |jj|jjj | <| } | jr#| jj| n| j| | } q$W| jrV| jj| n|jtdkr|jj\}}|j| |n |j| |j} |j| |j| |jjj ||jjj|| |jjj ||jjj|jjj |d| q WdS(u)The much-feared adoption agency algorithmiiiunameNuadoption-agency-1.2uadoption-agency-4.4iuadoption-agency-1.3iutableutbodyutfootutheadutr(utableutbodyutfootutheadutr(R:RRhRRARRDRiRRtindexR9RRRt cloneNodeRRt appendChildRmtgetTableMisnestedNodePositiont insertBeforetreparentChildrentinsert(R?R~touterLoopCountertformattingElementtafeIndext furthestBlockRdtcommonAncestortbookmarktlastNodeRtinnerLoopCounterR#tcloneRR'((sD/usr/lib/python2.7/site-packages/pip/_vendor/html5lib/html5parser.pyRts   !                
cS`s|jj|dr&|jjn|jjdj|dkrd|jjdi|dd6n|jj|dr|jjj}x)|j|dkr|jjj}qW|jjndS(Nunameiuend-tag-too-early( R:RRRhRARDRiRtclearActiveFormattingElements(R?R~Rd((sD/usr/lib/python2.7/site-packages/pip/_vendor/html5lib/html5parser.pyRs!cS`s[|jjdidd6dd6|jj|jjtdd|jjjdS(Nuunexpected-end-tag-treated-asubru originalNameu br elementunewNameuStartTag(RDRiR:R RRRhR(R?R~((sD/usr/lib/python2.7/site-packages/pip/_vendor/html5lib/html5parser.pyR#s   cS`sx|jjdddD]}|j|dkr|jjd|d|jjdj|dkr|jjdi|dd6nx|jjj|krqWPq|jtkr|jjdi|dd6PqqWdS(NiunameR!uunexpected-end-tag( R:RhRARRDRiRRR(R?R~R((sD/usr/lib/python2.7/site-packages/pip/_vendor/html5lib/html5parser.pyR*s !(5R5R6RCRR RuR RnRRRRRRRRRRRRRRRRRRRRRRRRRRRRRRRRRRRRRRRRRRRRR((R(sD/usr/lib/python2.7/site-packages/pip/_vendor/html5lib/html5parser.pyResfG                                  t TextPhasec`sDeZfdZdZdZdZdZdZRS(c`sej|||tjg|_|j|j_tjd|jfg|_|j|j_dS(Nuscript( RCR RRRRt endTagScriptRR(R?RDR:(R(sD/usr/lib/python2.7/site-packages/pip/_vendor/html5lib/html5parser.pyRC9s cS`s|jj|ddS(Nudata(R:R(R?R~((sD/usr/lib/python2.7/site-packages/pip/_vendor/html5lib/html5parser.pyRnAscS`sM|jjdi|jjdjd6|jjj|jj|j_tS(Nu&expected-named-closing-tag-but-got-eofiuname( RDRiR:RhRARRRWR\(R?((sD/usr/lib/python2.7/site-packages/pip/_vendor/html5lib/html5parser.pyRuDs  cS`ststd|ddS(Nu4Tried to process start tag %s in RCDATA/RAWTEXT modeuname(RNRv(R?R~((sD/usr/lib/python2.7/site-packages/pip/_vendor/html5lib/html5parser.pyRKscS`s=|jjj}|jdks't|jj|j_dS(Nuscript(R:RhRRARvRDRRW(R?R~R((sD/usr/lib/python2.7/site-packages/pip/_vendor/html5lib/html5parser.pyR5NscS`s&|jjj|jj|j_dS(N(R:RhRRDRRW(R?R~((sD/usr/lib/python2.7/site-packages/pip/_vendor/html5lib/html5parser.pyRUs(R5R6RCRnRuRR5R((R(sD/usr/lib/python2.7/site-packages/pip/_vendor/html5lib/html5parser.pyR48s     t InTablePhasec`seZfdZdZdZdZdZdZdZdZ dZ d Z d Z d Z d Zd ZdZdZdZdZdZRS(c `sj|||tjd|jfd|jfd|jfd|jfd|jfd|jfd |j fd|j fd|j fd|j fg |_ |j|j _tjd |jfd|jfg|_|j|j_dS(Nuhtmlucaptionucolgroupucolutbodyutfootutheadutduthutrutableustyleuscriptuinputuformubody(utbodyutfootuthead(utduthutr(ustyleuscript( ubodyucaptionucolucolgroupuhtmlutbodyutdutfootuthutheadutr(RCR RRtstartTagCaptiontstartTagColgroupt startTagColtstartTagRowGrouptstartTagImplyTbodyRtstartTagStyleScriptRRRRRt endTagTablet endTagIgnoreRR(R?RDR:(R(sD/usr/lib/python2.7/site-packages/pip/_vendor/html5lib/html5parser.pyRC[s$          cS`s4x-|jjdjdkr/|jjjqWdS(Niutableuhtml(utableuhtml(R:RhRAR(R?((sD/usr/lib/python2.7/site-packages/pip/_vendor/html5lib/html5parser.pytclearStackToTableContextsscS`sB|jjdjdkr,|jjdn|jjs>tdS(Niuhtmlu eof-in-table(R:RhRARDRiRLRv(R?((sD/usr/lib/python2.7/site-packages/pip/_vendor/html5lib/html5parser.pyRu|scS`sH|jj}|jjd|j_||jj_|jjj|dS(Nu inTableText(RDRWR>RRo(R?R~R((sD/usr/lib/python2.7/site-packages/pip/_vendor/html5lib/html5parser.pyRos cS`sH|jj}|jjd|j_||jj_|jjj|dS(Nu inTableText(RDRWR>RRn(R?R~R((sD/usr/lib/python2.7/site-packages/pip/_vendor/html5lib/html5parser.pyRns cS`s3t|j_|jjdj|t|j_dS(NuinBody(R\R:tinsertFromTableRDR>RnRN(R?R~((sD/usr/lib/python2.7/site-packages/pip/_vendor/html5lib/html5parser.pyRs cS`sG|j|jjjt|jj||jjd|j_dS(Nu inCaption( R?R:RRtR RRDR>RW(R?R~((sD/usr/lib/python2.7/site-packages/pip/_vendor/html5lib/html5parser.pyR7s cS`s4|j|jj||jjd|j_dS(Nu inColumnGroup(R?R:RRDR>RW(R?R~((sD/usr/lib/python2.7/site-packages/pip/_vendor/html5lib/html5parser.pyR8s cS`s|jtdd|S(NucolgroupuStartTag(R8R(R?R~((sD/usr/lib/python2.7/site-packages/pip/_vendor/html5lib/html5parser.pyR9scS`s4|j|jj||jjd|j_dS(Nu 
inTableBody(R?R:RRDR>RW(R?R~((sD/usr/lib/python2.7/site-packages/pip/_vendor/html5lib/html5parser.pyR:s cS`s|jtdd|S(NutbodyuStartTag(R:R(R?R~((sD/usr/lib/python2.7/site-packages/pip/_vendor/html5lib/html5parser.pyR;scS`sN|jjdidd6dd6|jjjtd|jjsJ|SdS(Nu$unexpected-start-tag-implies-end-tagutableu startNameuendName(RDRiRWRqRRL(R?R~((sD/usr/lib/python2.7/site-packages/pip/_vendor/html5lib/html5parser.pyRs   cS`s|jjdj|S(NuinHead(RDR>Rp(R?R~((sD/usr/lib/python2.7/site-packages/pip/_vendor/html5lib/html5parser.pyR<scS`sqd|dkr`|ddjtdkr`|jjd|jj||jjjn |j|dS(Nutypeudatauhiddenu unexpected-hidden-input-in-table( RcR RDRiR:RRhRR(R?R~((sD/usr/lib/python2.7/site-packages/pip/_vendor/html5lib/html5parser.pyRs cS`s_|jjd|jjdkr[|jj||jjd|j_|jjjndS(Nuunexpected-form-in-tablei(RDRiR:RR9RRhR(R?R~((sD/usr/lib/python2.7/site-packages/pip/_vendor/html5lib/html5parser.pyRs cS`sQ|jjdi|dd6t|j_|jjdj|t|j_dS(Nu)unexpected-start-tag-implies-table-voodoounameuinBody(RDRiR\R:R@R>RpRN(R?R~((sD/usr/lib/python2.7/site-packages/pip/_vendor/html5lib/html5parser.pyRs cS`s|jjdddr|jj|jjdjdkro|jjdidd6|jjdjd6nx-|jjdjdkr|jjjqrW|jjj|jjn|jj st |jjdS(NutableRiuend-tag-too-early-namedugotNameu expectedName( R:RRRhRARDRiRRYRLRv(R?R~((sD/usr/lib/python2.7/site-packages/pip/_vendor/html5lib/html5parser.pyR=s   cS`s"|jjdi|dd6dS(Nuunexpected-end-taguname(RDRi(R?R~((sD/usr/lib/python2.7/site-packages/pip/_vendor/html5lib/html5parser.pyR>scS`sQ|jjdi|dd6t|j_|jjdj|t|j_dS(Nu'unexpected-end-tag-implies-table-voodoounameuinBody(RDRiR\R:R@R>RqRN(R?R~((sD/usr/lib/python2.7/site-packages/pip/_vendor/html5lib/html5parser.pyRs (R5R6RCR?RuRoRnRR7R8R9R:R;RR<RRRR=R>R((R(sD/usr/lib/python2.7/site-packages/pip/_vendor/html5lib/html5parser.pyR6Ys&               tInTableTextPhasec`sVeZfdZdZdZdZdZdZdZdZ RS(c`s)j|||d|_g|_dS(N(RCR9RtcharacterTokens(R?RDR:(R(sD/usr/lib/python2.7/site-packages/pip/_vendor/html5lib/html5parser.pyRCs cS`sdjg|jD]}|d^q}tg|D]}|tk^q3ritdd6|d6}|jjdj|n|r|jj|ng|_dS(Nuudatau CharactersutypeuinTable( tjoinRBRR RRDR>RR:(R?R"RR~((sD/usr/lib/python2.7/site-packages/pip/_vendor/html5lib/html5parser.pytflushCharacterss)%cS`s|j|j|j_|S(N(RDRRDRW(R?R~((sD/usr/lib/python2.7/site-packages/pip/_vendor/html5lib/html5parser.pyRrs cS`s|j|j|j_tS(N(RDRRDRWR\(R?((sD/usr/lib/python2.7/site-packages/pip/_vendor/html5lib/html5parser.pyRus cS`s(|ddkrdS|jj|dS(Nudatau(RBRt(R?R~((sD/usr/lib/python2.7/site-packages/pip/_vendor/html5lib/html5parser.pyRnscS`s|jj|dS(N(RBRt(R?R~((sD/usr/lib/python2.7/site-packages/pip/_vendor/html5lib/html5parser.pyRoscS`s|j|j|j_|S(N(RDRRDRW(R?R~((sD/usr/lib/python2.7/site-packages/pip/_vendor/html5lib/html5parser.pyRp s cS`s|j|j|j_|S(N(RDRRDRW(R?R~((sD/usr/lib/python2.7/site-packages/pip/_vendor/html5lib/html5parser.pyRqs ( R5R6RCRDRrRuRnRoRpRq((R(sD/usr/lib/python2.7/site-packages/pip/_vendor/html5lib/html5parser.pyRAs     tInCaptionPhasec`sheZfdZdZdZdZdZdZdZdZ dZ d Z RS( c `sj|||tjd|jfd |jfg|_|j|j_tjd|jfd |j fd|j fg|_ |j |j _dS(Nuhtmlucaptionucolucolgrouputbodyutdutfootuthutheadutrutableubody( ucaptionucolucolgrouputbodyutdutfootuthutheadutr( ubodyucolucolgroupuhtmlutbodyutdutfootuthutheadutr( RCR RRtstartTagTableElementRRRt endTagCaptionR=R>RR(R?RDR:(R(sD/usr/lib/python2.7/site-packages/pip/_vendor/html5lib/html5parser.pyRCs   cS`s|jjddd 
S(NucaptionRutable(R:R(R?((sD/usr/lib/python2.7/site-packages/pip/_vendor/html5lib/html5parser.pytignoreEndTagCaption+scS`s|jjdjdS(NuinBody(RDR>Ru(R?((sD/usr/lib/python2.7/site-packages/pip/_vendor/html5lib/html5parser.pyRu.scS`s|jjdj|S(NuinBody(RDR>Rn(R?R~((sD/usr/lib/python2.7/site-packages/pip/_vendor/html5lib/html5parser.pyRn1scS`s@|jj|j}|jjjtd|s<|SdS(Nucaption(RDRiRHRWRqR(R?R~t ignoreEndTag((sD/usr/lib/python2.7/site-packages/pip/_vendor/html5lib/html5parser.pyRF4s   cS`s|jjdj|S(NuinBody(RDR>Rp(R?R~((sD/usr/lib/python2.7/site-packages/pip/_vendor/html5lib/html5parser.pyR<scS`s|js|jj|jjdjdkrc|jjdidd6|jjdjd6nx-|jjdjdkr|jjjqfW|jjj|jj|jj d|j_ n|jj st |jjdS(Niucaptionu$expected-one-end-tag-but-got-anotherugotNameu expectedNameuinTable( RHR:RRhRARDRiRR3R>RWRLRv(R?R~((sD/usr/lib/python2.7/site-packages/pip/_vendor/html5lib/html5parser.pyRG?s     cS`s@|jj|j}|jjjtd|s<|SdS(Nucaption(RDRiRHRWRqR(R?R~RI((sD/usr/lib/python2.7/site-packages/pip/_vendor/html5lib/html5parser.pyR=Qs   cS`s"|jjdi|dd6dS(Nuunexpected-end-taguname(RDRi(R?R~((sD/usr/lib/python2.7/site-packages/pip/_vendor/html5lib/html5parser.pyR>XscS`s|jjdj|S(NuinBody(RDR>Rq(R?R~((sD/usr/lib/python2.7/site-packages/pip/_vendor/html5lib/html5parser.pyR[s( R5R6RCRHRuRnRFRRGR=R>R((R(sD/usr/lib/python2.7/site-packages/pip/_vendor/html5lib/html5parser.pyREs        tInColumnGroupPhasec`s_eZfdZdZdZdZdZdZdZdZ dZ RS( c`sj|||tjd|jfd|jfg|_|j|j_tjd|jfd|j fg|_ |j |j _dS(Nuhtmlucolucolgroup( RCR RRR9RRRtendTagColgroupt endTagColRR(R?RDR:(R(sD/usr/lib/python2.7/site-packages/pip/_vendor/html5lib/html5parser.pyRCas  cS`s|jjdjdkS(Niuhtml(R:RhRA(R?((sD/usr/lib/python2.7/site-packages/pip/_vendor/html5lib/html5parser.pytignoreEndTagColgrouppscS`s\|jjdjdkr/|jjs+tdS|j}|jtd|sXt SdS(Niuhtmlucolgroup( R:RhRARDRLRvRMRKRR\(R?RI((sD/usr/lib/python2.7/site-packages/pip/_vendor/html5lib/html5parser.pyRuss cS`s-|j}|jtd|s)|SdS(Nucolgroup(RMRKR(R?R~RI((sD/usr/lib/python2.7/site-packages/pip/_vendor/html5lib/html5parser.pyRn}s cS`s.|jj||jjjt|dRW(R?R~((sD/usr/lib/python2.7/site-packages/pip/_vendor/html5lib/html5parser.pyRKs  cS`s|jjdidd6dS(Nu no-end-tagucoluname(RDRi(R?R~((sD/usr/lib/python2.7/site-packages/pip/_vendor/html5lib/html5parser.pyRLscS`s-|j}|jtd|s)|SdS(Nucolgroup(RMRKR(R?R~RI((sD/usr/lib/python2.7/site-packages/pip/_vendor/html5lib/html5parser.pyRs ( R5R6RCRMRuRnR9RRKRLR((R(sD/usr/lib/python2.7/site-packages/pip/_vendor/html5lib/html5parser.pyRJ^s     tInTableBodyPhasec`seZfdZdZdZdZdZdZdZdZ dZ d Z d Z d Z d ZRS( c `sj|||tjd|jfd|jfd |jfd|jfg|_|j|j_ tjd|j fd |j fd|j fg|_ |j|j _ dS(Nuhtmlutrutduthucaptionucolucolgrouputbodyutfootutheadutableubody(utduth(ucaptionucolucolgrouputbodyutfootuthead(utbodyutfootuthead(ubodyucaptionucolucolgroupuhtmlutduthutr(RCR RRt startTagTrtstartTagTableCelltstartTagTableOtherRRRtendTagTableRowGroupR=R>RR(R?RDR:(R(sD/usr/lib/python2.7/site-packages/pip/_vendor/html5lib/html5parser.pyRCs     cS`sbx-|jjdjdkr/|jjjqW|jjdjdkr^|jjs^tndS(Niutbodyutfootutheaduhtml(utbodyutfootutheaduhtml(R:RhRARRDRLRv(R?((sD/usr/lib/python2.7/site-packages/pip/_vendor/html5lib/html5parser.pytclearStackToTableBodyContexts  cS`s|jjdjdS(NuinTable(RDR>Ru(R?((sD/usr/lib/python2.7/site-packages/pip/_vendor/html5lib/html5parser.pyRuscS`s|jjdj|S(NuinTable(RDR>Ro(R?R~((sD/usr/lib/python2.7/site-packages/pip/_vendor/html5lib/html5parser.pyRoscS`s|jjdj|S(NuinTable(RDR>Rn(R?R~((sD/usr/lib/python2.7/site-packages/pip/_vendor/html5lib/html5parser.pyRnscS`s4|j|jj||jjd|j_dS(NuinRow(RSR:RRDR>RW(R?R~((sD/usr/lib/python2.7/site-packages/pip/_vendor/html5lib/html5parser.pyROs 
cS`s8|jjdi|dd6|jtdd|S(Nuunexpected-cell-in-table-bodyunameutruStartTag(RDRiROR(R?R~((sD/usr/lib/python2.7/site-packages/pip/_vendor/html5lib/html5parser.pyRPs cS`s|jjdddsH|jjdddsH|jjdddrv|j|jt|jjdj|S|jjst |jj dS(NutbodyRutableutheadutfooti( R:RRSRRRRhRARDRLRvRi(R?R~((sD/usr/lib/python2.7/site-packages/pip/_vendor/html5lib/html5parser.pyRQs cS`s|jjdj|S(NuinTable(RDR>Rp(R?R~((sD/usr/lib/python2.7/site-packages/pip/_vendor/html5lib/html5parser.pyRscS`sq|jj|dddrO|j|jjj|jjd|j_n|jjdi|dd6dS(NunameRutableuinTableu unexpected-end-tag-in-table-body( R:RRSRhRRDR>RWRi(R?R~((sD/usr/lib/python2.7/site-packages/pip/_vendor/html5lib/html5parser.pyRRs   cS`s|jjdddsH|jjdddsH|jjdddrv|j|jt|jjdj|S|jjst |jj dS(NutbodyRutableutheadutfooti( R:RRSRRRRhRARDRLRvRi(R?R~((sD/usr/lib/python2.7/site-packages/pip/_vendor/html5lib/html5parser.pyR=s cS`s"|jjdi|dd6dS(Nu unexpected-end-tag-in-table-bodyuname(RDRi(R?R~((sD/usr/lib/python2.7/site-packages/pip/_vendor/html5lib/html5parser.pyR>s cS`s|jjdj|S(NuinTable(RDR>Rq(R?R~((sD/usr/lib/python2.7/site-packages/pip/_vendor/html5lib/html5parser.pyRs(R5R6RCRSRuRoRnRORPRQRRRR=R>R((R(sD/usr/lib/python2.7/site-packages/pip/_vendor/html5lib/html5parser.pyRNs        t InRowPhasec`seZfdZdZdZdZdZdZdZdZ dZ d Z d Z d Z d Zd ZRS(c `sj|||tjd|jfd |jfd|jfg|_|j|j_tjd |j fd |j fd|j fd|j fg|_ |j|j _dS(Nuhtmlutduthucaptionucolucolgrouputbodyutfootutheadutrutableubody(utduth(ucaptionucolucolgrouputbodyutfootutheadutr(utbodyutfootuthead(ubodyucaptionucolucolgroupuhtmlutduth(RCR RRRPRQRRRtendTagTrR=RRR>RR(R?RDR:(R(sD/usr/lib/python2.7/site-packages/pip/_vendor/html5lib/html5parser.pyRCs     cS`s[xT|jjdjdkrV|jjdi|jjdjd6|jjjqWdS(Niutruhtmlu'unexpected-implied-end-tag-in-table-rowuname(utruhtml(R:RhRARDRiR(R?((sD/usr/lib/python2.7/site-packages/pip/_vendor/html5lib/html5parser.pytclearStackToTableRowContexts cS`s|jjddd S(NutrRutable(R:R(R?((sD/usr/lib/python2.7/site-packages/pip/_vendor/html5lib/html5parser.pytignoreEndTagTrscS`s|jjdjdS(NuinTable(RDR>Ru(R?((sD/usr/lib/python2.7/site-packages/pip/_vendor/html5lib/html5parser.pyRu"scS`s|jjdj|S(NuinTable(RDR>Ro(R?R~((sD/usr/lib/python2.7/site-packages/pip/_vendor/html5lib/html5parser.pyRo%scS`s|jjdj|S(NuinTable(RDR>Rn(R?R~((sD/usr/lib/python2.7/site-packages/pip/_vendor/html5lib/html5parser.pyRn(scS`sG|j|jj||jjd|j_|jjjtdS(NuinCell( RVR:RRDR>RWRRtR (R?R~((sD/usr/lib/python2.7/site-packages/pip/_vendor/html5lib/html5parser.pyRP+s cS`s-|j}|jtd|s)|SdS(Nutr(RWRUR(R?R~RI((sD/usr/lib/python2.7/site-packages/pip/_vendor/html5lib/html5parser.pyRQ1s cS`s|jjdj|S(NuinTable(RDR>Rp(R?R~((sD/usr/lib/python2.7/site-packages/pip/_vendor/html5lib/html5parser.pyR8scS`sb|js?|j|jjj|jjd|j_n|jjsQt |jj dS(Nu inTableBody( RWRVR:RhRRDR>RWRLRvRi(R?R~((sD/usr/lib/python2.7/site-packages/pip/_vendor/html5lib/html5parser.pyRU;s   cS`s-|j}|jtd|s)|SdS(Nutr(RWRUR(R?R~RI((sD/usr/lib/python2.7/site-packages/pip/_vendor/html5lib/html5parser.pyR=Es cS`sD|jj|dddr3|jtd|S|jjdS(NunameRutableutr(R:RRURRDRi(R?R~((sD/usr/lib/python2.7/site-packages/pip/_vendor/html5lib/html5parser.pyRRMscS`s"|jjdi|dd6dS(Nuunexpected-end-tag-in-table-rowuname(RDRi(R?R~((sD/usr/lib/python2.7/site-packages/pip/_vendor/html5lib/html5parser.pyR>Ts cS`s|jjdj|S(NuinTable(RDR>Rq(R?R~((sD/usr/lib/python2.7/site-packages/pip/_vendor/html5lib/html5parser.pyRXs(R5R6RCRVRWRuRoRnRPRQRRUR=RRR>R((R(sD/usr/lib/python2.7/site-packages/pip/_vendor/html5lib/html5parser.pyRTs           t InCellPhasec`sheZfdZdZdZdZdZdZdZdZ dZ d Z RS( c `sj|||tjd|jfd |jfg|_|j|j_tjd|jfd|j fd|j fg|_ |j |j 
_dS(Nuhtmlucaptionucolucolgrouputbodyutdutfootuthutheadutrubodyutable( ucaptionucolucolgrouputbodyutdutfootuthutheadutr(utduth(ubodyucaptionucolucolgroupuhtml(utableutbodyutfootutheadutr( RCR RRRQRRRtendTagTableCellR>t endTagImplyRR(R?RDR:(R(sD/usr/lib/python2.7/site-packages/pip/_vendor/html5lib/html5parser.pyRC]s   cS`s`|jjdddr.|jtdn.|jjdddr\|jtdndS(NutdRutableuth(R:RRYR(R?((sD/usr/lib/python2.7/site-packages/pip/_vendor/html5lib/html5parser.pyt closeCellnscS`s|jjdjdS(NuinBody(RDR>Ru(R?((sD/usr/lib/python2.7/site-packages/pip/_vendor/html5lib/html5parser.pyRuuscS`s|jjdj|S(NuinBody(RDR>Rn(R?R~((sD/usr/lib/python2.7/site-packages/pip/_vendor/html5lib/html5parser.pyRnxscS`sa|jjddds0|jjdddr>|j|S|jjsPt|jjdS(NutdRutableuth(R:RR[RDRLRvRi(R?R~((sD/usr/lib/python2.7/site-packages/pip/_vendor/html5lib/html5parser.pyRQ{s  cS`s|jjdj|S(NuinBody(RDR>Rp(R?R~((sD/usr/lib/python2.7/site-packages/pip/_vendor/html5lib/html5parser.pyRscS`s|jj|dddr|jj|d|jjdj|dkr|jjdi|dd6xFtr|jjj}|j|dkrnPqnqnWn|jjj|jj |jj d|j_ n|jjdi|dd6dS(NunameRutableiuunexpected-cell-end-taguinRowuunexpected-end-tag( R:RRRhRARDRiR\RR3R>RW(R?R~R((sD/usr/lib/python2.7/site-packages/pip/_vendor/html5lib/html5parser.pyRYs    cS`s"|jjdi|dd6dS(Nuunexpected-end-taguname(RDRi(R?R~((sD/usr/lib/python2.7/site-packages/pip/_vendor/html5lib/html5parser.pyR>scS`s;|jj|dddr*|j|S|jjdS(NunameRutable(R:RR[RDRi(R?R~((sD/usr/lib/python2.7/site-packages/pip/_vendor/html5lib/html5parser.pyRZs cS`s|jjdj|S(NuinBody(RDR>Rq(R?R~((sD/usr/lib/python2.7/site-packages/pip/_vendor/html5lib/html5parser.pyRs( R5R6RCR[RuRnRQRRYR>RZR((R(sD/usr/lib/python2.7/site-packages/pip/_vendor/html5lib/html5parser.pyRX[s       t InSelectPhasec`seZfdZdZdZdZdZdZdZdZ dZ d Z d Z d Z d ZRS( c`sj|||tjd|jfd|jfd|jfd|jfd |jfd|jfg|_ |j |j _ tjd|j fd|j fd|jfg|_|j|j_ dS( Nuhtmluoptionuoptgroupuselectuinputukeygenutextareauscript(uinputukeygenutextarea(RCR RRtstartTagOptiontstartTagOptgroupRRRRRRt endTagOptiontendTagOptgroupt endTagSelectRR(R?RDR:(R(sD/usr/lib/python2.7/site-packages/pip/_vendor/html5lib/html5parser.pyRCs       cS`sB|jjdjdkr,|jjdn|jjs>tdS(Niuhtmlu eof-in-select(R:RhRARDRiRLRv(R?((sD/usr/lib/python2.7/site-packages/pip/_vendor/html5lib/html5parser.pyRuscS`s,|ddkrdS|jj|ddS(Nudatau(R:R(R?R~((sD/usr/lib/python2.7/site-packages/pip/_vendor/html5lib/html5parser.pyRnscS`s@|jjdjdkr,|jjjn|jj|dS(Niuoption(R:RhRARR(R?R~((sD/usr/lib/python2.7/site-packages/pip/_vendor/html5lib/html5parser.pyR]scS`sl|jjdjdkr,|jjjn|jjdjdkrX|jjjn|jj|dS(Niuoptionuoptgroup(R:RhRARR(R?R~((sD/usr/lib/python2.7/site-packages/pip/_vendor/html5lib/html5parser.pyR^s cS`s'|jjd|jtddS(Nuunexpected-select-in-selectuselect(RDRiRaR(R?R~((sD/usr/lib/python2.7/site-packages/pip/_vendor/html5lib/html5parser.pyRscS`sU|jjd|jjdddr?|jtd|S|jjsQtdS(Nuunexpected-input-in-selectuselectR(RDRiR:RRaRRLRv(R?R~((sD/usr/lib/python2.7/site-packages/pip/_vendor/html5lib/html5parser.pyRs cS`s|jjdj|S(NuinHead(RDR>Rp(R?R~((sD/usr/lib/python2.7/site-packages/pip/_vendor/html5lib/html5parser.pyRscS`s"|jjdi|dd6dS(Nuunexpected-start-tag-in-selectuname(RDRi(R?R~((sD/usr/lib/python2.7/site-packages/pip/_vendor/html5lib/html5parser.pyRs cS`sJ|jjdjdkr,|jjjn|jjdidd6dS(Niuoptionuunexpected-end-tag-in-selectuname(R:RhRARRDRi(R?R~((sD/usr/lib/python2.7/site-packages/pip/_vendor/html5lib/html5parser.pyR_s cS`s|jjdjdkrE|jjdjdkrE|jjjn|jjdjdkrq|jjjn|jjdidd6dS(Niuoptioniuoptgroupuunexpected-end-tag-in-selectuname(R:RhRARRDRi(R?R~((sD/usr/lib/python2.7/site-packages/pip/_vendor/html5lib/html5parser.pyR`s cS`s|jjdddrb|jjj}x%|jdkrQ|jjj}q-W|jjn|jjstt|jj dS(NuselectR( 
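For orientation before the vendored source that follows: a minimal usage sketch of the module-level parse/parseFragment entry points defined in html5parser.py below. This is only an illustrative assumption about how the vendored copy is reached; the import path pip._vendor.html5lib, the sample markup, and the variable names are not part of the file itself.

# Minimal sketch (assumes pip 9.x exposes its vendored copy as pip._vendor.html5lib).
from pip._vendor import html5lib

# parse() builds a full document tree using the default "etree" treebuilder.
document = html5lib.parse("<p>Hello <b>world")

# parseFragment() parses markup in the context of a container element ("div" by default).
fragment = html5lib.parseFragment("<td>cell", container="tr")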
site-packages/pip/_vendor/html5lib/html5parser.py:

from __future__ import absolute_import, division, unicode_literals
from pip._vendor.six import with_metaclass, viewkeys, PY3

import types

try:
    from collections import OrderedDict
except ImportError:
    from pip._vendor.ordereddict import OrderedDict

from . import _inputstream
from . import _tokenizer

from . import treebuilders
from .treebuilders.base import Marker

from . import _utils
from .constants import (
    spaceCharacters, asciiUpper2Lower,
    specialElements, headingElements, cdataElements, rcdataElements,
    tokenTypes, tagTokenTypes,
    namespaces,
    htmlIntegrationPointElements, mathmlTextIntegrationPointElements,
    adjustForeignAttributes as adjustForeignAttributesMap,
    adjustMathMLAttributes, adjustSVGAttributes,
    E,
    ReparseException
)


def parse(doc, treebuilder="etree", namespaceHTMLElements=True, **kwargs):
    """Parse a string or file-like object into a tree"""
    tb = treebuilders.getTreeBuilder(treebuilder)
    p = HTMLParser(tb, namespaceHTMLElements=namespaceHTMLElements)
    return p.parse(doc, **kwargs)


def parseFragment(doc, container="div", treebuilder="etree",
                  namespaceHTMLElements=True, **kwargs):
    tb = treebuilders.getTreeBuilder(treebuilder)
    p = HTMLParser(tb, namespaceHTMLElements=namespaceHTMLElements)
    return p.parseFragment(doc, container=container, **kwargs)


def method_decorator_metaclass(function):
    class Decorated(type):
        def __new__(meta, classname, bases, classDict):
            for attributeName, attribute in classDict.items():
                if isinstance(attribute, types.FunctionType):
                    attribute = function(attribute)

                classDict[attributeName] = attribute
            return type.__new__(meta, classname, bases, classDict)
    return Decorated


class HTMLParser(object):
    """HTML parser. Generates a tree structure from a stream of (possibly
    malformed) HTML"""

    def __init__(self, tree=None, strict=False, namespaceHTMLElements=True, debug=False):
        """
        strict - raise an exception when a parse error is encountered

        tree - a treebuilder class controlling the type of tree that will be
        returned.
        Built in treebuilders can be accessed through
        html5lib.treebuilders.getTreeBuilder(treeType)
        """

        # Raise an exception on the first error encountered
        self.strict = strict

        if tree is None:
            tree = treebuilders.getTreeBuilder("etree")
        self.tree = tree(namespaceHTMLElements)
        self.errors = []

        self.phases = dict([(name, cls(self, self.tree)) for name, cls in
                            getPhases(debug).items()])

    def _parse(self, stream, innerHTML=False, container="div", scripting=False, **kwargs):
        self.innerHTMLMode = innerHTML
        self.container = container
        self.scripting = scripting
        self.tokenizer = _tokenizer.HTMLTokenizer(stream, parser=self, **kwargs)
        self.reset()

        try:
            self.mainLoop()
        except ReparseException:
            self.reset()
            self.mainLoop()

    def reset(self):
        self.tree.reset()
        self.firstStartTag = False
        self.errors = []
        self.log = []  # only used with debug mode
        # "quirks" / "limited quirks" / "no quirks"
        self.compatMode = "no quirks"

        if self.innerHTMLMode:
            self.innerHTML = self.container.lower()

            if self.innerHTML in cdataElements:
                self.tokenizer.state = self.tokenizer.rcdataState
            elif self.innerHTML in rcdataElements:
                self.tokenizer.state = self.tokenizer.rawtextState
            elif self.innerHTML == 'plaintext':
                self.tokenizer.state = self.tokenizer.plaintextState
            else:
                # state already is data state
                # self.tokenizer.state = self.tokenizer.dataState
                pass
            self.phase = self.phases["beforeHtml"]
            self.phase.insertHtmlElement()
            self.resetInsertionMode()
        else:
            self.innerHTML = False  # pylint:disable=redefined-variable-type
            self.phase = self.phases["initial"]

        self.lastPhase = None

        self.beforeRCDataPhase = None

        self.framesetOK = True

    @property
    def documentEncoding(self):
        """The name of the character encoding
        that was used to decode the input stream,
        or :obj:`None` if that is not determined yet.
        """
        if not hasattr(self, 'tokenizer'):
            return None
        return self.tokenizer.stream.charEncoding[0].name

    def isHTMLIntegrationPoint(self, element):
        if (element.name == "annotation-xml" and
                element.namespace == namespaces["mathml"]):
            return ("encoding" in element.attributes and
                    element.attributes["encoding"].translate(
                        asciiUpper2Lower) in
                    ("text/html", "application/xhtml+xml"))
        else:
            return (element.namespace, element.name) in htmlIntegrationPointElements

    def isMathMLTextIntegrationPoint(self, element):
        return (element.namespace, element.name) in mathmlTextIntegrationPointElements

    def mainLoop(self):
        CharactersToken = tokenTypes["Characters"]
        SpaceCharactersToken = tokenTypes["SpaceCharacters"]
        StartTagToken = tokenTypes["StartTag"]
        EndTagToken = tokenTypes["EndTag"]
        CommentToken = tokenTypes["Comment"]
        DoctypeToken = tokenTypes["Doctype"]
        ParseErrorToken = tokenTypes["ParseError"]

        for token in self.normalizedTokens():
            prev_token = None
            new_token = token
            while new_token is not None:
                prev_token = new_token
                currentNode = self.tree.openElements[-1] if self.tree.openElements else None
                currentNodeNamespace = currentNode.namespace if currentNode else None
                currentNodeName = currentNode.name if currentNode else None

                type = new_token["type"]

                if type == ParseErrorToken:
                    self.parseError(new_token["data"], new_token.get("datavars", {}))
                    new_token = None
                else:
                    if (len(self.tree.openElements) == 0 or
                        currentNodeNamespace == self.tree.defaultNamespace or
                        (self.isMathMLTextIntegrationPoint(currentNode) and
                         ((type == StartTagToken and
                           token["name"] not in frozenset(["mglyph", "malignmark"])) or
                          type in (CharactersToken, SpaceCharactersToken))) or
                        (currentNodeNamespace == namespaces["mathml"] and
                         currentNodeName == "annotation-xml" and
                         type == StartTagToken and
                         token["name"]
                         == "svg") or
                        (self.isHTMLIntegrationPoint(currentNode) and
                         type in (StartTagToken, CharactersToken, SpaceCharactersToken))):
                        phase = self.phase
                    else:
                        phase = self.phases["inForeignContent"]

                    if type == CharactersToken:
                        new_token = phase.processCharacters(new_token)
                    elif type == SpaceCharactersToken:
                        new_token = phase.processSpaceCharacters(new_token)
                    elif type == StartTagToken:
                        new_token = phase.processStartTag(new_token)
                    elif type == EndTagToken:
                        new_token = phase.processEndTag(new_token)
                    elif type == CommentToken:
                        new_token = phase.processComment(new_token)
                    elif type == DoctypeToken:
                        new_token = phase.processDoctype(new_token)

                if (type == StartTagToken and prev_token["selfClosing"] and
                        not prev_token["selfClosingAcknowledged"]):
                    self.parseError("non-void-element-with-trailing-solidus",
                                    {"name": prev_token["name"]})

        # When the loop finishes it's EOF
        reprocess = True
        phases = []
        while reprocess:
            phases.append(self.phase)
            reprocess = self.phase.processEOF()
            if reprocess:
                assert self.phase not in phases

    def normalizedTokens(self):
        for token in self.tokenizer:
            yield self.normalizeToken(token)

    def parse(self, stream, *args, **kwargs):
        """Parse a HTML document into a well-formed tree

        stream - a filelike object or string containing the HTML to be parsed

        The optional encoding parameter must be a string that indicates
        the encoding.  If specified, that encoding will be used,
        regardless of any BOM or later declaration (such as in a meta
        element)

        scripting - treat noscript elements as if javascript was turned on
        """
        self._parse(stream, False, None, *args, **kwargs)
        return self.tree.getDocument()

    def parseFragment(self, stream, *args, **kwargs):
        """Parse a HTML fragment into a well-formed tree fragment

        container - name of the element we're setting the innerHTML property
        if set to None, default to 'div'

        stream - a filelike object or string containing the HTML to be parsed

        The optional encoding parameter must be a string that indicates
        the encoding.  If specified, that encoding will be used,
        regardless of any BOM or later declaration (such as in a meta
        element)

        scripting - treat noscript elements as if javascript was turned on
        """
        self._parse(stream, True, *args, **kwargs)
        return self.tree.getFragment()

    def parseError(self, errorcode="XXX-undefined-error", datavars=None):
        # XXX The idea is to make errorcode mandatory.
        if datavars is None:
            datavars = {}
        self.errors.append((self.tokenizer.stream.position(), errorcode, datavars))
        if self.strict:
            raise ParseError(E[errorcode] % datavars)

    def normalizeToken(self, token):
        """ HTML5 specific normalizations to the token stream """

        if token["type"] == tokenTypes["StartTag"]:
            raw = token["data"]
            token["data"] = OrderedDict(raw)
            if len(raw) > len(token["data"]):
                # we had some duplicated attribute, fix so first wins
                token["data"].update(raw[::-1])

        return token

    def adjustMathMLAttributes(self, token):
        adjust_attributes(token, adjustMathMLAttributes)

    def adjustSVGAttributes(self, token):
        adjust_attributes(token, adjustSVGAttributes)

    def adjustForeignAttributes(self, token):
        adjust_attributes(token, adjustForeignAttributesMap)

    def reparseTokenNormal(self, token):
        # pylint:disable=unused-argument
        self.parser.phase()

    def resetInsertionMode(self):
        # The name of this method is mostly historical. (It's also used in the
        # specification.)
        last = False
        newModes = {
            "select": "inSelect",
            "td": "inCell",
            "th": "inCell",
            "tr": "inRow",
            "tbody": "inTableBody",
            "thead": "inTableBody",
            "tfoot": "inTableBody",
            "caption": "inCaption",
            "colgroup": "inColumnGroup",
            "table": "inTable",
            "head": "inBody",
            "body": "inBody",
            "frameset": "inFrameset",
            "html": "beforeHead"
        }
        for node in self.tree.openElements[::-1]:
            nodeName = node.name
            new_phase = None
            if node == self.tree.openElements[0]:
                assert self.innerHTML
                last = True
                nodeName = self.innerHTML
            # Check for conditions that should only happen in the innerHTML
            # case
            if nodeName in ("select", "colgroup", "head", "html"):
                assert self.innerHTML

            if not last and node.namespace != self.tree.defaultNamespace:
                continue

            if nodeName in newModes:
                new_phase = self.phases[newModes[nodeName]]
                break
            elif last:
                new_phase = self.phases["inBody"]
                break

        self.phase = new_phase

    def parseRCDataRawtext(self, token, contentType):
        """Generic RCDATA/RAWTEXT Parsing algorithm
        contentType - RCDATA or RAWTEXT
        """
        assert contentType in ("RAWTEXT", "RCDATA")

        self.tree.insertElement(token)

        if contentType == "RAWTEXT":
            self.tokenizer.state = self.tokenizer.rawtextState
        else:
            self.tokenizer.state = self.tokenizer.rcdataState

        self.originalPhase = self.phase

        self.phase = self.phases["text"]


@_utils.memoize
def getPhases(debug):
    def log(function):
        """Logger that records which phase processes each token"""
        type_names = dict((value, key) for key, value in
                          tokenTypes.items())

        def wrapped(self, *args, **kwargs):
            if function.__name__.startswith("process") and len(args) > 0:
                token = args[0]
                try:
                    info = {"type": type_names[token['type']]}
                except:
                    raise
                if token['type'] in tagTokenTypes:
                    info["name"] = token['name']

                self.parser.log.append((self.parser.tokenizer.state.__name__,
                                        self.parser.phase.__class__.__name__,
                                        self.__class__.__name__,
                                        function.__name__,
                                        info))
                return function(self, *args, **kwargs)
            else:
                return function(self, *args, **kwargs)
        return wrapped

    def getMetaclass(use_metaclass, metaclass_func):
        if use_metaclass:
            return method_decorator_metaclass(metaclass_func)
        else:
            return type

    # pylint:disable=unused-argument
    class Phase(with_metaclass(getMetaclass(debug, log))):
        """Base class for helper object that implements each phase of processing
        """

        def __init__(self, parser, tree):
            self.parser = parser
            self.tree = tree

        def processEOF(self):
            raise NotImplementedError

        def processComment(self, token):
            # For most phases the following is correct. Where it's not it will be
            # overridden.
            self.tree.insertComment(token, self.tree.openElements[-1])

        def processDoctype(self, token):
            self.parser.parseError("unexpected-doctype")

        def processCharacters(self, token):
            self.tree.insertText(token["data"])

        def processSpaceCharacters(self, token):
            self.tree.insertText(token["data"])

        def processStartTag(self, token):
            return self.startTagHandler[token["name"]](token)

        def startTagHtml(self, token):
            if not self.parser.firstStartTag and token["name"] == "html":
                self.parser.parseError("non-html-root")
            # XXX Need a check here to see if the first start tag token emitted is
            # this token... If it's not, invoke self.parser.parseError().
            for attr, value in token["data"].items():
                if attr not in self.tree.openElements[0].attributes:
                    self.tree.openElements[0].attributes[attr] = value
            self.parser.firstStartTag = False

        def processEndTag(self, token):
            return self.endTagHandler[token["name"]](token)

    class InitialPhase(Phase):
        def processSpaceCharacters(self, token):
            pass

        def processComment(self, token):
            self.tree.insertComment(token, self.tree.document)

        def processDoctype(self, token):
            name = token["name"]
            publicId = token["publicId"]
            systemId = token["systemId"]
            correct = token["correct"]

            if (name != "html" or publicId is not None or
                    systemId is not None and systemId != "about:legacy-compat"):
                self.parser.parseError("unknown-doctype")

            if publicId is None:
                publicId = ""

            self.tree.insertDoctype(token)

            if publicId != "":
                publicId = publicId.translate(asciiUpper2Lower)

            if (not correct or token["name"] != "html" or
                    publicId.startswith(
                        ("+//silmaril//dtd html pro v0r11 19970101//",
                         "-//advasoft ltd//dtd html 3.0 aswedit + extensions//",
                         "-//as//dtd html 3.0 aswedit + extensions//",
                         "-//ietf//dtd html 2.0 level 1//",
                         "-//ietf//dtd html 2.0 level 2//",
                         "-//ietf//dtd html 2.0 strict level 1//",
                         "-//ietf//dtd html 2.0 strict level 2//",
                         "-//ietf//dtd html 2.0 strict//",
                         "-//ietf//dtd html 2.0//",
                         "-//ietf//dtd html 2.1e//",
                         "-//ietf//dtd html 3.0//",
                         "-//ietf//dtd html 3.2 final//",
                         "-//ietf//dtd html 3.2//",
                         "-//ietf//dtd html 3//",
                         "-//ietf//dtd html level 0//",
                         "-//ietf//dtd html level 1//",
                         "-//ietf//dtd html level 2//",
                         "-//ietf//dtd html level 3//",
                         "-//ietf//dtd html strict level 0//",
                         "-//ietf//dtd html strict level 1//",
                         "-//ietf//dtd html strict level 2//",
                         "-//ietf//dtd html strict level 3//",
                         "-//ietf//dtd html strict//",
                         "-//ietf//dtd html//",
                         "-//metrius//dtd metrius presentational//",
                         "-//microsoft//dtd internet explorer 2.0 html strict//",
                         "-//microsoft//dtd internet explorer 2.0 html//",
                         "-//microsoft//dtd internet explorer 2.0 tables//",
                         "-//microsoft//dtd internet explorer 3.0 html strict//",
                         "-//microsoft//dtd internet explorer 3.0 html//",
                         "-//microsoft//dtd internet explorer 3.0 tables//",
                         "-//netscape comm. corp.//dtd html//",
                         "-//netscape comm. corp.//dtd strict html//",
                         "-//o'reilly and associates//dtd html 2.0//",
                         "-//o'reilly and associates//dtd html extended 1.0//",
                         "-//o'reilly and associates//dtd html extended relaxed 1.0//",
                         "-//softquad software//dtd hotmetal pro 6.0::19990601::extensions to html 4.0//",
                         "-//softquad//dtd hotmetal pro 4.0::19971010::extensions to html 4.0//",
                         "-//spyglass//dtd html 2.0 extended//",
                         "-//sq//dtd html 2.0 hotmetal + extensions//",
                         "-//sun microsystems corp.//dtd hotjava html//",
                         "-//sun microsystems corp.//dtd hotjava strict html//",
                         "-//w3c//dtd html 3 1995-03-24//",
                         "-//w3c//dtd html 3.2 draft//",
                         "-//w3c//dtd html 3.2 final//",
                         "-//w3c//dtd html 3.2//",
                         "-//w3c//dtd html 3.2s draft//",
                         "-//w3c//dtd html 4.0 frameset//",
                         "-//w3c//dtd html 4.0 transitional//",
                         "-//w3c//dtd html experimental 19960712//",
                         "-//w3c//dtd html experimental 970421//",
                         "-//w3c//dtd w3 html//",
                         "-//w3o//dtd w3 html 3.0//",
                         "-//webtechs//dtd mozilla html 2.0//",
                         "-//webtechs//dtd mozilla html//")) or
                    publicId in ("-//w3o//dtd w3 html strict 3.0//en//",
                                 "-/w3c/dtd html 4.0 transitional/en",
                                 "html") or
                    publicId.startswith(
                        ("-//w3c//dtd html 4.01 frameset//",
                         "-//w3c//dtd html 4.01 transitional//")) and
                    systemId is None or
                    systemId and systemId.lower() == "http://www.ibm.com/data/dtd/v11/ibmxhtml1-transitional.dtd"):
                self.parser.compatMode = "quirks"
            elif (publicId.startswith(
                    ("-//w3c//dtd xhtml 1.0 frameset//",
                     "-//w3c//dtd xhtml 1.0 transitional//")) or
                  publicId.startswith(
                      ("-//w3c//dtd html 4.01 frameset//",
                       "-//w3c//dtd html 4.01 transitional//")) and
                  systemId is not None):
                self.parser.compatMode = "limited quirks"

            self.parser.phase = self.parser.phases["beforeHtml"]

        def anythingElse(self):
            self.parser.compatMode = "quirks"
            self.parser.phase = self.parser.phases["beforeHtml"]

        def processCharacters(self, token):
            self.parser.parseError("expected-doctype-but-got-chars")
            self.anythingElse()
            return token

        def processStartTag(self, token):
            self.parser.parseError("expected-doctype-but-got-start-tag",
                                   {"name": token["name"]})
            self.anythingElse()
            return token

        def processEndTag(self, token):
            self.parser.parseError("expected-doctype-but-got-end-tag",
                                   {"name": token["name"]})
            self.anythingElse()
            return token

        def processEOF(self):
            self.parser.parseError("expected-doctype-but-got-eof")
            self.anythingElse()
            return True

    class BeforeHtmlPhase(Phase):
        # helper methods
        def insertHtmlElement(self):
            self.tree.insertRoot(impliedTagToken("html", "StartTag"))
            self.parser.phase = self.parser.phases["beforeHead"]

        # other
        def processEOF(self):
            self.insertHtmlElement()
            return True

        def processComment(self, token):
            self.tree.insertComment(token, self.tree.document)

        def processSpaceCharacters(self, token):
            pass

        def processCharacters(self, token):
            self.insertHtmlElement()
            return token

        def processStartTag(self, token):
            if token["name"] == "html":
                self.parser.firstStartTag = True
            self.insertHtmlElement()
            return token

        def processEndTag(self, token):
            if token["name"] not in ("head", "body", "html", "br"):
                self.parser.parseError("unexpected-end-tag-before-html",
                                       {"name": token["name"]})
            else:
                self.insertHtmlElement()
                return token

    class BeforeHeadPhase(Phase):
        def __init__(self, parser, tree):
            Phase.__init__(self, parser, tree)

            self.startTagHandler = _utils.MethodDispatcher([
                ("html", self.startTagHtml),
                ("head", self.startTagHead)
            ])
            self.startTagHandler.default = self.startTagOther

            self.endTagHandler = _utils.MethodDispatcher([
                (("head", "body", "html", "br"), self.endTagImplyHead)
            ])
            self.endTagHandler.default = self.endTagOther
        def processEOF(self):
            self.startTagHead(impliedTagToken("head", "StartTag"))
            return True

        def processSpaceCharacters(self, token):
            pass

        def processCharacters(self, token):
            self.startTagHead(impliedTagToken("head", "StartTag"))
            return token

        def startTagHtml(self, token):
            return self.parser.phases["inBody"].processStartTag(token)

        def startTagHead(self, token):
            self.tree.insertElement(token)
            self.tree.headPointer = self.tree.openElements[-1]
            self.parser.phase = self.parser.phases["inHead"]

        def startTagOther(self, token):
            self.startTagHead(impliedTagToken("head", "StartTag"))
            return token

        def endTagImplyHead(self, token):
            self.startTagHead(impliedTagToken("head", "StartTag"))
            return token

        def endTagOther(self, token):
            self.parser.parseError("end-tag-after-implied-root",
                                   {"name": token["name"]})

    class InHeadPhase(Phase):
        def __init__(self, parser, tree):
            Phase.__init__(self, parser, tree)

            self.startTagHandler = _utils.MethodDispatcher([
                ("html", self.startTagHtml),
                ("title", self.startTagTitle),
                (("noframes", "style"), self.startTagNoFramesStyle),
                ("noscript", self.startTagNoscript),
                ("script", self.startTagScript),
                (("base", "basefont", "bgsound", "command", "link"),
                 self.startTagBaseLinkCommand),
                ("meta", self.startTagMeta),
                ("head", self.startTagHead)
            ])
            self.startTagHandler.default = self.startTagOther

            self.endTagHandler = _utils.MethodDispatcher([
                ("head", self.endTagHead),
                (("br", "html", "body"), self.endTagHtmlBodyBr)
            ])
            self.endTagHandler.default = self.endTagOther

        # the real thing
        def processEOF(self):
            self.anythingElse()
            return True

        def processCharacters(self, token):
            self.anythingElse()
            return token

        def startTagHtml(self, token):
            return self.parser.phases["inBody"].processStartTag(token)

        def startTagHead(self, token):
            self.parser.parseError("two-heads-are-not-better-than-one")

        def startTagBaseLinkCommand(self, token):
            self.tree.insertElement(token)
            self.tree.openElements.pop()
            token["selfClosingAcknowledged"] = True

        def startTagMeta(self, token):
            self.tree.insertElement(token)
            self.tree.openElements.pop()
            token["selfClosingAcknowledged"] = True

            attributes = token["data"]
            if self.parser.tokenizer.stream.charEncoding[1] == "tentative":
                if "charset" in attributes:
                    self.parser.tokenizer.stream.changeEncoding(attributes["charset"])
                elif ("content" in attributes and
                      "http-equiv" in attributes and
                      attributes["http-equiv"].lower() == "content-type"):
                    # Encoding it as UTF-8 here is a hack, as really we should pass
                    # the abstract Unicode string, and just use the
                    # ContentAttrParser on that, but using UTF-8 allows all chars
                    # to be encoded and as a ASCII-superset works.
                    data = _inputstream.EncodingBytes(attributes["content"].encode("utf-8"))
                    parser = _inputstream.ContentAttrParser(data)
                    codec = parser.parse()
                    self.parser.tokenizer.stream.changeEncoding(codec)

        def startTagTitle(self, token):
            self.parser.parseRCDataRawtext(token, "RCDATA")

        def startTagNoFramesStyle(self, token):
            # Need to decide whether to implement the scripting-disabled case
            self.parser.parseRCDataRawtext(token, "RAWTEXT")

        def startTagNoscript(self, token):
            if self.parser.scripting:
                self.parser.parseRCDataRawtext(token, "RAWTEXT")
            else:
                self.tree.insertElement(token)
                self.parser.phase = self.parser.phases["inHeadNoscript"]

        def startTagScript(self, token):
            self.tree.insertElement(token)
            self.parser.tokenizer.state = self.parser.tokenizer.scriptDataState
            self.parser.originalPhase = self.parser.phase
            self.parser.phase = self.parser.phases["text"]

        def startTagOther(self, token):
            self.anythingElse()
            return token

        def endTagHead(self, token):
            node = self.parser.tree.openElements.pop()
            assert node.name == "head", "Expected head got %s" % node.name
            self.parser.phase = self.parser.phases["afterHead"]

        def endTagHtmlBodyBr(self, token):
            self.anythingElse()
            return token

        def endTagOther(self, token):
            self.parser.parseError("unexpected-end-tag", {"name": token["name"]})

        def anythingElse(self):
            self.endTagHead(impliedTagToken("head"))

    class InHeadNoscriptPhase(Phase):
        def __init__(self, parser, tree):
            Phase.__init__(self, parser, tree)

            self.startTagHandler = _utils.MethodDispatcher([
                ("html", self.startTagHtml),
                (("basefont", "bgsound", "link", "meta", "noframes", "style"),
                 self.startTagBaseLinkCommand),
                (("head", "noscript"), self.startTagHeadNoscript),
            ])
            self.startTagHandler.default = self.startTagOther

            self.endTagHandler = _utils.MethodDispatcher([
                ("noscript", self.endTagNoscript),
                ("br", self.endTagBr),
            ])
            self.endTagHandler.default = self.endTagOther

        def processEOF(self):
            self.parser.parseError("eof-in-head-noscript")
            self.anythingElse()
            return True

        def processComment(self, token):
            return self.parser.phases["inHead"].processComment(token)

        def processCharacters(self, token):
            self.parser.parseError("char-in-head-noscript")
            self.anythingElse()
            return token

        def processSpaceCharacters(self, token):
            return self.parser.phases["inHead"].processSpaceCharacters(token)

        def startTagHtml(self, token):
            return self.parser.phases["inBody"].processStartTag(token)

        def startTagBaseLinkCommand(self, token):
            return self.parser.phases["inHead"].processStartTag(token)

        def startTagHeadNoscript(self, token):
            self.parser.parseError("unexpected-start-tag", {"name": token["name"]})

        def startTagOther(self, token):
            self.parser.parseError("unexpected-inhead-noscript-tag",
                                   {"name": token["name"]})
            self.anythingElse()
            return token

        def endTagNoscript(self, token):
            node = self.parser.tree.openElements.pop()
            assert node.name == "noscript", "Expected noscript got %s" % node.name
            self.parser.phase = self.parser.phases["inHead"]

        def endTagBr(self, token):
            self.parser.parseError("unexpected-inhead-noscript-tag",
                                   {"name": token["name"]})
            self.anythingElse()
            return token

        def endTagOther(self, token):
            self.parser.parseError("unexpected-end-tag", {"name": token["name"]})

        def anythingElse(self):
            # Caller must raise parse error first!
            self.endTagNoscript(impliedTagToken("noscript"))

    class AfterHeadPhase(Phase):
        def __init__(self, parser, tree):
            Phase.__init__(self, parser, tree)

            self.startTagHandler = _utils.MethodDispatcher([
                ("html", self.startTagHtml),
                ("body", self.startTagBody),
                ("frameset", self.startTagFrameset),
                (("base", "basefont", "bgsound", "link", "meta", "noframes",
                  "script", "style", "title"),
                 self.startTagFromHead),
                ("head", self.startTagHead)
            ])
            self.startTagHandler.default = self.startTagOther
            self.endTagHandler = _utils.MethodDispatcher([(("body", "html", "br"),
                                                           self.endTagHtmlBodyBr)])
            self.endTagHandler.default = self.endTagOther

        def processEOF(self):
            self.anythingElse()
            return True

        def processCharacters(self, token):
            self.anythingElse()
            return token

        def startTagHtml(self, token):
            return self.parser.phases["inBody"].processStartTag(token)

        def startTagBody(self, token):
            self.parser.framesetOK = False
            self.tree.insertElement(token)
            self.parser.phase = self.parser.phases["inBody"]

        def startTagFrameset(self, token):
            self.tree.insertElement(token)
            self.parser.phase = self.parser.phases["inFrameset"]

        def startTagFromHead(self, token):
            self.parser.parseError("unexpected-start-tag-out-of-my-head",
                                   {"name": token["name"]})
            self.tree.openElements.append(self.tree.headPointer)
            self.parser.phases["inHead"].processStartTag(token)
            for node in self.tree.openElements[::-1]:
                if node.name == "head":
                    self.tree.openElements.remove(node)
                    break

        def startTagHead(self, token):
            self.parser.parseError("unexpected-start-tag", {"name": token["name"]})

        def startTagOther(self, token):
            self.anythingElse()
            return token

        def endTagHtmlBodyBr(self, token):
            self.anythingElse()
            return token

        def endTagOther(self, token):
            self.parser.parseError("unexpected-end-tag", {"name": token["name"]})

        def anythingElse(self):
            self.tree.insertElement(impliedTagToken("body", "StartTag"))
            self.parser.phase = self.parser.phases["inBody"]
            self.parser.framesetOK = True

    class InBodyPhase(Phase):
        # http://www.whatwg.org/specs/web-apps/current-work/#parsing-main-inbody
        # the really-really-really-very crazy mode
        def __init__(self, parser, tree):
            Phase.__init__(self, parser, tree)

            # Set this to the default handler
            self.processSpaceCharacters = self.processSpaceCharactersNonPre

            self.startTagHandler = _utils.MethodDispatcher([
                ("html", self.startTagHtml),
                (("base", "basefont", "bgsound", "command", "link", "meta",
                  "script", "style", "title"),
                 self.startTagProcessInHead),
                ("body", self.startTagBody),
                ("frameset", self.startTagFrameset),
                (("address", "article", "aside", "blockquote", "center", "details",
                  "dir", "div", "dl", "fieldset", "figcaption", "figure", "footer",
                  "header", "hgroup", "main", "menu", "nav", "ol", "p", "section",
                  "summary", "ul"),
                 self.startTagCloseP),
                (headingElements, self.startTagHeading),
                (("pre", "listing"), self.startTagPreListing),
                ("form", self.startTagForm),
                (("li", "dd", "dt"), self.startTagListItem),
                ("plaintext", self.startTagPlaintext),
                ("a", self.startTagA),
                (("b", "big", "code", "em", "font", "i", "s", "small", "strike",
                  "strong", "tt", "u"), self.startTagFormatting),
                ("nobr", self.startTagNobr),
                ("button", self.startTagButton),
                (("applet", "marquee", "object"), self.startTagAppletMarqueeObject),
                ("xmp", self.startTagXmp),
                ("table", self.startTagTable),
                (("area", "br", "embed", "img", "keygen", "wbr"),
                 self.startTagVoidFormatting),
                (("param", "source", "track"), self.startTagParamSource),
                ("input", self.startTagInput),
                ("hr", self.startTagHr),
                ("image", self.startTagImage),
                ("isindex", self.startTagIsIndex),
                ("textarea",
                 self.startTagTextarea),
                ("iframe", self.startTagIFrame),
                ("noscript", self.startTagNoscript),
                (("noembed", "noframes"), self.startTagRawtext),
                ("select", self.startTagSelect),
                (("rp", "rt"), self.startTagRpRt),
                (("option", "optgroup"), self.startTagOpt),
                (("math"), self.startTagMath),
                (("svg"), self.startTagSvg),
                (("caption", "col", "colgroup", "frame", "head",
                  "tbody", "td", "tfoot", "th", "thead",
                  "tr"), self.startTagMisplaced)
            ])
            self.startTagHandler.default = self.startTagOther

            self.endTagHandler = _utils.MethodDispatcher([
                ("body", self.endTagBody),
                ("html", self.endTagHtml),
                (("address", "article", "aside", "blockquote", "button", "center",
                  "details", "dialog", "dir", "div", "dl", "fieldset", "figcaption",
                  "figure", "footer", "header", "hgroup", "listing", "main", "menu",
                  "nav", "ol", "pre", "section", "summary", "ul"),
                 self.endTagBlock),
                ("form", self.endTagForm),
                ("p", self.endTagP),
                (("dd", "dt", "li"), self.endTagListItem),
                (headingElements, self.endTagHeading),
                (("a", "b", "big", "code", "em", "font", "i", "nobr", "s", "small",
                  "strike", "strong", "tt", "u"), self.endTagFormatting),
                (("applet", "marquee", "object"), self.endTagAppletMarqueeObject),
                ("br", self.endTagBr),
            ])
            self.endTagHandler.default = self.endTagOther

        def isMatchingFormattingElement(self, node1, node2):
            return (node1.name == node2.name and
                    node1.namespace == node2.namespace and
                    node1.attributes == node2.attributes)

        # helper
        def addFormattingElement(self, token):
            self.tree.insertElement(token)
            element = self.tree.openElements[-1]

            matchingElements = []
            for node in self.tree.activeFormattingElements[::-1]:
                if node is Marker:
                    break
                elif self.isMatchingFormattingElement(node, element):
                    matchingElements.append(node)

            assert len(matchingElements) <= 3
            if len(matchingElements) == 3:
                self.tree.activeFormattingElements.remove(matchingElements[-1])
            self.tree.activeFormattingElements.append(element)

        # the real deal
        def processEOF(self):
            allowed_elements = frozenset(("dd", "dt", "li", "p", "tbody", "td",
                                          "tfoot", "th", "thead", "tr", "body",
                                          "html"))
            for node in self.tree.openElements[::-1]:
                if node.name not in allowed_elements:
                    self.parser.parseError("expected-closing-tag-but-got-eof")
                    break
            # Stop parsing

        def processSpaceCharactersDropNewline(self, token):
            # Sometimes (start of <pre>, <listing>, and